[ 542.605294] env[62519]: DEBUG os_vif [-] Loaded VIF plugin class '' with name 'linux_bridge' {{(pid=62519) initialize /opt/stack/data/venv/lib/python3.10/site-packages/os_vif/__init__.py:44}} [ 542.605742] env[62519]: DEBUG os_vif [-] Loaded VIF plugin class '' with name 'noop' {{(pid=62519) initialize /opt/stack/data/venv/lib/python3.10/site-packages/os_vif/__init__.py:44}} [ 542.605742] env[62519]: DEBUG os_vif [-] Loaded VIF plugin class '' with name 'ovs' {{(pid=62519) initialize /opt/stack/data/venv/lib/python3.10/site-packages/os_vif/__init__.py:44}} [ 542.606141] env[62519]: INFO os_vif [-] Loaded VIF plugins: linux_bridge, noop, ovs [ 542.707167] env[62519]: DEBUG oslo_concurrency.processutils [-] Running cmd (subprocess): grep -F node.session.scan /sbin/iscsiadm {{(pid=62519) execute /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/processutils.py:383}} [ 542.717045] env[62519]: DEBUG oslo_concurrency.processutils [-] CMD "grep -F node.session.scan /sbin/iscsiadm" returned: 0 in 0.010s {{(pid=62519) execute /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/processutils.py:421}} [ 543.321163] env[62519]: INFO nova.virt.driver [None req-d021a9dd-07aa-45b4-a0f4-2269c7f00007 None None] Loading compute driver 'vmwareapi.VMwareVCDriver' [ 543.392734] env[62519]: DEBUG oslo_concurrency.lockutils [-] Acquiring lock "oslo_vmware_api_lock" by "oslo_vmware.api.VMwareAPISession._create_session" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 543.392999] env[62519]: DEBUG oslo_concurrency.lockutils [-] Lock "oslo_vmware_api_lock" acquired by "oslo_vmware.api.VMwareAPISession._create_session" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 543.393131] env[62519]: DEBUG oslo_vmware.service [-] Creating suds client with soap_url='https://vc1.osci.c.eu-de-1.cloud.sap:443/sdk' and wsdl_url='https://vc1.osci.c.eu-de-1.cloud.sap:443/sdk/vimService.wsdl' {{(pid=62519) __init__ /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:242}} [ 546.698660] env[62519]: DEBUG oslo_vmware.service [-] Invoking ServiceInstance.RetrieveServiceContent with opID=oslo.vmware-9a955d44-1121-4c52-b6b0-d4794480421e {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 546.716101] env[62519]: DEBUG oslo_vmware.api [-] Logging into host: vc1.osci.c.eu-de-1.cloud.sap. {{(pid=62519) _create_session /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:242}} [ 546.716235] env[62519]: DEBUG oslo_vmware.service [-] Invoking SessionManager.Login with opID=oslo.vmware-d6cf0bd3-bad5-4303-b6e0-179c01f82e36 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 546.766427] env[62519]: INFO oslo_vmware.api [-] Successfully established new session; session ID is 63ab9. 
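The "Acquiring lock" / "acquired" / "released" entries around the vCenter session setup come from oslo.concurrency's lockutils. A minimal sketch of that locking pattern, assuming plain oslo.concurrency and an illustrative function name (this is not Nova's actual code):

    from oslo_concurrency import lockutils

    # Decorator form: only one caller at a time runs the wrapped function.
    # The wrapper logs the "Acquiring lock ... by", "acquired ... waited"
    # and "released ... held" DEBUG lines seen in this log.
    @lockutils.synchronized('oslo_vmware_api_lock')
    def _create_session():
        pass  # placeholder body

    # Equivalent explicit form; the context manager logs the
    # "Acquiring lock" / "Acquired lock" / "Releasing lock" variants
    # (compare the "singleton_lock" entries later in this log).
    with lockutils.lock('oslo_vmware_api_lock'):
        pass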
[ 546.766548] env[62519]: DEBUG oslo_concurrency.lockutils [-] Lock "oslo_vmware_api_lock" "released" by "oslo_vmware.api.VMwareAPISession._create_session" :: held 3.374s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 546.767150] env[62519]: INFO nova.virt.vmwareapi.driver [None req-d021a9dd-07aa-45b4-a0f4-2269c7f00007 None None] VMware vCenter version: 7.0.3 [ 546.770540] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7319ab16-e7fb-446d-824d-10aa53d22e69 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 546.788444] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1795aae2-f81b-45af-88aa-0fadc3b24689 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 546.794225] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d437e7d-8e6e-46ab-bed7-46dc0b8d47dd {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 546.800875] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a2f05dc-2e95-4a25-a947-66f04e1a3b53 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 546.813846] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d86963bb-34ee-44bf-85f0-e437f1ab3aae {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 546.819655] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db35f06e-d765-4089-8f63-e31b9b7b2b67 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 546.850842] env[62519]: DEBUG oslo_vmware.service [-] Invoking ExtensionManager.FindExtension with opID=oslo.vmware-00483f56-27d4-4c21-ad48-11791ea3a4b3 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 546.855901] env[62519]: DEBUG nova.virt.vmwareapi.driver [None req-d021a9dd-07aa-45b4-a0f4-2269c7f00007 None None] Extension org.openstack.compute already exists. {{(pid=62519) _register_openstack_extension /opt/stack/nova/nova/virt/vmwareapi/driver.py:228}} [ 546.858683] env[62519]: INFO nova.compute.provider_config [None req-d021a9dd-07aa-45b4-a0f4-2269c7f00007 None None] No provider configs found in /etc/nova/provider_config/. If files are present, ensure the Nova process has access. 
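For reference, a hedged sketch of how such a vCenter session is opened with oslo.vmware. Only the host name is taken from the log; the credentials, retry values, and the exact constructor keyword names are assumptions based on oslo.vmware's VMwareAPISession, not Nova's own wrapper:

    from oslo_vmware import api

    session = api.VMwareAPISession(
        host='vc1.osci.c.eu-de-1.cloud.sap',            # vCenter host from the log above
        server_username='administrator@vsphere.local',  # placeholder
        server_password='***',                          # placeholder
        api_retry_count=10,                             # placeholder
        task_poll_interval=0.5,                         # placeholder
        port=443,
    )
    # Session creation performs the SessionManager.Login and
    # RetrieveServiceContent calls logged above; the vCenter version
    # ("7.0.3" here) is then available from the cached service content.
    print(session.vim.service_content.about.version)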
[ 547.361866] env[62519]: DEBUG nova.context [None req-d021a9dd-07aa-45b4-a0f4-2269c7f00007 None None] Found 2 cells: 00000000-0000-0000-0000-000000000000(cell0),b795167d-faa6-4c45-8625-5c633876d417(cell1) {{(pid=62519) load_cells /opt/stack/nova/nova/context.py:464}} [ 547.364326] env[62519]: DEBUG oslo_concurrency.lockutils [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] Acquiring lock "00000000-0000-0000-0000-000000000000" by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 547.364561] env[62519]: DEBUG oslo_concurrency.lockutils [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] Lock "00000000-0000-0000-0000-000000000000" acquired by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 547.365274] env[62519]: DEBUG oslo_concurrency.lockutils [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] Lock "00000000-0000-0000-0000-000000000000" "released" by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" :: held 0.001s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 547.365788] env[62519]: DEBUG oslo_concurrency.lockutils [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] Acquiring lock "b795167d-faa6-4c45-8625-5c633876d417" by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 547.366035] env[62519]: DEBUG oslo_concurrency.lockutils [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] Lock "b795167d-faa6-4c45-8625-5c633876d417" acquired by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 547.367126] env[62519]: DEBUG oslo_concurrency.lockutils [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] Lock "b795167d-faa6-4c45-8625-5c633876d417" "released" by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" :: held 0.001s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 547.389022] env[62519]: INFO dbcounter [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] Registered counter for database nova_cell0 [ 547.397193] env[62519]: INFO dbcounter [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] Registered counter for database nova_cell1 [ 547.400495] env[62519]: DEBUG oslo_db.sqlalchemy.engines [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] MySQL server mode set to STRICT_TRANS_TABLES,STRICT_ALL_TABLES,NO_ZERO_IN_DATE,NO_ZERO_DATE,ERROR_FOR_DIVISION_BY_ZERO,TRADITIONAL,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION {{(pid=62519) _check_effective_sql_mode /opt/stack/data/venv/lib/python3.10/site-packages/oslo_db/sqlalchemy/engines.py:342}} [ 547.400844] env[62519]: DEBUG oslo_db.sqlalchemy.engines [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] MySQL server mode set to STRICT_TRANS_TABLES,STRICT_ALL_TABLES,NO_ZERO_IN_DATE,NO_ZERO_DATE,ERROR_FOR_DIVISION_BY_ZERO,TRADITIONAL,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION {{(pid=62519) _check_effective_sql_mode 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_db/sqlalchemy/engines.py:342}} [ 547.405745] env[62519]: ERROR nova.db.main.api [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] No DB access allowed in nova-compute: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 547.405745] env[62519]: result = function(*args, **kwargs) [ 547.405745] env[62519]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 547.405745] env[62519]: return func(*args, **kwargs) [ 547.405745] env[62519]: File "/opt/stack/nova/nova/context.py", line 422, in gather_result [ 547.405745] env[62519]: result = fn(*args, **kwargs) [ 547.405745] env[62519]: File "/opt/stack/nova/nova/db/main/api.py", line 179, in wrapper [ 547.405745] env[62519]: return f(*args, **kwargs) [ 547.405745] env[62519]: File "/opt/stack/nova/nova/objects/service.py", line 556, in _db_service_get_minimum_version [ 547.405745] env[62519]: return db.service_get_minimum_version(context, binaries) [ 547.405745] env[62519]: File "/opt/stack/nova/nova/db/main/api.py", line 238, in wrapper [ 547.405745] env[62519]: _check_db_access() [ 547.405745] env[62519]: File "/opt/stack/nova/nova/db/main/api.py", line 188, in _check_db_access [ 547.405745] env[62519]: stacktrace = ''.join(traceback.format_stack()) [ 547.405745] env[62519]: [ 547.406480] env[62519]: ERROR nova.db.main.api [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] No DB access allowed in nova-compute: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 547.406480] env[62519]: result = function(*args, **kwargs) [ 547.406480] env[62519]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 547.406480] env[62519]: return func(*args, **kwargs) [ 547.406480] env[62519]: File "/opt/stack/nova/nova/context.py", line 422, in gather_result [ 547.406480] env[62519]: result = fn(*args, **kwargs) [ 547.406480] env[62519]: File "/opt/stack/nova/nova/db/main/api.py", line 179, in wrapper [ 547.406480] env[62519]: return f(*args, **kwargs) [ 547.406480] env[62519]: File "/opt/stack/nova/nova/objects/service.py", line 556, in _db_service_get_minimum_version [ 547.406480] env[62519]: return db.service_get_minimum_version(context, binaries) [ 547.406480] env[62519]: File "/opt/stack/nova/nova/db/main/api.py", line 238, in wrapper [ 547.406480] env[62519]: _check_db_access() [ 547.406480] env[62519]: File "/opt/stack/nova/nova/db/main/api.py", line 188, in _check_db_access [ 547.406480] env[62519]: stacktrace = ''.join(traceback.format_stack()) [ 547.406480] env[62519]: [ 547.406880] env[62519]: WARNING nova.objects.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] Failed to get minimum service version for cell b795167d-faa6-4c45-8625-5c633876d417 [ 547.407030] env[62519]: WARNING nova.objects.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] Failed to get minimum service version for cell 00000000-0000-0000-0000-000000000000 [ 547.407462] env[62519]: DEBUG oslo_concurrency.lockutils [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] Acquiring lock "singleton_lock" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 547.407621] env[62519]: DEBUG oslo_concurrency.lockutils [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] Acquired lock "singleton_lock" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 
547.407855] env[62519]: DEBUG oslo_concurrency.lockutils [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] Releasing lock "singleton_lock" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 547.408212] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] Full set of CONF: {{(pid=62519) _wait_for_exit_or_signal /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/service.py:363}} [ 547.408383] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] ******************************************************************************** {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2804}} [ 547.408518] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] Configuration options gathered from: {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2805}} [ 547.408656] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] command line args: ['--config-file', '/etc/nova/nova.conf', '--config-file', '/etc/nova/nova-cpu-common.conf', '--config-file', '/etc/nova/nova-cpu-1.conf'] {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2806}} [ 547.408850] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] config files: ['/etc/nova/nova.conf', '/etc/nova/nova-cpu-common.conf', '/etc/nova/nova-cpu-1.conf'] {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2807}} [ 547.408982] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] ================================================================================ {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2809}} [ 547.409207] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] allow_resize_to_same_host = True {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 547.409379] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] arq_binding_timeout = 300 {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 547.409511] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] backdoor_port = None {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 547.409639] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] backdoor_socket = None {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 547.409801] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] block_device_allocate_retries = 60 {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 547.409987] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] block_device_allocate_retries_interval = 3 {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 547.410191] env[62519]: DEBUG 
oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] cert = self.pem {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 547.410360] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] compute_driver = vmwareapi.VMwareVCDriver {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 547.410528] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] compute_monitors = [] {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 547.410694] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] config_dir = [] {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 547.410860] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] config_drive_format = iso9660 {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 547.410996] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] config_file = ['/etc/nova/nova.conf', '/etc/nova/nova-cpu-common.conf', '/etc/nova/nova-cpu-1.conf'] {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 547.411182] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] config_source = [] {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 547.411372] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] console_host = devstack {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 547.411540] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] control_exchange = nova {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 547.411699] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] cpu_allocation_ratio = None {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 547.411891] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] daemon = False {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 547.412079] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] debug = True {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 547.412243] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] default_access_ip_network_name = None {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 547.412408] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] default_availability_zone = nova {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 547.412564] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] default_ephemeral_format = 
None {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 547.412722] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] default_green_pool_size = 1000 {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 547.412974] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] default_log_levels = ['amqp=WARN', 'amqplib=WARN', 'boto=WARN', 'qpid=WARN', 'sqlalchemy=WARN', 'suds=INFO', 'oslo.messaging=INFO', 'oslo_messaging=INFO', 'iso8601=WARN', 'requests.packages.urllib3.connectionpool=WARN', 'urllib3.connectionpool=WARN', 'websocket=WARN', 'requests.packages.urllib3.util.retry=WARN', 'urllib3.util.retry=WARN', 'keystonemiddleware=WARN', 'routes.middleware=WARN', 'stevedore=WARN', 'taskflow=WARN', 'keystoneauth=WARN', 'oslo.cache=INFO', 'oslo_policy=INFO', 'dogpile.core.dogpile=INFO', 'glanceclient=WARN', 'oslo.privsep.daemon=INFO'] {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 547.413180] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] default_schedule_zone = None {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 547.413341] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] disk_allocation_ratio = None {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 547.413501] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] enable_new_services = True {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 547.413677] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] enabled_apis = ['osapi_compute'] {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 547.413841] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] enabled_ssl_apis = [] {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 547.414011] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] flat_injected = False {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 547.414180] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] force_config_drive = False {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 547.414368] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] force_raw_images = True {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 547.414542] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] graceful_shutdown_timeout = 5 {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 547.414703] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] heal_instance_info_cache_interval = 60 {{(pid=62519) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 547.414918] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] host = cpu-1 {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 547.415104] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] initial_cpu_allocation_ratio = 4.0 {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 547.415271] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] initial_disk_allocation_ratio = 1.0 {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 547.415433] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] initial_ram_allocation_ratio = 1.0 {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 547.415647] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] injected_network_template = /opt/stack/nova/nova/virt/interfaces.template {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 547.415812] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] instance_build_timeout = 0 {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 547.416031] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] instance_delete_interval = 300 {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 547.416253] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] instance_format = [instance: %(uuid)s] {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 547.416439] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] instance_name_template = instance-%08x {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 547.416606] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] instance_usage_audit = False {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 547.416779] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] instance_usage_audit_period = month {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 547.416941] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] instance_uuid_format = [instance: %(uuid)s] {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 547.417119] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] instances_path = /opt/stack/data/nova/instances {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 547.417306] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] internal_service_availability_zone = internal {{(pid=62519) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 547.417474] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] key = None {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 547.417633] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] live_migration_retry_count = 30 {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 547.417799] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] log_color = False {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 547.417962] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] log_config_append = None {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 547.418142] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] log_date_format = %Y-%m-%d %H:%M:%S {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 547.418305] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] log_dir = None {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 547.418461] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] log_file = None {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 547.418590] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] log_options = True {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 547.418747] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] log_rotate_interval = 1 {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 547.418919] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] log_rotate_interval_type = days {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 547.419214] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] log_rotation_type = none {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 547.419361] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] logging_context_format_string = %(color)s%(levelname)s %(name)s [%(global_request_id)s %(request_id)s %(project_name)s %(user_name)s%(color)s] %(instance)s%(color)s%(message)s {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 547.419491] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] logging_debug_format_suffix = {{(pid=%(process)d) %(funcName)s %(pathname)s:%(lineno)d}} {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 547.419662] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] 
logging_default_format_string = %(color)s%(levelname)s %(name)s [-%(color)s] %(instance)s%(color)s%(message)s {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 547.419825] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] logging_exception_prefix = ERROR %(name)s %(instance)s {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 547.419951] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] logging_user_identity_format = %(user)s %(project)s %(domain)s %(system_scope)s %(user_domain)s %(project_domain)s {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 547.420135] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] long_rpc_timeout = 1800 {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 547.420316] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] max_concurrent_builds = 10 {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 547.420493] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] max_concurrent_live_migrations = 1 {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 547.420653] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] max_concurrent_snapshots = 5 {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 547.420812] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] max_local_block_devices = 3 {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 547.420969] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] max_logfile_count = 30 {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 547.421149] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] max_logfile_size_mb = 200 {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 547.421309] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] maximum_instance_delete_attempts = 5 {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 547.421476] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] metadata_listen = 0.0.0.0 {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 547.421643] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] metadata_listen_port = 8775 {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 547.421811] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] metadata_workers = 2 {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 547.421990] env[62519]: DEBUG oslo_service.service 
[None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] migrate_max_retries = -1 {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 547.422196] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] mkisofs_cmd = genisoimage {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 547.422407] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] my_block_storage_ip = 10.180.1.21 {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 547.422542] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] my_ip = 10.180.1.21 {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 547.422747] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] my_shared_fs_storage_ip = 10.180.1.21 {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 547.422910] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] network_allocate_retries = 0 {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 547.423102] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] non_inheritable_image_properties = ['cache_in_nova', 'bittorrent'] {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 547.423271] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] osapi_compute_listen = 0.0.0.0 {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 547.423467] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] osapi_compute_listen_port = 8774 {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 547.423640] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] osapi_compute_unique_server_name_scope = {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 547.423812] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] osapi_compute_workers = 2 {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 547.424005] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] password_length = 12 {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 547.424196] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] periodic_enable = True {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 547.424361] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] periodic_fuzzy_delay = 60 {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 547.424531] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] pointer_model = usbtablet 
{{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 547.424698] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] preallocate_images = none {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 547.424859] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] publish_errors = False {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 547.424991] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] pybasedir = /opt/stack/nova {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 547.425163] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] ram_allocation_ratio = None {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 547.425324] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] rate_limit_burst = 0 {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 547.425492] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] rate_limit_except_level = CRITICAL {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 547.425652] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] rate_limit_interval = 0 {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 547.425811] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] reboot_timeout = 0 {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 547.426012] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] reclaim_instance_interval = 0 {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 547.426186] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] record = None {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 547.426373] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] reimage_timeout_per_gb = 60 {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 547.426554] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] report_interval = 120 {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 547.426720] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] rescue_timeout = 0 {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 547.426880] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] reserved_host_cpus = 0 {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 547.427083] env[62519]: DEBUG oslo_service.service [None 
req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] reserved_host_disk_mb = 0 {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 547.427253] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] reserved_host_memory_mb = 512 {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 547.427568] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] reserved_huge_pages = None {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 547.427798] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] resize_confirm_window = 0 {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 547.427971] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] resize_fs_using_block_device = False {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 547.428150] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] resume_guests_state_on_host_boot = False {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 547.428322] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] rootwrap_config = /etc/nova/rootwrap.conf {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 547.428485] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] rpc_response_timeout = 60 {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 547.428643] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] run_external_periodic_tasks = True {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 547.428808] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] running_deleted_instance_action = reap {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 547.429019] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] running_deleted_instance_poll_interval = 1800 {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 547.429143] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] running_deleted_instance_timeout = 0 {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 547.429300] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] scheduler_instance_sync_interval = 120 {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 547.429495] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] service_down_time = 720 {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 547.429670] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] 
servicegroup_driver = db {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 547.429826] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] shell_completion = None {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 547.430011] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] shelved_offload_time = 0 {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 547.430204] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] shelved_poll_interval = 3600 {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 547.430375] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] shutdown_timeout = 0 {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 547.430536] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] source_is_ipv6 = False {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 547.430696] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] ssl_only = False {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 547.430948] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] state_path = /opt/stack/data/n-cpu-1 {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 547.431135] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] sync_power_state_interval = 600 {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 547.431299] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] sync_power_state_pool_size = 1000 {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 547.431467] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] syslog_log_facility = LOG_USER {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 547.431624] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] tempdir = None {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 547.431781] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] timeout_nbd = 10 {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 547.431945] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] transport_url = **** {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 547.432122] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] update_resources_interval = 0 {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 547.432285] env[62519]: DEBUG 
oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] use_cow_images = True {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 547.432487] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] use_eventlog = False {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 547.432684] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] use_journal = False {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 547.432851] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] use_json = False {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 547.433050] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] use_rootwrap_daemon = False {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 547.433232] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] use_stderr = False {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 547.433396] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] use_syslog = False {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 547.433553] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] vcpu_pin_set = None {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 547.433722] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] vif_plugging_is_fatal = True {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 547.433896] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] vif_plugging_timeout = 300 {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 547.434067] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] virt_mkfs = [] {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 547.434232] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] volume_usage_poll_interval = 0 {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 547.434442] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] watch_log_file = False {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 547.434624] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] web = /usr/share/spice-html5 {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 547.434805] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] os_brick.lock_path = /opt/stack/data/n-cpu-1 {{(pid=62519) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.434981] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] os_brick.wait_mpath_device_attempts = 4 {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.435217] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] os_brick.wait_mpath_device_interval = 1 {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.435409] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] oslo_concurrency.disable_process_locking = False {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.435994] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] oslo_concurrency.lock_path = /opt/stack/data/n-cpu-1 {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.436212] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] oslo_messaging_metrics.metrics_buffer_size = 1000 {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.436391] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] oslo_messaging_metrics.metrics_enabled = False {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.436567] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] oslo_messaging_metrics.metrics_process_name = {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.436741] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] oslo_messaging_metrics.metrics_socket_file = /var/tmp/metrics_collector.sock {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.436941] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] oslo_messaging_metrics.metrics_thread_stop_timeout = 10 {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.437103] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] api.auth_strategy = keystone {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.437283] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] api.compute_link_prefix = None {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.437463] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] api.config_drive_skip_versions = 1.0 2007-01-19 2007-03-01 2007-08-29 2007-10-10 2007-12-15 2008-02-01 2008-09-01 {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.437639] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] api.dhcp_domain = novalocal {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 
547.437812] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] api.enable_instance_password = True {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.437979] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] api.glance_link_prefix = None {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.438195] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] api.instance_list_cells_batch_fixed_size = 100 {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.438383] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] api.instance_list_cells_batch_strategy = distributed {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.438573] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] api.instance_list_per_project_cells = False {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.438752] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] api.list_records_by_skipping_down_cells = True {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.438920] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] api.local_metadata_per_cell = False {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.439106] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] api.max_limit = 1000 {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.439280] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] api.metadata_cache_expiration = 15 {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.439457] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] api.neutron_default_tenant_id = default {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.439632] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] api.response_validation = warn {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.439804] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] api.use_neutron_default_nets = False {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.439976] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] api.vendordata_dynamic_connect_timeout = 5 {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.440159] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] api.vendordata_dynamic_failure_fatal = False {{(pid=62519) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.440328] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] api.vendordata_dynamic_read_timeout = 5 {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.440502] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] api.vendordata_dynamic_ssl_certfile = {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.440675] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] api.vendordata_dynamic_targets = [] {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.440839] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] api.vendordata_jsonfile_path = None {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.441032] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] api.vendordata_providers = ['StaticJSON'] {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.441263] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] cache.backend = dogpile.cache.memcached {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.441439] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] cache.backend_argument = **** {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.441630] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] cache.backend_expiration_time = None {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.441812] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] cache.config_prefix = cache.oslo {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.441982] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] cache.dead_timeout = 60.0 {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.442166] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] cache.debug_cache_backend = False {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.442333] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] cache.enable_retry_client = False {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.442502] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] cache.enable_socket_keepalive = False {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.442666] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] cache.enabled = True {{(pid=62519) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.442830] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] cache.enforce_fips_mode = False {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.442995] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] cache.expiration_time = 600 {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.443178] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] cache.hashclient_retry_attempts = 2 {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.443345] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] cache.hashclient_retry_delay = 1.0 {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.443510] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] cache.memcache_dead_retry = 300 {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.443669] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] cache.memcache_password = **** {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.443873] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] cache.memcache_pool_connection_get_timeout = 10 {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.444067] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] cache.memcache_pool_flush_on_reconnect = False {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.444242] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] cache.memcache_pool_maxsize = 10 {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.444409] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] cache.memcache_pool_unused_timeout = 60 {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.444572] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] cache.memcache_sasl_enabled = False {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.444753] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] cache.memcache_servers = ['localhost:11211'] {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.444921] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] cache.memcache_socket_timeout = 1.0 {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.445096] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] cache.memcache_username = None {{(pid=62519) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.445265] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] cache.proxies = [] {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.445429] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] cache.redis_db = 0 {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.445589] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] cache.redis_password = **** {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.445759] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] cache.redis_sentinel_service_name = mymaster {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.445954] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] cache.redis_sentinels = ['localhost:26379'] {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.446150] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] cache.redis_server = localhost:6379 {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.446320] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] cache.redis_socket_timeout = 1.0 {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.446481] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] cache.redis_username = None {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.446645] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] cache.retry_attempts = 2 {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.446828] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] cache.retry_delay = 0.0 {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.447038] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] cache.socket_keepalive_count = 1 {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.447219] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] cache.socket_keepalive_idle = 1 {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.447385] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] cache.socket_keepalive_interval = 1 {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.447544] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] cache.tls_allowed_ciphers = None {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.447705] 
env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] cache.tls_cafile = None {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.447865] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] cache.tls_certfile = None {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.448040] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] cache.tls_enabled = False {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.448207] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] cache.tls_keyfile = None {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.448376] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] cinder.auth_section = None {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.448550] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] cinder.auth_type = password {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.448712] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] cinder.cafile = None {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.448887] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] cinder.catalog_info = volumev3::publicURL {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.449064] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] cinder.certfile = None {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.449234] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] cinder.collect_timing = False {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.449399] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] cinder.cross_az_attach = True {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.449563] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] cinder.debug = False {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.449725] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] cinder.endpoint_template = None {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.449921] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] cinder.http_retries = 3 {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.450131] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] cinder.insecure = False {{(pid=62519) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.450302] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] cinder.keyfile = None {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.450479] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] cinder.os_region_name = RegionOne {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.450645] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] cinder.split_loggers = False {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.450808] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] cinder.timeout = None {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.450983] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] compute.consecutive_build_service_disable_threshold = 10 {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.451164] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] compute.cpu_dedicated_set = None {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.451327] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] compute.cpu_shared_set = None {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.451492] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] compute.image_type_exclude_list = [] {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.451659] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] compute.live_migration_wait_for_vif_plug = True {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.451823] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] compute.max_concurrent_disk_ops = 0 {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.451988] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] compute.max_disk_devices_to_attach = -1 {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.452169] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] compute.packing_host_numa_cells_allocation_strategy = False {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.452339] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] compute.provider_config_location = /etc/nova/provider_config/ {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.452503] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] compute.resource_provider_association_refresh = 
300 {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.452664] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] compute.sharing_providers_max_uuids_per_request = 200 {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.452862] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] compute.shutdown_retry_interval = 10 {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.453138] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] compute.vmdk_allowed_types = ['streamOptimized', 'monolithicSparse'] {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.453345] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] conductor.workers = 2 {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.453531] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] console.allowed_origins = [] {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.453696] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] console.ssl_ciphers = None {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.453876] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] console.ssl_minimum_version = default {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.454059] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] consoleauth.enforce_session_timeout = False {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.454234] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] consoleauth.token_ttl = 600 {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.454405] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] cyborg.cafile = None {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.454564] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] cyborg.certfile = None {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.454730] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] cyborg.collect_timing = False {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.454892] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] cyborg.connect_retries = None {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.455072] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] cyborg.connect_retry_delay = None {{(pid=62519) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.455237] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] cyborg.endpoint_override = None {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.455400] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] cyborg.insecure = False {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.455557] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] cyborg.keyfile = None {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.455715] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] cyborg.max_version = None {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.455916] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] cyborg.min_version = None {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.456134] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] cyborg.region_name = None {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.456307] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] cyborg.retriable_status_codes = None {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.456466] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] cyborg.service_name = None {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.456636] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] cyborg.service_type = accelerator {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.456800] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] cyborg.split_loggers = False {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.456959] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] cyborg.status_code_retries = None {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.457203] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] cyborg.status_code_retry_delay = None {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.457476] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] cyborg.timeout = None {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.457476] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] cyborg.valid_interfaces = ['internal', 'public'] {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.457722] env[62519]: 
DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] cyborg.version = None {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.457904] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] database.backend = sqlalchemy {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.460195] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] database.connection = **** {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.460195] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] database.connection_debug = 0 {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.460195] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] database.connection_parameters = {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.460195] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] database.connection_recycle_time = 3600 {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.460195] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] database.connection_trace = False {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.460195] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] database.db_inc_retry_interval = True {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.460195] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] database.db_max_retries = 20 {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.460446] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] database.db_max_retry_interval = 10 {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.460446] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] database.db_retry_interval = 1 {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.460446] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] database.max_overflow = 50 {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.460446] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] database.max_pool_size = 5 {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.460446] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] database.max_retries = 10 {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.460446] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] 
database.mysql_sql_mode = TRADITIONAL {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.460446] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] database.mysql_wsrep_sync_wait = None {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.460657] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] database.pool_timeout = None {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.460657] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] database.retry_interval = 10 {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.460716] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] database.slave_connection = **** {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.460873] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] database.sqlite_synchronous = True {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.461049] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] database.use_db_reconnect = False {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.461234] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] api_database.backend = sqlalchemy {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.461404] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] api_database.connection = **** {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.461570] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] api_database.connection_debug = 0 {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.461738] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] api_database.connection_parameters = {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.461922] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] api_database.connection_recycle_time = 3600 {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.462171] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] api_database.connection_trace = False {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.462404] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] api_database.db_inc_retry_interval = True {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.462585] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] api_database.db_max_retries = 20 
{{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.462752] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] api_database.db_max_retry_interval = 10 {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.462921] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] api_database.db_retry_interval = 1 {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.463100] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] api_database.max_overflow = 50 {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.463269] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] api_database.max_pool_size = 5 {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.463433] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] api_database.max_retries = 10 {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.463603] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] api_database.mysql_sql_mode = TRADITIONAL {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.463764] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] api_database.mysql_wsrep_sync_wait = None {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.463925] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] api_database.pool_timeout = None {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.464107] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] api_database.retry_interval = 10 {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.464272] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] api_database.slave_connection = **** {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.464437] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] api_database.sqlite_synchronous = True {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.464613] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] devices.enabled_mdev_types = [] {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.464789] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] ephemeral_storage_encryption.cipher = aes-xts-plain64 {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.464991] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] ephemeral_storage_encryption.default_format = luks 
{{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.465205] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] ephemeral_storage_encryption.enabled = False {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.465474] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] ephemeral_storage_encryption.key_size = 512 {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.465682] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] glance.api_servers = None {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.465883] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] glance.cafile = None {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.466066] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] glance.certfile = None {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.466246] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] glance.collect_timing = False {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.466410] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] glance.connect_retries = None {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.466574] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] glance.connect_retry_delay = None {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.466739] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] glance.debug = False {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.466906] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] glance.default_trusted_certificate_ids = [] {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.467107] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] glance.enable_certificate_validation = False {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.467279] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] glance.enable_rbd_download = False {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.467439] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] glance.endpoint_override = None {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.467606] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] glance.insecure = False {{(pid=62519) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.467768] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] glance.keyfile = None {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.467930] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] glance.max_version = None {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.468127] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] glance.min_version = None {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.468299] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] glance.num_retries = 3 {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.468469] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] glance.rbd_ceph_conf = {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.468632] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] glance.rbd_connect_timeout = 5 {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.468800] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] glance.rbd_pool = {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.468965] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] glance.rbd_user = {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.469141] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] glance.region_name = None {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.469300] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] glance.retriable_status_codes = None {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.469458] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] glance.service_name = None {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.469632] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] glance.service_type = image {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.469794] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] glance.split_loggers = False {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.469953] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] glance.status_code_retries = None {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.470128] env[62519]: DEBUG oslo_service.service [None 
req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] glance.status_code_retry_delay = None {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.470289] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] glance.timeout = None {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.470469] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] glance.valid_interfaces = ['internal', 'public'] {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.470633] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] glance.verify_glance_signatures = False {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.470790] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] glance.version = None {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.470995] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] guestfs.debug = False {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.471173] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] manila.auth_section = None {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.471342] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] manila.auth_type = None {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.471502] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] manila.cafile = None {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.471660] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] manila.certfile = None {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.471826] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] manila.collect_timing = False {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.471983] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] manila.connect_retries = None {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.472161] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] manila.connect_retry_delay = None {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.472407] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] manila.endpoint_override = None {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.472602] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] manila.insecure = False {{(pid=62519) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.472767] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] manila.keyfile = None {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.472932] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] manila.max_version = None {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.473117] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] manila.min_version = None {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.473279] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] manila.region_name = None {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.473436] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] manila.retriable_status_codes = None {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.473593] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] manila.service_name = None {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.473763] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] manila.service_type = shared-file-system {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.473948] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] manila.share_apply_policy_timeout = 10 {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.474130] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] manila.split_loggers = False {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.474293] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] manila.status_code_retries = None {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.474451] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] manila.status_code_retry_delay = None {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.474610] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] manila.timeout = None {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.474788] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] manila.valid_interfaces = ['internal', 'public'] {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.474949] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] manila.version = None {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.475133] 
env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] mks.enabled = False {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.475509] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] mks.mksproxy_base_url = http://127.0.0.1:6090/ {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.475706] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] image_cache.manager_interval = 2400 {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.475904] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] image_cache.precache_concurrency = 1 {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.476095] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] image_cache.remove_unused_base_images = True {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.476275] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] image_cache.remove_unused_original_minimum_age_seconds = 86400 {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.476445] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] image_cache.remove_unused_resized_minimum_age_seconds = 3600 {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.476627] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] image_cache.subdirectory_name = _base {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.476806] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] ironic.api_max_retries = 60 {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.476983] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] ironic.api_retry_interval = 2 {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.477176] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] ironic.auth_section = None {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.477343] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] ironic.auth_type = None {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.477506] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] ironic.cafile = None {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.477663] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] ironic.certfile = None {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.477828] env[62519]: DEBUG 
oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] ironic.collect_timing = False {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.477994] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] ironic.conductor_group = None {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.478173] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] ironic.connect_retries = None {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.478363] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] ironic.connect_retry_delay = None {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.478531] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] ironic.endpoint_override = None {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.478697] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] ironic.insecure = False {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.478858] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] ironic.keyfile = None {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.479036] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] ironic.max_version = None {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.479203] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] ironic.min_version = None {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.479369] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] ironic.peer_list = [] {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.479527] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] ironic.region_name = None {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.479682] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] ironic.retriable_status_codes = None {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.479845] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] ironic.serial_console_state_timeout = 10 {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.480008] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] ironic.service_name = None {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.480191] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] ironic.service_type = baremetal 
{{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.480351] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] ironic.shard = None {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.480515] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] ironic.split_loggers = False {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.480673] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] ironic.status_code_retries = None {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.480830] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] ironic.status_code_retry_delay = None {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.480987] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] ironic.timeout = None {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.481182] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] ironic.valid_interfaces = ['internal', 'public'] {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.481365] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] ironic.version = None {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.481555] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] key_manager.backend = nova.keymgr.conf_key_mgr.ConfKeyManager {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.481728] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] key_manager.fixed_key = **** {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.481954] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] barbican.auth_endpoint = http://localhost/identity/v3 {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.482143] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] barbican.barbican_api_version = None {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.482308] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] barbican.barbican_endpoint = None {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.482478] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] barbican.barbican_endpoint_type = public {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.482637] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] barbican.barbican_region_name = None {{(pid=62519) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.482797] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] barbican.cafile = None {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.482955] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] barbican.certfile = None {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.483134] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] barbican.collect_timing = False {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.483298] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] barbican.insecure = False {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.483456] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] barbican.keyfile = None {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.483621] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] barbican.number_of_retries = 60 {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.483786] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] barbican.retry_delay = 1 {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.483943] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] barbican.send_service_user_token = False {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.484127] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] barbican.split_loggers = False {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.484302] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] barbican.timeout = None {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.484488] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] barbican.verify_ssl = True {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.484646] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] barbican.verify_ssl_path = None {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.484814] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] barbican_service_user.auth_section = None {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.484980] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] barbican_service_user.auth_type = None {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.485191] 
env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] barbican_service_user.cafile = None {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.485362] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] barbican_service_user.certfile = None {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.485531] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] barbican_service_user.collect_timing = False {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.485696] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] barbican_service_user.insecure = False {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.485877] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] barbican_service_user.keyfile = None {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.486053] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] barbican_service_user.split_loggers = False {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.486222] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] barbican_service_user.timeout = None {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.486393] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] vault.approle_role_id = **** {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.486555] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] vault.approle_secret_id = **** {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.486726] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] vault.kv_mountpoint = secret {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.486889] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] vault.kv_path = None {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.487071] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] vault.kv_version = 2 {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.487236] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] vault.namespace = None {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.487398] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] vault.root_token_id = **** {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.487558] env[62519]: DEBUG oslo_service.service [None 
req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] vault.ssl_ca_crt_file = None {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.487725] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] vault.timeout = 60.0 {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.487889] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] vault.use_ssl = False {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.488079] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] vault.vault_url = http://127.0.0.1:8200 {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.488258] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] keystone.cafile = None {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.488415] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] keystone.certfile = None {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.488579] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] keystone.collect_timing = False {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.488740] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] keystone.connect_retries = None {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.488898] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] keystone.connect_retry_delay = None {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.489069] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] keystone.endpoint_override = None {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.489233] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] keystone.insecure = False {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.489391] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] keystone.keyfile = None {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.489549] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] keystone.max_version = None {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.489705] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] keystone.min_version = None {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.489863] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] keystone.region_name = None {{(pid=62519) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.490031] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] keystone.retriable_status_codes = None {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.490193] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] keystone.service_name = None {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.490361] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] keystone.service_type = identity {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.490528] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] keystone.split_loggers = False {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.490692] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] keystone.status_code_retries = None {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.490852] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] keystone.status_code_retry_delay = None {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.491024] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] keystone.timeout = None {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.491209] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] keystone.valid_interfaces = ['internal', 'public'] {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.491371] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] keystone.version = None {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.491559] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] libvirt.ceph_mount_options = None {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.491939] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] libvirt.ceph_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.492156] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] libvirt.connection_uri = {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.492327] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] libvirt.cpu_mode = None {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.492500] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] libvirt.cpu_model_extra_flags = [] {{(pid=62519) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.492672] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] libvirt.cpu_models = [] {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.492846] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] libvirt.cpu_power_governor_high = performance {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.493032] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] libvirt.cpu_power_governor_low = powersave {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.493209] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] libvirt.cpu_power_management = False {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.493382] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] libvirt.cpu_power_management_strategy = cpu_state {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.493549] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] libvirt.device_detach_attempts = 8 {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.493713] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] libvirt.device_detach_timeout = 20 {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.493880] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] libvirt.disk_cachemodes = [] {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.494058] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] libvirt.disk_prefix = None {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.494233] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] libvirt.enabled_perf_events = [] {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.494408] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] libvirt.file_backed_memory = 0 {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.494578] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] libvirt.gid_maps = [] {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.494741] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] libvirt.hw_disk_discard = None {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.494901] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] libvirt.hw_machine_type = None {{(pid=62519) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.495086] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] libvirt.images_rbd_ceph_conf = {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.495258] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] libvirt.images_rbd_glance_copy_poll_interval = 15 {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.495464] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] libvirt.images_rbd_glance_copy_timeout = 600 {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.495672] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] libvirt.images_rbd_glance_store_name = {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.495868] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] libvirt.images_rbd_pool = rbd {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.496060] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] libvirt.images_type = default {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.496300] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] libvirt.images_volume_group = None {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.496405] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] libvirt.inject_key = False {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.496572] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] libvirt.inject_partition = -2 {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.496735] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] libvirt.inject_password = False {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.496926] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] libvirt.iscsi_iface = None {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.497116] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] libvirt.iser_use_multipath = False {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.497285] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] libvirt.live_migration_bandwidth = 0 {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.497450] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] libvirt.live_migration_completion_timeout = 800 {{(pid=62519) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.497614] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] libvirt.live_migration_downtime = 500 {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.497777] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] libvirt.live_migration_downtime_delay = 75 {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.497959] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] libvirt.live_migration_downtime_steps = 10 {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.498119] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] libvirt.live_migration_inbound_addr = None {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.498284] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] libvirt.live_migration_permit_auto_converge = False {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.498446] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] libvirt.live_migration_permit_post_copy = False {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.498605] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] libvirt.live_migration_scheme = None {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.498775] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] libvirt.live_migration_timeout_action = abort {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.498936] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] libvirt.live_migration_tunnelled = False {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.499112] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] libvirt.live_migration_uri = None {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.499277] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] libvirt.live_migration_with_native_tls = False {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.499438] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] libvirt.max_queues = None {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.499605] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] libvirt.mem_stats_period_seconds = 10 {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.499847] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] 
libvirt.migration_inbound_addr = 10.180.1.21 {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.500025] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] libvirt.nfs_mount_options = None {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.500341] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] libvirt.nfs_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.500519] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] libvirt.num_aoe_discover_tries = 3 {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.500688] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] libvirt.num_iser_scan_tries = 5 {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.500853] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] libvirt.num_memory_encrypted_guests = None {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.501030] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] libvirt.num_nvme_discover_tries = 5 {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.501201] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] libvirt.num_pcie_ports = 0 {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.501372] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] libvirt.num_volume_scan_tries = 5 {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.501539] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] libvirt.pmem_namespaces = [] {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.501701] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] libvirt.quobyte_client_cfg = None {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.502006] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] libvirt.quobyte_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.502194] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] libvirt.rbd_connect_timeout = 5 {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.502365] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] libvirt.rbd_destroy_volume_retries = 12 {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.502531] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a 
None None] libvirt.rbd_destroy_volume_retry_interval = 5 {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.502695] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] libvirt.rbd_secret_uuid = None {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.502854] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] libvirt.rbd_user = None {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.503035] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] libvirt.realtime_scheduler_priority = 1 {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.503216] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] libvirt.remote_filesystem_transport = ssh {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.503377] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] libvirt.rescue_image_id = None {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.503536] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] libvirt.rescue_kernel_id = None {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.503693] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] libvirt.rescue_ramdisk_id = None {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.503873] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] libvirt.rng_dev_path = /dev/urandom {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.504048] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] libvirt.rx_queue_size = None {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.504224] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] libvirt.smbfs_mount_options = {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.504524] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] libvirt.smbfs_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.504705] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] libvirt.snapshot_compression = False {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.504868] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] libvirt.snapshot_image_format = None {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.505113] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] 
libvirt.snapshots_directory = /opt/stack/data/nova/instances/snapshots {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.505283] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] libvirt.sparse_logical_volumes = False {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.505449] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] libvirt.swtpm_enabled = False {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.505619] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] libvirt.swtpm_group = tss {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.505785] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] libvirt.swtpm_user = tss {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.505978] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] libvirt.sysinfo_serial = unique {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.506160] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] libvirt.tb_cache_size = None {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.506320] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] libvirt.tx_queue_size = None {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.506483] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] libvirt.uid_maps = [] {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.506645] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] libvirt.use_virtio_for_bridges = True {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.506815] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] libvirt.virt_type = kvm {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.507017] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] libvirt.volume_clear = zero {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.507199] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] libvirt.volume_clear_size = 0 {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.507371] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] libvirt.volume_use_multipath = False {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.507531] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] libvirt.vzstorage_cache_path = None {{(pid=62519) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.507701] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] libvirt.vzstorage_log_path = /var/log/vstorage/%(cluster_name)s/nova.log.gz {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.507868] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] libvirt.vzstorage_mount_group = qemu {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.508055] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] libvirt.vzstorage_mount_opts = [] {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.508228] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] libvirt.vzstorage_mount_perms = 0770 {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.508521] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] libvirt.vzstorage_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.508703] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] libvirt.vzstorage_mount_user = stack {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.508870] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] libvirt.wait_soft_reboot_seconds = 120 {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.509056] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] neutron.auth_section = None {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.509235] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] neutron.auth_type = password {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.509396] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] neutron.cafile = None {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.509556] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] neutron.certfile = None {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.509718] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] neutron.collect_timing = False {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.509875] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] neutron.connect_retries = None {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.510043] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] neutron.connect_retry_delay = None {{(pid=62519) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.510216] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] neutron.default_floating_pool = public {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.510373] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] neutron.endpoint_override = None {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.510535] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] neutron.extension_sync_interval = 600 {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.510694] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] neutron.http_retries = 3 {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.510851] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] neutron.insecure = False {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.511016] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] neutron.keyfile = None {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.511178] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] neutron.max_version = None {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.511343] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] neutron.metadata_proxy_shared_secret = **** {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.511499] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] neutron.min_version = None {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.511663] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] neutron.ovs_bridge = br-int {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.511825] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] neutron.physnets = [] {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.511990] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] neutron.region_name = RegionOne {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.512162] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] neutron.retriable_status_codes = None {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.512327] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] neutron.service_metadata_proxy = True {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.512483] 
env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] neutron.service_name = None {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.512647] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] neutron.service_type = network {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.512810] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] neutron.split_loggers = False {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.512967] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] neutron.status_code_retries = None {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.513140] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] neutron.status_code_retry_delay = None {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.513300] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] neutron.timeout = None {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.513474] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] neutron.valid_interfaces = ['internal', 'public'] {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.513634] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] neutron.version = None {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.513805] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] notifications.bdms_in_notifications = False {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.513976] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] notifications.default_level = INFO {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.514150] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] notifications.include_share_mapping = False {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.514321] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] notifications.notification_format = unversioned {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.514482] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] notifications.notify_on_state_change = None {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.514656] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] notifications.versioned_notifications_topics = ['versioned_notifications'] {{(pid=62519) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.514828] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] pci.alias = [] {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.514994] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] pci.device_spec = [] {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.515173] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] pci.report_in_placement = False {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.515347] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] placement.auth_section = None {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.515519] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] placement.auth_type = password {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.515685] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] placement.auth_url = http://10.180.1.21/identity {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.515846] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] placement.cafile = None {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.516043] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] placement.certfile = None {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.516213] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] placement.collect_timing = False {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.516373] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] placement.connect_retries = None {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.516531] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] placement.connect_retry_delay = None {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.516689] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] placement.default_domain_id = None {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.516853] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] placement.default_domain_name = None {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.517049] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] placement.domain_id = None {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.517213] 
env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] placement.domain_name = None {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.517369] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] placement.endpoint_override = None {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.517527] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] placement.insecure = False {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.517682] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] placement.keyfile = None {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.517839] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] placement.max_version = None {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.517993] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] placement.min_version = None {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.518178] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] placement.password = **** {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.518338] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] placement.project_domain_id = None {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.518502] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] placement.project_domain_name = Default {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.518666] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] placement.project_id = None {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.518835] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] placement.project_name = service {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.519008] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] placement.region_name = RegionOne {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.519180] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] placement.retriable_status_codes = None {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.519339] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] placement.service_name = None {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.519504] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None 
None] placement.service_type = placement {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.519665] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] placement.split_loggers = False {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.519824] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] placement.status_code_retries = None {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.519982] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] placement.status_code_retry_delay = None {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.520155] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] placement.system_scope = None {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.520311] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] placement.timeout = None {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.520467] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] placement.trust_id = None {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.520622] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] placement.user_domain_id = None {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.520790] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] placement.user_domain_name = Default {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.520949] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] placement.user_id = None {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.521136] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] placement.username = nova {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.521318] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] placement.valid_interfaces = ['internal', 'public'] {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.521479] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] placement.version = None {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.521658] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] quota.cores = 20 {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.521828] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] quota.count_usage_from_placement = False {{(pid=62519) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.521997] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] quota.driver = nova.quota.DbQuotaDriver {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.522182] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] quota.injected_file_content_bytes = 10240 {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.522351] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] quota.injected_file_path_length = 255 {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.522512] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] quota.injected_files = 5 {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.522676] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] quota.instances = 10 {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.522841] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] quota.key_pairs = 100 {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.523012] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] quota.metadata_items = 128 {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.523189] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] quota.ram = 51200 {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.523353] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] quota.recheck_quota = True {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.523519] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] quota.server_group_members = 10 {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.523682] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] quota.server_groups = 10 {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.523884] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] quota.unified_limits_resource_list = ['servers'] {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.524068] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] quota.unified_limits_resource_strategy = require {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.524245] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] scheduler.discover_hosts_in_cells_interval = -1 {{(pid=62519) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.524408] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] scheduler.enable_isolated_aggregate_filtering = False {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.524568] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] scheduler.image_metadata_prefilter = False {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.524728] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] scheduler.limit_tenants_to_placement_aggregate = False {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.524891] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] scheduler.max_attempts = 3 {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.525067] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] scheduler.max_placement_results = 1000 {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.525237] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] scheduler.placement_aggregate_required_for_tenants = False {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.525402] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] scheduler.query_placement_for_image_type_support = False {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.525559] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] scheduler.query_placement_for_routed_network_aggregates = False {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.525733] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] scheduler.workers = 2 {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.525934] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] filter_scheduler.aggregate_image_properties_isolation_namespace = None {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.526126] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] filter_scheduler.aggregate_image_properties_isolation_separator = . 
{{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.526308] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] filter_scheduler.available_filters = ['nova.scheduler.filters.all_filters'] {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.526476] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] filter_scheduler.build_failure_weight_multiplier = 1000000.0 {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.526638] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] filter_scheduler.cpu_weight_multiplier = 1.0 {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.526799] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] filter_scheduler.cross_cell_move_weight_multiplier = 1000000.0 {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.527035] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] filter_scheduler.disk_weight_multiplier = 1.0 {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.527247] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] filter_scheduler.enabled_filters = ['ComputeFilter', 'ComputeCapabilitiesFilter', 'ImagePropertiesFilter', 'ServerGroupAntiAffinityFilter', 'ServerGroupAffinityFilter', 'SameHostFilter', 'DifferentHostFilter'] {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.527420] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] filter_scheduler.host_subset_size = 1 {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.527586] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] filter_scheduler.hypervisor_version_weight_multiplier = 1.0 {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.527744] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] filter_scheduler.image_properties_default_architecture = None {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.527906] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] filter_scheduler.io_ops_weight_multiplier = -1.0 {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.528084] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] filter_scheduler.isolated_hosts = [] {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.528256] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] filter_scheduler.isolated_images = [] {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.528412] env[62519]: DEBUG oslo_service.service [None 
req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] filter_scheduler.max_instances_per_host = 50 {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.528574] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] filter_scheduler.max_io_ops_per_host = 8 {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.528733] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] filter_scheduler.num_instances_weight_multiplier = 0.0 {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.528905] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] filter_scheduler.pci_in_placement = False {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.529082] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] filter_scheduler.pci_weight_multiplier = 1.0 {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.529248] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] filter_scheduler.ram_weight_multiplier = 1.0 {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.529411] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] filter_scheduler.restrict_isolated_hosts_to_isolated_images = True {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.529571] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] filter_scheduler.shuffle_best_same_weighed_hosts = False {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.529731] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] filter_scheduler.soft_affinity_weight_multiplier = 1.0 {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.529891] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] filter_scheduler.soft_anti_affinity_weight_multiplier = 1.0 {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.530064] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] filter_scheduler.track_instance_changes = True {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.530241] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] filter_scheduler.weight_classes = ['nova.scheduler.weights.all_weighers'] {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.530411] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] metrics.required = True {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.530572] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] metrics.weight_multiplier = 1.0 
{{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.530734] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] metrics.weight_of_unavailable = -10000.0 {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.530898] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] metrics.weight_setting = [] {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.531261] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] serial_console.base_url = ws://127.0.0.1:6083/ {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.531452] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] serial_console.enabled = False {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.531633] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] serial_console.port_range = 10000:20000 {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.531803] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] serial_console.proxyclient_address = 127.0.0.1 {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.531969] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] serial_console.serialproxy_host = 0.0.0.0 {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.532155] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] serial_console.serialproxy_port = 6083 {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.532328] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] service_user.auth_section = None {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.532501] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] service_user.auth_type = password {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.532659] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] service_user.cafile = None {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.532814] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] service_user.certfile = None {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.532974] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] service_user.collect_timing = False {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.533149] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] service_user.insecure = False {{(pid=62519) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.533309] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] service_user.keyfile = None {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.533477] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] service_user.send_service_user_token = True {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.533638] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] service_user.split_loggers = False {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.533806] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] service_user.timeout = None {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.533979] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] spice.agent_enabled = True {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.534165] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] spice.enabled = False {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.534485] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] spice.html5proxy_base_url = http://127.0.0.1:6082/spice_auto.html {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.534700] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] spice.html5proxy_host = 0.0.0.0 {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.534870] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] spice.html5proxy_port = 6082 {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.535043] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] spice.image_compression = None {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.535207] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] spice.jpeg_compression = None {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.535437] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] spice.playback_compression = None {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.535624] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] spice.require_secure = False {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.535795] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] spice.server_listen = 127.0.0.1 {{(pid=62519) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.535999] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] spice.server_proxyclient_address = 127.0.0.1 {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.536178] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] spice.streaming_mode = None {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.536340] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] spice.zlib_compression = None {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.536505] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] upgrade_levels.baseapi = None {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.536675] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] upgrade_levels.compute = auto {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.536860] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] upgrade_levels.conductor = None {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.537051] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] upgrade_levels.scheduler = None {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.537237] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] vendordata_dynamic_auth.auth_section = None {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.537401] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] vendordata_dynamic_auth.auth_type = None {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.537559] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] vendordata_dynamic_auth.cafile = None {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.537715] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] vendordata_dynamic_auth.certfile = None {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.537875] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] vendordata_dynamic_auth.collect_timing = False {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.538046] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] vendordata_dynamic_auth.insecure = False {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.538208] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] vendordata_dynamic_auth.keyfile = None {{(pid=62519) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.538367] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] vendordata_dynamic_auth.split_loggers = False {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.538521] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] vendordata_dynamic_auth.timeout = None {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.538692] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] vmware.api_retry_count = 10 {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.538851] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] vmware.ca_file = None {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.539031] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] vmware.cache_prefix = devstack-image-cache {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.539203] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] vmware.cluster_name = testcl1 {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.539366] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] vmware.connection_pool_size = 10 {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.539524] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] vmware.console_delay_seconds = None {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.539691] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] vmware.datastore_regex = ^datastore.* {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.539906] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] vmware.host_ip = vc1.osci.c.eu-de-1.cloud.sap {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.540090] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] vmware.host_password = **** {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.540262] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] vmware.host_port = 443 {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.540427] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] vmware.host_username = administrator@vsphere.local {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.540593] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] vmware.insecure = True {{(pid=62519) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.540756] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] vmware.integration_bridge = None {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.540916] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] vmware.maximum_objects = 100 {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.541087] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] vmware.pbm_default_policy = None {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.541250] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] vmware.pbm_enabled = False {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.541404] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] vmware.pbm_wsdl_location = None {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.541569] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] vmware.serial_log_dir = /opt/vmware/vspc {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.541722] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] vmware.serial_port_proxy_uri = None {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.541878] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] vmware.serial_port_service_uri = None {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.542055] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] vmware.task_poll_interval = 0.5 {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.542228] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] vmware.use_linked_clone = False {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.542394] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] vmware.vnc_keymap = en-us {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.542560] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] vmware.vnc_port = 5900 {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.542720] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] vmware.vnc_port_total = 10000 {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.542902] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] vnc.auth_schemes = ['none'] {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.543112] 
env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] vnc.enabled = False {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.543477] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] vnc.novncproxy_base_url = http://127.0.0.1:6080/vnc_auto.html {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.543702] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] vnc.novncproxy_host = 0.0.0.0 {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.543926] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] vnc.novncproxy_port = 6080 {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.544167] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] vnc.server_listen = 127.0.0.1 {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.544386] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] vnc.server_proxyclient_address = 127.0.0.1 {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.544590] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] vnc.vencrypt_ca_certs = None {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.544788] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] vnc.vencrypt_client_cert = None {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.544979] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] vnc.vencrypt_client_key = None {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.545216] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] workarounds.disable_compute_service_check_for_ffu = False {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.545419] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] workarounds.disable_deep_image_inspection = False {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.545596] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] workarounds.disable_fallback_pcpu_query = False {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.545780] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] workarounds.disable_group_policy_check_upcall = False {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.545989] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] workarounds.disable_libvirt_livesnapshot = False {{(pid=62519) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.546202] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] workarounds.disable_rootwrap = False {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.546392] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] workarounds.enable_numa_live_migration = False {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.546560] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] workarounds.enable_qemu_monitor_announce_self = False {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.546722] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] workarounds.ensure_libvirt_rbd_instance_dir_cleanup = False {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.546979] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] workarounds.handle_virt_lifecycle_events = True {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.547060] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] workarounds.libvirt_disable_apic = False {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.547214] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] workarounds.never_download_image_if_on_rbd = False {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.547377] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] workarounds.qemu_monitor_announce_self_count = 3 {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.547538] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] workarounds.qemu_monitor_announce_self_interval = 1 {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.547698] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] workarounds.reserve_disk_resource_for_image_cache = False {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.547856] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] workarounds.skip_cpu_compare_at_startup = False {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.548024] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] workarounds.skip_cpu_compare_on_dest = False {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.548189] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] workarounds.skip_hypervisor_version_check_on_lm = False {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.548346] 
env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] workarounds.skip_reserve_in_use_ironic_nodes = False {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.548504] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] workarounds.unified_limits_count_pcpu_as_vcpu = False {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.548665] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] workarounds.wait_for_vif_plugged_event_during_hard_reboot = [] {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.548845] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] wsgi.api_paste_config = /etc/nova/api-paste.ini {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.549027] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] wsgi.client_socket_timeout = 900 {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.549204] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] wsgi.default_pool_size = 1000 {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.549374] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] wsgi.keep_alive = True {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.549543] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] wsgi.max_header_line = 16384 {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.549711] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] wsgi.secure_proxy_ssl_header = None {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.549873] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] wsgi.ssl_ca_file = None {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.550046] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] wsgi.ssl_cert_file = None {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.550212] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] wsgi.ssl_key_file = None {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.550395] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] wsgi.tcp_keepidle = 600 {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.550599] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] wsgi.wsgi_log_format = %(client_ip)s "%(request_line)s" status: %(status_code)s len: %(body_length)s time: %(wall_seconds).7f {{(pid=62519) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.550797] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] zvm.ca_file = None {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.551018] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] zvm.cloud_connector_url = None {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.551321] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] zvm.image_tmp_path = /opt/stack/data/n-cpu-1/images {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.551498] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] zvm.reachable_timeout = 300 {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.551672] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] oslo_versionedobjects.fatal_exception_format_errors = False {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.551847] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] oslo_middleware.http_basic_auth_user_file = /etc/htpasswd {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.552034] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] profiler.connection_string = messaging:// {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.552208] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] profiler.enabled = False {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.552379] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] profiler.es_doc_type = notification {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.552542] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] profiler.es_scroll_size = 10000 {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.552707] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] profiler.es_scroll_time = 2m {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.552866] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] profiler.filter_error_trace = False {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.553084] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] profiler.hmac_keys = **** {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.553215] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] profiler.sentinel_service_name = mymaster {{(pid=62519) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.553377] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] profiler.socket_timeout = 0.1 {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.553539] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] profiler.trace_requests = False {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.553698] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] profiler.trace_sqlalchemy = False {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.553901] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] profiler_jaeger.process_tags = {} {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.554089] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] profiler_jaeger.service_name_prefix = None {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.554256] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] profiler_otlp.service_name_prefix = None {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.554422] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] remote_debug.host = None {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.554580] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] remote_debug.port = None {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.554759] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] oslo_messaging_rabbit.amqp_auto_delete = False {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.554921] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] oslo_messaging_rabbit.amqp_durable_queues = False {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.555099] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] oslo_messaging_rabbit.conn_pool_min_size = 2 {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.555264] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] oslo_messaging_rabbit.conn_pool_ttl = 1200 {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.555426] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] oslo_messaging_rabbit.direct_mandatory_flag = True {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.555586] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] oslo_messaging_rabbit.enable_cancel_on_failover = False 
{{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.555743] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] oslo_messaging_rabbit.heartbeat_in_pthread = False {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.555927] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] oslo_messaging_rabbit.heartbeat_rate = 3 {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.556116] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] oslo_messaging_rabbit.heartbeat_timeout_threshold = 60 {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.556289] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] oslo_messaging_rabbit.hostname = devstack {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.556448] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] oslo_messaging_rabbit.kombu_compression = None {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.556615] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] oslo_messaging_rabbit.kombu_failover_strategy = round-robin {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.556779] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] oslo_messaging_rabbit.kombu_missing_consumer_retry_timeout = 60 {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.556978] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] oslo_messaging_rabbit.kombu_reconnect_delay = 1.0 {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.557132] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] oslo_messaging_rabbit.processname = nova-compute {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.557298] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] oslo_messaging_rabbit.rabbit_ha_queues = False {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.557459] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] oslo_messaging_rabbit.rabbit_interval_max = 30 {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.557639] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] oslo_messaging_rabbit.rabbit_login_method = AMQPLAIN {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.557811] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] oslo_messaging_rabbit.rabbit_qos_prefetch_count = 0 {{(pid=62519) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.557971] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] oslo_messaging_rabbit.rabbit_quorum_delivery_limit = 0 {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.558154] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] oslo_messaging_rabbit.rabbit_quorum_max_memory_bytes = 0 {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.558318] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] oslo_messaging_rabbit.rabbit_quorum_max_memory_length = 0 {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.558478] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] oslo_messaging_rabbit.rabbit_quorum_queue = False {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.558642] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] oslo_messaging_rabbit.rabbit_retry_backoff = 2 {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.558801] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] oslo_messaging_rabbit.rabbit_retry_interval = 1 {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.558958] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] oslo_messaging_rabbit.rabbit_stream_fanout = False {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.559134] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] oslo_messaging_rabbit.rabbit_transient_queues_ttl = 1800 {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.559295] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] oslo_messaging_rabbit.rabbit_transient_quorum_queue = False {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.559463] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] oslo_messaging_rabbit.rpc_conn_pool_size = 30 {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.559626] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] oslo_messaging_rabbit.ssl = False {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.559838] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] oslo_messaging_rabbit.ssl_ca_file = {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.560034] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] oslo_messaging_rabbit.ssl_cert_file = {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.560206] env[62519]: DEBUG 
oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] oslo_messaging_rabbit.ssl_enforce_fips_mode = False {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.560378] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] oslo_messaging_rabbit.ssl_key_file = {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.560547] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] oslo_messaging_rabbit.ssl_version = {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.560709] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] oslo_messaging_rabbit.use_queue_manager = False {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.560900] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] oslo_messaging_notifications.driver = ['messagingv2'] {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.561082] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] oslo_messaging_notifications.retry = -1 {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.561268] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] oslo_messaging_notifications.topics = ['notifications'] {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.561440] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] oslo_messaging_notifications.transport_url = **** {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.561611] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] oslo_limit.auth_section = None {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.561773] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] oslo_limit.auth_type = None {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.561932] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] oslo_limit.cafile = None {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.562101] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] oslo_limit.certfile = None {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.562264] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] oslo_limit.collect_timing = False {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.562420] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] oslo_limit.connect_retries = None {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 
547.562575] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] oslo_limit.connect_retry_delay = None {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.562731] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] oslo_limit.endpoint_id = None {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.562901] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] oslo_limit.endpoint_interface = publicURL {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.563072] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] oslo_limit.endpoint_override = None {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.563231] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] oslo_limit.endpoint_region_name = None {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.563387] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] oslo_limit.endpoint_service_name = None {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.563540] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] oslo_limit.endpoint_service_type = None {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.563699] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] oslo_limit.insecure = False {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.563883] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] oslo_limit.keyfile = None {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.564066] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] oslo_limit.max_version = None {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.564228] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] oslo_limit.min_version = None {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.564384] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] oslo_limit.region_name = None {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.564539] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] oslo_limit.retriable_status_codes = None {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.564696] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] oslo_limit.service_name = None {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.564852] env[62519]: DEBUG 
oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] oslo_limit.service_type = None {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.565024] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] oslo_limit.split_loggers = False {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.565197] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] oslo_limit.status_code_retries = None {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.565348] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] oslo_limit.status_code_retry_delay = None {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.565504] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] oslo_limit.timeout = None {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.565658] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] oslo_limit.valid_interfaces = None {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.565814] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] oslo_limit.version = None {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.566018] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] oslo_reports.file_event_handler = None {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.566191] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] oslo_reports.file_event_handler_interval = 1 {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.566350] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] oslo_reports.log_dir = None {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.566521] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] vif_plug_linux_bridge_privileged.capabilities = [12] {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.566680] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] vif_plug_linux_bridge_privileged.group = None {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.566839] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] vif_plug_linux_bridge_privileged.helper_command = None {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.567012] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] vif_plug_linux_bridge_privileged.logger_name = oslo_privsep.daemon {{(pid=62519) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.567179] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] vif_plug_linux_bridge_privileged.thread_pool_size = 8 {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.567335] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] vif_plug_linux_bridge_privileged.user = None {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.567503] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] vif_plug_ovs_privileged.capabilities = [12, 1] {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.567660] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] vif_plug_ovs_privileged.group = None {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.567815] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] vif_plug_ovs_privileged.helper_command = None {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.567980] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] vif_plug_ovs_privileged.logger_name = oslo_privsep.daemon {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.568155] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] vif_plug_ovs_privileged.thread_pool_size = 8 {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.568335] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] vif_plug_ovs_privileged.user = None {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.568508] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] os_vif_linux_bridge.flat_interface = None {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.568686] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] os_vif_linux_bridge.forward_bridge_interface = ['all'] {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.568855] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] os_vif_linux_bridge.iptables_bottom_regex = {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.569043] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] os_vif_linux_bridge.iptables_drop_action = DROP {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.569218] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] os_vif_linux_bridge.iptables_top_regex = {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.569384] env[62519]: DEBUG oslo_service.service [None 
req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] os_vif_linux_bridge.network_device_mtu = 1500 {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.569551] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] os_vif_linux_bridge.use_ipv6 = False {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.569712] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] os_vif_linux_bridge.vlan_interface = None {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.569889] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] os_vif_ovs.default_qos_type = linux-noop {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.570074] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] os_vif_ovs.isolate_vif = False {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.570246] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] os_vif_ovs.network_device_mtu = 1500 {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.570414] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] os_vif_ovs.ovs_vsctl_timeout = 120 {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.570581] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] os_vif_ovs.ovsdb_connection = tcp:127.0.0.1:6640 {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.570748] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] os_vif_ovs.ovsdb_interface = native {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.570908] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] os_vif_ovs.per_port_bridge = False {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.571092] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] privsep_osbrick.capabilities = [21] {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.571253] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] privsep_osbrick.group = None {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.571408] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] privsep_osbrick.helper_command = None {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.571571] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] privsep_osbrick.logger_name = os_brick.privileged {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.571733] env[62519]: DEBUG 
oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] privsep_osbrick.thread_pool_size = 8 {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.571891] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] privsep_osbrick.user = None {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.572074] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] nova_sys_admin.capabilities = [0, 1, 2, 3, 12, 21] {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.572235] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] nova_sys_admin.group = None {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.572390] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] nova_sys_admin.helper_command = None {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.572549] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] nova_sys_admin.logger_name = oslo_privsep.daemon {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.572711] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] nova_sys_admin.thread_pool_size = 8 {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.572868] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] nova_sys_admin.user = None {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 547.573009] env[62519]: DEBUG oslo_service.service [None req-808d48a6-9a5e-4bc5-b621-1add481e502a None None] ******************************************************************************** {{(pid=62519) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2828}} [ 547.573447] env[62519]: INFO nova.service [-] Starting compute node (version 0.0.1) [ 548.077575] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-08184e25-cf62-4894-af83-c710824095ed None None] Getting list of instances from cluster (obj){ [ 548.077575] env[62519]: value = "domain-c8" [ 548.077575] env[62519]: _type = "ClusterComputeResource" [ 548.077575] env[62519]: } {{(pid=62519) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2116}} [ 548.078680] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2c62b82-ac90-4b78-abef-216116676281 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 548.087114] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-08184e25-cf62-4894-af83-c710824095ed None None] Got total of 0 instances {{(pid=62519) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2125}} [ 548.087680] env[62519]: WARNING nova.virt.vmwareapi.driver [None req-08184e25-cf62-4894-af83-c710824095ed None None] The vmwareapi driver is not tested by the OpenStack project nor does it have clear maintainer(s) and thus its quality can not be ensured. 
It should be considered experimental and may be removed in a future release. If you are using the driver in production please let us know via the openstack-discuss mailing list. [ 548.088152] env[62519]: INFO nova.virt.node [None req-08184e25-cf62-4894-af83-c710824095ed None None] Generated node identity f8ca0d98-9158-4b85-ae0e-b106f966dd44 [ 548.088406] env[62519]: INFO nova.virt.node [None req-08184e25-cf62-4894-af83-c710824095ed None None] Wrote node identity f8ca0d98-9158-4b85-ae0e-b106f966dd44 to /opt/stack/data/n-cpu-1/compute_id [ 548.591174] env[62519]: WARNING nova.compute.manager [None req-08184e25-cf62-4894-af83-c710824095ed None None] Compute nodes ['f8ca0d98-9158-4b85-ae0e-b106f966dd44'] for host cpu-1 were not found in the database. If this is the first time this service is starting on this host, then you can ignore this warning. [ 549.596887] env[62519]: INFO nova.compute.manager [None req-08184e25-cf62-4894-af83-c710824095ed None None] Looking for unclaimed instances stuck in BUILDING status for nodes managed by this host [ 550.602268] env[62519]: WARNING nova.compute.manager [None req-08184e25-cf62-4894-af83-c710824095ed None None] No compute node record found for host cpu-1. If this is the first time this service is starting on this host, then you can ignore this warning.: nova.exception_Remote.ComputeHostNotFound_Remote: Compute host cpu-1 could not be found. [ 550.602826] env[62519]: DEBUG oslo_concurrency.lockutils [None req-08184e25-cf62-4894-af83-c710824095ed None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 550.602826] env[62519]: DEBUG oslo_concurrency.lockutils [None req-08184e25-cf62-4894-af83-c710824095ed None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 550.602983] env[62519]: DEBUG oslo_concurrency.lockutils [None req-08184e25-cf62-4894-af83-c710824095ed None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 550.603118] env[62519]: DEBUG nova.compute.resource_tracker [None req-08184e25-cf62-4894-af83-c710824095ed None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62519) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 550.603993] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad404da7-2f2c-4238-a6e1-e27ee496c7b6 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 550.611867] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34ac3457-f458-4105-8612-229d307abd2c {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 550.627116] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d98e9e1-a2c5-4d37-acd9-352897d56312 {{(pid=62519) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 550.633687] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6ea87e9-3ef3-4839-8952-42131841dc39 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 550.662280] env[62519]: DEBUG nova.compute.resource_tracker [None req-08184e25-cf62-4894-af83-c710824095ed None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181387MB free_disk=159GB free_vcpus=48 pci_devices=None {{(pid=62519) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 550.662426] env[62519]: DEBUG oslo_concurrency.lockutils [None req-08184e25-cf62-4894-af83-c710824095ed None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 550.662634] env[62519]: DEBUG oslo_concurrency.lockutils [None req-08184e25-cf62-4894-af83-c710824095ed None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 551.164651] env[62519]: WARNING nova.compute.resource_tracker [None req-08184e25-cf62-4894-af83-c710824095ed None None] No compute node record for cpu-1:f8ca0d98-9158-4b85-ae0e-b106f966dd44: nova.exception_Remote.ComputeHostNotFound_Remote: Compute host f8ca0d98-9158-4b85-ae0e-b106f966dd44 could not be found. [ 551.668519] env[62519]: INFO nova.compute.resource_tracker [None req-08184e25-cf62-4894-af83-c710824095ed None None] Compute node record created for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 with uuid: f8ca0d98-9158-4b85-ae0e-b106f966dd44 [ 553.176456] env[62519]: DEBUG nova.compute.resource_tracker [None req-08184e25-cf62-4894-af83-c710824095ed None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=62519) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 553.176831] env[62519]: DEBUG nova.compute.resource_tracker [None req-08184e25-cf62-4894-af83-c710824095ed None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=62519) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 553.329670] env[62519]: INFO nova.scheduler.client.report [None req-08184e25-cf62-4894-af83-c710824095ed None None] [req-ed2ed8bb-b82d-48a9-b2b9-b35bcbbd83e2] Created resource provider record via placement API for resource provider with UUID f8ca0d98-9158-4b85-ae0e-b106f966dd44 and name domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28. 
[ 553.346278] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4407d07-f1a0-4786-be68-9b942fb04fa5 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 553.353659] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6647953b-7122-449c-8235-623907aba7cf {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 553.382796] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-caf7e50d-c6a9-4ed3-9bb5-bafce84429f1 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 553.389517] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b41ecc5b-2b84-4a04-b5c0-412df9ecd8d2 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 553.402397] env[62519]: DEBUG nova.compute.provider_tree [None req-08184e25-cf62-4894-af83-c710824095ed None None] Updating inventory in ProviderTree for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 159, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 553.936458] env[62519]: DEBUG nova.scheduler.client.report [None req-08184e25-cf62-4894-af83-c710824095ed None None] Updated inventory for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 with generation 0 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 159, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 553.936687] env[62519]: DEBUG nova.compute.provider_tree [None req-08184e25-cf62-4894-af83-c710824095ed None None] Updating resource provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 generation from 0 to 1 during operation: update_inventory {{(pid=62519) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 553.936824] env[62519]: DEBUG nova.compute.provider_tree [None req-08184e25-cf62-4894-af83-c710824095ed None None] Updating inventory in ProviderTree for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 159, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 553.986218] env[62519]: DEBUG nova.compute.provider_tree [None req-08184e25-cf62-4894-af83-c710824095ed None None] Updating 
resource provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 generation from 1 to 2 during operation: update_traits {{(pid=62519) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 554.490499] env[62519]: DEBUG nova.compute.resource_tracker [None req-08184e25-cf62-4894-af83-c710824095ed None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62519) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 554.490855] env[62519]: DEBUG oslo_concurrency.lockutils [None req-08184e25-cf62-4894-af83-c710824095ed None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 3.828s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 554.490932] env[62519]: DEBUG nova.service [None req-08184e25-cf62-4894-af83-c710824095ed None None] Creating RPC server for service compute {{(pid=62519) start /opt/stack/nova/nova/service.py:186}} [ 554.505852] env[62519]: DEBUG nova.service [None req-08184e25-cf62-4894-af83-c710824095ed None None] Join ServiceGroup membership for this service compute {{(pid=62519) start /opt/stack/nova/nova/service.py:203}} [ 554.506270] env[62519]: DEBUG nova.servicegroup.drivers.db [None req-08184e25-cf62-4894-af83-c710824095ed None None] DB_Driver: join new ServiceGroup member cpu-1 to the compute group, service = {{(pid=62519) join /opt/stack/nova/nova/servicegroup/drivers/db.py:44}} [ 558.509966] env[62519]: DEBUG oslo_service.periodic_task [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Running periodic task ComputeManager._sync_power_states {{(pid=62519) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 559.015024] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Getting list of instances from cluster (obj){ [ 559.015024] env[62519]: value = "domain-c8" [ 559.015024] env[62519]: _type = "ClusterComputeResource" [ 559.015024] env[62519]: } {{(pid=62519) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2116}} [ 559.015547] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2112d14c-d523-4a26-a128-13281a96b06c {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 559.024315] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Got total of 0 instances {{(pid=62519) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2125}} [ 559.024723] env[62519]: DEBUG oslo_service.periodic_task [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Running periodic task ComputeManager._cleanup_running_deleted_instances {{(pid=62519) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 559.025166] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Getting list of instances from cluster (obj){ [ 559.025166] env[62519]: value = "domain-c8" [ 559.025166] env[62519]: _type = "ClusterComputeResource" [ 559.025166] env[62519]: } {{(pid=62519) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2116}} [ 559.026413] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0cf6d41-122c-4f25-a521-045be1b379ac 
{{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 559.033849] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Got total of 0 instances {{(pid=62519) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2125}} [ 602.772359] env[62519]: DEBUG oslo_service.periodic_task [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62519) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 602.772359] env[62519]: DEBUG oslo_service.periodic_task [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62519) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 602.772359] env[62519]: DEBUG nova.compute.manager [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Starting heal instance info cache {{(pid=62519) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10418}} [ 602.772359] env[62519]: DEBUG nova.compute.manager [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Rebuilding the list of instances to heal {{(pid=62519) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10422}} [ 603.277353] env[62519]: DEBUG nova.compute.manager [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Didn't find any instances for network info cache update. {{(pid=62519) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10504}} [ 603.277615] env[62519]: DEBUG oslo_service.periodic_task [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62519) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 603.277855] env[62519]: DEBUG oslo_service.periodic_task [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62519) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 603.278059] env[62519]: DEBUG oslo_service.periodic_task [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62519) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 603.278269] env[62519]: DEBUG oslo_service.periodic_task [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62519) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 603.278473] env[62519]: DEBUG oslo_service.periodic_task [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62519) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 603.278663] env[62519]: DEBUG oslo_service.periodic_task [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62519) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 603.278855] env[62519]: DEBUG nova.compute.manager [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] 
CONF.reclaim_instance_interval <= 0, skipping... {{(pid=62519) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11037}} [ 603.279033] env[62519]: DEBUG oslo_service.periodic_task [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Running periodic task ComputeManager.update_available_resource {{(pid=62519) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 603.782294] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 603.782687] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 603.782687] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 603.782838] env[62519]: DEBUG nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62519) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 603.783796] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5818a954-31f4-4f06-978b-5dd519a110a5 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 603.792111] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-960fde08-413e-4580-9e58-4ed454d5caff {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 603.805388] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2451c841-c32f-4be2-8a81-b9c552b2f23e {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 603.811548] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63fe3d7b-dc08-4387-92ac-c7eab60bab2a {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 603.839869] env[62519]: DEBUG nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181392MB free_disk=159GB free_vcpus=48 pci_devices=None {{(pid=62519) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 603.839995] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62519) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 603.840187] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 604.859364] env[62519]: DEBUG nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=62519) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 604.859640] env[62519]: DEBUG nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=62519) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 604.874747] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-178f057d-920b-4d4d-8ced-6317236601d0 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 604.882138] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c14f52f1-5516-4bef-a65b-bb0478f51b22 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 604.911079] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1c1017c-fb95-4d1e-b627-3fc534d0c2a8 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 604.917736] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8a2eaba-ad3f-4f19-978e-95d23d904e79 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 604.930590] env[62519]: DEBUG nova.compute.provider_tree [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Inventory has not changed in ProviderTree for provider: f8ca0d98-9158-4b85-ae0e-b106f966dd44 {{(pid=62519) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 605.433708] env[62519]: DEBUG nova.scheduler.client.report [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Inventory has not changed for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 159, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 605.938616] env[62519]: DEBUG nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62519) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 605.939075] env[62519]: DEBUG oslo_concurrency.lockutils [None 
req-2e346fd1-4f93-4335-96df-208436219ecf None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.099s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 665.923196] env[62519]: DEBUG oslo_service.periodic_task [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62519) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 665.923676] env[62519]: DEBUG oslo_service.periodic_task [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=62519) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 666.428753] env[62519]: DEBUG oslo_service.periodic_task [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62519) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 666.428963] env[62519]: DEBUG nova.compute.manager [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Starting heal instance info cache {{(pid=62519) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10418}} [ 666.429084] env[62519]: DEBUG nova.compute.manager [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Rebuilding the list of instances to heal {{(pid=62519) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10422}} [ 666.934200] env[62519]: DEBUG nova.compute.manager [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Didn't find any instances for network info cache update. 
{{(pid=62519) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10504}} [ 666.934596] env[62519]: DEBUG oslo_service.periodic_task [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62519) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 666.934596] env[62519]: DEBUG oslo_service.periodic_task [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62519) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 666.934723] env[62519]: DEBUG oslo_service.periodic_task [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62519) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 666.934847] env[62519]: DEBUG oslo_service.periodic_task [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62519) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 666.935019] env[62519]: DEBUG oslo_service.periodic_task [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62519) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 666.935168] env[62519]: DEBUG oslo_service.periodic_task [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62519) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 666.935293] env[62519]: DEBUG nova.compute.manager [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=62519) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11037}} [ 666.935431] env[62519]: DEBUG oslo_service.periodic_task [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Running periodic task ComputeManager.update_available_resource {{(pid=62519) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 667.438954] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 667.439206] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 667.439385] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 667.439535] env[62519]: DEBUG nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62519) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 667.440468] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e6b1ce7-dd00-45ac-af5f-aa04be6feaa4 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 667.449020] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b09336aa-1371-47a5-af2f-afb4ca0a924c {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 667.462462] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d94c5886-d55a-4951-8eda-1e2b20cde30f {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 667.469360] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90355a44-7748-4f78-b09a-89b2a60fa935 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 667.499171] env[62519]: DEBUG nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181385MB free_disk=159GB free_vcpus=48 pci_devices=None {{(pid=62519) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 667.499296] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 667.499490] 
env[62519]: DEBUG oslo_concurrency.lockutils [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 668.517553] env[62519]: DEBUG nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=62519) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 668.517887] env[62519]: DEBUG nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=62519) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 668.531075] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e11d1398-8d8a-4c9d-9a38-ae195121e670 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 668.538836] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-006df957-2378-430f-a48d-f96b760bc9ea {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 668.568153] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1fced641-eed3-4ae9-a261-e75c7651e6f9 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 668.575645] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b51b4916-45c3-4c10-84c7-9618f54257af {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 668.588590] env[62519]: DEBUG nova.compute.provider_tree [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Inventory has not changed in ProviderTree for provider: f8ca0d98-9158-4b85-ae0e-b106f966dd44 {{(pid=62519) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 669.092205] env[62519]: DEBUG nova.scheduler.client.report [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Inventory has not changed for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 159, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 669.093428] env[62519]: DEBUG nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62519) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 669.093609] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Lock "compute_resources" "released" by 
"nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 1.594s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 729.095274] env[62519]: DEBUG oslo_service.periodic_task [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62519) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 729.095908] env[62519]: DEBUG oslo_service.periodic_task [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62519) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 729.095908] env[62519]: DEBUG nova.compute.manager [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Starting heal instance info cache {{(pid=62519) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10418}} [ 729.095908] env[62519]: DEBUG nova.compute.manager [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Rebuilding the list of instances to heal {{(pid=62519) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10422}} [ 729.599181] env[62519]: DEBUG nova.compute.manager [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Didn't find any instances for network info cache update. {{(pid=62519) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10504}} [ 729.599385] env[62519]: DEBUG oslo_service.periodic_task [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62519) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 729.599527] env[62519]: DEBUG oslo_service.periodic_task [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62519) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 729.599668] env[62519]: DEBUG oslo_service.periodic_task [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62519) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 729.599809] env[62519]: DEBUG oslo_service.periodic_task [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62519) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 729.599949] env[62519]: DEBUG oslo_service.periodic_task [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62519) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 729.600100] env[62519]: DEBUG oslo_service.periodic_task [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62519) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 729.600226] env[62519]: DEBUG nova.compute.manager [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=62519) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11037}} [ 729.600361] env[62519]: DEBUG oslo_service.periodic_task [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Running periodic task ComputeManager.update_available_resource {{(pid=62519) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 730.103582] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 730.103988] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 730.103988] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 730.104190] env[62519]: DEBUG nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62519) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 730.105023] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30a210d2-78f4-4c04-a695-91bd92c31f93 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 730.113523] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c57ff6c0-f9b9-4e9e-90d4-28fb218f6f6b {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 730.127365] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21e453c7-46d3-4575-a569-45dacc7fa1cc {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 730.133541] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b3837c1-e5e4-49c0-bcce-3afd18072a7b {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 730.161576] env[62519]: DEBUG nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181386MB free_disk=159GB free_vcpus=48 pci_devices=None {{(pid=62519) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 730.161729] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 730.161925] 
env[62519]: DEBUG oslo_concurrency.lockutils [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 731.181327] env[62519]: DEBUG nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=62519) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 731.181602] env[62519]: DEBUG nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=62519) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 731.195606] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b6fe9dd-bc71-47c2-9e4f-1633b22dcde9 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 731.204783] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89716401-8a8c-4f3b-bf03-e79ae90d48d9 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 731.233603] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83e1b93b-9aa7-4792-8a25-6aece44d93ba {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 731.240228] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d40ced6-d057-4edc-b1d1-f42d95dcaa64 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 731.252695] env[62519]: DEBUG nova.compute.provider_tree [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Inventory has not changed in ProviderTree for provider: f8ca0d98-9158-4b85-ae0e-b106f966dd44 {{(pid=62519) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 731.757266] env[62519]: DEBUG nova.scheduler.client.report [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Inventory has not changed for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 159, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 731.758593] env[62519]: DEBUG nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62519) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 731.758805] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Lock "compute_resources" "released" by 
"nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 1.597s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 790.418887] env[62519]: DEBUG oslo_service.periodic_task [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62519) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 790.419181] env[62519]: DEBUG oslo_service.periodic_task [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=62519) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 790.925611] env[62519]: DEBUG oslo_service.periodic_task [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62519) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 790.925865] env[62519]: DEBUG nova.compute.manager [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Starting heal instance info cache {{(pid=62519) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10418}} [ 790.925958] env[62519]: DEBUG nova.compute.manager [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Rebuilding the list of instances to heal {{(pid=62519) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10422}} [ 791.429110] env[62519]: DEBUG nova.compute.manager [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Didn't find any instances for network info cache update. {{(pid=62519) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10504}} [ 791.429563] env[62519]: DEBUG oslo_service.periodic_task [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62519) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 791.429563] env[62519]: DEBUG oslo_service.periodic_task [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62519) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 791.429661] env[62519]: DEBUG oslo_service.periodic_task [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62519) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 791.429804] env[62519]: DEBUG oslo_service.periodic_task [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62519) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 791.429939] env[62519]: DEBUG oslo_service.periodic_task [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62519) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 791.430096] env[62519]: DEBUG oslo_service.periodic_task [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62519) run_periodic_tasks 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 791.430222] env[62519]: DEBUG nova.compute.manager [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=62519) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11037}} [ 791.430360] env[62519]: DEBUG oslo_service.periodic_task [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Running periodic task ComputeManager.update_available_resource {{(pid=62519) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 791.933433] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 791.933681] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 791.933917] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 791.934120] env[62519]: DEBUG nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62519) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 791.935028] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6703d51-d1e9-48b0-bb3f-9a4a51dd7cca {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 791.943676] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76ead813-818f-4e4b-b349-6e40a14dae7a {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 791.957920] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7b54d3e-9976-41a7-98a4-32c77f2dc171 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 791.964103] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c68645f-ae51-48dd-87e0-ec82e53110c6 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 791.992157] env[62519]: DEBUG nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181385MB free_disk=159GB free_vcpus=48 pci_devices=None {{(pid=62519) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 791.992283] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2e346fd1-4f93-4335-96df-208436219ecf 
None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 791.992505] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 793.009675] env[62519]: DEBUG nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=62519) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 793.009899] env[62519]: DEBUG nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=62519) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 793.022515] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ac9ad93-528a-4910-968d-bfca1b8c5090 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 793.030108] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25fc2ff9-2070-4f33-8657-879a067e9a64 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 793.059806] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f7180c8-969d-4139-a5c2-41160a972424 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 793.066426] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-734f9426-5399-4b00-8de5-30668f3971b6 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 793.078857] env[62519]: DEBUG nova.compute.provider_tree [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Inventory has not changed in ProviderTree for provider: f8ca0d98-9158-4b85-ae0e-b106f966dd44 {{(pid=62519) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 793.582145] env[62519]: DEBUG nova.scheduler.client.report [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Inventory has not changed for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 159, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 793.583445] env[62519]: DEBUG nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62519) _update_available_resource 
/opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 793.583627] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 1.591s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 842.762348] env[62519]: DEBUG oslo_service.periodic_task [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=62519) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 842.762733] env[62519]: DEBUG nova.compute.manager [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Cleaning up deleted instances {{(pid=62519) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11720}} [ 843.266710] env[62519]: DEBUG nova.compute.manager [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] There are 0 instances to clean {{(pid=62519) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11729}} [ 843.266970] env[62519]: DEBUG oslo_service.periodic_task [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=62519) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 843.267116] env[62519]: DEBUG nova.compute.manager [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Cleaning up deleted instances with incomplete migration {{(pid=62519) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11758}} [ 843.769640] env[62519]: DEBUG oslo_service.periodic_task [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=62519) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 846.267050] env[62519]: DEBUG oslo_service.periodic_task [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62519) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 846.267437] env[62519]: DEBUG oslo_service.periodic_task [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62519) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 846.267437] env[62519]: DEBUG nova.compute.manager [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Starting heal instance info cache {{(pid=62519) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10418}} [ 846.267584] env[62519]: DEBUG nova.compute.manager [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Rebuilding the list of instances to heal {{(pid=62519) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10422}} [ 846.771189] env[62519]: DEBUG nova.compute.manager [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Didn't find any instances for network info cache update. 
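The "Acquiring lock" / "acquired ... waited" / "released ... held" triplets that bracket clean_compute_node_cache and _update_available_resource above come from oslo.concurrency's lockutils wrapper around the resource tracker's "compute_resources" semaphore. A minimal sketch of that pattern, assuming oslo.concurrency is installed; the class and method names below are illustrative stand-ins, not Nova's code:

    import logging

    from oslo_concurrency import lockutils

    logging.basicConfig(level=logging.DEBUG)


    class ToyResourceTracker:
        # Every call serializes on the in-process "compute_resources"
        # semaphore; lockutils emits "Acquiring lock ... / acquired,
        # waited Ns / released, held Ns" DEBUG messages of the kind seen
        # throughout this log.
        @lockutils.synchronized("compute_resources")
        def update_available_resource(self):
            pass


    ToyResourceTracker().update_available_resource()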
{{(pid=62519) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10504}} [ 846.771469] env[62519]: DEBUG oslo_service.periodic_task [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62519) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 846.771577] env[62519]: DEBUG oslo_service.periodic_task [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62519) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 846.771726] env[62519]: DEBUG oslo_service.periodic_task [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62519) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 846.771868] env[62519]: DEBUG oslo_service.periodic_task [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62519) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 846.771996] env[62519]: DEBUG nova.compute.manager [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=62519) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11037}} [ 847.761591] env[62519]: DEBUG oslo_service.periodic_task [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62519) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 847.761995] env[62519]: DEBUG oslo_service.periodic_task [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Running periodic task ComputeManager.update_available_resource {{(pid=62519) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 848.268149] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 848.268450] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 848.268650] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 848.268827] env[62519]: DEBUG nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62519) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 848.269837] env[62519]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd8a2521-0283-4d2c-9ed4-8c4f72a76c5e {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.278456] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-096dac00-db84-4509-8253-5aea1317562e {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.293559] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0da81b2a-2fe9-4b20-ac4e-da53cef1cc6b {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.299970] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2806011f-5c1b-4cfe-ac06-2f02541a29b2 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.329775] env[62519]: DEBUG nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181384MB free_disk=159GB free_vcpus=48 pci_devices=None {{(pid=62519) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 848.329916] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 848.330120] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 849.349534] env[62519]: DEBUG nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=62519) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 849.349877] env[62519]: DEBUG nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=62519) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 849.363495] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31462b14-641b-4b5f-a6fb-b16ab35094c5 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.370691] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6eeee92-16f6-47b4-be8d-3e906f1d9354 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.400379] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-422a5bfa-c967-49ba-baca-c17bdf4fb7ea {{(pid=62519) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.407191] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81e3d057-8269-4771-8f19-b00674688050 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.419915] env[62519]: DEBUG nova.compute.provider_tree [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Inventory has not changed in ProviderTree for provider: f8ca0d98-9158-4b85-ae0e-b106f966dd44 {{(pid=62519) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 849.922720] env[62519]: DEBUG nova.scheduler.client.report [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Inventory has not changed for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 159, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 849.924050] env[62519]: DEBUG nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62519) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 849.924246] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 1.594s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 851.924606] env[62519]: DEBUG oslo_service.periodic_task [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62519) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 905.762609] env[62519]: DEBUG oslo_service.periodic_task [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62519) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 905.763063] env[62519]: DEBUG nova.compute.manager [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] CONF.reclaim_instance_interval <= 0, skipping... 
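The recurring "CONF.reclaim_instance_interval <= 0, skipping..." entries show the soft-delete reclaim task returning early because the feature is disabled in this deployment. A hypothetical sketch of that guard pattern (names are placeholders, not Nova's implementation):

    import logging

    LOG = logging.getLogger(__name__)

    # Stand-in for CONF.reclaim_instance_interval; 0 disables reclaim.
    RECLAIM_INSTANCE_INTERVAL = 0


    def reclaim_queued_deletes():
        # Bail out immediately when reclaim is disabled, the same
        # short-circuit reported in the log line above.
        if RECLAIM_INSTANCE_INTERVAL <= 0:
            LOG.debug("CONF.reclaim_instance_interval <= 0, skipping...")
            return
        # ...otherwise SOFT_DELETED instances older than the interval
        # would be looked up and purged (omitted in this sketch).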
{{(pid=62519) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11037}} [ 906.756547] env[62519]: DEBUG oslo_service.periodic_task [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62519) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 906.761176] env[62519]: DEBUG oslo_service.periodic_task [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62519) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 906.761358] env[62519]: DEBUG oslo_service.periodic_task [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62519) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 907.762200] env[62519]: DEBUG oslo_service.periodic_task [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62519) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 907.762577] env[62519]: DEBUG nova.compute.manager [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Starting heal instance info cache {{(pid=62519) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10418}} [ 907.762577] env[62519]: DEBUG nova.compute.manager [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Rebuilding the list of instances to heal {{(pid=62519) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10422}} [ 908.265426] env[62519]: DEBUG nova.compute.manager [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Didn't find any instances for network info cache update. 
{{(pid=62519) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10504}} [ 908.265663] env[62519]: DEBUG oslo_service.periodic_task [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62519) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 908.265885] env[62519]: DEBUG oslo_service.periodic_task [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62519) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 909.762231] env[62519]: DEBUG oslo_service.periodic_task [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Running periodic task ComputeManager.update_available_resource {{(pid=62519) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 910.265907] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 910.266158] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 910.266326] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 910.266478] env[62519]: DEBUG nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62519) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 910.267379] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-710295f4-6191-4a0d-9415-d004a4d7a5fe {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.275442] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-634afb41-e6fa-463a-989e-df7a491f8265 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.288758] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89181a90-0887-44e8-b681-e0bf3833f5e4 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.294650] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c6b71de-e648-46b4-8bf7-fe39c85275e3 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.322288] env[62519]: DEBUG nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] 
Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181376MB free_disk=159GB free_vcpus=48 pci_devices=None {{(pid=62519) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 910.322411] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 910.322586] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 911.359357] env[62519]: DEBUG nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=62519) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 911.359357] env[62519]: DEBUG nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=62519) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 911.376347] env[62519]: DEBUG nova.scheduler.client.report [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Refreshing inventories for resource provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 {{(pid=62519) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 911.392876] env[62519]: DEBUG nova.scheduler.client.report [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Updating ProviderTree inventory for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 159, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 911.392876] env[62519]: DEBUG nova.compute.provider_tree [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Updating inventory in ProviderTree for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 159, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 911.404732] env[62519]: DEBUG nova.scheduler.client.report [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Refreshing aggregate associations for resource provider f8ca0d98-9158-4b85-ae0e-b106f966dd44, aggregates: None {{(pid=62519) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 
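The report-client refresh lines above (inventories, then aggregate and trait associations for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44) correspond to calls against the Placement HTTP API. A rough illustration of what an inventory update for that provider looks like over the raw REST API; the endpoint URL, token, microversion, and generation value are placeholders, and Nova itself goes through its SchedulerReportClient rather than hand-rolled requests:

    import requests

    PLACEMENT = "http://placement.example:8778"        # placeholder endpoint
    PROVIDER = "f8ca0d98-9158-4b85-ae0e-b106f966dd44"  # provider UUID from this log
    HEADERS = {
        "X-Auth-Token": "<keystone-token>",            # placeholder credential
        "OpenStack-API-Version": "placement 1.39",     # assumed microversion
    }

    payload = {
        # Must match the provider's current generation, otherwise
        # Placement rejects the update with a 409.
        "resource_provider_generation": 1,
        "inventories": {
            "VCPU": {"total": 48, "reserved": 0, "min_unit": 1, "max_unit": 16,
                     "step_size": 1, "allocation_ratio": 4.0},
            "MEMORY_MB": {"total": 196590, "reserved": 512, "min_unit": 1,
                          "max_unit": 65530, "step_size": 1, "allocation_ratio": 1.0},
            "DISK_GB": {"total": 400, "reserved": 0, "min_unit": 1, "max_unit": 159,
                        "step_size": 1, "allocation_ratio": 1.0},
        },
    }

    # Will only succeed against a real Placement deployment; the host
    # above is a placeholder.
    resp = requests.put(f"{PLACEMENT}/resource_providers/{PROVIDER}/inventories",
                        json=payload, headers=HEADERS)
    resp.raise_for_status()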
911.420202] env[62519]: DEBUG nova.scheduler.client.report [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Refreshing trait associations for resource provider f8ca0d98-9158-4b85-ae0e-b106f966dd44, traits: COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NODE,COMPUTE_SAME_HOST_COLD_MIGRATE,HW_ARCH_X86_64,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_VMDK {{(pid=62519) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 911.432090] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-516cb957-bc54-4b2d-a7da-3919ead39549 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 911.437036] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bfe7a4ae-a68d-4444-9aca-5f5f03c5c201 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 911.466935] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da0bd09a-dc38-4ce1-b8c0-7610e7d5024d {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 911.474046] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64e514f9-e62e-4876-919a-a678ce5b56f2 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 911.487140] env[62519]: DEBUG nova.compute.provider_tree [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Inventory has not changed in ProviderTree for provider: f8ca0d98-9158-4b85-ae0e-b106f966dd44 {{(pid=62519) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 911.991113] env[62519]: DEBUG nova.scheduler.client.report [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Inventory has not changed for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 159, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 911.991486] env[62519]: DEBUG nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62519) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 911.991739] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 1.669s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 912.986165] env[62519]: DEBUG oslo_service.periodic_task [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=62519) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 913.492255] env[62519]: DEBUG oslo_service.periodic_task [None req-2e346fd1-4f93-4335-96df-208436219ecf 
None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62519) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 966.764971] env[62519]: DEBUG oslo_service.periodic_task [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62519) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 966.764971] env[62519]: DEBUG oslo_service.periodic_task [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62519) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 966.764971] env[62519]: DEBUG nova.compute.manager [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=62519) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11037}} [ 967.762357] env[62519]: DEBUG oslo_service.periodic_task [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62519) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 968.762738] env[62519]: DEBUG oslo_service.periodic_task [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62519) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 969.762025] env[62519]: DEBUG oslo_service.periodic_task [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62519) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 969.762160] env[62519]: DEBUG nova.compute.manager [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Starting heal instance info cache {{(pid=62519) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10418}} [ 969.762299] env[62519]: DEBUG nova.compute.manager [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Rebuilding the list of instances to heal {{(pid=62519) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10422}} [ 970.265945] env[62519]: DEBUG nova.compute.manager [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Didn't find any instances for network info cache update. 
{{(pid=62519) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10504}} [ 970.266323] env[62519]: DEBUG oslo_service.periodic_task [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62519) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 970.266380] env[62519]: DEBUG oslo_service.periodic_task [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62519) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 970.762083] env[62519]: DEBUG oslo_service.periodic_task [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Running periodic task ComputeManager.update_available_resource {{(pid=62519) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 971.265261] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 971.265527] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 971.265671] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 971.265820] env[62519]: DEBUG nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62519) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 971.266766] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3d2a003-920f-4dbe-b459-607bd214e77e {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 971.275960] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc1d710e-eacb-4ba0-b5fc-8dff3c7dbe54 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 971.289675] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1695888-932d-45a6-b33d-230d2fb947a5 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 971.296012] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f1b8f92-2755-4b03-94cd-994ccdac06cb {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 971.325363] env[62519]: DEBUG nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] 
Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181375MB free_disk=159GB free_vcpus=48 pci_devices=None {{(pid=62519) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 971.325499] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 971.325683] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 972.344617] env[62519]: DEBUG nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=62519) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 972.344908] env[62519]: DEBUG nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=62519) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 972.357855] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-024ed662-c572-4e38-b155-c63f22c5e106 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.365522] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a6851e2-ef21-44af-a39b-f72f9429501a {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.394929] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-598fbddd-fb95-43e6-b17f-68bd853b166d {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.401784] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e71aaefe-ead2-4fb0-a548-2aa96dff0fc9 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.414531] env[62519]: DEBUG nova.compute.provider_tree [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Inventory has not changed in ProviderTree for provider: f8ca0d98-9158-4b85-ae0e-b106f966dd44 {{(pid=62519) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 972.917312] env[62519]: DEBUG nova.scheduler.client.report [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Inventory has not changed for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 159, 'step_size': 1, 'allocation_ratio': 1.0}} 
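Each audit cycle ends with the inventory comparison logged above; the capacity Placement can allocate from a resource class is (total - reserved) * allocation_ratio, so the inventory shown corresponds to 192 schedulable VCPUs, 196078 MB of RAM, and 400 GB of disk. A quick check in plain Python with the values copied from the log:

    # Effective capacity per resource class, computed from the inventory
    # data logged above: (total - reserved) * allocation_ratio.
    inventory = {
        "VCPU": {"total": 48, "reserved": 0, "allocation_ratio": 4.0},
        "MEMORY_MB": {"total": 196590, "reserved": 512, "allocation_ratio": 1.0},
        "DISK_GB": {"total": 400, "reserved": 0, "allocation_ratio": 1.0},
    }

    for rc, inv in inventory.items():
        capacity = (inv["total"] - inv["reserved"]) * inv["allocation_ratio"]
        print(rc, int(capacity))

    # VCPU 192
    # MEMORY_MB 196078
    # DISK_GB 400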
{{(pid=62519) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 972.918570] env[62519]: DEBUG nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62519) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 972.918739] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 1.593s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 974.918763] env[62519]: DEBUG oslo_service.periodic_task [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62519) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1027.757754] env[62519]: DEBUG oslo_service.periodic_task [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62519) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1027.761403] env[62519]: DEBUG oslo_service.periodic_task [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62519) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1028.762552] env[62519]: DEBUG oslo_service.periodic_task [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62519) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1028.762904] env[62519]: DEBUG oslo_service.periodic_task [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62519) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1028.762965] env[62519]: DEBUG nova.compute.manager [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=62519) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11037}} [ 1029.762239] env[62519]: DEBUG oslo_service.periodic_task [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62519) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1030.763862] env[62519]: DEBUG oslo_service.periodic_task [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62519) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1030.763862] env[62519]: DEBUG oslo_service.periodic_task [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Running periodic task ComputeManager.update_available_resource {{(pid=62519) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1031.266249] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1031.266498] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1031.266671] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1031.266825] env[62519]: DEBUG nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62519) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1031.267741] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4513dd04-5527-45a1-9684-a18325461d86 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1031.275973] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a983fe5-5757-4727-ae3d-7b615f21dcc7 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1031.289448] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1966e42e-d32a-4892-8eb1-4b49fcce2ee5 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1031.295586] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3970b3bd-935f-4c70-8fc2-741bc43931d3 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1031.324036] env[62519]: DEBUG nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None 
None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181386MB free_disk=159GB free_vcpus=48 pci_devices=None {{(pid=62519) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1031.324168] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1031.324346] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1032.342066] env[62519]: DEBUG nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=62519) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1032.342317] env[62519]: DEBUG nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=62519) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1032.356397] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc107569-8742-42fa-924c-81b0bae5efd2 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1032.363970] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4cf75373-bb4f-48e5-8c5f-9c50a678386a {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1032.392874] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0307a6d1-af62-452d-ab31-7afd3ad8bda9 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1032.399525] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8170422f-4f11-4652-880e-da7da6259237 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1032.412670] env[62519]: DEBUG nova.compute.provider_tree [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Inventory has not changed in ProviderTree for provider: f8ca0d98-9158-4b85-ae0e-b106f966dd44 {{(pid=62519) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1032.915314] env[62519]: DEBUG nova.scheduler.client.report [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Inventory has not changed for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 159, 'step_size': 1, 
'allocation_ratio': 1.0}} {{(pid=62519) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1032.916582] env[62519]: DEBUG nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62519) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1032.916763] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 1.592s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1033.917171] env[62519]: DEBUG oslo_service.periodic_task [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62519) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1033.917542] env[62519]: DEBUG nova.compute.manager [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Starting heal instance info cache {{(pid=62519) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10418}} [ 1033.917542] env[62519]: DEBUG nova.compute.manager [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Rebuilding the list of instances to heal {{(pid=62519) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10422}} [ 1034.420507] env[62519]: DEBUG nova.compute.manager [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Didn't find any instances for network info cache update. {{(pid=62519) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10504}} [ 1034.761582] env[62519]: DEBUG oslo_service.periodic_task [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=62519) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1035.268882] env[62519]: DEBUG oslo_service.periodic_task [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62519) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1087.762477] env[62519]: DEBUG oslo_service.periodic_task [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62519) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1088.756799] env[62519]: DEBUG oslo_service.periodic_task [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62519) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1088.761383] env[62519]: DEBUG oslo_service.periodic_task [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62519) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1089.761600] env[62519]: DEBUG oslo_service.periodic_task [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62519) run_periodic_tasks 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1089.761978] env[62519]: DEBUG nova.compute.manager [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=62519) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11037}} [ 1090.762643] env[62519]: DEBUG oslo_service.periodic_task [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62519) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1090.763054] env[62519]: DEBUG oslo_service.periodic_task [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Running periodic task ComputeManager.update_available_resource {{(pid=62519) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1091.266061] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1091.266303] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1091.266472] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1091.266619] env[62519]: DEBUG nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62519) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1091.267535] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01d7833d-15b0-4fe0-824c-b9931f0f3d03 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1091.275922] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-482f8849-9192-4632-a1a7-9a27cb507268 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1091.289288] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2960aa6e-3386-4e1a-88ca-26166c6d143e {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1091.295427] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a49ed46-ff9a-42f5-b5cd-b171ba6eeff8 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1091.323764] env[62519]: DEBUG nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Hypervisor/Node resource view: 
name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181381MB free_disk=160GB free_vcpus=48 pci_devices=None {{(pid=62519) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1091.323899] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1091.324085] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1092.344375] env[62519]: DEBUG nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=62519) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1092.344974] env[62519]: DEBUG nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=62519) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1092.359124] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f345bb06-715b-4771-b843-9ee5633f4d29 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1092.367045] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78de2e99-e8f8-4e4d-8d05-aa96ebb58dc2 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1092.394846] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8f8cd0d-ee10-42fa-9ad3-30c164cb0825 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1092.401438] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b60d3e44-b67d-4814-8fa3-2926615c74ac {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1092.415904] env[62519]: DEBUG nova.compute.provider_tree [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Updating inventory in ProviderTree for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 160, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1092.959127] env[62519]: DEBUG nova.scheduler.client.report [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Updated inventory for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 with generation 2 in Placement from 
set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 160, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 1092.959456] env[62519]: DEBUG nova.compute.provider_tree [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Updating resource provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 generation from 2 to 3 during operation: update_inventory {{(pid=62519) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1092.959549] env[62519]: DEBUG nova.compute.provider_tree [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Updating inventory in ProviderTree for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 160, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1092.960865] env[62519]: DEBUG nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62519) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1092.961046] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 1.637s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1094.961049] env[62519]: DEBUG oslo_service.periodic_task [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62519) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1094.961049] env[62519]: DEBUG nova.compute.manager [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Starting heal instance info cache {{(pid=62519) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10418}} [ 1094.961464] env[62519]: DEBUG nova.compute.manager [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Rebuilding the list of instances to heal {{(pid=62519) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10422}} [ 1095.463980] env[62519]: DEBUG nova.compute.manager [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Didn't find any instances for network info cache update. 
{{(pid=62519) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10504}} [ 1095.464154] env[62519]: DEBUG oslo_service.periodic_task [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62519) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1095.464318] env[62519]: DEBUG oslo_service.periodic_task [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62519) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1142.766058] env[62519]: DEBUG oslo_service.periodic_task [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=62519) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1147.265070] env[62519]: DEBUG oslo_service.periodic_task [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=62519) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1147.265070] env[62519]: DEBUG nova.compute.manager [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Cleaning up deleted instances {{(pid=62519) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11720}} [ 1147.767656] env[62519]: DEBUG nova.compute.manager [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] There are 0 instances to clean {{(pid=62519) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11729}} [ 1149.265847] env[62519]: DEBUG oslo_service.periodic_task [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62519) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1149.266270] env[62519]: DEBUG oslo_service.periodic_task [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62519) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1150.756806] env[62519]: DEBUG oslo_service.periodic_task [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62519) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1150.761395] env[62519]: DEBUG oslo_service.periodic_task [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62519) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1150.761538] env[62519]: DEBUG nova.compute.manager [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=62519) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11037}} [ 1151.761924] env[62519]: DEBUG oslo_service.periodic_task [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62519) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1151.762352] env[62519]: DEBUG oslo_service.periodic_task [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Running periodic task ComputeManager.update_available_resource {{(pid=62519) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1152.265357] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1152.265593] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1152.265750] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1152.265898] env[62519]: DEBUG nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62519) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1152.266808] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08d31c66-f689-4558-a9c9-9f99b4fc6af0 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1152.275163] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99d2e167-e09b-46b0-9e17-c88979780c56 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1152.288361] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24efa040-648c-41d6-a305-cfc27a97eba3 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1152.294429] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-985018dc-c98c-4456-bec7-197b587c72f5 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1152.322538] env[62519]: DEBUG nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181376MB free_disk=160GB free_vcpus=48 pci_devices=None {{(pid=62519) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1152.322692] env[62519]: DEBUG 
oslo_concurrency.lockutils [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1152.322861] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1153.341511] env[62519]: DEBUG nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=62519) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1153.341775] env[62519]: DEBUG nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=62519) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1153.354578] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8ce7f5b-f9fa-45c4-a7de-e284b54204d4 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1153.361962] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c456841-fd4f-4635-95e1-f8cd55bb0dba {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1153.390532] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ce469c9-379b-419e-a159-3e94499709a1 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1153.397476] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a647639e-8d32-404f-8a32-2aa2ca521b9c {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1153.410704] env[62519]: DEBUG nova.compute.provider_tree [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Inventory has not changed in ProviderTree for provider: f8ca0d98-9158-4b85-ae0e-b106f966dd44 {{(pid=62519) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1153.915666] env[62519]: DEBUG nova.scheduler.client.report [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Inventory has not changed for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 160, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1153.915666] env[62519]: DEBUG nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Compute_service record updated for 
cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62519) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1153.915666] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 1.593s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1154.762621] env[62519]: DEBUG oslo_service.periodic_task [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62519) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1154.762905] env[62519]: DEBUG oslo_service.periodic_task [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62519) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1154.763040] env[62519]: DEBUG oslo_service.periodic_task [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=62519) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1154.763182] env[62519]: DEBUG nova.compute.manager [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Cleaning up deleted instances with incomplete migration {{(pid=62519) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11758}} [ 1156.262510] env[62519]: DEBUG oslo_service.periodic_task [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=62519) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1156.768133] env[62519]: DEBUG oslo_service.periodic_task [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62519) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1156.768133] env[62519]: DEBUG nova.compute.manager [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Starting heal instance info cache {{(pid=62519) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10418}} [ 1156.768133] env[62519]: DEBUG nova.compute.manager [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Rebuilding the list of instances to heal {{(pid=62519) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10422}} [ 1157.270674] env[62519]: DEBUG nova.compute.manager [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Didn't find any instances for network info cache update. 
{{(pid=62519) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10504}} [ 1158.510396] env[62519]: DEBUG oslo_service.periodic_task [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Running periodic task ComputeManager._sync_power_states {{(pid=62519) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1159.013463] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Getting list of instances from cluster (obj){ [ 1159.013463] env[62519]: value = "domain-c8" [ 1159.013463] env[62519]: _type = "ClusterComputeResource" [ 1159.013463] env[62519]: } {{(pid=62519) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2116}} [ 1159.014537] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74b12b47-c4af-4ea3-95eb-62e404ab86c4 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1159.023375] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Got total of 0 instances {{(pid=62519) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2125}} [ 1209.276411] env[62519]: DEBUG oslo_service.periodic_task [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62519) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1210.763256] env[62519]: DEBUG oslo_service.periodic_task [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62519) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1211.762998] env[62519]: DEBUG oslo_service.periodic_task [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62519) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1211.763302] env[62519]: DEBUG oslo_service.periodic_task [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Running periodic task ComputeManager.update_available_resource {{(pid=62519) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1212.267091] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1212.267339] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1212.267508] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1212.267655] env[62519]: DEBUG nova.compute.resource_tracker [None 
req-2e346fd1-4f93-4335-96df-208436219ecf None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62519) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1212.268557] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1140b5ec-78d0-4055-96ec-02ee7e3efcfb {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1212.277082] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c3c3b98-49c5-4160-b8a2-d5d84bf7368d {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1212.292202] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0c30e07-d216-4705-9b73-eb7c4337e630 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1212.298839] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0057cf3-ad22-406a-9f95-fdf483cfd854 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1212.327604] env[62519]: DEBUG nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181384MB free_disk=160GB free_vcpus=48 pci_devices=None {{(pid=62519) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1212.327776] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1212.327936] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1213.488995] env[62519]: DEBUG nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=62519) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1213.489275] env[62519]: DEBUG nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=62519) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1213.505185] env[62519]: DEBUG nova.scheduler.client.report [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Refreshing inventories for resource provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 {{(pid=62519) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 1213.517866] env[62519]: DEBUG nova.scheduler.client.report [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Updating ProviderTree inventory for provider 
f8ca0d98-9158-4b85-ae0e-b106f966dd44 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 160, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 1213.518071] env[62519]: DEBUG nova.compute.provider_tree [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Updating inventory in ProviderTree for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 160, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1213.527984] env[62519]: DEBUG nova.scheduler.client.report [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Refreshing aggregate associations for resource provider f8ca0d98-9158-4b85-ae0e-b106f966dd44, aggregates: None {{(pid=62519) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 1213.543466] env[62519]: DEBUG nova.scheduler.client.report [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Refreshing trait associations for resource provider f8ca0d98-9158-4b85-ae0e-b106f966dd44, traits: COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NODE,COMPUTE_SAME_HOST_COLD_MIGRATE,HW_ARCH_X86_64,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_VMDK {{(pid=62519) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 1213.554823] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7688c4dc-f5e9-4b6b-bf91-4f2f83b1dda9 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1213.561853] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d121d4fe-b605-4de7-96e2-9f0dd542aacf {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1213.590831] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c968363a-58ec-445d-b808-d45ef0ab8b5b {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1213.597432] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79f87731-589e-4062-acc4-38c815db53cc {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1213.609984] env[62519]: DEBUG nova.compute.provider_tree [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Inventory has not changed in ProviderTree for provider: f8ca0d98-9158-4b85-ae0e-b106f966dd44 {{(pid=62519) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1214.113478] env[62519]: DEBUG nova.scheduler.client.report [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Inventory has not changed for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 based on inventory data: 
{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 160, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1214.114894] env[62519]: DEBUG nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62519) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1214.115118] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 1.787s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1215.108503] env[62519]: DEBUG oslo_service.periodic_task [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62519) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1215.109059] env[62519]: DEBUG oslo_service.periodic_task [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62519) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1215.109059] env[62519]: DEBUG oslo_service.periodic_task [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62519) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1215.109373] env[62519]: DEBUG oslo_service.periodic_task [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62519) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1215.109373] env[62519]: DEBUG nova.compute.manager [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=62519) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11037}} [ 1217.762591] env[62519]: DEBUG oslo_service.periodic_task [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62519) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1217.763021] env[62519]: DEBUG nova.compute.manager [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Starting heal instance info cache {{(pid=62519) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10418}} [ 1217.763021] env[62519]: DEBUG nova.compute.manager [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Rebuilding the list of instances to heal {{(pid=62519) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10422}} [ 1218.265848] env[62519]: DEBUG nova.compute.manager [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Didn't find any instances for network info cache update. 
{{(pid=62519) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10504}} [ 1270.763471] env[62519]: DEBUG oslo_service.periodic_task [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62519) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1270.763859] env[62519]: DEBUG oslo_service.periodic_task [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62519) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1272.762578] env[62519]: DEBUG oslo_service.periodic_task [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62519) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1273.761533] env[62519]: DEBUG oslo_service.periodic_task [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Running periodic task ComputeManager.update_available_resource {{(pid=62519) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1274.265131] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1274.265547] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1274.265547] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1274.265671] env[62519]: DEBUG nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62519) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1274.266629] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ad284ce-2bc9-4254-92ef-8afca49e5607 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1274.274900] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff9a9368-e218-4a73-8205-abb0e3d316b7 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1274.288306] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1dcd3601-6c17-49c6-8284-755cb631166b {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1274.294508] env[62519]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-583c0852-8f66-478f-a021-114e2c42b49f {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1274.323167] env[62519]: DEBUG nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181365MB free_disk=160GB free_vcpus=48 pci_devices=None {{(pid=62519) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1274.323354] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1274.323576] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1275.340852] env[62519]: DEBUG nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=62519) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1275.341109] env[62519]: DEBUG nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=62519) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1275.354454] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42e4bd95-7d2e-4080-a7bc-58408b850f10 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1275.361577] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-85f228f1-80b6-4d0c-a9d6-d4ba5beeee48 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1275.390202] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc5d24b7-b1ce-4453-acc7-6f4710c9bbc2 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1275.397127] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78408758-8770-459c-87ed-9da4b5d28867 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1275.410394] env[62519]: DEBUG nova.compute.provider_tree [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Inventory has not changed in ProviderTree for provider: f8ca0d98-9158-4b85-ae0e-b106f966dd44 {{(pid=62519) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1275.913795] env[62519]: DEBUG nova.scheduler.client.report [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Inventory has not changed for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 based on inventory data: {'VCPU': 
{'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 160, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1275.915083] env[62519]: DEBUG nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62519) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1275.915264] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 1.592s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1276.909881] env[62519]: DEBUG oslo_service.periodic_task [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62519) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1276.910320] env[62519]: DEBUG oslo_service.periodic_task [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=62519) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1277.416133] env[62519]: DEBUG oslo_service.periodic_task [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62519) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1277.416133] env[62519]: DEBUG oslo_service.periodic_task [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62519) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1277.416133] env[62519]: DEBUG oslo_service.periodic_task [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62519) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1277.416133] env[62519]: DEBUG nova.compute.manager [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=62519) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11037}} [ 1279.762894] env[62519]: DEBUG oslo_service.periodic_task [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62519) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1279.763294] env[62519]: DEBUG nova.compute.manager [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Starting heal instance info cache {{(pid=62519) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10418}} [ 1279.763294] env[62519]: DEBUG nova.compute.manager [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Rebuilding the list of instances to heal {{(pid=62519) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10422}} [ 1280.266548] env[62519]: DEBUG nova.compute.manager [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Didn't find any instances for network info cache update. {{(pid=62519) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10504}} [ 1330.763519] env[62519]: DEBUG oslo_service.periodic_task [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62519) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1332.763611] env[62519]: DEBUG oslo_service.periodic_task [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62519) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1332.763946] env[62519]: DEBUG oslo_service.periodic_task [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62519) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1334.756598] env[62519]: DEBUG oslo_service.periodic_task [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62519) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1334.761187] env[62519]: DEBUG oslo_service.periodic_task [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62519) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1334.761311] env[62519]: DEBUG nova.compute.manager [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=62519) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11037}} [ 1334.761440] env[62519]: DEBUG oslo_service.periodic_task [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Running periodic task ComputeManager.update_available_resource {{(pid=62519) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1335.264678] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1335.264922] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1335.265119] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1335.265276] env[62519]: DEBUG nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62519) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1335.266533] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0fc1f59-08da-42aa-b916-c55d24910519 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1335.274821] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10b733b3-08b5-4edf-ad46-2177f600ae99 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1335.288783] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff9f8fcf-ab42-453a-92bf-bee4763212f3 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1335.294893] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ef70855-8438-4ac7-ac70-2cb841112726 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1335.323903] env[62519]: DEBUG nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181367MB free_disk=160GB free_vcpus=48 pci_devices=None {{(pid=62519) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1335.324062] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 
1335.328771] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1336.344175] env[62519]: DEBUG nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=62519) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1336.344431] env[62519]: DEBUG nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=62519) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1336.356797] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-efbde885-a76f-415b-bc95-5e186094890a {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1336.364442] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e477ad21-83d7-4fb8-8748-21036005bc8d {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1336.393652] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6bcc13f-abfe-4ce5-bac6-00c9c6c6ea9a {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1336.400700] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c55faeef-af27-4e07-9c73-5d8dcb4029cf {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1336.413696] env[62519]: DEBUG nova.compute.provider_tree [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Inventory has not changed in ProviderTree for provider: f8ca0d98-9158-4b85-ae0e-b106f966dd44 {{(pid=62519) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1336.917234] env[62519]: DEBUG nova.scheduler.client.report [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Inventory has not changed for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 160, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1336.918502] env[62519]: DEBUG nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62519) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1336.918687] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Lock "compute_resources" "released" by 
"nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 1.594s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1338.919638] env[62519]: DEBUG oslo_service.periodic_task [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62519) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1338.920064] env[62519]: DEBUG oslo_service.periodic_task [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62519) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1340.762062] env[62519]: DEBUG oslo_service.periodic_task [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62519) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1340.762415] env[62519]: DEBUG nova.compute.manager [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Starting heal instance info cache {{(pid=62519) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10418}} [ 1340.762415] env[62519]: DEBUG nova.compute.manager [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Rebuilding the list of instances to heal {{(pid=62519) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10422}} [ 1341.265114] env[62519]: DEBUG nova.compute.manager [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Didn't find any instances for network info cache update. {{(pid=62519) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10504}} [ 1370.568606] env[62519]: DEBUG oslo_concurrency.lockutils [None req-5e4798b6-5fc6-48b8-933e-b53f21d26399 tempest-DeleteServersAdminTestJSON-456585889 tempest-DeleteServersAdminTestJSON-456585889-project-member] Acquiring lock "f6e29557-05ad-4a11-bd01-0315926c0413" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1370.569129] env[62519]: DEBUG oslo_concurrency.lockutils [None req-5e4798b6-5fc6-48b8-933e-b53f21d26399 tempest-DeleteServersAdminTestJSON-456585889 tempest-DeleteServersAdminTestJSON-456585889-project-member] Lock "f6e29557-05ad-4a11-bd01-0315926c0413" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1371.075035] env[62519]: DEBUG nova.compute.manager [None req-5e4798b6-5fc6-48b8-933e-b53f21d26399 tempest-DeleteServersAdminTestJSON-456585889 tempest-DeleteServersAdminTestJSON-456585889-project-member] [instance: f6e29557-05ad-4a11-bd01-0315926c0413] Starting instance... 
{{(pid=62519) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2441}} [ 1371.622343] env[62519]: DEBUG oslo_concurrency.lockutils [None req-5e4798b6-5fc6-48b8-933e-b53f21d26399 tempest-DeleteServersAdminTestJSON-456585889 tempest-DeleteServersAdminTestJSON-456585889-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1371.622702] env[62519]: DEBUG oslo_concurrency.lockutils [None req-5e4798b6-5fc6-48b8-933e-b53f21d26399 tempest-DeleteServersAdminTestJSON-456585889 tempest-DeleteServersAdminTestJSON-456585889-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1371.625087] env[62519]: INFO nova.compute.claims [None req-5e4798b6-5fc6-48b8-933e-b53f21d26399 tempest-DeleteServersAdminTestJSON-456585889 tempest-DeleteServersAdminTestJSON-456585889-project-member] [instance: f6e29557-05ad-4a11-bd01-0315926c0413] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1372.671835] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05232ad3-2ba8-4b3b-bccd-5aa7b9530f33 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1372.680339] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84d96e27-6902-4e3b-b2c3-1d1b29c5c455 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1372.713347] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fff84c15-bfe1-4124-9224-a4799d0f0403 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1372.721236] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31f3cf9f-a0ba-47dd-b0d9-1604f96e3080 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1372.735482] env[62519]: DEBUG nova.compute.provider_tree [None req-5e4798b6-5fc6-48b8-933e-b53f21d26399 tempest-DeleteServersAdminTestJSON-456585889 tempest-DeleteServersAdminTestJSON-456585889-project-member] Inventory has not changed in ProviderTree for provider: f8ca0d98-9158-4b85-ae0e-b106f966dd44 {{(pid=62519) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1373.238398] env[62519]: DEBUG nova.scheduler.client.report [None req-5e4798b6-5fc6-48b8-933e-b53f21d26399 tempest-DeleteServersAdminTestJSON-456585889 tempest-DeleteServersAdminTestJSON-456585889-project-member] Inventory has not changed for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 160, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1373.744585] env[62519]: DEBUG 
oslo_concurrency.lockutils [None req-5e4798b6-5fc6-48b8-933e-b53f21d26399 tempest-DeleteServersAdminTestJSON-456585889 tempest-DeleteServersAdminTestJSON-456585889-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.122s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1373.745260] env[62519]: DEBUG nova.compute.manager [None req-5e4798b6-5fc6-48b8-933e-b53f21d26399 tempest-DeleteServersAdminTestJSON-456585889 tempest-DeleteServersAdminTestJSON-456585889-project-member] [instance: f6e29557-05ad-4a11-bd01-0315926c0413] Start building networks asynchronously for instance. {{(pid=62519) _build_resources /opt/stack/nova/nova/compute/manager.py:2838}} [ 1374.182746] env[62519]: DEBUG oslo_concurrency.lockutils [None req-c40feabf-b595-479f-ad29-1457519d4d7d tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Acquiring lock "fe350d30-6fbd-4813-9634-ed05984fecfd" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1374.182999] env[62519]: DEBUG oslo_concurrency.lockutils [None req-c40feabf-b595-479f-ad29-1457519d4d7d tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Lock "fe350d30-6fbd-4813-9634-ed05984fecfd" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1374.250605] env[62519]: DEBUG nova.compute.utils [None req-5e4798b6-5fc6-48b8-933e-b53f21d26399 tempest-DeleteServersAdminTestJSON-456585889 tempest-DeleteServersAdminTestJSON-456585889-project-member] Using /dev/sd instead of None {{(pid=62519) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1374.253060] env[62519]: DEBUG nova.compute.manager [None req-5e4798b6-5fc6-48b8-933e-b53f21d26399 tempest-DeleteServersAdminTestJSON-456585889 tempest-DeleteServersAdminTestJSON-456585889-project-member] [instance: f6e29557-05ad-4a11-bd01-0315926c0413] Allocating IP information in the background. {{(pid=62519) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1989}} [ 1374.253318] env[62519]: DEBUG nova.network.neutron [None req-5e4798b6-5fc6-48b8-933e-b53f21d26399 tempest-DeleteServersAdminTestJSON-456585889 tempest-DeleteServersAdminTestJSON-456585889-project-member] [instance: f6e29557-05ad-4a11-bd01-0315926c0413] allocate_for_instance() {{(pid=62519) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1374.686221] env[62519]: DEBUG nova.compute.manager [None req-c40feabf-b595-479f-ad29-1457519d4d7d tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] [instance: fe350d30-6fbd-4813-9634-ed05984fecfd] Starting instance... {{(pid=62519) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2441}} [ 1374.761145] env[62519]: DEBUG nova.compute.manager [None req-5e4798b6-5fc6-48b8-933e-b53f21d26399 tempest-DeleteServersAdminTestJSON-456585889 tempest-DeleteServersAdminTestJSON-456585889-project-member] [instance: f6e29557-05ad-4a11-bd01-0315926c0413] Start building block device mappings for instance. 
{{(pid=62519) _build_resources /opt/stack/nova/nova/compute/manager.py:2873}} [ 1375.217298] env[62519]: DEBUG oslo_concurrency.lockutils [None req-c40feabf-b595-479f-ad29-1457519d4d7d tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1375.217532] env[62519]: DEBUG oslo_concurrency.lockutils [None req-c40feabf-b595-479f-ad29-1457519d4d7d tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1375.218997] env[62519]: INFO nova.compute.claims [None req-c40feabf-b595-479f-ad29-1457519d4d7d tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] [instance: fe350d30-6fbd-4813-9634-ed05984fecfd] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1375.777823] env[62519]: DEBUG nova.compute.manager [None req-5e4798b6-5fc6-48b8-933e-b53f21d26399 tempest-DeleteServersAdminTestJSON-456585889 tempest-DeleteServersAdminTestJSON-456585889-project-member] [instance: f6e29557-05ad-4a11-bd01-0315926c0413] Start spawning the instance on the hypervisor. {{(pid=62519) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2647}} [ 1376.291351] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc8639b1-85f4-4588-8328-4c8f1a2a8e2d {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1376.299295] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab85c5d8-58de-4bcf-8e65-e50cf2a1d625 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1376.331928] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-298aa87f-7a6d-450d-b3e7-3c63ce06cc01 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1376.339522] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48f03a03-0905-4d26-8509-7ca2c828e7fd {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1376.352916] env[62519]: DEBUG nova.compute.provider_tree [None req-c40feabf-b595-479f-ad29-1457519d4d7d tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Inventory has not changed in ProviderTree for provider: f8ca0d98-9158-4b85-ae0e-b106f966dd44 {{(pid=62519) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1376.627022] env[62519]: DEBUG nova.virt.hardware [None req-5e4798b6-5fc6-48b8-933e-b53f21d26399 tempest-DeleteServersAdminTestJSON-456585889 tempest-DeleteServersAdminTestJSON-456585889-project-member] Getting desirable topologies for flavor 
Flavor(created_at=2024-12-12T07:56:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T07:55:55Z,direct_url=,disk_format='vmdk',id=15793716-f1d9-4a86-9030-717adf498693,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4069337684d348e0a1ab4eb6a1a2b14d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T07:55:56Z,virtual_size=,visibility=), allow threads: False {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1376.627215] env[62519]: DEBUG nova.virt.hardware [None req-5e4798b6-5fc6-48b8-933e-b53f21d26399 tempest-DeleteServersAdminTestJSON-456585889 tempest-DeleteServersAdminTestJSON-456585889-project-member] Flavor limits 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1376.627367] env[62519]: DEBUG nova.virt.hardware [None req-5e4798b6-5fc6-48b8-933e-b53f21d26399 tempest-DeleteServersAdminTestJSON-456585889 tempest-DeleteServersAdminTestJSON-456585889-project-member] Image limits 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1376.627549] env[62519]: DEBUG nova.virt.hardware [None req-5e4798b6-5fc6-48b8-933e-b53f21d26399 tempest-DeleteServersAdminTestJSON-456585889 tempest-DeleteServersAdminTestJSON-456585889-project-member] Flavor pref 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1376.627693] env[62519]: DEBUG nova.virt.hardware [None req-5e4798b6-5fc6-48b8-933e-b53f21d26399 tempest-DeleteServersAdminTestJSON-456585889 tempest-DeleteServersAdminTestJSON-456585889-project-member] Image pref 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1376.627844] env[62519]: DEBUG nova.virt.hardware [None req-5e4798b6-5fc6-48b8-933e-b53f21d26399 tempest-DeleteServersAdminTestJSON-456585889 tempest-DeleteServersAdminTestJSON-456585889-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1376.628181] env[62519]: DEBUG nova.virt.hardware [None req-5e4798b6-5fc6-48b8-933e-b53f21d26399 tempest-DeleteServersAdminTestJSON-456585889 tempest-DeleteServersAdminTestJSON-456585889-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1376.628366] env[62519]: DEBUG nova.virt.hardware [None req-5e4798b6-5fc6-48b8-933e-b53f21d26399 tempest-DeleteServersAdminTestJSON-456585889 tempest-DeleteServersAdminTestJSON-456585889-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62519) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1376.628534] env[62519]: DEBUG nova.virt.hardware [None req-5e4798b6-5fc6-48b8-933e-b53f21d26399 tempest-DeleteServersAdminTestJSON-456585889 tempest-DeleteServersAdminTestJSON-456585889-project-member] Got 1 possible topologies {{(pid=62519) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 
1376.628693] env[62519]: DEBUG nova.virt.hardware [None req-5e4798b6-5fc6-48b8-933e-b53f21d26399 tempest-DeleteServersAdminTestJSON-456585889 tempest-DeleteServersAdminTestJSON-456585889-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1376.628857] env[62519]: DEBUG nova.virt.hardware [None req-5e4798b6-5fc6-48b8-933e-b53f21d26399 tempest-DeleteServersAdminTestJSON-456585889 tempest-DeleteServersAdminTestJSON-456585889-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1376.629707] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ecaf9d5a-9801-498c-a836-f3dca677c15d {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1376.637577] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f3377b4-1455-4aa6-838d-a34f6239c80d {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1376.658938] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6020e1f-4667-4c3e-89ad-78137a97a27d {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1376.743272] env[62519]: DEBUG nova.policy [None req-5e4798b6-5fc6-48b8-933e-b53f21d26399 tempest-DeleteServersAdminTestJSON-456585889 tempest-DeleteServersAdminTestJSON-456585889-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '5206cbd7eee2460cb041e0df1142c8ea', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'fb743623be114e74bb9f73bbca086979', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62519) authorize /opt/stack/nova/nova/policy.py:192}} [ 1376.856124] env[62519]: DEBUG nova.scheduler.client.report [None req-c40feabf-b595-479f-ad29-1457519d4d7d tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Inventory has not changed for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 160, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1377.345202] env[62519]: DEBUG nova.network.neutron [None req-5e4798b6-5fc6-48b8-933e-b53f21d26399 tempest-DeleteServersAdminTestJSON-456585889 tempest-DeleteServersAdminTestJSON-456585889-project-member] [instance: f6e29557-05ad-4a11-bd01-0315926c0413] Successfully created port: f5e6eec5-40b5-467f-8e50-4c4bc6cf8108 {{(pid=62519) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1377.362128] env[62519]: DEBUG oslo_concurrency.lockutils [None req-c40feabf-b595-479f-ad29-1457519d4d7d 
tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.144s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1377.362128] env[62519]: DEBUG nova.compute.manager [None req-c40feabf-b595-479f-ad29-1457519d4d7d tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] [instance: fe350d30-6fbd-4813-9634-ed05984fecfd] Start building networks asynchronously for instance. {{(pid=62519) _build_resources /opt/stack/nova/nova/compute/manager.py:2838}} [ 1377.867584] env[62519]: DEBUG nova.compute.utils [None req-c40feabf-b595-479f-ad29-1457519d4d7d tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Using /dev/sd instead of None {{(pid=62519) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1377.868924] env[62519]: DEBUG nova.compute.manager [None req-c40feabf-b595-479f-ad29-1457519d4d7d tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] [instance: fe350d30-6fbd-4813-9634-ed05984fecfd] Allocating IP information in the background. {{(pid=62519) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1989}} [ 1377.869153] env[62519]: DEBUG nova.network.neutron [None req-c40feabf-b595-479f-ad29-1457519d4d7d tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] [instance: fe350d30-6fbd-4813-9634-ed05984fecfd] allocate_for_instance() {{(pid=62519) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1378.007357] env[62519]: DEBUG nova.policy [None req-c40feabf-b595-479f-ad29-1457519d4d7d tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'a45b289ccc56467b85f612a102eebf72', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ae1756c7a3124e6aa1099cf99d6f6285', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62519) authorize /opt/stack/nova/nova/policy.py:192}} [ 1378.376073] env[62519]: DEBUG nova.compute.manager [None req-c40feabf-b595-479f-ad29-1457519d4d7d tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] [instance: fe350d30-6fbd-4813-9634-ed05984fecfd] Start building block device mappings for instance. 
{{(pid=62519) _build_resources /opt/stack/nova/nova/compute/manager.py:2873}} [ 1378.669249] env[62519]: DEBUG oslo_concurrency.lockutils [None req-34140d02-73d9-488b-8a14-f48460d5079c tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Acquiring lock "8b178cc0-db79-4ec2-8962-f31b936f8eff" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1378.669491] env[62519]: DEBUG oslo_concurrency.lockutils [None req-34140d02-73d9-488b-8a14-f48460d5079c tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Lock "8b178cc0-db79-4ec2-8962-f31b936f8eff" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1378.928143] env[62519]: DEBUG nova.network.neutron [None req-c40feabf-b595-479f-ad29-1457519d4d7d tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] [instance: fe350d30-6fbd-4813-9634-ed05984fecfd] Successfully created port: 6cce4a48-b732-4bd4-a39e-bbc701b31b3b {{(pid=62519) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1379.173693] env[62519]: DEBUG nova.compute.manager [None req-34140d02-73d9-488b-8a14-f48460d5079c tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] [instance: 8b178cc0-db79-4ec2-8962-f31b936f8eff] Starting instance... {{(pid=62519) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2441}} [ 1379.390622] env[62519]: DEBUG nova.compute.manager [None req-c40feabf-b595-479f-ad29-1457519d4d7d tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] [instance: fe350d30-6fbd-4813-9634-ed05984fecfd] Start spawning the instance on the hypervisor. 
{{(pid=62519) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2647}} [ 1379.414850] env[62519]: DEBUG nova.virt.hardware [None req-c40feabf-b595-479f-ad29-1457519d4d7d tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T07:56:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T07:55:55Z,direct_url=,disk_format='vmdk',id=15793716-f1d9-4a86-9030-717adf498693,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4069337684d348e0a1ab4eb6a1a2b14d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T07:55:56Z,virtual_size=,visibility=), allow threads: False {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1379.415118] env[62519]: DEBUG nova.virt.hardware [None req-c40feabf-b595-479f-ad29-1457519d4d7d tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Flavor limits 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1379.415271] env[62519]: DEBUG nova.virt.hardware [None req-c40feabf-b595-479f-ad29-1457519d4d7d tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Image limits 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1379.415450] env[62519]: DEBUG nova.virt.hardware [None req-c40feabf-b595-479f-ad29-1457519d4d7d tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Flavor pref 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1379.415591] env[62519]: DEBUG nova.virt.hardware [None req-c40feabf-b595-479f-ad29-1457519d4d7d tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Image pref 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1379.415733] env[62519]: DEBUG nova.virt.hardware [None req-c40feabf-b595-479f-ad29-1457519d4d7d tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1379.415955] env[62519]: DEBUG nova.virt.hardware [None req-c40feabf-b595-479f-ad29-1457519d4d7d tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1379.416255] env[62519]: DEBUG nova.virt.hardware [None req-c40feabf-b595-479f-ad29-1457519d4d7d tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62519) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1379.416862] env[62519]: DEBUG nova.virt.hardware [None 
req-c40feabf-b595-479f-ad29-1457519d4d7d tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Got 1 possible topologies {{(pid=62519) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1379.416862] env[62519]: DEBUG nova.virt.hardware [None req-c40feabf-b595-479f-ad29-1457519d4d7d tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1379.416862] env[62519]: DEBUG nova.virt.hardware [None req-c40feabf-b595-479f-ad29-1457519d4d7d tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1379.417728] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a88257c7-457d-45f5-88b9-7a00c4e74ecc {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1379.428131] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19bae080-1405-46bc-9fdd-fb5e747031a6 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1379.582996] env[62519]: DEBUG oslo_concurrency.lockutils [None req-93eb857c-a392-41fa-ab77-ecb0c92fa9f6 tempest-ServerDiagnosticsNegativeTest-1844110598 tempest-ServerDiagnosticsNegativeTest-1844110598-project-member] Acquiring lock "c616d8ec-f28a-4430-a336-1ea4790fd511" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1379.582996] env[62519]: DEBUG oslo_concurrency.lockutils [None req-93eb857c-a392-41fa-ab77-ecb0c92fa9f6 tempest-ServerDiagnosticsNegativeTest-1844110598 tempest-ServerDiagnosticsNegativeTest-1844110598-project-member] Lock "c616d8ec-f28a-4430-a336-1ea4790fd511" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1379.707594] env[62519]: DEBUG oslo_concurrency.lockutils [None req-34140d02-73d9-488b-8a14-f48460d5079c tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1379.711807] env[62519]: DEBUG oslo_concurrency.lockutils [None req-34140d02-73d9-488b-8a14-f48460d5079c tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1379.713282] env[62519]: INFO nova.compute.claims [None req-34140d02-73d9-488b-8a14-f48460d5079c tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] [instance: 8b178cc0-db79-4ec2-8962-f31b936f8eff] Claim successful on node 
domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1380.090236] env[62519]: DEBUG nova.compute.manager [None req-93eb857c-a392-41fa-ab77-ecb0c92fa9f6 tempest-ServerDiagnosticsNegativeTest-1844110598 tempest-ServerDiagnosticsNegativeTest-1844110598-project-member] [instance: c616d8ec-f28a-4430-a336-1ea4790fd511] Starting instance... {{(pid=62519) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2441}} [ 1380.309392] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2c6d3fbe-1808-4f6d-8aed-7a7c11b6565e tempest-ServersAdminNegativeTestJSON-2096466474 tempest-ServersAdminNegativeTestJSON-2096466474-project-member] Acquiring lock "c8b7568b-ba07-4f65-818b-f84910209361" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1380.309474] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2c6d3fbe-1808-4f6d-8aed-7a7c11b6565e tempest-ServersAdminNegativeTestJSON-2096466474 tempest-ServersAdminNegativeTestJSON-2096466474-project-member] Lock "c8b7568b-ba07-4f65-818b-f84910209361" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1380.314049] env[62519]: DEBUG oslo_concurrency.lockutils [None req-7bb4ea3e-e5b3-4b8e-bc50-4a4b860c9418 tempest-ServerDiagnosticsV248Test-635217058 tempest-ServerDiagnosticsV248Test-635217058-project-member] Acquiring lock "ceadcb5e-ee82-4441-b046-f79b973ec05e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1380.314049] env[62519]: DEBUG oslo_concurrency.lockutils [None req-7bb4ea3e-e5b3-4b8e-bc50-4a4b860c9418 tempest-ServerDiagnosticsV248Test-635217058 tempest-ServerDiagnosticsV248Test-635217058-project-member] Lock "ceadcb5e-ee82-4441-b046-f79b973ec05e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1380.617725] env[62519]: DEBUG oslo_concurrency.lockutils [None req-93eb857c-a392-41fa-ab77-ecb0c92fa9f6 tempest-ServerDiagnosticsNegativeTest-1844110598 tempest-ServerDiagnosticsNegativeTest-1844110598-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1380.764280] env[62519]: DEBUG nova.network.neutron [None req-5e4798b6-5fc6-48b8-933e-b53f21d26399 tempest-DeleteServersAdminTestJSON-456585889 tempest-DeleteServersAdminTestJSON-456585889-project-member] [instance: f6e29557-05ad-4a11-bd01-0315926c0413] Successfully updated port: f5e6eec5-40b5-467f-8e50-4c4bc6cf8108 {{(pid=62519) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1380.816139] env[62519]: DEBUG nova.compute.manager [None req-2c6d3fbe-1808-4f6d-8aed-7a7c11b6565e tempest-ServersAdminNegativeTestJSON-2096466474 tempest-ServersAdminNegativeTestJSON-2096466474-project-member] [instance: c8b7568b-ba07-4f65-818b-f84910209361] Starting instance... 
{{(pid=62519) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2441}} [ 1380.826596] env[62519]: DEBUG nova.compute.manager [None req-7bb4ea3e-e5b3-4b8e-bc50-4a4b860c9418 tempest-ServerDiagnosticsV248Test-635217058 tempest-ServerDiagnosticsV248Test-635217058-project-member] [instance: ceadcb5e-ee82-4441-b046-f79b973ec05e] Starting instance... {{(pid=62519) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2441}} [ 1380.892743] env[62519]: DEBUG oslo_concurrency.lockutils [None req-e8efc79a-e5f9-47fc-b2c3-ac5d306260b4 tempest-ServerDiagnosticsTest-1027319288 tempest-ServerDiagnosticsTest-1027319288-project-member] Acquiring lock "1118c1e5-1aa8-4f52-9fb9-e86531bf83d1" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1380.892982] env[62519]: DEBUG oslo_concurrency.lockutils [None req-e8efc79a-e5f9-47fc-b2c3-ac5d306260b4 tempest-ServerDiagnosticsTest-1027319288 tempest-ServerDiagnosticsTest-1027319288-project-member] Lock "1118c1e5-1aa8-4f52-9fb9-e86531bf83d1" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1380.896177] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a439ff5-54cc-45af-87f6-8b81be6dd673 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1380.905802] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c66498f8-6edf-421a-bd57-7f65b0a8c2ce {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1380.938812] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a1181af-b08d-4532-8ef3-14de7de27b6f {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1380.947470] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee379bf1-7e3c-4cf0-b781-c7cf7f6e4e83 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1380.962438] env[62519]: DEBUG nova.compute.provider_tree [None req-34140d02-73d9-488b-8a14-f48460d5079c tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Inventory has not changed in ProviderTree for provider: f8ca0d98-9158-4b85-ae0e-b106f966dd44 {{(pid=62519) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1381.267549] env[62519]: DEBUG oslo_concurrency.lockutils [None req-5e4798b6-5fc6-48b8-933e-b53f21d26399 tempest-DeleteServersAdminTestJSON-456585889 tempest-DeleteServersAdminTestJSON-456585889-project-member] Acquiring lock "refresh_cache-f6e29557-05ad-4a11-bd01-0315926c0413" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1381.267864] env[62519]: DEBUG oslo_concurrency.lockutils [None req-5e4798b6-5fc6-48b8-933e-b53f21d26399 tempest-DeleteServersAdminTestJSON-456585889 tempest-DeleteServersAdminTestJSON-456585889-project-member] Acquired lock "refresh_cache-f6e29557-05ad-4a11-bd01-0315926c0413" {{(pid=62519) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1381.267915] env[62519]: DEBUG nova.network.neutron [None req-5e4798b6-5fc6-48b8-933e-b53f21d26399 tempest-DeleteServersAdminTestJSON-456585889 tempest-DeleteServersAdminTestJSON-456585889-project-member] [instance: f6e29557-05ad-4a11-bd01-0315926c0413] Building network info cache for instance {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1381.363706] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2c6d3fbe-1808-4f6d-8aed-7a7c11b6565e tempest-ServersAdminNegativeTestJSON-2096466474 tempest-ServersAdminNegativeTestJSON-2096466474-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1381.367871] env[62519]: DEBUG oslo_concurrency.lockutils [None req-7bb4ea3e-e5b3-4b8e-bc50-4a4b860c9418 tempest-ServerDiagnosticsV248Test-635217058 tempest-ServerDiagnosticsV248Test-635217058-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1381.401408] env[62519]: DEBUG nova.compute.manager [None req-e8efc79a-e5f9-47fc-b2c3-ac5d306260b4 tempest-ServerDiagnosticsTest-1027319288 tempest-ServerDiagnosticsTest-1027319288-project-member] [instance: 1118c1e5-1aa8-4f52-9fb9-e86531bf83d1] Starting instance... {{(pid=62519) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2441}} [ 1381.468623] env[62519]: DEBUG nova.scheduler.client.report [None req-34140d02-73d9-488b-8a14-f48460d5079c tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Inventory has not changed for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 160, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1381.713879] env[62519]: DEBUG nova.network.neutron [None req-c40feabf-b595-479f-ad29-1457519d4d7d tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] [instance: fe350d30-6fbd-4813-9634-ed05984fecfd] Successfully updated port: 6cce4a48-b732-4bd4-a39e-bbc701b31b3b {{(pid=62519) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1381.945298] env[62519]: DEBUG oslo_concurrency.lockutils [None req-e8efc79a-e5f9-47fc-b2c3-ac5d306260b4 tempest-ServerDiagnosticsTest-1027319288 tempest-ServerDiagnosticsTest-1027319288-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1381.973380] env[62519]: DEBUG oslo_concurrency.lockutils [None req-34140d02-73d9-488b-8a14-f48460d5079c tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.266s {{(pid=62519) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1381.973880] env[62519]: DEBUG nova.compute.manager [None req-34140d02-73d9-488b-8a14-f48460d5079c tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] [instance: 8b178cc0-db79-4ec2-8962-f31b936f8eff] Start building networks asynchronously for instance. {{(pid=62519) _build_resources /opt/stack/nova/nova/compute/manager.py:2838}} [ 1381.978095] env[62519]: DEBUG oslo_concurrency.lockutils [None req-93eb857c-a392-41fa-ab77-ecb0c92fa9f6 tempest-ServerDiagnosticsNegativeTest-1844110598 tempest-ServerDiagnosticsNegativeTest-1844110598-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 1.360s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1381.979689] env[62519]: INFO nova.compute.claims [None req-93eb857c-a392-41fa-ab77-ecb0c92fa9f6 tempest-ServerDiagnosticsNegativeTest-1844110598 tempest-ServerDiagnosticsNegativeTest-1844110598-project-member] [instance: c616d8ec-f28a-4430-a336-1ea4790fd511] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1382.015753] env[62519]: DEBUG nova.network.neutron [None req-5e4798b6-5fc6-48b8-933e-b53f21d26399 tempest-DeleteServersAdminTestJSON-456585889 tempest-DeleteServersAdminTestJSON-456585889-project-member] [instance: f6e29557-05ad-4a11-bd01-0315926c0413] Instance cache missing network info. {{(pid=62519) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1382.216590] env[62519]: DEBUG oslo_concurrency.lockutils [None req-c40feabf-b595-479f-ad29-1457519d4d7d tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Acquiring lock "refresh_cache-fe350d30-6fbd-4813-9634-ed05984fecfd" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1382.216742] env[62519]: DEBUG oslo_concurrency.lockutils [None req-c40feabf-b595-479f-ad29-1457519d4d7d tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Acquired lock "refresh_cache-fe350d30-6fbd-4813-9634-ed05984fecfd" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1382.216885] env[62519]: DEBUG nova.network.neutron [None req-c40feabf-b595-479f-ad29-1457519d4d7d tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] [instance: fe350d30-6fbd-4813-9634-ed05984fecfd] Building network info cache for instance {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1382.479907] env[62519]: DEBUG nova.compute.utils [None req-34140d02-73d9-488b-8a14-f48460d5079c tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Using /dev/sd instead of None {{(pid=62519) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1382.482461] env[62519]: DEBUG nova.compute.manager [None req-34140d02-73d9-488b-8a14-f48460d5079c tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] [instance: 8b178cc0-db79-4ec2-8962-f31b936f8eff] Allocating IP information in the background. 
{{(pid=62519) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1989}} [ 1382.482461] env[62519]: DEBUG nova.network.neutron [None req-34140d02-73d9-488b-8a14-f48460d5079c tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] [instance: 8b178cc0-db79-4ec2-8962-f31b936f8eff] allocate_for_instance() {{(pid=62519) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1382.545897] env[62519]: DEBUG nova.network.neutron [None req-5e4798b6-5fc6-48b8-933e-b53f21d26399 tempest-DeleteServersAdminTestJSON-456585889 tempest-DeleteServersAdminTestJSON-456585889-project-member] [instance: f6e29557-05ad-4a11-bd01-0315926c0413] Updating instance_info_cache with network_info: [{"id": "f5e6eec5-40b5-467f-8e50-4c4bc6cf8108", "address": "fa:16:3e:b8:f4:7b", "network": {"id": "b4c20e4e-b4f0-4757-9ab0-b74eef10b678", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.62", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "4069337684d348e0a1ab4eb6a1a2b14d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fe99da4f-5630-4afd-918b-b327193d8489", "external-id": "nsx-vlan-transportzone-688", "segmentation_id": 688, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf5e6eec5-40", "ovs_interfaceid": "f5e6eec5-40b5-467f-8e50-4c4bc6cf8108", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1382.593305] env[62519]: DEBUG nova.policy [None req-34140d02-73d9-488b-8a14-f48460d5079c tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c9b4ac19e10d4abdb7d7e54e5d093a8a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'a0755b34e22d4478817ec4e2d57aac2a', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62519) authorize /opt/stack/nova/nova/policy.py:192}} [ 1382.779692] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2bcdd96e-4e47-4b74-bed4-3230c87035c0 tempest-ServersV294TestFqdnHostnames-312089260 tempest-ServersV294TestFqdnHostnames-312089260-project-member] Acquiring lock "2b391628-18a2-4606-8c59-58ba642cee50" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1382.779916] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2bcdd96e-4e47-4b74-bed4-3230c87035c0 tempest-ServersV294TestFqdnHostnames-312089260 tempest-ServersV294TestFqdnHostnames-312089260-project-member] Lock "2b391628-18a2-4606-8c59-58ba642cee50" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s 
{{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1382.797139] env[62519]: DEBUG nova.compute.manager [req-b9a53b4f-fd04-4da2-b20a-7219f65d8105 req-1f8aa8c8-cd23-42e6-b9ec-6996edaef0f1 service nova] [instance: f6e29557-05ad-4a11-bd01-0315926c0413] Received event network-vif-plugged-f5e6eec5-40b5-467f-8e50-4c4bc6cf8108 {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1382.797445] env[62519]: DEBUG oslo_concurrency.lockutils [req-b9a53b4f-fd04-4da2-b20a-7219f65d8105 req-1f8aa8c8-cd23-42e6-b9ec-6996edaef0f1 service nova] Acquiring lock "f6e29557-05ad-4a11-bd01-0315926c0413-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1382.797664] env[62519]: DEBUG oslo_concurrency.lockutils [req-b9a53b4f-fd04-4da2-b20a-7219f65d8105 req-1f8aa8c8-cd23-42e6-b9ec-6996edaef0f1 service nova] Lock "f6e29557-05ad-4a11-bd01-0315926c0413-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1382.797836] env[62519]: DEBUG oslo_concurrency.lockutils [req-b9a53b4f-fd04-4da2-b20a-7219f65d8105 req-1f8aa8c8-cd23-42e6-b9ec-6996edaef0f1 service nova] Lock "f6e29557-05ad-4a11-bd01-0315926c0413-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1382.798085] env[62519]: DEBUG nova.compute.manager [req-b9a53b4f-fd04-4da2-b20a-7219f65d8105 req-1f8aa8c8-cd23-42e6-b9ec-6996edaef0f1 service nova] [instance: f6e29557-05ad-4a11-bd01-0315926c0413] No waiting events found dispatching network-vif-plugged-f5e6eec5-40b5-467f-8e50-4c4bc6cf8108 {{(pid=62519) pop_instance_event /opt/stack/nova/nova/compute/manager.py:323}} [ 1382.798189] env[62519]: WARNING nova.compute.manager [req-b9a53b4f-fd04-4da2-b20a-7219f65d8105 req-1f8aa8c8-cd23-42e6-b9ec-6996edaef0f1 service nova] [instance: f6e29557-05ad-4a11-bd01-0315926c0413] Received unexpected event network-vif-plugged-f5e6eec5-40b5-467f-8e50-4c4bc6cf8108 for instance with vm_state building and task_state spawning. [ 1382.860386] env[62519]: DEBUG nova.network.neutron [None req-c40feabf-b595-479f-ad29-1457519d4d7d tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] [instance: fe350d30-6fbd-4813-9634-ed05984fecfd] Instance cache missing network info. {{(pid=62519) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1382.989791] env[62519]: DEBUG nova.compute.manager [None req-34140d02-73d9-488b-8a14-f48460d5079c tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] [instance: 8b178cc0-db79-4ec2-8962-f31b936f8eff] Start building block device mappings for instance. 
{{(pid=62519) _build_resources /opt/stack/nova/nova/compute/manager.py:2873}} [ 1383.051947] env[62519]: DEBUG oslo_concurrency.lockutils [None req-5e4798b6-5fc6-48b8-933e-b53f21d26399 tempest-DeleteServersAdminTestJSON-456585889 tempest-DeleteServersAdminTestJSON-456585889-project-member] Releasing lock "refresh_cache-f6e29557-05ad-4a11-bd01-0315926c0413" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1383.053033] env[62519]: DEBUG nova.compute.manager [None req-5e4798b6-5fc6-48b8-933e-b53f21d26399 tempest-DeleteServersAdminTestJSON-456585889 tempest-DeleteServersAdminTestJSON-456585889-project-member] [instance: f6e29557-05ad-4a11-bd01-0315926c0413] Instance network_info: |[{"id": "f5e6eec5-40b5-467f-8e50-4c4bc6cf8108", "address": "fa:16:3e:b8:f4:7b", "network": {"id": "b4c20e4e-b4f0-4757-9ab0-b74eef10b678", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.62", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "4069337684d348e0a1ab4eb6a1a2b14d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fe99da4f-5630-4afd-918b-b327193d8489", "external-id": "nsx-vlan-transportzone-688", "segmentation_id": 688, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf5e6eec5-40", "ovs_interfaceid": "f5e6eec5-40b5-467f-8e50-4c4bc6cf8108", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62519) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2004}} [ 1383.053198] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-5e4798b6-5fc6-48b8-933e-b53f21d26399 tempest-DeleteServersAdminTestJSON-456585889 tempest-DeleteServersAdminTestJSON-456585889-project-member] [instance: f6e29557-05ad-4a11-bd01-0315926c0413] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:b8:f4:7b', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'fe99da4f-5630-4afd-918b-b327193d8489', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'f5e6eec5-40b5-467f-8e50-4c4bc6cf8108', 'vif_model': 'vmxnet3'}] {{(pid=62519) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1383.073883] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-5e4798b6-5fc6-48b8-933e-b53f21d26399 tempest-DeleteServersAdminTestJSON-456585889 tempest-DeleteServersAdminTestJSON-456585889-project-member] Creating folder: OpenStack. Parent ref: group-v4. {{(pid=62519) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1383.082021] env[62519]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-5ef50743-b913-4ffe-8237-acdacfe05a31 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1383.097891] env[62519]: INFO nova.virt.vmwareapi.vm_util [None req-5e4798b6-5fc6-48b8-933e-b53f21d26399 tempest-DeleteServersAdminTestJSON-456585889 tempest-DeleteServersAdminTestJSON-456585889-project-member] Created folder: OpenStack in parent group-v4. 
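The folder-creation and CreateVM_Task entries around this point follow oslo.vmware's invoke-and-wait pattern: the driver holds a VMwareAPISession, calls a vSphere method through invoke_api, and, for long-running operations, polls the returned task with wait_for_task (the "Waiting for the task" / "progress is 0%" lines). The sketch below is illustrative only, not Nova's actual code; the vCenter host, credentials, and the commented-out spec/pool objects are placeholder assumptions.

from oslo_vmware import api as vmware_api

# Hypothetical connection values; the real host and credentials come from
# nova.conf and are not shown in this log.
session = vmware_api.VMwareAPISession(
    'vc.example.test', 'administrator@vsphere.local', 'secret',
    api_retry_count=10, task_poll_interval=0.5)

# Mirrors the "Invoking Folder.CreateFolder" entries: call a vSphere API
# method on a managed object reference via the vim service.
root = session.vim.service_content.rootFolder
folder = session.invoke_api(session.vim, 'CreateFolder', root, name='OpenStack')

# Long-running calls return a Task object; the caller blocks on it, which is
# what produces the "Waiting for the task" and "completed successfully" lines.
# task = session.invoke_api(session.vim, 'CreateVM_Task', folder,
#                           config=config_spec, pool=resource_pool)
# session.wait_for_task(task)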
[ 1383.098516] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-5e4798b6-5fc6-48b8-933e-b53f21d26399 tempest-DeleteServersAdminTestJSON-456585889 tempest-DeleteServersAdminTestJSON-456585889-project-member] Creating folder: Project (fb743623be114e74bb9f73bbca086979). Parent ref: group-v373567. {{(pid=62519) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1383.098516] env[62519]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-5e80aecf-f886-4773-9268-8d4f471d80bb {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1383.114591] env[62519]: INFO nova.virt.vmwareapi.vm_util [None req-5e4798b6-5fc6-48b8-933e-b53f21d26399 tempest-DeleteServersAdminTestJSON-456585889 tempest-DeleteServersAdminTestJSON-456585889-project-member] Created folder: Project (fb743623be114e74bb9f73bbca086979) in parent group-v373567. [ 1383.114591] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-5e4798b6-5fc6-48b8-933e-b53f21d26399 tempest-DeleteServersAdminTestJSON-456585889 tempest-DeleteServersAdminTestJSON-456585889-project-member] Creating folder: Instances. Parent ref: group-v373568. {{(pid=62519) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1383.114591] env[62519]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-41381e00-cf84-49c5-9c41-dfe9166b8066 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1383.126354] env[62519]: INFO nova.virt.vmwareapi.vm_util [None req-5e4798b6-5fc6-48b8-933e-b53f21d26399 tempest-DeleteServersAdminTestJSON-456585889 tempest-DeleteServersAdminTestJSON-456585889-project-member] Created folder: Instances in parent group-v373568. [ 1383.126675] env[62519]: DEBUG oslo.service.loopingcall [None req-5e4798b6-5fc6-48b8-933e-b53f21d26399 tempest-DeleteServersAdminTestJSON-456585889 tempest-DeleteServersAdminTestJSON-456585889-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62519) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1383.127846] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f6e29557-05ad-4a11-bd01-0315926c0413] Creating VM on the ESX host {{(pid=62519) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1383.127846] env[62519]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-8be11ae1-40ca-43bf-addb-56d477035b30 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1383.157099] env[62519]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1383.157099] env[62519]: value = "task-1801883" [ 1383.157099] env[62519]: _type = "Task" [ 1383.157099] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1383.168246] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1801883, 'name': CreateVM_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1383.283121] env[62519]: DEBUG nova.compute.manager [None req-2bcdd96e-4e47-4b74-bed4-3230c87035c0 tempest-ServersV294TestFqdnHostnames-312089260 tempest-ServersV294TestFqdnHostnames-312089260-project-member] [instance: 2b391628-18a2-4606-8c59-58ba642cee50] Starting instance... 
{{(pid=62519) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2441}} [ 1383.292842] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9708f4b2-a1af-4235-b051-3d090d940dba {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1383.301716] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b8001a9-660e-4c28-98ed-581c06542ff2 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1383.339333] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1fe114ce-d754-44f0-88b8-79eb4a231648 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1383.347825] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae9a4170-f396-484d-8332-d6dac7268625 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1383.364311] env[62519]: DEBUG nova.compute.provider_tree [None req-93eb857c-a392-41fa-ab77-ecb0c92fa9f6 tempest-ServerDiagnosticsNegativeTest-1844110598 tempest-ServerDiagnosticsNegativeTest-1844110598-project-member] Inventory has not changed in ProviderTree for provider: f8ca0d98-9158-4b85-ae0e-b106f966dd44 {{(pid=62519) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1383.379192] env[62519]: DEBUG nova.network.neutron [None req-c40feabf-b595-479f-ad29-1457519d4d7d tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] [instance: fe350d30-6fbd-4813-9634-ed05984fecfd] Updating instance_info_cache with network_info: [{"id": "6cce4a48-b732-4bd4-a39e-bbc701b31b3b", "address": "fa:16:3e:21:41:9e", "network": {"id": "b4c20e4e-b4f0-4757-9ab0-b74eef10b678", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.181", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "4069337684d348e0a1ab4eb6a1a2b14d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fe99da4f-5630-4afd-918b-b327193d8489", "external-id": "nsx-vlan-transportzone-688", "segmentation_id": 688, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6cce4a48-b7", "ovs_interfaceid": "6cce4a48-b732-4bd4-a39e-bbc701b31b3b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1383.489940] env[62519]: DEBUG nova.network.neutron [None req-34140d02-73d9-488b-8a14-f48460d5079c tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] [instance: 8b178cc0-db79-4ec2-8962-f31b936f8eff] Successfully created port: f1b8639e-9539-46c0-8663-e7017bf77486 {{(pid=62519) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1383.670954] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': 
task-1801883, 'name': CreateVM_Task, 'duration_secs': 0.362707} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1383.670954] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f6e29557-05ad-4a11-bd01-0315926c0413] Created VM on the ESX host {{(pid=62519) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1383.684168] env[62519]: DEBUG oslo_vmware.service [None req-5e4798b6-5fc6-48b8-933e-b53f21d26399 tempest-DeleteServersAdminTestJSON-456585889 tempest-DeleteServersAdminTestJSON-456585889-project-member] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2bbf65cb-010b-42da-b306-f0d2ffb9f9c9 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1383.695235] env[62519]: DEBUG oslo_concurrency.lockutils [None req-5e4798b6-5fc6-48b8-933e-b53f21d26399 tempest-DeleteServersAdminTestJSON-456585889 tempest-DeleteServersAdminTestJSON-456585889-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1383.695235] env[62519]: DEBUG oslo_concurrency.lockutils [None req-5e4798b6-5fc6-48b8-933e-b53f21d26399 tempest-DeleteServersAdminTestJSON-456585889 tempest-DeleteServersAdminTestJSON-456585889-project-member] Acquired lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1383.695235] env[62519]: DEBUG oslo_concurrency.lockutils [None req-5e4798b6-5fc6-48b8-933e-b53f21d26399 tempest-DeleteServersAdminTestJSON-456585889 tempest-DeleteServersAdminTestJSON-456585889-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1383.695235] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-77063fb4-45b6-4149-931d-34c6b0d39460 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1383.699662] env[62519]: DEBUG oslo_vmware.api [None req-5e4798b6-5fc6-48b8-933e-b53f21d26399 tempest-DeleteServersAdminTestJSON-456585889 tempest-DeleteServersAdminTestJSON-456585889-project-member] Waiting for the task: (returnval){ [ 1383.699662] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]52b89b2a-35a4-9fe9-bafb-9fc36b656528" [ 1383.699662] env[62519]: _type = "Task" [ 1383.699662] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1383.709445] env[62519]: DEBUG oslo_vmware.api [None req-5e4798b6-5fc6-48b8-933e-b53f21d26399 tempest-DeleteServersAdminTestJSON-456585889 tempest-DeleteServersAdminTestJSON-456585889-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52b89b2a-35a4-9fe9-bafb-9fc36b656528, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1383.809675] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2bcdd96e-4e47-4b74-bed4-3230c87035c0 tempest-ServersV294TestFqdnHostnames-312089260 tempest-ServersV294TestFqdnHostnames-312089260-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1383.868776] env[62519]: DEBUG nova.scheduler.client.report [None req-93eb857c-a392-41fa-ab77-ecb0c92fa9f6 tempest-ServerDiagnosticsNegativeTest-1844110598 tempest-ServerDiagnosticsNegativeTest-1844110598-project-member] Inventory has not changed for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 160, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1383.880751] env[62519]: DEBUG oslo_concurrency.lockutils [None req-c40feabf-b595-479f-ad29-1457519d4d7d tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Releasing lock "refresh_cache-fe350d30-6fbd-4813-9634-ed05984fecfd" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1383.883926] env[62519]: DEBUG nova.compute.manager [None req-c40feabf-b595-479f-ad29-1457519d4d7d tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] [instance: fe350d30-6fbd-4813-9634-ed05984fecfd] Instance network_info: |[{"id": "6cce4a48-b732-4bd4-a39e-bbc701b31b3b", "address": "fa:16:3e:21:41:9e", "network": {"id": "b4c20e4e-b4f0-4757-9ab0-b74eef10b678", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.181", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "4069337684d348e0a1ab4eb6a1a2b14d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fe99da4f-5630-4afd-918b-b327193d8489", "external-id": "nsx-vlan-transportzone-688", "segmentation_id": 688, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6cce4a48-b7", "ovs_interfaceid": "6cce4a48-b732-4bd4-a39e-bbc701b31b3b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62519) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2004}} [ 1383.884491] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-c40feabf-b595-479f-ad29-1457519d4d7d tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] [instance: fe350d30-6fbd-4813-9634-ed05984fecfd] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:21:41:9e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 
'fe99da4f-5630-4afd-918b-b327193d8489', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '6cce4a48-b732-4bd4-a39e-bbc701b31b3b', 'vif_model': 'vmxnet3'}] {{(pid=62519) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1383.892579] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-c40feabf-b595-479f-ad29-1457519d4d7d tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Creating folder: Project (ae1756c7a3124e6aa1099cf99d6f6285). Parent ref: group-v373567. {{(pid=62519) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1383.893145] env[62519]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-cdcb5ced-c41a-4fe9-940b-683f6eb23a31 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1383.906691] env[62519]: INFO nova.virt.vmwareapi.vm_util [None req-c40feabf-b595-479f-ad29-1457519d4d7d tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Created folder: Project (ae1756c7a3124e6aa1099cf99d6f6285) in parent group-v373567. [ 1383.906887] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-c40feabf-b595-479f-ad29-1457519d4d7d tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Creating folder: Instances. Parent ref: group-v373571. {{(pid=62519) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1383.907169] env[62519]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-657ffe3b-fbce-499e-a26f-3a796a85c6e7 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1383.916409] env[62519]: INFO nova.virt.vmwareapi.vm_util [None req-c40feabf-b595-479f-ad29-1457519d4d7d tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Created folder: Instances in parent group-v373571. [ 1383.917042] env[62519]: DEBUG oslo.service.loopingcall [None req-c40feabf-b595-479f-ad29-1457519d4d7d tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62519) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1383.917042] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: fe350d30-6fbd-4813-9634-ed05984fecfd] Creating VM on the ESX host {{(pid=62519) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1383.917042] env[62519]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-46017fe4-909d-4e2e-83f8-14cf79fab62a {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1383.941546] env[62519]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1383.941546] env[62519]: value = "task-1801886" [ 1383.941546] env[62519]: _type = "Task" [ 1383.941546] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1383.950331] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1801886, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1384.006450] env[62519]: DEBUG nova.compute.manager [None req-34140d02-73d9-488b-8a14-f48460d5079c tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] [instance: 8b178cc0-db79-4ec2-8962-f31b936f8eff] Start spawning the instance on the hypervisor. {{(pid=62519) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2647}} [ 1384.053775] env[62519]: DEBUG nova.virt.hardware [None req-34140d02-73d9-488b-8a14-f48460d5079c tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T07:56:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T07:55:55Z,direct_url=,disk_format='vmdk',id=15793716-f1d9-4a86-9030-717adf498693,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4069337684d348e0a1ab4eb6a1a2b14d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T07:55:56Z,virtual_size=,visibility=), allow threads: False {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1384.053775] env[62519]: DEBUG nova.virt.hardware [None req-34140d02-73d9-488b-8a14-f48460d5079c tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Flavor limits 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1384.053775] env[62519]: DEBUG nova.virt.hardware [None req-34140d02-73d9-488b-8a14-f48460d5079c tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Image limits 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1384.053775] env[62519]: DEBUG nova.virt.hardware [None req-34140d02-73d9-488b-8a14-f48460d5079c tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Flavor pref 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1384.054145] env[62519]: DEBUG nova.virt.hardware [None req-34140d02-73d9-488b-8a14-f48460d5079c tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Image pref 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1384.054145] env[62519]: DEBUG nova.virt.hardware [None req-34140d02-73d9-488b-8a14-f48460d5079c tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1384.054764] env[62519]: DEBUG nova.virt.hardware [None req-34140d02-73d9-488b-8a14-f48460d5079c tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1384.054764] env[62519]: DEBUG nova.virt.hardware [None req-34140d02-73d9-488b-8a14-f48460d5079c 
tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62519) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1384.054883] env[62519]: DEBUG nova.virt.hardware [None req-34140d02-73d9-488b-8a14-f48460d5079c tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Got 1 possible topologies {{(pid=62519) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1384.054993] env[62519]: DEBUG nova.virt.hardware [None req-34140d02-73d9-488b-8a14-f48460d5079c tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1384.055175] env[62519]: DEBUG nova.virt.hardware [None req-34140d02-73d9-488b-8a14-f48460d5079c tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1384.056390] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2fe372bf-b287-4523-923a-d2e53ab388de {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1384.069030] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-43a5e543-a5ee-4934-9f92-dcaf86441a72 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1384.218277] env[62519]: DEBUG oslo_concurrency.lockutils [None req-5e4798b6-5fc6-48b8-933e-b53f21d26399 tempest-DeleteServersAdminTestJSON-456585889 tempest-DeleteServersAdminTestJSON-456585889-project-member] Releasing lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1384.218542] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-5e4798b6-5fc6-48b8-933e-b53f21d26399 tempest-DeleteServersAdminTestJSON-456585889 tempest-DeleteServersAdminTestJSON-456585889-project-member] [instance: f6e29557-05ad-4a11-bd01-0315926c0413] Processing image 15793716-f1d9-4a86-9030-717adf498693 {{(pid=62519) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1384.218777] env[62519]: DEBUG oslo_concurrency.lockutils [None req-5e4798b6-5fc6-48b8-933e-b53f21d26399 tempest-DeleteServersAdminTestJSON-456585889 tempest-DeleteServersAdminTestJSON-456585889-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1384.218922] env[62519]: DEBUG oslo_concurrency.lockutils [None req-5e4798b6-5fc6-48b8-933e-b53f21d26399 tempest-DeleteServersAdminTestJSON-456585889 tempest-DeleteServersAdminTestJSON-456585889-project-member] Acquired lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1384.220507] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None 
req-5e4798b6-5fc6-48b8-933e-b53f21d26399 tempest-DeleteServersAdminTestJSON-456585889 tempest-DeleteServersAdminTestJSON-456585889-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62519) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1384.221980] env[62519]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-67871eb9-2672-4752-8a22-7eb7ac5aaadc {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1384.243555] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-5e4798b6-5fc6-48b8-933e-b53f21d26399 tempest-DeleteServersAdminTestJSON-456585889 tempest-DeleteServersAdminTestJSON-456585889-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62519) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1384.249237] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-5e4798b6-5fc6-48b8-933e-b53f21d26399 tempest-DeleteServersAdminTestJSON-456585889 tempest-DeleteServersAdminTestJSON-456585889-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62519) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1384.251651] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cbf8ee61-364f-46bd-a32e-10a591414493 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1384.258468] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-56835c5a-bb7d-4893-b0a5-4c7c52ca1e50 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1384.268782] env[62519]: DEBUG oslo_vmware.api [None req-5e4798b6-5fc6-48b8-933e-b53f21d26399 tempest-DeleteServersAdminTestJSON-456585889 tempest-DeleteServersAdminTestJSON-456585889-project-member] Waiting for the task: (returnval){ [ 1384.268782] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]5277aba6-06f1-4dfb-1ed8-453ada51edb5" [ 1384.268782] env[62519]: _type = "Task" [ 1384.268782] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1384.277381] env[62519]: DEBUG oslo_vmware.api [None req-5e4798b6-5fc6-48b8-933e-b53f21d26399 tempest-DeleteServersAdminTestJSON-456585889 tempest-DeleteServersAdminTestJSON-456585889-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]5277aba6-06f1-4dfb-1ed8-453ada51edb5, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1384.379036] env[62519]: DEBUG oslo_concurrency.lockutils [None req-93eb857c-a392-41fa-ab77-ecb0c92fa9f6 tempest-ServerDiagnosticsNegativeTest-1844110598 tempest-ServerDiagnosticsNegativeTest-1844110598-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.401s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1384.379357] env[62519]: DEBUG nova.compute.manager [None req-93eb857c-a392-41fa-ab77-ecb0c92fa9f6 tempest-ServerDiagnosticsNegativeTest-1844110598 tempest-ServerDiagnosticsNegativeTest-1844110598-project-member] [instance: c616d8ec-f28a-4430-a336-1ea4790fd511] Start building networks asynchronously for instance. {{(pid=62519) _build_resources /opt/stack/nova/nova/compute/manager.py:2838}} [ 1384.383041] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2c6d3fbe-1808-4f6d-8aed-7a7c11b6565e tempest-ServersAdminNegativeTestJSON-2096466474 tempest-ServersAdminNegativeTestJSON-2096466474-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 3.019s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1384.384549] env[62519]: INFO nova.compute.claims [None req-2c6d3fbe-1808-4f6d-8aed-7a7c11b6565e tempest-ServersAdminNegativeTestJSON-2096466474 tempest-ServersAdminNegativeTestJSON-2096466474-project-member] [instance: c8b7568b-ba07-4f65-818b-f84910209361] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1384.462815] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1801886, 'name': CreateVM_Task, 'duration_secs': 0.35098} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1384.462987] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: fe350d30-6fbd-4813-9634-ed05984fecfd] Created VM on the ESX host {{(pid=62519) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1384.464035] env[62519]: DEBUG oslo_concurrency.lockutils [None req-c40feabf-b595-479f-ad29-1457519d4d7d tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1384.464035] env[62519]: DEBUG oslo_concurrency.lockutils [None req-c40feabf-b595-479f-ad29-1457519d4d7d tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Acquired lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1384.464188] env[62519]: DEBUG oslo_concurrency.lockutils [None req-c40feabf-b595-479f-ad29-1457519d4d7d tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1384.464870] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b033c1f6-5cf6-4a66-b872-151379695f60 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1384.469606] env[62519]: DEBUG oslo_vmware.api [None req-c40feabf-b595-479f-ad29-1457519d4d7d tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Waiting for the task: (returnval){ [ 1384.469606] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]527795d2-3d3d-b765-879c-46dbba3d2cef" [ 1384.469606] env[62519]: _type = "Task" [ 1384.469606] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1384.479442] env[62519]: DEBUG oslo_vmware.api [None req-c40feabf-b595-479f-ad29-1457519d4d7d tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]527795d2-3d3d-b765-879c-46dbba3d2cef, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1384.512379] env[62519]: DEBUG oslo_concurrency.lockutils [None req-94daf7e7-f2b3-4130-bb71-0b7053690311 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Acquiring lock "11d4a010-959f-4f53-94dc-7499007612ad" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1384.512379] env[62519]: DEBUG oslo_concurrency.lockutils [None req-94daf7e7-f2b3-4130-bb71-0b7053690311 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Lock "11d4a010-959f-4f53-94dc-7499007612ad" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1384.780358] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-5e4798b6-5fc6-48b8-933e-b53f21d26399 tempest-DeleteServersAdminTestJSON-456585889 tempest-DeleteServersAdminTestJSON-456585889-project-member] [instance: f6e29557-05ad-4a11-bd01-0315926c0413] Preparing fetch location {{(pid=62519) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1384.780607] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-5e4798b6-5fc6-48b8-933e-b53f21d26399 tempest-DeleteServersAdminTestJSON-456585889 tempest-DeleteServersAdminTestJSON-456585889-project-member] Creating directory with path [datastore1] vmware_temp/8bb661dd-f26f-4bae-b81c-213fc371975a/15793716-f1d9-4a86-9030-717adf498693 {{(pid=62519) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1384.780859] env[62519]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-9a532cf4-de0d-41b0-81f3-e186038981eb {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1384.802423] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-5e4798b6-5fc6-48b8-933e-b53f21d26399 tempest-DeleteServersAdminTestJSON-456585889 tempest-DeleteServersAdminTestJSON-456585889-project-member] Created directory with path [datastore1] vmware_temp/8bb661dd-f26f-4bae-b81c-213fc371975a/15793716-f1d9-4a86-9030-717adf498693 {{(pid=62519) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1384.802636] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-5e4798b6-5fc6-48b8-933e-b53f21d26399 tempest-DeleteServersAdminTestJSON-456585889 tempest-DeleteServersAdminTestJSON-456585889-project-member] [instance: f6e29557-05ad-4a11-bd01-0315926c0413] Fetch image to [datastore1] vmware_temp/8bb661dd-f26f-4bae-b81c-213fc371975a/15793716-f1d9-4a86-9030-717adf498693/tmp-sparse.vmdk {{(pid=62519) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1384.802966] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-5e4798b6-5fc6-48b8-933e-b53f21d26399 tempest-DeleteServersAdminTestJSON-456585889 tempest-DeleteServersAdminTestJSON-456585889-project-member] [instance: f6e29557-05ad-4a11-bd01-0315926c0413] Downloading image file data 15793716-f1d9-4a86-9030-717adf498693 to [datastore1] vmware_temp/8bb661dd-f26f-4bae-b81c-213fc371975a/15793716-f1d9-4a86-9030-717adf498693/tmp-sparse.vmdk on the data store datastore1 {{(pid=62519) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 
1384.803672] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f1aec9a-973d-495f-8d02-5866b64243f0 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1384.813785] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7a3ee3a-9090-4bd6-8a5b-a6b825e39da7 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1384.829566] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f7b5d02-612c-4279-af77-9037f7bb1225 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1384.862169] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d418262c-a25b-4895-a212-e2dd4b9218fa {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1384.869295] env[62519]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-bf1435ab-91d9-43d2-a04d-89624931ad9b {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1384.890445] env[62519]: DEBUG nova.compute.utils [None req-93eb857c-a392-41fa-ab77-ecb0c92fa9f6 tempest-ServerDiagnosticsNegativeTest-1844110598 tempest-ServerDiagnosticsNegativeTest-1844110598-project-member] Using /dev/sd instead of None {{(pid=62519) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1384.894650] env[62519]: DEBUG nova.compute.manager [None req-93eb857c-a392-41fa-ab77-ecb0c92fa9f6 tempest-ServerDiagnosticsNegativeTest-1844110598 tempest-ServerDiagnosticsNegativeTest-1844110598-project-member] [instance: c616d8ec-f28a-4430-a336-1ea4790fd511] Allocating IP information in the background. 
{{(pid=62519) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1989}} [ 1384.894650] env[62519]: DEBUG nova.network.neutron [None req-93eb857c-a392-41fa-ab77-ecb0c92fa9f6 tempest-ServerDiagnosticsNegativeTest-1844110598 tempest-ServerDiagnosticsNegativeTest-1844110598-project-member] [instance: c616d8ec-f28a-4430-a336-1ea4790fd511] allocate_for_instance() {{(pid=62519) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1384.899637] env[62519]: DEBUG nova.virt.vmwareapi.images [None req-5e4798b6-5fc6-48b8-933e-b53f21d26399 tempest-DeleteServersAdminTestJSON-456585889 tempest-DeleteServersAdminTestJSON-456585889-project-member] [instance: f6e29557-05ad-4a11-bd01-0315926c0413] Downloading image file data 15793716-f1d9-4a86-9030-717adf498693 to the data store datastore1 {{(pid=62519) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1384.986067] env[62519]: DEBUG oslo_concurrency.lockutils [None req-c40feabf-b595-479f-ad29-1457519d4d7d tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Releasing lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1384.986528] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-c40feabf-b595-479f-ad29-1457519d4d7d tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] [instance: fe350d30-6fbd-4813-9634-ed05984fecfd] Processing image 15793716-f1d9-4a86-9030-717adf498693 {{(pid=62519) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1384.986894] env[62519]: DEBUG oslo_concurrency.lockutils [None req-c40feabf-b595-479f-ad29-1457519d4d7d tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1384.987828] env[62519]: DEBUG oslo_vmware.rw_handles [None req-5e4798b6-5fc6-48b8-933e-b53f21d26399 tempest-DeleteServersAdminTestJSON-456585889 tempest-DeleteServersAdminTestJSON-456585889-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/8bb661dd-f26f-4bae-b81c-213fc371975a/15793716-f1d9-4a86-9030-717adf498693/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=62519) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1385.065871] env[62519]: DEBUG nova.compute.manager [None req-94daf7e7-f2b3-4130-bb71-0b7053690311 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] [instance: 11d4a010-959f-4f53-94dc-7499007612ad] Starting instance... 
{{(pid=62519) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2441}} [ 1385.072020] env[62519]: DEBUG nova.policy [None req-93eb857c-a392-41fa-ab77-ecb0c92fa9f6 tempest-ServerDiagnosticsNegativeTest-1844110598 tempest-ServerDiagnosticsNegativeTest-1844110598-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '6378e66583c141beb766f593d72be3e1', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'eb97d9e20070460a910c2b2b2c2c27aa', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62519) authorize /opt/stack/nova/nova/policy.py:192}} [ 1385.395405] env[62519]: DEBUG nova.compute.manager [None req-93eb857c-a392-41fa-ab77-ecb0c92fa9f6 tempest-ServerDiagnosticsNegativeTest-1844110598 tempest-ServerDiagnosticsNegativeTest-1844110598-project-member] [instance: c616d8ec-f28a-4430-a336-1ea4790fd511] Start building block device mappings for instance. {{(pid=62519) _build_resources /opt/stack/nova/nova/compute/manager.py:2873}} [ 1385.600500] env[62519]: DEBUG oslo_concurrency.lockutils [None req-94daf7e7-f2b3-4130-bb71-0b7053690311 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1385.624321] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5725fb9f-46d0-4d93-a06b-24fcfe61ced2 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1385.634243] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78342ea3-b93a-4a92-998b-a8ec77c040b2 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1385.670067] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ef92b51-dea0-4ff1-85c0-632ceaa51db6 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1385.681019] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f874834-84dd-4a88-80cf-30959dec6aee {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1385.697645] env[62519]: DEBUG nova.compute.provider_tree [None req-2c6d3fbe-1808-4f6d-8aed-7a7c11b6565e tempest-ServersAdminNegativeTestJSON-2096466474 tempest-ServersAdminNegativeTestJSON-2096466474-project-member] Inventory has not changed in ProviderTree for provider: f8ca0d98-9158-4b85-ae0e-b106f966dd44 {{(pid=62519) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1385.808516] env[62519]: DEBUG oslo_vmware.rw_handles [None req-5e4798b6-5fc6-48b8-933e-b53f21d26399 tempest-DeleteServersAdminTestJSON-456585889 tempest-DeleteServersAdminTestJSON-456585889-project-member] Completed reading data from the image iterator. 
{{(pid=62519) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1385.808578] env[62519]: DEBUG oslo_vmware.rw_handles [None req-5e4798b6-5fc6-48b8-933e-b53f21d26399 tempest-DeleteServersAdminTestJSON-456585889 tempest-DeleteServersAdminTestJSON-456585889-project-member] Closing write handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/8bb661dd-f26f-4bae-b81c-213fc371975a/15793716-f1d9-4a86-9030-717adf498693/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=62519) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1385.850026] env[62519]: DEBUG nova.network.neutron [None req-34140d02-73d9-488b-8a14-f48460d5079c tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] [instance: 8b178cc0-db79-4ec2-8962-f31b936f8eff] Successfully updated port: f1b8639e-9539-46c0-8663-e7017bf77486 {{(pid=62519) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1385.869150] env[62519]: DEBUG nova.virt.vmwareapi.images [None req-5e4798b6-5fc6-48b8-933e-b53f21d26399 tempest-DeleteServersAdminTestJSON-456585889 tempest-DeleteServersAdminTestJSON-456585889-project-member] [instance: f6e29557-05ad-4a11-bd01-0315926c0413] Downloaded image file data 15793716-f1d9-4a86-9030-717adf498693 to vmware_temp/8bb661dd-f26f-4bae-b81c-213fc371975a/15793716-f1d9-4a86-9030-717adf498693/tmp-sparse.vmdk on the data store datastore1 {{(pid=62519) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1385.872441] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-5e4798b6-5fc6-48b8-933e-b53f21d26399 tempest-DeleteServersAdminTestJSON-456585889 tempest-DeleteServersAdminTestJSON-456585889-project-member] [instance: f6e29557-05ad-4a11-bd01-0315926c0413] Caching image {{(pid=62519) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1385.872441] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-5e4798b6-5fc6-48b8-933e-b53f21d26399 tempest-DeleteServersAdminTestJSON-456585889 tempest-DeleteServersAdminTestJSON-456585889-project-member] Copying Virtual Disk [datastore1] vmware_temp/8bb661dd-f26f-4bae-b81c-213fc371975a/15793716-f1d9-4a86-9030-717adf498693/tmp-sparse.vmdk to [datastore1] vmware_temp/8bb661dd-f26f-4bae-b81c-213fc371975a/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk {{(pid=62519) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1385.872441] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c320f1c7-cce8-43f9-9f72-0e9166922a75 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1385.882608] env[62519]: DEBUG oslo_vmware.api [None req-5e4798b6-5fc6-48b8-933e-b53f21d26399 tempest-DeleteServersAdminTestJSON-456585889 tempest-DeleteServersAdminTestJSON-456585889-project-member] Waiting for the task: (returnval){ [ 1385.882608] env[62519]: value = "task-1801887" [ 1385.882608] env[62519]: _type = "Task" [ 1385.882608] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1385.893331] env[62519]: DEBUG oslo_vmware.api [None req-5e4798b6-5fc6-48b8-933e-b53f21d26399 tempest-DeleteServersAdminTestJSON-456585889 tempest-DeleteServersAdminTestJSON-456585889-project-member] Task: {'id': task-1801887, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1386.057609] env[62519]: DEBUG nova.network.neutron [None req-93eb857c-a392-41fa-ab77-ecb0c92fa9f6 tempest-ServerDiagnosticsNegativeTest-1844110598 tempest-ServerDiagnosticsNegativeTest-1844110598-project-member] [instance: c616d8ec-f28a-4430-a336-1ea4790fd511] Successfully created port: f9e01aea-e1f6-4372-aa97-658cfe3480e2 {{(pid=62519) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1386.207341] env[62519]: DEBUG nova.scheduler.client.report [None req-2c6d3fbe-1808-4f6d-8aed-7a7c11b6565e tempest-ServersAdminNegativeTestJSON-2096466474 tempest-ServersAdminNegativeTestJSON-2096466474-project-member] Inventory has not changed for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 160, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1386.355853] env[62519]: DEBUG oslo_concurrency.lockutils [None req-34140d02-73d9-488b-8a14-f48460d5079c tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Acquiring lock "refresh_cache-8b178cc0-db79-4ec2-8962-f31b936f8eff" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1386.355853] env[62519]: DEBUG oslo_concurrency.lockutils [None req-34140d02-73d9-488b-8a14-f48460d5079c tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Acquired lock "refresh_cache-8b178cc0-db79-4ec2-8962-f31b936f8eff" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1386.355853] env[62519]: DEBUG nova.network.neutron [None req-34140d02-73d9-488b-8a14-f48460d5079c tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] [instance: 8b178cc0-db79-4ec2-8962-f31b936f8eff] Building network info cache for instance {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1386.371878] env[62519]: DEBUG nova.compute.manager [req-0939c5c8-de08-43f1-b0ac-97970b61d9f2 req-740ed96a-83a0-46d4-8b64-6beca4295f66 service nova] [instance: f6e29557-05ad-4a11-bd01-0315926c0413] Received event network-changed-f5e6eec5-40b5-467f-8e50-4c4bc6cf8108 {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1386.372607] env[62519]: DEBUG nova.compute.manager [req-0939c5c8-de08-43f1-b0ac-97970b61d9f2 req-740ed96a-83a0-46d4-8b64-6beca4295f66 service nova] [instance: f6e29557-05ad-4a11-bd01-0315926c0413] Refreshing instance network info cache due to event network-changed-f5e6eec5-40b5-467f-8e50-4c4bc6cf8108. 
{{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11628}} [ 1386.372607] env[62519]: DEBUG oslo_concurrency.lockutils [req-0939c5c8-de08-43f1-b0ac-97970b61d9f2 req-740ed96a-83a0-46d4-8b64-6beca4295f66 service nova] Acquiring lock "refresh_cache-f6e29557-05ad-4a11-bd01-0315926c0413" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1386.372607] env[62519]: DEBUG oslo_concurrency.lockutils [req-0939c5c8-de08-43f1-b0ac-97970b61d9f2 req-740ed96a-83a0-46d4-8b64-6beca4295f66 service nova] Acquired lock "refresh_cache-f6e29557-05ad-4a11-bd01-0315926c0413" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1386.372607] env[62519]: DEBUG nova.network.neutron [req-0939c5c8-de08-43f1-b0ac-97970b61d9f2 req-740ed96a-83a0-46d4-8b64-6beca4295f66 service nova] [instance: f6e29557-05ad-4a11-bd01-0315926c0413] Refreshing network info cache for port f5e6eec5-40b5-467f-8e50-4c4bc6cf8108 {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1386.400437] env[62519]: DEBUG oslo_vmware.api [None req-5e4798b6-5fc6-48b8-933e-b53f21d26399 tempest-DeleteServersAdminTestJSON-456585889 tempest-DeleteServersAdminTestJSON-456585889-project-member] Task: {'id': task-1801887, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1386.411202] env[62519]: DEBUG nova.compute.manager [None req-93eb857c-a392-41fa-ab77-ecb0c92fa9f6 tempest-ServerDiagnosticsNegativeTest-1844110598 tempest-ServerDiagnosticsNegativeTest-1844110598-project-member] [instance: c616d8ec-f28a-4430-a336-1ea4790fd511] Start spawning the instance on the hypervisor. 
{{(pid=62519) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2647}} [ 1386.565019] env[62519]: DEBUG nova.virt.hardware [None req-93eb857c-a392-41fa-ab77-ecb0c92fa9f6 tempest-ServerDiagnosticsNegativeTest-1844110598 tempest-ServerDiagnosticsNegativeTest-1844110598-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T07:56:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T07:55:55Z,direct_url=,disk_format='vmdk',id=15793716-f1d9-4a86-9030-717adf498693,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4069337684d348e0a1ab4eb6a1a2b14d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T07:55:56Z,virtual_size=,visibility=), allow threads: False {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1386.565139] env[62519]: DEBUG nova.virt.hardware [None req-93eb857c-a392-41fa-ab77-ecb0c92fa9f6 tempest-ServerDiagnosticsNegativeTest-1844110598 tempest-ServerDiagnosticsNegativeTest-1844110598-project-member] Flavor limits 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1386.565251] env[62519]: DEBUG nova.virt.hardware [None req-93eb857c-a392-41fa-ab77-ecb0c92fa9f6 tempest-ServerDiagnosticsNegativeTest-1844110598 tempest-ServerDiagnosticsNegativeTest-1844110598-project-member] Image limits 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1386.565499] env[62519]: DEBUG nova.virt.hardware [None req-93eb857c-a392-41fa-ab77-ecb0c92fa9f6 tempest-ServerDiagnosticsNegativeTest-1844110598 tempest-ServerDiagnosticsNegativeTest-1844110598-project-member] Flavor pref 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1386.565569] env[62519]: DEBUG nova.virt.hardware [None req-93eb857c-a392-41fa-ab77-ecb0c92fa9f6 tempest-ServerDiagnosticsNegativeTest-1844110598 tempest-ServerDiagnosticsNegativeTest-1844110598-project-member] Image pref 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1386.565855] env[62519]: DEBUG nova.virt.hardware [None req-93eb857c-a392-41fa-ab77-ecb0c92fa9f6 tempest-ServerDiagnosticsNegativeTest-1844110598 tempest-ServerDiagnosticsNegativeTest-1844110598-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1386.565916] env[62519]: DEBUG nova.virt.hardware [None req-93eb857c-a392-41fa-ab77-ecb0c92fa9f6 tempest-ServerDiagnosticsNegativeTest-1844110598 tempest-ServerDiagnosticsNegativeTest-1844110598-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1386.566827] env[62519]: DEBUG nova.virt.hardware [None req-93eb857c-a392-41fa-ab77-ecb0c92fa9f6 tempest-ServerDiagnosticsNegativeTest-1844110598 tempest-ServerDiagnosticsNegativeTest-1844110598-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62519) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1386.566827] env[62519]: DEBUG nova.virt.hardware [None req-93eb857c-a392-41fa-ab77-ecb0c92fa9f6 tempest-ServerDiagnosticsNegativeTest-1844110598 tempest-ServerDiagnosticsNegativeTest-1844110598-project-member] Got 1 possible topologies {{(pid=62519) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1386.570311] env[62519]: DEBUG nova.virt.hardware [None req-93eb857c-a392-41fa-ab77-ecb0c92fa9f6 tempest-ServerDiagnosticsNegativeTest-1844110598 tempest-ServerDiagnosticsNegativeTest-1844110598-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1386.570386] env[62519]: DEBUG nova.virt.hardware [None req-93eb857c-a392-41fa-ab77-ecb0c92fa9f6 tempest-ServerDiagnosticsNegativeTest-1844110598 tempest-ServerDiagnosticsNegativeTest-1844110598-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1386.571877] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e22881d1-3b08-4e4f-b574-7d52e30b208b {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1386.581471] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5b9822d-dc9c-4632-9ca2-d03a92a71dc0 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1386.682026] env[62519]: DEBUG oslo_concurrency.lockutils [None req-0f0e68dd-2aa4-4309-84c5-3072ca2b5eec tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Acquiring lock "34d2991e-b6df-473d-8994-e45ff57ef131" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1386.682328] env[62519]: DEBUG oslo_concurrency.lockutils [None req-0f0e68dd-2aa4-4309-84c5-3072ca2b5eec tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Lock "34d2991e-b6df-473d-8994-e45ff57ef131" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1386.713434] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2c6d3fbe-1808-4f6d-8aed-7a7c11b6565e tempest-ServersAdminNegativeTestJSON-2096466474 tempest-ServersAdminNegativeTestJSON-2096466474-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.330s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1386.714444] env[62519]: DEBUG nova.compute.manager [None req-2c6d3fbe-1808-4f6d-8aed-7a7c11b6565e tempest-ServersAdminNegativeTestJSON-2096466474 tempest-ServersAdminNegativeTestJSON-2096466474-project-member] [instance: c8b7568b-ba07-4f65-818b-f84910209361] Start building networks asynchronously for instance. 
{{(pid=62519) _build_resources /opt/stack/nova/nova/compute/manager.py:2838}} [ 1386.719976] env[62519]: DEBUG oslo_concurrency.lockutils [None req-7bb4ea3e-e5b3-4b8e-bc50-4a4b860c9418 tempest-ServerDiagnosticsV248Test-635217058 tempest-ServerDiagnosticsV248Test-635217058-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 5.352s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1386.721382] env[62519]: INFO nova.compute.claims [None req-7bb4ea3e-e5b3-4b8e-bc50-4a4b860c9418 tempest-ServerDiagnosticsV248Test-635217058 tempest-ServerDiagnosticsV248Test-635217058-project-member] [instance: ceadcb5e-ee82-4441-b046-f79b973ec05e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1386.894436] env[62519]: DEBUG oslo_vmware.api [None req-5e4798b6-5fc6-48b8-933e-b53f21d26399 tempest-DeleteServersAdminTestJSON-456585889 tempest-DeleteServersAdminTestJSON-456585889-project-member] Task: {'id': task-1801887, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.66583} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1386.894723] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-5e4798b6-5fc6-48b8-933e-b53f21d26399 tempest-DeleteServersAdminTestJSON-456585889 tempest-DeleteServersAdminTestJSON-456585889-project-member] Copied Virtual Disk [datastore1] vmware_temp/8bb661dd-f26f-4bae-b81c-213fc371975a/15793716-f1d9-4a86-9030-717adf498693/tmp-sparse.vmdk to [datastore1] vmware_temp/8bb661dd-f26f-4bae-b81c-213fc371975a/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk {{(pid=62519) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1386.894897] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-5e4798b6-5fc6-48b8-933e-b53f21d26399 tempest-DeleteServersAdminTestJSON-456585889 tempest-DeleteServersAdminTestJSON-456585889-project-member] Deleting the datastore file [datastore1] vmware_temp/8bb661dd-f26f-4bae-b81c-213fc371975a/15793716-f1d9-4a86-9030-717adf498693/tmp-sparse.vmdk {{(pid=62519) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1386.896345] env[62519]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f9df6b91-7f5f-4e77-93ef-2b28272d8b6c {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1386.902523] env[62519]: DEBUG oslo_vmware.api [None req-5e4798b6-5fc6-48b8-933e-b53f21d26399 tempest-DeleteServersAdminTestJSON-456585889 tempest-DeleteServersAdminTestJSON-456585889-project-member] Waiting for the task: (returnval){ [ 1386.902523] env[62519]: value = "task-1801888" [ 1386.902523] env[62519]: _type = "Task" [ 1386.902523] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1386.911492] env[62519]: DEBUG oslo_vmware.api [None req-5e4798b6-5fc6-48b8-933e-b53f21d26399 tempest-DeleteServersAdminTestJSON-456585889 tempest-DeleteServersAdminTestJSON-456585889-project-member] Task: {'id': task-1801888, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1386.912304] env[62519]: DEBUG nova.network.neutron [None req-34140d02-73d9-488b-8a14-f48460d5079c tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] [instance: 8b178cc0-db79-4ec2-8962-f31b936f8eff] Instance cache missing network info. {{(pid=62519) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1387.152264] env[62519]: DEBUG nova.network.neutron [None req-34140d02-73d9-488b-8a14-f48460d5079c tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] [instance: 8b178cc0-db79-4ec2-8962-f31b936f8eff] Updating instance_info_cache with network_info: [{"id": "f1b8639e-9539-46c0-8663-e7017bf77486", "address": "fa:16:3e:c2:e6:49", "network": {"id": "0ccefefe-8ea4-4ea3-91a2-32d7697da357", "bridge": "br-int", "label": "tempest-ImagesTestJSON-41230602-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a0755b34e22d4478817ec4e2d57aac2a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "594b35bb-a20b-4f0e-bd35-9acf9cc6bf11", "external-id": "nsx-vlan-transportzone-299", "segmentation_id": 299, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf1b8639e-95", "ovs_interfaceid": "f1b8639e-9539-46c0-8663-e7017bf77486", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1387.188427] env[62519]: DEBUG nova.compute.manager [None req-0f0e68dd-2aa4-4309-84c5-3072ca2b5eec tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] [instance: 34d2991e-b6df-473d-8994-e45ff57ef131] Starting instance... {{(pid=62519) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2441}} [ 1387.236051] env[62519]: DEBUG nova.compute.utils [None req-2c6d3fbe-1808-4f6d-8aed-7a7c11b6565e tempest-ServersAdminNegativeTestJSON-2096466474 tempest-ServersAdminNegativeTestJSON-2096466474-project-member] Using /dev/sd instead of None {{(pid=62519) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1387.238549] env[62519]: DEBUG nova.compute.manager [None req-2c6d3fbe-1808-4f6d-8aed-7a7c11b6565e tempest-ServersAdminNegativeTestJSON-2096466474 tempest-ServersAdminNegativeTestJSON-2096466474-project-member] [instance: c8b7568b-ba07-4f65-818b-f84910209361] Allocating IP information in the background. 
{{(pid=62519) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1989}} [ 1387.238549] env[62519]: DEBUG nova.network.neutron [None req-2c6d3fbe-1808-4f6d-8aed-7a7c11b6565e tempest-ServersAdminNegativeTestJSON-2096466474 tempest-ServersAdminNegativeTestJSON-2096466474-project-member] [instance: c8b7568b-ba07-4f65-818b-f84910209361] allocate_for_instance() {{(pid=62519) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1387.379600] env[62519]: DEBUG nova.policy [None req-2c6d3fbe-1808-4f6d-8aed-7a7c11b6565e tempest-ServersAdminNegativeTestJSON-2096466474 tempest-ServersAdminNegativeTestJSON-2096466474-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ba6b3804236d423cb5b9590468e6bf89', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'edcdc78fe2504bebb5c834930b20d32e', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62519) authorize /opt/stack/nova/nova/policy.py:192}} [ 1387.413706] env[62519]: DEBUG oslo_vmware.api [None req-5e4798b6-5fc6-48b8-933e-b53f21d26399 tempest-DeleteServersAdminTestJSON-456585889 tempest-DeleteServersAdminTestJSON-456585889-project-member] Task: {'id': task-1801888, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.081504} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1387.413706] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-5e4798b6-5fc6-48b8-933e-b53f21d26399 tempest-DeleteServersAdminTestJSON-456585889 tempest-DeleteServersAdminTestJSON-456585889-project-member] Deleted the datastore file {{(pid=62519) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1387.414118] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-5e4798b6-5fc6-48b8-933e-b53f21d26399 tempest-DeleteServersAdminTestJSON-456585889 tempest-DeleteServersAdminTestJSON-456585889-project-member] Moving file from [datastore1] vmware_temp/8bb661dd-f26f-4bae-b81c-213fc371975a/15793716-f1d9-4a86-9030-717adf498693 to [datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693. {{(pid=62519) file_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:334}} [ 1387.414427] env[62519]: DEBUG oslo_vmware.service [-] Invoking FileManager.MoveDatastoreFile_Task with opID=oslo.vmware-6af9c5f1-3477-45c9-9971-cbda3081d2b3 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1387.422469] env[62519]: DEBUG oslo_vmware.api [None req-5e4798b6-5fc6-48b8-933e-b53f21d26399 tempest-DeleteServersAdminTestJSON-456585889 tempest-DeleteServersAdminTestJSON-456585889-project-member] Waiting for the task: (returnval){ [ 1387.422469] env[62519]: value = "task-1801889" [ 1387.422469] env[62519]: _type = "Task" [ 1387.422469] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1387.434419] env[62519]: DEBUG oslo_vmware.api [None req-5e4798b6-5fc6-48b8-933e-b53f21d26399 tempest-DeleteServersAdminTestJSON-456585889 tempest-DeleteServersAdminTestJSON-456585889-project-member] Task: {'id': task-1801889, 'name': MoveDatastoreFile_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1387.656455] env[62519]: DEBUG oslo_concurrency.lockutils [None req-34140d02-73d9-488b-8a14-f48460d5079c tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Releasing lock "refresh_cache-8b178cc0-db79-4ec2-8962-f31b936f8eff" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1387.656455] env[62519]: DEBUG nova.compute.manager [None req-34140d02-73d9-488b-8a14-f48460d5079c tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] [instance: 8b178cc0-db79-4ec2-8962-f31b936f8eff] Instance network_info: |[{"id": "f1b8639e-9539-46c0-8663-e7017bf77486", "address": "fa:16:3e:c2:e6:49", "network": {"id": "0ccefefe-8ea4-4ea3-91a2-32d7697da357", "bridge": "br-int", "label": "tempest-ImagesTestJSON-41230602-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a0755b34e22d4478817ec4e2d57aac2a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "594b35bb-a20b-4f0e-bd35-9acf9cc6bf11", "external-id": "nsx-vlan-transportzone-299", "segmentation_id": 299, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf1b8639e-95", "ovs_interfaceid": "f1b8639e-9539-46c0-8663-e7017bf77486", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62519) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2004}} [ 1387.656877] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-34140d02-73d9-488b-8a14-f48460d5079c tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] [instance: 8b178cc0-db79-4ec2-8962-f31b936f8eff] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:c2:e6:49', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '594b35bb-a20b-4f0e-bd35-9acf9cc6bf11', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'f1b8639e-9539-46c0-8663-e7017bf77486', 'vif_model': 'vmxnet3'}] {{(pid=62519) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1387.666525] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-34140d02-73d9-488b-8a14-f48460d5079c tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Creating folder: Project (a0755b34e22d4478817ec4e2d57aac2a). Parent ref: group-v373567. {{(pid=62519) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1387.666788] env[62519]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-932c68dc-56f7-4273-ac3c-cd80274ebccf {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1387.677540] env[62519]: INFO nova.virt.vmwareapi.vm_util [None req-34140d02-73d9-488b-8a14-f48460d5079c tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Created folder: Project (a0755b34e22d4478817ec4e2d57aac2a) in parent group-v373567. 
[ 1387.677739] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-34140d02-73d9-488b-8a14-f48460d5079c tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Creating folder: Instances. Parent ref: group-v373574. {{(pid=62519) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1387.677974] env[62519]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c8aa8c90-78d0-4aa4-8831-dd07ef26ff3e {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1387.687296] env[62519]: INFO nova.virt.vmwareapi.vm_util [None req-34140d02-73d9-488b-8a14-f48460d5079c tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Created folder: Instances in parent group-v373574. [ 1387.688064] env[62519]: DEBUG oslo.service.loopingcall [None req-34140d02-73d9-488b-8a14-f48460d5079c tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62519) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1387.688064] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8b178cc0-db79-4ec2-8962-f31b936f8eff] Creating VM on the ESX host {{(pid=62519) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1387.688490] env[62519]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-308eb182-206a-4733-9286-ac475cee9e8d {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1387.715017] env[62519]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1387.715017] env[62519]: value = "task-1801892" [ 1387.715017] env[62519]: _type = "Task" [ 1387.715017] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1387.722418] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1801892, 'name': CreateVM_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1387.728542] env[62519]: DEBUG oslo_concurrency.lockutils [None req-0f0e68dd-2aa4-4309-84c5-3072ca2b5eec tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1387.744855] env[62519]: DEBUG nova.compute.manager [None req-2c6d3fbe-1808-4f6d-8aed-7a7c11b6565e tempest-ServersAdminNegativeTestJSON-2096466474 tempest-ServersAdminNegativeTestJSON-2096466474-project-member] [instance: c8b7568b-ba07-4f65-818b-f84910209361] Start building block device mappings for instance. {{(pid=62519) _build_resources /opt/stack/nova/nova/compute/manager.py:2873}} [ 1387.924755] env[62519]: DEBUG nova.network.neutron [req-0939c5c8-de08-43f1-b0ac-97970b61d9f2 req-740ed96a-83a0-46d4-8b64-6beca4295f66 service nova] [instance: f6e29557-05ad-4a11-bd01-0315926c0413] Updated VIF entry in instance network info cache for port f5e6eec5-40b5-467f-8e50-4c4bc6cf8108. 
{{(pid=62519) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1387.924755] env[62519]: DEBUG nova.network.neutron [req-0939c5c8-de08-43f1-b0ac-97970b61d9f2 req-740ed96a-83a0-46d4-8b64-6beca4295f66 service nova] [instance: f6e29557-05ad-4a11-bd01-0315926c0413] Updating instance_info_cache with network_info: [{"id": "f5e6eec5-40b5-467f-8e50-4c4bc6cf8108", "address": "fa:16:3e:b8:f4:7b", "network": {"id": "b4c20e4e-b4f0-4757-9ab0-b74eef10b678", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.62", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "4069337684d348e0a1ab4eb6a1a2b14d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fe99da4f-5630-4afd-918b-b327193d8489", "external-id": "nsx-vlan-transportzone-688", "segmentation_id": 688, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf5e6eec5-40", "ovs_interfaceid": "f5e6eec5-40b5-467f-8e50-4c4bc6cf8108", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1387.939235] env[62519]: DEBUG oslo_vmware.api [None req-5e4798b6-5fc6-48b8-933e-b53f21d26399 tempest-DeleteServersAdminTestJSON-456585889 tempest-DeleteServersAdminTestJSON-456585889-project-member] Task: {'id': task-1801889, 'name': MoveDatastoreFile_Task, 'duration_secs': 0.025413} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1387.939235] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-5e4798b6-5fc6-48b8-933e-b53f21d26399 tempest-DeleteServersAdminTestJSON-456585889 tempest-DeleteServersAdminTestJSON-456585889-project-member] File moved {{(pid=62519) file_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:346}} [ 1387.939235] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-5e4798b6-5fc6-48b8-933e-b53f21d26399 tempest-DeleteServersAdminTestJSON-456585889 tempest-DeleteServersAdminTestJSON-456585889-project-member] [instance: f6e29557-05ad-4a11-bd01-0315926c0413] Cleaning up location [datastore1] vmware_temp/8bb661dd-f26f-4bae-b81c-213fc371975a {{(pid=62519) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 1387.939235] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-5e4798b6-5fc6-48b8-933e-b53f21d26399 tempest-DeleteServersAdminTestJSON-456585889 tempest-DeleteServersAdminTestJSON-456585889-project-member] Deleting the datastore file [datastore1] vmware_temp/8bb661dd-f26f-4bae-b81c-213fc371975a {{(pid=62519) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1387.939382] env[62519]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-66ce6eb2-62b3-4e34-ba3f-deb392d7ce32 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1387.951508] env[62519]: DEBUG oslo_vmware.api [None req-5e4798b6-5fc6-48b8-933e-b53f21d26399 tempest-DeleteServersAdminTestJSON-456585889 tempest-DeleteServersAdminTestJSON-456585889-project-member] Waiting for the task: (returnval){ [ 1387.951508] env[62519]: value = "task-1801893" [ 1387.951508] env[62519]: _type = "Task" [ 1387.951508] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1387.963545] env[62519]: DEBUG oslo_vmware.api [None req-5e4798b6-5fc6-48b8-933e-b53f21d26399 tempest-DeleteServersAdminTestJSON-456585889 tempest-DeleteServersAdminTestJSON-456585889-project-member] Task: {'id': task-1801893, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1388.028665] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b5d92f6-519d-45c9-8ece-18d171f4abb7 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1388.038159] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e989e388-14c1-4b1c-a81f-a9e95c876771 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1388.074883] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea9733fb-3d5c-4dae-9140-c8c45b292486 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1388.084187] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7c4f7ea-82dd-4832-90c2-ecac9700acd5 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1388.108943] env[62519]: DEBUG nova.compute.provider_tree [None req-7bb4ea3e-e5b3-4b8e-bc50-4a4b860c9418 tempest-ServerDiagnosticsV248Test-635217058 tempest-ServerDiagnosticsV248Test-635217058-project-member] Updating inventory in ProviderTree for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 159, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1388.229910] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1801892, 'name': CreateVM_Task, 'duration_secs': 0.40263} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1388.230197] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8b178cc0-db79-4ec2-8962-f31b936f8eff] Created VM on the ESX host {{(pid=62519) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1388.230839] env[62519]: DEBUG oslo_concurrency.lockutils [None req-34140d02-73d9-488b-8a14-f48460d5079c tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1388.230998] env[62519]: DEBUG oslo_concurrency.lockutils [None req-34140d02-73d9-488b-8a14-f48460d5079c tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Acquired lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1388.231327] env[62519]: DEBUG oslo_concurrency.lockutils [None req-34140d02-73d9-488b-8a14-f48460d5079c tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1388.231584] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c1e9113a-90db-4338-b50a-a8ecbd2b960a {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1388.245574] env[62519]: DEBUG oslo_vmware.api [None req-34140d02-73d9-488b-8a14-f48460d5079c tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Waiting for the task: (returnval){ [ 1388.245574] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]522c6d6d-dd45-3601-0794-2d39bf9c923d" [ 1388.245574] env[62519]: _type = "Task" [ 1388.245574] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1388.259038] env[62519]: DEBUG oslo_vmware.api [None req-34140d02-73d9-488b-8a14-f48460d5079c tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]522c6d6d-dd45-3601-0794-2d39bf9c923d, 'name': SearchDatastore_Task, 'duration_secs': 0.009449} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1388.259730] env[62519]: DEBUG oslo_concurrency.lockutils [None req-34140d02-73d9-488b-8a14-f48460d5079c tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Releasing lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1388.260302] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-34140d02-73d9-488b-8a14-f48460d5079c tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] [instance: 8b178cc0-db79-4ec2-8962-f31b936f8eff] Processing image 15793716-f1d9-4a86-9030-717adf498693 {{(pid=62519) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1388.260665] env[62519]: DEBUG oslo_concurrency.lockutils [None req-34140d02-73d9-488b-8a14-f48460d5079c tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1388.350545] env[62519]: DEBUG nova.network.neutron [None req-2c6d3fbe-1808-4f6d-8aed-7a7c11b6565e tempest-ServersAdminNegativeTestJSON-2096466474 tempest-ServersAdminNegativeTestJSON-2096466474-project-member] [instance: c8b7568b-ba07-4f65-818b-f84910209361] Successfully created port: e03efe42-da32-408a-a635-c94ee5a55303 {{(pid=62519) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1388.431958] env[62519]: DEBUG oslo_concurrency.lockutils [req-0939c5c8-de08-43f1-b0ac-97970b61d9f2 req-740ed96a-83a0-46d4-8b64-6beca4295f66 service nova] Releasing lock "refresh_cache-f6e29557-05ad-4a11-bd01-0315926c0413" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1388.431958] env[62519]: DEBUG nova.compute.manager [req-0939c5c8-de08-43f1-b0ac-97970b61d9f2 req-740ed96a-83a0-46d4-8b64-6beca4295f66 service nova] [instance: fe350d30-6fbd-4813-9634-ed05984fecfd] Received event network-vif-plugged-6cce4a48-b732-4bd4-a39e-bbc701b31b3b {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1388.431958] env[62519]: DEBUG oslo_concurrency.lockutils [req-0939c5c8-de08-43f1-b0ac-97970b61d9f2 req-740ed96a-83a0-46d4-8b64-6beca4295f66 service nova] Acquiring lock "fe350d30-6fbd-4813-9634-ed05984fecfd-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1388.431958] env[62519]: DEBUG oslo_concurrency.lockutils [req-0939c5c8-de08-43f1-b0ac-97970b61d9f2 req-740ed96a-83a0-46d4-8b64-6beca4295f66 service nova] Lock "fe350d30-6fbd-4813-9634-ed05984fecfd-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1388.431958] env[62519]: DEBUG oslo_concurrency.lockutils [req-0939c5c8-de08-43f1-b0ac-97970b61d9f2 req-740ed96a-83a0-46d4-8b64-6beca4295f66 service nova] Lock "fe350d30-6fbd-4813-9634-ed05984fecfd-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62519) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1388.432273] env[62519]: DEBUG nova.compute.manager [req-0939c5c8-de08-43f1-b0ac-97970b61d9f2 req-740ed96a-83a0-46d4-8b64-6beca4295f66 service nova] [instance: fe350d30-6fbd-4813-9634-ed05984fecfd] No waiting events found dispatching network-vif-plugged-6cce4a48-b732-4bd4-a39e-bbc701b31b3b {{(pid=62519) pop_instance_event /opt/stack/nova/nova/compute/manager.py:323}} [ 1388.432273] env[62519]: WARNING nova.compute.manager [req-0939c5c8-de08-43f1-b0ac-97970b61d9f2 req-740ed96a-83a0-46d4-8b64-6beca4295f66 service nova] [instance: fe350d30-6fbd-4813-9634-ed05984fecfd] Received unexpected event network-vif-plugged-6cce4a48-b732-4bd4-a39e-bbc701b31b3b for instance with vm_state building and task_state spawning. [ 1388.432273] env[62519]: DEBUG nova.compute.manager [req-0939c5c8-de08-43f1-b0ac-97970b61d9f2 req-740ed96a-83a0-46d4-8b64-6beca4295f66 service nova] [instance: fe350d30-6fbd-4813-9634-ed05984fecfd] Received event network-changed-6cce4a48-b732-4bd4-a39e-bbc701b31b3b {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1388.432273] env[62519]: DEBUG nova.compute.manager [req-0939c5c8-de08-43f1-b0ac-97970b61d9f2 req-740ed96a-83a0-46d4-8b64-6beca4295f66 service nova] [instance: fe350d30-6fbd-4813-9634-ed05984fecfd] Refreshing instance network info cache due to event network-changed-6cce4a48-b732-4bd4-a39e-bbc701b31b3b. {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11628}} [ 1388.432273] env[62519]: DEBUG oslo_concurrency.lockutils [req-0939c5c8-de08-43f1-b0ac-97970b61d9f2 req-740ed96a-83a0-46d4-8b64-6beca4295f66 service nova] Acquiring lock "refresh_cache-fe350d30-6fbd-4813-9634-ed05984fecfd" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1388.432416] env[62519]: DEBUG oslo_concurrency.lockutils [req-0939c5c8-de08-43f1-b0ac-97970b61d9f2 req-740ed96a-83a0-46d4-8b64-6beca4295f66 service nova] Acquired lock "refresh_cache-fe350d30-6fbd-4813-9634-ed05984fecfd" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1388.432416] env[62519]: DEBUG nova.network.neutron [req-0939c5c8-de08-43f1-b0ac-97970b61d9f2 req-740ed96a-83a0-46d4-8b64-6beca4295f66 service nova] [instance: fe350d30-6fbd-4813-9634-ed05984fecfd] Refreshing network info cache for port 6cce4a48-b732-4bd4-a39e-bbc701b31b3b {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1388.466690] env[62519]: DEBUG oslo_vmware.api [None req-5e4798b6-5fc6-48b8-933e-b53f21d26399 tempest-DeleteServersAdminTestJSON-456585889 tempest-DeleteServersAdminTestJSON-456585889-project-member] Task: {'id': task-1801893, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.026183} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1388.466993] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-5e4798b6-5fc6-48b8-933e-b53f21d26399 tempest-DeleteServersAdminTestJSON-456585889 tempest-DeleteServersAdminTestJSON-456585889-project-member] Deleted the datastore file {{(pid=62519) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1388.471198] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3ef88bd0-fbc1-4846-a55f-2e4eb763d68c {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1388.479569] env[62519]: DEBUG oslo_vmware.api [None req-5e4798b6-5fc6-48b8-933e-b53f21d26399 tempest-DeleteServersAdminTestJSON-456585889 tempest-DeleteServersAdminTestJSON-456585889-project-member] Waiting for the task: (returnval){ [ 1388.479569] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]52db21e9-617a-e61f-abb7-83ad11f951bf" [ 1388.479569] env[62519]: _type = "Task" [ 1388.479569] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1388.488984] env[62519]: DEBUG oslo_vmware.api [None req-5e4798b6-5fc6-48b8-933e-b53f21d26399 tempest-DeleteServersAdminTestJSON-456585889 tempest-DeleteServersAdminTestJSON-456585889-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52db21e9-617a-e61f-abb7-83ad11f951bf, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1388.546700] env[62519]: DEBUG nova.network.neutron [None req-93eb857c-a392-41fa-ab77-ecb0c92fa9f6 tempest-ServerDiagnosticsNegativeTest-1844110598 tempest-ServerDiagnosticsNegativeTest-1844110598-project-member] [instance: c616d8ec-f28a-4430-a336-1ea4790fd511] Successfully updated port: f9e01aea-e1f6-4372-aa97-658cfe3480e2 {{(pid=62519) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1388.636225] env[62519]: ERROR nova.scheduler.client.report [None req-7bb4ea3e-e5b3-4b8e-bc50-4a4b860c9418 tempest-ServerDiagnosticsV248Test-635217058 tempest-ServerDiagnosticsV248Test-635217058-project-member] [req-1ca5530b-99d0-43fd-9f3e-78cce8bee222] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 159, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID f8ca0d98-9158-4b85-ae0e-b106f966dd44. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-1ca5530b-99d0-43fd-9f3e-78cce8bee222"}]} [ 1388.672923] env[62519]: DEBUG nova.scheduler.client.report [None req-7bb4ea3e-e5b3-4b8e-bc50-4a4b860c9418 tempest-ServerDiagnosticsV248Test-635217058 tempest-ServerDiagnosticsV248Test-635217058-project-member] Refreshing inventories for resource provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 {{(pid=62519) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 1388.697087] env[62519]: DEBUG nova.scheduler.client.report [None req-7bb4ea3e-e5b3-4b8e-bc50-4a4b860c9418 tempest-ServerDiagnosticsV248Test-635217058 tempest-ServerDiagnosticsV248Test-635217058-project-member] Updating ProviderTree inventory for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 160, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 1388.697806] env[62519]: DEBUG nova.compute.provider_tree [None req-7bb4ea3e-e5b3-4b8e-bc50-4a4b860c9418 tempest-ServerDiagnosticsV248Test-635217058 tempest-ServerDiagnosticsV248Test-635217058-project-member] Updating inventory in ProviderTree for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 160, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1388.722216] env[62519]: DEBUG nova.scheduler.client.report [None req-7bb4ea3e-e5b3-4b8e-bc50-4a4b860c9418 tempest-ServerDiagnosticsV248Test-635217058 tempest-ServerDiagnosticsV248Test-635217058-project-member] Refreshing aggregate associations for resource provider f8ca0d98-9158-4b85-ae0e-b106f966dd44, aggregates: None {{(pid=62519) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 1388.746522] env[62519]: DEBUG oslo_concurrency.lockutils [None req-4c3fd39d-ef1d-4593-8784-6a91635c8aff tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Acquiring lock "099112ae-569b-4853-bc47-b0b8b97d2525" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1388.746788] env[62519]: DEBUG oslo_concurrency.lockutils [None req-4c3fd39d-ef1d-4593-8784-6a91635c8aff tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Lock "099112ae-569b-4853-bc47-b0b8b97d2525" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1388.758549] 
env[62519]: DEBUG nova.scheduler.client.report [None req-7bb4ea3e-e5b3-4b8e-bc50-4a4b860c9418 tempest-ServerDiagnosticsV248Test-635217058 tempest-ServerDiagnosticsV248Test-635217058-project-member] Refreshing trait associations for resource provider f8ca0d98-9158-4b85-ae0e-b106f966dd44, traits: COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NODE,COMPUTE_SAME_HOST_COLD_MIGRATE,HW_ARCH_X86_64,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_VMDK {{(pid=62519) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 1388.768141] env[62519]: DEBUG nova.compute.manager [None req-2c6d3fbe-1808-4f6d-8aed-7a7c11b6565e tempest-ServersAdminNegativeTestJSON-2096466474 tempest-ServersAdminNegativeTestJSON-2096466474-project-member] [instance: c8b7568b-ba07-4f65-818b-f84910209361] Start spawning the instance on the hypervisor. {{(pid=62519) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2647}} [ 1388.820096] env[62519]: DEBUG nova.virt.hardware [None req-2c6d3fbe-1808-4f6d-8aed-7a7c11b6565e tempest-ServersAdminNegativeTestJSON-2096466474 tempest-ServersAdminNegativeTestJSON-2096466474-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T07:56:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T07:55:55Z,direct_url=,disk_format='vmdk',id=15793716-f1d9-4a86-9030-717adf498693,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4069337684d348e0a1ab4eb6a1a2b14d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T07:55:56Z,virtual_size=,visibility=), allow threads: False {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1388.820401] env[62519]: DEBUG nova.virt.hardware [None req-2c6d3fbe-1808-4f6d-8aed-7a7c11b6565e tempest-ServersAdminNegativeTestJSON-2096466474 tempest-ServersAdminNegativeTestJSON-2096466474-project-member] Flavor limits 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1388.820550] env[62519]: DEBUG nova.virt.hardware [None req-2c6d3fbe-1808-4f6d-8aed-7a7c11b6565e tempest-ServersAdminNegativeTestJSON-2096466474 tempest-ServersAdminNegativeTestJSON-2096466474-project-member] Image limits 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1388.820723] env[62519]: DEBUG nova.virt.hardware [None req-2c6d3fbe-1808-4f6d-8aed-7a7c11b6565e tempest-ServersAdminNegativeTestJSON-2096466474 tempest-ServersAdminNegativeTestJSON-2096466474-project-member] Flavor pref 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1388.820858] env[62519]: DEBUG nova.virt.hardware [None req-2c6d3fbe-1808-4f6d-8aed-7a7c11b6565e tempest-ServersAdminNegativeTestJSON-2096466474 tempest-ServersAdminNegativeTestJSON-2096466474-project-member] Image pref 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1388.820993] env[62519]: DEBUG nova.virt.hardware [None req-2c6d3fbe-1808-4f6d-8aed-7a7c11b6565e tempest-ServersAdminNegativeTestJSON-2096466474 tempest-ServersAdminNegativeTestJSON-2096466474-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, 
cores=65536, threads=65536 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1388.821541] env[62519]: DEBUG nova.virt.hardware [None req-2c6d3fbe-1808-4f6d-8aed-7a7c11b6565e tempest-ServersAdminNegativeTestJSON-2096466474 tempest-ServersAdminNegativeTestJSON-2096466474-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1388.821541] env[62519]: DEBUG nova.virt.hardware [None req-2c6d3fbe-1808-4f6d-8aed-7a7c11b6565e tempest-ServersAdminNegativeTestJSON-2096466474 tempest-ServersAdminNegativeTestJSON-2096466474-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62519) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1388.821541] env[62519]: DEBUG nova.virt.hardware [None req-2c6d3fbe-1808-4f6d-8aed-7a7c11b6565e tempest-ServersAdminNegativeTestJSON-2096466474 tempest-ServersAdminNegativeTestJSON-2096466474-project-member] Got 1 possible topologies {{(pid=62519) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1388.821687] env[62519]: DEBUG nova.virt.hardware [None req-2c6d3fbe-1808-4f6d-8aed-7a7c11b6565e tempest-ServersAdminNegativeTestJSON-2096466474 tempest-ServersAdminNegativeTestJSON-2096466474-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1388.821844] env[62519]: DEBUG nova.virt.hardware [None req-2c6d3fbe-1808-4f6d-8aed-7a7c11b6565e tempest-ServersAdminNegativeTestJSON-2096466474 tempest-ServersAdminNegativeTestJSON-2096466474-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1388.823390] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62959362-c437-4cb2-9072-51b98eb30c6c {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1388.833414] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f98ac2d-8d2f-443a-8568-101e49ec75c3 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1389.001509] env[62519]: DEBUG oslo_vmware.api [None req-5e4798b6-5fc6-48b8-933e-b53f21d26399 tempest-DeleteServersAdminTestJSON-456585889 tempest-DeleteServersAdminTestJSON-456585889-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52db21e9-617a-e61f-abb7-83ad11f951bf, 'name': SearchDatastore_Task, 'duration_secs': 0.030451} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1389.001509] env[62519]: DEBUG oslo_concurrency.lockutils [None req-5e4798b6-5fc6-48b8-933e-b53f21d26399 tempest-DeleteServersAdminTestJSON-456585889 tempest-DeleteServersAdminTestJSON-456585889-project-member] Releasing lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1389.001794] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-5e4798b6-5fc6-48b8-933e-b53f21d26399 tempest-DeleteServersAdminTestJSON-456585889 tempest-DeleteServersAdminTestJSON-456585889-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk to [datastore1] f6e29557-05ad-4a11-bd01-0315926c0413/f6e29557-05ad-4a11-bd01-0315926c0413.vmdk {{(pid=62519) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1389.001976] env[62519]: DEBUG oslo_concurrency.lockutils [None req-c40feabf-b595-479f-ad29-1457519d4d7d tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Acquired lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1389.002472] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-c40feabf-b595-479f-ad29-1457519d4d7d tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62519) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1389.002472] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-bbd6aaba-88ee-4812-b43b-f2a2a3659fe8 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1389.004782] env[62519]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-dd62935a-8c1b-4c7f-a752-d7b9b52749b1 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1389.019885] env[62519]: DEBUG oslo_vmware.api [None req-5e4798b6-5fc6-48b8-933e-b53f21d26399 tempest-DeleteServersAdminTestJSON-456585889 tempest-DeleteServersAdminTestJSON-456585889-project-member] Waiting for the task: (returnval){ [ 1389.019885] env[62519]: value = "task-1801894" [ 1389.019885] env[62519]: _type = "Task" [ 1389.019885] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1389.025675] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-c40feabf-b595-479f-ad29-1457519d4d7d tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62519) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1389.026058] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-c40feabf-b595-479f-ad29-1457519d4d7d tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62519) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1389.033370] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d200fcb9-5f27-417b-81c1-990922d3c3ba {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1389.039924] env[62519]: DEBUG oslo_vmware.api [None req-5e4798b6-5fc6-48b8-933e-b53f21d26399 tempest-DeleteServersAdminTestJSON-456585889 tempest-DeleteServersAdminTestJSON-456585889-project-member] Task: {'id': task-1801894, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1389.044074] env[62519]: DEBUG oslo_vmware.api [None req-c40feabf-b595-479f-ad29-1457519d4d7d tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Waiting for the task: (returnval){ [ 1389.044074] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]52f55e5c-99e2-57c3-8dcc-c36b15742a07" [ 1389.044074] env[62519]: _type = "Task" [ 1389.044074] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1389.053767] env[62519]: DEBUG oslo_concurrency.lockutils [None req-93eb857c-a392-41fa-ab77-ecb0c92fa9f6 tempest-ServerDiagnosticsNegativeTest-1844110598 tempest-ServerDiagnosticsNegativeTest-1844110598-project-member] Acquiring lock "refresh_cache-c616d8ec-f28a-4430-a336-1ea4790fd511" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1389.054150] env[62519]: DEBUG oslo_concurrency.lockutils [None req-93eb857c-a392-41fa-ab77-ecb0c92fa9f6 tempest-ServerDiagnosticsNegativeTest-1844110598 tempest-ServerDiagnosticsNegativeTest-1844110598-project-member] Acquired lock "refresh_cache-c616d8ec-f28a-4430-a336-1ea4790fd511" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1389.054150] env[62519]: DEBUG nova.network.neutron [None req-93eb857c-a392-41fa-ab77-ecb0c92fa9f6 tempest-ServerDiagnosticsNegativeTest-1844110598 tempest-ServerDiagnosticsNegativeTest-1844110598-project-member] [instance: c616d8ec-f28a-4430-a336-1ea4790fd511] Building network info cache for instance {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1389.055671] env[62519]: DEBUG oslo_vmware.api [None req-c40feabf-b595-479f-ad29-1457519d4d7d tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52f55e5c-99e2-57c3-8dcc-c36b15742a07, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1389.086456] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3cd68533-7b44-411a-b030-7d2f7b84e766 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1389.095649] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2a5aafc-24e8-4269-bbe6-e10edba3932d {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1389.137824] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f80d2f3b-c02f-41a6-bada-0b1d749f68e6 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1389.147063] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb2e5534-bab5-4618-9bd8-33aee5676e37 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1389.166901] env[62519]: DEBUG nova.compute.provider_tree [None req-7bb4ea3e-e5b3-4b8e-bc50-4a4b860c9418 tempest-ServerDiagnosticsV248Test-635217058 tempest-ServerDiagnosticsV248Test-635217058-project-member] Updating inventory in ProviderTree for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 159, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1389.432263] env[62519]: DEBUG nova.compute.manager [req-90b89f3e-19ee-44cc-8861-03951598a5d6 req-6a2676ee-1080-48b8-85bf-5b5cbcf1c13f service nova] [instance: 8b178cc0-db79-4ec2-8962-f31b936f8eff] Received event network-vif-plugged-f1b8639e-9539-46c0-8663-e7017bf77486 {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1389.432559] env[62519]: DEBUG oslo_concurrency.lockutils [req-90b89f3e-19ee-44cc-8861-03951598a5d6 req-6a2676ee-1080-48b8-85bf-5b5cbcf1c13f service nova] Acquiring lock "8b178cc0-db79-4ec2-8962-f31b936f8eff-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1389.432819] env[62519]: DEBUG oslo_concurrency.lockutils [req-90b89f3e-19ee-44cc-8861-03951598a5d6 req-6a2676ee-1080-48b8-85bf-5b5cbcf1c13f service nova] Lock "8b178cc0-db79-4ec2-8962-f31b936f8eff-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1389.433074] env[62519]: DEBUG oslo_concurrency.lockutils [req-90b89f3e-19ee-44cc-8861-03951598a5d6 req-6a2676ee-1080-48b8-85bf-5b5cbcf1c13f service nova] Lock "8b178cc0-db79-4ec2-8962-f31b936f8eff-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1389.433239] env[62519]: DEBUG 
nova.compute.manager [req-90b89f3e-19ee-44cc-8861-03951598a5d6 req-6a2676ee-1080-48b8-85bf-5b5cbcf1c13f service nova] [instance: 8b178cc0-db79-4ec2-8962-f31b936f8eff] No waiting events found dispatching network-vif-plugged-f1b8639e-9539-46c0-8663-e7017bf77486 {{(pid=62519) pop_instance_event /opt/stack/nova/nova/compute/manager.py:323}} [ 1389.433441] env[62519]: WARNING nova.compute.manager [req-90b89f3e-19ee-44cc-8861-03951598a5d6 req-6a2676ee-1080-48b8-85bf-5b5cbcf1c13f service nova] [instance: 8b178cc0-db79-4ec2-8962-f31b936f8eff] Received unexpected event network-vif-plugged-f1b8639e-9539-46c0-8663-e7017bf77486 for instance with vm_state building and task_state spawning. [ 1389.433843] env[62519]: DEBUG nova.compute.manager [req-90b89f3e-19ee-44cc-8861-03951598a5d6 req-6a2676ee-1080-48b8-85bf-5b5cbcf1c13f service nova] [instance: 8b178cc0-db79-4ec2-8962-f31b936f8eff] Received event network-changed-f1b8639e-9539-46c0-8663-e7017bf77486 {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1389.433937] env[62519]: DEBUG nova.compute.manager [req-90b89f3e-19ee-44cc-8861-03951598a5d6 req-6a2676ee-1080-48b8-85bf-5b5cbcf1c13f service nova] [instance: 8b178cc0-db79-4ec2-8962-f31b936f8eff] Refreshing instance network info cache due to event network-changed-f1b8639e-9539-46c0-8663-e7017bf77486. {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11628}} [ 1389.435752] env[62519]: DEBUG oslo_concurrency.lockutils [req-90b89f3e-19ee-44cc-8861-03951598a5d6 req-6a2676ee-1080-48b8-85bf-5b5cbcf1c13f service nova] Acquiring lock "refresh_cache-8b178cc0-db79-4ec2-8962-f31b936f8eff" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1389.435752] env[62519]: DEBUG oslo_concurrency.lockutils [req-90b89f3e-19ee-44cc-8861-03951598a5d6 req-6a2676ee-1080-48b8-85bf-5b5cbcf1c13f service nova] Acquired lock "refresh_cache-8b178cc0-db79-4ec2-8962-f31b936f8eff" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1389.435752] env[62519]: DEBUG nova.network.neutron [req-90b89f3e-19ee-44cc-8861-03951598a5d6 req-6a2676ee-1080-48b8-85bf-5b5cbcf1c13f service nova] [instance: 8b178cc0-db79-4ec2-8962-f31b936f8eff] Refreshing network info cache for port f1b8639e-9539-46c0-8663-e7017bf77486 {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1389.530539] env[62519]: DEBUG oslo_vmware.api [None req-5e4798b6-5fc6-48b8-933e-b53f21d26399 tempest-DeleteServersAdminTestJSON-456585889 tempest-DeleteServersAdminTestJSON-456585889-project-member] Task: {'id': task-1801894, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1389.556241] env[62519]: DEBUG oslo_vmware.api [None req-c40feabf-b595-479f-ad29-1457519d4d7d tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52f55e5c-99e2-57c3-8dcc-c36b15742a07, 'name': SearchDatastore_Task, 'duration_secs': 0.008959} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1389.560201] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-72039eef-57ec-4005-8859-4122e7263668 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1389.568570] env[62519]: DEBUG oslo_vmware.api [None req-c40feabf-b595-479f-ad29-1457519d4d7d tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Waiting for the task: (returnval){ [ 1389.568570] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]520e4755-d5a7-ae25-a5ed-3fca33338912" [ 1389.568570] env[62519]: _type = "Task" [ 1389.568570] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1389.580687] env[62519]: DEBUG oslo_vmware.api [None req-c40feabf-b595-479f-ad29-1457519d4d7d tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]520e4755-d5a7-ae25-a5ed-3fca33338912, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1389.692544] env[62519]: ERROR nova.scheduler.client.report [None req-7bb4ea3e-e5b3-4b8e-bc50-4a4b860c9418 tempest-ServerDiagnosticsV248Test-635217058 tempest-ServerDiagnosticsV248Test-635217058-project-member] [req-126969c7-3911-4894-8b55-884bb0a16dc5] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 159, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID f8ca0d98-9158-4b85-ae0e-b106f966dd44. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-126969c7-3911-4894-8b55-884bb0a16dc5"}]} [ 1389.711344] env[62519]: DEBUG nova.scheduler.client.report [None req-7bb4ea3e-e5b3-4b8e-bc50-4a4b860c9418 tempest-ServerDiagnosticsV248Test-635217058 tempest-ServerDiagnosticsV248Test-635217058-project-member] Refreshing inventories for resource provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 {{(pid=62519) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 1389.716886] env[62519]: DEBUG nova.network.neutron [None req-93eb857c-a392-41fa-ab77-ecb0c92fa9f6 tempest-ServerDiagnosticsNegativeTest-1844110598 tempest-ServerDiagnosticsNegativeTest-1844110598-project-member] [instance: c616d8ec-f28a-4430-a336-1ea4790fd511] Instance cache missing network info. 
{{(pid=62519) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1389.733698] env[62519]: DEBUG nova.scheduler.client.report [None req-7bb4ea3e-e5b3-4b8e-bc50-4a4b860c9418 tempest-ServerDiagnosticsV248Test-635217058 tempest-ServerDiagnosticsV248Test-635217058-project-member] Updating ProviderTree inventory for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 160, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 1389.733698] env[62519]: DEBUG nova.compute.provider_tree [None req-7bb4ea3e-e5b3-4b8e-bc50-4a4b860c9418 tempest-ServerDiagnosticsV248Test-635217058 tempest-ServerDiagnosticsV248Test-635217058-project-member] Updating inventory in ProviderTree for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 160, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1389.746924] env[62519]: DEBUG nova.scheduler.client.report [None req-7bb4ea3e-e5b3-4b8e-bc50-4a4b860c9418 tempest-ServerDiagnosticsV248Test-635217058 tempest-ServerDiagnosticsV248Test-635217058-project-member] Refreshing aggregate associations for resource provider f8ca0d98-9158-4b85-ae0e-b106f966dd44, aggregates: None {{(pid=62519) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 1389.779553] env[62519]: DEBUG nova.scheduler.client.report [None req-7bb4ea3e-e5b3-4b8e-bc50-4a4b860c9418 tempest-ServerDiagnosticsV248Test-635217058 tempest-ServerDiagnosticsV248Test-635217058-project-member] Refreshing trait associations for resource provider f8ca0d98-9158-4b85-ae0e-b106f966dd44, traits: COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NODE,COMPUTE_SAME_HOST_COLD_MIGRATE,HW_ARCH_X86_64,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_VMDK {{(pid=62519) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 1389.903715] env[62519]: DEBUG nova.network.neutron [req-0939c5c8-de08-43f1-b0ac-97970b61d9f2 req-740ed96a-83a0-46d4-8b64-6beca4295f66 service nova] [instance: fe350d30-6fbd-4813-9634-ed05984fecfd] Updated VIF entry in instance network info cache for port 6cce4a48-b732-4bd4-a39e-bbc701b31b3b. 
{{(pid=62519) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1389.904090] env[62519]: DEBUG nova.network.neutron [req-0939c5c8-de08-43f1-b0ac-97970b61d9f2 req-740ed96a-83a0-46d4-8b64-6beca4295f66 service nova] [instance: fe350d30-6fbd-4813-9634-ed05984fecfd] Updating instance_info_cache with network_info: [{"id": "6cce4a48-b732-4bd4-a39e-bbc701b31b3b", "address": "fa:16:3e:21:41:9e", "network": {"id": "b4c20e4e-b4f0-4757-9ab0-b74eef10b678", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.181", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "4069337684d348e0a1ab4eb6a1a2b14d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fe99da4f-5630-4afd-918b-b327193d8489", "external-id": "nsx-vlan-transportzone-688", "segmentation_id": 688, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6cce4a48-b7", "ovs_interfaceid": "6cce4a48-b732-4bd4-a39e-bbc701b31b3b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1389.910762] env[62519]: DEBUG oslo_concurrency.lockutils [None req-3dc13758-4a00-4637-af78-ec1a85cd10bf tempest-ServersAdmin275Test-1657935187 tempest-ServersAdmin275Test-1657935187-project-member] Acquiring lock "c07e4d30-44bc-417b-8137-97f974aec932" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1389.910762] env[62519]: DEBUG oslo_concurrency.lockutils [None req-3dc13758-4a00-4637-af78-ec1a85cd10bf tempest-ServersAdmin275Test-1657935187 tempest-ServersAdmin275Test-1657935187-project-member] Lock "c07e4d30-44bc-417b-8137-97f974aec932" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1390.036447] env[62519]: DEBUG oslo_vmware.api [None req-5e4798b6-5fc6-48b8-933e-b53f21d26399 tempest-DeleteServersAdminTestJSON-456585889 tempest-DeleteServersAdminTestJSON-456585889-project-member] Task: {'id': task-1801894, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.551324} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1390.042404] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-5e4798b6-5fc6-48b8-933e-b53f21d26399 tempest-DeleteServersAdminTestJSON-456585889 tempest-DeleteServersAdminTestJSON-456585889-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk to [datastore1] f6e29557-05ad-4a11-bd01-0315926c0413/f6e29557-05ad-4a11-bd01-0315926c0413.vmdk {{(pid=62519) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1390.044369] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-5e4798b6-5fc6-48b8-933e-b53f21d26399 tempest-DeleteServersAdminTestJSON-456585889 tempest-DeleteServersAdminTestJSON-456585889-project-member] [instance: f6e29557-05ad-4a11-bd01-0315926c0413] Extending root virtual disk to 1048576 {{(pid=62519) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1390.046446] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-16d80636-4052-45c7-916b-648ff93f939b {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1390.064293] env[62519]: DEBUG oslo_vmware.api [None req-5e4798b6-5fc6-48b8-933e-b53f21d26399 tempest-DeleteServersAdminTestJSON-456585889 tempest-DeleteServersAdminTestJSON-456585889-project-member] Waiting for the task: (returnval){ [ 1390.064293] env[62519]: value = "task-1801895" [ 1390.064293] env[62519]: _type = "Task" [ 1390.064293] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1390.101104] env[62519]: DEBUG oslo_vmware.api [None req-5e4798b6-5fc6-48b8-933e-b53f21d26399 tempest-DeleteServersAdminTestJSON-456585889 tempest-DeleteServersAdminTestJSON-456585889-project-member] Task: {'id': task-1801895, 'name': ExtendVirtualDisk_Task} progress is 50%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1390.104391] env[62519]: DEBUG oslo_vmware.api [None req-c40feabf-b595-479f-ad29-1457519d4d7d tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]520e4755-d5a7-ae25-a5ed-3fca33338912, 'name': SearchDatastore_Task, 'duration_secs': 0.010739} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1390.104692] env[62519]: DEBUG oslo_concurrency.lockutils [None req-c40feabf-b595-479f-ad29-1457519d4d7d tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Releasing lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1390.105042] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-c40feabf-b595-479f-ad29-1457519d4d7d tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk to [datastore1] fe350d30-6fbd-4813-9634-ed05984fecfd/fe350d30-6fbd-4813-9634-ed05984fecfd.vmdk {{(pid=62519) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1390.106124] env[62519]: DEBUG oslo_concurrency.lockutils [None req-34140d02-73d9-488b-8a14-f48460d5079c tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Acquired lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1390.106124] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-34140d02-73d9-488b-8a14-f48460d5079c tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62519) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1390.106124] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e00590c2-e40f-441a-9f21-819b56ad949f {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1390.111776] env[62519]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ffe2e08e-e431-4feb-ae88-0ba5966e4155 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1390.123412] env[62519]: DEBUG oslo_vmware.api [None req-c40feabf-b595-479f-ad29-1457519d4d7d tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Waiting for the task: (returnval){ [ 1390.123412] env[62519]: value = "task-1801896" [ 1390.123412] env[62519]: _type = "Task" [ 1390.123412] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1390.135040] env[62519]: DEBUG oslo_vmware.api [None req-c40feabf-b595-479f-ad29-1457519d4d7d tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Task: {'id': task-1801896, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1390.137353] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4539aaf2-60af-480a-835c-b59ead30aa96 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1390.147338] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c7761fa-cddd-4c7a-89dd-9424f2b9c235 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1390.153306] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-34140d02-73d9-488b-8a14-f48460d5079c tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62519) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1390.153306] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-34140d02-73d9-488b-8a14-f48460d5079c tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62519) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1390.153306] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1ca2ef68-cb57-4876-914b-770d371f7d04 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1390.159441] env[62519]: DEBUG oslo_vmware.api [None req-34140d02-73d9-488b-8a14-f48460d5079c tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Waiting for the task: (returnval){ [ 1390.159441] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]52916934-e4cc-6406-7328-3c635d0491d9" [ 1390.159441] env[62519]: _type = "Task" [ 1390.159441] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1390.197760] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b4d45a6-a398-4461-95eb-f9f99e01ab33 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1390.207201] env[62519]: DEBUG oslo_vmware.api [None req-34140d02-73d9-488b-8a14-f48460d5079c tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52916934-e4cc-6406-7328-3c635d0491d9, 'name': SearchDatastore_Task, 'duration_secs': 0.009835} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1390.212072] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-445c8724-d6cd-4c5c-a49a-5842ed77d113 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1390.219713] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa4a9476-a928-4a06-bbce-6a1a8f567b7c {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1390.238927] env[62519]: DEBUG oslo_vmware.api [None req-34140d02-73d9-488b-8a14-f48460d5079c tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Waiting for the task: (returnval){ [ 1390.238927] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]526201fa-88bf-2cef-4165-311a36d45528" [ 1390.238927] env[62519]: _type = "Task" [ 1390.238927] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1390.238927] env[62519]: DEBUG nova.compute.provider_tree [None req-7bb4ea3e-e5b3-4b8e-bc50-4a4b860c9418 tempest-ServerDiagnosticsV248Test-635217058 tempest-ServerDiagnosticsV248Test-635217058-project-member] Updating inventory in ProviderTree for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 159, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1390.253713] env[62519]: DEBUG oslo_vmware.api [None req-34140d02-73d9-488b-8a14-f48460d5079c tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]526201fa-88bf-2cef-4165-311a36d45528, 'name': SearchDatastore_Task, 'duration_secs': 0.009349} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1390.253713] env[62519]: DEBUG oslo_concurrency.lockutils [None req-34140d02-73d9-488b-8a14-f48460d5079c tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Releasing lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1390.253713] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-34140d02-73d9-488b-8a14-f48460d5079c tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk to [datastore1] 8b178cc0-db79-4ec2-8962-f31b936f8eff/8b178cc0-db79-4ec2-8962-f31b936f8eff.vmdk {{(pid=62519) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1390.253713] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-22ec791d-6ae8-4996-b5e7-10d3b5399b1f {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1390.262273] env[62519]: DEBUG oslo_vmware.api [None req-34140d02-73d9-488b-8a14-f48460d5079c tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Waiting for the task: (returnval){ [ 1390.262273] env[62519]: value = "task-1801897" [ 1390.262273] env[62519]: _type = "Task" [ 1390.262273] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1390.271419] env[62519]: DEBUG oslo_vmware.api [None req-34140d02-73d9-488b-8a14-f48460d5079c tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Task: {'id': task-1801897, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1390.359696] env[62519]: DEBUG nova.network.neutron [None req-93eb857c-a392-41fa-ab77-ecb0c92fa9f6 tempest-ServerDiagnosticsNegativeTest-1844110598 tempest-ServerDiagnosticsNegativeTest-1844110598-project-member] [instance: c616d8ec-f28a-4430-a336-1ea4790fd511] Updating instance_info_cache with network_info: [{"id": "f9e01aea-e1f6-4372-aa97-658cfe3480e2", "address": "fa:16:3e:89:1a:75", "network": {"id": "b4c20e4e-b4f0-4757-9ab0-b74eef10b678", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.84", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "4069337684d348e0a1ab4eb6a1a2b14d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fe99da4f-5630-4afd-918b-b327193d8489", "external-id": "nsx-vlan-transportzone-688", "segmentation_id": 688, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf9e01aea-e1", "ovs_interfaceid": "f9e01aea-e1f6-4372-aa97-658cfe3480e2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1390.406524] env[62519]: DEBUG oslo_concurrency.lockutils [req-0939c5c8-de08-43f1-b0ac-97970b61d9f2 req-740ed96a-83a0-46d4-8b64-6beca4295f66 service nova] Releasing lock "refresh_cache-fe350d30-6fbd-4813-9634-ed05984fecfd" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1390.553868] env[62519]: DEBUG nova.network.neutron [req-90b89f3e-19ee-44cc-8861-03951598a5d6 req-6a2676ee-1080-48b8-85bf-5b5cbcf1c13f service nova] [instance: 8b178cc0-db79-4ec2-8962-f31b936f8eff] Updated VIF entry in instance network info cache for port f1b8639e-9539-46c0-8663-e7017bf77486. 
{{(pid=62519) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1390.553868] env[62519]: DEBUG nova.network.neutron [req-90b89f3e-19ee-44cc-8861-03951598a5d6 req-6a2676ee-1080-48b8-85bf-5b5cbcf1c13f service nova] [instance: 8b178cc0-db79-4ec2-8962-f31b936f8eff] Updating instance_info_cache with network_info: [{"id": "f1b8639e-9539-46c0-8663-e7017bf77486", "address": "fa:16:3e:c2:e6:49", "network": {"id": "0ccefefe-8ea4-4ea3-91a2-32d7697da357", "bridge": "br-int", "label": "tempest-ImagesTestJSON-41230602-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a0755b34e22d4478817ec4e2d57aac2a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "594b35bb-a20b-4f0e-bd35-9acf9cc6bf11", "external-id": "nsx-vlan-transportzone-299", "segmentation_id": 299, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf1b8639e-95", "ovs_interfaceid": "f1b8639e-9539-46c0-8663-e7017bf77486", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1390.579530] env[62519]: DEBUG oslo_vmware.api [None req-5e4798b6-5fc6-48b8-933e-b53f21d26399 tempest-DeleteServersAdminTestJSON-456585889 tempest-DeleteServersAdminTestJSON-456585889-project-member] Task: {'id': task-1801895, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.062034} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1390.581057] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-5e4798b6-5fc6-48b8-933e-b53f21d26399 tempest-DeleteServersAdminTestJSON-456585889 tempest-DeleteServersAdminTestJSON-456585889-project-member] [instance: f6e29557-05ad-4a11-bd01-0315926c0413] Extended root virtual disk {{(pid=62519) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1390.581540] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83d5e755-22d7-4d84-9710-09e79d84b8c9 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1390.616603] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-5e4798b6-5fc6-48b8-933e-b53f21d26399 tempest-DeleteServersAdminTestJSON-456585889 tempest-DeleteServersAdminTestJSON-456585889-project-member] [instance: f6e29557-05ad-4a11-bd01-0315926c0413] Reconfiguring VM instance instance-00000001 to attach disk [datastore1] f6e29557-05ad-4a11-bd01-0315926c0413/f6e29557-05ad-4a11-bd01-0315926c0413.vmdk or device None with type sparse {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1390.617022] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e06eb46b-6d8a-4ea0-a887-b2d1deaec344 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1390.647651] env[62519]: DEBUG oslo_vmware.api [None req-c40feabf-b595-479f-ad29-1457519d4d7d tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Task: {'id': task-1801896, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.500746} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1390.650319] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-c40feabf-b595-479f-ad29-1457519d4d7d tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk to [datastore1] fe350d30-6fbd-4813-9634-ed05984fecfd/fe350d30-6fbd-4813-9634-ed05984fecfd.vmdk {{(pid=62519) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1390.651904] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-c40feabf-b595-479f-ad29-1457519d4d7d tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] [instance: fe350d30-6fbd-4813-9634-ed05984fecfd] Extending root virtual disk to 1048576 {{(pid=62519) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1390.651904] env[62519]: DEBUG oslo_vmware.api [None req-5e4798b6-5fc6-48b8-933e-b53f21d26399 tempest-DeleteServersAdminTestJSON-456585889 tempest-DeleteServersAdminTestJSON-456585889-project-member] Waiting for the task: (returnval){ [ 1390.651904] env[62519]: value = "task-1801898" [ 1390.651904] env[62519]: _type = "Task" [ 1390.651904] env[62519]: } to complete. 
{{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1390.651904] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-40f65218-cf45-45d5-b527-b88c63b91f78 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1390.662722] env[62519]: DEBUG oslo_vmware.api [None req-5e4798b6-5fc6-48b8-933e-b53f21d26399 tempest-DeleteServersAdminTestJSON-456585889 tempest-DeleteServersAdminTestJSON-456585889-project-member] Task: {'id': task-1801898, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1390.666419] env[62519]: DEBUG oslo_vmware.api [None req-c40feabf-b595-479f-ad29-1457519d4d7d tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Waiting for the task: (returnval){ [ 1390.666419] env[62519]: value = "task-1801899" [ 1390.666419] env[62519]: _type = "Task" [ 1390.666419] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1390.676891] env[62519]: DEBUG oslo_vmware.api [None req-c40feabf-b595-479f-ad29-1457519d4d7d tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Task: {'id': task-1801899, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1390.782633] env[62519]: DEBUG oslo_vmware.api [None req-34140d02-73d9-488b-8a14-f48460d5079c tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Task: {'id': task-1801897, 'name': CopyVirtualDisk_Task} progress is 4%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1390.790714] env[62519]: DEBUG nova.scheduler.client.report [None req-7bb4ea3e-e5b3-4b8e-bc50-4a4b860c9418 tempest-ServerDiagnosticsV248Test-635217058 tempest-ServerDiagnosticsV248Test-635217058-project-member] Updated inventory for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 with generation 17 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 159, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 1390.791121] env[62519]: DEBUG nova.compute.provider_tree [None req-7bb4ea3e-e5b3-4b8e-bc50-4a4b860c9418 tempest-ServerDiagnosticsV248Test-635217058 tempest-ServerDiagnosticsV248Test-635217058-project-member] Updating resource provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 generation from 17 to 18 during operation: update_inventory {{(pid=62519) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1390.791979] env[62519]: DEBUG nova.compute.provider_tree [None req-7bb4ea3e-e5b3-4b8e-bc50-4a4b860c9418 tempest-ServerDiagnosticsV248Test-635217058 tempest-ServerDiagnosticsV248Test-635217058-project-member] Updating inventory in ProviderTree for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 159, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1390.864492] env[62519]: DEBUG oslo_concurrency.lockutils [None req-93eb857c-a392-41fa-ab77-ecb0c92fa9f6 tempest-ServerDiagnosticsNegativeTest-1844110598 tempest-ServerDiagnosticsNegativeTest-1844110598-project-member] Releasing lock "refresh_cache-c616d8ec-f28a-4430-a336-1ea4790fd511" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1390.864565] env[62519]: DEBUG nova.compute.manager [None req-93eb857c-a392-41fa-ab77-ecb0c92fa9f6 tempest-ServerDiagnosticsNegativeTest-1844110598 tempest-ServerDiagnosticsNegativeTest-1844110598-project-member] [instance: c616d8ec-f28a-4430-a336-1ea4790fd511] Instance network_info: |[{"id": "f9e01aea-e1f6-4372-aa97-658cfe3480e2", "address": "fa:16:3e:89:1a:75", "network": {"id": "b4c20e4e-b4f0-4757-9ab0-b74eef10b678", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.84", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "4069337684d348e0a1ab4eb6a1a2b14d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fe99da4f-5630-4afd-918b-b327193d8489", "external-id": 
"nsx-vlan-transportzone-688", "segmentation_id": 688, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf9e01aea-e1", "ovs_interfaceid": "f9e01aea-e1f6-4372-aa97-658cfe3480e2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62519) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2004}} [ 1390.865148] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-93eb857c-a392-41fa-ab77-ecb0c92fa9f6 tempest-ServerDiagnosticsNegativeTest-1844110598 tempest-ServerDiagnosticsNegativeTest-1844110598-project-member] [instance: c616d8ec-f28a-4430-a336-1ea4790fd511] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:89:1a:75', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'fe99da4f-5630-4afd-918b-b327193d8489', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'f9e01aea-e1f6-4372-aa97-658cfe3480e2', 'vif_model': 'vmxnet3'}] {{(pid=62519) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1390.874549] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-93eb857c-a392-41fa-ab77-ecb0c92fa9f6 tempest-ServerDiagnosticsNegativeTest-1844110598 tempest-ServerDiagnosticsNegativeTest-1844110598-project-member] Creating folder: Project (eb97d9e20070460a910c2b2b2c2c27aa). Parent ref: group-v373567. {{(pid=62519) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1390.874971] env[62519]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-62eb12dc-c1da-4294-a2b9-99193fa0f328 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1390.889323] env[62519]: INFO nova.virt.vmwareapi.vm_util [None req-93eb857c-a392-41fa-ab77-ecb0c92fa9f6 tempest-ServerDiagnosticsNegativeTest-1844110598 tempest-ServerDiagnosticsNegativeTest-1844110598-project-member] Created folder: Project (eb97d9e20070460a910c2b2b2c2c27aa) in parent group-v373567. [ 1390.889529] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-93eb857c-a392-41fa-ab77-ecb0c92fa9f6 tempest-ServerDiagnosticsNegativeTest-1844110598 tempest-ServerDiagnosticsNegativeTest-1844110598-project-member] Creating folder: Instances. Parent ref: group-v373577. {{(pid=62519) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1390.889809] env[62519]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-9695ba90-6db2-4b06-987a-3d34704dbfa9 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1390.898677] env[62519]: INFO nova.virt.vmwareapi.vm_util [None req-93eb857c-a392-41fa-ab77-ecb0c92fa9f6 tempest-ServerDiagnosticsNegativeTest-1844110598 tempest-ServerDiagnosticsNegativeTest-1844110598-project-member] Created folder: Instances in parent group-v373577. [ 1390.899032] env[62519]: DEBUG oslo.service.loopingcall [None req-93eb857c-a392-41fa-ab77-ecb0c92fa9f6 tempest-ServerDiagnosticsNegativeTest-1844110598 tempest-ServerDiagnosticsNegativeTest-1844110598-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62519) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1390.899292] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c616d8ec-f28a-4430-a336-1ea4790fd511] Creating VM on the ESX host {{(pid=62519) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1390.899651] env[62519]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-60cab253-e82e-4307-ae26-04f7d858e79d {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1390.921702] env[62519]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1390.921702] env[62519]: value = "task-1801902" [ 1390.921702] env[62519]: _type = "Task" [ 1390.921702] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1390.935097] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1801902, 'name': CreateVM_Task} progress is 5%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1391.059031] env[62519]: DEBUG oslo_concurrency.lockutils [req-90b89f3e-19ee-44cc-8861-03951598a5d6 req-6a2676ee-1080-48b8-85bf-5b5cbcf1c13f service nova] Releasing lock "refresh_cache-8b178cc0-db79-4ec2-8962-f31b936f8eff" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1391.059031] env[62519]: DEBUG nova.compute.manager [req-90b89f3e-19ee-44cc-8861-03951598a5d6 req-6a2676ee-1080-48b8-85bf-5b5cbcf1c13f service nova] [instance: c616d8ec-f28a-4430-a336-1ea4790fd511] Received event network-vif-plugged-f9e01aea-e1f6-4372-aa97-658cfe3480e2 {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1391.059031] env[62519]: DEBUG oslo_concurrency.lockutils [req-90b89f3e-19ee-44cc-8861-03951598a5d6 req-6a2676ee-1080-48b8-85bf-5b5cbcf1c13f service nova] Acquiring lock "c616d8ec-f28a-4430-a336-1ea4790fd511-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1391.059031] env[62519]: DEBUG oslo_concurrency.lockutils [req-90b89f3e-19ee-44cc-8861-03951598a5d6 req-6a2676ee-1080-48b8-85bf-5b5cbcf1c13f service nova] Lock "c616d8ec-f28a-4430-a336-1ea4790fd511-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1391.059031] env[62519]: DEBUG oslo_concurrency.lockutils [req-90b89f3e-19ee-44cc-8861-03951598a5d6 req-6a2676ee-1080-48b8-85bf-5b5cbcf1c13f service nova] Lock "c616d8ec-f28a-4430-a336-1ea4790fd511-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1391.059306] env[62519]: DEBUG nova.compute.manager [req-90b89f3e-19ee-44cc-8861-03951598a5d6 req-6a2676ee-1080-48b8-85bf-5b5cbcf1c13f service nova] [instance: c616d8ec-f28a-4430-a336-1ea4790fd511] No waiting events found dispatching network-vif-plugged-f9e01aea-e1f6-4372-aa97-658cfe3480e2 {{(pid=62519) pop_instance_event /opt/stack/nova/nova/compute/manager.py:323}} [ 1391.059306] env[62519]: WARNING nova.compute.manager [req-90b89f3e-19ee-44cc-8861-03951598a5d6 req-6a2676ee-1080-48b8-85bf-5b5cbcf1c13f service nova] 
[instance: c616d8ec-f28a-4430-a336-1ea4790fd511] Received unexpected event network-vif-plugged-f9e01aea-e1f6-4372-aa97-658cfe3480e2 for instance with vm_state building and task_state spawning. [ 1391.164651] env[62519]: DEBUG oslo_vmware.api [None req-5e4798b6-5fc6-48b8-933e-b53f21d26399 tempest-DeleteServersAdminTestJSON-456585889 tempest-DeleteServersAdminTestJSON-456585889-project-member] Task: {'id': task-1801898, 'name': ReconfigVM_Task, 'duration_secs': 0.34611} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1391.169524] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-5e4798b6-5fc6-48b8-933e-b53f21d26399 tempest-DeleteServersAdminTestJSON-456585889 tempest-DeleteServersAdminTestJSON-456585889-project-member] [instance: f6e29557-05ad-4a11-bd01-0315926c0413] Reconfigured VM instance instance-00000001 to attach disk [datastore1] f6e29557-05ad-4a11-bd01-0315926c0413/f6e29557-05ad-4a11-bd01-0315926c0413.vmdk or device None with type sparse {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1391.170350] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-5fce8df6-36fe-4768-bc5b-bbcd19054ac7 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1391.177804] env[62519]: DEBUG oslo_vmware.api [None req-c40feabf-b595-479f-ad29-1457519d4d7d tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Task: {'id': task-1801899, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.07836} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1391.179251] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-c40feabf-b595-479f-ad29-1457519d4d7d tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] [instance: fe350d30-6fbd-4813-9634-ed05984fecfd] Extended root virtual disk {{(pid=62519) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1391.180112] env[62519]: DEBUG oslo_vmware.api [None req-5e4798b6-5fc6-48b8-933e-b53f21d26399 tempest-DeleteServersAdminTestJSON-456585889 tempest-DeleteServersAdminTestJSON-456585889-project-member] Waiting for the task: (returnval){ [ 1391.180112] env[62519]: value = "task-1801903" [ 1391.180112] env[62519]: _type = "Task" [ 1391.180112] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1391.180445] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dbe8da01-ee60-4a37-bd60-bde997ff833e {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1391.199584] env[62519]: DEBUG oslo_vmware.api [None req-5e4798b6-5fc6-48b8-933e-b53f21d26399 tempest-DeleteServersAdminTestJSON-456585889 tempest-DeleteServersAdminTestJSON-456585889-project-member] Task: {'id': task-1801903, 'name': Rename_Task} progress is 10%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1391.230105] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-c40feabf-b595-479f-ad29-1457519d4d7d tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] [instance: fe350d30-6fbd-4813-9634-ed05984fecfd] Reconfiguring VM instance instance-00000002 to attach disk [datastore1] fe350d30-6fbd-4813-9634-ed05984fecfd/fe350d30-6fbd-4813-9634-ed05984fecfd.vmdk or device None with type sparse {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1391.234471] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a51dcefb-cd04-45d7-ad21-88ad47b1d171 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1391.256236] env[62519]: DEBUG oslo_vmware.api [None req-c40feabf-b595-479f-ad29-1457519d4d7d tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Waiting for the task: (returnval){ [ 1391.256236] env[62519]: value = "task-1801904" [ 1391.256236] env[62519]: _type = "Task" [ 1391.256236] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1391.266783] env[62519]: DEBUG oslo_vmware.api [None req-c40feabf-b595-479f-ad29-1457519d4d7d tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Task: {'id': task-1801904, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1391.277634] env[62519]: DEBUG oslo_vmware.api [None req-34140d02-73d9-488b-8a14-f48460d5079c tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Task: {'id': task-1801897, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.79405} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1391.279888] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-34140d02-73d9-488b-8a14-f48460d5079c tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk to [datastore1] 8b178cc0-db79-4ec2-8962-f31b936f8eff/8b178cc0-db79-4ec2-8962-f31b936f8eff.vmdk {{(pid=62519) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1391.279888] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-34140d02-73d9-488b-8a14-f48460d5079c tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] [instance: 8b178cc0-db79-4ec2-8962-f31b936f8eff] Extending root virtual disk to 1048576 {{(pid=62519) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1391.279888] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-a8e22157-c6f1-4f1f-88ab-b177f23d509a {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1391.287705] env[62519]: DEBUG oslo_vmware.api [None req-34140d02-73d9-488b-8a14-f48460d5079c tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Waiting for the task: (returnval){ [ 1391.287705] env[62519]: value = "task-1801905" [ 1391.287705] env[62519]: _type = "Task" [ 1391.287705] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1391.301293] env[62519]: DEBUG oslo_concurrency.lockutils [None req-7bb4ea3e-e5b3-4b8e-bc50-4a4b860c9418 tempest-ServerDiagnosticsV248Test-635217058 tempest-ServerDiagnosticsV248Test-635217058-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 4.580s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1391.301293] env[62519]: DEBUG nova.compute.manager [None req-7bb4ea3e-e5b3-4b8e-bc50-4a4b860c9418 tempest-ServerDiagnosticsV248Test-635217058 tempest-ServerDiagnosticsV248Test-635217058-project-member] [instance: ceadcb5e-ee82-4441-b046-f79b973ec05e] Start building networks asynchronously for instance. {{(pid=62519) _build_resources /opt/stack/nova/nova/compute/manager.py:2838}} [ 1391.303813] env[62519]: DEBUG oslo_vmware.api [None req-34140d02-73d9-488b-8a14-f48460d5079c tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Task: {'id': task-1801905, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1391.308019] env[62519]: DEBUG oslo_concurrency.lockutils [None req-e8efc79a-e5f9-47fc-b2c3-ac5d306260b4 tempest-ServerDiagnosticsTest-1027319288 tempest-ServerDiagnosticsTest-1027319288-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 9.363s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1391.309582] env[62519]: INFO nova.compute.claims [None req-e8efc79a-e5f9-47fc-b2c3-ac5d306260b4 tempest-ServerDiagnosticsTest-1027319288 tempest-ServerDiagnosticsTest-1027319288-project-member] [instance: 1118c1e5-1aa8-4f52-9fb9-e86531bf83d1] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1391.433373] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1801902, 'name': CreateVM_Task, 'duration_secs': 0.41939} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1391.433589] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c616d8ec-f28a-4430-a336-1ea4790fd511] Created VM on the ESX host {{(pid=62519) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1391.434352] env[62519]: DEBUG oslo_concurrency.lockutils [None req-93eb857c-a392-41fa-ab77-ecb0c92fa9f6 tempest-ServerDiagnosticsNegativeTest-1844110598 tempest-ServerDiagnosticsNegativeTest-1844110598-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1391.434630] env[62519]: DEBUG oslo_concurrency.lockutils [None req-93eb857c-a392-41fa-ab77-ecb0c92fa9f6 tempest-ServerDiagnosticsNegativeTest-1844110598 tempest-ServerDiagnosticsNegativeTest-1844110598-project-member] Acquired lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1391.435014] env[62519]: DEBUG oslo_concurrency.lockutils [None req-93eb857c-a392-41fa-ab77-ecb0c92fa9f6 tempest-ServerDiagnosticsNegativeTest-1844110598 tempest-ServerDiagnosticsNegativeTest-1844110598-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1391.435954] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ed91eb13-f3d4-473a-a1f5-1e8dd7063cec {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1391.441057] env[62519]: DEBUG oslo_vmware.api [None req-93eb857c-a392-41fa-ab77-ecb0c92fa9f6 tempest-ServerDiagnosticsNegativeTest-1844110598 tempest-ServerDiagnosticsNegativeTest-1844110598-project-member] Waiting for the task: (returnval){ [ 1391.441057] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]5253bac0-f370-f724-8a76-a86ccfcb784d" [ 1391.441057] env[62519]: _type = "Task" [ 1391.441057] env[62519]: } to complete. 
{{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1391.450961] env[62519]: DEBUG oslo_vmware.api [None req-93eb857c-a392-41fa-ab77-ecb0c92fa9f6 tempest-ServerDiagnosticsNegativeTest-1844110598 tempest-ServerDiagnosticsNegativeTest-1844110598-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]5253bac0-f370-f724-8a76-a86ccfcb784d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1391.472112] env[62519]: DEBUG nova.network.neutron [None req-2c6d3fbe-1808-4f6d-8aed-7a7c11b6565e tempest-ServersAdminNegativeTestJSON-2096466474 tempest-ServersAdminNegativeTestJSON-2096466474-project-member] [instance: c8b7568b-ba07-4f65-818b-f84910209361] Successfully updated port: e03efe42-da32-408a-a635-c94ee5a55303 {{(pid=62519) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1391.695858] env[62519]: DEBUG oslo_vmware.api [None req-5e4798b6-5fc6-48b8-933e-b53f21d26399 tempest-DeleteServersAdminTestJSON-456585889 tempest-DeleteServersAdminTestJSON-456585889-project-member] Task: {'id': task-1801903, 'name': Rename_Task, 'duration_secs': 0.166357} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1391.696491] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-5e4798b6-5fc6-48b8-933e-b53f21d26399 tempest-DeleteServersAdminTestJSON-456585889 tempest-DeleteServersAdminTestJSON-456585889-project-member] [instance: f6e29557-05ad-4a11-bd01-0315926c0413] Powering on the VM {{(pid=62519) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1391.696964] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e8908503-9186-43f2-85f0-ed4b0b5c21b4 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1391.704981] env[62519]: DEBUG oslo_vmware.api [None req-5e4798b6-5fc6-48b8-933e-b53f21d26399 tempest-DeleteServersAdminTestJSON-456585889 tempest-DeleteServersAdminTestJSON-456585889-project-member] Waiting for the task: (returnval){ [ 1391.704981] env[62519]: value = "task-1801906" [ 1391.704981] env[62519]: _type = "Task" [ 1391.704981] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1391.715535] env[62519]: DEBUG oslo_vmware.api [None req-5e4798b6-5fc6-48b8-933e-b53f21d26399 tempest-DeleteServersAdminTestJSON-456585889 tempest-DeleteServersAdminTestJSON-456585889-project-member] Task: {'id': task-1801906, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1391.762385] env[62519]: DEBUG oslo_service.periodic_task [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62519) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1391.770116] env[62519]: DEBUG oslo_vmware.api [None req-c40feabf-b595-479f-ad29-1457519d4d7d tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Task: {'id': task-1801904, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1391.802147] env[62519]: DEBUG oslo_vmware.api [None req-34140d02-73d9-488b-8a14-f48460d5079c tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Task: {'id': task-1801905, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.072518} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1391.802147] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-34140d02-73d9-488b-8a14-f48460d5079c tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] [instance: 8b178cc0-db79-4ec2-8962-f31b936f8eff] Extended root virtual disk {{(pid=62519) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1391.802147] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4f9ad19-b936-4756-ad18-687066663ddc {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1391.821697] env[62519]: DEBUG nova.compute.utils [None req-7bb4ea3e-e5b3-4b8e-bc50-4a4b860c9418 tempest-ServerDiagnosticsV248Test-635217058 tempest-ServerDiagnosticsV248Test-635217058-project-member] Using /dev/sd instead of None {{(pid=62519) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1391.833364] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-34140d02-73d9-488b-8a14-f48460d5079c tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] [instance: 8b178cc0-db79-4ec2-8962-f31b936f8eff] Reconfiguring VM instance instance-00000003 to attach disk [datastore1] 8b178cc0-db79-4ec2-8962-f31b936f8eff/8b178cc0-db79-4ec2-8962-f31b936f8eff.vmdk or device None with type sparse {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1391.836769] env[62519]: DEBUG nova.compute.manager [None req-7bb4ea3e-e5b3-4b8e-bc50-4a4b860c9418 tempest-ServerDiagnosticsV248Test-635217058 tempest-ServerDiagnosticsV248Test-635217058-project-member] [instance: ceadcb5e-ee82-4441-b046-f79b973ec05e] Not allocating networking since 'none' was specified. {{(pid=62519) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1985}} [ 1391.836769] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-53d410d0-4109-4271-a54c-ebcb5d4c7276 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1391.861549] env[62519]: DEBUG oslo_vmware.api [None req-34140d02-73d9-488b-8a14-f48460d5079c tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Waiting for the task: (returnval){ [ 1391.861549] env[62519]: value = "task-1801907" [ 1391.861549] env[62519]: _type = "Task" [ 1391.861549] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1391.871595] env[62519]: DEBUG oslo_vmware.api [None req-34140d02-73d9-488b-8a14-f48460d5079c tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Task: {'id': task-1801907, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1391.961118] env[62519]: DEBUG oslo_vmware.api [None req-93eb857c-a392-41fa-ab77-ecb0c92fa9f6 tempest-ServerDiagnosticsNegativeTest-1844110598 tempest-ServerDiagnosticsNegativeTest-1844110598-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]5253bac0-f370-f724-8a76-a86ccfcb784d, 'name': SearchDatastore_Task, 'duration_secs': 0.009626} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1391.961453] env[62519]: DEBUG oslo_concurrency.lockutils [None req-93eb857c-a392-41fa-ab77-ecb0c92fa9f6 tempest-ServerDiagnosticsNegativeTest-1844110598 tempest-ServerDiagnosticsNegativeTest-1844110598-project-member] Releasing lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1391.961775] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-93eb857c-a392-41fa-ab77-ecb0c92fa9f6 tempest-ServerDiagnosticsNegativeTest-1844110598 tempest-ServerDiagnosticsNegativeTest-1844110598-project-member] [instance: c616d8ec-f28a-4430-a336-1ea4790fd511] Processing image 15793716-f1d9-4a86-9030-717adf498693 {{(pid=62519) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1391.961977] env[62519]: DEBUG oslo_concurrency.lockutils [None req-93eb857c-a392-41fa-ab77-ecb0c92fa9f6 tempest-ServerDiagnosticsNegativeTest-1844110598 tempest-ServerDiagnosticsNegativeTest-1844110598-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1391.962125] env[62519]: DEBUG oslo_concurrency.lockutils [None req-93eb857c-a392-41fa-ab77-ecb0c92fa9f6 tempest-ServerDiagnosticsNegativeTest-1844110598 tempest-ServerDiagnosticsNegativeTest-1844110598-project-member] Acquired lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1391.962296] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-93eb857c-a392-41fa-ab77-ecb0c92fa9f6 tempest-ServerDiagnosticsNegativeTest-1844110598 tempest-ServerDiagnosticsNegativeTest-1844110598-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62519) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1391.964884] env[62519]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-35d4d11a-e9ef-4007-83c3-d268059015f0 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1391.971464] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-93eb857c-a392-41fa-ab77-ecb0c92fa9f6 tempest-ServerDiagnosticsNegativeTest-1844110598 tempest-ServerDiagnosticsNegativeTest-1844110598-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62519) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1391.971464] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-93eb857c-a392-41fa-ab77-ecb0c92fa9f6 tempest-ServerDiagnosticsNegativeTest-1844110598 tempest-ServerDiagnosticsNegativeTest-1844110598-project-member] Folder 
[datastore1] devstack-image-cache_base created. {{(pid=62519) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1391.971989] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2f13ea2d-a966-43af-aa3c-d4a96dd36b8b {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1391.974869] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2c6d3fbe-1808-4f6d-8aed-7a7c11b6565e tempest-ServersAdminNegativeTestJSON-2096466474 tempest-ServersAdminNegativeTestJSON-2096466474-project-member] Acquiring lock "refresh_cache-c8b7568b-ba07-4f65-818b-f84910209361" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1391.975033] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2c6d3fbe-1808-4f6d-8aed-7a7c11b6565e tempest-ServersAdminNegativeTestJSON-2096466474 tempest-ServersAdminNegativeTestJSON-2096466474-project-member] Acquired lock "refresh_cache-c8b7568b-ba07-4f65-818b-f84910209361" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1391.975187] env[62519]: DEBUG nova.network.neutron [None req-2c6d3fbe-1808-4f6d-8aed-7a7c11b6565e tempest-ServersAdminNegativeTestJSON-2096466474 tempest-ServersAdminNegativeTestJSON-2096466474-project-member] [instance: c8b7568b-ba07-4f65-818b-f84910209361] Building network info cache for instance {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1391.978993] env[62519]: DEBUG oslo_vmware.api [None req-93eb857c-a392-41fa-ab77-ecb0c92fa9f6 tempest-ServerDiagnosticsNegativeTest-1844110598 tempest-ServerDiagnosticsNegativeTest-1844110598-project-member] Waiting for the task: (returnval){ [ 1391.978993] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]5276624c-734c-92d0-587d-999130b388c6" [ 1391.978993] env[62519]: _type = "Task" [ 1391.978993] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1391.988899] env[62519]: DEBUG oslo_vmware.api [None req-93eb857c-a392-41fa-ab77-ecb0c92fa9f6 tempest-ServerDiagnosticsNegativeTest-1844110598 tempest-ServerDiagnosticsNegativeTest-1844110598-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]5276624c-734c-92d0-587d-999130b388c6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1392.220417] env[62519]: DEBUG oslo_vmware.api [None req-5e4798b6-5fc6-48b8-933e-b53f21d26399 tempest-DeleteServersAdminTestJSON-456585889 tempest-DeleteServersAdminTestJSON-456585889-project-member] Task: {'id': task-1801906, 'name': PowerOnVM_Task} progress is 100%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1392.241127] env[62519]: DEBUG nova.compute.manager [req-5e29af65-3c68-40fe-91f4-c1132de9dccd req-29797075-6489-4398-b6c5-9ca64544eb8b service nova] [instance: c616d8ec-f28a-4430-a336-1ea4790fd511] Received event network-changed-f9e01aea-e1f6-4372-aa97-658cfe3480e2 {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1392.241127] env[62519]: DEBUG nova.compute.manager [req-5e29af65-3c68-40fe-91f4-c1132de9dccd req-29797075-6489-4398-b6c5-9ca64544eb8b service nova] [instance: c616d8ec-f28a-4430-a336-1ea4790fd511] Refreshing instance network info cache due to event network-changed-f9e01aea-e1f6-4372-aa97-658cfe3480e2. {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11628}} [ 1392.242213] env[62519]: DEBUG oslo_concurrency.lockutils [req-5e29af65-3c68-40fe-91f4-c1132de9dccd req-29797075-6489-4398-b6c5-9ca64544eb8b service nova] Acquiring lock "refresh_cache-c616d8ec-f28a-4430-a336-1ea4790fd511" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1392.242622] env[62519]: DEBUG oslo_concurrency.lockutils [req-5e29af65-3c68-40fe-91f4-c1132de9dccd req-29797075-6489-4398-b6c5-9ca64544eb8b service nova] Acquired lock "refresh_cache-c616d8ec-f28a-4430-a336-1ea4790fd511" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1392.242932] env[62519]: DEBUG nova.network.neutron [req-5e29af65-3c68-40fe-91f4-c1132de9dccd req-29797075-6489-4398-b6c5-9ca64544eb8b service nova] [instance: c616d8ec-f28a-4430-a336-1ea4790fd511] Refreshing network info cache for port f9e01aea-e1f6-4372-aa97-658cfe3480e2 {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1392.268408] env[62519]: DEBUG oslo_vmware.api [None req-c40feabf-b595-479f-ad29-1457519d4d7d tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Task: {'id': task-1801904, 'name': ReconfigVM_Task, 'duration_secs': 0.538499} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1392.268674] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-c40feabf-b595-479f-ad29-1457519d4d7d tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] [instance: fe350d30-6fbd-4813-9634-ed05984fecfd] Reconfigured VM instance instance-00000002 to attach disk [datastore1] fe350d30-6fbd-4813-9634-ed05984fecfd/fe350d30-6fbd-4813-9634-ed05984fecfd.vmdk or device None with type sparse {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1392.269446] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-e4a0f389-4e8a-454e-b21f-7d4fbcccb3e9 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1392.276298] env[62519]: DEBUG oslo_vmware.api [None req-c40feabf-b595-479f-ad29-1457519d4d7d tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Waiting for the task: (returnval){ [ 1392.276298] env[62519]: value = "task-1801908" [ 1392.276298] env[62519]: _type = "Task" [ 1392.276298] env[62519]: } to complete. 
{{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1392.287587] env[62519]: DEBUG oslo_vmware.api [None req-c40feabf-b595-479f-ad29-1457519d4d7d tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Task: {'id': task-1801908, 'name': Rename_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1392.334961] env[62519]: DEBUG nova.compute.manager [None req-7bb4ea3e-e5b3-4b8e-bc50-4a4b860c9418 tempest-ServerDiagnosticsV248Test-635217058 tempest-ServerDiagnosticsV248Test-635217058-project-member] [instance: ceadcb5e-ee82-4441-b046-f79b973ec05e] Start building block device mappings for instance. {{(pid=62519) _build_resources /opt/stack/nova/nova/compute/manager.py:2873}} [ 1392.380217] env[62519]: DEBUG oslo_vmware.api [None req-34140d02-73d9-488b-8a14-f48460d5079c tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Task: {'id': task-1801907, 'name': ReconfigVM_Task, 'duration_secs': 0.435785} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1392.380217] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-34140d02-73d9-488b-8a14-f48460d5079c tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] [instance: 8b178cc0-db79-4ec2-8962-f31b936f8eff] Reconfigured VM instance instance-00000003 to attach disk [datastore1] 8b178cc0-db79-4ec2-8962-f31b936f8eff/8b178cc0-db79-4ec2-8962-f31b936f8eff.vmdk or device None with type sparse {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1392.381181] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-90f19e01-0b17-49a2-86db-5eb749a7e903 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1392.395749] env[62519]: DEBUG oslo_vmware.api [None req-34140d02-73d9-488b-8a14-f48460d5079c tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Waiting for the task: (returnval){ [ 1392.395749] env[62519]: value = "task-1801909" [ 1392.395749] env[62519]: _type = "Task" [ 1392.395749] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1392.409297] env[62519]: DEBUG oslo_vmware.api [None req-34140d02-73d9-488b-8a14-f48460d5079c tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Task: {'id': task-1801909, 'name': Rename_Task} progress is 10%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1392.496197] env[62519]: DEBUG oslo_vmware.api [None req-93eb857c-a392-41fa-ab77-ecb0c92fa9f6 tempest-ServerDiagnosticsNegativeTest-1844110598 tempest-ServerDiagnosticsNegativeTest-1844110598-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]5276624c-734c-92d0-587d-999130b388c6, 'name': SearchDatastore_Task, 'duration_secs': 0.008382} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1392.503029] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8c3a17d3-3716-4574-b2d5-14faf1875074 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1392.509343] env[62519]: DEBUG oslo_vmware.api [None req-93eb857c-a392-41fa-ab77-ecb0c92fa9f6 tempest-ServerDiagnosticsNegativeTest-1844110598 tempest-ServerDiagnosticsNegativeTest-1844110598-project-member] Waiting for the task: (returnval){ [ 1392.509343] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]52dce3de-bb10-d00b-8ebd-776ada3f1080" [ 1392.509343] env[62519]: _type = "Task" [ 1392.509343] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1392.520552] env[62519]: DEBUG oslo_vmware.api [None req-93eb857c-a392-41fa-ab77-ecb0c92fa9f6 tempest-ServerDiagnosticsNegativeTest-1844110598 tempest-ServerDiagnosticsNegativeTest-1844110598-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52dce3de-bb10-d00b-8ebd-776ada3f1080, 'name': SearchDatastore_Task, 'duration_secs': 0.009026} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1392.521321] env[62519]: DEBUG oslo_concurrency.lockutils [None req-93eb857c-a392-41fa-ab77-ecb0c92fa9f6 tempest-ServerDiagnosticsNegativeTest-1844110598 tempest-ServerDiagnosticsNegativeTest-1844110598-project-member] Releasing lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1392.521321] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-93eb857c-a392-41fa-ab77-ecb0c92fa9f6 tempest-ServerDiagnosticsNegativeTest-1844110598 tempest-ServerDiagnosticsNegativeTest-1844110598-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk to [datastore1] c616d8ec-f28a-4430-a336-1ea4790fd511/c616d8ec-f28a-4430-a336-1ea4790fd511.vmdk {{(pid=62519) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1392.521321] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-deb7e5ff-96a0-41e4-86a4-44a0d8199be3 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1392.527912] env[62519]: DEBUG oslo_vmware.api [None req-93eb857c-a392-41fa-ab77-ecb0c92fa9f6 tempest-ServerDiagnosticsNegativeTest-1844110598 tempest-ServerDiagnosticsNegativeTest-1844110598-project-member] Waiting for the task: (returnval){ [ 1392.527912] env[62519]: value = "task-1801910" [ 1392.527912] env[62519]: _type = "Task" [ 1392.527912] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1392.536977] env[62519]: DEBUG oslo_vmware.api [None req-93eb857c-a392-41fa-ab77-ecb0c92fa9f6 tempest-ServerDiagnosticsNegativeTest-1844110598 tempest-ServerDiagnosticsNegativeTest-1844110598-project-member] Task: {'id': task-1801910, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1392.668142] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2778a1c0-fa86-4155-b175-abd7fcf5d78f {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1392.688979] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0dd55660-a0a2-4110-a865-12c7fc2e3099 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1392.725142] env[62519]: DEBUG nova.network.neutron [None req-2c6d3fbe-1808-4f6d-8aed-7a7c11b6565e tempest-ServersAdminNegativeTestJSON-2096466474 tempest-ServersAdminNegativeTestJSON-2096466474-project-member] [instance: c8b7568b-ba07-4f65-818b-f84910209361] Instance cache missing network info. {{(pid=62519) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1392.731020] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c3a51e2-a76a-4754-87b9-f10c23b81029 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1392.742668] env[62519]: DEBUG oslo_vmware.api [None req-5e4798b6-5fc6-48b8-933e-b53f21d26399 tempest-DeleteServersAdminTestJSON-456585889 tempest-DeleteServersAdminTestJSON-456585889-project-member] Task: {'id': task-1801906, 'name': PowerOnVM_Task, 'duration_secs': 0.518693} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1392.743130] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-5e4798b6-5fc6-48b8-933e-b53f21d26399 tempest-DeleteServersAdminTestJSON-456585889 tempest-DeleteServersAdminTestJSON-456585889-project-member] [instance: f6e29557-05ad-4a11-bd01-0315926c0413] Powered on the VM {{(pid=62519) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1392.743601] env[62519]: INFO nova.compute.manager [None req-5e4798b6-5fc6-48b8-933e-b53f21d26399 tempest-DeleteServersAdminTestJSON-456585889 tempest-DeleteServersAdminTestJSON-456585889-project-member] [instance: f6e29557-05ad-4a11-bd01-0315926c0413] Took 16.97 seconds to spawn the instance on the hypervisor. 
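The ExtendVirtualDisk_Task, ReconfigVM_Task and PowerOnVM_Task entries above all follow the oslo.vmware task pattern: an asynchronous *_Task method is invoked through the API session, and wait_for_task() then polls the returned task reference (the _poll_task and "progress is N%" lines) until it completes or raises on error. A minimal sketch of that usage follows; the vCenter address, credentials and vm_ref are placeholders, not values from this log, and the positional constructor arguments may differ between oslo.vmware releases.

# Sketch only: host, credentials and vm_ref are placeholders.
from oslo_vmware import api as vmware_api

# (host, username, password, api_retry_count, task_poll_interval)
session = vmware_api.VMwareAPISession(
    'vcenter.example.org', 'administrator', 'secret', 10, 0.5)

vm_ref = ...  # managed object reference of the target VM, resolved elsewhere

# *_Task methods return immediately with a task managed object.
task_ref = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)

# wait_for_task() polls the task state server-side (the _poll_task /
# "progress is N%" entries above) and raises if the task ends in error.
task_info = session.wait_for_task(task_ref)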
[ 1392.743874] env[62519]: DEBUG nova.compute.manager [None req-5e4798b6-5fc6-48b8-933e-b53f21d26399 tempest-DeleteServersAdminTestJSON-456585889 tempest-DeleteServersAdminTestJSON-456585889-project-member] [instance: f6e29557-05ad-4a11-bd01-0315926c0413] Checking state {{(pid=62519) _get_power_state /opt/stack/nova/nova/compute/manager.py:1799}} [ 1392.745289] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6396c0b-416f-4d9b-b017-0a918dd7221a {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1392.750255] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9f565dd-debf-4333-853c-f255a9132fa8 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1392.771981] env[62519]: DEBUG nova.compute.provider_tree [None req-e8efc79a-e5f9-47fc-b2c3-ac5d306260b4 tempest-ServerDiagnosticsTest-1027319288 tempest-ServerDiagnosticsTest-1027319288-project-member] Inventory has not changed in ProviderTree for provider: f8ca0d98-9158-4b85-ae0e-b106f966dd44 {{(pid=62519) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1392.789245] env[62519]: DEBUG oslo_vmware.api [None req-c40feabf-b595-479f-ad29-1457519d4d7d tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Task: {'id': task-1801908, 'name': Rename_Task, 'duration_secs': 0.155142} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1392.789957] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-c40feabf-b595-479f-ad29-1457519d4d7d tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] [instance: fe350d30-6fbd-4813-9634-ed05984fecfd] Powering on the VM {{(pid=62519) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1392.789957] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-4611521b-1569-4568-aad9-1477503e78f8 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1392.796906] env[62519]: DEBUG oslo_vmware.api [None req-c40feabf-b595-479f-ad29-1457519d4d7d tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Waiting for the task: (returnval){ [ 1392.796906] env[62519]: value = "task-1801911" [ 1392.796906] env[62519]: _type = "Task" [ 1392.796906] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1392.805710] env[62519]: DEBUG oslo_vmware.api [None req-c40feabf-b595-479f-ad29-1457519d4d7d tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Task: {'id': task-1801911, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1392.909094] env[62519]: DEBUG oslo_vmware.api [None req-34140d02-73d9-488b-8a14-f48460d5079c tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Task: {'id': task-1801909, 'name': Rename_Task, 'duration_secs': 0.212755} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1392.909962] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-34140d02-73d9-488b-8a14-f48460d5079c tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] [instance: 8b178cc0-db79-4ec2-8962-f31b936f8eff] Powering on the VM {{(pid=62519) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1392.910500] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-656a6765-e151-48da-ada2-daecfb62187d {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1392.917136] env[62519]: DEBUG oslo_vmware.api [None req-34140d02-73d9-488b-8a14-f48460d5079c tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Waiting for the task: (returnval){ [ 1392.917136] env[62519]: value = "task-1801912" [ 1392.917136] env[62519]: _type = "Task" [ 1392.917136] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1392.926652] env[62519]: DEBUG oslo_vmware.api [None req-34140d02-73d9-488b-8a14-f48460d5079c tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Task: {'id': task-1801912, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1393.040430] env[62519]: DEBUG oslo_vmware.api [None req-93eb857c-a392-41fa-ab77-ecb0c92fa9f6 tempest-ServerDiagnosticsNegativeTest-1844110598 tempest-ServerDiagnosticsNegativeTest-1844110598-project-member] Task: {'id': task-1801910, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.494315} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1393.040723] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-93eb857c-a392-41fa-ab77-ecb0c92fa9f6 tempest-ServerDiagnosticsNegativeTest-1844110598 tempest-ServerDiagnosticsNegativeTest-1844110598-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk to [datastore1] c616d8ec-f28a-4430-a336-1ea4790fd511/c616d8ec-f28a-4430-a336-1ea4790fd511.vmdk {{(pid=62519) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1393.040864] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-93eb857c-a392-41fa-ab77-ecb0c92fa9f6 tempest-ServerDiagnosticsNegativeTest-1844110598 tempest-ServerDiagnosticsNegativeTest-1844110598-project-member] [instance: c616d8ec-f28a-4430-a336-1ea4790fd511] Extending root virtual disk to 1048576 {{(pid=62519) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1393.041136] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-75ac1b97-4736-431f-897b-3c5cc25fc1da {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1393.049035] env[62519]: DEBUG oslo_vmware.api [None req-93eb857c-a392-41fa-ab77-ecb0c92fa9f6 tempest-ServerDiagnosticsNegativeTest-1844110598 tempest-ServerDiagnosticsNegativeTest-1844110598-project-member] Waiting for the task: (returnval){ [ 1393.049035] env[62519]: value = "task-1801913" [ 1393.049035] env[62519]: _type = "Task" [ 1393.049035] env[62519]: } to complete. 
{{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1393.064094] env[62519]: DEBUG oslo_vmware.api [None req-93eb857c-a392-41fa-ab77-ecb0c92fa9f6 tempest-ServerDiagnosticsNegativeTest-1844110598 tempest-ServerDiagnosticsNegativeTest-1844110598-project-member] Task: {'id': task-1801913, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1393.283659] env[62519]: DEBUG nova.scheduler.client.report [None req-e8efc79a-e5f9-47fc-b2c3-ac5d306260b4 tempest-ServerDiagnosticsTest-1027319288 tempest-ServerDiagnosticsTest-1027319288-project-member] Inventory has not changed for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 159, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1393.294249] env[62519]: INFO nova.compute.manager [None req-5e4798b6-5fc6-48b8-933e-b53f21d26399 tempest-DeleteServersAdminTestJSON-456585889 tempest-DeleteServersAdminTestJSON-456585889-project-member] [instance: f6e29557-05ad-4a11-bd01-0315926c0413] Took 21.71 seconds to build instance. [ 1393.311255] env[62519]: DEBUG oslo_vmware.api [None req-c40feabf-b595-479f-ad29-1457519d4d7d tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Task: {'id': task-1801911, 'name': PowerOnVM_Task} progress is 96%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1393.343765] env[62519]: DEBUG nova.network.neutron [None req-2c6d3fbe-1808-4f6d-8aed-7a7c11b6565e tempest-ServersAdminNegativeTestJSON-2096466474 tempest-ServersAdminNegativeTestJSON-2096466474-project-member] [instance: c8b7568b-ba07-4f65-818b-f84910209361] Updating instance_info_cache with network_info: [{"id": "e03efe42-da32-408a-a635-c94ee5a55303", "address": "fa:16:3e:e0:64:4d", "network": {"id": "b4c20e4e-b4f0-4757-9ab0-b74eef10b678", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.52", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "4069337684d348e0a1ab4eb6a1a2b14d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fe99da4f-5630-4afd-918b-b327193d8489", "external-id": "nsx-vlan-transportzone-688", "segmentation_id": 688, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape03efe42-da", "ovs_interfaceid": "e03efe42-da32-408a-a635-c94ee5a55303", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1393.346344] env[62519]: DEBUG nova.compute.manager [None req-7bb4ea3e-e5b3-4b8e-bc50-4a4b860c9418 tempest-ServerDiagnosticsV248Test-635217058 tempest-ServerDiagnosticsV248Test-635217058-project-member] [instance: ceadcb5e-ee82-4441-b046-f79b973ec05e] Start spawning the instance on the hypervisor. 
{{(pid=62519) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2647}} [ 1393.383876] env[62519]: DEBUG nova.virt.hardware [None req-7bb4ea3e-e5b3-4b8e-bc50-4a4b860c9418 tempest-ServerDiagnosticsV248Test-635217058 tempest-ServerDiagnosticsV248Test-635217058-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T07:56:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T07:55:55Z,direct_url=,disk_format='vmdk',id=15793716-f1d9-4a86-9030-717adf498693,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4069337684d348e0a1ab4eb6a1a2b14d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T07:55:56Z,virtual_size=,visibility=), allow threads: False {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1393.384135] env[62519]: DEBUG nova.virt.hardware [None req-7bb4ea3e-e5b3-4b8e-bc50-4a4b860c9418 tempest-ServerDiagnosticsV248Test-635217058 tempest-ServerDiagnosticsV248Test-635217058-project-member] Flavor limits 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1393.384301] env[62519]: DEBUG nova.virt.hardware [None req-7bb4ea3e-e5b3-4b8e-bc50-4a4b860c9418 tempest-ServerDiagnosticsV248Test-635217058 tempest-ServerDiagnosticsV248Test-635217058-project-member] Image limits 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1393.384471] env[62519]: DEBUG nova.virt.hardware [None req-7bb4ea3e-e5b3-4b8e-bc50-4a4b860c9418 tempest-ServerDiagnosticsV248Test-635217058 tempest-ServerDiagnosticsV248Test-635217058-project-member] Flavor pref 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1393.385044] env[62519]: DEBUG nova.virt.hardware [None req-7bb4ea3e-e5b3-4b8e-bc50-4a4b860c9418 tempest-ServerDiagnosticsV248Test-635217058 tempest-ServerDiagnosticsV248Test-635217058-project-member] Image pref 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1393.385044] env[62519]: DEBUG nova.virt.hardware [None req-7bb4ea3e-e5b3-4b8e-bc50-4a4b860c9418 tempest-ServerDiagnosticsV248Test-635217058 tempest-ServerDiagnosticsV248Test-635217058-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1393.385044] env[62519]: DEBUG nova.virt.hardware [None req-7bb4ea3e-e5b3-4b8e-bc50-4a4b860c9418 tempest-ServerDiagnosticsV248Test-635217058 tempest-ServerDiagnosticsV248Test-635217058-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1393.385262] env[62519]: DEBUG nova.virt.hardware [None req-7bb4ea3e-e5b3-4b8e-bc50-4a4b860c9418 tempest-ServerDiagnosticsV248Test-635217058 tempest-ServerDiagnosticsV248Test-635217058-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62519) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1393.385322] 
env[62519]: DEBUG nova.virt.hardware [None req-7bb4ea3e-e5b3-4b8e-bc50-4a4b860c9418 tempest-ServerDiagnosticsV248Test-635217058 tempest-ServerDiagnosticsV248Test-635217058-project-member] Got 1 possible topologies {{(pid=62519) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1393.386040] env[62519]: DEBUG nova.virt.hardware [None req-7bb4ea3e-e5b3-4b8e-bc50-4a4b860c9418 tempest-ServerDiagnosticsV248Test-635217058 tempest-ServerDiagnosticsV248Test-635217058-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1393.386040] env[62519]: DEBUG nova.virt.hardware [None req-7bb4ea3e-e5b3-4b8e-bc50-4a4b860c9418 tempest-ServerDiagnosticsV248Test-635217058 tempest-ServerDiagnosticsV248Test-635217058-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1393.387207] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb32cc13-63e9-4661-9812-55067ae5fecc {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1393.397043] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-702dc0b1-a09c-4f2d-b3a7-6749df8ff2ab {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1393.415190] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-7bb4ea3e-e5b3-4b8e-bc50-4a4b860c9418 tempest-ServerDiagnosticsV248Test-635217058 tempest-ServerDiagnosticsV248Test-635217058-project-member] [instance: ceadcb5e-ee82-4441-b046-f79b973ec05e] Instance VIF info [] {{(pid=62519) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1393.422905] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-7bb4ea3e-e5b3-4b8e-bc50-4a4b860c9418 tempest-ServerDiagnosticsV248Test-635217058 tempest-ServerDiagnosticsV248Test-635217058-project-member] Creating folder: Project (6560cd0055f94707a91ca6e638707e54). Parent ref: group-v373567. {{(pid=62519) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1393.422905] env[62519]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-28d6e185-10cb-478a-b6ca-dd1a670ee5c0 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1393.435542] env[62519]: DEBUG oslo_vmware.api [None req-34140d02-73d9-488b-8a14-f48460d5079c tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Task: {'id': task-1801912, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1393.437349] env[62519]: INFO nova.virt.vmwareapi.vm_util [None req-7bb4ea3e-e5b3-4b8e-bc50-4a4b860c9418 tempest-ServerDiagnosticsV248Test-635217058 tempest-ServerDiagnosticsV248Test-635217058-project-member] Created folder: Project (6560cd0055f94707a91ca6e638707e54) in parent group-v373567. [ 1393.437473] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-7bb4ea3e-e5b3-4b8e-bc50-4a4b860c9418 tempest-ServerDiagnosticsV248Test-635217058 tempest-ServerDiagnosticsV248Test-635217058-project-member] Creating folder: Instances. Parent ref: group-v373580. 
{{(pid=62519) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1393.437713] env[62519]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-0da6fe54-c441-4dea-99ed-a217c156b3ab {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1393.450761] env[62519]: INFO nova.virt.vmwareapi.vm_util [None req-7bb4ea3e-e5b3-4b8e-bc50-4a4b860c9418 tempest-ServerDiagnosticsV248Test-635217058 tempest-ServerDiagnosticsV248Test-635217058-project-member] Created folder: Instances in parent group-v373580. [ 1393.450761] env[62519]: DEBUG oslo.service.loopingcall [None req-7bb4ea3e-e5b3-4b8e-bc50-4a4b860c9418 tempest-ServerDiagnosticsV248Test-635217058 tempest-ServerDiagnosticsV248Test-635217058-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62519) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1393.450761] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ceadcb5e-ee82-4441-b046-f79b973ec05e] Creating VM on the ESX host {{(pid=62519) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1393.450761] env[62519]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-3fb83d08-8d9c-4511-979b-fb4999e221d0 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1393.468970] env[62519]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1393.468970] env[62519]: value = "task-1801916" [ 1393.468970] env[62519]: _type = "Task" [ 1393.468970] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1393.477012] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1801916, 'name': CreateVM_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1393.538719] env[62519]: DEBUG nova.network.neutron [req-5e29af65-3c68-40fe-91f4-c1132de9dccd req-29797075-6489-4398-b6c5-9ca64544eb8b service nova] [instance: c616d8ec-f28a-4430-a336-1ea4790fd511] Updated VIF entry in instance network info cache for port f9e01aea-e1f6-4372-aa97-658cfe3480e2. 
{{(pid=62519) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1393.546023] env[62519]: DEBUG nova.network.neutron [req-5e29af65-3c68-40fe-91f4-c1132de9dccd req-29797075-6489-4398-b6c5-9ca64544eb8b service nova] [instance: c616d8ec-f28a-4430-a336-1ea4790fd511] Updating instance_info_cache with network_info: [{"id": "f9e01aea-e1f6-4372-aa97-658cfe3480e2", "address": "fa:16:3e:89:1a:75", "network": {"id": "b4c20e4e-b4f0-4757-9ab0-b74eef10b678", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.84", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "4069337684d348e0a1ab4eb6a1a2b14d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fe99da4f-5630-4afd-918b-b327193d8489", "external-id": "nsx-vlan-transportzone-688", "segmentation_id": 688, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf9e01aea-e1", "ovs_interfaceid": "f9e01aea-e1f6-4372-aa97-658cfe3480e2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1393.561236] env[62519]: DEBUG oslo_vmware.api [None req-93eb857c-a392-41fa-ab77-ecb0c92fa9f6 tempest-ServerDiagnosticsNegativeTest-1844110598 tempest-ServerDiagnosticsNegativeTest-1844110598-project-member] Task: {'id': task-1801913, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.0625} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1393.561574] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-93eb857c-a392-41fa-ab77-ecb0c92fa9f6 tempest-ServerDiagnosticsNegativeTest-1844110598 tempest-ServerDiagnosticsNegativeTest-1844110598-project-member] [instance: c616d8ec-f28a-4430-a336-1ea4790fd511] Extended root virtual disk {{(pid=62519) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1393.562424] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-778359b7-a6ba-497d-b885-b4a5a23c2fe3 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1393.590170] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-93eb857c-a392-41fa-ab77-ecb0c92fa9f6 tempest-ServerDiagnosticsNegativeTest-1844110598 tempest-ServerDiagnosticsNegativeTest-1844110598-project-member] [instance: c616d8ec-f28a-4430-a336-1ea4790fd511] Reconfiguring VM instance instance-00000004 to attach disk [datastore1] c616d8ec-f28a-4430-a336-1ea4790fd511/c616d8ec-f28a-4430-a336-1ea4790fd511.vmdk or device None with type sparse {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1393.590580] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6adfdd1f-12d3-4063-9997-a586629334ee {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1393.613123] env[62519]: DEBUG oslo_vmware.api [None req-93eb857c-a392-41fa-ab77-ecb0c92fa9f6 tempest-ServerDiagnosticsNegativeTest-1844110598 tempest-ServerDiagnosticsNegativeTest-1844110598-project-member] Waiting for the task: (returnval){ [ 1393.613123] env[62519]: value = "task-1801917" [ 1393.613123] env[62519]: _type = "Task" [ 1393.613123] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1393.621366] env[62519]: DEBUG oslo_vmware.api [None req-93eb857c-a392-41fa-ab77-ecb0c92fa9f6 tempest-ServerDiagnosticsNegativeTest-1844110598 tempest-ServerDiagnosticsNegativeTest-1844110598-project-member] Task: {'id': task-1801917, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1393.762489] env[62519]: DEBUG oslo_service.periodic_task [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62519) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1393.792764] env[62519]: DEBUG oslo_concurrency.lockutils [None req-e8efc79a-e5f9-47fc-b2c3-ac5d306260b4 tempest-ServerDiagnosticsTest-1027319288 tempest-ServerDiagnosticsTest-1027319288-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.485s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1393.795367] env[62519]: DEBUG nova.compute.manager [None req-e8efc79a-e5f9-47fc-b2c3-ac5d306260b4 tempest-ServerDiagnosticsTest-1027319288 tempest-ServerDiagnosticsTest-1027319288-project-member] [instance: 1118c1e5-1aa8-4f52-9fb9-e86531bf83d1] Start building networks asynchronously for instance. 
{{(pid=62519) _build_resources /opt/stack/nova/nova/compute/manager.py:2838}} [ 1393.798958] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2bcdd96e-4e47-4b74-bed4-3230c87035c0 tempest-ServersV294TestFqdnHostnames-312089260 tempest-ServersV294TestFqdnHostnames-312089260-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 9.989s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1393.802269] env[62519]: INFO nova.compute.claims [None req-2bcdd96e-4e47-4b74-bed4-3230c87035c0 tempest-ServersV294TestFqdnHostnames-312089260 tempest-ServersV294TestFqdnHostnames-312089260-project-member] [instance: 2b391628-18a2-4606-8c59-58ba642cee50] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1393.817932] env[62519]: DEBUG oslo_vmware.api [None req-c40feabf-b595-479f-ad29-1457519d4d7d tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Task: {'id': task-1801911, 'name': PowerOnVM_Task, 'duration_secs': 0.562349} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1393.818998] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-c40feabf-b595-479f-ad29-1457519d4d7d tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] [instance: fe350d30-6fbd-4813-9634-ed05984fecfd] Powered on the VM {{(pid=62519) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1393.818998] env[62519]: INFO nova.compute.manager [None req-c40feabf-b595-479f-ad29-1457519d4d7d tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] [instance: fe350d30-6fbd-4813-9634-ed05984fecfd] Took 14.43 seconds to spawn the instance on the hypervisor. 
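The nova.virt.hardware lines above walk through the CPU topology computation for the m1.nano flavor: with vcpus=1 and no flavor or image constraints, the limits default to 65536 sockets, cores and threads, and the only factorisation of one vCPU is 1 socket x 1 core x 1 thread, hence the single VirtCPUTopology(cores=1,sockets=1,threads=1) result. A stand-alone sketch of that enumeration (the function below is illustrative, not Nova's actual implementation):

# Illustrative enumeration of (sockets, cores, threads) splits for a vCPU
# count, mirroring the 1:1:1 result logged above; not Nova's real code.
def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
    topologies = []
    for sockets in range(1, min(vcpus, max_sockets) + 1):
        for cores in range(1, min(vcpus, max_cores) + 1):
            for threads in range(1, min(vcpus, max_threads) + 1):
                if sockets * cores * threads == vcpus:
                    topologies.append((sockets, cores, threads))
    return topologies

print(possible_topologies(1))  # [(1, 1, 1)], matching the log above
print(possible_topologies(4))  # includes (1, 4, 1), (2, 2, 1), (4, 1, 1), ...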
[ 1393.818998] env[62519]: DEBUG nova.compute.manager [None req-c40feabf-b595-479f-ad29-1457519d4d7d tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] [instance: fe350d30-6fbd-4813-9634-ed05984fecfd] Checking state {{(pid=62519) _get_power_state /opt/stack/nova/nova/compute/manager.py:1799}} [ 1393.821029] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99d02681-9a6c-4369-9dc9-d94c64a29701 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1393.849112] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2c6d3fbe-1808-4f6d-8aed-7a7c11b6565e tempest-ServersAdminNegativeTestJSON-2096466474 tempest-ServersAdminNegativeTestJSON-2096466474-project-member] Releasing lock "refresh_cache-c8b7568b-ba07-4f65-818b-f84910209361" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1393.849947] env[62519]: DEBUG nova.compute.manager [None req-2c6d3fbe-1808-4f6d-8aed-7a7c11b6565e tempest-ServersAdminNegativeTestJSON-2096466474 tempest-ServersAdminNegativeTestJSON-2096466474-project-member] [instance: c8b7568b-ba07-4f65-818b-f84910209361] Instance network_info: |[{"id": "e03efe42-da32-408a-a635-c94ee5a55303", "address": "fa:16:3e:e0:64:4d", "network": {"id": "b4c20e4e-b4f0-4757-9ab0-b74eef10b678", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.52", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "4069337684d348e0a1ab4eb6a1a2b14d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fe99da4f-5630-4afd-918b-b327193d8489", "external-id": "nsx-vlan-transportzone-688", "segmentation_id": 688, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape03efe42-da", "ovs_interfaceid": "e03efe42-da32-408a-a635-c94ee5a55303", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62519) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2004}} [ 1393.850432] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-2c6d3fbe-1808-4f6d-8aed-7a7c11b6565e tempest-ServersAdminNegativeTestJSON-2096466474 tempest-ServersAdminNegativeTestJSON-2096466474-project-member] [instance: c8b7568b-ba07-4f65-818b-f84910209361] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:e0:64:4d', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'fe99da4f-5630-4afd-918b-b327193d8489', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'e03efe42-da32-408a-a635-c94ee5a55303', 'vif_model': 'vmxnet3'}] {{(pid=62519) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1393.862034] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-2c6d3fbe-1808-4f6d-8aed-7a7c11b6565e tempest-ServersAdminNegativeTestJSON-2096466474 tempest-ServersAdminNegativeTestJSON-2096466474-project-member] Creating folder: Project (edcdc78fe2504bebb5c834930b20d32e). Parent ref: group-v373567. 
{{(pid=62519) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1393.863638] env[62519]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-4953f81d-7880-4433-a890-f142e24043ca {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1393.878551] env[62519]: INFO nova.virt.vmwareapi.vm_util [None req-2c6d3fbe-1808-4f6d-8aed-7a7c11b6565e tempest-ServersAdminNegativeTestJSON-2096466474 tempest-ServersAdminNegativeTestJSON-2096466474-project-member] Created folder: Project (edcdc78fe2504bebb5c834930b20d32e) in parent group-v373567. [ 1393.878748] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-2c6d3fbe-1808-4f6d-8aed-7a7c11b6565e tempest-ServersAdminNegativeTestJSON-2096466474 tempest-ServersAdminNegativeTestJSON-2096466474-project-member] Creating folder: Instances. Parent ref: group-v373583. {{(pid=62519) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1393.879109] env[62519]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-acc9963b-c862-406e-881e-8ea7d4e4cd75 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1393.888890] env[62519]: INFO nova.virt.vmwareapi.vm_util [None req-2c6d3fbe-1808-4f6d-8aed-7a7c11b6565e tempest-ServersAdminNegativeTestJSON-2096466474 tempest-ServersAdminNegativeTestJSON-2096466474-project-member] Created folder: Instances in parent group-v373583. [ 1393.889150] env[62519]: DEBUG oslo.service.loopingcall [None req-2c6d3fbe-1808-4f6d-8aed-7a7c11b6565e tempest-ServersAdminNegativeTestJSON-2096466474 tempest-ServersAdminNegativeTestJSON-2096466474-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62519) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1393.889687] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c8b7568b-ba07-4f65-818b-f84910209361] Creating VM on the ESX host {{(pid=62519) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1393.889990] env[62519]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f5b9e139-9b22-4356-9022-351c72a0f7e5 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1393.917750] env[62519]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1393.917750] env[62519]: value = "task-1801920" [ 1393.917750] env[62519]: _type = "Task" [ 1393.917750] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1393.926524] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1801920, 'name': CreateVM_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1393.935110] env[62519]: DEBUG oslo_vmware.api [None req-34140d02-73d9-488b-8a14-f48460d5079c tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Task: {'id': task-1801912, 'name': PowerOnVM_Task, 'duration_secs': 0.810872} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1393.935110] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-34140d02-73d9-488b-8a14-f48460d5079c tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] [instance: 8b178cc0-db79-4ec2-8962-f31b936f8eff] Powered on the VM {{(pid=62519) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1393.935213] env[62519]: INFO nova.compute.manager [None req-34140d02-73d9-488b-8a14-f48460d5079c tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] [instance: 8b178cc0-db79-4ec2-8962-f31b936f8eff] Took 9.93 seconds to spawn the instance on the hypervisor. [ 1393.935316] env[62519]: DEBUG nova.compute.manager [None req-34140d02-73d9-488b-8a14-f48460d5079c tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] [instance: 8b178cc0-db79-4ec2-8962-f31b936f8eff] Checking state {{(pid=62519) _get_power_state /opt/stack/nova/nova/compute/manager.py:1799}} [ 1393.936373] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c4cbde3-77b1-4b3e-be1c-3f696add4d67 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1393.982310] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1801916, 'name': CreateVM_Task, 'duration_secs': 0.478394} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1393.982687] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ceadcb5e-ee82-4441-b046-f79b973ec05e] Created VM on the ESX host {{(pid=62519) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1393.982991] env[62519]: DEBUG oslo_concurrency.lockutils [None req-7bb4ea3e-e5b3-4b8e-bc50-4a4b860c9418 tempest-ServerDiagnosticsV248Test-635217058 tempest-ServerDiagnosticsV248Test-635217058-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1393.983110] env[62519]: DEBUG oslo_concurrency.lockutils [None req-7bb4ea3e-e5b3-4b8e-bc50-4a4b860c9418 tempest-ServerDiagnosticsV248Test-635217058 tempest-ServerDiagnosticsV248Test-635217058-project-member] Acquired lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1393.983385] env[62519]: DEBUG oslo_concurrency.lockutils [None req-7bb4ea3e-e5b3-4b8e-bc50-4a4b860c9418 tempest-ServerDiagnosticsV248Test-635217058 tempest-ServerDiagnosticsV248Test-635217058-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1393.983652] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1116e0bf-aa37-4e8e-98f5-7469a485c4b1 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1393.988421] env[62519]: DEBUG oslo_vmware.api [None req-7bb4ea3e-e5b3-4b8e-bc50-4a4b860c9418 tempest-ServerDiagnosticsV248Test-635217058 tempest-ServerDiagnosticsV248Test-635217058-project-member] Waiting for the task: 
(returnval){ [ 1393.988421] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]5294914f-1034-f29f-be71-06af57f0b310" [ 1393.988421] env[62519]: _type = "Task" [ 1393.988421] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1393.996874] env[62519]: DEBUG oslo_vmware.api [None req-7bb4ea3e-e5b3-4b8e-bc50-4a4b860c9418 tempest-ServerDiagnosticsV248Test-635217058 tempest-ServerDiagnosticsV248Test-635217058-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]5294914f-1034-f29f-be71-06af57f0b310, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1394.043896] env[62519]: DEBUG oslo_concurrency.lockutils [req-5e29af65-3c68-40fe-91f4-c1132de9dccd req-29797075-6489-4398-b6c5-9ca64544eb8b service nova] Releasing lock "refresh_cache-c616d8ec-f28a-4430-a336-1ea4790fd511" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1394.044580] env[62519]: DEBUG nova.compute.manager [req-5e29af65-3c68-40fe-91f4-c1132de9dccd req-29797075-6489-4398-b6c5-9ca64544eb8b service nova] [instance: c8b7568b-ba07-4f65-818b-f84910209361] Received event network-vif-plugged-e03efe42-da32-408a-a635-c94ee5a55303 {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1394.044883] env[62519]: DEBUG oslo_concurrency.lockutils [req-5e29af65-3c68-40fe-91f4-c1132de9dccd req-29797075-6489-4398-b6c5-9ca64544eb8b service nova] Acquiring lock "c8b7568b-ba07-4f65-818b-f84910209361-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1394.044967] env[62519]: DEBUG oslo_concurrency.lockutils [req-5e29af65-3c68-40fe-91f4-c1132de9dccd req-29797075-6489-4398-b6c5-9ca64544eb8b service nova] Lock "c8b7568b-ba07-4f65-818b-f84910209361-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1394.045233] env[62519]: DEBUG oslo_concurrency.lockutils [req-5e29af65-3c68-40fe-91f4-c1132de9dccd req-29797075-6489-4398-b6c5-9ca64544eb8b service nova] Lock "c8b7568b-ba07-4f65-818b-f84910209361-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1394.045456] env[62519]: DEBUG nova.compute.manager [req-5e29af65-3c68-40fe-91f4-c1132de9dccd req-29797075-6489-4398-b6c5-9ca64544eb8b service nova] [instance: c8b7568b-ba07-4f65-818b-f84910209361] No waiting events found dispatching network-vif-plugged-e03efe42-da32-408a-a635-c94ee5a55303 {{(pid=62519) pop_instance_event /opt/stack/nova/nova/compute/manager.py:323}} [ 1394.045824] env[62519]: WARNING nova.compute.manager [req-5e29af65-3c68-40fe-91f4-c1132de9dccd req-29797075-6489-4398-b6c5-9ca64544eb8b service nova] [instance: c8b7568b-ba07-4f65-818b-f84910209361] Received unexpected event network-vif-plugged-e03efe42-da32-408a-a635-c94ee5a55303 for instance with vm_state building and task_state spawning. 
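The network-changed and network-vif-plugged handling above is driven by Neutron calling Nova's os-server-external-events API; Nova then either wakes a registered waiter or, as in the WARNING above, records the event as unexpected and carries on. A hedged sketch of the kind of request Neutron sends, reusing the server and port UUIDs from this log; the endpoint URL and token are placeholders:

# Placeholder URL and token; the payload shape follows the
# os-server-external-events API, with UUIDs taken from the log above.
import requests

event = {
    "events": [{
        "name": "network-vif-plugged",
        "server_uuid": "c8b7568b-ba07-4f65-818b-f84910209361",
        "tag": "e03efe42-da32-408a-a635-c94ee5a55303",
        "status": "completed",
    }]
}
resp = requests.post(
    "http://nova-api.example.org:8774/v2.1/os-server-external-events",
    json=event,
    headers={"X-Auth-Token": "REDACTED"},
)
resp.raise_for_status()

When no waiter is registered for the (name, tag) pair, Nova logs the "Received unexpected event" warning seen above rather than failing the request.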
[ 1394.122806] env[62519]: DEBUG oslo_vmware.api [None req-93eb857c-a392-41fa-ab77-ecb0c92fa9f6 tempest-ServerDiagnosticsNegativeTest-1844110598 tempest-ServerDiagnosticsNegativeTest-1844110598-project-member] Task: {'id': task-1801917, 'name': ReconfigVM_Task, 'duration_secs': 0.301037} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1394.123633] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-93eb857c-a392-41fa-ab77-ecb0c92fa9f6 tempest-ServerDiagnosticsNegativeTest-1844110598 tempest-ServerDiagnosticsNegativeTest-1844110598-project-member] [instance: c616d8ec-f28a-4430-a336-1ea4790fd511] Reconfigured VM instance instance-00000004 to attach disk [datastore1] c616d8ec-f28a-4430-a336-1ea4790fd511/c616d8ec-f28a-4430-a336-1ea4790fd511.vmdk or device None with type sparse {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1394.124861] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-b9345c71-8c3c-4134-9e48-047e08da5103 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1394.132603] env[62519]: DEBUG oslo_vmware.api [None req-93eb857c-a392-41fa-ab77-ecb0c92fa9f6 tempest-ServerDiagnosticsNegativeTest-1844110598 tempest-ServerDiagnosticsNegativeTest-1844110598-project-member] Waiting for the task: (returnval){ [ 1394.132603] env[62519]: value = "task-1801921" [ 1394.132603] env[62519]: _type = "Task" [ 1394.132603] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1394.143461] env[62519]: DEBUG oslo_vmware.api [None req-93eb857c-a392-41fa-ab77-ecb0c92fa9f6 tempest-ServerDiagnosticsNegativeTest-1844110598 tempest-ServerDiagnosticsNegativeTest-1844110598-project-member] Task: {'id': task-1801921, 'name': Rename_Task} progress is 5%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1394.308764] env[62519]: DEBUG nova.compute.utils [None req-e8efc79a-e5f9-47fc-b2c3-ac5d306260b4 tempest-ServerDiagnosticsTest-1027319288 tempest-ServerDiagnosticsTest-1027319288-project-member] Using /dev/sd instead of None {{(pid=62519) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1394.313551] env[62519]: DEBUG nova.compute.manager [None req-e8efc79a-e5f9-47fc-b2c3-ac5d306260b4 tempest-ServerDiagnosticsTest-1027319288 tempest-ServerDiagnosticsTest-1027319288-project-member] [instance: 1118c1e5-1aa8-4f52-9fb9-e86531bf83d1] Allocating IP information in the background. {{(pid=62519) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1989}} [ 1394.313551] env[62519]: DEBUG nova.network.neutron [None req-e8efc79a-e5f9-47fc-b2c3-ac5d306260b4 tempest-ServerDiagnosticsTest-1027319288 tempest-ServerDiagnosticsTest-1027319288-project-member] [instance: 1118c1e5-1aa8-4f52-9fb9-e86531bf83d1] allocate_for_instance() {{(pid=62519) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1394.357417] env[62519]: INFO nova.compute.manager [None req-c40feabf-b595-479f-ad29-1457519d4d7d tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] [instance: fe350d30-6fbd-4813-9634-ed05984fecfd] Took 19.17 seconds to build instance. 
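The Acquiring/acquired/released lock messages throughout this section (refresh_cache-*, compute_resources, the *-events lock above) come from oslo.concurrency's lockutils helpers, which also record how long each caller waited for and held a named lock. A minimal sketch of both usage forms; the lock name and function body here are illustrative only:

# Illustrative only: the lock name and body are made up; lockutils itself
# emits the Acquiring/acquired/released debug lines seen in this log.
from oslo_concurrency import lockutils

@lockutils.synchronized('compute_resources')
def claim_resources(instance_uuid):
    # Runs with the named in-process lock held; the wrapper logs
    # 'acquired ... waited Ns' and 'released ... held Ns'.
    return instance_uuid

# The same lock can also be taken explicitly as a context manager.
with lockutils.lock('compute_resources'):
    pass  # critical section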
[ 1394.424325] env[62519]: DEBUG nova.policy [None req-e8efc79a-e5f9-47fc-b2c3-ac5d306260b4 tempest-ServerDiagnosticsTest-1027319288 tempest-ServerDiagnosticsTest-1027319288-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'e0dd3744b3a74cf3a374fc8c8be1d962', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'faf80934d7754c9ba0414f2e970979cf', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62519) authorize /opt/stack/nova/nova/policy.py:192}} [ 1394.433975] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1801920, 'name': CreateVM_Task} progress is 25%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1394.466473] env[62519]: INFO nova.compute.manager [None req-34140d02-73d9-488b-8a14-f48460d5079c tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] [instance: 8b178cc0-db79-4ec2-8962-f31b936f8eff] Took 14.78 seconds to build instance. [ 1394.506317] env[62519]: DEBUG oslo_vmware.api [None req-7bb4ea3e-e5b3-4b8e-bc50-4a4b860c9418 tempest-ServerDiagnosticsV248Test-635217058 tempest-ServerDiagnosticsV248Test-635217058-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]5294914f-1034-f29f-be71-06af57f0b310, 'name': SearchDatastore_Task, 'duration_secs': 0.009571} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1394.506317] env[62519]: DEBUG oslo_concurrency.lockutils [None req-7bb4ea3e-e5b3-4b8e-bc50-4a4b860c9418 tempest-ServerDiagnosticsV248Test-635217058 tempest-ServerDiagnosticsV248Test-635217058-project-member] Releasing lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1394.506317] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-7bb4ea3e-e5b3-4b8e-bc50-4a4b860c9418 tempest-ServerDiagnosticsV248Test-635217058 tempest-ServerDiagnosticsV248Test-635217058-project-member] [instance: ceadcb5e-ee82-4441-b046-f79b973ec05e] Processing image 15793716-f1d9-4a86-9030-717adf498693 {{(pid=62519) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1394.506317] env[62519]: DEBUG oslo_concurrency.lockutils [None req-7bb4ea3e-e5b3-4b8e-bc50-4a4b860c9418 tempest-ServerDiagnosticsV248Test-635217058 tempest-ServerDiagnosticsV248Test-635217058-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1394.506528] env[62519]: DEBUG oslo_concurrency.lockutils [None req-7bb4ea3e-e5b3-4b8e-bc50-4a4b860c9418 tempest-ServerDiagnosticsV248Test-635217058 tempest-ServerDiagnosticsV248Test-635217058-project-member] Acquired lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1394.506528] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-7bb4ea3e-e5b3-4b8e-bc50-4a4b860c9418 
tempest-ServerDiagnosticsV248Test-635217058 tempest-ServerDiagnosticsV248Test-635217058-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62519) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1394.506594] env[62519]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-4b2dd8c2-92f2-46a2-a273-d56aea91db6c {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1394.516348] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-7bb4ea3e-e5b3-4b8e-bc50-4a4b860c9418 tempest-ServerDiagnosticsV248Test-635217058 tempest-ServerDiagnosticsV248Test-635217058-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62519) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1394.516516] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-7bb4ea3e-e5b3-4b8e-bc50-4a4b860c9418 tempest-ServerDiagnosticsV248Test-635217058 tempest-ServerDiagnosticsV248Test-635217058-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62519) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1394.518395] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8a919584-6b56-443b-afd6-914f6985192e {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1394.524207] env[62519]: DEBUG oslo_vmware.api [None req-7bb4ea3e-e5b3-4b8e-bc50-4a4b860c9418 tempest-ServerDiagnosticsV248Test-635217058 tempest-ServerDiagnosticsV248Test-635217058-project-member] Waiting for the task: (returnval){ [ 1394.524207] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]52a07c3f-d793-795d-17ac-4603a7b2839d" [ 1394.524207] env[62519]: _type = "Task" [ 1394.524207] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1394.533987] env[62519]: DEBUG oslo_vmware.api [None req-7bb4ea3e-e5b3-4b8e-bc50-4a4b860c9418 tempest-ServerDiagnosticsV248Test-635217058 tempest-ServerDiagnosticsV248Test-635217058-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52a07c3f-d793-795d-17ac-4603a7b2839d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1394.646685] env[62519]: DEBUG oslo_vmware.api [None req-93eb857c-a392-41fa-ab77-ecb0c92fa9f6 tempest-ServerDiagnosticsNegativeTest-1844110598 tempest-ServerDiagnosticsNegativeTest-1844110598-project-member] Task: {'id': task-1801921, 'name': Rename_Task} progress is 14%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1394.762565] env[62519]: DEBUG oslo_service.periodic_task [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62519) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1394.762565] env[62519]: DEBUG oslo_service.periodic_task [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Running periodic task ComputeManager.update_available_resource {{(pid=62519) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1394.813829] env[62519]: DEBUG nova.compute.manager [None req-e8efc79a-e5f9-47fc-b2c3-ac5d306260b4 tempest-ServerDiagnosticsTest-1027319288 tempest-ServerDiagnosticsTest-1027319288-project-member] [instance: 1118c1e5-1aa8-4f52-9fb9-e86531bf83d1] Start building block device mappings for instance. {{(pid=62519) _build_resources /opt/stack/nova/nova/compute/manager.py:2873}} [ 1394.831537] env[62519]: DEBUG oslo_concurrency.lockutils [None req-5e4798b6-5fc6-48b8-933e-b53f21d26399 tempest-DeleteServersAdminTestJSON-456585889 tempest-DeleteServersAdminTestJSON-456585889-project-member] Lock "f6e29557-05ad-4a11-bd01-0315926c0413" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 24.262s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1394.931463] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1801920, 'name': CreateVM_Task} progress is 25%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1395.040165] env[62519]: DEBUG oslo_vmware.api [None req-7bb4ea3e-e5b3-4b8e-bc50-4a4b860c9418 tempest-ServerDiagnosticsV248Test-635217058 tempest-ServerDiagnosticsV248Test-635217058-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52a07c3f-d793-795d-17ac-4603a7b2839d, 'name': SearchDatastore_Task, 'duration_secs': 0.008651} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1395.041214] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-37128517-adab-49ec-82e0-5143232b3038 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1395.047761] env[62519]: DEBUG oslo_vmware.api [None req-7bb4ea3e-e5b3-4b8e-bc50-4a4b860c9418 tempest-ServerDiagnosticsV248Test-635217058 tempest-ServerDiagnosticsV248Test-635217058-project-member] Waiting for the task: (returnval){ [ 1395.047761] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]52330e36-9617-a822-c9db-b3d9e3900ffa" [ 1395.047761] env[62519]: _type = "Task" [ 1395.047761] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1395.058389] env[62519]: DEBUG oslo_vmware.api [None req-7bb4ea3e-e5b3-4b8e-bc50-4a4b860c9418 tempest-ServerDiagnosticsV248Test-635217058 tempest-ServerDiagnosticsV248Test-635217058-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52330e36-9617-a822-c9db-b3d9e3900ffa, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1395.091122] env[62519]: DEBUG nova.network.neutron [None req-e8efc79a-e5f9-47fc-b2c3-ac5d306260b4 tempest-ServerDiagnosticsTest-1027319288 tempest-ServerDiagnosticsTest-1027319288-project-member] [instance: 1118c1e5-1aa8-4f52-9fb9-e86531bf83d1] Successfully created port: 03319779-1123-4b6b-899b-834771303eff {{(pid=62519) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1395.112896] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a652d867-bd2e-4192-8680-91ca12f354dd {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1395.126123] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-806c9ed7-e10c-4a59-99ee-66808385ac99 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1395.163547] env[62519]: DEBUG oslo_concurrency.lockutils [None req-21ea1173-3fc6-4081-929e-331a7f1267ad tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Acquiring lock "31f55ece-82e6-40ad-ad7c-1af645f307bf" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1395.163716] env[62519]: DEBUG oslo_concurrency.lockutils [None req-21ea1173-3fc6-4081-929e-331a7f1267ad tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Lock "31f55ece-82e6-40ad-ad7c-1af645f307bf" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1395.168680] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51baa578-71d8-4af3-b522-93b81c1183bb {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1395.177330] env[62519]: DEBUG oslo_vmware.api [None req-93eb857c-a392-41fa-ab77-ecb0c92fa9f6 tempest-ServerDiagnosticsNegativeTest-1844110598 tempest-ServerDiagnosticsNegativeTest-1844110598-project-member] Task: {'id': task-1801921, 'name': Rename_Task, 'duration_secs': 0.902012} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1395.179702] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-93eb857c-a392-41fa-ab77-ecb0c92fa9f6 tempest-ServerDiagnosticsNegativeTest-1844110598 tempest-ServerDiagnosticsNegativeTest-1844110598-project-member] [instance: c616d8ec-f28a-4430-a336-1ea4790fd511] Powering on the VM {{(pid=62519) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1395.180038] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-bbc31670-eef7-42e5-8591-e4bfed401d8b {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1395.182831] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea7034a7-6c3f-42b5-9605-6c0f7eec7abd {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1395.199611] env[62519]: DEBUG nova.compute.provider_tree [None req-2bcdd96e-4e47-4b74-bed4-3230c87035c0 tempest-ServersV294TestFqdnHostnames-312089260 tempest-ServersV294TestFqdnHostnames-312089260-project-member] Inventory has not changed in ProviderTree for provider: f8ca0d98-9158-4b85-ae0e-b106f966dd44 {{(pid=62519) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1395.201919] env[62519]: DEBUG oslo_vmware.api [None req-93eb857c-a392-41fa-ab77-ecb0c92fa9f6 tempest-ServerDiagnosticsNegativeTest-1844110598 tempest-ServerDiagnosticsNegativeTest-1844110598-project-member] Waiting for the task: (returnval){ [ 1395.201919] env[62519]: value = "task-1801922" [ 1395.201919] env[62519]: _type = "Task" [ 1395.201919] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1395.212335] env[62519]: DEBUG oslo_vmware.api [None req-93eb857c-a392-41fa-ab77-ecb0c92fa9f6 tempest-ServerDiagnosticsNegativeTest-1844110598 tempest-ServerDiagnosticsNegativeTest-1844110598-project-member] Task: {'id': task-1801922, 'name': PowerOnVM_Task} progress is 33%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1395.268899] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1395.292886] env[62519]: DEBUG oslo_concurrency.lockutils [None req-b63f15ff-942a-4ceb-acce-0511e4a5332d tempest-ServersWithSpecificFlavorTestJSON-2102225660 tempest-ServersWithSpecificFlavorTestJSON-2102225660-project-member] Acquiring lock "029ee07c-705d-452b-9b14-385d69f2fbbb" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1395.293242] env[62519]: DEBUG oslo_concurrency.lockutils [None req-b63f15ff-942a-4ceb-acce-0511e4a5332d tempest-ServersWithSpecificFlavorTestJSON-2102225660 tempest-ServersWithSpecificFlavorTestJSON-2102225660-project-member] Lock "029ee07c-705d-452b-9b14-385d69f2fbbb" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1395.300132] env[62519]: DEBUG oslo_concurrency.lockutils [None req-88c1f75f-e69f-401c-9a65-440f1169c628 tempest-DeleteServersAdminTestJSON-1841953118 tempest-DeleteServersAdminTestJSON-1841953118-project-admin] Acquiring lock "f6e29557-05ad-4a11-bd01-0315926c0413" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1395.302354] env[62519]: DEBUG oslo_concurrency.lockutils [None req-88c1f75f-e69f-401c-9a65-440f1169c628 tempest-DeleteServersAdminTestJSON-1841953118 tempest-DeleteServersAdminTestJSON-1841953118-project-admin] Lock "f6e29557-05ad-4a11-bd01-0315926c0413" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.003s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1395.302475] env[62519]: DEBUG oslo_concurrency.lockutils [None req-88c1f75f-e69f-401c-9a65-440f1169c628 tempest-DeleteServersAdminTestJSON-1841953118 tempest-DeleteServersAdminTestJSON-1841953118-project-admin] Acquiring lock "f6e29557-05ad-4a11-bd01-0315926c0413-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1395.302966] env[62519]: DEBUG oslo_concurrency.lockutils [None req-88c1f75f-e69f-401c-9a65-440f1169c628 tempest-DeleteServersAdminTestJSON-1841953118 tempest-DeleteServersAdminTestJSON-1841953118-project-admin] Lock "f6e29557-05ad-4a11-bd01-0315926c0413-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.001s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1395.303440] env[62519]: DEBUG oslo_concurrency.lockutils [None req-88c1f75f-e69f-401c-9a65-440f1169c628 tempest-DeleteServersAdminTestJSON-1841953118 tempest-DeleteServersAdminTestJSON-1841953118-project-admin] Lock 
"f6e29557-05ad-4a11-bd01-0315926c0413-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1395.314933] env[62519]: INFO nova.compute.manager [None req-88c1f75f-e69f-401c-9a65-440f1169c628 tempest-DeleteServersAdminTestJSON-1841953118 tempest-DeleteServersAdminTestJSON-1841953118-project-admin] [instance: f6e29557-05ad-4a11-bd01-0315926c0413] Terminating instance [ 1395.334864] env[62519]: DEBUG nova.compute.manager [None req-4c3fd39d-ef1d-4593-8784-6a91635c8aff tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] [instance: 099112ae-569b-4853-bc47-b0b8b97d2525] Starting instance... {{(pid=62519) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2441}} [ 1395.435267] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1801920, 'name': CreateVM_Task, 'duration_secs': 1.272722} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1395.435534] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c8b7568b-ba07-4f65-818b-f84910209361] Created VM on the ESX host {{(pid=62519) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1395.436180] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2c6d3fbe-1808-4f6d-8aed-7a7c11b6565e tempest-ServersAdminNegativeTestJSON-2096466474 tempest-ServersAdminNegativeTestJSON-2096466474-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1395.436349] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2c6d3fbe-1808-4f6d-8aed-7a7c11b6565e tempest-ServersAdminNegativeTestJSON-2096466474 tempest-ServersAdminNegativeTestJSON-2096466474-project-member] Acquired lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1395.436765] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2c6d3fbe-1808-4f6d-8aed-7a7c11b6565e tempest-ServersAdminNegativeTestJSON-2096466474 tempest-ServersAdminNegativeTestJSON-2096466474-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1395.436995] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e303793a-1e7f-4197-8819-0cfede6550a5 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1395.443409] env[62519]: DEBUG oslo_vmware.api [None req-2c6d3fbe-1808-4f6d-8aed-7a7c11b6565e tempest-ServersAdminNegativeTestJSON-2096466474 tempest-ServersAdminNegativeTestJSON-2096466474-project-member] Waiting for the task: (returnval){ [ 1395.443409] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]52ad8553-a82b-d112-61a7-6d3db990fe52" [ 1395.443409] env[62519]: _type = "Task" [ 1395.443409] env[62519]: } to complete. 
{{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1395.458285] env[62519]: DEBUG oslo_vmware.api [None req-2c6d3fbe-1808-4f6d-8aed-7a7c11b6565e tempest-ServersAdminNegativeTestJSON-2096466474 tempest-ServersAdminNegativeTestJSON-2096466474-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52ad8553-a82b-d112-61a7-6d3db990fe52, 'name': SearchDatastore_Task, 'duration_secs': 0.009262} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1395.458549] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2c6d3fbe-1808-4f6d-8aed-7a7c11b6565e tempest-ServersAdminNegativeTestJSON-2096466474 tempest-ServersAdminNegativeTestJSON-2096466474-project-member] Releasing lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1395.458798] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-2c6d3fbe-1808-4f6d-8aed-7a7c11b6565e tempest-ServersAdminNegativeTestJSON-2096466474 tempest-ServersAdminNegativeTestJSON-2096466474-project-member] [instance: c8b7568b-ba07-4f65-818b-f84910209361] Processing image 15793716-f1d9-4a86-9030-717adf498693 {{(pid=62519) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1395.459954] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2c6d3fbe-1808-4f6d-8aed-7a7c11b6565e tempest-ServersAdminNegativeTestJSON-2096466474 tempest-ServersAdminNegativeTestJSON-2096466474-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1395.563693] env[62519]: DEBUG oslo_vmware.api [None req-7bb4ea3e-e5b3-4b8e-bc50-4a4b860c9418 tempest-ServerDiagnosticsV248Test-635217058 tempest-ServerDiagnosticsV248Test-635217058-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52330e36-9617-a822-c9db-b3d9e3900ffa, 'name': SearchDatastore_Task, 'duration_secs': 0.008312} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1395.563693] env[62519]: DEBUG oslo_concurrency.lockutils [None req-7bb4ea3e-e5b3-4b8e-bc50-4a4b860c9418 tempest-ServerDiagnosticsV248Test-635217058 tempest-ServerDiagnosticsV248Test-635217058-project-member] Releasing lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1395.563984] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-7bb4ea3e-e5b3-4b8e-bc50-4a4b860c9418 tempest-ServerDiagnosticsV248Test-635217058 tempest-ServerDiagnosticsV248Test-635217058-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk to [datastore1] ceadcb5e-ee82-4441-b046-f79b973ec05e/ceadcb5e-ee82-4441-b046-f79b973ec05e.vmdk {{(pid=62519) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1395.563984] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2c6d3fbe-1808-4f6d-8aed-7a7c11b6565e tempest-ServersAdminNegativeTestJSON-2096466474 tempest-ServersAdminNegativeTestJSON-2096466474-project-member] Acquired lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1395.564184] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-2c6d3fbe-1808-4f6d-8aed-7a7c11b6565e tempest-ServersAdminNegativeTestJSON-2096466474 tempest-ServersAdminNegativeTestJSON-2096466474-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62519) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1395.564450] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-76c31a4e-b464-40d8-ba68-b0eefdec595d {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1395.566698] env[62519]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-16c38d25-7b1e-40d2-b423-c3e918fc2967 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1395.574356] env[62519]: DEBUG oslo_vmware.api [None req-7bb4ea3e-e5b3-4b8e-bc50-4a4b860c9418 tempest-ServerDiagnosticsV248Test-635217058 tempest-ServerDiagnosticsV248Test-635217058-project-member] Waiting for the task: (returnval){ [ 1395.574356] env[62519]: value = "task-1801923" [ 1395.574356] env[62519]: _type = "Task" [ 1395.574356] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1395.578913] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-2c6d3fbe-1808-4f6d-8aed-7a7c11b6565e tempest-ServersAdminNegativeTestJSON-2096466474 tempest-ServersAdminNegativeTestJSON-2096466474-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62519) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1395.578913] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-2c6d3fbe-1808-4f6d-8aed-7a7c11b6565e tempest-ServersAdminNegativeTestJSON-2096466474 tempest-ServersAdminNegativeTestJSON-2096466474-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62519) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1395.581404] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2d4fe2ba-4df5-4dac-a477-059d598e4aa7 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1395.587030] env[62519]: DEBUG oslo_vmware.api [None req-7bb4ea3e-e5b3-4b8e-bc50-4a4b860c9418 tempest-ServerDiagnosticsV248Test-635217058 tempest-ServerDiagnosticsV248Test-635217058-project-member] Task: {'id': task-1801923, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1395.589433] env[62519]: DEBUG oslo_vmware.api [None req-2c6d3fbe-1808-4f6d-8aed-7a7c11b6565e tempest-ServersAdminNegativeTestJSON-2096466474 tempest-ServersAdminNegativeTestJSON-2096466474-project-member] Waiting for the task: (returnval){ [ 1395.589433] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]5208d4ab-07a8-ce81-0d37-8e4dcdb1df17" [ 1395.589433] env[62519]: _type = "Task" [ 1395.589433] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1395.600401] env[62519]: DEBUG oslo_vmware.api [None req-2c6d3fbe-1808-4f6d-8aed-7a7c11b6565e tempest-ServersAdminNegativeTestJSON-2096466474 tempest-ServersAdminNegativeTestJSON-2096466474-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]5208d4ab-07a8-ce81-0d37-8e4dcdb1df17, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1395.703226] env[62519]: DEBUG nova.scheduler.client.report [None req-2bcdd96e-4e47-4b74-bed4-3230c87035c0 tempest-ServersV294TestFqdnHostnames-312089260 tempest-ServersV294TestFqdnHostnames-312089260-project-member] Inventory has not changed for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 159, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1395.717083] env[62519]: DEBUG oslo_vmware.api [None req-93eb857c-a392-41fa-ab77-ecb0c92fa9f6 tempest-ServerDiagnosticsNegativeTest-1844110598 tempest-ServerDiagnosticsNegativeTest-1844110598-project-member] Task: {'id': task-1801922, 'name': PowerOnVM_Task, 'duration_secs': 0.458256} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1395.717083] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-93eb857c-a392-41fa-ab77-ecb0c92fa9f6 tempest-ServerDiagnosticsNegativeTest-1844110598 tempest-ServerDiagnosticsNegativeTest-1844110598-project-member] [instance: c616d8ec-f28a-4430-a336-1ea4790fd511] Powered on the VM {{(pid=62519) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1395.717083] env[62519]: INFO nova.compute.manager [None req-93eb857c-a392-41fa-ab77-ecb0c92fa9f6 tempest-ServerDiagnosticsNegativeTest-1844110598 tempest-ServerDiagnosticsNegativeTest-1844110598-project-member] [instance: c616d8ec-f28a-4430-a336-1ea4790fd511] Took 9.30 seconds to spawn the instance on the hypervisor. [ 1395.717083] env[62519]: DEBUG nova.compute.manager [None req-93eb857c-a392-41fa-ab77-ecb0c92fa9f6 tempest-ServerDiagnosticsNegativeTest-1844110598 tempest-ServerDiagnosticsNegativeTest-1844110598-project-member] [instance: c616d8ec-f28a-4430-a336-1ea4790fd511] Checking state {{(pid=62519) _get_power_state /opt/stack/nova/nova/compute/manager.py:1799}} [ 1395.717404] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0756b3fe-5bcf-43ed-ad42-9b596e71044f {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1395.825067] env[62519]: DEBUG nova.compute.manager [None req-88c1f75f-e69f-401c-9a65-440f1169c628 tempest-DeleteServersAdminTestJSON-1841953118 tempest-DeleteServersAdminTestJSON-1841953118-project-admin] [instance: f6e29557-05ad-4a11-bd01-0315926c0413] Start destroying the instance on the hypervisor. {{(pid=62519) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3166}} [ 1395.825322] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-88c1f75f-e69f-401c-9a65-440f1169c628 tempest-DeleteServersAdminTestJSON-1841953118 tempest-DeleteServersAdminTestJSON-1841953118-project-admin] [instance: f6e29557-05ad-4a11-bd01-0315926c0413] Destroying instance {{(pid=62519) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1395.826245] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-742af345-e638-4dcf-bff7-229c59051ac4 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1395.837314] env[62519]: DEBUG nova.compute.manager [None req-e8efc79a-e5f9-47fc-b2c3-ac5d306260b4 tempest-ServerDiagnosticsTest-1027319288 tempest-ServerDiagnosticsTest-1027319288-project-member] [instance: 1118c1e5-1aa8-4f52-9fb9-e86531bf83d1] Start spawning the instance on the hypervisor. 
{{(pid=62519) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2647}} [ 1395.839937] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-88c1f75f-e69f-401c-9a65-440f1169c628 tempest-DeleteServersAdminTestJSON-1841953118 tempest-DeleteServersAdminTestJSON-1841953118-project-admin] [instance: f6e29557-05ad-4a11-bd01-0315926c0413] Powering off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1395.840103] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-144b6fb0-627d-4f35-9964-55f30635add0 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1395.849377] env[62519]: DEBUG oslo_vmware.api [None req-88c1f75f-e69f-401c-9a65-440f1169c628 tempest-DeleteServersAdminTestJSON-1841953118 tempest-DeleteServersAdminTestJSON-1841953118-project-admin] Waiting for the task: (returnval){ [ 1395.849377] env[62519]: value = "task-1801924" [ 1395.849377] env[62519]: _type = "Task" [ 1395.849377] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1395.859707] env[62519]: DEBUG oslo_vmware.api [None req-88c1f75f-e69f-401c-9a65-440f1169c628 tempest-DeleteServersAdminTestJSON-1841953118 tempest-DeleteServersAdminTestJSON-1841953118-project-admin] Task: {'id': task-1801924, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1395.875458] env[62519]: DEBUG nova.virt.hardware [None req-e8efc79a-e5f9-47fc-b2c3-ac5d306260b4 tempest-ServerDiagnosticsTest-1027319288 tempest-ServerDiagnosticsTest-1027319288-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T07:56:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T07:55:55Z,direct_url=,disk_format='vmdk',id=15793716-f1d9-4a86-9030-717adf498693,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4069337684d348e0a1ab4eb6a1a2b14d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T07:55:56Z,virtual_size=,visibility=), allow threads: False {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1395.875458] env[62519]: DEBUG nova.virt.hardware [None req-e8efc79a-e5f9-47fc-b2c3-ac5d306260b4 tempest-ServerDiagnosticsTest-1027319288 tempest-ServerDiagnosticsTest-1027319288-project-member] Flavor limits 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1395.875458] env[62519]: DEBUG nova.virt.hardware [None req-e8efc79a-e5f9-47fc-b2c3-ac5d306260b4 tempest-ServerDiagnosticsTest-1027319288 tempest-ServerDiagnosticsTest-1027319288-project-member] Image limits 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1395.875615] env[62519]: DEBUG nova.virt.hardware [None req-e8efc79a-e5f9-47fc-b2c3-ac5d306260b4 tempest-ServerDiagnosticsTest-1027319288 tempest-ServerDiagnosticsTest-1027319288-project-member] Flavor pref 0:0:0 {{(pid=62519) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:392}} [ 1395.875615] env[62519]: DEBUG nova.virt.hardware [None req-e8efc79a-e5f9-47fc-b2c3-ac5d306260b4 tempest-ServerDiagnosticsTest-1027319288 tempest-ServerDiagnosticsTest-1027319288-project-member] Image pref 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1395.875684] env[62519]: DEBUG nova.virt.hardware [None req-e8efc79a-e5f9-47fc-b2c3-ac5d306260b4 tempest-ServerDiagnosticsTest-1027319288 tempest-ServerDiagnosticsTest-1027319288-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1395.876103] env[62519]: DEBUG nova.virt.hardware [None req-e8efc79a-e5f9-47fc-b2c3-ac5d306260b4 tempest-ServerDiagnosticsTest-1027319288 tempest-ServerDiagnosticsTest-1027319288-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1395.876177] env[62519]: DEBUG nova.virt.hardware [None req-e8efc79a-e5f9-47fc-b2c3-ac5d306260b4 tempest-ServerDiagnosticsTest-1027319288 tempest-ServerDiagnosticsTest-1027319288-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62519) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1395.876307] env[62519]: DEBUG nova.virt.hardware [None req-e8efc79a-e5f9-47fc-b2c3-ac5d306260b4 tempest-ServerDiagnosticsTest-1027319288 tempest-ServerDiagnosticsTest-1027319288-project-member] Got 1 possible topologies {{(pid=62519) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1395.876995] env[62519]: DEBUG nova.virt.hardware [None req-e8efc79a-e5f9-47fc-b2c3-ac5d306260b4 tempest-ServerDiagnosticsTest-1027319288 tempest-ServerDiagnosticsTest-1027319288-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1395.876995] env[62519]: DEBUG nova.virt.hardware [None req-e8efc79a-e5f9-47fc-b2c3-ac5d306260b4 tempest-ServerDiagnosticsTest-1027319288 tempest-ServerDiagnosticsTest-1027319288-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1395.878395] env[62519]: DEBUG oslo_concurrency.lockutils [None req-4c3fd39d-ef1d-4593-8784-6a91635c8aff tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1395.879162] env[62519]: DEBUG oslo_concurrency.lockutils [None req-c40feabf-b595-479f-ad29-1457519d4d7d tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Lock "fe350d30-6fbd-4813-9634-ed05984fecfd" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 21.696s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1395.880851] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0533518b-dfd1-499b-9c79-ef04b8a1973a {{(pid=62519) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1395.890974] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c30bf136-6f27-4c0a-8adc-6cc3322252a8 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1395.962643] env[62519]: DEBUG oslo_concurrency.lockutils [None req-39a021c3-8855-476c-8305-d806d48e02a7 tempest-VolumesAssistedSnapshotsTest-904167312 tempest-VolumesAssistedSnapshotsTest-904167312-project-member] Acquiring lock "91902e7f-8c15-447b-a3a8-04433434b1b6" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1395.965320] env[62519]: DEBUG oslo_concurrency.lockutils [None req-39a021c3-8855-476c-8305-d806d48e02a7 tempest-VolumesAssistedSnapshotsTest-904167312 tempest-VolumesAssistedSnapshotsTest-904167312-project-member] Lock "91902e7f-8c15-447b-a3a8-04433434b1b6" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.003s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1395.974660] env[62519]: DEBUG nova.compute.manager [req-3a56f5f6-7698-408c-83dd-972d724e2652 req-e4763236-30c7-467e-aa90-e0a45a8995af service nova] [instance: c8b7568b-ba07-4f65-818b-f84910209361] Received event network-changed-e03efe42-da32-408a-a635-c94ee5a55303 {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1395.975017] env[62519]: DEBUG nova.compute.manager [req-3a56f5f6-7698-408c-83dd-972d724e2652 req-e4763236-30c7-467e-aa90-e0a45a8995af service nova] [instance: c8b7568b-ba07-4f65-818b-f84910209361] Refreshing instance network info cache due to event network-changed-e03efe42-da32-408a-a635-c94ee5a55303. 
{{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11628}} [ 1395.975408] env[62519]: DEBUG oslo_concurrency.lockutils [req-3a56f5f6-7698-408c-83dd-972d724e2652 req-e4763236-30c7-467e-aa90-e0a45a8995af service nova] Acquiring lock "refresh_cache-c8b7568b-ba07-4f65-818b-f84910209361" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1395.975687] env[62519]: DEBUG oslo_concurrency.lockutils [req-3a56f5f6-7698-408c-83dd-972d724e2652 req-e4763236-30c7-467e-aa90-e0a45a8995af service nova] Acquired lock "refresh_cache-c8b7568b-ba07-4f65-818b-f84910209361" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1395.976061] env[62519]: DEBUG nova.network.neutron [req-3a56f5f6-7698-408c-83dd-972d724e2652 req-e4763236-30c7-467e-aa90-e0a45a8995af service nova] [instance: c8b7568b-ba07-4f65-818b-f84910209361] Refreshing network info cache for port e03efe42-da32-408a-a635-c94ee5a55303 {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1395.978608] env[62519]: DEBUG oslo_concurrency.lockutils [None req-34140d02-73d9-488b-8a14-f48460d5079c tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Lock "8b178cc0-db79-4ec2-8962-f31b936f8eff" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 17.309s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1396.088347] env[62519]: DEBUG oslo_vmware.api [None req-7bb4ea3e-e5b3-4b8e-bc50-4a4b860c9418 tempest-ServerDiagnosticsV248Test-635217058 tempest-ServerDiagnosticsV248Test-635217058-project-member] Task: {'id': task-1801923, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1396.103119] env[62519]: DEBUG oslo_vmware.api [None req-2c6d3fbe-1808-4f6d-8aed-7a7c11b6565e tempest-ServersAdminNegativeTestJSON-2096466474 tempest-ServersAdminNegativeTestJSON-2096466474-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]5208d4ab-07a8-ce81-0d37-8e4dcdb1df17, 'name': SearchDatastore_Task, 'duration_secs': 0.028295} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1396.103519] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f9fa61a4-2424-4d65-a690-a23db4d6fda8 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1396.111086] env[62519]: DEBUG oslo_vmware.api [None req-2c6d3fbe-1808-4f6d-8aed-7a7c11b6565e tempest-ServersAdminNegativeTestJSON-2096466474 tempest-ServersAdminNegativeTestJSON-2096466474-project-member] Waiting for the task: (returnval){ [ 1396.111086] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]52906444-9f19-1313-08f5-92862f2ce701" [ 1396.111086] env[62519]: _type = "Task" [ 1396.111086] env[62519]: } to complete. 
{{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1396.121376] env[62519]: DEBUG oslo_vmware.api [None req-2c6d3fbe-1808-4f6d-8aed-7a7c11b6565e tempest-ServersAdminNegativeTestJSON-2096466474 tempest-ServersAdminNegativeTestJSON-2096466474-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52906444-9f19-1313-08f5-92862f2ce701, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1396.212144] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2bcdd96e-4e47-4b74-bed4-3230c87035c0 tempest-ServersV294TestFqdnHostnames-312089260 tempest-ServersV294TestFqdnHostnames-312089260-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.413s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1396.212717] env[62519]: DEBUG nova.compute.manager [None req-2bcdd96e-4e47-4b74-bed4-3230c87035c0 tempest-ServersV294TestFqdnHostnames-312089260 tempest-ServersV294TestFqdnHostnames-312089260-project-member] [instance: 2b391628-18a2-4606-8c59-58ba642cee50] Start building networks asynchronously for instance. {{(pid=62519) _build_resources /opt/stack/nova/nova/compute/manager.py:2838}} [ 1396.216162] env[62519]: DEBUG oslo_concurrency.lockutils [None req-94daf7e7-f2b3-4130-bb71-0b7053690311 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 10.616s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1396.217873] env[62519]: INFO nova.compute.claims [None req-94daf7e7-f2b3-4130-bb71-0b7053690311 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] [instance: 11d4a010-959f-4f53-94dc-7499007612ad] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1396.238686] env[62519]: INFO nova.compute.manager [None req-93eb857c-a392-41fa-ab77-ecb0c92fa9f6 tempest-ServerDiagnosticsNegativeTest-1844110598 tempest-ServerDiagnosticsNegativeTest-1844110598-project-member] [instance: c616d8ec-f28a-4430-a336-1ea4790fd511] Took 15.64 seconds to build instance. [ 1396.367866] env[62519]: DEBUG oslo_vmware.api [None req-88c1f75f-e69f-401c-9a65-440f1169c628 tempest-DeleteServersAdminTestJSON-1841953118 tempest-DeleteServersAdminTestJSON-1841953118-project-admin] Task: {'id': task-1801924, 'name': PowerOffVM_Task, 'duration_secs': 0.319642} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1396.368430] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-88c1f75f-e69f-401c-9a65-440f1169c628 tempest-DeleteServersAdminTestJSON-1841953118 tempest-DeleteServersAdminTestJSON-1841953118-project-admin] [instance: f6e29557-05ad-4a11-bd01-0315926c0413] Powered off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1396.368677] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-88c1f75f-e69f-401c-9a65-440f1169c628 tempest-DeleteServersAdminTestJSON-1841953118 tempest-DeleteServersAdminTestJSON-1841953118-project-admin] [instance: f6e29557-05ad-4a11-bd01-0315926c0413] Unregistering the VM {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1396.368988] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-50e09fdb-d601-461f-b885-b5c79dd51870 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1396.387850] env[62519]: DEBUG nova.compute.manager [None req-3dc13758-4a00-4637-af78-ec1a85cd10bf tempest-ServersAdmin275Test-1657935187 tempest-ServersAdmin275Test-1657935187-project-member] [instance: c07e4d30-44bc-417b-8137-97f974aec932] Starting instance... {{(pid=62519) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2441}} [ 1396.448775] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-88c1f75f-e69f-401c-9a65-440f1169c628 tempest-DeleteServersAdminTestJSON-1841953118 tempest-DeleteServersAdminTestJSON-1841953118-project-admin] [instance: f6e29557-05ad-4a11-bd01-0315926c0413] Unregistered the VM {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1396.449032] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-88c1f75f-e69f-401c-9a65-440f1169c628 tempest-DeleteServersAdminTestJSON-1841953118 tempest-DeleteServersAdminTestJSON-1841953118-project-admin] [instance: f6e29557-05ad-4a11-bd01-0315926c0413] Deleting contents of the VM from datastore datastore1 {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1396.449211] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-88c1f75f-e69f-401c-9a65-440f1169c628 tempest-DeleteServersAdminTestJSON-1841953118 tempest-DeleteServersAdminTestJSON-1841953118-project-admin] Deleting the datastore file [datastore1] f6e29557-05ad-4a11-bd01-0315926c0413 {{(pid=62519) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1396.449479] env[62519]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c7771520-bd2a-4686-9dcc-112cded68751 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1396.459826] env[62519]: DEBUG oslo_vmware.api [None req-88c1f75f-e69f-401c-9a65-440f1169c628 tempest-DeleteServersAdminTestJSON-1841953118 tempest-DeleteServersAdminTestJSON-1841953118-project-admin] Waiting for the task: (returnval){ [ 1396.459826] env[62519]: value = "task-1801926" [ 1396.459826] env[62519]: _type = "Task" [ 1396.459826] env[62519]: } to complete. 
{{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1396.476512] env[62519]: DEBUG oslo_vmware.api [None req-88c1f75f-e69f-401c-9a65-440f1169c628 tempest-DeleteServersAdminTestJSON-1841953118 tempest-DeleteServersAdminTestJSON-1841953118-project-admin] Task: {'id': task-1801926, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1396.484740] env[62519]: DEBUG nova.compute.manager [None req-21ea1173-3fc6-4081-929e-331a7f1267ad tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] [instance: 31f55ece-82e6-40ad-ad7c-1af645f307bf] Starting instance... {{(pid=62519) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2441}} [ 1396.588465] env[62519]: DEBUG oslo_vmware.api [None req-7bb4ea3e-e5b3-4b8e-bc50-4a4b860c9418 tempest-ServerDiagnosticsV248Test-635217058 tempest-ServerDiagnosticsV248Test-635217058-project-member] Task: {'id': task-1801923, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.723306} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1396.588847] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-7bb4ea3e-e5b3-4b8e-bc50-4a4b860c9418 tempest-ServerDiagnosticsV248Test-635217058 tempest-ServerDiagnosticsV248Test-635217058-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk to [datastore1] ceadcb5e-ee82-4441-b046-f79b973ec05e/ceadcb5e-ee82-4441-b046-f79b973ec05e.vmdk {{(pid=62519) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1396.589467] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-7bb4ea3e-e5b3-4b8e-bc50-4a4b860c9418 tempest-ServerDiagnosticsV248Test-635217058 tempest-ServerDiagnosticsV248Test-635217058-project-member] [instance: ceadcb5e-ee82-4441-b046-f79b973ec05e] Extending root virtual disk to 1048576 {{(pid=62519) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1396.589873] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-ad504b6f-91a2-4baa-a506-5eaf746b0724 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1396.598596] env[62519]: DEBUG oslo_vmware.api [None req-7bb4ea3e-e5b3-4b8e-bc50-4a4b860c9418 tempest-ServerDiagnosticsV248Test-635217058 tempest-ServerDiagnosticsV248Test-635217058-project-member] Waiting for the task: (returnval){ [ 1396.598596] env[62519]: value = "task-1801927" [ 1396.598596] env[62519]: _type = "Task" [ 1396.598596] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1396.607920] env[62519]: DEBUG oslo_vmware.api [None req-7bb4ea3e-e5b3-4b8e-bc50-4a4b860c9418 tempest-ServerDiagnosticsV248Test-635217058 tempest-ServerDiagnosticsV248Test-635217058-project-member] Task: {'id': task-1801927, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1396.623291] env[62519]: DEBUG oslo_vmware.api [None req-2c6d3fbe-1808-4f6d-8aed-7a7c11b6565e tempest-ServersAdminNegativeTestJSON-2096466474 tempest-ServersAdminNegativeTestJSON-2096466474-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52906444-9f19-1313-08f5-92862f2ce701, 'name': SearchDatastore_Task, 'duration_secs': 0.066238} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1396.623667] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2c6d3fbe-1808-4f6d-8aed-7a7c11b6565e tempest-ServersAdminNegativeTestJSON-2096466474 tempest-ServersAdminNegativeTestJSON-2096466474-project-member] Releasing lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1396.623827] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-2c6d3fbe-1808-4f6d-8aed-7a7c11b6565e tempest-ServersAdminNegativeTestJSON-2096466474 tempest-ServersAdminNegativeTestJSON-2096466474-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk to [datastore1] c8b7568b-ba07-4f65-818b-f84910209361/c8b7568b-ba07-4f65-818b-f84910209361.vmdk {{(pid=62519) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1396.624585] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-39563637-da04-42f9-9853-05c89da69505 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1396.633765] env[62519]: DEBUG oslo_vmware.api [None req-2c6d3fbe-1808-4f6d-8aed-7a7c11b6565e tempest-ServersAdminNegativeTestJSON-2096466474 tempest-ServersAdminNegativeTestJSON-2096466474-project-member] Waiting for the task: (returnval){ [ 1396.633765] env[62519]: value = "task-1801928" [ 1396.633765] env[62519]: _type = "Task" [ 1396.633765] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1396.640529] env[62519]: DEBUG oslo_vmware.api [None req-2c6d3fbe-1808-4f6d-8aed-7a7c11b6565e tempest-ServersAdminNegativeTestJSON-2096466474 tempest-ServersAdminNegativeTestJSON-2096466474-project-member] Task: {'id': task-1801928, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1396.724649] env[62519]: DEBUG nova.compute.utils [None req-2bcdd96e-4e47-4b74-bed4-3230c87035c0 tempest-ServersV294TestFqdnHostnames-312089260 tempest-ServersV294TestFqdnHostnames-312089260-project-member] Using /dev/sd instead of None {{(pid=62519) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1396.728610] env[62519]: DEBUG nova.compute.manager [None req-2bcdd96e-4e47-4b74-bed4-3230c87035c0 tempest-ServersV294TestFqdnHostnames-312089260 tempest-ServersV294TestFqdnHostnames-312089260-project-member] [instance: 2b391628-18a2-4606-8c59-58ba642cee50] Allocating IP information in the background. 
{{(pid=62519) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1989}} [ 1396.730435] env[62519]: DEBUG nova.network.neutron [None req-2bcdd96e-4e47-4b74-bed4-3230c87035c0 tempest-ServersV294TestFqdnHostnames-312089260 tempest-ServersV294TestFqdnHostnames-312089260-project-member] [instance: 2b391628-18a2-4606-8c59-58ba642cee50] allocate_for_instance() {{(pid=62519) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1396.887014] env[62519]: DEBUG nova.policy [None req-2bcdd96e-4e47-4b74-bed4-3230c87035c0 tempest-ServersV294TestFqdnHostnames-312089260 tempest-ServersV294TestFqdnHostnames-312089260-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'e7265dcd10204f0f93b2062d1965c301', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '998210eef9e04b64a426b66c965130a1', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62519) authorize /opt/stack/nova/nova/policy.py:192}} [ 1396.915113] env[62519]: DEBUG oslo_concurrency.lockutils [None req-3dc13758-4a00-4637-af78-ec1a85cd10bf tempest-ServersAdmin275Test-1657935187 tempest-ServersAdmin275Test-1657935187-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1396.973017] env[62519]: DEBUG oslo_vmware.api [None req-88c1f75f-e69f-401c-9a65-440f1169c628 tempest-DeleteServersAdminTestJSON-1841953118 tempest-DeleteServersAdminTestJSON-1841953118-project-admin] Task: {'id': task-1801926, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.374505} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1396.973189] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-88c1f75f-e69f-401c-9a65-440f1169c628 tempest-DeleteServersAdminTestJSON-1841953118 tempest-DeleteServersAdminTestJSON-1841953118-project-admin] Deleted the datastore file {{(pid=62519) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1396.973264] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-88c1f75f-e69f-401c-9a65-440f1169c628 tempest-DeleteServersAdminTestJSON-1841953118 tempest-DeleteServersAdminTestJSON-1841953118-project-admin] [instance: f6e29557-05ad-4a11-bd01-0315926c0413] Deleted contents of the VM from datastore datastore1 {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1396.974277] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-88c1f75f-e69f-401c-9a65-440f1169c628 tempest-DeleteServersAdminTestJSON-1841953118 tempest-DeleteServersAdminTestJSON-1841953118-project-admin] [instance: f6e29557-05ad-4a11-bd01-0315926c0413] Instance destroyed {{(pid=62519) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1396.974277] env[62519]: INFO nova.compute.manager [None req-88c1f75f-e69f-401c-9a65-440f1169c628 tempest-DeleteServersAdminTestJSON-1841953118 tempest-DeleteServersAdminTestJSON-1841953118-project-admin] [instance: f6e29557-05ad-4a11-bd01-0315926c0413] Took 1.15 seconds to destroy the instance on the hypervisor. 
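A note on the pattern that dominates the entries above and below: the driver kicks off a vCenter task (CopyVirtualDisk_Task, ExtendVirtualDisk_Task, DeleteDatastoreFile_Task, ReconfigVM_Task, ...) and then polls it, logging "progress is N%" until the task "completed successfully". The sketch below is only a rough, hypothetical illustration of that poll-until-done control flow; FakeTask and poll_task are made-up stand-ins and are not the oslo.vmware or Nova APIs that emit these messages.

    # Illustrative only: mimics the poll-until-done loop visible in these log lines.
    # FakeTask/poll_task are hypothetical stand-ins, not oslo.vmware or Nova code.
    import time


    class FakeTask:
        """Pretends to be a vCenter task that finishes after a few polls."""

        def __init__(self, name, polls_to_finish=3):
            self.name = name
            self.polls_to_finish = polls_to_finish
            self._polls = 0

        def info(self):
            self._polls += 1
            if self._polls >= self.polls_to_finish:
                return {"state": "success", "progress": 100}
            return {"state": "running",
                    "progress": int(100 * self._polls / self.polls_to_finish)}


    def poll_task(task, interval=0.5, timeout=30):
        """Poll a task until it succeeds, fails, or the timeout expires."""
        deadline = time.monotonic() + timeout
        while time.monotonic() < deadline:
            info = task.info()
            print(f"Task {task.name} progress is {info['progress']}%.")
            if info["state"] == "success":
                print(f"Task {task.name} completed successfully.")
                return info
            if info["state"] == "error":
                raise RuntimeError(f"Task {task.name} failed")
            time.sleep(interval)
        raise TimeoutError(f"Task {task.name} did not complete in {timeout}s")


    if __name__ == "__main__":
        poll_task(FakeTask("CopyVirtualDisk_Task"))

In the real driver the task state is fetched through the authenticated vCenter session rather than a local object, but the control flow matches what the log shows: request the task, poll and log its progress, and return only once it reports success or raise on error/timeout.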
[ 1396.974277] env[62519]: DEBUG oslo.service.loopingcall [None req-88c1f75f-e69f-401c-9a65-440f1169c628 tempest-DeleteServersAdminTestJSON-1841953118 tempest-DeleteServersAdminTestJSON-1841953118-project-admin] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62519) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1396.974277] env[62519]: DEBUG nova.compute.manager [-] [instance: f6e29557-05ad-4a11-bd01-0315926c0413] Deallocating network for instance {{(pid=62519) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2297}} [ 1396.974277] env[62519]: DEBUG nova.network.neutron [-] [instance: f6e29557-05ad-4a11-bd01-0315926c0413] deallocate_for_instance() {{(pid=62519) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1397.005495] env[62519]: DEBUG oslo_concurrency.lockutils [None req-21ea1173-3fc6-4081-929e-331a7f1267ad tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1397.025965] env[62519]: DEBUG oslo_concurrency.lockutils [None req-a045cbfb-ac93-4591-9bfc-366cc2fb264e tempest-VolumesAdminNegativeTest-694306887 tempest-VolumesAdminNegativeTest-694306887-project-member] Acquiring lock "c61c893f-826b-4874-b253-de6fbffa9e5a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1397.026278] env[62519]: DEBUG oslo_concurrency.lockutils [None req-a045cbfb-ac93-4591-9bfc-366cc2fb264e tempest-VolumesAdminNegativeTest-694306887 tempest-VolumesAdminNegativeTest-694306887-project-member] Lock "c61c893f-826b-4874-b253-de6fbffa9e5a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1397.110663] env[62519]: DEBUG oslo_vmware.api [None req-7bb4ea3e-e5b3-4b8e-bc50-4a4b860c9418 tempest-ServerDiagnosticsV248Test-635217058 tempest-ServerDiagnosticsV248Test-635217058-project-member] Task: {'id': task-1801927, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.176413} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1397.111098] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-7bb4ea3e-e5b3-4b8e-bc50-4a4b860c9418 tempest-ServerDiagnosticsV248Test-635217058 tempest-ServerDiagnosticsV248Test-635217058-project-member] [instance: ceadcb5e-ee82-4441-b046-f79b973ec05e] Extended root virtual disk {{(pid=62519) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1397.111786] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb12f114-d47b-4f4a-b5e5-c74c89588239 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1397.136984] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-7bb4ea3e-e5b3-4b8e-bc50-4a4b860c9418 tempest-ServerDiagnosticsV248Test-635217058 tempest-ServerDiagnosticsV248Test-635217058-project-member] [instance: ceadcb5e-ee82-4441-b046-f79b973ec05e] Reconfiguring VM instance instance-00000005 to attach disk [datastore1] ceadcb5e-ee82-4441-b046-f79b973ec05e/ceadcb5e-ee82-4441-b046-f79b973ec05e.vmdk or device None with type sparse {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1397.137339] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e88125c8-ade0-4b80-95f1-151c5d61c2bb {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1397.163580] env[62519]: DEBUG oslo_vmware.api [None req-2c6d3fbe-1808-4f6d-8aed-7a7c11b6565e tempest-ServersAdminNegativeTestJSON-2096466474 tempest-ServersAdminNegativeTestJSON-2096466474-project-member] Task: {'id': task-1801928, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1397.165575] env[62519]: DEBUG oslo_vmware.api [None req-7bb4ea3e-e5b3-4b8e-bc50-4a4b860c9418 tempest-ServerDiagnosticsV248Test-635217058 tempest-ServerDiagnosticsV248Test-635217058-project-member] Waiting for the task: (returnval){ [ 1397.165575] env[62519]: value = "task-1801929" [ 1397.165575] env[62519]: _type = "Task" [ 1397.165575] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1397.175361] env[62519]: DEBUG oslo_vmware.api [None req-7bb4ea3e-e5b3-4b8e-bc50-4a4b860c9418 tempest-ServerDiagnosticsV248Test-635217058 tempest-ServerDiagnosticsV248Test-635217058-project-member] Task: {'id': task-1801929, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1397.220838] env[62519]: DEBUG nova.network.neutron [req-3a56f5f6-7698-408c-83dd-972d724e2652 req-e4763236-30c7-467e-aa90-e0a45a8995af service nova] [instance: c8b7568b-ba07-4f65-818b-f84910209361] Updated VIF entry in instance network info cache for port e03efe42-da32-408a-a635-c94ee5a55303. 
{{(pid=62519) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1397.221230] env[62519]: DEBUG nova.network.neutron [req-3a56f5f6-7698-408c-83dd-972d724e2652 req-e4763236-30c7-467e-aa90-e0a45a8995af service nova] [instance: c8b7568b-ba07-4f65-818b-f84910209361] Updating instance_info_cache with network_info: [{"id": "e03efe42-da32-408a-a635-c94ee5a55303", "address": "fa:16:3e:e0:64:4d", "network": {"id": "b4c20e4e-b4f0-4757-9ab0-b74eef10b678", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.52", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "4069337684d348e0a1ab4eb6a1a2b14d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fe99da4f-5630-4afd-918b-b327193d8489", "external-id": "nsx-vlan-transportzone-688", "segmentation_id": 688, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape03efe42-da", "ovs_interfaceid": "e03efe42-da32-408a-a635-c94ee5a55303", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1397.229324] env[62519]: DEBUG nova.compute.manager [None req-2bcdd96e-4e47-4b74-bed4-3230c87035c0 tempest-ServersV294TestFqdnHostnames-312089260 tempest-ServersV294TestFqdnHostnames-312089260-project-member] [instance: 2b391628-18a2-4606-8c59-58ba642cee50] Start building block device mappings for instance. 
{{(pid=62519) _build_resources /opt/stack/nova/nova/compute/manager.py:2873}} [ 1397.527360] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96e28ffb-9685-4b07-8bd6-f1cfeb74660a {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1397.538263] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-988adadd-2b6c-4e5b-b0aa-e9b885fc95bb {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1397.574593] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09b828b5-8e71-40d7-b2a4-3aa17c8c4a2c {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1397.583560] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53b1f7ba-31b5-4da9-b7fa-3d18618f9956 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1397.598353] env[62519]: DEBUG nova.compute.provider_tree [None req-94daf7e7-f2b3-4130-bb71-0b7053690311 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Inventory has not changed in ProviderTree for provider: f8ca0d98-9158-4b85-ae0e-b106f966dd44 {{(pid=62519) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1397.649225] env[62519]: DEBUG oslo_vmware.api [None req-2c6d3fbe-1808-4f6d-8aed-7a7c11b6565e tempest-ServersAdminNegativeTestJSON-2096466474 tempest-ServersAdminNegativeTestJSON-2096466474-project-member] Task: {'id': task-1801928, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1397.677171] env[62519]: DEBUG oslo_vmware.api [None req-7bb4ea3e-e5b3-4b8e-bc50-4a4b860c9418 tempest-ServerDiagnosticsV248Test-635217058 tempest-ServerDiagnosticsV248Test-635217058-project-member] Task: {'id': task-1801929, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1397.724564] env[62519]: DEBUG oslo_concurrency.lockutils [req-3a56f5f6-7698-408c-83dd-972d724e2652 req-e4763236-30c7-467e-aa90-e0a45a8995af service nova] Releasing lock "refresh_cache-c8b7568b-ba07-4f65-818b-f84910209361" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1397.749845] env[62519]: DEBUG oslo_concurrency.lockutils [None req-93eb857c-a392-41fa-ab77-ecb0c92fa9f6 tempest-ServerDiagnosticsNegativeTest-1844110598 tempest-ServerDiagnosticsNegativeTest-1844110598-project-member] Lock "c616d8ec-f28a-4430-a336-1ea4790fd511" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 18.167s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1398.102293] env[62519]: DEBUG nova.scheduler.client.report [None req-94daf7e7-f2b3-4130-bb71-0b7053690311 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Inventory has not changed for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 159, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1398.160206] env[62519]: DEBUG oslo_vmware.api [None req-2c6d3fbe-1808-4f6d-8aed-7a7c11b6565e tempest-ServersAdminNegativeTestJSON-2096466474 tempest-ServersAdminNegativeTestJSON-2096466474-project-member] Task: {'id': task-1801928, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1398.179393] env[62519]: DEBUG oslo_vmware.api [None req-7bb4ea3e-e5b3-4b8e-bc50-4a4b860c9418 tempest-ServerDiagnosticsV248Test-635217058 tempest-ServerDiagnosticsV248Test-635217058-project-member] Task: {'id': task-1801929, 'name': ReconfigVM_Task, 'duration_secs': 0.720679} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1398.180241] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-7bb4ea3e-e5b3-4b8e-bc50-4a4b860c9418 tempest-ServerDiagnosticsV248Test-635217058 tempest-ServerDiagnosticsV248Test-635217058-project-member] [instance: ceadcb5e-ee82-4441-b046-f79b973ec05e] Reconfigured VM instance instance-00000005 to attach disk [datastore1] ceadcb5e-ee82-4441-b046-f79b973ec05e/ceadcb5e-ee82-4441-b046-f79b973ec05e.vmdk or device None with type sparse {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1398.180860] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-760124f9-b79e-4e0d-8690-41066e335b53 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1398.188327] env[62519]: DEBUG oslo_vmware.api [None req-7bb4ea3e-e5b3-4b8e-bc50-4a4b860c9418 tempest-ServerDiagnosticsV248Test-635217058 tempest-ServerDiagnosticsV248Test-635217058-project-member] Waiting for the task: (returnval){ [ 1398.188327] env[62519]: value = "task-1801930" [ 1398.188327] env[62519]: _type = "Task" [ 1398.188327] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1398.199023] env[62519]: DEBUG oslo_vmware.api [None req-7bb4ea3e-e5b3-4b8e-bc50-4a4b860c9418 tempest-ServerDiagnosticsV248Test-635217058 tempest-ServerDiagnosticsV248Test-635217058-project-member] Task: {'id': task-1801930, 'name': Rename_Task} progress is 5%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1398.200032] env[62519]: DEBUG nova.network.neutron [None req-2bcdd96e-4e47-4b74-bed4-3230c87035c0 tempest-ServersV294TestFqdnHostnames-312089260 tempest-ServersV294TestFqdnHostnames-312089260-project-member] [instance: 2b391628-18a2-4606-8c59-58ba642cee50] Successfully created port: ac832bcb-7bb4-49de-be18-a8fd0d8ee16e {{(pid=62519) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1398.241136] env[62519]: DEBUG nova.compute.manager [None req-2bcdd96e-4e47-4b74-bed4-3230c87035c0 tempest-ServersV294TestFqdnHostnames-312089260 tempest-ServersV294TestFqdnHostnames-312089260-project-member] [instance: 2b391628-18a2-4606-8c59-58ba642cee50] Start spawning the instance on the hypervisor. {{(pid=62519) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2647}} [ 1398.252733] env[62519]: DEBUG nova.compute.manager [None req-b63f15ff-942a-4ceb-acce-0511e4a5332d tempest-ServersWithSpecificFlavorTestJSON-2102225660 tempest-ServersWithSpecificFlavorTestJSON-2102225660-project-member] [instance: 029ee07c-705d-452b-9b14-385d69f2fbbb] Starting instance... 
{{(pid=62519) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2441}} [ 1398.273344] env[62519]: DEBUG nova.virt.hardware [None req-2bcdd96e-4e47-4b74-bed4-3230c87035c0 tempest-ServersV294TestFqdnHostnames-312089260 tempest-ServersV294TestFqdnHostnames-312089260-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T07:56:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T07:55:55Z,direct_url=,disk_format='vmdk',id=15793716-f1d9-4a86-9030-717adf498693,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4069337684d348e0a1ab4eb6a1a2b14d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T07:55:56Z,virtual_size=,visibility=), allow threads: False {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1398.273344] env[62519]: DEBUG nova.virt.hardware [None req-2bcdd96e-4e47-4b74-bed4-3230c87035c0 tempest-ServersV294TestFqdnHostnames-312089260 tempest-ServersV294TestFqdnHostnames-312089260-project-member] Flavor limits 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1398.273344] env[62519]: DEBUG nova.virt.hardware [None req-2bcdd96e-4e47-4b74-bed4-3230c87035c0 tempest-ServersV294TestFqdnHostnames-312089260 tempest-ServersV294TestFqdnHostnames-312089260-project-member] Image limits 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1398.274915] env[62519]: DEBUG nova.virt.hardware [None req-2bcdd96e-4e47-4b74-bed4-3230c87035c0 tempest-ServersV294TestFqdnHostnames-312089260 tempest-ServersV294TestFqdnHostnames-312089260-project-member] Flavor pref 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1398.274915] env[62519]: DEBUG nova.virt.hardware [None req-2bcdd96e-4e47-4b74-bed4-3230c87035c0 tempest-ServersV294TestFqdnHostnames-312089260 tempest-ServersV294TestFqdnHostnames-312089260-project-member] Image pref 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1398.274915] env[62519]: DEBUG nova.virt.hardware [None req-2bcdd96e-4e47-4b74-bed4-3230c87035c0 tempest-ServersV294TestFqdnHostnames-312089260 tempest-ServersV294TestFqdnHostnames-312089260-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1398.274915] env[62519]: DEBUG nova.virt.hardware [None req-2bcdd96e-4e47-4b74-bed4-3230c87035c0 tempest-ServersV294TestFqdnHostnames-312089260 tempest-ServersV294TestFqdnHostnames-312089260-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1398.274915] env[62519]: DEBUG nova.virt.hardware [None req-2bcdd96e-4e47-4b74-bed4-3230c87035c0 tempest-ServersV294TestFqdnHostnames-312089260 tempest-ServersV294TestFqdnHostnames-312089260-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62519) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:475}} [ 1398.275343] env[62519]: DEBUG nova.virt.hardware [None req-2bcdd96e-4e47-4b74-bed4-3230c87035c0 tempest-ServersV294TestFqdnHostnames-312089260 tempest-ServersV294TestFqdnHostnames-312089260-project-member] Got 1 possible topologies {{(pid=62519) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1398.275343] env[62519]: DEBUG nova.virt.hardware [None req-2bcdd96e-4e47-4b74-bed4-3230c87035c0 tempest-ServersV294TestFqdnHostnames-312089260 tempest-ServersV294TestFqdnHostnames-312089260-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1398.275463] env[62519]: DEBUG nova.virt.hardware [None req-2bcdd96e-4e47-4b74-bed4-3230c87035c0 tempest-ServersV294TestFqdnHostnames-312089260 tempest-ServersV294TestFqdnHostnames-312089260-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1398.276827] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e5a5b65-8200-4f06-8881-ec525caadd21 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1398.286967] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8c5e72b-710b-402d-9e73-c798663f7da7 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1398.312304] env[62519]: DEBUG nova.network.neutron [None req-e8efc79a-e5f9-47fc-b2c3-ac5d306260b4 tempest-ServerDiagnosticsTest-1027319288 tempest-ServerDiagnosticsTest-1027319288-project-member] [instance: 1118c1e5-1aa8-4f52-9fb9-e86531bf83d1] Successfully updated port: 03319779-1123-4b6b-899b-834771303eff {{(pid=62519) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1398.358719] env[62519]: DEBUG nova.network.neutron [-] [instance: f6e29557-05ad-4a11-bd01-0315926c0413] Updating instance_info_cache with network_info: [] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1398.608964] env[62519]: DEBUG oslo_concurrency.lockutils [None req-94daf7e7-f2b3-4130-bb71-0b7053690311 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.393s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1398.609726] env[62519]: DEBUG nova.compute.manager [None req-94daf7e7-f2b3-4130-bb71-0b7053690311 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] [instance: 11d4a010-959f-4f53-94dc-7499007612ad] Start building networks asynchronously for instance. 
{{(pid=62519) _build_resources /opt/stack/nova/nova/compute/manager.py:2838}} [ 1398.612683] env[62519]: DEBUG oslo_concurrency.lockutils [None req-0f0e68dd-2aa4-4309-84c5-3072ca2b5eec tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 10.884s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1398.615885] env[62519]: INFO nova.compute.claims [None req-0f0e68dd-2aa4-4309-84c5-3072ca2b5eec tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] [instance: 34d2991e-b6df-473d-8994-e45ff57ef131] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1398.659855] env[62519]: DEBUG oslo_vmware.api [None req-2c6d3fbe-1808-4f6d-8aed-7a7c11b6565e tempest-ServersAdminNegativeTestJSON-2096466474 tempest-ServersAdminNegativeTestJSON-2096466474-project-member] Task: {'id': task-1801928, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.874569} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1398.664139] env[62519]: DEBUG oslo_concurrency.lockutils [None req-82219a8e-d6bb-4ff7-a1ab-422694a4d0a5 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Acquiring lock "8b178cc0-db79-4ec2-8962-f31b936f8eff" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1398.664413] env[62519]: DEBUG oslo_concurrency.lockutils [None req-82219a8e-d6bb-4ff7-a1ab-422694a4d0a5 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Lock "8b178cc0-db79-4ec2-8962-f31b936f8eff" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1398.664637] env[62519]: DEBUG nova.compute.manager [None req-82219a8e-d6bb-4ff7-a1ab-422694a4d0a5 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] [instance: 8b178cc0-db79-4ec2-8962-f31b936f8eff] Checking state {{(pid=62519) _get_power_state /opt/stack/nova/nova/compute/manager.py:1799}} [ 1398.664997] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-2c6d3fbe-1808-4f6d-8aed-7a7c11b6565e tempest-ServersAdminNegativeTestJSON-2096466474 tempest-ServersAdminNegativeTestJSON-2096466474-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk to [datastore1] c8b7568b-ba07-4f65-818b-f84910209361/c8b7568b-ba07-4f65-818b-f84910209361.vmdk {{(pid=62519) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1398.665138] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-2c6d3fbe-1808-4f6d-8aed-7a7c11b6565e tempest-ServersAdminNegativeTestJSON-2096466474 tempest-ServersAdminNegativeTestJSON-2096466474-project-member] [instance: c8b7568b-ba07-4f65-818b-f84910209361] Extending root virtual disk to 1048576 {{(pid=62519) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1398.666263] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39d983fc-10b4-4bfb-b584-65dbeb3e209a {{(pid=62519) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1398.671774] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-c77d97ef-e27e-407b-8e46-97aabb1eb185 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1398.680318] env[62519]: DEBUG nova.compute.manager [None req-82219a8e-d6bb-4ff7-a1ab-422694a4d0a5 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] [instance: 8b178cc0-db79-4ec2-8962-f31b936f8eff] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=62519) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3408}} [ 1398.680318] env[62519]: DEBUG nova.objects.instance [None req-82219a8e-d6bb-4ff7-a1ab-422694a4d0a5 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Lazy-loading 'flavor' on Instance uuid 8b178cc0-db79-4ec2-8962-f31b936f8eff {{(pid=62519) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1398.685594] env[62519]: DEBUG oslo_vmware.api [None req-2c6d3fbe-1808-4f6d-8aed-7a7c11b6565e tempest-ServersAdminNegativeTestJSON-2096466474 tempest-ServersAdminNegativeTestJSON-2096466474-project-member] Waiting for the task: (returnval){ [ 1398.685594] env[62519]: value = "task-1801931" [ 1398.685594] env[62519]: _type = "Task" [ 1398.685594] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1398.699760] env[62519]: DEBUG oslo_vmware.api [None req-2c6d3fbe-1808-4f6d-8aed-7a7c11b6565e tempest-ServersAdminNegativeTestJSON-2096466474 tempest-ServersAdminNegativeTestJSON-2096466474-project-member] Task: {'id': task-1801931, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1398.703402] env[62519]: DEBUG oslo_vmware.api [None req-7bb4ea3e-e5b3-4b8e-bc50-4a4b860c9418 tempest-ServerDiagnosticsV248Test-635217058 tempest-ServerDiagnosticsV248Test-635217058-project-member] Task: {'id': task-1801930, 'name': Rename_Task, 'duration_secs': 0.219266} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1398.704285] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-7bb4ea3e-e5b3-4b8e-bc50-4a4b860c9418 tempest-ServerDiagnosticsV248Test-635217058 tempest-ServerDiagnosticsV248Test-635217058-project-member] [instance: ceadcb5e-ee82-4441-b046-f79b973ec05e] Powering on the VM {{(pid=62519) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1398.704285] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-cc006c66-fa35-435a-8e7a-cfa7428bd9db {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1398.715231] env[62519]: DEBUG oslo_vmware.api [None req-7bb4ea3e-e5b3-4b8e-bc50-4a4b860c9418 tempest-ServerDiagnosticsV248Test-635217058 tempest-ServerDiagnosticsV248Test-635217058-project-member] Waiting for the task: (returnval){ [ 1398.715231] env[62519]: value = "task-1801932" [ 1398.715231] env[62519]: _type = "Task" [ 1398.715231] env[62519]: } to complete. 
{{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1398.729807] env[62519]: DEBUG oslo_vmware.api [None req-7bb4ea3e-e5b3-4b8e-bc50-4a4b860c9418 tempest-ServerDiagnosticsV248Test-635217058 tempest-ServerDiagnosticsV248Test-635217058-project-member] Task: {'id': task-1801932, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1398.784812] env[62519]: DEBUG oslo_concurrency.lockutils [None req-b63f15ff-942a-4ceb-acce-0511e4a5332d tempest-ServersWithSpecificFlavorTestJSON-2102225660 tempest-ServersWithSpecificFlavorTestJSON-2102225660-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1398.813707] env[62519]: DEBUG oslo_concurrency.lockutils [None req-e8efc79a-e5f9-47fc-b2c3-ac5d306260b4 tempest-ServerDiagnosticsTest-1027319288 tempest-ServerDiagnosticsTest-1027319288-project-member] Acquiring lock "refresh_cache-1118c1e5-1aa8-4f52-9fb9-e86531bf83d1" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1398.816401] env[62519]: DEBUG oslo_concurrency.lockutils [None req-e8efc79a-e5f9-47fc-b2c3-ac5d306260b4 tempest-ServerDiagnosticsTest-1027319288 tempest-ServerDiagnosticsTest-1027319288-project-member] Acquired lock "refresh_cache-1118c1e5-1aa8-4f52-9fb9-e86531bf83d1" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1398.816401] env[62519]: DEBUG nova.network.neutron [None req-e8efc79a-e5f9-47fc-b2c3-ac5d306260b4 tempest-ServerDiagnosticsTest-1027319288 tempest-ServerDiagnosticsTest-1027319288-project-member] [instance: 1118c1e5-1aa8-4f52-9fb9-e86531bf83d1] Building network info cache for instance {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1398.862679] env[62519]: INFO nova.compute.manager [-] [instance: f6e29557-05ad-4a11-bd01-0315926c0413] Took 1.89 seconds to deallocate network for instance. [ 1399.122913] env[62519]: DEBUG nova.compute.utils [None req-94daf7e7-f2b3-4130-bb71-0b7053690311 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Using /dev/sd instead of None {{(pid=62519) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1399.125085] env[62519]: DEBUG nova.compute.manager [None req-94daf7e7-f2b3-4130-bb71-0b7053690311 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] [instance: 11d4a010-959f-4f53-94dc-7499007612ad] Allocating IP information in the background. 
{{(pid=62519) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1989}} [ 1399.128385] env[62519]: DEBUG nova.network.neutron [None req-94daf7e7-f2b3-4130-bb71-0b7053690311 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] [instance: 11d4a010-959f-4f53-94dc-7499007612ad] allocate_for_instance() {{(pid=62519) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1399.203940] env[62519]: DEBUG oslo_vmware.api [None req-2c6d3fbe-1808-4f6d-8aed-7a7c11b6565e tempest-ServersAdminNegativeTestJSON-2096466474 tempest-ServersAdminNegativeTestJSON-2096466474-project-member] Task: {'id': task-1801931, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.076015} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1399.207576] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-2c6d3fbe-1808-4f6d-8aed-7a7c11b6565e tempest-ServersAdminNegativeTestJSON-2096466474 tempest-ServersAdminNegativeTestJSON-2096466474-project-member] [instance: c8b7568b-ba07-4f65-818b-f84910209361] Extended root virtual disk {{(pid=62519) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1399.208600] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08e33ce3-a474-42a8-a762-948a138f515a {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1399.237758] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-2c6d3fbe-1808-4f6d-8aed-7a7c11b6565e tempest-ServersAdminNegativeTestJSON-2096466474 tempest-ServersAdminNegativeTestJSON-2096466474-project-member] [instance: c8b7568b-ba07-4f65-818b-f84910209361] Reconfiguring VM instance instance-00000006 to attach disk [datastore1] c8b7568b-ba07-4f65-818b-f84910209361/c8b7568b-ba07-4f65-818b-f84910209361.vmdk or device None with type sparse {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1399.238682] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-bc8ea55c-89d3-41d8-b198-5ff21bfb956c {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1399.257382] env[62519]: DEBUG oslo_vmware.api [None req-7bb4ea3e-e5b3-4b8e-bc50-4a4b860c9418 tempest-ServerDiagnosticsV248Test-635217058 tempest-ServerDiagnosticsV248Test-635217058-project-member] Task: {'id': task-1801932, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1399.258838] env[62519]: DEBUG nova.policy [None req-94daf7e7-f2b3-4130-bb71-0b7053690311 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '81bb350c0ff54453b99b45ac84a82935', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '549cc35f5ff249f6bf22c67872883db0', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62519) authorize /opt/stack/nova/nova/policy.py:192}} [ 1399.266837] env[62519]: DEBUG oslo_vmware.api [None req-2c6d3fbe-1808-4f6d-8aed-7a7c11b6565e tempest-ServersAdminNegativeTestJSON-2096466474 tempest-ServersAdminNegativeTestJSON-2096466474-project-member] Waiting for the task: (returnval){ [ 1399.266837] env[62519]: value = "task-1801933" [ 1399.266837] env[62519]: _type = "Task" [ 1399.266837] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1399.284420] env[62519]: DEBUG oslo_vmware.api [None req-2c6d3fbe-1808-4f6d-8aed-7a7c11b6565e tempest-ServersAdminNegativeTestJSON-2096466474 tempest-ServersAdminNegativeTestJSON-2096466474-project-member] Task: {'id': task-1801933, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1399.370410] env[62519]: DEBUG oslo_concurrency.lockutils [None req-88c1f75f-e69f-401c-9a65-440f1169c628 tempest-DeleteServersAdminTestJSON-1841953118 tempest-DeleteServersAdminTestJSON-1841953118-project-admin] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1399.435518] env[62519]: DEBUG nova.network.neutron [None req-e8efc79a-e5f9-47fc-b2c3-ac5d306260b4 tempest-ServerDiagnosticsTest-1027319288 tempest-ServerDiagnosticsTest-1027319288-project-member] [instance: 1118c1e5-1aa8-4f52-9fb9-e86531bf83d1] Instance cache missing network info. 
{{(pid=62519) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1399.520050] env[62519]: DEBUG nova.compute.manager [req-46e08b46-03dd-4489-87b5-41c786ac1162 req-361a1b7d-4525-40ae-8fc1-63da65e13f9a service nova] [instance: 1118c1e5-1aa8-4f52-9fb9-e86531bf83d1] Received event network-vif-plugged-03319779-1123-4b6b-899b-834771303eff {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1399.521127] env[62519]: DEBUG oslo_concurrency.lockutils [req-46e08b46-03dd-4489-87b5-41c786ac1162 req-361a1b7d-4525-40ae-8fc1-63da65e13f9a service nova] Acquiring lock "1118c1e5-1aa8-4f52-9fb9-e86531bf83d1-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1399.521127] env[62519]: DEBUG oslo_concurrency.lockutils [req-46e08b46-03dd-4489-87b5-41c786ac1162 req-361a1b7d-4525-40ae-8fc1-63da65e13f9a service nova] Lock "1118c1e5-1aa8-4f52-9fb9-e86531bf83d1-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1399.521127] env[62519]: DEBUG oslo_concurrency.lockutils [req-46e08b46-03dd-4489-87b5-41c786ac1162 req-361a1b7d-4525-40ae-8fc1-63da65e13f9a service nova] Lock "1118c1e5-1aa8-4f52-9fb9-e86531bf83d1-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1399.521127] env[62519]: DEBUG nova.compute.manager [req-46e08b46-03dd-4489-87b5-41c786ac1162 req-361a1b7d-4525-40ae-8fc1-63da65e13f9a service nova] [instance: 1118c1e5-1aa8-4f52-9fb9-e86531bf83d1] No waiting events found dispatching network-vif-plugged-03319779-1123-4b6b-899b-834771303eff {{(pid=62519) pop_instance_event /opt/stack/nova/nova/compute/manager.py:323}} [ 1399.521127] env[62519]: WARNING nova.compute.manager [req-46e08b46-03dd-4489-87b5-41c786ac1162 req-361a1b7d-4525-40ae-8fc1-63da65e13f9a service nova] [instance: 1118c1e5-1aa8-4f52-9fb9-e86531bf83d1] Received unexpected event network-vif-plugged-03319779-1123-4b6b-899b-834771303eff for instance with vm_state building and task_state spawning. [ 1399.521479] env[62519]: DEBUG nova.compute.manager [req-46e08b46-03dd-4489-87b5-41c786ac1162 req-361a1b7d-4525-40ae-8fc1-63da65e13f9a service nova] [instance: f6e29557-05ad-4a11-bd01-0315926c0413] Received event network-vif-deleted-f5e6eec5-40b5-467f-8e50-4c4bc6cf8108 {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1399.635419] env[62519]: DEBUG nova.compute.manager [None req-94daf7e7-f2b3-4130-bb71-0b7053690311 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] [instance: 11d4a010-959f-4f53-94dc-7499007612ad] Start building block device mappings for instance. 
{{(pid=62519) _build_resources /opt/stack/nova/nova/compute/manager.py:2873}} [ 1399.700115] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-82219a8e-d6bb-4ff7-a1ab-422694a4d0a5 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] [instance: 8b178cc0-db79-4ec2-8962-f31b936f8eff] Powering off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1399.700115] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-391bd689-4a9b-4d1d-ac35-704787462c99 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1399.710283] env[62519]: DEBUG oslo_vmware.api [None req-82219a8e-d6bb-4ff7-a1ab-422694a4d0a5 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Waiting for the task: (returnval){ [ 1399.710283] env[62519]: value = "task-1801934" [ 1399.710283] env[62519]: _type = "Task" [ 1399.710283] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1399.753330] env[62519]: DEBUG oslo_vmware.api [None req-82219a8e-d6bb-4ff7-a1ab-422694a4d0a5 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Task: {'id': task-1801934, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1399.753943] env[62519]: DEBUG oslo_vmware.api [None req-7bb4ea3e-e5b3-4b8e-bc50-4a4b860c9418 tempest-ServerDiagnosticsV248Test-635217058 tempest-ServerDiagnosticsV248Test-635217058-project-member] Task: {'id': task-1801932, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1399.782831] env[62519]: DEBUG oslo_vmware.api [None req-2c6d3fbe-1808-4f6d-8aed-7a7c11b6565e tempest-ServersAdminNegativeTestJSON-2096466474 tempest-ServersAdminNegativeTestJSON-2096466474-project-member] Task: {'id': task-1801933, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1400.045221] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7c4835c-1ad6-4b95-bea4-9cdf55406eeb {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1400.055407] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f99f37e-75c8-476f-96a1-610aee4dc029 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1400.098449] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4648b68a-cd60-4e51-8387-86082cd17d95 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1400.107430] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c90b4bc-94c5-47a0-be8a-77f33482d4d9 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1400.121997] env[62519]: DEBUG nova.compute.provider_tree [None req-0f0e68dd-2aa4-4309-84c5-3072ca2b5eec tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Inventory has not changed in ProviderTree for provider: f8ca0d98-9158-4b85-ae0e-b106f966dd44 {{(pid=62519) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1400.227382] env[62519]: DEBUG oslo_vmware.api [None req-82219a8e-d6bb-4ff7-a1ab-422694a4d0a5 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Task: {'id': task-1801934, 'name': PowerOffVM_Task, 'duration_secs': 0.194236} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1400.231426] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-82219a8e-d6bb-4ff7-a1ab-422694a4d0a5 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] [instance: 8b178cc0-db79-4ec2-8962-f31b936f8eff] Powered off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1400.231648] env[62519]: DEBUG nova.compute.manager [None req-82219a8e-d6bb-4ff7-a1ab-422694a4d0a5 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] [instance: 8b178cc0-db79-4ec2-8962-f31b936f8eff] Checking state {{(pid=62519) _get_power_state /opt/stack/nova/nova/compute/manager.py:1799}} [ 1400.237704] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88aec827-ca8e-4cb9-b281-f3a9db3e9e0e {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1400.246367] env[62519]: DEBUG oslo_vmware.api [None req-7bb4ea3e-e5b3-4b8e-bc50-4a4b860c9418 tempest-ServerDiagnosticsV248Test-635217058 tempest-ServerDiagnosticsV248Test-635217058-project-member] Task: {'id': task-1801932, 'name': PowerOnVM_Task, 'duration_secs': 1.318397} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1400.248366] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-7bb4ea3e-e5b3-4b8e-bc50-4a4b860c9418 tempest-ServerDiagnosticsV248Test-635217058 tempest-ServerDiagnosticsV248Test-635217058-project-member] [instance: ceadcb5e-ee82-4441-b046-f79b973ec05e] Powered on the VM {{(pid=62519) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1400.249511] env[62519]: INFO nova.compute.manager [None req-7bb4ea3e-e5b3-4b8e-bc50-4a4b860c9418 tempest-ServerDiagnosticsV248Test-635217058 tempest-ServerDiagnosticsV248Test-635217058-project-member] [instance: ceadcb5e-ee82-4441-b046-f79b973ec05e] Took 6.90 seconds to spawn the instance on the hypervisor. [ 1400.249511] env[62519]: DEBUG nova.compute.manager [None req-7bb4ea3e-e5b3-4b8e-bc50-4a4b860c9418 tempest-ServerDiagnosticsV248Test-635217058 tempest-ServerDiagnosticsV248Test-635217058-project-member] [instance: ceadcb5e-ee82-4441-b046-f79b973ec05e] Checking state {{(pid=62519) _get_power_state /opt/stack/nova/nova/compute/manager.py:1799}} [ 1400.251380] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68148683-ffd9-4ee4-90e2-e88068076682 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1400.278676] env[62519]: DEBUG oslo_vmware.api [None req-2c6d3fbe-1808-4f6d-8aed-7a7c11b6565e tempest-ServersAdminNegativeTestJSON-2096466474 tempest-ServersAdminNegativeTestJSON-2096466474-project-member] Task: {'id': task-1801933, 'name': ReconfigVM_Task, 'duration_secs': 0.633816} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1400.278676] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-2c6d3fbe-1808-4f6d-8aed-7a7c11b6565e tempest-ServersAdminNegativeTestJSON-2096466474 tempest-ServersAdminNegativeTestJSON-2096466474-project-member] [instance: c8b7568b-ba07-4f65-818b-f84910209361] Reconfigured VM instance instance-00000006 to attach disk [datastore1] c8b7568b-ba07-4f65-818b-f84910209361/c8b7568b-ba07-4f65-818b-f84910209361.vmdk or device None with type sparse {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1400.279392] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-8ec22596-ea97-4b22-b4be-eb36939aa8c7 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1400.286774] env[62519]: DEBUG oslo_vmware.api [None req-2c6d3fbe-1808-4f6d-8aed-7a7c11b6565e tempest-ServersAdminNegativeTestJSON-2096466474 tempest-ServersAdminNegativeTestJSON-2096466474-project-member] Waiting for the task: (returnval){ [ 1400.286774] env[62519]: value = "task-1801935" [ 1400.286774] env[62519]: _type = "Task" [ 1400.286774] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1400.301823] env[62519]: DEBUG oslo_vmware.api [None req-2c6d3fbe-1808-4f6d-8aed-7a7c11b6565e tempest-ServersAdminNegativeTestJSON-2096466474 tempest-ServersAdminNegativeTestJSON-2096466474-project-member] Task: {'id': task-1801935, 'name': Rename_Task} progress is 10%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1400.434516] env[62519]: DEBUG nova.network.neutron [None req-e8efc79a-e5f9-47fc-b2c3-ac5d306260b4 tempest-ServerDiagnosticsTest-1027319288 tempest-ServerDiagnosticsTest-1027319288-project-member] [instance: 1118c1e5-1aa8-4f52-9fb9-e86531bf83d1] Updating instance_info_cache with network_info: [{"id": "03319779-1123-4b6b-899b-834771303eff", "address": "fa:16:3e:0f:2c:0f", "network": {"id": "b4c20e4e-b4f0-4757-9ab0-b74eef10b678", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.93", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "4069337684d348e0a1ab4eb6a1a2b14d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fe99da4f-5630-4afd-918b-b327193d8489", "external-id": "nsx-vlan-transportzone-688", "segmentation_id": 688, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap03319779-11", "ovs_interfaceid": "03319779-1123-4b6b-899b-834771303eff", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1400.519534] env[62519]: DEBUG nova.compute.manager [None req-62794cdc-f128-48b3-a56b-aae18e2e0aa4 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] [instance: fe350d30-6fbd-4813-9634-ed05984fecfd] Stashing vm_state: active {{(pid=62519) _prep_resize /opt/stack/nova/nova/compute/manager.py:6081}} [ 1400.626264] env[62519]: DEBUG nova.scheduler.client.report [None req-0f0e68dd-2aa4-4309-84c5-3072ca2b5eec tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Inventory has not changed for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 159, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1400.654549] env[62519]: DEBUG nova.compute.manager [None req-94daf7e7-f2b3-4130-bb71-0b7053690311 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] [instance: 11d4a010-959f-4f53-94dc-7499007612ad] Start spawning the instance on the hypervisor. 
{{(pid=62519) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2647}} [ 1400.695664] env[62519]: DEBUG nova.virt.hardware [None req-94daf7e7-f2b3-4130-bb71-0b7053690311 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T07:56:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T07:55:55Z,direct_url=,disk_format='vmdk',id=15793716-f1d9-4a86-9030-717adf498693,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4069337684d348e0a1ab4eb6a1a2b14d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T07:55:56Z,virtual_size=,visibility=), allow threads: False {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1400.695885] env[62519]: DEBUG nova.virt.hardware [None req-94daf7e7-f2b3-4130-bb71-0b7053690311 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Flavor limits 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1400.696051] env[62519]: DEBUG nova.virt.hardware [None req-94daf7e7-f2b3-4130-bb71-0b7053690311 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Image limits 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1400.696230] env[62519]: DEBUG nova.virt.hardware [None req-94daf7e7-f2b3-4130-bb71-0b7053690311 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Flavor pref 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1400.696372] env[62519]: DEBUG nova.virt.hardware [None req-94daf7e7-f2b3-4130-bb71-0b7053690311 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Image pref 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1400.696512] env[62519]: DEBUG nova.virt.hardware [None req-94daf7e7-f2b3-4130-bb71-0b7053690311 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1400.696710] env[62519]: DEBUG nova.virt.hardware [None req-94daf7e7-f2b3-4130-bb71-0b7053690311 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1400.696859] env[62519]: DEBUG nova.virt.hardware [None req-94daf7e7-f2b3-4130-bb71-0b7053690311 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62519) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1400.697458] env[62519]: DEBUG 
nova.virt.hardware [None req-94daf7e7-f2b3-4130-bb71-0b7053690311 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Got 1 possible topologies {{(pid=62519) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1400.697695] env[62519]: DEBUG nova.virt.hardware [None req-94daf7e7-f2b3-4130-bb71-0b7053690311 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1400.697874] env[62519]: DEBUG nova.virt.hardware [None req-94daf7e7-f2b3-4130-bb71-0b7053690311 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1400.699022] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d14c873-9e4e-4d3e-8c65-3025926ef10f {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1400.708454] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3e24fc3-794b-454d-a8cd-4776e1d190cf {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1400.758762] env[62519]: DEBUG oslo_concurrency.lockutils [None req-82219a8e-d6bb-4ff7-a1ab-422694a4d0a5 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Lock "8b178cc0-db79-4ec2-8962-f31b936f8eff" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 2.094s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1400.761139] env[62519]: DEBUG nova.network.neutron [None req-94daf7e7-f2b3-4130-bb71-0b7053690311 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] [instance: 11d4a010-959f-4f53-94dc-7499007612ad] Successfully created port: ca1a3bbf-3f10-4a96-a67d-b77464ab25e7 {{(pid=62519) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1400.770279] env[62519]: INFO nova.compute.manager [None req-7bb4ea3e-e5b3-4b8e-bc50-4a4b860c9418 tempest-ServerDiagnosticsV248Test-635217058 tempest-ServerDiagnosticsV248Test-635217058-project-member] [instance: ceadcb5e-ee82-4441-b046-f79b973ec05e] Took 19.42 seconds to build instance. [ 1400.799971] env[62519]: DEBUG oslo_vmware.api [None req-2c6d3fbe-1808-4f6d-8aed-7a7c11b6565e tempest-ServersAdminNegativeTestJSON-2096466474 tempest-ServersAdminNegativeTestJSON-2096466474-project-member] Task: {'id': task-1801935, 'name': Rename_Task, 'duration_secs': 0.395232} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1400.800497] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-2c6d3fbe-1808-4f6d-8aed-7a7c11b6565e tempest-ServersAdminNegativeTestJSON-2096466474 tempest-ServersAdminNegativeTestJSON-2096466474-project-member] [instance: c8b7568b-ba07-4f65-818b-f84910209361] Powering on the VM {{(pid=62519) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1400.800775] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-1cab46e4-1f92-4fc7-9782-0cb704d4459f {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1400.806863] env[62519]: DEBUG oslo_vmware.api [None req-2c6d3fbe-1808-4f6d-8aed-7a7c11b6565e tempest-ServersAdminNegativeTestJSON-2096466474 tempest-ServersAdminNegativeTestJSON-2096466474-project-member] Waiting for the task: (returnval){ [ 1400.806863] env[62519]: value = "task-1801936" [ 1400.806863] env[62519]: _type = "Task" [ 1400.806863] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1400.817946] env[62519]: DEBUG oslo_vmware.api [None req-2c6d3fbe-1808-4f6d-8aed-7a7c11b6565e tempest-ServersAdminNegativeTestJSON-2096466474 tempest-ServersAdminNegativeTestJSON-2096466474-project-member] Task: {'id': task-1801936, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1400.937373] env[62519]: DEBUG oslo_concurrency.lockutils [None req-e8efc79a-e5f9-47fc-b2c3-ac5d306260b4 tempest-ServerDiagnosticsTest-1027319288 tempest-ServerDiagnosticsTest-1027319288-project-member] Releasing lock "refresh_cache-1118c1e5-1aa8-4f52-9fb9-e86531bf83d1" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1400.937652] env[62519]: DEBUG nova.compute.manager [None req-e8efc79a-e5f9-47fc-b2c3-ac5d306260b4 tempest-ServerDiagnosticsTest-1027319288 tempest-ServerDiagnosticsTest-1027319288-project-member] [instance: 1118c1e5-1aa8-4f52-9fb9-e86531bf83d1] Instance network_info: |[{"id": "03319779-1123-4b6b-899b-834771303eff", "address": "fa:16:3e:0f:2c:0f", "network": {"id": "b4c20e4e-b4f0-4757-9ab0-b74eef10b678", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.93", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "4069337684d348e0a1ab4eb6a1a2b14d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fe99da4f-5630-4afd-918b-b327193d8489", "external-id": "nsx-vlan-transportzone-688", "segmentation_id": 688, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap03319779-11", "ovs_interfaceid": "03319779-1123-4b6b-899b-834771303eff", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62519) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2004}} [ 1400.938540] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None 
req-e8efc79a-e5f9-47fc-b2c3-ac5d306260b4 tempest-ServerDiagnosticsTest-1027319288 tempest-ServerDiagnosticsTest-1027319288-project-member] [instance: 1118c1e5-1aa8-4f52-9fb9-e86531bf83d1] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:0f:2c:0f', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'fe99da4f-5630-4afd-918b-b327193d8489', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '03319779-1123-4b6b-899b-834771303eff', 'vif_model': 'vmxnet3'}] {{(pid=62519) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1400.950755] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-e8efc79a-e5f9-47fc-b2c3-ac5d306260b4 tempest-ServerDiagnosticsTest-1027319288 tempest-ServerDiagnosticsTest-1027319288-project-member] Creating folder: Project (faf80934d7754c9ba0414f2e970979cf). Parent ref: group-v373567. {{(pid=62519) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1400.951301] env[62519]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-030d346c-88fb-41a9-b31c-76b55274311d {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1400.964991] env[62519]: INFO nova.virt.vmwareapi.vm_util [None req-e8efc79a-e5f9-47fc-b2c3-ac5d306260b4 tempest-ServerDiagnosticsTest-1027319288 tempest-ServerDiagnosticsTest-1027319288-project-member] Created folder: Project (faf80934d7754c9ba0414f2e970979cf) in parent group-v373567. [ 1400.965467] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-e8efc79a-e5f9-47fc-b2c3-ac5d306260b4 tempest-ServerDiagnosticsTest-1027319288 tempest-ServerDiagnosticsTest-1027319288-project-member] Creating folder: Instances. Parent ref: group-v373586. {{(pid=62519) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1400.965467] env[62519]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b7c0abeb-0ee3-46a7-b6be-dcbf0cbaa327 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1400.977334] env[62519]: INFO nova.virt.vmwareapi.vm_util [None req-e8efc79a-e5f9-47fc-b2c3-ac5d306260b4 tempest-ServerDiagnosticsTest-1027319288 tempest-ServerDiagnosticsTest-1027319288-project-member] Created folder: Instances in parent group-v373586. [ 1400.977596] env[62519]: DEBUG oslo.service.loopingcall [None req-e8efc79a-e5f9-47fc-b2c3-ac5d306260b4 tempest-ServerDiagnosticsTest-1027319288 tempest-ServerDiagnosticsTest-1027319288-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62519) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1400.977796] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1118c1e5-1aa8-4f52-9fb9-e86531bf83d1] Creating VM on the ESX host {{(pid=62519) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1400.978012] env[62519]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-142bd190-f3bb-48bb-bd9b-8bbbae1d113b {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1401.003098] env[62519]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1401.003098] env[62519]: value = "task-1801939" [ 1401.003098] env[62519]: _type = "Task" [ 1401.003098] env[62519]: } to complete. 
{{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1401.014696] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1801939, 'name': CreateVM_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1401.050756] env[62519]: DEBUG oslo_concurrency.lockutils [None req-62794cdc-f128-48b3-a56b-aae18e2e0aa4 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1401.132244] env[62519]: DEBUG oslo_concurrency.lockutils [None req-0f0e68dd-2aa4-4309-84c5-3072ca2b5eec tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.519s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1401.132886] env[62519]: DEBUG nova.compute.manager [None req-0f0e68dd-2aa4-4309-84c5-3072ca2b5eec tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] [instance: 34d2991e-b6df-473d-8994-e45ff57ef131] Start building networks asynchronously for instance. {{(pid=62519) _build_resources /opt/stack/nova/nova/compute/manager.py:2838}} [ 1401.140389] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 5.871s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1401.140845] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.001s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1401.141093] env[62519]: DEBUG nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62519) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1401.141451] env[62519]: DEBUG oslo_concurrency.lockutils [None req-4c3fd39d-ef1d-4593-8784-6a91635c8aff tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 5.263s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1401.143681] env[62519]: INFO nova.compute.claims [None req-4c3fd39d-ef1d-4593-8784-6a91635c8aff tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] [instance: 099112ae-569b-4853-bc47-b0b8b97d2525] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1401.149557] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69d8a921-f47e-477c-ac60-d1398148e67b {{(pid=62519) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1401.155012] env[62519]: DEBUG oslo_concurrency.lockutils [None req-34a2cefc-4314-45a8-b8ba-97221121738f tempest-ServerDiagnosticsNegativeTest-1844110598 tempest-ServerDiagnosticsNegativeTest-1844110598-project-member] Acquiring lock "c616d8ec-f28a-4430-a336-1ea4790fd511" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1401.155686] env[62519]: DEBUG oslo_concurrency.lockutils [None req-34a2cefc-4314-45a8-b8ba-97221121738f tempest-ServerDiagnosticsNegativeTest-1844110598 tempest-ServerDiagnosticsNegativeTest-1844110598-project-member] Lock "c616d8ec-f28a-4430-a336-1ea4790fd511" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1401.155686] env[62519]: DEBUG oslo_concurrency.lockutils [None req-34a2cefc-4314-45a8-b8ba-97221121738f tempest-ServerDiagnosticsNegativeTest-1844110598 tempest-ServerDiagnosticsNegativeTest-1844110598-project-member] Acquiring lock "c616d8ec-f28a-4430-a336-1ea4790fd511-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1401.155686] env[62519]: DEBUG oslo_concurrency.lockutils [None req-34a2cefc-4314-45a8-b8ba-97221121738f tempest-ServerDiagnosticsNegativeTest-1844110598 tempest-ServerDiagnosticsNegativeTest-1844110598-project-member] Lock "c616d8ec-f28a-4430-a336-1ea4790fd511-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1401.155932] env[62519]: DEBUG oslo_concurrency.lockutils [None req-34a2cefc-4314-45a8-b8ba-97221121738f tempest-ServerDiagnosticsNegativeTest-1844110598 tempest-ServerDiagnosticsNegativeTest-1844110598-project-member] Lock "c616d8ec-f28a-4430-a336-1ea4790fd511-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1401.159911] env[62519]: INFO nova.compute.manager [None req-34a2cefc-4314-45a8-b8ba-97221121738f tempest-ServerDiagnosticsNegativeTest-1844110598 tempest-ServerDiagnosticsNegativeTest-1844110598-project-member] [instance: c616d8ec-f28a-4430-a336-1ea4790fd511] Terminating instance [ 1401.170429] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32c551d1-19a0-417c-9987-b11d093faf96 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1401.196635] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ede3da0-7dce-4fdd-b654-f776340e2b0c {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1401.205088] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c55cf50-9266-45fd-b062-d867721b17e4 {{(pid=62519) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1401.242009] env[62519]: DEBUG nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181358MB free_disk=159GB free_vcpus=48 pci_devices=None {{(pid=62519) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1401.242380] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1401.328184] env[62519]: DEBUG oslo_vmware.api [None req-2c6d3fbe-1808-4f6d-8aed-7a7c11b6565e tempest-ServersAdminNegativeTestJSON-2096466474 tempest-ServersAdminNegativeTestJSON-2096466474-project-member] Task: {'id': task-1801936, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1401.517606] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1801939, 'name': CreateVM_Task} progress is 99%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1401.651369] env[62519]: DEBUG nova.compute.utils [None req-0f0e68dd-2aa4-4309-84c5-3072ca2b5eec tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Using /dev/sd instead of None {{(pid=62519) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1401.653241] env[62519]: DEBUG nova.compute.manager [None req-0f0e68dd-2aa4-4309-84c5-3072ca2b5eec tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] [instance: 34d2991e-b6df-473d-8994-e45ff57ef131] Allocating IP information in the background. {{(pid=62519) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1989}} [ 1401.653566] env[62519]: DEBUG nova.network.neutron [None req-0f0e68dd-2aa4-4309-84c5-3072ca2b5eec tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] [instance: 34d2991e-b6df-473d-8994-e45ff57ef131] allocate_for_instance() {{(pid=62519) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1401.668597] env[62519]: DEBUG nova.compute.manager [None req-34a2cefc-4314-45a8-b8ba-97221121738f tempest-ServerDiagnosticsNegativeTest-1844110598 tempest-ServerDiagnosticsNegativeTest-1844110598-project-member] [instance: c616d8ec-f28a-4430-a336-1ea4790fd511] Start destroying the instance on the hypervisor. 
{{(pid=62519) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3166}} [ 1401.668823] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-34a2cefc-4314-45a8-b8ba-97221121738f tempest-ServerDiagnosticsNegativeTest-1844110598 tempest-ServerDiagnosticsNegativeTest-1844110598-project-member] [instance: c616d8ec-f28a-4430-a336-1ea4790fd511] Destroying instance {{(pid=62519) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1401.669748] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee089fa8-23d5-4d50-863c-f451e1a86e2f {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1401.685140] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-34a2cefc-4314-45a8-b8ba-97221121738f tempest-ServerDiagnosticsNegativeTest-1844110598 tempest-ServerDiagnosticsNegativeTest-1844110598-project-member] [instance: c616d8ec-f28a-4430-a336-1ea4790fd511] Powering off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1401.685140] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-be0bf1b7-5593-4b0a-9fd3-09b4ef79911a {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1401.692027] env[62519]: DEBUG oslo_vmware.api [None req-34a2cefc-4314-45a8-b8ba-97221121738f tempest-ServerDiagnosticsNegativeTest-1844110598 tempest-ServerDiagnosticsNegativeTest-1844110598-project-member] Waiting for the task: (returnval){ [ 1401.692027] env[62519]: value = "task-1801940" [ 1401.692027] env[62519]: _type = "Task" [ 1401.692027] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1401.701457] env[62519]: DEBUG oslo_vmware.api [None req-34a2cefc-4314-45a8-b8ba-97221121738f tempest-ServerDiagnosticsNegativeTest-1844110598 tempest-ServerDiagnosticsNegativeTest-1844110598-project-member] Task: {'id': task-1801940, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1401.770566] env[62519]: DEBUG nova.policy [None req-0f0e68dd-2aa4-4309-84c5-3072ca2b5eec tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c8355a912c5b4aef8ad7f30c2e64fdc4', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '555fa612766f4b5fa173664ca3fa496c', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62519) authorize /opt/stack/nova/nova/policy.py:192}} [ 1401.825103] env[62519]: DEBUG oslo_vmware.api [None req-2c6d3fbe-1808-4f6d-8aed-7a7c11b6565e tempest-ServersAdminNegativeTestJSON-2096466474 tempest-ServersAdminNegativeTestJSON-2096466474-project-member] Task: {'id': task-1801936, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1402.005928] env[62519]: DEBUG oslo_concurrency.lockutils [None req-f54a75ce-3152-4fde-8da2-c76420839723 tempest-InstanceActionsV221TestJSON-1566764076 tempest-InstanceActionsV221TestJSON-1566764076-project-member] Acquiring lock "c612a1be-fb39-416d-a9d2-d206582e5aeb" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1402.005928] env[62519]: DEBUG oslo_concurrency.lockutils [None req-f54a75ce-3152-4fde-8da2-c76420839723 tempest-InstanceActionsV221TestJSON-1566764076 tempest-InstanceActionsV221TestJSON-1566764076-project-member] Lock "c612a1be-fb39-416d-a9d2-d206582e5aeb" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1402.018183] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1801939, 'name': CreateVM_Task} progress is 99%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1402.153891] env[62519]: DEBUG nova.compute.manager [None req-0f0e68dd-2aa4-4309-84c5-3072ca2b5eec tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] [instance: 34d2991e-b6df-473d-8994-e45ff57ef131] Start building block device mappings for instance. {{(pid=62519) _build_resources /opt/stack/nova/nova/compute/manager.py:2873}} [ 1402.207773] env[62519]: DEBUG oslo_vmware.api [None req-34a2cefc-4314-45a8-b8ba-97221121738f tempest-ServerDiagnosticsNegativeTest-1844110598 tempest-ServerDiagnosticsNegativeTest-1844110598-project-member] Task: {'id': task-1801940, 'name': PowerOffVM_Task, 'duration_secs': 0.381371} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1402.208051] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-34a2cefc-4314-45a8-b8ba-97221121738f tempest-ServerDiagnosticsNegativeTest-1844110598 tempest-ServerDiagnosticsNegativeTest-1844110598-project-member] [instance: c616d8ec-f28a-4430-a336-1ea4790fd511] Powered off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1402.211016] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-34a2cefc-4314-45a8-b8ba-97221121738f tempest-ServerDiagnosticsNegativeTest-1844110598 tempest-ServerDiagnosticsNegativeTest-1844110598-project-member] [instance: c616d8ec-f28a-4430-a336-1ea4790fd511] Unregistering the VM {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1402.211016] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-236a9a17-9044-46d3-ba74-8961a8a3b6ac {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1402.282642] env[62519]: DEBUG oslo_concurrency.lockutils [None req-7bb4ea3e-e5b3-4b8e-bc50-4a4b860c9418 tempest-ServerDiagnosticsV248Test-635217058 tempest-ServerDiagnosticsV248Test-635217058-project-member] Lock "ceadcb5e-ee82-4441-b046-f79b973ec05e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 21.968s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1402.283126] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-34a2cefc-4314-45a8-b8ba-97221121738f tempest-ServerDiagnosticsNegativeTest-1844110598 tempest-ServerDiagnosticsNegativeTest-1844110598-project-member] [instance: c616d8ec-f28a-4430-a336-1ea4790fd511] Unregistered the VM {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1402.283373] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-34a2cefc-4314-45a8-b8ba-97221121738f tempest-ServerDiagnosticsNegativeTest-1844110598 tempest-ServerDiagnosticsNegativeTest-1844110598-project-member] [instance: c616d8ec-f28a-4430-a336-1ea4790fd511] Deleting contents of the VM from datastore datastore1 {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1402.283551] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-34a2cefc-4314-45a8-b8ba-97221121738f tempest-ServerDiagnosticsNegativeTest-1844110598 tempest-ServerDiagnosticsNegativeTest-1844110598-project-member] Deleting the datastore file [datastore1] c616d8ec-f28a-4430-a336-1ea4790fd511 {{(pid=62519) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1402.284999] env[62519]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-cfb4b858-5e22-4ee1-a51e-64e9090f5ee0 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1402.299478] env[62519]: DEBUG oslo_vmware.api [None req-34a2cefc-4314-45a8-b8ba-97221121738f tempest-ServerDiagnosticsNegativeTest-1844110598 tempest-ServerDiagnosticsNegativeTest-1844110598-project-member] Waiting for the task: (returnval){ [ 1402.299478] env[62519]: value = "task-1801942" [ 1402.299478] env[62519]: _type = "Task" [ 1402.299478] env[62519]: } to complete. 
{{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1402.321088] env[62519]: DEBUG oslo_vmware.api [None req-34a2cefc-4314-45a8-b8ba-97221121738f tempest-ServerDiagnosticsNegativeTest-1844110598 tempest-ServerDiagnosticsNegativeTest-1844110598-project-member] Task: {'id': task-1801942, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1402.333707] env[62519]: DEBUG oslo_vmware.api [None req-2c6d3fbe-1808-4f6d-8aed-7a7c11b6565e tempest-ServersAdminNegativeTestJSON-2096466474 tempest-ServersAdminNegativeTestJSON-2096466474-project-member] Task: {'id': task-1801936, 'name': PowerOnVM_Task, 'duration_secs': 1.145748} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1402.334534] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-2c6d3fbe-1808-4f6d-8aed-7a7c11b6565e tempest-ServersAdminNegativeTestJSON-2096466474 tempest-ServersAdminNegativeTestJSON-2096466474-project-member] [instance: c8b7568b-ba07-4f65-818b-f84910209361] Powered on the VM {{(pid=62519) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1402.334534] env[62519]: INFO nova.compute.manager [None req-2c6d3fbe-1808-4f6d-8aed-7a7c11b6565e tempest-ServersAdminNegativeTestJSON-2096466474 tempest-ServersAdminNegativeTestJSON-2096466474-project-member] [instance: c8b7568b-ba07-4f65-818b-f84910209361] Took 13.57 seconds to spawn the instance on the hypervisor. [ 1402.334534] env[62519]: DEBUG nova.compute.manager [None req-2c6d3fbe-1808-4f6d-8aed-7a7c11b6565e tempest-ServersAdminNegativeTestJSON-2096466474 tempest-ServersAdminNegativeTestJSON-2096466474-project-member] [instance: c8b7568b-ba07-4f65-818b-f84910209361] Checking state {{(pid=62519) _get_power_state /opt/stack/nova/nova/compute/manager.py:1799}} [ 1402.335361] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab978698-f7f1-4738-adcf-58cc1b79c485 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1402.523858] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1801939, 'name': CreateVM_Task, 'duration_secs': 1.361422} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1402.524475] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1118c1e5-1aa8-4f52-9fb9-e86531bf83d1] Created VM on the ESX host {{(pid=62519) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1402.525129] env[62519]: DEBUG oslo_concurrency.lockutils [None req-e8efc79a-e5f9-47fc-b2c3-ac5d306260b4 tempest-ServerDiagnosticsTest-1027319288 tempest-ServerDiagnosticsTest-1027319288-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1402.525129] env[62519]: DEBUG oslo_concurrency.lockutils [None req-e8efc79a-e5f9-47fc-b2c3-ac5d306260b4 tempest-ServerDiagnosticsTest-1027319288 tempest-ServerDiagnosticsTest-1027319288-project-member] Acquired lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1402.526224] env[62519]: DEBUG oslo_concurrency.lockutils [None req-e8efc79a-e5f9-47fc-b2c3-ac5d306260b4 tempest-ServerDiagnosticsTest-1027319288 tempest-ServerDiagnosticsTest-1027319288-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1402.526462] env[62519]: DEBUG nova.network.neutron [None req-0f0e68dd-2aa4-4309-84c5-3072ca2b5eec tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] [instance: 34d2991e-b6df-473d-8994-e45ff57ef131] Successfully created port: e6b9aab2-f105-4c06-b204-f0626f41ccbe {{(pid=62519) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1402.528419] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c6d008e5-0dd8-41e6-a7f2-61d5415f7ffa {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1402.532564] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd978b81-3536-45a8-a457-15d52af15ab9 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1402.540548] env[62519]: DEBUG oslo_vmware.api [None req-e8efc79a-e5f9-47fc-b2c3-ac5d306260b4 tempest-ServerDiagnosticsTest-1027319288 tempest-ServerDiagnosticsTest-1027319288-project-member] Waiting for the task: (returnval){ [ 1402.540548] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]52d143a9-e12d-4801-c106-5d3b258b6227" [ 1402.540548] env[62519]: _type = "Task" [ 1402.540548] env[62519]: } to complete. 
{{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1402.548660] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea0e5bec-e1f4-410e-8dc7-e6635c04a70a {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1402.553025] env[62519]: DEBUG nova.network.neutron [None req-2bcdd96e-4e47-4b74-bed4-3230c87035c0 tempest-ServersV294TestFqdnHostnames-312089260 tempest-ServersV294TestFqdnHostnames-312089260-project-member] [instance: 2b391628-18a2-4606-8c59-58ba642cee50] Successfully updated port: ac832bcb-7bb4-49de-be18-a8fd0d8ee16e {{(pid=62519) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1402.561647] env[62519]: DEBUG oslo_vmware.api [None req-e8efc79a-e5f9-47fc-b2c3-ac5d306260b4 tempest-ServerDiagnosticsTest-1027319288 tempest-ServerDiagnosticsTest-1027319288-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52d143a9-e12d-4801-c106-5d3b258b6227, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1402.594947] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-957556ce-bb81-4335-bd96-ddd5bcdb6167 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1402.605925] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86a5b81d-35df-45d4-8417-3a8fcf7768bb {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1402.633156] env[62519]: DEBUG nova.compute.provider_tree [None req-4c3fd39d-ef1d-4593-8784-6a91635c8aff tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Inventory has not changed in ProviderTree for provider: f8ca0d98-9158-4b85-ae0e-b106f966dd44 {{(pid=62519) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1402.744594] env[62519]: DEBUG nova.compute.manager [None req-0906e05a-57dc-438f-910a-a56b6af978f2 tempest-ServerDiagnosticsV248Test-1358864958 tempest-ServerDiagnosticsV248Test-1358864958-project-admin] [instance: ceadcb5e-ee82-4441-b046-f79b973ec05e] Checking state {{(pid=62519) _get_power_state /opt/stack/nova/nova/compute/manager.py:1799}} [ 1402.745875] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb90d2d3-8bdf-4982-9b2f-b4ba8d7f2837 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1402.758347] env[62519]: INFO nova.compute.manager [None req-0906e05a-57dc-438f-910a-a56b6af978f2 tempest-ServerDiagnosticsV248Test-1358864958 tempest-ServerDiagnosticsV248Test-1358864958-project-admin] [instance: ceadcb5e-ee82-4441-b046-f79b973ec05e] Retrieving diagnostics [ 1402.759904] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20cadc00-776f-4cbb-b00f-da410d11d559 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1402.800022] env[62519]: DEBUG nova.compute.manager [None req-39a021c3-8855-476c-8305-d806d48e02a7 tempest-VolumesAssistedSnapshotsTest-904167312 tempest-VolumesAssistedSnapshotsTest-904167312-project-member] [instance: 
91902e7f-8c15-447b-a3a8-04433434b1b6] Starting instance... {{(pid=62519) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2441}} [ 1402.814059] env[62519]: DEBUG oslo_vmware.api [None req-34a2cefc-4314-45a8-b8ba-97221121738f tempest-ServerDiagnosticsNegativeTest-1844110598 tempest-ServerDiagnosticsNegativeTest-1844110598-project-member] Task: {'id': task-1801942, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.281809} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1402.814331] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-34a2cefc-4314-45a8-b8ba-97221121738f tempest-ServerDiagnosticsNegativeTest-1844110598 tempest-ServerDiagnosticsNegativeTest-1844110598-project-member] Deleted the datastore file {{(pid=62519) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1402.814492] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-34a2cefc-4314-45a8-b8ba-97221121738f tempest-ServerDiagnosticsNegativeTest-1844110598 tempest-ServerDiagnosticsNegativeTest-1844110598-project-member] [instance: c616d8ec-f28a-4430-a336-1ea4790fd511] Deleted contents of the VM from datastore datastore1 {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1402.815145] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-34a2cefc-4314-45a8-b8ba-97221121738f tempest-ServerDiagnosticsNegativeTest-1844110598 tempest-ServerDiagnosticsNegativeTest-1844110598-project-member] [instance: c616d8ec-f28a-4430-a336-1ea4790fd511] Instance destroyed {{(pid=62519) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1402.815145] env[62519]: INFO nova.compute.manager [None req-34a2cefc-4314-45a8-b8ba-97221121738f tempest-ServerDiagnosticsNegativeTest-1844110598 tempest-ServerDiagnosticsNegativeTest-1844110598-project-member] [instance: c616d8ec-f28a-4430-a336-1ea4790fd511] Took 1.15 seconds to destroy the instance on the hypervisor. [ 1402.815145] env[62519]: DEBUG oslo.service.loopingcall [None req-34a2cefc-4314-45a8-b8ba-97221121738f tempest-ServerDiagnosticsNegativeTest-1844110598 tempest-ServerDiagnosticsNegativeTest-1844110598-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62519) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1402.815322] env[62519]: DEBUG nova.compute.manager [-] [instance: c616d8ec-f28a-4430-a336-1ea4790fd511] Deallocating network for instance {{(pid=62519) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2297}} [ 1402.817123] env[62519]: DEBUG nova.network.neutron [-] [instance: c616d8ec-f28a-4430-a336-1ea4790fd511] deallocate_for_instance() {{(pid=62519) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1402.866202] env[62519]: INFO nova.compute.manager [None req-2c6d3fbe-1808-4f6d-8aed-7a7c11b6565e tempest-ServersAdminNegativeTestJSON-2096466474 tempest-ServersAdminNegativeTestJSON-2096466474-project-member] [instance: c8b7568b-ba07-4f65-818b-f84910209361] Took 21.53 seconds to build instance. 
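
The entries around this point repeatedly trace the same asynchronous pattern: a vCenter task is started (PowerOnVM_Task as task-1801936, CreateVM_Task as task-1801939, DeleteDatastoreFile_Task as task-1801942), the caller then sits in wait_for_task while _poll_task reports "progress is N%", and a final entry records duration_secs once the task succeeds. Below is a minimal sketch of driving that pattern directly through oslo.vmware; the vCenter host, the credentials, and the 'vm-12345' managed-object id are placeholders rather than values from this log, and Nova itself reaches wait_for_task through its own VMwareAPISession wrapper instead of calling invoke_api this directly.

    from oslo_vmware import api
    from oslo_vmware import vim_util

    # Open a vCenter session; host and credentials here are illustrative
    # placeholders, not the environment this log was captured from.
    session = api.VMwareAPISession(
        'vcenter.example.test',
        'administrator@vsphere.local',
        'secret',
        api_retry_count=10,
        task_poll_interval=0.5)   # interval used by wait_for_task when polling

    # Build a managed-object reference for the VM (hypothetical moref id).
    vm_ref = vim_util.get_moref('vm-12345', 'VirtualMachine')

    # Start the asynchronous vCenter task, then block until it finishes.
    # wait_for_task polls the task state and raises if vCenter reports an error.
    task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
    task_info = session.wait_for_task(task)
    print(task_info.state)   # 'success' once the power-on completes

Internally, wait_for_task runs its poll through an oslo_service FixedIntervalLoopingCall at task_poll_interval, which is what produces the repeated "_poll_task ... progress is N%" lines above and the closing "completed successfully" entry with duration_secs once the loop observes the task in the success state.
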
[ 1403.056832] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2bcdd96e-4e47-4b74-bed4-3230c87035c0 tempest-ServersV294TestFqdnHostnames-312089260 tempest-ServersV294TestFqdnHostnames-312089260-project-member] Acquiring lock "refresh_cache-2b391628-18a2-4606-8c59-58ba642cee50" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1403.057134] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2bcdd96e-4e47-4b74-bed4-3230c87035c0 tempest-ServersV294TestFqdnHostnames-312089260 tempest-ServersV294TestFqdnHostnames-312089260-project-member] Acquired lock "refresh_cache-2b391628-18a2-4606-8c59-58ba642cee50" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1403.058422] env[62519]: DEBUG nova.network.neutron [None req-2bcdd96e-4e47-4b74-bed4-3230c87035c0 tempest-ServersV294TestFqdnHostnames-312089260 tempest-ServersV294TestFqdnHostnames-312089260-project-member] [instance: 2b391628-18a2-4606-8c59-58ba642cee50] Building network info cache for instance {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1403.061758] env[62519]: DEBUG oslo_vmware.api [None req-e8efc79a-e5f9-47fc-b2c3-ac5d306260b4 tempest-ServerDiagnosticsTest-1027319288 tempest-ServerDiagnosticsTest-1027319288-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52d143a9-e12d-4801-c106-5d3b258b6227, 'name': SearchDatastore_Task, 'duration_secs': 0.019493} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1403.061952] env[62519]: DEBUG oslo_concurrency.lockutils [None req-e8efc79a-e5f9-47fc-b2c3-ac5d306260b4 tempest-ServerDiagnosticsTest-1027319288 tempest-ServerDiagnosticsTest-1027319288-project-member] Releasing lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1403.062208] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-e8efc79a-e5f9-47fc-b2c3-ac5d306260b4 tempest-ServerDiagnosticsTest-1027319288 tempest-ServerDiagnosticsTest-1027319288-project-member] [instance: 1118c1e5-1aa8-4f52-9fb9-e86531bf83d1] Processing image 15793716-f1d9-4a86-9030-717adf498693 {{(pid=62519) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1403.062441] env[62519]: DEBUG oslo_concurrency.lockutils [None req-e8efc79a-e5f9-47fc-b2c3-ac5d306260b4 tempest-ServerDiagnosticsTest-1027319288 tempest-ServerDiagnosticsTest-1027319288-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1403.062597] env[62519]: DEBUG oslo_concurrency.lockutils [None req-e8efc79a-e5f9-47fc-b2c3-ac5d306260b4 tempest-ServerDiagnosticsTest-1027319288 tempest-ServerDiagnosticsTest-1027319288-project-member] Acquired lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1403.062796] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-e8efc79a-e5f9-47fc-b2c3-ac5d306260b4 tempest-ServerDiagnosticsTest-1027319288 tempest-ServerDiagnosticsTest-1027319288-project-member] Creating directory with 
path [datastore1] devstack-image-cache_base {{(pid=62519) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1403.063082] env[62519]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-69086f8d-bd33-44e8-9642-164065405b81 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1403.081868] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-e8efc79a-e5f9-47fc-b2c3-ac5d306260b4 tempest-ServerDiagnosticsTest-1027319288 tempest-ServerDiagnosticsTest-1027319288-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62519) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1403.081868] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-e8efc79a-e5f9-47fc-b2c3-ac5d306260b4 tempest-ServerDiagnosticsTest-1027319288 tempest-ServerDiagnosticsTest-1027319288-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62519) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1403.082647] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fa50e655-c598-44ee-a132-db7f830d5e82 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1403.091818] env[62519]: DEBUG oslo_vmware.api [None req-e8efc79a-e5f9-47fc-b2c3-ac5d306260b4 tempest-ServerDiagnosticsTest-1027319288 tempest-ServerDiagnosticsTest-1027319288-project-member] Waiting for the task: (returnval){ [ 1403.091818] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]52dd6877-bdf3-d0f9-c50f-9d294d99c330" [ 1403.091818] env[62519]: _type = "Task" [ 1403.091818] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1403.100921] env[62519]: DEBUG oslo_vmware.api [None req-e8efc79a-e5f9-47fc-b2c3-ac5d306260b4 tempest-ServerDiagnosticsTest-1027319288 tempest-ServerDiagnosticsTest-1027319288-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52dd6877-bdf3-d0f9-c50f-9d294d99c330, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1403.138728] env[62519]: DEBUG nova.scheduler.client.report [None req-4c3fd39d-ef1d-4593-8784-6a91635c8aff tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Inventory has not changed for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 159, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1403.165337] env[62519]: DEBUG nova.compute.manager [None req-0f0e68dd-2aa4-4309-84c5-3072ca2b5eec tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] [instance: 34d2991e-b6df-473d-8994-e45ff57ef131] Start spawning the instance on the hypervisor. 
{{(pid=62519) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2647}} [ 1403.202900] env[62519]: DEBUG nova.virt.hardware [None req-0f0e68dd-2aa4-4309-84c5-3072ca2b5eec tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T07:56:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T07:55:55Z,direct_url=,disk_format='vmdk',id=15793716-f1d9-4a86-9030-717adf498693,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4069337684d348e0a1ab4eb6a1a2b14d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T07:55:56Z,virtual_size=,visibility=), allow threads: False {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1403.203175] env[62519]: DEBUG nova.virt.hardware [None req-0f0e68dd-2aa4-4309-84c5-3072ca2b5eec tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Flavor limits 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1403.203333] env[62519]: DEBUG nova.virt.hardware [None req-0f0e68dd-2aa4-4309-84c5-3072ca2b5eec tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Image limits 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1403.203511] env[62519]: DEBUG nova.virt.hardware [None req-0f0e68dd-2aa4-4309-84c5-3072ca2b5eec tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Flavor pref 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1403.203652] env[62519]: DEBUG nova.virt.hardware [None req-0f0e68dd-2aa4-4309-84c5-3072ca2b5eec tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Image pref 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1403.203793] env[62519]: DEBUG nova.virt.hardware [None req-0f0e68dd-2aa4-4309-84c5-3072ca2b5eec tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1403.203994] env[62519]: DEBUG nova.virt.hardware [None req-0f0e68dd-2aa4-4309-84c5-3072ca2b5eec tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1403.204185] env[62519]: DEBUG nova.virt.hardware [None req-0f0e68dd-2aa4-4309-84c5-3072ca2b5eec tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62519) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1403.204486] env[62519]: DEBUG nova.virt.hardware [None 
req-0f0e68dd-2aa4-4309-84c5-3072ca2b5eec tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Got 1 possible topologies {{(pid=62519) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1403.204486] env[62519]: DEBUG nova.virt.hardware [None req-0f0e68dd-2aa4-4309-84c5-3072ca2b5eec tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1403.204649] env[62519]: DEBUG nova.virt.hardware [None req-0f0e68dd-2aa4-4309-84c5-3072ca2b5eec tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1403.205582] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8bad2376-cd14-43e0-99d0-228a612b78a2 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1403.220435] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4234dec-cc5d-4d5a-907a-077dbdd79194 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1403.335030] env[62519]: DEBUG oslo_concurrency.lockutils [None req-39a021c3-8855-476c-8305-d806d48e02a7 tempest-VolumesAssistedSnapshotsTest-904167312 tempest-VolumesAssistedSnapshotsTest-904167312-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1403.397364] env[62519]: DEBUG nova.network.neutron [None req-94daf7e7-f2b3-4130-bb71-0b7053690311 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] [instance: 11d4a010-959f-4f53-94dc-7499007612ad] Successfully updated port: ca1a3bbf-3f10-4a96-a67d-b77464ab25e7 {{(pid=62519) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1403.602430] env[62519]: DEBUG nova.network.neutron [None req-2bcdd96e-4e47-4b74-bed4-3230c87035c0 tempest-ServersV294TestFqdnHostnames-312089260 tempest-ServersV294TestFqdnHostnames-312089260-project-member] [instance: 2b391628-18a2-4606-8c59-58ba642cee50] Instance cache missing network info. {{(pid=62519) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1403.608490] env[62519]: DEBUG oslo_vmware.api [None req-e8efc79a-e5f9-47fc-b2c3-ac5d306260b4 tempest-ServerDiagnosticsTest-1027319288 tempest-ServerDiagnosticsTest-1027319288-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52dd6877-bdf3-d0f9-c50f-9d294d99c330, 'name': SearchDatastore_Task, 'duration_secs': 0.013254} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1403.609432] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-956e24c3-9ca8-4af5-b291-f9fdd7644b7e {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1403.618020] env[62519]: DEBUG oslo_vmware.api [None req-e8efc79a-e5f9-47fc-b2c3-ac5d306260b4 tempest-ServerDiagnosticsTest-1027319288 tempest-ServerDiagnosticsTest-1027319288-project-member] Waiting for the task: (returnval){ [ 1403.618020] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]52788748-ee2f-618c-1ee9-8c75af0b993d" [ 1403.618020] env[62519]: _type = "Task" [ 1403.618020] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1403.626881] env[62519]: DEBUG oslo_vmware.api [None req-e8efc79a-e5f9-47fc-b2c3-ac5d306260b4 tempest-ServerDiagnosticsTest-1027319288 tempest-ServerDiagnosticsTest-1027319288-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52788748-ee2f-618c-1ee9-8c75af0b993d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1403.648048] env[62519]: DEBUG oslo_concurrency.lockutils [None req-4c3fd39d-ef1d-4593-8784-6a91635c8aff tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.504s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1403.648048] env[62519]: DEBUG nova.compute.manager [None req-4c3fd39d-ef1d-4593-8784-6a91635c8aff tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] [instance: 099112ae-569b-4853-bc47-b0b8b97d2525] Start building networks asynchronously for instance. 
{{(pid=62519) _build_resources /opt/stack/nova/nova/compute/manager.py:2838}} [ 1403.649105] env[62519]: DEBUG oslo_concurrency.lockutils [None req-3dc13758-4a00-4637-af78-ec1a85cd10bf tempest-ServersAdmin275Test-1657935187 tempest-ServersAdmin275Test-1657935187-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 6.734s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1403.650728] env[62519]: INFO nova.compute.claims [None req-3dc13758-4a00-4637-af78-ec1a85cd10bf tempest-ServersAdmin275Test-1657935187 tempest-ServersAdmin275Test-1657935187-project-member] [instance: c07e4d30-44bc-417b-8137-97f974aec932] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1403.751777] env[62519]: DEBUG nova.network.neutron [None req-2bcdd96e-4e47-4b74-bed4-3230c87035c0 tempest-ServersV294TestFqdnHostnames-312089260 tempest-ServersV294TestFqdnHostnames-312089260-project-member] [instance: 2b391628-18a2-4606-8c59-58ba642cee50] Updating instance_info_cache with network_info: [{"id": "ac832bcb-7bb4-49de-be18-a8fd0d8ee16e", "address": "fa:16:3e:b2:f6:9d", "network": {"id": "0413ee0f-d5e1-4c09-b98e-01bbb1481053", "bridge": "br-int", "label": "tempest-ServersV294TestFqdnHostnames-1023814213-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "998210eef9e04b64a426b66c965130a1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6685c85e-be1e-4b7b-a6cc-3e50e59b6567", "external-id": "nsx-vlan-transportzone-129", "segmentation_id": 129, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapac832bcb-7b", "ovs_interfaceid": "ac832bcb-7bb4-49de-be18-a8fd0d8ee16e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1403.899124] env[62519]: DEBUG oslo_concurrency.lockutils [None req-94daf7e7-f2b3-4130-bb71-0b7053690311 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Acquiring lock "refresh_cache-11d4a010-959f-4f53-94dc-7499007612ad" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1403.899603] env[62519]: DEBUG oslo_concurrency.lockutils [None req-94daf7e7-f2b3-4130-bb71-0b7053690311 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Acquired lock "refresh_cache-11d4a010-959f-4f53-94dc-7499007612ad" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1403.899603] env[62519]: DEBUG nova.network.neutron [None req-94daf7e7-f2b3-4130-bb71-0b7053690311 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] [instance: 11d4a010-959f-4f53-94dc-7499007612ad] Building network info cache for instance {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1404.127996] 
env[62519]: DEBUG oslo_vmware.api [None req-e8efc79a-e5f9-47fc-b2c3-ac5d306260b4 tempest-ServerDiagnosticsTest-1027319288 tempest-ServerDiagnosticsTest-1027319288-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52788748-ee2f-618c-1ee9-8c75af0b993d, 'name': SearchDatastore_Task, 'duration_secs': 0.042831} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1404.128295] env[62519]: DEBUG oslo_concurrency.lockutils [None req-e8efc79a-e5f9-47fc-b2c3-ac5d306260b4 tempest-ServerDiagnosticsTest-1027319288 tempest-ServerDiagnosticsTest-1027319288-project-member] Releasing lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1404.129978] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-e8efc79a-e5f9-47fc-b2c3-ac5d306260b4 tempest-ServerDiagnosticsTest-1027319288 tempest-ServerDiagnosticsTest-1027319288-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk to [datastore1] 1118c1e5-1aa8-4f52-9fb9-e86531bf83d1/1118c1e5-1aa8-4f52-9fb9-e86531bf83d1.vmdk {{(pid=62519) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1404.129978] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-de30f8af-a6ac-4cd6-b49a-e31700f6a14b {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1404.136441] env[62519]: DEBUG oslo_vmware.api [None req-e8efc79a-e5f9-47fc-b2c3-ac5d306260b4 tempest-ServerDiagnosticsTest-1027319288 tempest-ServerDiagnosticsTest-1027319288-project-member] Waiting for the task: (returnval){ [ 1404.136441] env[62519]: value = "task-1801943" [ 1404.136441] env[62519]: _type = "Task" [ 1404.136441] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1404.146828] env[62519]: DEBUG oslo_vmware.api [None req-e8efc79a-e5f9-47fc-b2c3-ac5d306260b4 tempest-ServerDiagnosticsTest-1027319288 tempest-ServerDiagnosticsTest-1027319288-project-member] Task: {'id': task-1801943, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1404.150283] env[62519]: DEBUG nova.compute.utils [None req-4c3fd39d-ef1d-4593-8784-6a91635c8aff tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Using /dev/sd instead of None {{(pid=62519) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1404.151521] env[62519]: DEBUG nova.compute.manager [None req-4c3fd39d-ef1d-4593-8784-6a91635c8aff tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] [instance: 099112ae-569b-4853-bc47-b0b8b97d2525] Allocating IP information in the background. 
{{(pid=62519) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1989}} [ 1404.151674] env[62519]: DEBUG nova.network.neutron [None req-4c3fd39d-ef1d-4593-8784-6a91635c8aff tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] [instance: 099112ae-569b-4853-bc47-b0b8b97d2525] allocate_for_instance() {{(pid=62519) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1404.193128] env[62519]: DEBUG nova.compute.manager [req-60f3675d-d567-4b00-8533-4219266ac4e0 req-c86db273-7d87-44d3-9225-031c7f18cf58 service nova] [instance: 1118c1e5-1aa8-4f52-9fb9-e86531bf83d1] Received event network-changed-03319779-1123-4b6b-899b-834771303eff {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1404.193128] env[62519]: DEBUG nova.compute.manager [req-60f3675d-d567-4b00-8533-4219266ac4e0 req-c86db273-7d87-44d3-9225-031c7f18cf58 service nova] [instance: 1118c1e5-1aa8-4f52-9fb9-e86531bf83d1] Refreshing instance network info cache due to event network-changed-03319779-1123-4b6b-899b-834771303eff. {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11628}} [ 1404.193306] env[62519]: DEBUG oslo_concurrency.lockutils [req-60f3675d-d567-4b00-8533-4219266ac4e0 req-c86db273-7d87-44d3-9225-031c7f18cf58 service nova] Acquiring lock "refresh_cache-1118c1e5-1aa8-4f52-9fb9-e86531bf83d1" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1404.193478] env[62519]: DEBUG oslo_concurrency.lockutils [req-60f3675d-d567-4b00-8533-4219266ac4e0 req-c86db273-7d87-44d3-9225-031c7f18cf58 service nova] Acquired lock "refresh_cache-1118c1e5-1aa8-4f52-9fb9-e86531bf83d1" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1404.193627] env[62519]: DEBUG nova.network.neutron [req-60f3675d-d567-4b00-8533-4219266ac4e0 req-c86db273-7d87-44d3-9225-031c7f18cf58 service nova] [instance: 1118c1e5-1aa8-4f52-9fb9-e86531bf83d1] Refreshing network info cache for port 03319779-1123-4b6b-899b-834771303eff {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1404.244815] env[62519]: DEBUG nova.policy [None req-4c3fd39d-ef1d-4593-8784-6a91635c8aff tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c8355a912c5b4aef8ad7f30c2e64fdc4', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '555fa612766f4b5fa173664ca3fa496c', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62519) authorize /opt/stack/nova/nova/policy.py:192}} [ 1404.253414] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2bcdd96e-4e47-4b74-bed4-3230c87035c0 tempest-ServersV294TestFqdnHostnames-312089260 tempest-ServersV294TestFqdnHostnames-312089260-project-member] Releasing lock "refresh_cache-2b391628-18a2-4606-8c59-58ba642cee50" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1404.253786] env[62519]: DEBUG nova.compute.manager [None req-2bcdd96e-4e47-4b74-bed4-3230c87035c0 tempest-ServersV294TestFqdnHostnames-312089260 tempest-ServersV294TestFqdnHostnames-312089260-project-member] [instance: 
2b391628-18a2-4606-8c59-58ba642cee50] Instance network_info: |[{"id": "ac832bcb-7bb4-49de-be18-a8fd0d8ee16e", "address": "fa:16:3e:b2:f6:9d", "network": {"id": "0413ee0f-d5e1-4c09-b98e-01bbb1481053", "bridge": "br-int", "label": "tempest-ServersV294TestFqdnHostnames-1023814213-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "998210eef9e04b64a426b66c965130a1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6685c85e-be1e-4b7b-a6cc-3e50e59b6567", "external-id": "nsx-vlan-transportzone-129", "segmentation_id": 129, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapac832bcb-7b", "ovs_interfaceid": "ac832bcb-7bb4-49de-be18-a8fd0d8ee16e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62519) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2004}} [ 1404.254194] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-2bcdd96e-4e47-4b74-bed4-3230c87035c0 tempest-ServersV294TestFqdnHostnames-312089260 tempest-ServersV294TestFqdnHostnames-312089260-project-member] [instance: 2b391628-18a2-4606-8c59-58ba642cee50] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:b2:f6:9d', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '6685c85e-be1e-4b7b-a6cc-3e50e59b6567', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'ac832bcb-7bb4-49de-be18-a8fd0d8ee16e', 'vif_model': 'vmxnet3'}] {{(pid=62519) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1404.268138] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-2bcdd96e-4e47-4b74-bed4-3230c87035c0 tempest-ServersV294TestFqdnHostnames-312089260 tempest-ServersV294TestFqdnHostnames-312089260-project-member] Creating folder: Project (998210eef9e04b64a426b66c965130a1). Parent ref: group-v373567. {{(pid=62519) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1404.268536] env[62519]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b25ba880-645e-466b-874f-c593dc08c5a6 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1404.281445] env[62519]: INFO nova.virt.vmwareapi.vm_util [None req-2bcdd96e-4e47-4b74-bed4-3230c87035c0 tempest-ServersV294TestFqdnHostnames-312089260 tempest-ServersV294TestFqdnHostnames-312089260-project-member] Created folder: Project (998210eef9e04b64a426b66c965130a1) in parent group-v373567. [ 1404.283041] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-2bcdd96e-4e47-4b74-bed4-3230c87035c0 tempest-ServersV294TestFqdnHostnames-312089260 tempest-ServersV294TestFqdnHostnames-312089260-project-member] Creating folder: Instances. Parent ref: group-v373589. 
{{(pid=62519) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1404.283041] env[62519]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-928adf3f-c85f-4cfd-9ca2-f832d51b8588 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1404.295691] env[62519]: INFO nova.virt.vmwareapi.vm_util [None req-2bcdd96e-4e47-4b74-bed4-3230c87035c0 tempest-ServersV294TestFqdnHostnames-312089260 tempest-ServersV294TestFqdnHostnames-312089260-project-member] Created folder: Instances in parent group-v373589. [ 1404.296213] env[62519]: DEBUG oslo.service.loopingcall [None req-2bcdd96e-4e47-4b74-bed4-3230c87035c0 tempest-ServersV294TestFqdnHostnames-312089260 tempest-ServersV294TestFqdnHostnames-312089260-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62519) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1404.296213] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2b391628-18a2-4606-8c59-58ba642cee50] Creating VM on the ESX host {{(pid=62519) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1404.296992] env[62519]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c4864464-e336-4212-8b9d-7e2b49f2dcfa {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1404.319425] env[62519]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1404.319425] env[62519]: value = "task-1801946" [ 1404.319425] env[62519]: _type = "Task" [ 1404.319425] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1404.328775] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1801946, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1404.379425] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2c6d3fbe-1808-4f6d-8aed-7a7c11b6565e tempest-ServersAdminNegativeTestJSON-2096466474 tempest-ServersAdminNegativeTestJSON-2096466474-project-member] Lock "c8b7568b-ba07-4f65-818b-f84910209361" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 24.070s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1404.380522] env[62519]: DEBUG nova.compute.manager [req-e2d844f0-9635-4d50-9962-46b9c8d04e65 req-c7a34e24-220b-4464-ae1f-c4ffda6732ce service nova] [instance: 2b391628-18a2-4606-8c59-58ba642cee50] Received event network-vif-plugged-ac832bcb-7bb4-49de-be18-a8fd0d8ee16e {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1404.380522] env[62519]: DEBUG oslo_concurrency.lockutils [req-e2d844f0-9635-4d50-9962-46b9c8d04e65 req-c7a34e24-220b-4464-ae1f-c4ffda6732ce service nova] Acquiring lock "2b391628-18a2-4606-8c59-58ba642cee50-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1404.380687] env[62519]: DEBUG oslo_concurrency.lockutils [req-e2d844f0-9635-4d50-9962-46b9c8d04e65 req-c7a34e24-220b-4464-ae1f-c4ffda6732ce service nova] Lock "2b391628-18a2-4606-8c59-58ba642cee50-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1404.380846] env[62519]: DEBUG oslo_concurrency.lockutils [req-e2d844f0-9635-4d50-9962-46b9c8d04e65 req-c7a34e24-220b-4464-ae1f-c4ffda6732ce service nova] Lock "2b391628-18a2-4606-8c59-58ba642cee50-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1404.382183] env[62519]: DEBUG nova.compute.manager [req-e2d844f0-9635-4d50-9962-46b9c8d04e65 req-c7a34e24-220b-4464-ae1f-c4ffda6732ce service nova] [instance: 2b391628-18a2-4606-8c59-58ba642cee50] No waiting events found dispatching network-vif-plugged-ac832bcb-7bb4-49de-be18-a8fd0d8ee16e {{(pid=62519) pop_instance_event /opt/stack/nova/nova/compute/manager.py:323}} [ 1404.382183] env[62519]: WARNING nova.compute.manager [req-e2d844f0-9635-4d50-9962-46b9c8d04e65 req-c7a34e24-220b-4464-ae1f-c4ffda6732ce service nova] [instance: 2b391628-18a2-4606-8c59-58ba642cee50] Received unexpected event network-vif-plugged-ac832bcb-7bb4-49de-be18-a8fd0d8ee16e for instance with vm_state building and task_state spawning. [ 1404.465796] env[62519]: DEBUG nova.network.neutron [None req-94daf7e7-f2b3-4130-bb71-0b7053690311 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] [instance: 11d4a010-959f-4f53-94dc-7499007612ad] Instance cache missing network info. 
{{(pid=62519) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1404.480745] env[62519]: DEBUG nova.network.neutron [-] [instance: c616d8ec-f28a-4430-a336-1ea4790fd511] Updating instance_info_cache with network_info: [] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1404.647117] env[62519]: DEBUG oslo_vmware.api [None req-e8efc79a-e5f9-47fc-b2c3-ac5d306260b4 tempest-ServerDiagnosticsTest-1027319288 tempest-ServerDiagnosticsTest-1027319288-project-member] Task: {'id': task-1801943, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1404.655340] env[62519]: DEBUG nova.compute.manager [None req-4c3fd39d-ef1d-4593-8784-6a91635c8aff tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] [instance: 099112ae-569b-4853-bc47-b0b8b97d2525] Start building block device mappings for instance. {{(pid=62519) _build_resources /opt/stack/nova/nova/compute/manager.py:2873}} [ 1404.832914] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1801946, 'name': CreateVM_Task} progress is 25%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1404.846193] env[62519]: DEBUG nova.network.neutron [None req-94daf7e7-f2b3-4130-bb71-0b7053690311 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] [instance: 11d4a010-959f-4f53-94dc-7499007612ad] Updating instance_info_cache with network_info: [{"id": "ca1a3bbf-3f10-4a96-a67d-b77464ab25e7", "address": "fa:16:3e:cc:ef:21", "network": {"id": "ddacabe4-9723-4a0b-9642-4c3ca54cd05f", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1355796152-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "549cc35f5ff249f6bf22c67872883db0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bc1e16db-ad3b-4b7f-ab64-4609c87abac0", "external-id": "nsx-vlan-transportzone-500", "segmentation_id": 500, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapca1a3bbf-3f", "ovs_interfaceid": "ca1a3bbf-3f10-4a96-a67d-b77464ab25e7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1404.855737] env[62519]: DEBUG nova.network.neutron [None req-4c3fd39d-ef1d-4593-8784-6a91635c8aff tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] [instance: 099112ae-569b-4853-bc47-b0b8b97d2525] Successfully created port: 18c7d589-708e-4a8d-b1f1-c646d532c6aa {{(pid=62519) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1404.887312] env[62519]: DEBUG nova.compute.manager [None req-a045cbfb-ac93-4591-9bfc-366cc2fb264e tempest-VolumesAdminNegativeTest-694306887 tempest-VolumesAdminNegativeTest-694306887-project-member] [instance: c61c893f-826b-4874-b253-de6fbffa9e5a] Starting instance... 
{{(pid=62519) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2441}} [ 1404.959524] env[62519]: DEBUG nova.network.neutron [None req-0f0e68dd-2aa4-4309-84c5-3072ca2b5eec tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] [instance: 34d2991e-b6df-473d-8994-e45ff57ef131] Successfully updated port: e6b9aab2-f105-4c06-b204-f0626f41ccbe {{(pid=62519) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1404.984503] env[62519]: INFO nova.compute.manager [-] [instance: c616d8ec-f28a-4430-a336-1ea4790fd511] Took 2.17 seconds to deallocate network for instance. [ 1405.017526] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dfc60f99-7bd9-4812-be1d-460e67b37abf {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1405.028346] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f150c180-16bd-41d4-8389-fb025927d74a {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1405.069047] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-638756c3-7ad0-4912-a5b6-9f1685cfdee6 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1405.079716] env[62519]: DEBUG oslo_concurrency.lockutils [None req-fcf29d77-c706-4a19-9381-3a93a4c3aaf6 tempest-ServerShowV247Test-1912217433 tempest-ServerShowV247Test-1912217433-project-member] Acquiring lock "42497ab5-cce9-4614-a6d1-dffbf6764d7b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1405.079852] env[62519]: DEBUG oslo_concurrency.lockutils [None req-fcf29d77-c706-4a19-9381-3a93a4c3aaf6 tempest-ServerShowV247Test-1912217433 tempest-ServerShowV247Test-1912217433-project-member] Lock "42497ab5-cce9-4614-a6d1-dffbf6764d7b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1405.089652] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2c53c83-3285-4646-9a11-479e8f16ff9c {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1405.107879] env[62519]: DEBUG nova.compute.provider_tree [None req-3dc13758-4a00-4637-af78-ec1a85cd10bf tempest-ServersAdmin275Test-1657935187 tempest-ServersAdmin275Test-1657935187-project-member] Inventory has not changed in ProviderTree for provider: f8ca0d98-9158-4b85-ae0e-b106f966dd44 {{(pid=62519) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1405.148576] env[62519]: DEBUG oslo_vmware.api [None req-e8efc79a-e5f9-47fc-b2c3-ac5d306260b4 tempest-ServerDiagnosticsTest-1027319288 tempest-ServerDiagnosticsTest-1027319288-project-member] Task: {'id': task-1801943, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.594773} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1405.149309] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-e8efc79a-e5f9-47fc-b2c3-ac5d306260b4 tempest-ServerDiagnosticsTest-1027319288 tempest-ServerDiagnosticsTest-1027319288-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk to [datastore1] 1118c1e5-1aa8-4f52-9fb9-e86531bf83d1/1118c1e5-1aa8-4f52-9fb9-e86531bf83d1.vmdk {{(pid=62519) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1405.149309] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-e8efc79a-e5f9-47fc-b2c3-ac5d306260b4 tempest-ServerDiagnosticsTest-1027319288 tempest-ServerDiagnosticsTest-1027319288-project-member] [instance: 1118c1e5-1aa8-4f52-9fb9-e86531bf83d1] Extending root virtual disk to 1048576 {{(pid=62519) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1405.149309] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-284335bc-2eb3-48d4-8b95-af2200385262 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1405.156632] env[62519]: DEBUG oslo_vmware.api [None req-e8efc79a-e5f9-47fc-b2c3-ac5d306260b4 tempest-ServerDiagnosticsTest-1027319288 tempest-ServerDiagnosticsTest-1027319288-project-member] Waiting for the task: (returnval){ [ 1405.156632] env[62519]: value = "task-1801947" [ 1405.156632] env[62519]: _type = "Task" [ 1405.156632] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1405.172053] env[62519]: DEBUG oslo_vmware.api [None req-e8efc79a-e5f9-47fc-b2c3-ac5d306260b4 tempest-ServerDiagnosticsTest-1027319288 tempest-ServerDiagnosticsTest-1027319288-project-member] Task: {'id': task-1801947, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1405.254371] env[62519]: DEBUG nova.network.neutron [req-60f3675d-d567-4b00-8533-4219266ac4e0 req-c86db273-7d87-44d3-9225-031c7f18cf58 service nova] [instance: 1118c1e5-1aa8-4f52-9fb9-e86531bf83d1] Updated VIF entry in instance network info cache for port 03319779-1123-4b6b-899b-834771303eff. 
{{(pid=62519) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1405.254371] env[62519]: DEBUG nova.network.neutron [req-60f3675d-d567-4b00-8533-4219266ac4e0 req-c86db273-7d87-44d3-9225-031c7f18cf58 service nova] [instance: 1118c1e5-1aa8-4f52-9fb9-e86531bf83d1] Updating instance_info_cache with network_info: [{"id": "03319779-1123-4b6b-899b-834771303eff", "address": "fa:16:3e:0f:2c:0f", "network": {"id": "b4c20e4e-b4f0-4757-9ab0-b74eef10b678", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.93", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "4069337684d348e0a1ab4eb6a1a2b14d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fe99da4f-5630-4afd-918b-b327193d8489", "external-id": "nsx-vlan-transportzone-688", "segmentation_id": 688, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap03319779-11", "ovs_interfaceid": "03319779-1123-4b6b-899b-834771303eff", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1405.333578] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1801946, 'name': CreateVM_Task, 'duration_secs': 0.696573} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1405.333578] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2b391628-18a2-4606-8c59-58ba642cee50] Created VM on the ESX host {{(pid=62519) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1405.335030] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2bcdd96e-4e47-4b74-bed4-3230c87035c0 tempest-ServersV294TestFqdnHostnames-312089260 tempest-ServersV294TestFqdnHostnames-312089260-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1405.335030] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2bcdd96e-4e47-4b74-bed4-3230c87035c0 tempest-ServersV294TestFqdnHostnames-312089260 tempest-ServersV294TestFqdnHostnames-312089260-project-member] Acquired lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1405.335030] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2bcdd96e-4e47-4b74-bed4-3230c87035c0 tempest-ServersV294TestFqdnHostnames-312089260 tempest-ServersV294TestFqdnHostnames-312089260-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1405.335030] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b290eb15-3297-4153-918e-be7ced77ccad {{(pid=62519) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1405.340655] env[62519]: DEBUG oslo_vmware.api [None req-2bcdd96e-4e47-4b74-bed4-3230c87035c0 tempest-ServersV294TestFqdnHostnames-312089260 tempest-ServersV294TestFqdnHostnames-312089260-project-member] Waiting for the task: (returnval){ [ 1405.340655] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]521387e8-a519-71b9-9b63-c94717baf291" [ 1405.340655] env[62519]: _type = "Task" [ 1405.340655] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1405.350125] env[62519]: DEBUG oslo_vmware.api [None req-2bcdd96e-4e47-4b74-bed4-3230c87035c0 tempest-ServersV294TestFqdnHostnames-312089260 tempest-ServersV294TestFqdnHostnames-312089260-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]521387e8-a519-71b9-9b63-c94717baf291, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1405.359420] env[62519]: DEBUG oslo_concurrency.lockutils [None req-94daf7e7-f2b3-4130-bb71-0b7053690311 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Releasing lock "refresh_cache-11d4a010-959f-4f53-94dc-7499007612ad" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1405.359748] env[62519]: DEBUG nova.compute.manager [None req-94daf7e7-f2b3-4130-bb71-0b7053690311 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] [instance: 11d4a010-959f-4f53-94dc-7499007612ad] Instance network_info: |[{"id": "ca1a3bbf-3f10-4a96-a67d-b77464ab25e7", "address": "fa:16:3e:cc:ef:21", "network": {"id": "ddacabe4-9723-4a0b-9642-4c3ca54cd05f", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1355796152-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "549cc35f5ff249f6bf22c67872883db0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bc1e16db-ad3b-4b7f-ab64-4609c87abac0", "external-id": "nsx-vlan-transportzone-500", "segmentation_id": 500, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapca1a3bbf-3f", "ovs_interfaceid": "ca1a3bbf-3f10-4a96-a67d-b77464ab25e7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62519) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2004}} [ 1405.360211] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-94daf7e7-f2b3-4130-bb71-0b7053690311 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] [instance: 11d4a010-959f-4f53-94dc-7499007612ad] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:cc:ef:21', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'bc1e16db-ad3b-4b7f-ab64-4609c87abac0', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'ca1a3bbf-3f10-4a96-a67d-b77464ab25e7', 
'vif_model': 'vmxnet3'}] {{(pid=62519) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1405.372994] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-94daf7e7-f2b3-4130-bb71-0b7053690311 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Creating folder: Project (549cc35f5ff249f6bf22c67872883db0). Parent ref: group-v373567. {{(pid=62519) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1405.372994] env[62519]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-90f6b6b7-753e-485f-97d4-ca353a016650 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1405.386576] env[62519]: INFO nova.virt.vmwareapi.vm_util [None req-94daf7e7-f2b3-4130-bb71-0b7053690311 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Created folder: Project (549cc35f5ff249f6bf22c67872883db0) in parent group-v373567. [ 1405.386979] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-94daf7e7-f2b3-4130-bb71-0b7053690311 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Creating folder: Instances. Parent ref: group-v373592. {{(pid=62519) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1405.387068] env[62519]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-72cab7a7-1cda-44b4-b56b-dd716addf669 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1405.396997] env[62519]: INFO nova.virt.vmwareapi.vm_util [None req-94daf7e7-f2b3-4130-bb71-0b7053690311 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Created folder: Instances in parent group-v373592. [ 1405.397304] env[62519]: DEBUG oslo.service.loopingcall [None req-94daf7e7-f2b3-4130-bb71-0b7053690311 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62519) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1405.399556] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 11d4a010-959f-4f53-94dc-7499007612ad] Creating VM on the ESX host {{(pid=62519) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1405.400081] env[62519]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-193590f0-0080-4ba5-b084-dc8cd549cde2 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1405.417320] env[62519]: DEBUG oslo_concurrency.lockutils [None req-a045cbfb-ac93-4591-9bfc-366cc2fb264e tempest-VolumesAdminNegativeTest-694306887 tempest-VolumesAdminNegativeTest-694306887-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1405.424354] env[62519]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1405.424354] env[62519]: value = "task-1801950" [ 1405.424354] env[62519]: _type = "Task" [ 1405.424354] env[62519]: } to complete. 
{{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1405.432264] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1801950, 'name': CreateVM_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1405.462744] env[62519]: DEBUG oslo_concurrency.lockutils [None req-0f0e68dd-2aa4-4309-84c5-3072ca2b5eec tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Acquiring lock "refresh_cache-34d2991e-b6df-473d-8994-e45ff57ef131" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1405.462744] env[62519]: DEBUG oslo_concurrency.lockutils [None req-0f0e68dd-2aa4-4309-84c5-3072ca2b5eec tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Acquired lock "refresh_cache-34d2991e-b6df-473d-8994-e45ff57ef131" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1405.462744] env[62519]: DEBUG nova.network.neutron [None req-0f0e68dd-2aa4-4309-84c5-3072ca2b5eec tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] [instance: 34d2991e-b6df-473d-8994-e45ff57ef131] Building network info cache for instance {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1405.492963] env[62519]: DEBUG oslo_concurrency.lockutils [None req-34a2cefc-4314-45a8-b8ba-97221121738f tempest-ServerDiagnosticsNegativeTest-1844110598 tempest-ServerDiagnosticsNegativeTest-1844110598-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1405.614347] env[62519]: DEBUG nova.scheduler.client.report [None req-3dc13758-4a00-4637-af78-ec1a85cd10bf tempest-ServersAdmin275Test-1657935187 tempest-ServersAdmin275Test-1657935187-project-member] Inventory has not changed for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 159, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1405.669361] env[62519]: DEBUG nova.compute.manager [None req-4c3fd39d-ef1d-4593-8784-6a91635c8aff tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] [instance: 099112ae-569b-4853-bc47-b0b8b97d2525] Start spawning the instance on the hypervisor. {{(pid=62519) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2647}} [ 1405.671181] env[62519]: DEBUG oslo_vmware.api [None req-e8efc79a-e5f9-47fc-b2c3-ac5d306260b4 tempest-ServerDiagnosticsTest-1027319288 tempest-ServerDiagnosticsTest-1027319288-project-member] Task: {'id': task-1801947, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.065878} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1405.672154] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-e8efc79a-e5f9-47fc-b2c3-ac5d306260b4 tempest-ServerDiagnosticsTest-1027319288 tempest-ServerDiagnosticsTest-1027319288-project-member] [instance: 1118c1e5-1aa8-4f52-9fb9-e86531bf83d1] Extended root virtual disk {{(pid=62519) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1405.672489] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c92b3bed-f12f-4349-9383-37f9a6c118c4 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1405.711897] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-e8efc79a-e5f9-47fc-b2c3-ac5d306260b4 tempest-ServerDiagnosticsTest-1027319288 tempest-ServerDiagnosticsTest-1027319288-project-member] [instance: 1118c1e5-1aa8-4f52-9fb9-e86531bf83d1] Reconfiguring VM instance instance-00000007 to attach disk [datastore1] 1118c1e5-1aa8-4f52-9fb9-e86531bf83d1/1118c1e5-1aa8-4f52-9fb9-e86531bf83d1.vmdk or device None with type sparse {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1405.714201] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-759d823f-b5a8-4434-a4be-f06767c816a1 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1405.741429] env[62519]: DEBUG oslo_vmware.api [None req-e8efc79a-e5f9-47fc-b2c3-ac5d306260b4 tempest-ServerDiagnosticsTest-1027319288 tempest-ServerDiagnosticsTest-1027319288-project-member] Waiting for the task: (returnval){ [ 1405.741429] env[62519]: value = "task-1801951" [ 1405.741429] env[62519]: _type = "Task" [ 1405.741429] env[62519]: } to complete. 
{{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1405.745295] env[62519]: DEBUG nova.virt.hardware [None req-4c3fd39d-ef1d-4593-8784-6a91635c8aff tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T07:56:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T07:55:55Z,direct_url=,disk_format='vmdk',id=15793716-f1d9-4a86-9030-717adf498693,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4069337684d348e0a1ab4eb6a1a2b14d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T07:55:56Z,virtual_size=,visibility=), allow threads: False {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1405.746093] env[62519]: DEBUG nova.virt.hardware [None req-4c3fd39d-ef1d-4593-8784-6a91635c8aff tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Flavor limits 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1405.746093] env[62519]: DEBUG nova.virt.hardware [None req-4c3fd39d-ef1d-4593-8784-6a91635c8aff tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Image limits 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1405.746093] env[62519]: DEBUG nova.virt.hardware [None req-4c3fd39d-ef1d-4593-8784-6a91635c8aff tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Flavor pref 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1405.746093] env[62519]: DEBUG nova.virt.hardware [None req-4c3fd39d-ef1d-4593-8784-6a91635c8aff tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Image pref 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1405.746531] env[62519]: DEBUG nova.virt.hardware [None req-4c3fd39d-ef1d-4593-8784-6a91635c8aff tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1405.746531] env[62519]: DEBUG nova.virt.hardware [None req-4c3fd39d-ef1d-4593-8784-6a91635c8aff tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1405.746531] env[62519]: DEBUG nova.virt.hardware [None req-4c3fd39d-ef1d-4593-8784-6a91635c8aff tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62519) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1405.746685] env[62519]: DEBUG nova.virt.hardware [None 
req-4c3fd39d-ef1d-4593-8784-6a91635c8aff tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Got 1 possible topologies {{(pid=62519) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1405.747538] env[62519]: DEBUG nova.virt.hardware [None req-4c3fd39d-ef1d-4593-8784-6a91635c8aff tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1405.747538] env[62519]: DEBUG nova.virt.hardware [None req-4c3fd39d-ef1d-4593-8784-6a91635c8aff tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1405.748168] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4228abe9-f6dc-4eb6-ad3b-d32aee21b6ed {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1405.759289] env[62519]: DEBUG oslo_concurrency.lockutils [req-60f3675d-d567-4b00-8533-4219266ac4e0 req-c86db273-7d87-44d3-9225-031c7f18cf58 service nova] Releasing lock "refresh_cache-1118c1e5-1aa8-4f52-9fb9-e86531bf83d1" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1405.759838] env[62519]: DEBUG oslo_vmware.api [None req-e8efc79a-e5f9-47fc-b2c3-ac5d306260b4 tempest-ServerDiagnosticsTest-1027319288 tempest-ServerDiagnosticsTest-1027319288-project-member] Task: {'id': task-1801951, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1405.763801] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cfc0e2a8-dbc0-4bdc-9245-228d1bd6ffff {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1405.852256] env[62519]: DEBUG oslo_vmware.api [None req-2bcdd96e-4e47-4b74-bed4-3230c87035c0 tempest-ServersV294TestFqdnHostnames-312089260 tempest-ServersV294TestFqdnHostnames-312089260-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]521387e8-a519-71b9-9b63-c94717baf291, 'name': SearchDatastore_Task, 'duration_secs': 0.049066} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1405.852572] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2bcdd96e-4e47-4b74-bed4-3230c87035c0 tempest-ServersV294TestFqdnHostnames-312089260 tempest-ServersV294TestFqdnHostnames-312089260-project-member] Releasing lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1405.852835] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-2bcdd96e-4e47-4b74-bed4-3230c87035c0 tempest-ServersV294TestFqdnHostnames-312089260 tempest-ServersV294TestFqdnHostnames-312089260-project-member] [instance: 2b391628-18a2-4606-8c59-58ba642cee50] Processing image 15793716-f1d9-4a86-9030-717adf498693 {{(pid=62519) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1405.853102] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2bcdd96e-4e47-4b74-bed4-3230c87035c0 tempest-ServersV294TestFqdnHostnames-312089260 tempest-ServersV294TestFqdnHostnames-312089260-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1405.853259] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2bcdd96e-4e47-4b74-bed4-3230c87035c0 tempest-ServersV294TestFqdnHostnames-312089260 tempest-ServersV294TestFqdnHostnames-312089260-project-member] Acquired lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1405.853437] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-2bcdd96e-4e47-4b74-bed4-3230c87035c0 tempest-ServersV294TestFqdnHostnames-312089260 tempest-ServersV294TestFqdnHostnames-312089260-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62519) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1405.853726] env[62519]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-94faa7c2-61a8-4395-9aea-aa1f9a16581d {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1405.936056] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1801950, 'name': CreateVM_Task, 'duration_secs': 0.427356} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1405.936056] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 11d4a010-959f-4f53-94dc-7499007612ad] Created VM on the ESX host {{(pid=62519) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1405.936667] env[62519]: DEBUG oslo_concurrency.lockutils [None req-94daf7e7-f2b3-4130-bb71-0b7053690311 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1405.936835] env[62519]: DEBUG oslo_concurrency.lockutils [None req-94daf7e7-f2b3-4130-bb71-0b7053690311 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Acquired lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1405.937157] env[62519]: DEBUG oslo_concurrency.lockutils [None req-94daf7e7-f2b3-4130-bb71-0b7053690311 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1405.937439] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5e388ec0-7732-4534-9f31-e068e973e192 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1405.942542] env[62519]: DEBUG oslo_vmware.api [None req-94daf7e7-f2b3-4130-bb71-0b7053690311 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Waiting for the task: (returnval){ [ 1405.942542] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]529f8989-1ce5-1a93-80ca-3b4abdeb3380" [ 1405.942542] env[62519]: _type = "Task" [ 1405.942542] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1405.953780] env[62519]: DEBUG oslo_vmware.api [None req-94daf7e7-f2b3-4130-bb71-0b7053690311 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]529f8989-1ce5-1a93-80ca-3b4abdeb3380, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1405.997094] env[62519]: DEBUG nova.network.neutron [None req-0f0e68dd-2aa4-4309-84c5-3072ca2b5eec tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] [instance: 34d2991e-b6df-473d-8994-e45ff57ef131] Instance cache missing network info. 
{{(pid=62519) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1406.121172] env[62519]: DEBUG oslo_concurrency.lockutils [None req-3dc13758-4a00-4637-af78-ec1a85cd10bf tempest-ServersAdmin275Test-1657935187 tempest-ServersAdmin275Test-1657935187-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.472s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1406.121759] env[62519]: DEBUG nova.compute.manager [None req-3dc13758-4a00-4637-af78-ec1a85cd10bf tempest-ServersAdmin275Test-1657935187 tempest-ServersAdmin275Test-1657935187-project-member] [instance: c07e4d30-44bc-417b-8137-97f974aec932] Start building networks asynchronously for instance. {{(pid=62519) _build_resources /opt/stack/nova/nova/compute/manager.py:2838}} [ 1406.125303] env[62519]: DEBUG oslo_concurrency.lockutils [None req-21ea1173-3fc6-4081-929e-331a7f1267ad tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 9.119s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1406.126174] env[62519]: INFO nova.compute.claims [None req-21ea1173-3fc6-4081-929e-331a7f1267ad tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] [instance: 31f55ece-82e6-40ad-ad7c-1af645f307bf] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1406.191344] env[62519]: DEBUG nova.network.neutron [None req-0f0e68dd-2aa4-4309-84c5-3072ca2b5eec tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] [instance: 34d2991e-b6df-473d-8994-e45ff57ef131] Updating instance_info_cache with network_info: [{"id": "e6b9aab2-f105-4c06-b204-f0626f41ccbe", "address": "fa:16:3e:ed:4a:67", "network": {"id": "f59f09e8-6201-4c07-9809-ef4c213de15f", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-906781315-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "555fa612766f4b5fa173664ca3fa496c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8bc3fa06-9d5b-4ab1-8113-6ed8942d23b6", "external-id": "nsx-vlan-transportzone-72", "segmentation_id": 72, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape6b9aab2-f1", "ovs_interfaceid": "e6b9aab2-f105-4c06-b204-f0626f41ccbe", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1406.251640] env[62519]: DEBUG oslo_vmware.api [None req-e8efc79a-e5f9-47fc-b2c3-ac5d306260b4 tempest-ServerDiagnosticsTest-1027319288 tempest-ServerDiagnosticsTest-1027319288-project-member] Task: {'id': task-1801951, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1406.452138] env[62519]: DEBUG oslo_vmware.api [None req-94daf7e7-f2b3-4130-bb71-0b7053690311 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]529f8989-1ce5-1a93-80ca-3b4abdeb3380, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1406.601031] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-2bcdd96e-4e47-4b74-bed4-3230c87035c0 tempest-ServersV294TestFqdnHostnames-312089260 tempest-ServersV294TestFqdnHostnames-312089260-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62519) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1406.601031] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-2bcdd96e-4e47-4b74-bed4-3230c87035c0 tempest-ServersV294TestFqdnHostnames-312089260 tempest-ServersV294TestFqdnHostnames-312089260-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62519) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1406.601280] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-17449db8-3f9e-48c8-bb2d-bcbb7df3f0ea {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1406.609502] env[62519]: DEBUG oslo_vmware.api [None req-2bcdd96e-4e47-4b74-bed4-3230c87035c0 tempest-ServersV294TestFqdnHostnames-312089260 tempest-ServersV294TestFqdnHostnames-312089260-project-member] Waiting for the task: (returnval){ [ 1406.609502] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]522cf83d-599c-a613-ae2a-968c0924fa9d" [ 1406.609502] env[62519]: _type = "Task" [ 1406.609502] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1406.619721] env[62519]: DEBUG oslo_vmware.api [None req-2bcdd96e-4e47-4b74-bed4-3230c87035c0 tempest-ServersV294TestFqdnHostnames-312089260 tempest-ServersV294TestFqdnHostnames-312089260-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]522cf83d-599c-a613-ae2a-968c0924fa9d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1406.631071] env[62519]: DEBUG nova.compute.utils [None req-3dc13758-4a00-4637-af78-ec1a85cd10bf tempest-ServersAdmin275Test-1657935187 tempest-ServersAdmin275Test-1657935187-project-member] Using /dev/sd instead of None {{(pid=62519) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1406.641648] env[62519]: DEBUG nova.compute.manager [None req-3dc13758-4a00-4637-af78-ec1a85cd10bf tempest-ServersAdmin275Test-1657935187 tempest-ServersAdmin275Test-1657935187-project-member] [instance: c07e4d30-44bc-417b-8137-97f974aec932] Not allocating networking since 'none' was specified. 
{{(pid=62519) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1985}} [ 1406.699670] env[62519]: DEBUG oslo_concurrency.lockutils [None req-0f0e68dd-2aa4-4309-84c5-3072ca2b5eec tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Releasing lock "refresh_cache-34d2991e-b6df-473d-8994-e45ff57ef131" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1406.700028] env[62519]: DEBUG nova.compute.manager [None req-0f0e68dd-2aa4-4309-84c5-3072ca2b5eec tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] [instance: 34d2991e-b6df-473d-8994-e45ff57ef131] Instance network_info: |[{"id": "e6b9aab2-f105-4c06-b204-f0626f41ccbe", "address": "fa:16:3e:ed:4a:67", "network": {"id": "f59f09e8-6201-4c07-9809-ef4c213de15f", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-906781315-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "555fa612766f4b5fa173664ca3fa496c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8bc3fa06-9d5b-4ab1-8113-6ed8942d23b6", "external-id": "nsx-vlan-transportzone-72", "segmentation_id": 72, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape6b9aab2-f1", "ovs_interfaceid": "e6b9aab2-f105-4c06-b204-f0626f41ccbe", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62519) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2004}} [ 1406.701041] env[62519]: DEBUG nova.compute.manager [None req-120ef937-d998-4979-a2ea-db598aea78e6 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] [instance: 8b178cc0-db79-4ec2-8962-f31b936f8eff] Checking state {{(pid=62519) _get_power_state /opt/stack/nova/nova/compute/manager.py:1799}} [ 1406.701472] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-0f0e68dd-2aa4-4309-84c5-3072ca2b5eec tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] [instance: 34d2991e-b6df-473d-8994-e45ff57ef131] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ed:4a:67', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '8bc3fa06-9d5b-4ab1-8113-6ed8942d23b6', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'e6b9aab2-f105-4c06-b204-f0626f41ccbe', 'vif_model': 'vmxnet3'}] {{(pid=62519) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1406.709795] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-0f0e68dd-2aa4-4309-84c5-3072ca2b5eec tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Creating folder: Project (555fa612766f4b5fa173664ca3fa496c). Parent ref: group-v373567. 
{{(pid=62519) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1406.711704] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c91eed42-d35f-49a8-a9f6-ed41499c9123 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1406.715104] env[62519]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e6f27498-16a7-40c7-ae94-1f00b01378cb {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1406.725746] env[62519]: INFO nova.virt.vmwareapi.vm_util [None req-0f0e68dd-2aa4-4309-84c5-3072ca2b5eec tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Created folder: Project (555fa612766f4b5fa173664ca3fa496c) in parent group-v373567. [ 1406.726367] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-0f0e68dd-2aa4-4309-84c5-3072ca2b5eec tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Creating folder: Instances. Parent ref: group-v373595. {{(pid=62519) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1406.726455] env[62519]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-745446e9-6646-469d-b10d-061c5a249e53 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1406.737281] env[62519]: INFO nova.virt.vmwareapi.vm_util [None req-0f0e68dd-2aa4-4309-84c5-3072ca2b5eec tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Created folder: Instances in parent group-v373595. [ 1406.737532] env[62519]: DEBUG oslo.service.loopingcall [None req-0f0e68dd-2aa4-4309-84c5-3072ca2b5eec tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62519) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1406.737723] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 34d2991e-b6df-473d-8994-e45ff57ef131] Creating VM on the ESX host {{(pid=62519) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1406.737943] env[62519]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-676559ab-ecc7-4006-b9bc-40b9d5824c13 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1406.767171] env[62519]: DEBUG oslo_vmware.api [None req-e8efc79a-e5f9-47fc-b2c3-ac5d306260b4 tempest-ServerDiagnosticsTest-1027319288 tempest-ServerDiagnosticsTest-1027319288-project-member] Task: {'id': task-1801951, 'name': ReconfigVM_Task, 'duration_secs': 0.925543} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1406.768632] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-e8efc79a-e5f9-47fc-b2c3-ac5d306260b4 tempest-ServerDiagnosticsTest-1027319288 tempest-ServerDiagnosticsTest-1027319288-project-member] [instance: 1118c1e5-1aa8-4f52-9fb9-e86531bf83d1] Reconfigured VM instance instance-00000007 to attach disk [datastore1] 1118c1e5-1aa8-4f52-9fb9-e86531bf83d1/1118c1e5-1aa8-4f52-9fb9-e86531bf83d1.vmdk or device None with type sparse {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1406.769249] env[62519]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1406.769249] env[62519]: value = "task-1801954" [ 1406.769249] env[62519]: _type = "Task" [ 1406.769249] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1406.769432] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-756dcfb1-52b4-4bd6-9523-e90e027c19c7 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1406.779651] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1801954, 'name': CreateVM_Task} progress is 6%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1406.780373] env[62519]: DEBUG oslo_vmware.api [None req-e8efc79a-e5f9-47fc-b2c3-ac5d306260b4 tempest-ServerDiagnosticsTest-1027319288 tempest-ServerDiagnosticsTest-1027319288-project-member] Waiting for the task: (returnval){ [ 1406.780373] env[62519]: value = "task-1801955" [ 1406.780373] env[62519]: _type = "Task" [ 1406.780373] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1406.791543] env[62519]: DEBUG oslo_vmware.api [None req-e8efc79a-e5f9-47fc-b2c3-ac5d306260b4 tempest-ServerDiagnosticsTest-1027319288 tempest-ServerDiagnosticsTest-1027319288-project-member] Task: {'id': task-1801955, 'name': Rename_Task} progress is 6%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1406.958707] env[62519]: DEBUG oslo_vmware.api [None req-94daf7e7-f2b3-4130-bb71-0b7053690311 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]529f8989-1ce5-1a93-80ca-3b4abdeb3380, 'name': SearchDatastore_Task, 'duration_secs': 0.663271} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1406.959056] env[62519]: DEBUG oslo_concurrency.lockutils [None req-94daf7e7-f2b3-4130-bb71-0b7053690311 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Releasing lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1406.959313] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-94daf7e7-f2b3-4130-bb71-0b7053690311 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] [instance: 11d4a010-959f-4f53-94dc-7499007612ad] Processing image 15793716-f1d9-4a86-9030-717adf498693 {{(pid=62519) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1406.959502] env[62519]: DEBUG oslo_concurrency.lockutils [None req-94daf7e7-f2b3-4130-bb71-0b7053690311 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1407.073467] env[62519]: DEBUG nova.network.neutron [None req-4c3fd39d-ef1d-4593-8784-6a91635c8aff tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] [instance: 099112ae-569b-4853-bc47-b0b8b97d2525] Successfully updated port: 18c7d589-708e-4a8d-b1f1-c646d532c6aa {{(pid=62519) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1407.125108] env[62519]: DEBUG oslo_vmware.api [None req-2bcdd96e-4e47-4b74-bed4-3230c87035c0 tempest-ServersV294TestFqdnHostnames-312089260 tempest-ServersV294TestFqdnHostnames-312089260-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]522cf83d-599c-a613-ae2a-968c0924fa9d, 'name': SearchDatastore_Task, 'duration_secs': 0.019791} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1407.126182] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-23d0a2b5-a940-44e2-9548-73d2d3466196 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1407.132463] env[62519]: DEBUG oslo_vmware.api [None req-2bcdd96e-4e47-4b74-bed4-3230c87035c0 tempest-ServersV294TestFqdnHostnames-312089260 tempest-ServersV294TestFqdnHostnames-312089260-project-member] Waiting for the task: (returnval){ [ 1407.132463] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]5294da9e-fb6b-3ad2-5477-365807b2359a" [ 1407.132463] env[62519]: _type = "Task" [ 1407.132463] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1407.139187] env[62519]: DEBUG nova.compute.manager [None req-3dc13758-4a00-4637-af78-ec1a85cd10bf tempest-ServersAdmin275Test-1657935187 tempest-ServersAdmin275Test-1657935187-project-member] [instance: c07e4d30-44bc-417b-8137-97f974aec932] Start building block device mappings for instance. 
{{(pid=62519) _build_resources /opt/stack/nova/nova/compute/manager.py:2873}} [ 1407.148806] env[62519]: DEBUG oslo_vmware.api [None req-2bcdd96e-4e47-4b74-bed4-3230c87035c0 tempest-ServersV294TestFqdnHostnames-312089260 tempest-ServersV294TestFqdnHostnames-312089260-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]5294da9e-fb6b-3ad2-5477-365807b2359a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1407.227918] env[62519]: INFO nova.compute.manager [None req-120ef937-d998-4979-a2ea-db598aea78e6 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] [instance: 8b178cc0-db79-4ec2-8962-f31b936f8eff] instance snapshotting [ 1407.228143] env[62519]: WARNING nova.compute.manager [None req-120ef937-d998-4979-a2ea-db598aea78e6 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] [instance: 8b178cc0-db79-4ec2-8962-f31b936f8eff] trying to snapshot a non-running instance: (state: 4 expected: 1) [ 1407.236275] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-177a933a-8ade-45b8-a619-34f4ce8c8be6 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1407.257132] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b538f973-37ac-4b4b-a0de-06ca59854e83 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1407.293269] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1801954, 'name': CreateVM_Task, 'duration_secs': 0.447945} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1407.296407] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 34d2991e-b6df-473d-8994-e45ff57ef131] Created VM on the ESX host {{(pid=62519) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1407.297390] env[62519]: DEBUG oslo_concurrency.lockutils [None req-0f0e68dd-2aa4-4309-84c5-3072ca2b5eec tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1407.297594] env[62519]: DEBUG oslo_concurrency.lockutils [None req-0f0e68dd-2aa4-4309-84c5-3072ca2b5eec tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Acquired lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1407.297887] env[62519]: DEBUG oslo_concurrency.lockutils [None req-0f0e68dd-2aa4-4309-84c5-3072ca2b5eec tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1407.301296] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a52c46a3-2ec7-40ee-bec7-4bbe75893bd8 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1407.303299] env[62519]: DEBUG oslo_vmware.api [None req-e8efc79a-e5f9-47fc-b2c3-ac5d306260b4 tempest-ServerDiagnosticsTest-1027319288 tempest-ServerDiagnosticsTest-1027319288-project-member] Task: {'id': task-1801955, 'name': Rename_Task, 'duration_secs': 0.2128} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1407.303580] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-e8efc79a-e5f9-47fc-b2c3-ac5d306260b4 tempest-ServerDiagnosticsTest-1027319288 tempest-ServerDiagnosticsTest-1027319288-project-member] [instance: 1118c1e5-1aa8-4f52-9fb9-e86531bf83d1] Powering on the VM {{(pid=62519) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1407.304249] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-4fba7278-9288-4b34-9b79-7dad350441f0 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1407.307657] env[62519]: DEBUG oslo_vmware.api [None req-0f0e68dd-2aa4-4309-84c5-3072ca2b5eec tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Waiting for the task: (returnval){ [ 1407.307657] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]527148a9-d966-57dd-749e-cc2cc4cceb72" [ 1407.307657] env[62519]: _type = "Task" [ 1407.307657] env[62519]: } to complete. 
{{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1407.316014] env[62519]: DEBUG oslo_vmware.api [None req-e8efc79a-e5f9-47fc-b2c3-ac5d306260b4 tempest-ServerDiagnosticsTest-1027319288 tempest-ServerDiagnosticsTest-1027319288-project-member] Waiting for the task: (returnval){ [ 1407.316014] env[62519]: value = "task-1801956" [ 1407.316014] env[62519]: _type = "Task" [ 1407.316014] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1407.324432] env[62519]: DEBUG oslo_vmware.api [None req-0f0e68dd-2aa4-4309-84c5-3072ca2b5eec tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]527148a9-d966-57dd-749e-cc2cc4cceb72, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1407.331547] env[62519]: DEBUG oslo_vmware.api [None req-e8efc79a-e5f9-47fc-b2c3-ac5d306260b4 tempest-ServerDiagnosticsTest-1027319288 tempest-ServerDiagnosticsTest-1027319288-project-member] Task: {'id': task-1801956, 'name': PowerOnVM_Task} progress is 33%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1407.525026] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4f5ec26-2388-4366-b986-609afb04e221 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1407.537116] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-abff0736-988a-4d8d-b160-350be69ab925 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1407.571219] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42de608c-eba5-4e04-b03e-85ac85470b73 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1407.581884] env[62519]: DEBUG oslo_concurrency.lockutils [None req-4c3fd39d-ef1d-4593-8784-6a91635c8aff tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Acquiring lock "refresh_cache-099112ae-569b-4853-bc47-b0b8b97d2525" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1407.581884] env[62519]: DEBUG oslo_concurrency.lockutils [None req-4c3fd39d-ef1d-4593-8784-6a91635c8aff tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Acquired lock "refresh_cache-099112ae-569b-4853-bc47-b0b8b97d2525" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1407.581884] env[62519]: DEBUG nova.network.neutron [None req-4c3fd39d-ef1d-4593-8784-6a91635c8aff tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] [instance: 099112ae-569b-4853-bc47-b0b8b97d2525] Building network info cache for instance {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1407.581884] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87e38848-f23b-4471-b702-1d744b4f69e8 {{(pid=62519) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1407.605070] env[62519]: DEBUG nova.compute.provider_tree [None req-21ea1173-3fc6-4081-929e-331a7f1267ad tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Inventory has not changed in ProviderTree for provider: f8ca0d98-9158-4b85-ae0e-b106f966dd44 {{(pid=62519) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1407.644617] env[62519]: DEBUG oslo_vmware.api [None req-2bcdd96e-4e47-4b74-bed4-3230c87035c0 tempest-ServersV294TestFqdnHostnames-312089260 tempest-ServersV294TestFqdnHostnames-312089260-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]5294da9e-fb6b-3ad2-5477-365807b2359a, 'name': SearchDatastore_Task, 'duration_secs': 0.018835} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1407.644939] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2bcdd96e-4e47-4b74-bed4-3230c87035c0 tempest-ServersV294TestFqdnHostnames-312089260 tempest-ServersV294TestFqdnHostnames-312089260-project-member] Releasing lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1407.645319] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-2bcdd96e-4e47-4b74-bed4-3230c87035c0 tempest-ServersV294TestFqdnHostnames-312089260 tempest-ServersV294TestFqdnHostnames-312089260-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk to [datastore1] 2b391628-18a2-4606-8c59-58ba642cee50/2b391628-18a2-4606-8c59-58ba642cee50.vmdk {{(pid=62519) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1407.645531] env[62519]: DEBUG oslo_concurrency.lockutils [None req-94daf7e7-f2b3-4130-bb71-0b7053690311 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Acquired lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1407.645713] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-94daf7e7-f2b3-4130-bb71-0b7053690311 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62519) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1407.645934] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-0a747435-2502-4b95-946b-1b4ca9dc21c8 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1407.648273] env[62519]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c0018e55-d63f-4111-abba-252bb0738991 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1407.661238] env[62519]: DEBUG oslo_vmware.api [None req-2bcdd96e-4e47-4b74-bed4-3230c87035c0 tempest-ServersV294TestFqdnHostnames-312089260 tempest-ServersV294TestFqdnHostnames-312089260-project-member] Waiting for the task: (returnval){ [ 1407.661238] env[62519]: value = 
"task-1801957" [ 1407.661238] env[62519]: _type = "Task" [ 1407.661238] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1407.663087] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-94daf7e7-f2b3-4130-bb71-0b7053690311 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62519) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1407.663236] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-94daf7e7-f2b3-4130-bb71-0b7053690311 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62519) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1407.669870] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2de640cc-449e-4408-853a-b20afffef45d {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1407.674624] env[62519]: DEBUG oslo_vmware.api [None req-94daf7e7-f2b3-4130-bb71-0b7053690311 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Waiting for the task: (returnval){ [ 1407.674624] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]52d9f4f5-f363-1f61-78a7-abb3812d5fe7" [ 1407.674624] env[62519]: _type = "Task" [ 1407.674624] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1407.677192] env[62519]: DEBUG oslo_vmware.api [None req-2bcdd96e-4e47-4b74-bed4-3230c87035c0 tempest-ServersV294TestFqdnHostnames-312089260 tempest-ServersV294TestFqdnHostnames-312089260-project-member] Task: {'id': task-1801957, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1407.689125] env[62519]: DEBUG oslo_vmware.api [None req-94daf7e7-f2b3-4130-bb71-0b7053690311 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52d9f4f5-f363-1f61-78a7-abb3812d5fe7, 'name': SearchDatastore_Task, 'duration_secs': 0.009392} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1407.689125] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0e27a9f3-bc59-412c-b987-0e8519bd6183 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1407.695341] env[62519]: DEBUG oslo_vmware.api [None req-94daf7e7-f2b3-4130-bb71-0b7053690311 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Waiting for the task: (returnval){ [ 1407.695341] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]52b39e0b-29f1-9546-db95-2e5b11ff1214" [ 1407.695341] env[62519]: _type = "Task" [ 1407.695341] env[62519]: } to complete. 
{{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1407.704470] env[62519]: DEBUG oslo_vmware.api [None req-94daf7e7-f2b3-4130-bb71-0b7053690311 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52b39e0b-29f1-9546-db95-2e5b11ff1214, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1407.745581] env[62519]: DEBUG nova.compute.manager [req-647329bc-ea96-45ee-9ea8-ea1487131f90 req-ec68474b-2c32-4b91-a832-eb1a0d3e05bc service nova] [instance: 11d4a010-959f-4f53-94dc-7499007612ad] Received event network-vif-plugged-ca1a3bbf-3f10-4a96-a67d-b77464ab25e7 {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1407.745657] env[62519]: DEBUG oslo_concurrency.lockutils [req-647329bc-ea96-45ee-9ea8-ea1487131f90 req-ec68474b-2c32-4b91-a832-eb1a0d3e05bc service nova] Acquiring lock "11d4a010-959f-4f53-94dc-7499007612ad-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1407.745837] env[62519]: DEBUG oslo_concurrency.lockutils [req-647329bc-ea96-45ee-9ea8-ea1487131f90 req-ec68474b-2c32-4b91-a832-eb1a0d3e05bc service nova] Lock "11d4a010-959f-4f53-94dc-7499007612ad-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1407.746027] env[62519]: DEBUG oslo_concurrency.lockutils [req-647329bc-ea96-45ee-9ea8-ea1487131f90 req-ec68474b-2c32-4b91-a832-eb1a0d3e05bc service nova] Lock "11d4a010-959f-4f53-94dc-7499007612ad-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1407.746613] env[62519]: DEBUG nova.compute.manager [req-647329bc-ea96-45ee-9ea8-ea1487131f90 req-ec68474b-2c32-4b91-a832-eb1a0d3e05bc service nova] [instance: 11d4a010-959f-4f53-94dc-7499007612ad] No waiting events found dispatching network-vif-plugged-ca1a3bbf-3f10-4a96-a67d-b77464ab25e7 {{(pid=62519) pop_instance_event /opt/stack/nova/nova/compute/manager.py:323}} [ 1407.746857] env[62519]: WARNING nova.compute.manager [req-647329bc-ea96-45ee-9ea8-ea1487131f90 req-ec68474b-2c32-4b91-a832-eb1a0d3e05bc service nova] [instance: 11d4a010-959f-4f53-94dc-7499007612ad] Received unexpected event network-vif-plugged-ca1a3bbf-3f10-4a96-a67d-b77464ab25e7 for instance with vm_state building and task_state spawning. [ 1407.747129] env[62519]: DEBUG nova.compute.manager [req-647329bc-ea96-45ee-9ea8-ea1487131f90 req-ec68474b-2c32-4b91-a832-eb1a0d3e05bc service nova] [instance: 11d4a010-959f-4f53-94dc-7499007612ad] Received event network-changed-ca1a3bbf-3f10-4a96-a67d-b77464ab25e7 {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1407.747314] env[62519]: DEBUG nova.compute.manager [req-647329bc-ea96-45ee-9ea8-ea1487131f90 req-ec68474b-2c32-4b91-a832-eb1a0d3e05bc service nova] [instance: 11d4a010-959f-4f53-94dc-7499007612ad] Refreshing instance network info cache due to event network-changed-ca1a3bbf-3f10-4a96-a67d-b77464ab25e7. 
{{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11628}} [ 1407.747581] env[62519]: DEBUG oslo_concurrency.lockutils [req-647329bc-ea96-45ee-9ea8-ea1487131f90 req-ec68474b-2c32-4b91-a832-eb1a0d3e05bc service nova] Acquiring lock "refresh_cache-11d4a010-959f-4f53-94dc-7499007612ad" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1407.747759] env[62519]: DEBUG oslo_concurrency.lockutils [req-647329bc-ea96-45ee-9ea8-ea1487131f90 req-ec68474b-2c32-4b91-a832-eb1a0d3e05bc service nova] Acquired lock "refresh_cache-11d4a010-959f-4f53-94dc-7499007612ad" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1407.747951] env[62519]: DEBUG nova.network.neutron [req-647329bc-ea96-45ee-9ea8-ea1487131f90 req-ec68474b-2c32-4b91-a832-eb1a0d3e05bc service nova] [instance: 11d4a010-959f-4f53-94dc-7499007612ad] Refreshing network info cache for port ca1a3bbf-3f10-4a96-a67d-b77464ab25e7 {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1407.773296] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-120ef937-d998-4979-a2ea-db598aea78e6 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] [instance: 8b178cc0-db79-4ec2-8962-f31b936f8eff] Creating Snapshot of the VM instance {{(pid=62519) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1407.773932] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-414cf12f-4a8f-4d63-9d43-62e2b0a7e3c7 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1407.781937] env[62519]: DEBUG oslo_vmware.api [None req-120ef937-d998-4979-a2ea-db598aea78e6 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Waiting for the task: (returnval){ [ 1407.781937] env[62519]: value = "task-1801958" [ 1407.781937] env[62519]: _type = "Task" [ 1407.781937] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1407.794653] env[62519]: DEBUG oslo_vmware.api [None req-120ef937-d998-4979-a2ea-db598aea78e6 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Task: {'id': task-1801958, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1407.830929] env[62519]: DEBUG oslo_vmware.api [None req-0f0e68dd-2aa4-4309-84c5-3072ca2b5eec tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]527148a9-d966-57dd-749e-cc2cc4cceb72, 'name': SearchDatastore_Task, 'duration_secs': 0.013816} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1407.832205] env[62519]: DEBUG oslo_concurrency.lockutils [None req-0f0e68dd-2aa4-4309-84c5-3072ca2b5eec tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Releasing lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1407.832476] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-0f0e68dd-2aa4-4309-84c5-3072ca2b5eec tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] [instance: 34d2991e-b6df-473d-8994-e45ff57ef131] Processing image 15793716-f1d9-4a86-9030-717adf498693 {{(pid=62519) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1407.832952] env[62519]: DEBUG oslo_concurrency.lockutils [None req-0f0e68dd-2aa4-4309-84c5-3072ca2b5eec tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1407.846779] env[62519]: DEBUG oslo_vmware.api [None req-e8efc79a-e5f9-47fc-b2c3-ac5d306260b4 tempest-ServerDiagnosticsTest-1027319288 tempest-ServerDiagnosticsTest-1027319288-project-member] Task: {'id': task-1801956, 'name': PowerOnVM_Task} progress is 71%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1407.895396] env[62519]: DEBUG nova.compute.manager [req-3c572f18-858d-43fc-820e-aaadfee0afb4 req-46994605-6981-4c08-a233-3dff3f259bc8 service nova] [instance: 2b391628-18a2-4606-8c59-58ba642cee50] Received event network-changed-ac832bcb-7bb4-49de-be18-a8fd0d8ee16e {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1407.895552] env[62519]: DEBUG nova.compute.manager [req-3c572f18-858d-43fc-820e-aaadfee0afb4 req-46994605-6981-4c08-a233-3dff3f259bc8 service nova] [instance: 2b391628-18a2-4606-8c59-58ba642cee50] Refreshing instance network info cache due to event network-changed-ac832bcb-7bb4-49de-be18-a8fd0d8ee16e. 
{{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11628}} [ 1407.896223] env[62519]: DEBUG oslo_concurrency.lockutils [req-3c572f18-858d-43fc-820e-aaadfee0afb4 req-46994605-6981-4c08-a233-3dff3f259bc8 service nova] Acquiring lock "refresh_cache-2b391628-18a2-4606-8c59-58ba642cee50" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1407.896223] env[62519]: DEBUG oslo_concurrency.lockutils [req-3c572f18-858d-43fc-820e-aaadfee0afb4 req-46994605-6981-4c08-a233-3dff3f259bc8 service nova] Acquired lock "refresh_cache-2b391628-18a2-4606-8c59-58ba642cee50" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1407.897304] env[62519]: DEBUG nova.network.neutron [req-3c572f18-858d-43fc-820e-aaadfee0afb4 req-46994605-6981-4c08-a233-3dff3f259bc8 service nova] [instance: 2b391628-18a2-4606-8c59-58ba642cee50] Refreshing network info cache for port ac832bcb-7bb4-49de-be18-a8fd0d8ee16e {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1408.110072] env[62519]: DEBUG nova.scheduler.client.report [None req-21ea1173-3fc6-4081-929e-331a7f1267ad tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Inventory has not changed for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 159, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1408.156635] env[62519]: DEBUG nova.compute.manager [None req-3dc13758-4a00-4637-af78-ec1a85cd10bf tempest-ServersAdmin275Test-1657935187 tempest-ServersAdmin275Test-1657935187-project-member] [instance: c07e4d30-44bc-417b-8137-97f974aec932] Start spawning the instance on the hypervisor. {{(pid=62519) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2647}} [ 1408.180183] env[62519]: DEBUG oslo_vmware.api [None req-2bcdd96e-4e47-4b74-bed4-3230c87035c0 tempest-ServersV294TestFqdnHostnames-312089260 tempest-ServersV294TestFqdnHostnames-312089260-project-member] Task: {'id': task-1801957, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1408.211359] env[62519]: DEBUG oslo_vmware.api [None req-94daf7e7-f2b3-4130-bb71-0b7053690311 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52b39e0b-29f1-9546-db95-2e5b11ff1214, 'name': SearchDatastore_Task, 'duration_secs': 0.010772} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1408.214735] env[62519]: DEBUG nova.virt.hardware [None req-3dc13758-4a00-4637-af78-ec1a85cd10bf tempest-ServersAdmin275Test-1657935187 tempest-ServersAdmin275Test-1657935187-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T07:56:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T07:55:55Z,direct_url=,disk_format='vmdk',id=15793716-f1d9-4a86-9030-717adf498693,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4069337684d348e0a1ab4eb6a1a2b14d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T07:55:56Z,virtual_size=,visibility=), allow threads: False {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1408.215152] env[62519]: DEBUG nova.virt.hardware [None req-3dc13758-4a00-4637-af78-ec1a85cd10bf tempest-ServersAdmin275Test-1657935187 tempest-ServersAdmin275Test-1657935187-project-member] Flavor limits 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1408.215152] env[62519]: DEBUG nova.virt.hardware [None req-3dc13758-4a00-4637-af78-ec1a85cd10bf tempest-ServersAdmin275Test-1657935187 tempest-ServersAdmin275Test-1657935187-project-member] Image limits 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1408.215397] env[62519]: DEBUG nova.virt.hardware [None req-3dc13758-4a00-4637-af78-ec1a85cd10bf tempest-ServersAdmin275Test-1657935187 tempest-ServersAdmin275Test-1657935187-project-member] Flavor pref 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1408.215554] env[62519]: DEBUG nova.virt.hardware [None req-3dc13758-4a00-4637-af78-ec1a85cd10bf tempest-ServersAdmin275Test-1657935187 tempest-ServersAdmin275Test-1657935187-project-member] Image pref 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1408.215701] env[62519]: DEBUG nova.virt.hardware [None req-3dc13758-4a00-4637-af78-ec1a85cd10bf tempest-ServersAdmin275Test-1657935187 tempest-ServersAdmin275Test-1657935187-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1408.215956] env[62519]: DEBUG nova.virt.hardware [None req-3dc13758-4a00-4637-af78-ec1a85cd10bf tempest-ServersAdmin275Test-1657935187 tempest-ServersAdmin275Test-1657935187-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1408.216274] env[62519]: DEBUG nova.virt.hardware [None req-3dc13758-4a00-4637-af78-ec1a85cd10bf tempest-ServersAdmin275Test-1657935187 tempest-ServersAdmin275Test-1657935187-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62519) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1408.216464] env[62519]: DEBUG nova.virt.hardware [None 
req-3dc13758-4a00-4637-af78-ec1a85cd10bf tempest-ServersAdmin275Test-1657935187 tempest-ServersAdmin275Test-1657935187-project-member] Got 1 possible topologies {{(pid=62519) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1408.216661] env[62519]: DEBUG nova.virt.hardware [None req-3dc13758-4a00-4637-af78-ec1a85cd10bf tempest-ServersAdmin275Test-1657935187 tempest-ServersAdmin275Test-1657935187-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1408.216838] env[62519]: DEBUG nova.virt.hardware [None req-3dc13758-4a00-4637-af78-ec1a85cd10bf tempest-ServersAdmin275Test-1657935187 tempest-ServersAdmin275Test-1657935187-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1408.217282] env[62519]: DEBUG oslo_concurrency.lockutils [None req-94daf7e7-f2b3-4130-bb71-0b7053690311 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Releasing lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1408.217551] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-94daf7e7-f2b3-4130-bb71-0b7053690311 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk to [datastore1] 11d4a010-959f-4f53-94dc-7499007612ad/11d4a010-959f-4f53-94dc-7499007612ad.vmdk {{(pid=62519) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1408.218498] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18886202-f0ba-43c0-96c1-33d265c5fef0 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1408.223581] env[62519]: DEBUG oslo_concurrency.lockutils [None req-0f0e68dd-2aa4-4309-84c5-3072ca2b5eec tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Acquired lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1408.223952] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-0f0e68dd-2aa4-4309-84c5-3072ca2b5eec tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62519) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1408.224348] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-80c7cd2b-843d-4f57-aa4f-f2611a97816e {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1408.228411] env[62519]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-0dbaef2b-a71c-4e25-af13-42868b98291d {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1408.245706] env[62519]: DEBUG 
oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db88024a-d893-4a16-9b4c-c590b4e1fa6f {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1408.254981] env[62519]: DEBUG oslo_vmware.api [None req-94daf7e7-f2b3-4130-bb71-0b7053690311 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Waiting for the task: (returnval){ [ 1408.254981] env[62519]: value = "task-1801959" [ 1408.254981] env[62519]: _type = "Task" [ 1408.254981] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1408.257287] env[62519]: DEBUG nova.network.neutron [None req-4c3fd39d-ef1d-4593-8784-6a91635c8aff tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] [instance: 099112ae-569b-4853-bc47-b0b8b97d2525] Instance cache missing network info. {{(pid=62519) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1408.275352] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-0f0e68dd-2aa4-4309-84c5-3072ca2b5eec tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62519) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1408.277022] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-0f0e68dd-2aa4-4309-84c5-3072ca2b5eec tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62519) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1408.277022] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-3dc13758-4a00-4637-af78-ec1a85cd10bf tempest-ServersAdmin275Test-1657935187 tempest-ServersAdmin275Test-1657935187-project-member] [instance: c07e4d30-44bc-417b-8137-97f974aec932] Instance VIF info [] {{(pid=62519) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1408.283327] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-3dc13758-4a00-4637-af78-ec1a85cd10bf tempest-ServersAdmin275Test-1657935187 tempest-ServersAdmin275Test-1657935187-project-member] Creating folder: Project (7be9a2fd53fc47bd8c23534a91763c89). Parent ref: group-v373567. {{(pid=62519) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1408.284152] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9c7e3dda-84ac-47cb-8af6-6c51074f61d4 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1408.287166] env[62519]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-698dce71-c6cc-4656-8b28-7187f0753ff7 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1408.295829] env[62519]: DEBUG oslo_vmware.api [None req-94daf7e7-f2b3-4130-bb71-0b7053690311 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Task: {'id': task-1801959, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1408.299910] env[62519]: DEBUG oslo_vmware.api [None req-0f0e68dd-2aa4-4309-84c5-3072ca2b5eec tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Waiting for the task: (returnval){ [ 1408.299910] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]5246f5c3-bdeb-9d2a-7dba-aced897b57cb" [ 1408.299910] env[62519]: _type = "Task" [ 1408.299910] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1408.303406] env[62519]: DEBUG oslo_vmware.api [None req-120ef937-d998-4979-a2ea-db598aea78e6 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Task: {'id': task-1801958, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1408.308148] env[62519]: INFO nova.virt.vmwareapi.vm_util [None req-3dc13758-4a00-4637-af78-ec1a85cd10bf tempest-ServersAdmin275Test-1657935187 tempest-ServersAdmin275Test-1657935187-project-member] Created folder: Project (7be9a2fd53fc47bd8c23534a91763c89) in parent group-v373567. [ 1408.308369] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-3dc13758-4a00-4637-af78-ec1a85cd10bf tempest-ServersAdmin275Test-1657935187 tempest-ServersAdmin275Test-1657935187-project-member] Creating folder: Instances. Parent ref: group-v373598. {{(pid=62519) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1408.310903] env[62519]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-28b5f486-ed3f-49b5-aecb-1aa63920fadb {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1408.324330] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2cdb3cee-efd6-4022-b37b-5545123251f0 tempest-ServerShowV247Test-1912217433 tempest-ServerShowV247Test-1912217433-project-member] Acquiring lock "49221ea3-d457-4cf5-97a9-9ae74c4e86fb" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1408.324662] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2cdb3cee-efd6-4022-b37b-5545123251f0 tempest-ServerShowV247Test-1912217433 tempest-ServerShowV247Test-1912217433-project-member] Lock "49221ea3-d457-4cf5-97a9-9ae74c4e86fb" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1408.324952] env[62519]: DEBUG oslo_vmware.api [None req-0f0e68dd-2aa4-4309-84c5-3072ca2b5eec tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]5246f5c3-bdeb-9d2a-7dba-aced897b57cb, 'name': SearchDatastore_Task, 'duration_secs': 0.011742} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1408.332766] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-23f073ad-b18b-4710-9884-59cb41461f28 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1408.335159] env[62519]: INFO nova.virt.vmwareapi.vm_util [None req-3dc13758-4a00-4637-af78-ec1a85cd10bf tempest-ServersAdmin275Test-1657935187 tempest-ServersAdmin275Test-1657935187-project-member] Created folder: Instances in parent group-v373598. [ 1408.335551] env[62519]: DEBUG oslo.service.loopingcall [None req-3dc13758-4a00-4637-af78-ec1a85cd10bf tempest-ServersAdmin275Test-1657935187 tempest-ServersAdmin275Test-1657935187-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62519) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1408.335970] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c07e4d30-44bc-417b-8137-97f974aec932] Creating VM on the ESX host {{(pid=62519) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1408.336579] env[62519]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-2cb83e76-3312-41b0-bab8-0436204a64b6 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1408.354888] env[62519]: DEBUG oslo_vmware.api [None req-0f0e68dd-2aa4-4309-84c5-3072ca2b5eec tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Waiting for the task: (returnval){ [ 1408.354888] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]5270fc0f-831c-1e22-de74-a780c965b22b" [ 1408.354888] env[62519]: _type = "Task" [ 1408.354888] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1408.355192] env[62519]: DEBUG oslo_vmware.api [None req-e8efc79a-e5f9-47fc-b2c3-ac5d306260b4 tempest-ServerDiagnosticsTest-1027319288 tempest-ServerDiagnosticsTest-1027319288-project-member] Task: {'id': task-1801956, 'name': PowerOnVM_Task, 'duration_secs': 0.884662} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1408.355684] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-e8efc79a-e5f9-47fc-b2c3-ac5d306260b4 tempest-ServerDiagnosticsTest-1027319288 tempest-ServerDiagnosticsTest-1027319288-project-member] [instance: 1118c1e5-1aa8-4f52-9fb9-e86531bf83d1] Powered on the VM {{(pid=62519) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1408.355909] env[62519]: INFO nova.compute.manager [None req-e8efc79a-e5f9-47fc-b2c3-ac5d306260b4 tempest-ServerDiagnosticsTest-1027319288 tempest-ServerDiagnosticsTest-1027319288-project-member] [instance: 1118c1e5-1aa8-4f52-9fb9-e86531bf83d1] Took 12.52 seconds to spawn the instance on the hypervisor. 
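[editor's note] The entries above are dominated by one pattern: a vCenter task (CreateVM_Task, Rename_Task, PowerOnVM_Task, CopyVirtualDisk_Task, SearchDatastore_Task, ...) is submitted via oslo_vmware.service, then wait_for_task / _poll_task polls it, logging "progress is N%." on each poll and "completed successfully" with a duration_secs once it finishes (e.g. task-1801956 above, PowerOnVM_Task, duration_secs 0.884662, contributing to the 12.52 s spawn). The sketch below is a minimal, self-contained illustration of that polling loop only; FakeTask, its poll() method, and this wait_for_task are hypothetical stand-ins written for this note, not the real oslo.vmware API or the Nova driver code.

```python
# Illustrative sketch only -- not the oslo.vmware implementation.
# It mimics the poll/report cycle visible in the log: submit a task,
# poll its progress at an interval, log each poll with the task id and
# percentage, and report the total duration when it completes.
import time


class FakeTask:
    """Hypothetical stand-in for a vCenter task handle."""

    def __init__(self, task_id, steps=3):
        self.task_id = task_id
        self._steps = steps
        self._polls = 0

    def poll(self):
        """Return (state, progress); completes after a few polls."""
        self._polls += 1
        if self._polls >= self._steps:
            return "success", 100
        return "running", int(100 * self._polls / self._steps)


def wait_for_task(task, interval=0.5):
    """Poll a task until it finishes, printing progress lines like _poll_task."""
    start = time.monotonic()
    while True:
        state, progress = task.poll()
        print(f"Task: {{'id': {task.task_id!r}}} progress is {progress}%.")
        if state == "success":
            duration = time.monotonic() - start
            print(f"Task: {{'id': {task.task_id!r}}} completed successfully "
                  f"(duration_secs: {duration:.6f})")
            return
        time.sleep(interval)


if __name__ == "__main__":
    wait_for_task(FakeTask("task-1801956"))
```

In the real driver the task handle and poll interval come from oslo.vmware's session object rather than a loop like this; the sketch only shows the loop shape that produces the "progress is N%." / "completed successfully" pairs seen throughout the log. [end editor's note]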
[ 1408.356103] env[62519]: DEBUG nova.compute.manager [None req-e8efc79a-e5f9-47fc-b2c3-ac5d306260b4 tempest-ServerDiagnosticsTest-1027319288 tempest-ServerDiagnosticsTest-1027319288-project-member] [instance: 1118c1e5-1aa8-4f52-9fb9-e86531bf83d1] Checking state {{(pid=62519) _get_power_state /opt/stack/nova/nova/compute/manager.py:1799}} [ 1408.360457] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d167abb-2ae0-485b-9a26-2e9fe4236761 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1408.364620] env[62519]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1408.364620] env[62519]: value = "task-1801962" [ 1408.364620] env[62519]: _type = "Task" [ 1408.364620] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1408.373416] env[62519]: DEBUG oslo_vmware.api [None req-0f0e68dd-2aa4-4309-84c5-3072ca2b5eec tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]5270fc0f-831c-1e22-de74-a780c965b22b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1408.387726] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1801962, 'name': CreateVM_Task} progress is 10%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1408.554536] env[62519]: DEBUG oslo_concurrency.lockutils [None req-070141c4-76cd-4253-8f36-904e93010092 tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] Acquiring lock "021c3287-5887-457e-9b3a-233308fb9b23" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1408.554536] env[62519]: DEBUG oslo_concurrency.lockutils [None req-070141c4-76cd-4253-8f36-904e93010092 tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] Lock "021c3287-5887-457e-9b3a-233308fb9b23" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1408.592614] env[62519]: DEBUG oslo_concurrency.lockutils [None req-070141c4-76cd-4253-8f36-904e93010092 tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] Acquiring lock "56790738-4759-468a-9f43-f9c2bc2de23a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1408.594380] env[62519]: DEBUG oslo_concurrency.lockutils [None req-070141c4-76cd-4253-8f36-904e93010092 tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] Lock "56790738-4759-468a-9f43-f9c2bc2de23a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.002s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1408.620083] env[62519]: DEBUG 
oslo_concurrency.lockutils [None req-21ea1173-3fc6-4081-929e-331a7f1267ad tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.495s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1408.620528] env[62519]: DEBUG nova.compute.manager [None req-21ea1173-3fc6-4081-929e-331a7f1267ad tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] [instance: 31f55ece-82e6-40ad-ad7c-1af645f307bf] Start building networks asynchronously for instance. {{(pid=62519) _build_resources /opt/stack/nova/nova/compute/manager.py:2838}} [ 1408.627111] env[62519]: DEBUG oslo_concurrency.lockutils [None req-b63f15ff-942a-4ceb-acce-0511e4a5332d tempest-ServersWithSpecificFlavorTestJSON-2102225660 tempest-ServersWithSpecificFlavorTestJSON-2102225660-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 9.838s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1408.628775] env[62519]: INFO nova.compute.claims [None req-b63f15ff-942a-4ceb-acce-0511e4a5332d tempest-ServersWithSpecificFlavorTestJSON-2102225660 tempest-ServersWithSpecificFlavorTestJSON-2102225660-project-member] [instance: 029ee07c-705d-452b-9b14-385d69f2fbbb] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1408.636473] env[62519]: DEBUG oslo_concurrency.lockutils [None req-070141c4-76cd-4253-8f36-904e93010092 tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] Acquiring lock "3aab3adb-b28b-45dd-880f-b1cfbaeeed0c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1408.636473] env[62519]: DEBUG oslo_concurrency.lockutils [None req-070141c4-76cd-4253-8f36-904e93010092 tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] Lock "3aab3adb-b28b-45dd-880f-b1cfbaeeed0c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1408.676108] env[62519]: DEBUG oslo_vmware.api [None req-2bcdd96e-4e47-4b74-bed4-3230c87035c0 tempest-ServersV294TestFqdnHostnames-312089260 tempest-ServersV294TestFqdnHostnames-312089260-project-member] Task: {'id': task-1801957, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.593994} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1408.677136] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-2bcdd96e-4e47-4b74-bed4-3230c87035c0 tempest-ServersV294TestFqdnHostnames-312089260 tempest-ServersV294TestFqdnHostnames-312089260-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk to [datastore1] 2b391628-18a2-4606-8c59-58ba642cee50/2b391628-18a2-4606-8c59-58ba642cee50.vmdk {{(pid=62519) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1408.677136] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-2bcdd96e-4e47-4b74-bed4-3230c87035c0 tempest-ServersV294TestFqdnHostnames-312089260 tempest-ServersV294TestFqdnHostnames-312089260-project-member] [instance: 2b391628-18a2-4606-8c59-58ba642cee50] Extending root virtual disk to 1048576 {{(pid=62519) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1408.677136] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-64bd1529-b8ed-4c70-9b1c-2d3a99e86213 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1408.685036] env[62519]: DEBUG oslo_vmware.api [None req-2bcdd96e-4e47-4b74-bed4-3230c87035c0 tempest-ServersV294TestFqdnHostnames-312089260 tempest-ServersV294TestFqdnHostnames-312089260-project-member] Waiting for the task: (returnval){ [ 1408.685036] env[62519]: value = "task-1801963" [ 1408.685036] env[62519]: _type = "Task" [ 1408.685036] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1408.693023] env[62519]: DEBUG oslo_vmware.api [None req-2bcdd96e-4e47-4b74-bed4-3230c87035c0 tempest-ServersV294TestFqdnHostnames-312089260 tempest-ServersV294TestFqdnHostnames-312089260-project-member] Task: {'id': task-1801963, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1408.719516] env[62519]: DEBUG nova.network.neutron [None req-4c3fd39d-ef1d-4593-8784-6a91635c8aff tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] [instance: 099112ae-569b-4853-bc47-b0b8b97d2525] Updating instance_info_cache with network_info: [{"id": "18c7d589-708e-4a8d-b1f1-c646d532c6aa", "address": "fa:16:3e:d8:14:73", "network": {"id": "f59f09e8-6201-4c07-9809-ef4c213de15f", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-906781315-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "555fa612766f4b5fa173664ca3fa496c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8bc3fa06-9d5b-4ab1-8113-6ed8942d23b6", "external-id": "nsx-vlan-transportzone-72", "segmentation_id": 72, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap18c7d589-70", "ovs_interfaceid": "18c7d589-708e-4a8d-b1f1-c646d532c6aa", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1408.771767] env[62519]: DEBUG oslo_vmware.api [None req-94daf7e7-f2b3-4130-bb71-0b7053690311 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Task: {'id': task-1801959, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1408.796917] env[62519]: DEBUG oslo_vmware.api [None req-120ef937-d998-4979-a2ea-db598aea78e6 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Task: {'id': task-1801958, 'name': CreateSnapshot_Task, 'duration_secs': 0.970783} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1408.797301] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-120ef937-d998-4979-a2ea-db598aea78e6 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] [instance: 8b178cc0-db79-4ec2-8962-f31b936f8eff] Created Snapshot of the VM instance {{(pid=62519) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1408.798055] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b4e49ef-8481-4d6e-8f8d-670b852224b3 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1408.868974] env[62519]: DEBUG oslo_vmware.api [None req-0f0e68dd-2aa4-4309-84c5-3072ca2b5eec tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]5270fc0f-831c-1e22-de74-a780c965b22b, 'name': SearchDatastore_Task, 'duration_secs': 0.100495} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1408.875020] env[62519]: DEBUG oslo_concurrency.lockutils [None req-0f0e68dd-2aa4-4309-84c5-3072ca2b5eec tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Releasing lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1408.875020] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-0f0e68dd-2aa4-4309-84c5-3072ca2b5eec tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk to [datastore1] 34d2991e-b6df-473d-8994-e45ff57ef131/34d2991e-b6df-473d-8994-e45ff57ef131.vmdk {{(pid=62519) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1408.875020] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ac723172-628f-46ee-9cff-9ca7e545ad61 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1408.885564] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1801962, 'name': CreateVM_Task, 'duration_secs': 0.517425} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1408.889729] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c07e4d30-44bc-417b-8137-97f974aec932] Created VM on the ESX host {{(pid=62519) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1408.889729] env[62519]: DEBUG oslo_vmware.api [None req-0f0e68dd-2aa4-4309-84c5-3072ca2b5eec tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Waiting for the task: (returnval){ [ 1408.889729] env[62519]: value = "task-1801964" [ 1408.889729] env[62519]: _type = "Task" [ 1408.889729] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1408.890155] env[62519]: INFO nova.compute.manager [None req-e8efc79a-e5f9-47fc-b2c3-ac5d306260b4 tempest-ServerDiagnosticsTest-1027319288 tempest-ServerDiagnosticsTest-1027319288-project-member] [instance: 1118c1e5-1aa8-4f52-9fb9-e86531bf83d1] Took 26.98 seconds to build instance. 
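The sequence just above shows how concurrent builds share one cached base image per datastore: each request serializes on a lock named after the cached VMDK under devstack-image-cache_base, checks whether the cached copy is present (SearchDatastore_Task), copies it into the instance folder (CopyVirtualDisk_Task), and later extends the root disk to the flavor size. The sketch below reproduces that lock-check-copy pattern against the local filesystem as a stand-in for the datastore; the directory paths, the populate callback, and the use of oslo_concurrency's in-process lock are assumptions made for this example, not the driver's real datastore code.

import os
import shutil

from oslo_concurrency import lockutils  # requires the oslo.concurrency package

CACHE_DIR = "/tmp/devstack-image-cache_base"   # stand-in for the datastore image cache
INSTANCES_DIR = "/tmp/instances"               # stand-in for per-instance folders


def fetch_image_if_missing(image_id, populate):
    """Ensure the cached base disk exists, serialized per image much like the
    '[datastore1] devstack-image-cache_base/<image>' locks in the log."""
    cached = os.path.join(CACHE_DIR, image_id, f"{image_id}.vmdk")
    with lockutils.lock(f"devstack-image-cache_base/{image_id}"):
        if not os.path.exists(cached):         # SearchDatastore_Task analogue
            os.makedirs(os.path.dirname(cached), exist_ok=True)
            populate(cached)                   # fetch/convert the base image here
    return cached


def copy_to_instance_dir(cached, instance_uuid):
    """Copy the cached base disk into the instance directory
    (CopyVirtualDisk_Task analogue); the root disk is extended afterwards."""
    dest = os.path.join(INSTANCES_DIR, instance_uuid, f"{instance_uuid}.vmdk")
    os.makedirs(os.path.dirname(dest), exist_ok=True)
    shutil.copyfile(cached, dest)
    return dest


if __name__ == "__main__":
    base = fetch_image_if_missing(
        "15793716-f1d9-4a86-9030-717adf498693",
        populate=lambda path: open(path, "wb").close(),   # placeholder payload
    )
    print(copy_to_instance_dir(base, "34d2991e-b6df-473d-8994-e45ff57ef131"))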
[ 1408.891454] env[62519]: DEBUG oslo_concurrency.lockutils [None req-3dc13758-4a00-4637-af78-ec1a85cd10bf tempest-ServersAdmin275Test-1657935187 tempest-ServersAdmin275Test-1657935187-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1408.891610] env[62519]: DEBUG oslo_concurrency.lockutils [None req-3dc13758-4a00-4637-af78-ec1a85cd10bf tempest-ServersAdmin275Test-1657935187 tempest-ServersAdmin275Test-1657935187-project-member] Acquired lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1408.891978] env[62519]: DEBUG oslo_concurrency.lockutils [None req-3dc13758-4a00-4637-af78-ec1a85cd10bf tempest-ServersAdmin275Test-1657935187 tempest-ServersAdmin275Test-1657935187-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1408.893160] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-03e778a5-511c-412c-a1f5-fa33352880b8 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1408.904475] env[62519]: DEBUG oslo_vmware.api [None req-3dc13758-4a00-4637-af78-ec1a85cd10bf tempest-ServersAdmin275Test-1657935187 tempest-ServersAdmin275Test-1657935187-project-member] Waiting for the task: (returnval){ [ 1408.904475] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]52a12b4e-6e91-931c-1ff6-36639fa41ac6" [ 1408.904475] env[62519]: _type = "Task" [ 1408.904475] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1408.906351] env[62519]: DEBUG oslo_vmware.api [None req-0f0e68dd-2aa4-4309-84c5-3072ca2b5eec tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Task: {'id': task-1801964, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1408.916159] env[62519]: DEBUG oslo_vmware.api [None req-3dc13758-4a00-4637-af78-ec1a85cd10bf tempest-ServersAdmin275Test-1657935187 tempest-ServersAdmin275Test-1657935187-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52a12b4e-6e91-931c-1ff6-36639fa41ac6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1409.011304] env[62519]: DEBUG nova.network.neutron [req-647329bc-ea96-45ee-9ea8-ea1487131f90 req-ec68474b-2c32-4b91-a832-eb1a0d3e05bc service nova] [instance: 11d4a010-959f-4f53-94dc-7499007612ad] Updated VIF entry in instance network info cache for port ca1a3bbf-3f10-4a96-a67d-b77464ab25e7. 
{{(pid=62519) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1409.011741] env[62519]: DEBUG nova.network.neutron [req-647329bc-ea96-45ee-9ea8-ea1487131f90 req-ec68474b-2c32-4b91-a832-eb1a0d3e05bc service nova] [instance: 11d4a010-959f-4f53-94dc-7499007612ad] Updating instance_info_cache with network_info: [{"id": "ca1a3bbf-3f10-4a96-a67d-b77464ab25e7", "address": "fa:16:3e:cc:ef:21", "network": {"id": "ddacabe4-9723-4a0b-9642-4c3ca54cd05f", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1355796152-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "549cc35f5ff249f6bf22c67872883db0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bc1e16db-ad3b-4b7f-ab64-4609c87abac0", "external-id": "nsx-vlan-transportzone-500", "segmentation_id": 500, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapca1a3bbf-3f", "ovs_interfaceid": "ca1a3bbf-3f10-4a96-a67d-b77464ab25e7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1409.056635] env[62519]: DEBUG nova.network.neutron [req-3c572f18-858d-43fc-820e-aaadfee0afb4 req-46994605-6981-4c08-a233-3dff3f259bc8 service nova] [instance: 2b391628-18a2-4606-8c59-58ba642cee50] Updated VIF entry in instance network info cache for port ac832bcb-7bb4-49de-be18-a8fd0d8ee16e. 
{{(pid=62519) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1409.057037] env[62519]: DEBUG nova.network.neutron [req-3c572f18-858d-43fc-820e-aaadfee0afb4 req-46994605-6981-4c08-a233-3dff3f259bc8 service nova] [instance: 2b391628-18a2-4606-8c59-58ba642cee50] Updating instance_info_cache with network_info: [{"id": "ac832bcb-7bb4-49de-be18-a8fd0d8ee16e", "address": "fa:16:3e:b2:f6:9d", "network": {"id": "0413ee0f-d5e1-4c09-b98e-01bbb1481053", "bridge": "br-int", "label": "tempest-ServersV294TestFqdnHostnames-1023814213-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "998210eef9e04b64a426b66c965130a1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6685c85e-be1e-4b7b-a6cc-3e50e59b6567", "external-id": "nsx-vlan-transportzone-129", "segmentation_id": 129, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapac832bcb-7b", "ovs_interfaceid": "ac832bcb-7bb4-49de-be18-a8fd0d8ee16e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1409.133750] env[62519]: DEBUG nova.compute.utils [None req-21ea1173-3fc6-4081-929e-331a7f1267ad tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Using /dev/sd instead of None {{(pid=62519) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1409.139364] env[62519]: DEBUG nova.compute.manager [None req-21ea1173-3fc6-4081-929e-331a7f1267ad tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] [instance: 31f55ece-82e6-40ad-ad7c-1af645f307bf] Allocating IP information in the background. {{(pid=62519) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1989}} [ 1409.139612] env[62519]: DEBUG nova.network.neutron [None req-21ea1173-3fc6-4081-929e-331a7f1267ad tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] [instance: 31f55ece-82e6-40ad-ad7c-1af645f307bf] allocate_for_instance() {{(pid=62519) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1409.197594] env[62519]: DEBUG oslo_vmware.api [None req-2bcdd96e-4e47-4b74-bed4-3230c87035c0 tempest-ServersV294TestFqdnHostnames-312089260 tempest-ServersV294TestFqdnHostnames-312089260-project-member] Task: {'id': task-1801963, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.070887} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1409.197594] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-2bcdd96e-4e47-4b74-bed4-3230c87035c0 tempest-ServersV294TestFqdnHostnames-312089260 tempest-ServersV294TestFqdnHostnames-312089260-project-member] [instance: 2b391628-18a2-4606-8c59-58ba642cee50] Extended root virtual disk {{(pid=62519) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1409.198479] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8edf9c2f-a3e4-4e73-9513-15fd1001a10b {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1409.226778] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-2bcdd96e-4e47-4b74-bed4-3230c87035c0 tempest-ServersV294TestFqdnHostnames-312089260 tempest-ServersV294TestFqdnHostnames-312089260-project-member] [instance: 2b391628-18a2-4606-8c59-58ba642cee50] Reconfiguring VM instance instance-00000008 to attach disk [datastore1] 2b391628-18a2-4606-8c59-58ba642cee50/2b391628-18a2-4606-8c59-58ba642cee50.vmdk or device None with type sparse {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1409.228617] env[62519]: DEBUG nova.policy [None req-21ea1173-3fc6-4081-929e-331a7f1267ad tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b04fa80750184b97a16ec1880e0a585c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '683d60927fdf424386ffcfaa344a7af6', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62519) authorize /opt/stack/nova/nova/policy.py:192}} [ 1409.230599] env[62519]: DEBUG oslo_concurrency.lockutils [None req-4c3fd39d-ef1d-4593-8784-6a91635c8aff tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Releasing lock "refresh_cache-099112ae-569b-4853-bc47-b0b8b97d2525" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1409.231539] env[62519]: DEBUG nova.compute.manager [None req-4c3fd39d-ef1d-4593-8784-6a91635c8aff tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] [instance: 099112ae-569b-4853-bc47-b0b8b97d2525] Instance network_info: |[{"id": "18c7d589-708e-4a8d-b1f1-c646d532c6aa", "address": "fa:16:3e:d8:14:73", "network": {"id": "f59f09e8-6201-4c07-9809-ef4c213de15f", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-906781315-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "555fa612766f4b5fa173664ca3fa496c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8bc3fa06-9d5b-4ab1-8113-6ed8942d23b6", "external-id": "nsx-vlan-transportzone-72", 
"segmentation_id": 72, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap18c7d589-70", "ovs_interfaceid": "18c7d589-708e-4a8d-b1f1-c646d532c6aa", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62519) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2004}} [ 1409.231539] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-921518da-865d-45d8-b72b-e3999977fe44 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1409.252266] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-4c3fd39d-ef1d-4593-8784-6a91635c8aff tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] [instance: 099112ae-569b-4853-bc47-b0b8b97d2525] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:d8:14:73', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '8bc3fa06-9d5b-4ab1-8113-6ed8942d23b6', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '18c7d589-708e-4a8d-b1f1-c646d532c6aa', 'vif_model': 'vmxnet3'}] {{(pid=62519) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1409.258627] env[62519]: DEBUG oslo.service.loopingcall [None req-4c3fd39d-ef1d-4593-8784-6a91635c8aff tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62519) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1409.259333] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 099112ae-569b-4853-bc47-b0b8b97d2525] Creating VM on the ESX host {{(pid=62519) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1409.259940] env[62519]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-de7e35d2-83a5-4224-80d6-9b304d955e3e {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1409.277926] env[62519]: DEBUG oslo_vmware.api [None req-2bcdd96e-4e47-4b74-bed4-3230c87035c0 tempest-ServersV294TestFqdnHostnames-312089260 tempest-ServersV294TestFqdnHostnames-312089260-project-member] Waiting for the task: (returnval){ [ 1409.277926] env[62519]: value = "task-1801965" [ 1409.277926] env[62519]: _type = "Task" [ 1409.277926] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1409.286636] env[62519]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1409.286636] env[62519]: value = "task-1801966" [ 1409.286636] env[62519]: _type = "Task" [ 1409.286636] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1409.296769] env[62519]: DEBUG oslo_vmware.api [None req-2bcdd96e-4e47-4b74-bed4-3230c87035c0 tempest-ServersV294TestFqdnHostnames-312089260 tempest-ServersV294TestFqdnHostnames-312089260-project-member] Task: {'id': task-1801965, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1409.297105] env[62519]: DEBUG oslo_vmware.api [None req-94daf7e7-f2b3-4130-bb71-0b7053690311 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Task: {'id': task-1801959, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.842333} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1409.297782] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-94daf7e7-f2b3-4130-bb71-0b7053690311 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk to [datastore1] 11d4a010-959f-4f53-94dc-7499007612ad/11d4a010-959f-4f53-94dc-7499007612ad.vmdk {{(pid=62519) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1409.298279] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-94daf7e7-f2b3-4130-bb71-0b7053690311 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] [instance: 11d4a010-959f-4f53-94dc-7499007612ad] Extending root virtual disk to 1048576 {{(pid=62519) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1409.298408] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-f1a10972-3a24-42f7-82da-314d635df23e {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1409.304056] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1801966, 'name': CreateVM_Task} progress is 6%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1409.309264] env[62519]: DEBUG oslo_vmware.api [None req-94daf7e7-f2b3-4130-bb71-0b7053690311 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Waiting for the task: (returnval){ [ 1409.309264] env[62519]: value = "task-1801967" [ 1409.309264] env[62519]: _type = "Task" [ 1409.309264] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1409.317446] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-120ef937-d998-4979-a2ea-db598aea78e6 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] [instance: 8b178cc0-db79-4ec2-8962-f31b936f8eff] Creating linked-clone VM from snapshot {{(pid=62519) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1409.321230] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-a638bf88-ab0a-4664-996d-307f0506adf4 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1409.329725] env[62519]: DEBUG oslo_vmware.api [None req-94daf7e7-f2b3-4130-bb71-0b7053690311 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Task: {'id': task-1801967, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1409.331388] env[62519]: DEBUG oslo_vmware.api [None req-120ef937-d998-4979-a2ea-db598aea78e6 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Waiting for the task: (returnval){ [ 1409.331388] env[62519]: value = "task-1801968" [ 1409.331388] env[62519]: _type = "Task" [ 1409.331388] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1409.340411] env[62519]: DEBUG oslo_vmware.api [None req-120ef937-d998-4979-a2ea-db598aea78e6 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Task: {'id': task-1801968, 'name': CloneVM_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1409.408173] env[62519]: DEBUG oslo_vmware.api [None req-0f0e68dd-2aa4-4309-84c5-3072ca2b5eec tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Task: {'id': task-1801964, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1409.420202] env[62519]: DEBUG oslo_vmware.api [None req-3dc13758-4a00-4637-af78-ec1a85cd10bf tempest-ServersAdmin275Test-1657935187 tempest-ServersAdmin275Test-1657935187-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52a12b4e-6e91-931c-1ff6-36639fa41ac6, 'name': SearchDatastore_Task, 'duration_secs': 0.05143} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1409.420433] env[62519]: DEBUG oslo_concurrency.lockutils [None req-3dc13758-4a00-4637-af78-ec1a85cd10bf tempest-ServersAdmin275Test-1657935187 tempest-ServersAdmin275Test-1657935187-project-member] Releasing lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1409.420974] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-3dc13758-4a00-4637-af78-ec1a85cd10bf tempest-ServersAdmin275Test-1657935187 tempest-ServersAdmin275Test-1657935187-project-member] [instance: c07e4d30-44bc-417b-8137-97f974aec932] Processing image 15793716-f1d9-4a86-9030-717adf498693 {{(pid=62519) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1409.420974] env[62519]: DEBUG oslo_concurrency.lockutils [None req-3dc13758-4a00-4637-af78-ec1a85cd10bf tempest-ServersAdmin275Test-1657935187 tempest-ServersAdmin275Test-1657935187-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1409.420974] env[62519]: DEBUG oslo_concurrency.lockutils [None req-3dc13758-4a00-4637-af78-ec1a85cd10bf tempest-ServersAdmin275Test-1657935187 tempest-ServersAdmin275Test-1657935187-project-member] Acquired lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1409.421414] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-3dc13758-4a00-4637-af78-ec1a85cd10bf tempest-ServersAdmin275Test-1657935187 
tempest-ServersAdmin275Test-1657935187-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62519) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1409.422030] env[62519]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d7582c40-2337-4a9d-b345-8b4e8e8ba921 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1409.437741] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-3dc13758-4a00-4637-af78-ec1a85cd10bf tempest-ServersAdmin275Test-1657935187 tempest-ServersAdmin275Test-1657935187-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62519) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1409.437834] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-3dc13758-4a00-4637-af78-ec1a85cd10bf tempest-ServersAdmin275Test-1657935187 tempest-ServersAdmin275Test-1657935187-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62519) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1409.438579] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-745f9ae9-196e-431d-b923-aae5412efc67 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1409.447985] env[62519]: DEBUG oslo_vmware.api [None req-3dc13758-4a00-4637-af78-ec1a85cd10bf tempest-ServersAdmin275Test-1657935187 tempest-ServersAdmin275Test-1657935187-project-member] Waiting for the task: (returnval){ [ 1409.447985] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]52e863e9-33ca-712a-49e3-e569445df50e" [ 1409.447985] env[62519]: _type = "Task" [ 1409.447985] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1409.462328] env[62519]: DEBUG oslo_vmware.api [None req-3dc13758-4a00-4637-af78-ec1a85cd10bf tempest-ServersAdmin275Test-1657935187 tempest-ServersAdmin275Test-1657935187-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52e863e9-33ca-712a-49e3-e569445df50e, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1409.515187] env[62519]: DEBUG oslo_concurrency.lockutils [req-647329bc-ea96-45ee-9ea8-ea1487131f90 req-ec68474b-2c32-4b91-a832-eb1a0d3e05bc service nova] Releasing lock "refresh_cache-11d4a010-959f-4f53-94dc-7499007612ad" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1409.515513] env[62519]: DEBUG nova.compute.manager [req-647329bc-ea96-45ee-9ea8-ea1487131f90 req-ec68474b-2c32-4b91-a832-eb1a0d3e05bc service nova] [instance: 34d2991e-b6df-473d-8994-e45ff57ef131] Received event network-vif-plugged-e6b9aab2-f105-4c06-b204-f0626f41ccbe {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1409.515711] env[62519]: DEBUG oslo_concurrency.lockutils [req-647329bc-ea96-45ee-9ea8-ea1487131f90 req-ec68474b-2c32-4b91-a832-eb1a0d3e05bc service nova] Acquiring lock "34d2991e-b6df-473d-8994-e45ff57ef131-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1409.515918] env[62519]: DEBUG oslo_concurrency.lockutils [req-647329bc-ea96-45ee-9ea8-ea1487131f90 req-ec68474b-2c32-4b91-a832-eb1a0d3e05bc service nova] Lock "34d2991e-b6df-473d-8994-e45ff57ef131-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1409.516197] env[62519]: DEBUG oslo_concurrency.lockutils [req-647329bc-ea96-45ee-9ea8-ea1487131f90 req-ec68474b-2c32-4b91-a832-eb1a0d3e05bc service nova] Lock "34d2991e-b6df-473d-8994-e45ff57ef131-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1409.516490] env[62519]: DEBUG nova.compute.manager [req-647329bc-ea96-45ee-9ea8-ea1487131f90 req-ec68474b-2c32-4b91-a832-eb1a0d3e05bc service nova] [instance: 34d2991e-b6df-473d-8994-e45ff57ef131] No waiting events found dispatching network-vif-plugged-e6b9aab2-f105-4c06-b204-f0626f41ccbe {{(pid=62519) pop_instance_event /opt/stack/nova/nova/compute/manager.py:323}} [ 1409.516690] env[62519]: WARNING nova.compute.manager [req-647329bc-ea96-45ee-9ea8-ea1487131f90 req-ec68474b-2c32-4b91-a832-eb1a0d3e05bc service nova] [instance: 34d2991e-b6df-473d-8994-e45ff57ef131] Received unexpected event network-vif-plugged-e6b9aab2-f105-4c06-b204-f0626f41ccbe for instance with vm_state building and task_state spawning. [ 1409.516858] env[62519]: DEBUG nova.compute.manager [req-647329bc-ea96-45ee-9ea8-ea1487131f90 req-ec68474b-2c32-4b91-a832-eb1a0d3e05bc service nova] [instance: 34d2991e-b6df-473d-8994-e45ff57ef131] Received event network-changed-e6b9aab2-f105-4c06-b204-f0626f41ccbe {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1409.517103] env[62519]: DEBUG nova.compute.manager [req-647329bc-ea96-45ee-9ea8-ea1487131f90 req-ec68474b-2c32-4b91-a832-eb1a0d3e05bc service nova] [instance: 34d2991e-b6df-473d-8994-e45ff57ef131] Refreshing instance network info cache due to event network-changed-e6b9aab2-f105-4c06-b204-f0626f41ccbe. 
{{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11628}} [ 1409.518275] env[62519]: DEBUG oslo_concurrency.lockutils [req-647329bc-ea96-45ee-9ea8-ea1487131f90 req-ec68474b-2c32-4b91-a832-eb1a0d3e05bc service nova] Acquiring lock "refresh_cache-34d2991e-b6df-473d-8994-e45ff57ef131" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1409.518275] env[62519]: DEBUG oslo_concurrency.lockutils [req-647329bc-ea96-45ee-9ea8-ea1487131f90 req-ec68474b-2c32-4b91-a832-eb1a0d3e05bc service nova] Acquired lock "refresh_cache-34d2991e-b6df-473d-8994-e45ff57ef131" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1409.518275] env[62519]: DEBUG nova.network.neutron [req-647329bc-ea96-45ee-9ea8-ea1487131f90 req-ec68474b-2c32-4b91-a832-eb1a0d3e05bc service nova] [instance: 34d2991e-b6df-473d-8994-e45ff57ef131] Refreshing network info cache for port e6b9aab2-f105-4c06-b204-f0626f41ccbe {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1409.563362] env[62519]: DEBUG oslo_concurrency.lockutils [req-3c572f18-858d-43fc-820e-aaadfee0afb4 req-46994605-6981-4c08-a233-3dff3f259bc8 service nova] Releasing lock "refresh_cache-2b391628-18a2-4606-8c59-58ba642cee50" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1409.563724] env[62519]: DEBUG nova.compute.manager [req-3c572f18-858d-43fc-820e-aaadfee0afb4 req-46994605-6981-4c08-a233-3dff3f259bc8 service nova] [instance: c616d8ec-f28a-4430-a336-1ea4790fd511] Received event network-vif-deleted-f9e01aea-e1f6-4372-aa97-658cfe3480e2 {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1409.644716] env[62519]: DEBUG nova.compute.manager [None req-21ea1173-3fc6-4081-929e-331a7f1267ad tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] [instance: 31f55ece-82e6-40ad-ad7c-1af645f307bf] Start building block device mappings for instance. 
{{(pid=62519) _build_resources /opt/stack/nova/nova/compute/manager.py:2873}} [ 1409.695512] env[62519]: DEBUG oslo_concurrency.lockutils [None req-fd469e3e-0f07-46c3-9403-6ec88e595d61 tempest-ServersTestJSON-192126600 tempest-ServersTestJSON-192126600-project-member] Acquiring lock "4c336ad1-8ce6-4f89-843e-0baae0d0dbda" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1409.695835] env[62519]: DEBUG oslo_concurrency.lockutils [None req-fd469e3e-0f07-46c3-9403-6ec88e595d61 tempest-ServersTestJSON-192126600 tempest-ServersTestJSON-192126600-project-member] Lock "4c336ad1-8ce6-4f89-843e-0baae0d0dbda" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1409.761099] env[62519]: DEBUG oslo_concurrency.lockutils [None req-d6584dc1-dea2-47ef-848a-6ee291fa58db tempest-ServersAdminNegativeTestJSON-2096466474 tempest-ServersAdminNegativeTestJSON-2096466474-project-member] Acquiring lock "681ef7a9-3b24-450a-9034-6d30177995d7" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1409.761536] env[62519]: DEBUG oslo_concurrency.lockutils [None req-d6584dc1-dea2-47ef-848a-6ee291fa58db tempest-ServersAdminNegativeTestJSON-2096466474 tempest-ServersAdminNegativeTestJSON-2096466474-project-member] Lock "681ef7a9-3b24-450a-9034-6d30177995d7" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1409.820316] env[62519]: DEBUG oslo_vmware.api [None req-2bcdd96e-4e47-4b74-bed4-3230c87035c0 tempest-ServersV294TestFqdnHostnames-312089260 tempest-ServersV294TestFqdnHostnames-312089260-project-member] Task: {'id': task-1801965, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1409.824012] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1801966, 'name': CreateVM_Task} progress is 25%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1409.827661] env[62519]: DEBUG nova.network.neutron [None req-21ea1173-3fc6-4081-929e-331a7f1267ad tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] [instance: 31f55ece-82e6-40ad-ad7c-1af645f307bf] Successfully created port: f5bb4ca5-90d8-4da8-a8c0-bbaf14254969 {{(pid=62519) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1409.832349] env[62519]: DEBUG oslo_vmware.api [None req-94daf7e7-f2b3-4130-bb71-0b7053690311 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Task: {'id': task-1801967, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.152946} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1409.836353] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-94daf7e7-f2b3-4130-bb71-0b7053690311 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] [instance: 11d4a010-959f-4f53-94dc-7499007612ad] Extended root virtual disk {{(pid=62519) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1409.837714] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13e70eb5-9cea-4120-80b3-776e23def468 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1409.846134] env[62519]: DEBUG oslo_vmware.api [None req-120ef937-d998-4979-a2ea-db598aea78e6 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Task: {'id': task-1801968, 'name': CloneVM_Task} progress is 94%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1409.869595] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-94daf7e7-f2b3-4130-bb71-0b7053690311 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] [instance: 11d4a010-959f-4f53-94dc-7499007612ad] Reconfiguring VM instance instance-00000009 to attach disk [datastore1] 11d4a010-959f-4f53-94dc-7499007612ad/11d4a010-959f-4f53-94dc-7499007612ad.vmdk or device None with type sparse {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1409.870188] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a29dba47-c85d-48e2-aa72-90a3f6a2e5c7 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1409.894069] env[62519]: DEBUG oslo_vmware.api [None req-94daf7e7-f2b3-4130-bb71-0b7053690311 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Waiting for the task: (returnval){ [ 1409.894069] env[62519]: value = "task-1801969" [ 1409.894069] env[62519]: _type = "Task" [ 1409.894069] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1409.908506] env[62519]: DEBUG oslo_vmware.api [None req-94daf7e7-f2b3-4130-bb71-0b7053690311 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Task: {'id': task-1801969, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1409.912270] env[62519]: DEBUG oslo_vmware.api [None req-0f0e68dd-2aa4-4309-84c5-3072ca2b5eec tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Task: {'id': task-1801964, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.926416} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1409.912560] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-0f0e68dd-2aa4-4309-84c5-3072ca2b5eec tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk to [datastore1] 34d2991e-b6df-473d-8994-e45ff57ef131/34d2991e-b6df-473d-8994-e45ff57ef131.vmdk {{(pid=62519) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1409.912802] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-0f0e68dd-2aa4-4309-84c5-3072ca2b5eec tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] [instance: 34d2991e-b6df-473d-8994-e45ff57ef131] Extending root virtual disk to 1048576 {{(pid=62519) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1409.912994] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-f1e87076-63c0-4d5a-b5e9-f4311103e43f {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1409.923552] env[62519]: DEBUG oslo_vmware.api [None req-0f0e68dd-2aa4-4309-84c5-3072ca2b5eec tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Waiting for the task: (returnval){ [ 1409.923552] env[62519]: value = "task-1801970" [ 1409.923552] env[62519]: _type = "Task" [ 1409.923552] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1409.931809] env[62519]: DEBUG oslo_vmware.api [None req-0f0e68dd-2aa4-4309-84c5-3072ca2b5eec tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Task: {'id': task-1801970, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1409.961023] env[62519]: DEBUG oslo_vmware.api [None req-3dc13758-4a00-4637-af78-ec1a85cd10bf tempest-ServersAdmin275Test-1657935187 tempest-ServersAdmin275Test-1657935187-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52e863e9-33ca-712a-49e3-e569445df50e, 'name': SearchDatastore_Task, 'duration_secs': 0.059709} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1409.961259] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cba447f7-82d5-4167-bbfc-47a04cc8277d {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1409.970363] env[62519]: DEBUG oslo_vmware.api [None req-3dc13758-4a00-4637-af78-ec1a85cd10bf tempest-ServersAdmin275Test-1657935187 tempest-ServersAdmin275Test-1657935187-project-member] Waiting for the task: (returnval){ [ 1409.970363] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]52939b6e-c64d-e43d-efb0-dec8d136a747" [ 1409.970363] env[62519]: _type = "Task" [ 1409.970363] env[62519]: } to complete. 
{{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1409.978884] env[62519]: DEBUG oslo_vmware.api [None req-3dc13758-4a00-4637-af78-ec1a85cd10bf tempest-ServersAdmin275Test-1657935187 tempest-ServersAdmin275Test-1657935187-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52939b6e-c64d-e43d-efb0-dec8d136a747, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1410.179928] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78ddee53-280a-4cd3-ac8e-f576331d2f5e {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1410.191029] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0dfbf1aa-acf5-40bd-9238-0a70c86535f0 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1410.233682] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd765145-54a4-4a9c-8dc5-2b695156eb21 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1410.244617] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c032a22e-9ec8-4a1b-bcd5-5a4e4f1d411e {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1410.255650] env[62519]: DEBUG oslo_concurrency.lockutils [None req-702f15b2-0754-4c5b-b9ec-b97a88471fff tempest-SecurityGroupsTestJSON-38868017 tempest-SecurityGroupsTestJSON-38868017-project-member] Acquiring lock "4a29bff8-050a-4ad5-9d06-3a59c40b97ee" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1410.255909] env[62519]: DEBUG oslo_concurrency.lockutils [None req-702f15b2-0754-4c5b-b9ec-b97a88471fff tempest-SecurityGroupsTestJSON-38868017 tempest-SecurityGroupsTestJSON-38868017-project-member] Lock "4a29bff8-050a-4ad5-9d06-3a59c40b97ee" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1410.265938] env[62519]: DEBUG nova.compute.provider_tree [None req-b63f15ff-942a-4ceb-acce-0511e4a5332d tempest-ServersWithSpecificFlavorTestJSON-2102225660 tempest-ServersWithSpecificFlavorTestJSON-2102225660-project-member] Updating inventory in ProviderTree for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1410.295054] env[62519]: DEBUG oslo_vmware.api [None req-2bcdd96e-4e47-4b74-bed4-3230c87035c0 tempest-ServersV294TestFqdnHostnames-312089260 tempest-ServersV294TestFqdnHostnames-312089260-project-member] Task: {'id': 
task-1801965, 'name': ReconfigVM_Task, 'duration_secs': 0.828182} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1410.295433] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-2bcdd96e-4e47-4b74-bed4-3230c87035c0 tempest-ServersV294TestFqdnHostnames-312089260 tempest-ServersV294TestFqdnHostnames-312089260-project-member] [instance: 2b391628-18a2-4606-8c59-58ba642cee50] Reconfigured VM instance instance-00000008 to attach disk [datastore1] 2b391628-18a2-4606-8c59-58ba642cee50/2b391628-18a2-4606-8c59-58ba642cee50.vmdk or device None with type sparse {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1410.296237] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-ec2ddda0-2e0b-43db-a155-1cc53b9ccf41 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1410.304143] env[62519]: DEBUG oslo_vmware.api [None req-2bcdd96e-4e47-4b74-bed4-3230c87035c0 tempest-ServersV294TestFqdnHostnames-312089260 tempest-ServersV294TestFqdnHostnames-312089260-project-member] Waiting for the task: (returnval){ [ 1410.304143] env[62519]: value = "task-1801971" [ 1410.304143] env[62519]: _type = "Task" [ 1410.304143] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1410.316009] env[62519]: DEBUG oslo_vmware.api [None req-2bcdd96e-4e47-4b74-bed4-3230c87035c0 tempest-ServersV294TestFqdnHostnames-312089260 tempest-ServersV294TestFqdnHostnames-312089260-project-member] Task: {'id': task-1801971, 'name': Rename_Task} progress is 5%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1410.319068] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1801966, 'name': CreateVM_Task, 'duration_secs': 0.820569} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1410.319230] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 099112ae-569b-4853-bc47-b0b8b97d2525] Created VM on the ESX host {{(pid=62519) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1410.319872] env[62519]: DEBUG oslo_concurrency.lockutils [None req-4c3fd39d-ef1d-4593-8784-6a91635c8aff tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1410.320044] env[62519]: DEBUG oslo_concurrency.lockutils [None req-4c3fd39d-ef1d-4593-8784-6a91635c8aff tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Acquired lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1410.320441] env[62519]: DEBUG oslo_concurrency.lockutils [None req-4c3fd39d-ef1d-4593-8784-6a91635c8aff tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1410.320598] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-edf6ad03-19ff-4206-829d-39278c3ef287 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1410.325160] env[62519]: DEBUG oslo_vmware.api [None req-4c3fd39d-ef1d-4593-8784-6a91635c8aff tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Waiting for the task: (returnval){ [ 1410.325160] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]52fc4955-6588-0953-0925-dd2469fa093b" [ 1410.325160] env[62519]: _type = "Task" [ 1410.325160] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1410.336150] env[62519]: DEBUG oslo_vmware.api [None req-4c3fd39d-ef1d-4593-8784-6a91635c8aff tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52fc4955-6588-0953-0925-dd2469fa093b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1410.344407] env[62519]: DEBUG oslo_vmware.api [None req-120ef937-d998-4979-a2ea-db598aea78e6 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Task: {'id': task-1801968, 'name': CloneVM_Task} progress is 94%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1410.408291] env[62519]: DEBUG oslo_vmware.api [None req-94daf7e7-f2b3-4130-bb71-0b7053690311 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Task: {'id': task-1801969, 'name': ReconfigVM_Task, 'duration_secs': 0.334665} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1410.408291] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-94daf7e7-f2b3-4130-bb71-0b7053690311 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] [instance: 11d4a010-959f-4f53-94dc-7499007612ad] Reconfigured VM instance instance-00000009 to attach disk [datastore1] 11d4a010-959f-4f53-94dc-7499007612ad/11d4a010-959f-4f53-94dc-7499007612ad.vmdk or device None with type sparse {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1410.408291] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-e7d1803f-14b7-4f36-a854-16c594fb2099 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1410.413426] env[62519]: DEBUG oslo_concurrency.lockutils [None req-e8efc79a-e5f9-47fc-b2c3-ac5d306260b4 tempest-ServerDiagnosticsTest-1027319288 tempest-ServerDiagnosticsTest-1027319288-project-member] Lock "1118c1e5-1aa8-4f52-9fb9-e86531bf83d1" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 29.520s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1410.413752] env[62519]: DEBUG oslo_vmware.api [None req-94daf7e7-f2b3-4130-bb71-0b7053690311 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Waiting for the task: (returnval){ [ 1410.413752] env[62519]: value = "task-1801972" [ 1410.413752] env[62519]: _type = "Task" [ 1410.413752] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1410.423916] env[62519]: DEBUG oslo_vmware.api [None req-94daf7e7-f2b3-4130-bb71-0b7053690311 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Task: {'id': task-1801972, 'name': Rename_Task} progress is 5%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1410.433668] env[62519]: DEBUG oslo_vmware.api [None req-0f0e68dd-2aa4-4309-84c5-3072ca2b5eec tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Task: {'id': task-1801970, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.079604} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1410.433914] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-0f0e68dd-2aa4-4309-84c5-3072ca2b5eec tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] [instance: 34d2991e-b6df-473d-8994-e45ff57ef131] Extended root virtual disk {{(pid=62519) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1410.434863] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2544510d-84c8-4954-a9b8-1242a7437591 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1410.460720] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-0f0e68dd-2aa4-4309-84c5-3072ca2b5eec tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] [instance: 34d2991e-b6df-473d-8994-e45ff57ef131] Reconfiguring VM instance instance-0000000a to attach disk [datastore1] 34d2991e-b6df-473d-8994-e45ff57ef131/34d2991e-b6df-473d-8994-e45ff57ef131.vmdk or device None with type sparse {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1410.461785] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-97d63e40-ed6a-440b-9c41-8759df0b5132 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1410.491272] env[62519]: DEBUG oslo_vmware.api [None req-3dc13758-4a00-4637-af78-ec1a85cd10bf tempest-ServersAdmin275Test-1657935187 tempest-ServersAdmin275Test-1657935187-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52939b6e-c64d-e43d-efb0-dec8d136a747, 'name': SearchDatastore_Task, 'duration_secs': 0.016782} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1410.491968] env[62519]: DEBUG oslo_concurrency.lockutils [None req-3dc13758-4a00-4637-af78-ec1a85cd10bf tempest-ServersAdmin275Test-1657935187 tempest-ServersAdmin275Test-1657935187-project-member] Releasing lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1410.492277] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-3dc13758-4a00-4637-af78-ec1a85cd10bf tempest-ServersAdmin275Test-1657935187 tempest-ServersAdmin275Test-1657935187-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk to [datastore1] c07e4d30-44bc-417b-8137-97f974aec932/c07e4d30-44bc-417b-8137-97f974aec932.vmdk {{(pid=62519) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1410.492595] env[62519]: DEBUG oslo_vmware.api [None req-0f0e68dd-2aa4-4309-84c5-3072ca2b5eec tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Waiting for the task: (returnval){ [ 1410.492595] env[62519]: value = "task-1801973" [ 1410.492595] env[62519]: _type = "Task" [ 1410.492595] env[62519]: } to complete. 
{{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1410.492785] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f9dbb9ae-0105-4517-b5f2-f36b343f1202 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1410.503240] env[62519]: DEBUG oslo_vmware.api [None req-0f0e68dd-2aa4-4309-84c5-3072ca2b5eec tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Task: {'id': task-1801973, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1410.504528] env[62519]: DEBUG oslo_vmware.api [None req-3dc13758-4a00-4637-af78-ec1a85cd10bf tempest-ServersAdmin275Test-1657935187 tempest-ServersAdmin275Test-1657935187-project-member] Waiting for the task: (returnval){ [ 1410.504528] env[62519]: value = "task-1801974" [ 1410.504528] env[62519]: _type = "Task" [ 1410.504528] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1410.513412] env[62519]: DEBUG oslo_vmware.api [None req-3dc13758-4a00-4637-af78-ec1a85cd10bf tempest-ServersAdmin275Test-1657935187 tempest-ServersAdmin275Test-1657935187-project-member] Task: {'id': task-1801974, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1410.546240] env[62519]: DEBUG nova.network.neutron [req-647329bc-ea96-45ee-9ea8-ea1487131f90 req-ec68474b-2c32-4b91-a832-eb1a0d3e05bc service nova] [instance: 34d2991e-b6df-473d-8994-e45ff57ef131] Updated VIF entry in instance network info cache for port e6b9aab2-f105-4c06-b204-f0626f41ccbe. 
{{(pid=62519) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1410.547424] env[62519]: DEBUG nova.network.neutron [req-647329bc-ea96-45ee-9ea8-ea1487131f90 req-ec68474b-2c32-4b91-a832-eb1a0d3e05bc service nova] [instance: 34d2991e-b6df-473d-8994-e45ff57ef131] Updating instance_info_cache with network_info: [{"id": "e6b9aab2-f105-4c06-b204-f0626f41ccbe", "address": "fa:16:3e:ed:4a:67", "network": {"id": "f59f09e8-6201-4c07-9809-ef4c213de15f", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-906781315-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "555fa612766f4b5fa173664ca3fa496c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8bc3fa06-9d5b-4ab1-8113-6ed8942d23b6", "external-id": "nsx-vlan-transportzone-72", "segmentation_id": 72, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape6b9aab2-f1", "ovs_interfaceid": "e6b9aab2-f105-4c06-b204-f0626f41ccbe", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1410.667781] env[62519]: DEBUG nova.compute.manager [None req-21ea1173-3fc6-4081-929e-331a7f1267ad tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] [instance: 31f55ece-82e6-40ad-ad7c-1af645f307bf] Start spawning the instance on the hypervisor. 
{{(pid=62519) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2647}} [ 1410.698136] env[62519]: DEBUG nova.virt.hardware [None req-21ea1173-3fc6-4081-929e-331a7f1267ad tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T07:56:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T07:55:55Z,direct_url=,disk_format='vmdk',id=15793716-f1d9-4a86-9030-717adf498693,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4069337684d348e0a1ab4eb6a1a2b14d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T07:55:56Z,virtual_size=,visibility=), allow threads: False {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1410.698384] env[62519]: DEBUG nova.virt.hardware [None req-21ea1173-3fc6-4081-929e-331a7f1267ad tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Flavor limits 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1410.698520] env[62519]: DEBUG nova.virt.hardware [None req-21ea1173-3fc6-4081-929e-331a7f1267ad tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Image limits 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1410.698779] env[62519]: DEBUG nova.virt.hardware [None req-21ea1173-3fc6-4081-929e-331a7f1267ad tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Flavor pref 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1410.698845] env[62519]: DEBUG nova.virt.hardware [None req-21ea1173-3fc6-4081-929e-331a7f1267ad tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Image pref 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1410.698994] env[62519]: DEBUG nova.virt.hardware [None req-21ea1173-3fc6-4081-929e-331a7f1267ad tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1410.699593] env[62519]: DEBUG nova.virt.hardware [None req-21ea1173-3fc6-4081-929e-331a7f1267ad tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1410.699762] env[62519]: DEBUG nova.virt.hardware [None req-21ea1173-3fc6-4081-929e-331a7f1267ad tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62519) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1410.699954] env[62519]: DEBUG nova.virt.hardware [None 
req-21ea1173-3fc6-4081-929e-331a7f1267ad tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Got 1 possible topologies {{(pid=62519) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1410.700093] env[62519]: DEBUG nova.virt.hardware [None req-21ea1173-3fc6-4081-929e-331a7f1267ad tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1410.700384] env[62519]: DEBUG nova.virt.hardware [None req-21ea1173-3fc6-4081-929e-331a7f1267ad tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1410.701221] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94cb4600-d63e-457e-b0e4-895946836ad2 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1410.709348] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65bdf44f-b3c8-4609-954a-844f365ef3aa {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1410.799970] env[62519]: ERROR nova.scheduler.client.report [None req-b63f15ff-942a-4ceb-acce-0511e4a5332d tempest-ServersWithSpecificFlavorTestJSON-2102225660 tempest-ServersWithSpecificFlavorTestJSON-2102225660-project-member] [req-b8c903be-c72b-4c28-914c-9f5b73ca9608] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID f8ca0d98-9158-4b85-ae0e-b106f966dd44. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-b8c903be-c72b-4c28-914c-9f5b73ca9608"}]} [ 1410.816395] env[62519]: DEBUG oslo_vmware.api [None req-2bcdd96e-4e47-4b74-bed4-3230c87035c0 tempest-ServersV294TestFqdnHostnames-312089260 tempest-ServersV294TestFqdnHostnames-312089260-project-member] Task: {'id': task-1801971, 'name': Rename_Task} progress is 99%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1410.818964] env[62519]: DEBUG nova.scheduler.client.report [None req-b63f15ff-942a-4ceb-acce-0511e4a5332d tempest-ServersWithSpecificFlavorTestJSON-2102225660 tempest-ServersWithSpecificFlavorTestJSON-2102225660-project-member] Refreshing inventories for resource provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 {{(pid=62519) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 1410.836753] env[62519]: DEBUG oslo_vmware.api [None req-4c3fd39d-ef1d-4593-8784-6a91635c8aff tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52fc4955-6588-0953-0925-dd2469fa093b, 'name': SearchDatastore_Task, 'duration_secs': 0.012666} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1410.842035] env[62519]: DEBUG oslo_concurrency.lockutils [None req-4c3fd39d-ef1d-4593-8784-6a91635c8aff tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Releasing lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1410.842035] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-4c3fd39d-ef1d-4593-8784-6a91635c8aff tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] [instance: 099112ae-569b-4853-bc47-b0b8b97d2525] Processing image 15793716-f1d9-4a86-9030-717adf498693 {{(pid=62519) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1410.842035] env[62519]: DEBUG oslo_concurrency.lockutils [None req-4c3fd39d-ef1d-4593-8784-6a91635c8aff tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1410.842035] env[62519]: DEBUG oslo_concurrency.lockutils [None req-4c3fd39d-ef1d-4593-8784-6a91635c8aff tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Acquired lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1410.842351] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-4c3fd39d-ef1d-4593-8784-6a91635c8aff tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62519) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1410.842351] env[62519]: DEBUG nova.scheduler.client.report [None req-b63f15ff-942a-4ceb-acce-0511e4a5332d tempest-ServersWithSpecificFlavorTestJSON-2102225660 tempest-ServersWithSpecificFlavorTestJSON-2102225660-project-member] Updating ProviderTree inventory for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 
'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 159, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 1410.842351] env[62519]: DEBUG nova.compute.provider_tree [None req-b63f15ff-942a-4ceb-acce-0511e4a5332d tempest-ServersWithSpecificFlavorTestJSON-2102225660 tempest-ServersWithSpecificFlavorTestJSON-2102225660-project-member] Updating inventory in ProviderTree for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 159, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1410.844293] env[62519]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-37db3d81-53f5-4fd8-a5dc-25812841876f {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1410.852832] env[62519]: DEBUG oslo_vmware.api [None req-120ef937-d998-4979-a2ea-db598aea78e6 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Task: {'id': task-1801968, 'name': CloneVM_Task} progress is 94%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1410.854269] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-4c3fd39d-ef1d-4593-8784-6a91635c8aff tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62519) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1410.854447] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-4c3fd39d-ef1d-4593-8784-6a91635c8aff tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62519) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1410.855294] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-753cb848-6cc9-4d0a-b2d9-b697579df9fc {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1410.861107] env[62519]: DEBUG oslo_vmware.api [None req-4c3fd39d-ef1d-4593-8784-6a91635c8aff tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Waiting for the task: (returnval){ [ 1410.861107] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]5271e5e3-99e2-fb18-500c-2b49f1630e93" [ 1410.861107] env[62519]: _type = "Task" [ 1410.861107] env[62519]: } to complete. 
{{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1410.867944] env[62519]: DEBUG nova.scheduler.client.report [None req-b63f15ff-942a-4ceb-acce-0511e4a5332d tempest-ServersWithSpecificFlavorTestJSON-2102225660 tempest-ServersWithSpecificFlavorTestJSON-2102225660-project-member] Refreshing aggregate associations for resource provider f8ca0d98-9158-4b85-ae0e-b106f966dd44, aggregates: None {{(pid=62519) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 1410.873544] env[62519]: DEBUG oslo_vmware.api [None req-4c3fd39d-ef1d-4593-8784-6a91635c8aff tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]5271e5e3-99e2-fb18-500c-2b49f1630e93, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1410.893454] env[62519]: DEBUG nova.scheduler.client.report [None req-b63f15ff-942a-4ceb-acce-0511e4a5332d tempest-ServersWithSpecificFlavorTestJSON-2102225660 tempest-ServersWithSpecificFlavorTestJSON-2102225660-project-member] Refreshing trait associations for resource provider f8ca0d98-9158-4b85-ae0e-b106f966dd44, traits: COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NODE,COMPUTE_SAME_HOST_COLD_MIGRATE,HW_ARCH_X86_64,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_VMDK {{(pid=62519) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 1410.920425] env[62519]: DEBUG nova.compute.manager [None req-f54a75ce-3152-4fde-8da2-c76420839723 tempest-InstanceActionsV221TestJSON-1566764076 tempest-InstanceActionsV221TestJSON-1566764076-project-member] [instance: c612a1be-fb39-416d-a9d2-d206582e5aeb] Starting instance... {{(pid=62519) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2441}} [ 1410.935582] env[62519]: DEBUG oslo_vmware.api [None req-94daf7e7-f2b3-4130-bb71-0b7053690311 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Task: {'id': task-1801972, 'name': Rename_Task, 'duration_secs': 0.188848} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1410.935690] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-94daf7e7-f2b3-4130-bb71-0b7053690311 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] [instance: 11d4a010-959f-4f53-94dc-7499007612ad] Powering on the VM {{(pid=62519) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1410.936622] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-cd0c95b3-1cc6-430f-9fdc-dfbca42e1822 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1410.945893] env[62519]: DEBUG oslo_vmware.api [None req-94daf7e7-f2b3-4130-bb71-0b7053690311 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Waiting for the task: (returnval){ [ 1410.945893] env[62519]: value = "task-1801975" [ 1410.945893] env[62519]: _type = "Task" [ 1410.945893] env[62519]: } to complete. 
{{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1410.954721] env[62519]: DEBUG oslo_vmware.api [None req-94daf7e7-f2b3-4130-bb71-0b7053690311 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Task: {'id': task-1801975, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1411.008087] env[62519]: DEBUG oslo_vmware.api [None req-0f0e68dd-2aa4-4309-84c5-3072ca2b5eec tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Task: {'id': task-1801973, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1411.019597] env[62519]: DEBUG oslo_vmware.api [None req-3dc13758-4a00-4637-af78-ec1a85cd10bf tempest-ServersAdmin275Test-1657935187 tempest-ServersAdmin275Test-1657935187-project-member] Task: {'id': task-1801974, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1411.049038] env[62519]: DEBUG oslo_concurrency.lockutils [req-647329bc-ea96-45ee-9ea8-ea1487131f90 req-ec68474b-2c32-4b91-a832-eb1a0d3e05bc service nova] Releasing lock "refresh_cache-34d2991e-b6df-473d-8994-e45ff57ef131" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1411.286235] env[62519]: DEBUG nova.compute.manager [req-9dee9e0c-013c-419e-ae1a-ca7f579bdd87 req-f72c2b79-bc46-490b-b79a-493b9c6bc0b8 service nova] [instance: 099112ae-569b-4853-bc47-b0b8b97d2525] Received event network-vif-plugged-18c7d589-708e-4a8d-b1f1-c646d532c6aa {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1411.286441] env[62519]: DEBUG oslo_concurrency.lockutils [req-9dee9e0c-013c-419e-ae1a-ca7f579bdd87 req-f72c2b79-bc46-490b-b79a-493b9c6bc0b8 service nova] Acquiring lock "099112ae-569b-4853-bc47-b0b8b97d2525-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1411.286780] env[62519]: DEBUG oslo_concurrency.lockutils [req-9dee9e0c-013c-419e-ae1a-ca7f579bdd87 req-f72c2b79-bc46-490b-b79a-493b9c6bc0b8 service nova] Lock "099112ae-569b-4853-bc47-b0b8b97d2525-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1411.286855] env[62519]: DEBUG oslo_concurrency.lockutils [req-9dee9e0c-013c-419e-ae1a-ca7f579bdd87 req-f72c2b79-bc46-490b-b79a-493b9c6bc0b8 service nova] Lock "099112ae-569b-4853-bc47-b0b8b97d2525-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1411.287165] env[62519]: DEBUG nova.compute.manager [req-9dee9e0c-013c-419e-ae1a-ca7f579bdd87 req-f72c2b79-bc46-490b-b79a-493b9c6bc0b8 service nova] [instance: 099112ae-569b-4853-bc47-b0b8b97d2525] No waiting events found dispatching network-vif-plugged-18c7d589-708e-4a8d-b1f1-c646d532c6aa {{(pid=62519) pop_instance_event /opt/stack/nova/nova/compute/manager.py:323}} [ 1411.288441] env[62519]: WARNING nova.compute.manager 
[req-9dee9e0c-013c-419e-ae1a-ca7f579bdd87 req-f72c2b79-bc46-490b-b79a-493b9c6bc0b8 service nova] [instance: 099112ae-569b-4853-bc47-b0b8b97d2525] Received unexpected event network-vif-plugged-18c7d589-708e-4a8d-b1f1-c646d532c6aa for instance with vm_state building and task_state spawning. [ 1411.288441] env[62519]: DEBUG nova.compute.manager [req-9dee9e0c-013c-419e-ae1a-ca7f579bdd87 req-f72c2b79-bc46-490b-b79a-493b9c6bc0b8 service nova] [instance: 099112ae-569b-4853-bc47-b0b8b97d2525] Received event network-changed-18c7d589-708e-4a8d-b1f1-c646d532c6aa {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1411.288441] env[62519]: DEBUG nova.compute.manager [req-9dee9e0c-013c-419e-ae1a-ca7f579bdd87 req-f72c2b79-bc46-490b-b79a-493b9c6bc0b8 service nova] [instance: 099112ae-569b-4853-bc47-b0b8b97d2525] Refreshing instance network info cache due to event network-changed-18c7d589-708e-4a8d-b1f1-c646d532c6aa. {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11628}} [ 1411.288441] env[62519]: DEBUG oslo_concurrency.lockutils [req-9dee9e0c-013c-419e-ae1a-ca7f579bdd87 req-f72c2b79-bc46-490b-b79a-493b9c6bc0b8 service nova] Acquiring lock "refresh_cache-099112ae-569b-4853-bc47-b0b8b97d2525" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1411.288441] env[62519]: DEBUG oslo_concurrency.lockutils [req-9dee9e0c-013c-419e-ae1a-ca7f579bdd87 req-f72c2b79-bc46-490b-b79a-493b9c6bc0b8 service nova] Acquired lock "refresh_cache-099112ae-569b-4853-bc47-b0b8b97d2525" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1411.288773] env[62519]: DEBUG nova.network.neutron [req-9dee9e0c-013c-419e-ae1a-ca7f579bdd87 req-f72c2b79-bc46-490b-b79a-493b9c6bc0b8 service nova] [instance: 099112ae-569b-4853-bc47-b0b8b97d2525] Refreshing network info cache for port 18c7d589-708e-4a8d-b1f1-c646d532c6aa {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1411.320506] env[62519]: DEBUG oslo_vmware.api [None req-2bcdd96e-4e47-4b74-bed4-3230c87035c0 tempest-ServersV294TestFqdnHostnames-312089260 tempest-ServersV294TestFqdnHostnames-312089260-project-member] Task: {'id': task-1801971, 'name': Rename_Task} progress is 99%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1411.325363] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b29c4479-c37a-4061-b795-e3f4690900a0 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1411.333096] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ff5f576-be56-497d-b3e6-96f671bd605e {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1411.345445] env[62519]: DEBUG oslo_vmware.api [None req-120ef937-d998-4979-a2ea-db598aea78e6 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Task: {'id': task-1801968, 'name': CloneVM_Task, 'duration_secs': 1.764375} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1411.375366] env[62519]: INFO nova.virt.vmwareapi.vmops [None req-120ef937-d998-4979-a2ea-db598aea78e6 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] [instance: 8b178cc0-db79-4ec2-8962-f31b936f8eff] Created linked-clone VM from snapshot [ 1411.377176] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b95f377c-2ff2-4563-a3ab-c4202ca517b4 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1411.385341] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6fc83e8-c3e2-4edd-a77a-51978ce1b2bf {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1411.393339] env[62519]: DEBUG oslo_vmware.api [None req-4c3fd39d-ef1d-4593-8784-6a91635c8aff tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]5271e5e3-99e2-fb18-500c-2b49f1630e93, 'name': SearchDatastore_Task, 'duration_secs': 0.008605} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1411.400458] env[62519]: DEBUG nova.virt.vmwareapi.images [None req-120ef937-d998-4979-a2ea-db598aea78e6 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] [instance: 8b178cc0-db79-4ec2-8962-f31b936f8eff] Uploading image f89b79a3-eee9-4e62-ac73-98c46676969e {{(pid=62519) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1411.402629] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b7e295c2-49b1-46c9-99ce-7a2dbe618951 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1411.406609] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f7a08dd-b0a2-4ab6-a91f-9df52060e9ec {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1411.413723] env[62519]: DEBUG oslo_vmware.api [None req-4c3fd39d-ef1d-4593-8784-6a91635c8aff tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Waiting for the task: (returnval){ [ 1411.413723] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]52f7c81e-a673-a05e-0855-c65761a52d65" [ 1411.413723] env[62519]: _type = "Task" [ 1411.413723] env[62519]: } to complete. 
{{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1411.422191] env[62519]: DEBUG nova.compute.provider_tree [None req-b63f15ff-942a-4ceb-acce-0511e4a5332d tempest-ServersWithSpecificFlavorTestJSON-2102225660 tempest-ServersWithSpecificFlavorTestJSON-2102225660-project-member] Updating inventory in ProviderTree for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1411.442036] env[62519]: DEBUG oslo_vmware.api [None req-4c3fd39d-ef1d-4593-8784-6a91635c8aff tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52f7c81e-a673-a05e-0855-c65761a52d65, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1411.445247] env[62519]: DEBUG oslo_vmware.rw_handles [None req-120ef937-d998-4979-a2ea-db598aea78e6 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1411.445247] env[62519]: value = "vm-373603" [ 1411.445247] env[62519]: _type = "VirtualMachine" [ 1411.445247] env[62519]: }. {{(pid=62519) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1411.445247] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-0fd3034d-ffce-453f-99d9-25ece5cf07c4 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1411.456424] env[62519]: DEBUG oslo_vmware.api [None req-94daf7e7-f2b3-4130-bb71-0b7053690311 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Task: {'id': task-1801975, 'name': PowerOnVM_Task, 'duration_secs': 0.500659} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1411.458583] env[62519]: DEBUG oslo_concurrency.lockutils [None req-f54a75ce-3152-4fde-8da2-c76420839723 tempest-InstanceActionsV221TestJSON-1566764076 tempest-InstanceActionsV221TestJSON-1566764076-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1411.458892] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-94daf7e7-f2b3-4130-bb71-0b7053690311 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] [instance: 11d4a010-959f-4f53-94dc-7499007612ad] Powered on the VM {{(pid=62519) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1411.459119] env[62519]: INFO nova.compute.manager [None req-94daf7e7-f2b3-4130-bb71-0b7053690311 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] [instance: 11d4a010-959f-4f53-94dc-7499007612ad] Took 10.80 seconds to spawn the instance on the hypervisor. 
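The entries around this point all follow the same invoke-then-poll pattern: a vSphere method such as PowerOnVM_Task, CloneVM_Task or Rename_Task is invoked over SOAP, the returned task reference is handed to oslo_vmware.api.wait_for_task, and the API session polls TaskInfo until the task reaches a terminal state. A minimal sketch of that pattern using oslo.vmware directly is below; the vCenter host, credentials, and the choice of powering on the first VM found are illustrative placeholders, not values taken from this log.

    # Minimal sketch of the invoke-then-wait pattern seen throughout these
    # log entries. Host, credentials and the "first VM found" choice are
    # placeholders for illustration only.
    from oslo_vmware import api, vim_util

    session = api.VMwareAPISession(
        'vcenter.example.org',             # placeholder vCenter host
        'administrator@vsphere.local',     # placeholder username
        'secret',                          # placeholder password
        api_retry_count=10,
        task_poll_interval=0.5)

    # Retrieve up to 100 VirtualMachine managed-object references.
    retrieve_result = session.invoke_api(
        vim_util, 'get_objects', session.vim, 'VirtualMachine', 100)
    vm_ref = retrieve_result.objects[0].obj   # pick an arbitrary VM

    # Start an asynchronous vCenter task, then poll it to completion;
    # wait_for_task() raises if the task finishes in an error state.
    task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
    session.wait_for_task(task)

    session.logout()

The "Task: {'id': ..., 'name': ...} progress is N%" lines above are exactly this polling loop reporting intermediate TaskInfo progress between invocations.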
[ 1411.459318] env[62519]: DEBUG nova.compute.manager [None req-94daf7e7-f2b3-4130-bb71-0b7053690311 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] [instance: 11d4a010-959f-4f53-94dc-7499007612ad] Checking state {{(pid=62519) _get_power_state /opt/stack/nova/nova/compute/manager.py:1799}} [ 1411.459663] env[62519]: DEBUG oslo_vmware.rw_handles [None req-120ef937-d998-4979-a2ea-db598aea78e6 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Lease: (returnval){ [ 1411.459663] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]5294fa71-f03f-01a1-15df-64f820dd78d3" [ 1411.459663] env[62519]: _type = "HttpNfcLease" [ 1411.459663] env[62519]: } obtained for exporting VM: (result){ [ 1411.459663] env[62519]: value = "vm-373603" [ 1411.459663] env[62519]: _type = "VirtualMachine" [ 1411.459663] env[62519]: }. {{(pid=62519) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1411.459905] env[62519]: DEBUG oslo_vmware.api [None req-120ef937-d998-4979-a2ea-db598aea78e6 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Waiting for the lease: (returnval){ [ 1411.459905] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]5294fa71-f03f-01a1-15df-64f820dd78d3" [ 1411.459905] env[62519]: _type = "HttpNfcLease" [ 1411.459905] env[62519]: } to be ready. {{(pid=62519) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1411.460585] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-217fac31-6819-48cd-a6c5-1e118f7bdf5e {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1411.470790] env[62519]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1411.470790] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]5294fa71-f03f-01a1-15df-64f820dd78d3" [ 1411.470790] env[62519]: _type = "HttpNfcLease" [ 1411.470790] env[62519]: } is initializing. {{(pid=62519) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1411.493202] env[62519]: DEBUG nova.compute.manager [None req-af8d63fb-d2a4-40a0-9743-43f129873348 tempest-ServerDiagnosticsTest-588586305 tempest-ServerDiagnosticsTest-588586305-project-admin] [instance: 1118c1e5-1aa8-4f52-9fb9-e86531bf83d1] Checking state {{(pid=62519) _get_power_state /opt/stack/nova/nova/compute/manager.py:1799}} [ 1411.493202] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-020ded76-3777-4b2a-bc38-594bfe62ce6e {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1411.507507] env[62519]: DEBUG oslo_vmware.api [None req-0f0e68dd-2aa4-4309-84c5-3072ca2b5eec tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Task: {'id': task-1801973, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1411.507791] env[62519]: INFO nova.compute.manager [None req-af8d63fb-d2a4-40a0-9743-43f129873348 tempest-ServerDiagnosticsTest-588586305 tempest-ServerDiagnosticsTest-588586305-project-admin] [instance: 1118c1e5-1aa8-4f52-9fb9-e86531bf83d1] Retrieving diagnostics [ 1411.511645] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35d64fef-ad85-4421-b642-3dd737c541e4 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1411.519208] env[62519]: DEBUG oslo_vmware.api [None req-3dc13758-4a00-4637-af78-ec1a85cd10bf tempest-ServersAdmin275Test-1657935187 tempest-ServersAdmin275Test-1657935187-project-member] Task: {'id': task-1801974, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1411.709441] env[62519]: DEBUG nova.network.neutron [None req-21ea1173-3fc6-4081-929e-331a7f1267ad tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] [instance: 31f55ece-82e6-40ad-ad7c-1af645f307bf] Successfully updated port: f5bb4ca5-90d8-4da8-a8c0-bbaf14254969 {{(pid=62519) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1411.820098] env[62519]: DEBUG oslo_vmware.api [None req-2bcdd96e-4e47-4b74-bed4-3230c87035c0 tempest-ServersV294TestFqdnHostnames-312089260 tempest-ServersV294TestFqdnHostnames-312089260-project-member] Task: {'id': task-1801971, 'name': Rename_Task, 'duration_secs': 1.211898} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1411.820603] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-2bcdd96e-4e47-4b74-bed4-3230c87035c0 tempest-ServersV294TestFqdnHostnames-312089260 tempest-ServersV294TestFqdnHostnames-312089260-project-member] [instance: 2b391628-18a2-4606-8c59-58ba642cee50] Powering on the VM {{(pid=62519) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1411.820997] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-73f1856d-59d7-4591-9a1f-83669efbc1ea {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1411.831763] env[62519]: DEBUG oslo_vmware.api [None req-2bcdd96e-4e47-4b74-bed4-3230c87035c0 tempest-ServersV294TestFqdnHostnames-312089260 tempest-ServersV294TestFqdnHostnames-312089260-project-member] Waiting for the task: (returnval){ [ 1411.831763] env[62519]: value = "task-1801977" [ 1411.831763] env[62519]: _type = "Task" [ 1411.831763] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1411.842079] env[62519]: DEBUG oslo_vmware.api [None req-2bcdd96e-4e47-4b74-bed4-3230c87035c0 tempest-ServersV294TestFqdnHostnames-312089260 tempest-ServersV294TestFqdnHostnames-312089260-project-member] Task: {'id': task-1801977, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1411.936910] env[62519]: DEBUG oslo_vmware.api [None req-4c3fd39d-ef1d-4593-8784-6a91635c8aff tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52f7c81e-a673-a05e-0855-c65761a52d65, 'name': SearchDatastore_Task, 'duration_secs': 0.028276} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1411.937807] env[62519]: DEBUG oslo_concurrency.lockutils [None req-4c3fd39d-ef1d-4593-8784-6a91635c8aff tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Releasing lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1411.938574] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-4c3fd39d-ef1d-4593-8784-6a91635c8aff tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk to [datastore1] 099112ae-569b-4853-bc47-b0b8b97d2525/099112ae-569b-4853-bc47-b0b8b97d2525.vmdk {{(pid=62519) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1411.938914] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-1205311b-bf06-408a-b561-5a8504cf4266 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1411.950051] env[62519]: DEBUG oslo_vmware.api [None req-4c3fd39d-ef1d-4593-8784-6a91635c8aff tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Waiting for the task: (returnval){ [ 1411.950051] env[62519]: value = "task-1801978" [ 1411.950051] env[62519]: _type = "Task" [ 1411.950051] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1411.968649] env[62519]: DEBUG oslo_vmware.api [None req-4c3fd39d-ef1d-4593-8784-6a91635c8aff tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Task: {'id': task-1801978, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1411.984641] env[62519]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1411.984641] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]5294fa71-f03f-01a1-15df-64f820dd78d3" [ 1411.984641] env[62519]: _type = "HttpNfcLease" [ 1411.984641] env[62519]: } is ready. 
{{(pid=62519) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1411.987710] env[62519]: DEBUG nova.scheduler.client.report [None req-b63f15ff-942a-4ceb-acce-0511e4a5332d tempest-ServersWithSpecificFlavorTestJSON-2102225660 tempest-ServersWithSpecificFlavorTestJSON-2102225660-project-member] Updated inventory for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 with generation 33 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 1411.988794] env[62519]: DEBUG nova.compute.provider_tree [None req-b63f15ff-942a-4ceb-acce-0511e4a5332d tempest-ServersWithSpecificFlavorTestJSON-2102225660 tempest-ServersWithSpecificFlavorTestJSON-2102225660-project-member] Updating resource provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 generation from 33 to 34 during operation: update_inventory {{(pid=62519) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1411.988794] env[62519]: DEBUG nova.compute.provider_tree [None req-b63f15ff-942a-4ceb-acce-0511e4a5332d tempest-ServersWithSpecificFlavorTestJSON-2102225660 tempest-ServersWithSpecificFlavorTestJSON-2102225660-project-member] Updating inventory in ProviderTree for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1411.991974] env[62519]: DEBUG oslo_vmware.rw_handles [None req-120ef937-d998-4979-a2ea-db598aea78e6 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1411.991974] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]5294fa71-f03f-01a1-15df-64f820dd78d3" [ 1411.991974] env[62519]: _type = "HttpNfcLease" [ 1411.991974] env[62519]: }. {{(pid=62519) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1411.993205] env[62519]: INFO nova.compute.manager [None req-94daf7e7-f2b3-4130-bb71-0b7053690311 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] [instance: 11d4a010-959f-4f53-94dc-7499007612ad] Took 26.42 seconds to build instance. 
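The earlier 409 "placement.concurrent_update" error and the successful re-submission here (resource provider generation moving from 33 to 34) illustrate placement's optimistic-concurrency scheme: every inventory PUT must carry the provider generation the client last saw, and a conflict means "refresh the provider, then retry". The sketch below shows that loop against the placement REST API with requests; the endpoint URL, token, and the simplified inventory payload are illustrative assumptions, not values read from this deployment's configuration.

    # Sketch of the refresh-and-retry loop a placement client runs when an
    # inventory update hits a generation conflict (HTTP 409, error code
    # "placement.concurrent_update"). URL, token and payload are placeholders.
    import requests

    PLACEMENT = 'http://placement.example.org/placement'   # placeholder
    HEADERS = {
        'X-Auth-Token': 'REPLACE_ME',                       # placeholder token
        'OpenStack-API-Version': 'placement 1.26',
    }
    provider = 'f8ca0d98-9158-4b85-ae0e-b106f966dd44'
    inventories = {
        'VCPU': {'total': 48, 'allocation_ratio': 4.0, 'max_unit': 16},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'max_unit': 65530},
        'DISK_GB': {'total': 400, 'max_unit': 158},
    }

    for attempt in range(5):
        # Re-read the provider to learn its current generation.
        rp = requests.get(f'{PLACEMENT}/resource_providers/{provider}',
                          headers=HEADERS)
        rp.raise_for_status()
        generation = rp.json()['generation']

        # Submit the inventory together with the generation we just saw.
        resp = requests.put(
            f'{PLACEMENT}/resource_providers/{provider}/inventories',
            headers=HEADERS,
            json={'resource_provider_generation': generation,
                  'inventories': inventories})
        if resp.status_code != 409:
            resp.raise_for_status()
            break   # success: placement bumped the provider generation
        # 409: another writer updated the provider first; loop and refresh.

This is the same behaviour the scheduler report client shows above: on conflict it refreshes inventories, aggregates and traits for the provider and retries with the new generation.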
[ 1411.997535] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4013967d-c648-454b-9dd6-600e1639627f {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1412.018549] env[62519]: DEBUG oslo_vmware.rw_handles [None req-120ef937-d998-4979-a2ea-db598aea78e6 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52773c87-f717-b6e8-3480-e8574de4125a/disk-0.vmdk from lease info. {{(pid=62519) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1412.018795] env[62519]: DEBUG oslo_vmware.rw_handles [None req-120ef937-d998-4979-a2ea-db598aea78e6 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Opening URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52773c87-f717-b6e8-3480-e8574de4125a/disk-0.vmdk for reading. {{(pid=62519) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1412.025804] env[62519]: DEBUG oslo_vmware.api [None req-0f0e68dd-2aa4-4309-84c5-3072ca2b5eec tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Task: {'id': task-1801973, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1412.030985] env[62519]: DEBUG oslo_vmware.api [None req-3dc13758-4a00-4637-af78-ec1a85cd10bf tempest-ServersAdmin275Test-1657935187 tempest-ServersAdmin275Test-1657935187-project-member] Task: {'id': task-1801974, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1412.141726] env[62519]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-13bf2e00-7180-45cd-a17e-2befa0962bfa {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1412.167252] env[62519]: DEBUG nova.network.neutron [req-9dee9e0c-013c-419e-ae1a-ca7f579bdd87 req-f72c2b79-bc46-490b-b79a-493b9c6bc0b8 service nova] [instance: 099112ae-569b-4853-bc47-b0b8b97d2525] Updated VIF entry in instance network info cache for port 18c7d589-708e-4a8d-b1f1-c646d532c6aa. 
{{(pid=62519) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1412.167753] env[62519]: DEBUG nova.network.neutron [req-9dee9e0c-013c-419e-ae1a-ca7f579bdd87 req-f72c2b79-bc46-490b-b79a-493b9c6bc0b8 service nova] [instance: 099112ae-569b-4853-bc47-b0b8b97d2525] Updating instance_info_cache with network_info: [{"id": "18c7d589-708e-4a8d-b1f1-c646d532c6aa", "address": "fa:16:3e:d8:14:73", "network": {"id": "f59f09e8-6201-4c07-9809-ef4c213de15f", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-906781315-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "555fa612766f4b5fa173664ca3fa496c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8bc3fa06-9d5b-4ab1-8113-6ed8942d23b6", "external-id": "nsx-vlan-transportzone-72", "segmentation_id": 72, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap18c7d589-70", "ovs_interfaceid": "18c7d589-708e-4a8d-b1f1-c646d532c6aa", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1412.215794] env[62519]: DEBUG oslo_concurrency.lockutils [None req-21ea1173-3fc6-4081-929e-331a7f1267ad tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Acquiring lock "refresh_cache-31f55ece-82e6-40ad-ad7c-1af645f307bf" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1412.216090] env[62519]: DEBUG oslo_concurrency.lockutils [None req-21ea1173-3fc6-4081-929e-331a7f1267ad tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Acquired lock "refresh_cache-31f55ece-82e6-40ad-ad7c-1af645f307bf" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1412.216314] env[62519]: DEBUG nova.network.neutron [None req-21ea1173-3fc6-4081-929e-331a7f1267ad tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] [instance: 31f55ece-82e6-40ad-ad7c-1af645f307bf] Building network info cache for instance {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1412.349734] env[62519]: DEBUG oslo_vmware.api [None req-2bcdd96e-4e47-4b74-bed4-3230c87035c0 tempest-ServersV294TestFqdnHostnames-312089260 tempest-ServersV294TestFqdnHostnames-312089260-project-member] Task: {'id': task-1801977, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1412.463262] env[62519]: DEBUG oslo_vmware.api [None req-4c3fd39d-ef1d-4593-8784-6a91635c8aff tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Task: {'id': task-1801978, 'name': CopyVirtualDisk_Task} progress is 4%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1412.506456] env[62519]: DEBUG oslo_concurrency.lockutils [None req-b63f15ff-942a-4ceb-acce-0511e4a5332d tempest-ServersWithSpecificFlavorTestJSON-2102225660 tempest-ServersWithSpecificFlavorTestJSON-2102225660-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.883s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1412.507036] env[62519]: DEBUG nova.compute.manager [None req-b63f15ff-942a-4ceb-acce-0511e4a5332d tempest-ServersWithSpecificFlavorTestJSON-2102225660 tempest-ServersWithSpecificFlavorTestJSON-2102225660-project-member] [instance: 029ee07c-705d-452b-9b14-385d69f2fbbb] Start building networks asynchronously for instance. {{(pid=62519) _build_resources /opt/stack/nova/nova/compute/manager.py:2838}} [ 1412.521546] env[62519]: DEBUG oslo_concurrency.lockutils [None req-88c1f75f-e69f-401c-9a65-440f1169c628 tempest-DeleteServersAdminTestJSON-1841953118 tempest-DeleteServersAdminTestJSON-1841953118-project-admin] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 13.148s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1412.522015] env[62519]: DEBUG nova.objects.instance [None req-88c1f75f-e69f-401c-9a65-440f1169c628 tempest-DeleteServersAdminTestJSON-1841953118 tempest-DeleteServersAdminTestJSON-1841953118-project-admin] Lazy-loading 'resources' on Instance uuid f6e29557-05ad-4a11-bd01-0315926c0413 {{(pid=62519) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1412.564633] env[62519]: DEBUG oslo_vmware.api [None req-3dc13758-4a00-4637-af78-ec1a85cd10bf tempest-ServersAdmin275Test-1657935187 tempest-ServersAdmin275Test-1657935187-project-member] Task: {'id': task-1801974, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.6895} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1412.564989] env[62519]: DEBUG oslo_vmware.api [None req-0f0e68dd-2aa4-4309-84c5-3072ca2b5eec tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Task: {'id': task-1801973, 'name': ReconfigVM_Task, 'duration_secs': 1.842851} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1412.565340] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-3dc13758-4a00-4637-af78-ec1a85cd10bf tempest-ServersAdmin275Test-1657935187 tempest-ServersAdmin275Test-1657935187-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk to [datastore1] c07e4d30-44bc-417b-8137-97f974aec932/c07e4d30-44bc-417b-8137-97f974aec932.vmdk {{(pid=62519) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1412.565602] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-3dc13758-4a00-4637-af78-ec1a85cd10bf tempest-ServersAdmin275Test-1657935187 tempest-ServersAdmin275Test-1657935187-project-member] [instance: c07e4d30-44bc-417b-8137-97f974aec932] Extending root virtual disk to 1048576 {{(pid=62519) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1412.565959] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-0f0e68dd-2aa4-4309-84c5-3072ca2b5eec tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] [instance: 34d2991e-b6df-473d-8994-e45ff57ef131] Reconfigured VM instance instance-0000000a to attach disk [datastore1] 34d2991e-b6df-473d-8994-e45ff57ef131/34d2991e-b6df-473d-8994-e45ff57ef131.vmdk or device None with type sparse {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1412.568458] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-e654fe1d-41ad-4065-953b-b4b60b58e172 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1412.569070] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-95524404-9a62-4e6c-ada0-e1aac380b129 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1412.577584] env[62519]: DEBUG oslo_vmware.api [None req-3dc13758-4a00-4637-af78-ec1a85cd10bf tempest-ServersAdmin275Test-1657935187 tempest-ServersAdmin275Test-1657935187-project-member] Waiting for the task: (returnval){ [ 1412.577584] env[62519]: value = "task-1801979" [ 1412.577584] env[62519]: _type = "Task" [ 1412.577584] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1412.580133] env[62519]: DEBUG oslo_vmware.api [None req-0f0e68dd-2aa4-4309-84c5-3072ca2b5eec tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Waiting for the task: (returnval){ [ 1412.580133] env[62519]: value = "task-1801980" [ 1412.580133] env[62519]: _type = "Task" [ 1412.580133] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1412.611301] env[62519]: DEBUG oslo_vmware.api [None req-0f0e68dd-2aa4-4309-84c5-3072ca2b5eec tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Task: {'id': task-1801980, 'name': Rename_Task} progress is 14%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1412.611670] env[62519]: DEBUG oslo_vmware.api [None req-3dc13758-4a00-4637-af78-ec1a85cd10bf tempest-ServersAdmin275Test-1657935187 tempest-ServersAdmin275Test-1657935187-project-member] Task: {'id': task-1801979, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1412.671259] env[62519]: DEBUG oslo_concurrency.lockutils [req-9dee9e0c-013c-419e-ae1a-ca7f579bdd87 req-f72c2b79-bc46-490b-b79a-493b9c6bc0b8 service nova] Releasing lock "refresh_cache-099112ae-569b-4853-bc47-b0b8b97d2525" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1412.763185] env[62519]: DEBUG nova.network.neutron [None req-21ea1173-3fc6-4081-929e-331a7f1267ad tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] [instance: 31f55ece-82e6-40ad-ad7c-1af645f307bf] Instance cache missing network info. {{(pid=62519) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1412.794149] env[62519]: DEBUG nova.compute.manager [req-20aa3991-dfd4-466e-8a4a-cc984b13d4c4 req-49d4fcef-b6ad-4744-bfa0-783f76bd2265 service nova] [instance: 31f55ece-82e6-40ad-ad7c-1af645f307bf] Received event network-vif-plugged-f5bb4ca5-90d8-4da8-a8c0-bbaf14254969 {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1412.794495] env[62519]: DEBUG oslo_concurrency.lockutils [req-20aa3991-dfd4-466e-8a4a-cc984b13d4c4 req-49d4fcef-b6ad-4744-bfa0-783f76bd2265 service nova] Acquiring lock "31f55ece-82e6-40ad-ad7c-1af645f307bf-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1412.794622] env[62519]: DEBUG oslo_concurrency.lockutils [req-20aa3991-dfd4-466e-8a4a-cc984b13d4c4 req-49d4fcef-b6ad-4744-bfa0-783f76bd2265 service nova] Lock "31f55ece-82e6-40ad-ad7c-1af645f307bf-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1412.794815] env[62519]: DEBUG oslo_concurrency.lockutils [req-20aa3991-dfd4-466e-8a4a-cc984b13d4c4 req-49d4fcef-b6ad-4744-bfa0-783f76bd2265 service nova] Lock "31f55ece-82e6-40ad-ad7c-1af645f307bf-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1412.795176] env[62519]: DEBUG nova.compute.manager [req-20aa3991-dfd4-466e-8a4a-cc984b13d4c4 req-49d4fcef-b6ad-4744-bfa0-783f76bd2265 service nova] [instance: 31f55ece-82e6-40ad-ad7c-1af645f307bf] No waiting events found dispatching network-vif-plugged-f5bb4ca5-90d8-4da8-a8c0-bbaf14254969 {{(pid=62519) pop_instance_event /opt/stack/nova/nova/compute/manager.py:323}} [ 1412.795259] env[62519]: WARNING nova.compute.manager [req-20aa3991-dfd4-466e-8a4a-cc984b13d4c4 req-49d4fcef-b6ad-4744-bfa0-783f76bd2265 service nova] [instance: 31f55ece-82e6-40ad-ad7c-1af645f307bf] Received unexpected event network-vif-plugged-f5bb4ca5-90d8-4da8-a8c0-bbaf14254969 for instance with vm_state building and task_state spawning. 
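The "Acquiring lock" / "acquired" / "released" records around "refresh_cache-<instance uuid>" above come from oslo.concurrency's lockutils. A minimal sketch of that pattern, assuming a placeholder rebuild function and reusing the instance UUID from the records above (an illustration only, not Nova's actual _get_instance_nw_info code):

from oslo_concurrency import lockutils

INSTANCE_UUID = "31f55ece-82e6-40ad-ad7c-1af645f307bf"  # UUID taken from the records above

def refresh_instance_cache(rebuild_network_info):
    # lockutils.lock() is a context manager; with debug logging enabled it
    # produces acquire/release DEBUG records analogous to the ones above.
    with lockutils.lock("refresh_cache-%s" % INSTANCE_UUID):
        # Rebuild and store instance_info_cache while the lock is held so
        # concurrent requests do not race on the cache.
        return rebuild_network_info(INSTANCE_UUID)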
[ 1412.849653] env[62519]: DEBUG oslo_vmware.api [None req-2bcdd96e-4e47-4b74-bed4-3230c87035c0 tempest-ServersV294TestFqdnHostnames-312089260 tempest-ServersV294TestFqdnHostnames-312089260-project-member] Task: {'id': task-1801977, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1412.960820] env[62519]: DEBUG oslo_vmware.api [None req-4c3fd39d-ef1d-4593-8784-6a91635c8aff tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Task: {'id': task-1801978, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.789009} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1412.961135] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-4c3fd39d-ef1d-4593-8784-6a91635c8aff tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk to [datastore1] 099112ae-569b-4853-bc47-b0b8b97d2525/099112ae-569b-4853-bc47-b0b8b97d2525.vmdk {{(pid=62519) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1412.961324] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-4c3fd39d-ef1d-4593-8784-6a91635c8aff tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] [instance: 099112ae-569b-4853-bc47-b0b8b97d2525] Extending root virtual disk to 1048576 {{(pid=62519) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1412.961569] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-eadc96e0-5af6-4acc-b801-79e3f1442614 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1412.968331] env[62519]: DEBUG oslo_vmware.api [None req-4c3fd39d-ef1d-4593-8784-6a91635c8aff tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Waiting for the task: (returnval){ [ 1412.968331] env[62519]: value = "task-1801981" [ 1412.968331] env[62519]: _type = "Task" [ 1412.968331] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1412.979675] env[62519]: DEBUG oslo_vmware.api [None req-4c3fd39d-ef1d-4593-8784-6a91635c8aff tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Task: {'id': task-1801981, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1413.043478] env[62519]: DEBUG nova.compute.utils [None req-b63f15ff-942a-4ceb-acce-0511e4a5332d tempest-ServersWithSpecificFlavorTestJSON-2102225660 tempest-ServersWithSpecificFlavorTestJSON-2102225660-project-member] Using /dev/sd instead of None {{(pid=62519) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1413.046124] env[62519]: DEBUG nova.compute.manager [None req-b63f15ff-942a-4ceb-acce-0511e4a5332d tempest-ServersWithSpecificFlavorTestJSON-2102225660 tempest-ServersWithSpecificFlavorTestJSON-2102225660-project-member] [instance: 029ee07c-705d-452b-9b14-385d69f2fbbb] Allocating IP information in the background. 
{{(pid=62519) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1989}} [ 1413.046294] env[62519]: DEBUG nova.network.neutron [None req-b63f15ff-942a-4ceb-acce-0511e4a5332d tempest-ServersWithSpecificFlavorTestJSON-2102225660 tempest-ServersWithSpecificFlavorTestJSON-2102225660-project-member] [instance: 029ee07c-705d-452b-9b14-385d69f2fbbb] allocate_for_instance() {{(pid=62519) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1413.105192] env[62519]: DEBUG oslo_vmware.api [None req-3dc13758-4a00-4637-af78-ec1a85cd10bf tempest-ServersAdmin275Test-1657935187 tempest-ServersAdmin275Test-1657935187-project-member] Task: {'id': task-1801979, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.121422} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1413.106809] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-3dc13758-4a00-4637-af78-ec1a85cd10bf tempest-ServersAdmin275Test-1657935187 tempest-ServersAdmin275Test-1657935187-project-member] [instance: c07e4d30-44bc-417b-8137-97f974aec932] Extended root virtual disk {{(pid=62519) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1413.114214] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10e317fc-aa45-42ee-93d6-d8cbb8e7a751 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1413.117229] env[62519]: DEBUG oslo_vmware.api [None req-0f0e68dd-2aa4-4309-84c5-3072ca2b5eec tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Task: {'id': task-1801980, 'name': Rename_Task, 'duration_secs': 0.305956} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1413.117794] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-0f0e68dd-2aa4-4309-84c5-3072ca2b5eec tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] [instance: 34d2991e-b6df-473d-8994-e45ff57ef131] Powering on the VM {{(pid=62519) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1413.119482] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c0620235-7ec0-4d4e-9c76-47b600b66851 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1413.139609] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-3dc13758-4a00-4637-af78-ec1a85cd10bf tempest-ServersAdmin275Test-1657935187 tempest-ServersAdmin275Test-1657935187-project-member] [instance: c07e4d30-44bc-417b-8137-97f974aec932] Reconfiguring VM instance instance-0000000c to attach disk [datastore1] c07e4d30-44bc-417b-8137-97f974aec932/c07e4d30-44bc-417b-8137-97f974aec932.vmdk or device None with type sparse {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1413.141343] env[62519]: DEBUG nova.network.neutron [None req-21ea1173-3fc6-4081-929e-331a7f1267ad tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] [instance: 31f55ece-82e6-40ad-ad7c-1af645f307bf] Updating instance_info_cache with network_info: [{"id": "f5bb4ca5-90d8-4da8-a8c0-bbaf14254969", "address": "fa:16:3e:4c:e7:81", "network": {"id": "f9454e9e-4102-407f-acc0-7afee44ec876", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-684678313-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "683d60927fdf424386ffcfaa344a7af6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "11b669be-fb26-4ef8-bdb6-c77ab9d06daf", "external-id": "nsx-vlan-transportzone-633", "segmentation_id": 633, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf5bb4ca5-90", "ovs_interfaceid": "f5bb4ca5-90d8-4da8-a8c0-bbaf14254969", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1413.146273] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5583313a-50d1-4546-bca8-90a10c0130e3 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1413.170879] env[62519]: DEBUG oslo_vmware.api [None req-0f0e68dd-2aa4-4309-84c5-3072ca2b5eec tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Waiting for the task: (returnval){ [ 1413.170879] env[62519]: value = "task-1801982" [ 1413.170879] env[62519]: _type = "Task" [ 1413.170879] env[62519]: } to complete. 
{{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1413.180079] env[62519]: DEBUG oslo_vmware.api [None req-3dc13758-4a00-4637-af78-ec1a85cd10bf tempest-ServersAdmin275Test-1657935187 tempest-ServersAdmin275Test-1657935187-project-member] Waiting for the task: (returnval){ [ 1413.180079] env[62519]: value = "task-1801983" [ 1413.180079] env[62519]: _type = "Task" [ 1413.180079] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1413.187632] env[62519]: DEBUG oslo_vmware.api [None req-0f0e68dd-2aa4-4309-84c5-3072ca2b5eec tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Task: {'id': task-1801982, 'name': PowerOnVM_Task} progress is 33%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1413.196672] env[62519]: DEBUG oslo_vmware.api [None req-3dc13758-4a00-4637-af78-ec1a85cd10bf tempest-ServersAdmin275Test-1657935187 tempest-ServersAdmin275Test-1657935187-project-member] Task: {'id': task-1801983, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1413.201542] env[62519]: DEBUG nova.policy [None req-b63f15ff-942a-4ceb-acce-0511e4a5332d tempest-ServersWithSpecificFlavorTestJSON-2102225660 tempest-ServersWithSpecificFlavorTestJSON-2102225660-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '062e47ab0e39404b99924877d3900c7e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '8929cfd4892c4e399062483665500dd9', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62519) authorize /opt/stack/nova/nova/policy.py:192}} [ 1413.352029] env[62519]: DEBUG oslo_vmware.api [None req-2bcdd96e-4e47-4b74-bed4-3230c87035c0 tempest-ServersV294TestFqdnHostnames-312089260 tempest-ServersV294TestFqdnHostnames-312089260-project-member] Task: {'id': task-1801977, 'name': PowerOnVM_Task, 'duration_secs': 1.086606} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1413.352029] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-2bcdd96e-4e47-4b74-bed4-3230c87035c0 tempest-ServersV294TestFqdnHostnames-312089260 tempest-ServersV294TestFqdnHostnames-312089260-project-member] [instance: 2b391628-18a2-4606-8c59-58ba642cee50] Powered on the VM {{(pid=62519) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1413.352029] env[62519]: INFO nova.compute.manager [None req-2bcdd96e-4e47-4b74-bed4-3230c87035c0 tempest-ServersV294TestFqdnHostnames-312089260 tempest-ServersV294TestFqdnHostnames-312089260-project-member] [instance: 2b391628-18a2-4606-8c59-58ba642cee50] Took 15.11 seconds to spawn the instance on the hypervisor. 
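Each "Waiting for the task: (returnval){ value = "task-..." }" record above is followed by _poll_task progress lines until the task completes. A minimal sketch of that wait, assuming an already established oslo.vmware session object and a task_ref returned by an earlier invoke_api() call (both placeholders here):

from oslo_vmware import exceptions as vexc

def wait_for_vmware_task(session, task_ref):
    # wait_for_task() polls the server-side task state (the "progress is N%"
    # records above) and returns the task info once it completes successfully.
    try:
        return session.wait_for_task(task_ref)
    except vexc.VimException:
        # A failed or cancelled task surfaces as an exception rather than a
        # completed-successfully record; callers decide how to handle it.
        raise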
[ 1413.352029] env[62519]: DEBUG nova.compute.manager [None req-2bcdd96e-4e47-4b74-bed4-3230c87035c0 tempest-ServersV294TestFqdnHostnames-312089260 tempest-ServersV294TestFqdnHostnames-312089260-project-member] [instance: 2b391628-18a2-4606-8c59-58ba642cee50] Checking state {{(pid=62519) _get_power_state /opt/stack/nova/nova/compute/manager.py:1799}} [ 1413.352029] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e8dcba2-88bb-4548-84c9-3673ad19e51e {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1413.479782] env[62519]: DEBUG oslo_vmware.api [None req-4c3fd39d-ef1d-4593-8784-6a91635c8aff tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Task: {'id': task-1801981, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.147125} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1413.480138] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-4c3fd39d-ef1d-4593-8784-6a91635c8aff tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] [instance: 099112ae-569b-4853-bc47-b0b8b97d2525] Extended root virtual disk {{(pid=62519) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1413.481740] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0412aeaa-78cd-4642-9e99-57edc086d53a {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1413.516684] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-4c3fd39d-ef1d-4593-8784-6a91635c8aff tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] [instance: 099112ae-569b-4853-bc47-b0b8b97d2525] Reconfiguring VM instance instance-0000000b to attach disk [datastore1] 099112ae-569b-4853-bc47-b0b8b97d2525/099112ae-569b-4853-bc47-b0b8b97d2525.vmdk or device None with type sparse {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1413.521117] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ab5bdd56-0bf9-4b2e-9bca-a09f2c1d7cfa {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1413.541599] env[62519]: DEBUG oslo_concurrency.lockutils [None req-94daf7e7-f2b3-4130-bb71-0b7053690311 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Lock "11d4a010-959f-4f53-94dc-7499007612ad" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 29.031s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1413.549136] env[62519]: DEBUG nova.compute.manager [None req-b63f15ff-942a-4ceb-acce-0511e4a5332d tempest-ServersWithSpecificFlavorTestJSON-2102225660 tempest-ServersWithSpecificFlavorTestJSON-2102225660-project-member] [instance: 029ee07c-705d-452b-9b14-385d69f2fbbb] Start building block device mappings for instance. 
{{(pid=62519) _build_resources /opt/stack/nova/nova/compute/manager.py:2873}} [ 1413.557258] env[62519]: DEBUG oslo_vmware.api [None req-4c3fd39d-ef1d-4593-8784-6a91635c8aff tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Waiting for the task: (returnval){ [ 1413.557258] env[62519]: value = "task-1801984" [ 1413.557258] env[62519]: _type = "Task" [ 1413.557258] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1413.574192] env[62519]: DEBUG oslo_vmware.api [None req-4c3fd39d-ef1d-4593-8784-6a91635c8aff tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Task: {'id': task-1801984, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1413.631871] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d48270c3-abac-4f03-82e7-da63f4f16ff6 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1413.640446] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0d94c05-4f67-4634-8dc3-f29d547bedaa {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1413.685292] env[62519]: DEBUG oslo_concurrency.lockutils [None req-21ea1173-3fc6-4081-929e-331a7f1267ad tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Releasing lock "refresh_cache-31f55ece-82e6-40ad-ad7c-1af645f307bf" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1413.685604] env[62519]: DEBUG nova.compute.manager [None req-21ea1173-3fc6-4081-929e-331a7f1267ad tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] [instance: 31f55ece-82e6-40ad-ad7c-1af645f307bf] Instance network_info: |[{"id": "f5bb4ca5-90d8-4da8-a8c0-bbaf14254969", "address": "fa:16:3e:4c:e7:81", "network": {"id": "f9454e9e-4102-407f-acc0-7afee44ec876", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-684678313-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "683d60927fdf424386ffcfaa344a7af6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "11b669be-fb26-4ef8-bdb6-c77ab9d06daf", "external-id": "nsx-vlan-transportzone-633", "segmentation_id": 633, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf5bb4ca5-90", "ovs_interfaceid": "f5bb4ca5-90d8-4da8-a8c0-bbaf14254969", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62519) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2004}} [ 1413.686758] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-21ea1173-3fc6-4081-929e-331a7f1267ad tempest-DeleteServersTestJSON-1737011426 
tempest-DeleteServersTestJSON-1737011426-project-member] [instance: 31f55ece-82e6-40ad-ad7c-1af645f307bf] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:4c:e7:81', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '11b669be-fb26-4ef8-bdb6-c77ab9d06daf', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'f5bb4ca5-90d8-4da8-a8c0-bbaf14254969', 'vif_model': 'vmxnet3'}] {{(pid=62519) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1413.696539] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-21ea1173-3fc6-4081-929e-331a7f1267ad tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Creating folder: Project (683d60927fdf424386ffcfaa344a7af6). Parent ref: group-v373567. {{(pid=62519) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1413.703531] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7971ac9b-5592-43e8-819e-2342d05ae89c {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1413.706058] env[62519]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b4948eb1-a1b9-4f3e-89c5-f055c7a73e0e {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1413.708045] env[62519]: DEBUG oslo_concurrency.lockutils [None req-e0b428ce-832c-48b3-9b41-b8eaaec78edb tempest-ServerDiagnosticsTest-1027319288 tempest-ServerDiagnosticsTest-1027319288-project-member] Acquiring lock "1118c1e5-1aa8-4f52-9fb9-e86531bf83d1" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1413.708733] env[62519]: DEBUG oslo_concurrency.lockutils [None req-e0b428ce-832c-48b3-9b41-b8eaaec78edb tempest-ServerDiagnosticsTest-1027319288 tempest-ServerDiagnosticsTest-1027319288-project-member] Lock "1118c1e5-1aa8-4f52-9fb9-e86531bf83d1" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1413.708733] env[62519]: DEBUG oslo_concurrency.lockutils [None req-e0b428ce-832c-48b3-9b41-b8eaaec78edb tempest-ServerDiagnosticsTest-1027319288 tempest-ServerDiagnosticsTest-1027319288-project-member] Acquiring lock "1118c1e5-1aa8-4f52-9fb9-e86531bf83d1-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1413.708862] env[62519]: DEBUG oslo_concurrency.lockutils [None req-e0b428ce-832c-48b3-9b41-b8eaaec78edb tempest-ServerDiagnosticsTest-1027319288 tempest-ServerDiagnosticsTest-1027319288-project-member] Lock "1118c1e5-1aa8-4f52-9fb9-e86531bf83d1-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1413.709042] env[62519]: DEBUG oslo_concurrency.lockutils [None req-e0b428ce-832c-48b3-9b41-b8eaaec78edb tempest-ServerDiagnosticsTest-1027319288 tempest-ServerDiagnosticsTest-1027319288-project-member] Lock "1118c1e5-1aa8-4f52-9fb9-e86531bf83d1-events" "released" by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1413.712696] env[62519]: INFO nova.compute.manager [None req-e0b428ce-832c-48b3-9b41-b8eaaec78edb tempest-ServerDiagnosticsTest-1027319288 tempest-ServerDiagnosticsTest-1027319288-project-member] [instance: 1118c1e5-1aa8-4f52-9fb9-e86531bf83d1] Terminating instance [ 1413.717648] env[62519]: DEBUG oslo_vmware.api [None req-0f0e68dd-2aa4-4309-84c5-3072ca2b5eec tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Task: {'id': task-1801982, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1413.726646] env[62519]: DEBUG oslo_vmware.api [None req-3dc13758-4a00-4637-af78-ec1a85cd10bf tempest-ServersAdmin275Test-1657935187 tempest-ServersAdmin275Test-1657935187-project-member] Task: {'id': task-1801983, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1413.727896] env[62519]: DEBUG nova.network.neutron [None req-b63f15ff-942a-4ceb-acce-0511e4a5332d tempest-ServersWithSpecificFlavorTestJSON-2102225660 tempest-ServersWithSpecificFlavorTestJSON-2102225660-project-member] [instance: 029ee07c-705d-452b-9b14-385d69f2fbbb] Successfully created port: d260fe42-dd87-4c6b-a292-ce14b9314974 {{(pid=62519) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1413.731581] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-637e3a0f-6a50-4916-a176-3b5b3474e89e {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1413.738386] env[62519]: INFO nova.virt.vmwareapi.vm_util [None req-21ea1173-3fc6-4081-929e-331a7f1267ad tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Created folder: Project (683d60927fdf424386ffcfaa344a7af6) in parent group-v373567. [ 1413.738537] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-21ea1173-3fc6-4081-929e-331a7f1267ad tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Creating folder: Instances. Parent ref: group-v373604. {{(pid=62519) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1413.738689] env[62519]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e370a7df-b0d1-470e-aa2c-8a5747e4e682 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1413.750431] env[62519]: DEBUG nova.compute.provider_tree [None req-88c1f75f-e69f-401c-9a65-440f1169c628 tempest-DeleteServersAdminTestJSON-1841953118 tempest-DeleteServersAdminTestJSON-1841953118-project-admin] Inventory has not changed in ProviderTree for provider: f8ca0d98-9158-4b85-ae0e-b106f966dd44 {{(pid=62519) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1413.760498] env[62519]: INFO nova.virt.vmwareapi.vm_util [None req-21ea1173-3fc6-4081-929e-331a7f1267ad tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Created folder: Instances in parent group-v373604. 
[ 1413.760498] env[62519]: DEBUG oslo.service.loopingcall [None req-21ea1173-3fc6-4081-929e-331a7f1267ad tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62519) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1413.760498] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 31f55ece-82e6-40ad-ad7c-1af645f307bf] Creating VM on the ESX host {{(pid=62519) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1413.760678] env[62519]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-1fb23bc9-5a8e-4e72-bd2c-f39b4dee727b {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1413.781132] env[62519]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1413.781132] env[62519]: value = "task-1801987" [ 1413.781132] env[62519]: _type = "Task" [ 1413.781132] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1413.790584] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1801987, 'name': CreateVM_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1413.871984] env[62519]: INFO nova.compute.manager [None req-2bcdd96e-4e47-4b74-bed4-3230c87035c0 tempest-ServersV294TestFqdnHostnames-312089260 tempest-ServersV294TestFqdnHostnames-312089260-project-member] [instance: 2b391628-18a2-4606-8c59-58ba642cee50] Took 30.08 seconds to build instance. [ 1414.047788] env[62519]: DEBUG nova.compute.manager [None req-fcf29d77-c706-4a19-9381-3a93a4c3aaf6 tempest-ServerShowV247Test-1912217433 tempest-ServerShowV247Test-1912217433-project-member] [instance: 42497ab5-cce9-4614-a6d1-dffbf6764d7b] Starting instance... {{(pid=62519) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2441}} [ 1414.078363] env[62519]: DEBUG oslo_vmware.api [None req-4c3fd39d-ef1d-4593-8784-6a91635c8aff tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Task: {'id': task-1801984, 'name': ReconfigVM_Task, 'duration_secs': 0.52129} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1414.078363] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-4c3fd39d-ef1d-4593-8784-6a91635c8aff tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] [instance: 099112ae-569b-4853-bc47-b0b8b97d2525] Reconfigured VM instance instance-0000000b to attach disk [datastore1] 099112ae-569b-4853-bc47-b0b8b97d2525/099112ae-569b-4853-bc47-b0b8b97d2525.vmdk or device None with type sparse {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1414.079704] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-d3a38d78-a41c-413a-b64f-0e1402566d8e {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1414.092752] env[62519]: DEBUG oslo_vmware.api [None req-4c3fd39d-ef1d-4593-8784-6a91635c8aff tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Waiting for the task: (returnval){ [ 1414.092752] env[62519]: value = "task-1801988" [ 1414.092752] env[62519]: _type = "Task" [ 1414.092752] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1414.110022] env[62519]: DEBUG oslo_vmware.api [None req-4c3fd39d-ef1d-4593-8784-6a91635c8aff tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Task: {'id': task-1801988, 'name': Rename_Task} progress is 6%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1414.214958] env[62519]: DEBUG oslo_vmware.api [None req-3dc13758-4a00-4637-af78-ec1a85cd10bf tempest-ServersAdmin275Test-1657935187 tempest-ServersAdmin275Test-1657935187-project-member] Task: {'id': task-1801983, 'name': ReconfigVM_Task, 'duration_secs': 0.730288} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1414.215463] env[62519]: DEBUG oslo_vmware.api [None req-0f0e68dd-2aa4-4309-84c5-3072ca2b5eec tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Task: {'id': task-1801982, 'name': PowerOnVM_Task, 'duration_secs': 0.891571} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1414.216513] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-3dc13758-4a00-4637-af78-ec1a85cd10bf tempest-ServersAdmin275Test-1657935187 tempest-ServersAdmin275Test-1657935187-project-member] [instance: c07e4d30-44bc-417b-8137-97f974aec932] Reconfigured VM instance instance-0000000c to attach disk [datastore1] c07e4d30-44bc-417b-8137-97f974aec932/c07e4d30-44bc-417b-8137-97f974aec932.vmdk or device None with type sparse {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1414.216513] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-0f0e68dd-2aa4-4309-84c5-3072ca2b5eec tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] [instance: 34d2991e-b6df-473d-8994-e45ff57ef131] Powered on the VM {{(pid=62519) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1414.216513] env[62519]: INFO nova.compute.manager [None req-0f0e68dd-2aa4-4309-84c5-3072ca2b5eec tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] [instance: 34d2991e-b6df-473d-8994-e45ff57ef131] Took 11.05 seconds to spawn the instance on the hypervisor. [ 1414.216730] env[62519]: DEBUG nova.compute.manager [None req-0f0e68dd-2aa4-4309-84c5-3072ca2b5eec tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] [instance: 34d2991e-b6df-473d-8994-e45ff57ef131] Checking state {{(pid=62519) _get_power_state /opt/stack/nova/nova/compute/manager.py:1799}} [ 1414.217014] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-e5a3291e-b908-4cf3-af0e-6ac2601cf5c5 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1414.219651] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8749ef77-71bd-42fb-9865-74c9fceaaa38 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1414.233224] env[62519]: DEBUG nova.compute.manager [None req-e0b428ce-832c-48b3-9b41-b8eaaec78edb tempest-ServerDiagnosticsTest-1027319288 tempest-ServerDiagnosticsTest-1027319288-project-member] [instance: 1118c1e5-1aa8-4f52-9fb9-e86531bf83d1] Start destroying the instance on the hypervisor. {{(pid=62519) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3166}} [ 1414.233224] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-e0b428ce-832c-48b3-9b41-b8eaaec78edb tempest-ServerDiagnosticsTest-1027319288 tempest-ServerDiagnosticsTest-1027319288-project-member] [instance: 1118c1e5-1aa8-4f52-9fb9-e86531bf83d1] Destroying instance {{(pid=62519) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1414.243239] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-454f9cdc-02f6-42e6-876b-7776ba6fc71b {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1414.246833] env[62519]: DEBUG oslo_vmware.api [None req-3dc13758-4a00-4637-af78-ec1a85cd10bf tempest-ServersAdmin275Test-1657935187 tempest-ServersAdmin275Test-1657935187-project-member] Waiting for the task: (returnval){ [ 1414.246833] env[62519]: value = "task-1801989" [ 1414.246833] env[62519]: _type = "Task" [ 1414.246833] env[62519]: } to complete. 
{{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1414.261040] env[62519]: DEBUG nova.scheduler.client.report [None req-88c1f75f-e69f-401c-9a65-440f1169c628 tempest-DeleteServersAdminTestJSON-1841953118 tempest-DeleteServersAdminTestJSON-1841953118-project-admin] Inventory has not changed for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1414.264135] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-e0b428ce-832c-48b3-9b41-b8eaaec78edb tempest-ServerDiagnosticsTest-1027319288 tempest-ServerDiagnosticsTest-1027319288-project-member] [instance: 1118c1e5-1aa8-4f52-9fb9-e86531bf83d1] Powering off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1414.268548] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c7535614-e369-404d-99b2-ff7b8351ef97 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1414.271460] env[62519]: DEBUG oslo_vmware.api [None req-3dc13758-4a00-4637-af78-ec1a85cd10bf tempest-ServersAdmin275Test-1657935187 tempest-ServersAdmin275Test-1657935187-project-member] Task: {'id': task-1801989, 'name': Rename_Task} progress is 14%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1414.277876] env[62519]: DEBUG oslo_vmware.api [None req-e0b428ce-832c-48b3-9b41-b8eaaec78edb tempest-ServerDiagnosticsTest-1027319288 tempest-ServerDiagnosticsTest-1027319288-project-member] Waiting for the task: (returnval){ [ 1414.277876] env[62519]: value = "task-1801990" [ 1414.277876] env[62519]: _type = "Task" [ 1414.277876] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1414.292400] env[62519]: DEBUG oslo_vmware.api [None req-e0b428ce-832c-48b3-9b41-b8eaaec78edb tempest-ServerDiagnosticsTest-1027319288 tempest-ServerDiagnosticsTest-1027319288-project-member] Task: {'id': task-1801990, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1414.307693] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1801987, 'name': CreateVM_Task, 'duration_secs': 0.447984} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1414.307693] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 31f55ece-82e6-40ad-ad7c-1af645f307bf] Created VM on the ESX host {{(pid=62519) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1414.307693] env[62519]: DEBUG oslo_concurrency.lockutils [None req-21ea1173-3fc6-4081-929e-331a7f1267ad tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1414.307693] env[62519]: DEBUG oslo_concurrency.lockutils [None req-21ea1173-3fc6-4081-929e-331a7f1267ad tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Acquired lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1414.307693] env[62519]: DEBUG oslo_concurrency.lockutils [None req-21ea1173-3fc6-4081-929e-331a7f1267ad tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1414.307693] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-767ba2b6-8c22-41ea-9861-42af3d1e51de {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1414.308180] env[62519]: DEBUG oslo_vmware.api [None req-21ea1173-3fc6-4081-929e-331a7f1267ad tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Waiting for the task: (returnval){ [ 1414.308180] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]5256f913-d68c-f627-c6b0-c348947cc4da" [ 1414.308180] env[62519]: _type = "Task" [ 1414.308180] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1414.311613] env[62519]: DEBUG oslo_vmware.api [None req-21ea1173-3fc6-4081-929e-331a7f1267ad tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]5256f913-d68c-f627-c6b0-c348947cc4da, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1414.427377] env[62519]: DEBUG oslo_concurrency.lockutils [None req-7abad8d6-8e42-4549-a192-8cdae3b16ee9 tempest-ServerExternalEventsTest-627262165 tempest-ServerExternalEventsTest-627262165-project-member] Acquiring lock "f7d5c77d-6c78-4969-b511-2b03ab624c84" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1414.428503] env[62519]: DEBUG oslo_concurrency.lockutils [None req-7abad8d6-8e42-4549-a192-8cdae3b16ee9 tempest-ServerExternalEventsTest-627262165 tempest-ServerExternalEventsTest-627262165-project-member] Lock "f7d5c77d-6c78-4969-b511-2b03ab624c84" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1414.570108] env[62519]: DEBUG nova.compute.manager [None req-b63f15ff-942a-4ceb-acce-0511e4a5332d tempest-ServersWithSpecificFlavorTestJSON-2102225660 tempest-ServersWithSpecificFlavorTestJSON-2102225660-project-member] [instance: 029ee07c-705d-452b-9b14-385d69f2fbbb] Start spawning the instance on the hypervisor. {{(pid=62519) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2647}} [ 1414.583159] env[62519]: DEBUG oslo_concurrency.lockutils [None req-fcf29d77-c706-4a19-9381-3a93a4c3aaf6 tempest-ServerShowV247Test-1912217433 tempest-ServerShowV247Test-1912217433-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1414.607782] env[62519]: DEBUG oslo_vmware.api [None req-4c3fd39d-ef1d-4593-8784-6a91635c8aff tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Task: {'id': task-1801988, 'name': Rename_Task, 'duration_secs': 0.209178} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1414.613177] env[62519]: DEBUG nova.virt.hardware [None req-b63f15ff-942a-4ceb-acce-0511e4a5332d tempest-ServersWithSpecificFlavorTestJSON-2102225660 tempest-ServersWithSpecificFlavorTestJSON-2102225660-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T08:11:12Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='39877551',id=23,is_public=True,memory_mb=192,name='tempest-flavor_with_ephemeral_0-1888302990',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T07:55:55Z,direct_url=,disk_format='vmdk',id=15793716-f1d9-4a86-9030-717adf498693,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4069337684d348e0a1ab4eb6a1a2b14d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T07:55:56Z,virtual_size=,visibility=), allow threads: False {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1414.614315] env[62519]: DEBUG nova.virt.hardware [None req-b63f15ff-942a-4ceb-acce-0511e4a5332d tempest-ServersWithSpecificFlavorTestJSON-2102225660 tempest-ServersWithSpecificFlavorTestJSON-2102225660-project-member] Flavor limits 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1414.614315] env[62519]: DEBUG nova.virt.hardware [None req-b63f15ff-942a-4ceb-acce-0511e4a5332d tempest-ServersWithSpecificFlavorTestJSON-2102225660 tempest-ServersWithSpecificFlavorTestJSON-2102225660-project-member] Image limits 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1414.614315] env[62519]: DEBUG nova.virt.hardware [None req-b63f15ff-942a-4ceb-acce-0511e4a5332d tempest-ServersWithSpecificFlavorTestJSON-2102225660 tempest-ServersWithSpecificFlavorTestJSON-2102225660-project-member] Flavor pref 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1414.614315] env[62519]: DEBUG nova.virt.hardware [None req-b63f15ff-942a-4ceb-acce-0511e4a5332d tempest-ServersWithSpecificFlavorTestJSON-2102225660 tempest-ServersWithSpecificFlavorTestJSON-2102225660-project-member] Image pref 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1414.614315] env[62519]: DEBUG nova.virt.hardware [None req-b63f15ff-942a-4ceb-acce-0511e4a5332d tempest-ServersWithSpecificFlavorTestJSON-2102225660 tempest-ServersWithSpecificFlavorTestJSON-2102225660-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1414.615652] env[62519]: DEBUG nova.virt.hardware [None req-b63f15ff-942a-4ceb-acce-0511e4a5332d tempest-ServersWithSpecificFlavorTestJSON-2102225660 tempest-ServersWithSpecificFlavorTestJSON-2102225660-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1414.615652] env[62519]: DEBUG nova.virt.hardware [None req-b63f15ff-942a-4ceb-acce-0511e4a5332d tempest-ServersWithSpecificFlavorTestJSON-2102225660 
tempest-ServersWithSpecificFlavorTestJSON-2102225660-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62519) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1414.615652] env[62519]: DEBUG nova.virt.hardware [None req-b63f15ff-942a-4ceb-acce-0511e4a5332d tempest-ServersWithSpecificFlavorTestJSON-2102225660 tempest-ServersWithSpecificFlavorTestJSON-2102225660-project-member] Got 1 possible topologies {{(pid=62519) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1414.615652] env[62519]: DEBUG nova.virt.hardware [None req-b63f15ff-942a-4ceb-acce-0511e4a5332d tempest-ServersWithSpecificFlavorTestJSON-2102225660 tempest-ServersWithSpecificFlavorTestJSON-2102225660-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1414.615652] env[62519]: DEBUG nova.virt.hardware [None req-b63f15ff-942a-4ceb-acce-0511e4a5332d tempest-ServersWithSpecificFlavorTestJSON-2102225660 tempest-ServersWithSpecificFlavorTestJSON-2102225660-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1414.615870] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-4c3fd39d-ef1d-4593-8784-6a91635c8aff tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] [instance: 099112ae-569b-4853-bc47-b0b8b97d2525] Powering on the VM {{(pid=62519) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1414.618668] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4112fd10-aa5c-42a3-88d8-3ce6df92a1cf {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1414.622945] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-493122e0-8c52-41a4-acce-991859123f19 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1414.633177] env[62519]: DEBUG oslo_vmware.api [None req-4c3fd39d-ef1d-4593-8784-6a91635c8aff tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Waiting for the task: (returnval){ [ 1414.633177] env[62519]: value = "task-1801991" [ 1414.633177] env[62519]: _type = "Task" [ 1414.633177] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1414.634603] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-921435d9-aa73-46c3-9c5a-674786843b16 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1414.660952] env[62519]: DEBUG oslo_vmware.api [None req-4c3fd39d-ef1d-4593-8784-6a91635c8aff tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Task: {'id': task-1801991, 'name': PowerOnVM_Task} progress is 33%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1414.766365] env[62519]: DEBUG oslo_concurrency.lockutils [None req-88c1f75f-e69f-401c-9a65-440f1169c628 tempest-DeleteServersAdminTestJSON-1841953118 tempest-DeleteServersAdminTestJSON-1841953118-project-admin] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.248s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1414.769333] env[62519]: INFO nova.compute.manager [None req-0f0e68dd-2aa4-4309-84c5-3072ca2b5eec tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] [instance: 34d2991e-b6df-473d-8994-e45ff57ef131] Took 27.06 seconds to build instance. [ 1414.770513] env[62519]: DEBUG oslo_concurrency.lockutils [None req-62794cdc-f128-48b3-a56b-aae18e2e0aa4 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 13.720s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1414.780624] env[62519]: DEBUG oslo_vmware.api [None req-3dc13758-4a00-4637-af78-ec1a85cd10bf tempest-ServersAdmin275Test-1657935187 tempest-ServersAdmin275Test-1657935187-project-member] Task: {'id': task-1801989, 'name': Rename_Task, 'duration_secs': 0.192368} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1414.789367] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-3dc13758-4a00-4637-af78-ec1a85cd10bf tempest-ServersAdmin275Test-1657935187 tempest-ServersAdmin275Test-1657935187-project-member] [instance: c07e4d30-44bc-417b-8137-97f974aec932] Powering on the VM {{(pid=62519) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1414.789837] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-14919e3f-40a7-47b6-a4d7-52d81c2feb78 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1414.798321] env[62519]: DEBUG oslo_vmware.api [None req-e0b428ce-832c-48b3-9b41-b8eaaec78edb tempest-ServerDiagnosticsTest-1027319288 tempest-ServerDiagnosticsTest-1027319288-project-member] Task: {'id': task-1801990, 'name': PowerOffVM_Task, 'duration_secs': 0.207346} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1414.800847] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-e0b428ce-832c-48b3-9b41-b8eaaec78edb tempest-ServerDiagnosticsTest-1027319288 tempest-ServerDiagnosticsTest-1027319288-project-member] [instance: 1118c1e5-1aa8-4f52-9fb9-e86531bf83d1] Powered off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1414.801467] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-e0b428ce-832c-48b3-9b41-b8eaaec78edb tempest-ServerDiagnosticsTest-1027319288 tempest-ServerDiagnosticsTest-1027319288-project-member] [instance: 1118c1e5-1aa8-4f52-9fb9-e86531bf83d1] Unregistering the VM {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1414.801467] env[62519]: DEBUG oslo_vmware.api [None req-3dc13758-4a00-4637-af78-ec1a85cd10bf tempest-ServersAdmin275Test-1657935187 tempest-ServersAdmin275Test-1657935187-project-member] Waiting for the task: (returnval){ [ 1414.801467] env[62519]: value = "task-1801992" [ 1414.801467] env[62519]: _type = "Task" [ 1414.801467] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1414.801680] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-dac80e4b-a2e1-4f9f-ae1d-7b2cc70b29c7 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1414.819396] env[62519]: DEBUG oslo_vmware.api [None req-3dc13758-4a00-4637-af78-ec1a85cd10bf tempest-ServersAdmin275Test-1657935187 tempest-ServersAdmin275Test-1657935187-project-member] Task: {'id': task-1801992, 'name': PowerOnVM_Task} progress is 33%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1414.824815] env[62519]: DEBUG oslo_vmware.api [None req-21ea1173-3fc6-4081-929e-331a7f1267ad tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]5256f913-d68c-f627-c6b0-c348947cc4da, 'name': SearchDatastore_Task, 'duration_secs': 0.031123} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1414.825807] env[62519]: INFO nova.scheduler.client.report [None req-88c1f75f-e69f-401c-9a65-440f1169c628 tempest-DeleteServersAdminTestJSON-1841953118 tempest-DeleteServersAdminTestJSON-1841953118-project-admin] Deleted allocations for instance f6e29557-05ad-4a11-bd01-0315926c0413 [ 1414.827257] env[62519]: DEBUG oslo_concurrency.lockutils [None req-21ea1173-3fc6-4081-929e-331a7f1267ad tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Releasing lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1414.827526] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-21ea1173-3fc6-4081-929e-331a7f1267ad tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] [instance: 31f55ece-82e6-40ad-ad7c-1af645f307bf] Processing image 15793716-f1d9-4a86-9030-717adf498693 {{(pid=62519) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1414.827763] env[62519]: DEBUG oslo_concurrency.lockutils [None req-21ea1173-3fc6-4081-929e-331a7f1267ad tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1414.827902] env[62519]: DEBUG oslo_concurrency.lockutils [None req-21ea1173-3fc6-4081-929e-331a7f1267ad tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Acquired lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1414.828089] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-21ea1173-3fc6-4081-929e-331a7f1267ad tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62519) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1414.828369] env[62519]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a45bd865-f88d-4e4a-9edc-e7d5560e6a90 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1414.836950] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-21ea1173-3fc6-4081-929e-331a7f1267ad tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62519) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1414.837153] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-21ea1173-3fc6-4081-929e-331a7f1267ad tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62519) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1414.837895] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ef25d98c-8baa-4235-9575-baf728246ef8 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1414.845441] env[62519]: DEBUG oslo_vmware.api [None req-21ea1173-3fc6-4081-929e-331a7f1267ad tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Waiting for the task: (returnval){ [ 1414.845441] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]526cd250-dae9-2df9-4e72-44e5f76a3b02" [ 1414.845441] env[62519]: _type = "Task" [ 1414.845441] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1414.856363] env[62519]: DEBUG oslo_vmware.api [None req-21ea1173-3fc6-4081-929e-331a7f1267ad tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]526cd250-dae9-2df9-4e72-44e5f76a3b02, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1414.887681] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-e0b428ce-832c-48b3-9b41-b8eaaec78edb tempest-ServerDiagnosticsTest-1027319288 tempest-ServerDiagnosticsTest-1027319288-project-member] [instance: 1118c1e5-1aa8-4f52-9fb9-e86531bf83d1] Unregistered the VM {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1414.888446] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-e0b428ce-832c-48b3-9b41-b8eaaec78edb tempest-ServerDiagnosticsTest-1027319288 tempest-ServerDiagnosticsTest-1027319288-project-member] [instance: 1118c1e5-1aa8-4f52-9fb9-e86531bf83d1] Deleting contents of the VM from datastore datastore1 {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1414.888446] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-e0b428ce-832c-48b3-9b41-b8eaaec78edb tempest-ServerDiagnosticsTest-1027319288 tempest-ServerDiagnosticsTest-1027319288-project-member] Deleting the datastore file [datastore1] 1118c1e5-1aa8-4f52-9fb9-e86531bf83d1 {{(pid=62519) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1414.888581] env[62519]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-64ef5eef-5cc8-4509-abde-d81609b9c2a3 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1414.896061] env[62519]: DEBUG oslo_vmware.api [None req-e0b428ce-832c-48b3-9b41-b8eaaec78edb tempest-ServerDiagnosticsTest-1027319288 tempest-ServerDiagnosticsTest-1027319288-project-member] Waiting for the task: (returnval){ [ 1414.896061] env[62519]: value = "task-1801994" [ 1414.896061] env[62519]: _type = "Task" [ 1414.896061] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1414.908961] env[62519]: DEBUG oslo_vmware.api [None req-e0b428ce-832c-48b3-9b41-b8eaaec78edb tempest-ServerDiagnosticsTest-1027319288 tempest-ServerDiagnosticsTest-1027319288-project-member] Task: {'id': task-1801994, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1415.147963] env[62519]: DEBUG oslo_vmware.api [None req-4c3fd39d-ef1d-4593-8784-6a91635c8aff tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Task: {'id': task-1801991, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1415.287408] env[62519]: INFO nova.compute.claims [None req-62794cdc-f128-48b3-a56b-aae18e2e0aa4 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] [instance: fe350d30-6fbd-4813-9634-ed05984fecfd] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1415.315689] env[62519]: DEBUG oslo_vmware.api [None req-3dc13758-4a00-4637-af78-ec1a85cd10bf tempest-ServersAdmin275Test-1657935187 tempest-ServersAdmin275Test-1657935187-project-member] Task: {'id': task-1801992, 'name': PowerOnVM_Task} progress is 100%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1415.317998] env[62519]: DEBUG nova.compute.manager [None req-77b93190-5a04-46f8-84df-e4a5309c1ada tempest-ServerDiagnosticsV248Test-1358864958 tempest-ServerDiagnosticsV248Test-1358864958-project-admin] [instance: ceadcb5e-ee82-4441-b046-f79b973ec05e] Checking state {{(pid=62519) _get_power_state /opt/stack/nova/nova/compute/manager.py:1799}} [ 1415.319706] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e614f51-0d12-4195-b898-0d07baa5c367 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1415.330408] env[62519]: INFO nova.compute.manager [None req-77b93190-5a04-46f8-84df-e4a5309c1ada tempest-ServerDiagnosticsV248Test-1358864958 tempest-ServerDiagnosticsV248Test-1358864958-project-admin] [instance: ceadcb5e-ee82-4441-b046-f79b973ec05e] Retrieving diagnostics [ 1415.331255] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-abf63f63-bb2e-4993-8060-246df82bcb5f {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1415.379757] env[62519]: DEBUG oslo_concurrency.lockutils [None req-88c1f75f-e69f-401c-9a65-440f1169c628 tempest-DeleteServersAdminTestJSON-1841953118 tempest-DeleteServersAdminTestJSON-1841953118-project-admin] Lock "f6e29557-05ad-4a11-bd01-0315926c0413" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 20.077s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1415.385682] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2bcdd96e-4e47-4b74-bed4-3230c87035c0 tempest-ServersV294TestFqdnHostnames-312089260 tempest-ServersV294TestFqdnHostnames-312089260-project-member] Lock "2b391628-18a2-4606-8c59-58ba642cee50" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 32.605s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1415.397090] env[62519]: DEBUG oslo_vmware.api [None req-21ea1173-3fc6-4081-929e-331a7f1267ad tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Task: {'id': 
session[520dffeb-48b6-6e75-74d3-efedab77eb43]526cd250-dae9-2df9-4e72-44e5f76a3b02, 'name': SearchDatastore_Task, 'duration_secs': 0.012289} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1415.400976] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6c331c50-ac86-4e9a-b241-a13f01abe533 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1415.417299] env[62519]: DEBUG oslo_vmware.api [None req-21ea1173-3fc6-4081-929e-331a7f1267ad tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Waiting for the task: (returnval){ [ 1415.417299] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]52341e0e-a4dc-0c84-7391-6a4103eb61cb" [ 1415.417299] env[62519]: _type = "Task" [ 1415.417299] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1415.417299] env[62519]: DEBUG oslo_vmware.api [None req-e0b428ce-832c-48b3-9b41-b8eaaec78edb tempest-ServerDiagnosticsTest-1027319288 tempest-ServerDiagnosticsTest-1027319288-project-member] Task: {'id': task-1801994, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.255595} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1415.417299] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-e0b428ce-832c-48b3-9b41-b8eaaec78edb tempest-ServerDiagnosticsTest-1027319288 tempest-ServerDiagnosticsTest-1027319288-project-member] Deleted the datastore file {{(pid=62519) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1415.417299] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-e0b428ce-832c-48b3-9b41-b8eaaec78edb tempest-ServerDiagnosticsTest-1027319288 tempest-ServerDiagnosticsTest-1027319288-project-member] [instance: 1118c1e5-1aa8-4f52-9fb9-e86531bf83d1] Deleted contents of the VM from datastore datastore1 {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1415.417299] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-e0b428ce-832c-48b3-9b41-b8eaaec78edb tempest-ServerDiagnosticsTest-1027319288 tempest-ServerDiagnosticsTest-1027319288-project-member] [instance: 1118c1e5-1aa8-4f52-9fb9-e86531bf83d1] Instance destroyed {{(pid=62519) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1415.417625] env[62519]: INFO nova.compute.manager [None req-e0b428ce-832c-48b3-9b41-b8eaaec78edb tempest-ServerDiagnosticsTest-1027319288 tempest-ServerDiagnosticsTest-1027319288-project-member] [instance: 1118c1e5-1aa8-4f52-9fb9-e86531bf83d1] Took 1.18 seconds to destroy the instance on the hypervisor. [ 1415.417625] env[62519]: DEBUG oslo.service.loopingcall [None req-e0b428ce-832c-48b3-9b41-b8eaaec78edb tempest-ServerDiagnosticsTest-1027319288 tempest-ServerDiagnosticsTest-1027319288-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62519) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1415.420376] env[62519]: DEBUG nova.compute.manager [-] [instance: 1118c1e5-1aa8-4f52-9fb9-e86531bf83d1] Deallocating network for instance {{(pid=62519) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2297}} [ 1415.420376] env[62519]: DEBUG nova.network.neutron [-] [instance: 1118c1e5-1aa8-4f52-9fb9-e86531bf83d1] deallocate_for_instance() {{(pid=62519) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1415.428149] env[62519]: DEBUG oslo_vmware.api [None req-21ea1173-3fc6-4081-929e-331a7f1267ad tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52341e0e-a4dc-0c84-7391-6a4103eb61cb, 'name': SearchDatastore_Task, 'duration_secs': 0.011262} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1415.428548] env[62519]: DEBUG oslo_concurrency.lockutils [None req-21ea1173-3fc6-4081-929e-331a7f1267ad tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Releasing lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1415.428789] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-21ea1173-3fc6-4081-929e-331a7f1267ad tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk to [datastore1] 31f55ece-82e6-40ad-ad7c-1af645f307bf/31f55ece-82e6-40ad-ad7c-1af645f307bf.vmdk {{(pid=62519) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1415.428992] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-61b6d3c4-f339-470d-9f8f-cc287d647598 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1415.436377] env[62519]: DEBUG oslo_vmware.api [None req-21ea1173-3fc6-4081-929e-331a7f1267ad tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Waiting for the task: (returnval){ [ 1415.436377] env[62519]: value = "task-1801995" [ 1415.436377] env[62519]: _type = "Task" [ 1415.436377] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1415.444608] env[62519]: DEBUG oslo_vmware.api [None req-21ea1173-3fc6-4081-929e-331a7f1267ad tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Task: {'id': task-1801995, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1415.564631] env[62519]: DEBUG oslo_concurrency.lockutils [None req-7fddec09-f5a6-4eb5-bfe2-235b0e99801c tempest-ServerShowV254Test-1623148250 tempest-ServerShowV254Test-1623148250-project-member] Acquiring lock "1462d213-3f9a-4c60-8056-0b68f20a4939" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1415.565181] env[62519]: DEBUG oslo_concurrency.lockutils [None req-7fddec09-f5a6-4eb5-bfe2-235b0e99801c tempest-ServerShowV254Test-1623148250 tempest-ServerShowV254Test-1623148250-project-member] Lock "1462d213-3f9a-4c60-8056-0b68f20a4939" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1415.649408] env[62519]: DEBUG oslo_vmware.api [None req-4c3fd39d-ef1d-4593-8784-6a91635c8aff tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Task: {'id': task-1801991, 'name': PowerOnVM_Task, 'duration_secs': 0.688247} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1415.652499] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-4c3fd39d-ef1d-4593-8784-6a91635c8aff tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] [instance: 099112ae-569b-4853-bc47-b0b8b97d2525] Powered on the VM {{(pid=62519) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1415.652642] env[62519]: INFO nova.compute.manager [None req-4c3fd39d-ef1d-4593-8784-6a91635c8aff tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] [instance: 099112ae-569b-4853-bc47-b0b8b97d2525] Took 9.98 seconds to spawn the instance on the hypervisor. [ 1415.652840] env[62519]: DEBUG nova.compute.manager [None req-4c3fd39d-ef1d-4593-8784-6a91635c8aff tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] [instance: 099112ae-569b-4853-bc47-b0b8b97d2525] Checking state {{(pid=62519) _get_power_state /opt/stack/nova/nova/compute/manager.py:1799}} [ 1415.653693] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a7082c9-3ba2-4a30-8846-05ddf45bdd99 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1415.793176] env[62519]: INFO nova.compute.resource_tracker [None req-62794cdc-f128-48b3-a56b-aae18e2e0aa4 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] [instance: fe350d30-6fbd-4813-9634-ed05984fecfd] Updating resource usage from migration f90a3c1a-0ac3-49d8-8f4b-3ad7b5d43ffd [ 1415.815889] env[62519]: DEBUG oslo_vmware.api [None req-3dc13758-4a00-4637-af78-ec1a85cd10bf tempest-ServersAdmin275Test-1657935187 tempest-ServersAdmin275Test-1657935187-project-member] Task: {'id': task-1801992, 'name': PowerOnVM_Task, 'duration_secs': 0.558831} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1415.816234] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-3dc13758-4a00-4637-af78-ec1a85cd10bf tempest-ServersAdmin275Test-1657935187 tempest-ServersAdmin275Test-1657935187-project-member] [instance: c07e4d30-44bc-417b-8137-97f974aec932] Powered on the VM {{(pid=62519) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1415.816720] env[62519]: INFO nova.compute.manager [None req-3dc13758-4a00-4637-af78-ec1a85cd10bf tempest-ServersAdmin275Test-1657935187 tempest-ServersAdmin275Test-1657935187-project-member] [instance: c07e4d30-44bc-417b-8137-97f974aec932] Took 7.66 seconds to spawn the instance on the hypervisor. [ 1415.816720] env[62519]: DEBUG nova.compute.manager [None req-3dc13758-4a00-4637-af78-ec1a85cd10bf tempest-ServersAdmin275Test-1657935187 tempest-ServersAdmin275Test-1657935187-project-member] [instance: c07e4d30-44bc-417b-8137-97f974aec932] Checking state {{(pid=62519) _get_power_state /opt/stack/nova/nova/compute/manager.py:1799}} [ 1415.817685] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c22dc369-9fad-417a-adc7-882f4187e4bf {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1415.900081] env[62519]: DEBUG nova.compute.manager [None req-2cdb3cee-efd6-4022-b37b-5545123251f0 tempest-ServerShowV247Test-1912217433 tempest-ServerShowV247Test-1912217433-project-member] [instance: 49221ea3-d457-4cf5-97a9-9ae74c4e86fb] Starting instance... {{(pid=62519) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2441}} [ 1415.953581] env[62519]: DEBUG oslo_vmware.api [None req-21ea1173-3fc6-4081-929e-331a7f1267ad tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Task: {'id': task-1801995, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1416.180657] env[62519]: INFO nova.compute.manager [None req-4c3fd39d-ef1d-4593-8784-6a91635c8aff tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] [instance: 099112ae-569b-4853-bc47-b0b8b97d2525] Took 20.33 seconds to build instance. [ 1416.283317] env[62519]: DEBUG oslo_concurrency.lockutils [None req-0f0e68dd-2aa4-4309-84c5-3072ca2b5eec tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Lock "34d2991e-b6df-473d-8994-e45ff57ef131" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 29.601s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1416.344653] env[62519]: INFO nova.compute.manager [None req-3dc13758-4a00-4637-af78-ec1a85cd10bf tempest-ServersAdmin275Test-1657935187 tempest-ServersAdmin275Test-1657935187-project-member] [instance: c07e4d30-44bc-417b-8137-97f974aec932] Took 19.45 seconds to build instance. 
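[editor's note] The repeated "Waiting for the task ... progress is N% ... completed successfully" cycles above (PowerOnVM_Task, SearchDatastore_Task, CopyVirtualDisk_Task, DeleteDatastoreFile_Task) come from oslo.vmware's task-polling loop, which keeps re-reading task state until the vCenter task reaches a terminal state. Below is a minimal, self-contained Python sketch of that polling pattern; the FakeTask class, its info() shape, and the poll interval are hypothetical stand-ins for illustration and this is not the actual oslo.vmware implementation.

import time

# Hypothetical stand-in for a vCenter task handle; the real driver reads
# task state back via PropertyCollector.RetrievePropertiesEx calls.
class FakeTask:
    def __init__(self, progress_steps):
        self._steps = iter(progress_steps)

    def info(self):
        # Returns (state, progress); "success" is terminal.
        try:
            return ("running", next(self._steps))
        except StopIteration:
            return ("success", 100)


def wait_for_task(task, poll_interval=0.5):
    """Poll a task until it reaches a terminal state, logging progress."""
    while True:
        state, progress = task.info()
        if state == "success":
            print("Task completed successfully.")
            return
        if state == "error":
            raise RuntimeError("Task failed")
        print(f"Task progress is {progress}%.")
        time.sleep(poll_interval)


# Usage: mimics the 33% -> 89% -> completed progression seen in the log.
wait_for_task(FakeTask([0, 33, 89]), poll_interval=0.1)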
[ 1416.356981] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f891a9b5-d228-4421-8d8e-aedec0e7fdcc {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1416.364450] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65389c56-a55e-4e8a-a2eb-d70bd9fb9634 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1416.409432] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0933f05a-8f61-4e9f-af5c-7222385632db {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1416.419045] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fdc9f337-c196-4592-9b20-b703421ff6a5 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1416.426211] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2cdb3cee-efd6-4022-b37b-5545123251f0 tempest-ServerShowV247Test-1912217433 tempest-ServerShowV247Test-1912217433-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1416.439064] env[62519]: DEBUG nova.compute.provider_tree [None req-62794cdc-f128-48b3-a56b-aae18e2e0aa4 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Inventory has not changed in ProviderTree for provider: f8ca0d98-9158-4b85-ae0e-b106f966dd44 {{(pid=62519) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1416.449803] env[62519]: DEBUG oslo_vmware.api [None req-21ea1173-3fc6-4081-929e-331a7f1267ad tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Task: {'id': task-1801995, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.73568} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1416.451826] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-21ea1173-3fc6-4081-929e-331a7f1267ad tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk to [datastore1] 31f55ece-82e6-40ad-ad7c-1af645f307bf/31f55ece-82e6-40ad-ad7c-1af645f307bf.vmdk {{(pid=62519) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1416.451826] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-21ea1173-3fc6-4081-929e-331a7f1267ad tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] [instance: 31f55ece-82e6-40ad-ad7c-1af645f307bf] Extending root virtual disk to 1048576 {{(pid=62519) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1416.451948] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-77255fd4-6582-49ff-aaa9-7ba1c9bc7bc7 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1416.459086] env[62519]: DEBUG oslo_vmware.api [None req-21ea1173-3fc6-4081-929e-331a7f1267ad tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Waiting for the task: (returnval){ [ 1416.459086] env[62519]: value = "task-1801996" [ 1416.459086] env[62519]: _type = "Task" [ 1416.459086] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1416.467527] env[62519]: DEBUG oslo_vmware.api [None req-21ea1173-3fc6-4081-929e-331a7f1267ad tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Task: {'id': task-1801996, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1416.774160] env[62519]: DEBUG nova.network.neutron [None req-b63f15ff-942a-4ceb-acce-0511e4a5332d tempest-ServersWithSpecificFlavorTestJSON-2102225660 tempest-ServersWithSpecificFlavorTestJSON-2102225660-project-member] [instance: 029ee07c-705d-452b-9b14-385d69f2fbbb] Successfully updated port: d260fe42-dd87-4c6b-a292-ce14b9314974 {{(pid=62519) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1416.788214] env[62519]: DEBUG nova.compute.manager [None req-070141c4-76cd-4253-8f36-904e93010092 tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] [instance: 021c3287-5887-457e-9b3a-233308fb9b23] Starting instance... {{(pid=62519) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2441}} [ 1416.816665] env[62519]: DEBUG nova.compute.manager [req-1fc67a81-a467-4cf7-90b3-428c222deafe req-7e2c745a-253d-475a-95ca-5236bc3ff6de service nova] [instance: 31f55ece-82e6-40ad-ad7c-1af645f307bf] Received event network-changed-f5bb4ca5-90d8-4da8-a8c0-bbaf14254969 {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1416.816952] env[62519]: DEBUG nova.compute.manager [req-1fc67a81-a467-4cf7-90b3-428c222deafe req-7e2c745a-253d-475a-95ca-5236bc3ff6de service nova] [instance: 31f55ece-82e6-40ad-ad7c-1af645f307bf] Refreshing instance network info cache due to event network-changed-f5bb4ca5-90d8-4da8-a8c0-bbaf14254969. 
{{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11628}} [ 1416.817351] env[62519]: DEBUG oslo_concurrency.lockutils [req-1fc67a81-a467-4cf7-90b3-428c222deafe req-7e2c745a-253d-475a-95ca-5236bc3ff6de service nova] Acquiring lock "refresh_cache-31f55ece-82e6-40ad-ad7c-1af645f307bf" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1416.817351] env[62519]: DEBUG oslo_concurrency.lockutils [req-1fc67a81-a467-4cf7-90b3-428c222deafe req-7e2c745a-253d-475a-95ca-5236bc3ff6de service nova] Acquired lock "refresh_cache-31f55ece-82e6-40ad-ad7c-1af645f307bf" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1416.817495] env[62519]: DEBUG nova.network.neutron [req-1fc67a81-a467-4cf7-90b3-428c222deafe req-7e2c745a-253d-475a-95ca-5236bc3ff6de service nova] [instance: 31f55ece-82e6-40ad-ad7c-1af645f307bf] Refreshing network info cache for port f5bb4ca5-90d8-4da8-a8c0-bbaf14254969 {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1416.946207] env[62519]: DEBUG nova.scheduler.client.report [None req-62794cdc-f128-48b3-a56b-aae18e2e0aa4 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Inventory has not changed for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1416.971486] env[62519]: DEBUG oslo_vmware.api [None req-21ea1173-3fc6-4081-929e-331a7f1267ad tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Task: {'id': task-1801996, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.204621} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1416.971774] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-21ea1173-3fc6-4081-929e-331a7f1267ad tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] [instance: 31f55ece-82e6-40ad-ad7c-1af645f307bf] Extended root virtual disk {{(pid=62519) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1416.972602] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc67386e-0a92-4230-ac87-254d177da4b4 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1416.998498] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-21ea1173-3fc6-4081-929e-331a7f1267ad tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] [instance: 31f55ece-82e6-40ad-ad7c-1af645f307bf] Reconfiguring VM instance instance-0000000d to attach disk [datastore1] 31f55ece-82e6-40ad-ad7c-1af645f307bf/31f55ece-82e6-40ad-ad7c-1af645f307bf.vmdk or device None with type sparse {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1416.999406] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-87e536a0-44ff-4957-a3d9-93f59569ce1b {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1417.020683] env[62519]: DEBUG oslo_vmware.api [None req-21ea1173-3fc6-4081-929e-331a7f1267ad tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Waiting for the task: (returnval){ [ 1417.020683] env[62519]: value = "task-1801997" [ 1417.020683] env[62519]: _type = "Task" [ 1417.020683] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1417.029161] env[62519]: DEBUG oslo_vmware.api [None req-21ea1173-3fc6-4081-929e-331a7f1267ad tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Task: {'id': task-1801997, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1417.066860] env[62519]: DEBUG nova.network.neutron [-] [instance: 1118c1e5-1aa8-4f52-9fb9-e86531bf83d1] Updating instance_info_cache with network_info: [] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1417.279068] env[62519]: DEBUG oslo_concurrency.lockutils [None req-b63f15ff-942a-4ceb-acce-0511e4a5332d tempest-ServersWithSpecificFlavorTestJSON-2102225660 tempest-ServersWithSpecificFlavorTestJSON-2102225660-project-member] Acquiring lock "refresh_cache-029ee07c-705d-452b-9b14-385d69f2fbbb" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1417.279068] env[62519]: DEBUG oslo_concurrency.lockutils [None req-b63f15ff-942a-4ceb-acce-0511e4a5332d tempest-ServersWithSpecificFlavorTestJSON-2102225660 tempest-ServersWithSpecificFlavorTestJSON-2102225660-project-member] Acquired lock "refresh_cache-029ee07c-705d-452b-9b14-385d69f2fbbb" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1417.279068] env[62519]: DEBUG nova.network.neutron [None req-b63f15ff-942a-4ceb-acce-0511e4a5332d tempest-ServersWithSpecificFlavorTestJSON-2102225660 tempest-ServersWithSpecificFlavorTestJSON-2102225660-project-member] [instance: 029ee07c-705d-452b-9b14-385d69f2fbbb] Building network info cache for instance {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1417.328544] env[62519]: DEBUG oslo_concurrency.lockutils [None req-070141c4-76cd-4253-8f36-904e93010092 tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1417.456022] env[62519]: DEBUG oslo_concurrency.lockutils [None req-62794cdc-f128-48b3-a56b-aae18e2e0aa4 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.685s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1417.456225] env[62519]: INFO nova.compute.manager [None req-62794cdc-f128-48b3-a56b-aae18e2e0aa4 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] [instance: fe350d30-6fbd-4813-9634-ed05984fecfd] Migrating [ 1417.456700] env[62519]: DEBUG oslo_concurrency.lockutils [None req-62794cdc-f128-48b3-a56b-aae18e2e0aa4 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Acquiring lock "compute-rpcapi-router" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1417.456700] env[62519]: DEBUG oslo_concurrency.lockutils [None req-62794cdc-f128-48b3-a56b-aae18e2e0aa4 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Acquired lock "compute-rpcapi-router" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1417.457874] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Lock "compute_resources" acquired by 
"nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 16.215s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1417.539286] env[62519]: DEBUG oslo_vmware.api [None req-21ea1173-3fc6-4081-929e-331a7f1267ad tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Task: {'id': task-1801997, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1417.570025] env[62519]: INFO nova.compute.manager [-] [instance: 1118c1e5-1aa8-4f52-9fb9-e86531bf83d1] Took 2.15 seconds to deallocate network for instance. [ 1417.686033] env[62519]: DEBUG oslo_concurrency.lockutils [None req-4c3fd39d-ef1d-4593-8784-6a91635c8aff tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Lock "099112ae-569b-4853-bc47-b0b8b97d2525" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 28.939s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1417.734563] env[62519]: DEBUG nova.network.neutron [req-1fc67a81-a467-4cf7-90b3-428c222deafe req-7e2c745a-253d-475a-95ca-5236bc3ff6de service nova] [instance: 31f55ece-82e6-40ad-ad7c-1af645f307bf] Updated VIF entry in instance network info cache for port f5bb4ca5-90d8-4da8-a8c0-bbaf14254969. {{(pid=62519) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1417.734946] env[62519]: DEBUG nova.network.neutron [req-1fc67a81-a467-4cf7-90b3-428c222deafe req-7e2c745a-253d-475a-95ca-5236bc3ff6de service nova] [instance: 31f55ece-82e6-40ad-ad7c-1af645f307bf] Updating instance_info_cache with network_info: [{"id": "f5bb4ca5-90d8-4da8-a8c0-bbaf14254969", "address": "fa:16:3e:4c:e7:81", "network": {"id": "f9454e9e-4102-407f-acc0-7afee44ec876", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-684678313-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "683d60927fdf424386ffcfaa344a7af6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "11b669be-fb26-4ef8-bdb6-c77ab9d06daf", "external-id": "nsx-vlan-transportzone-633", "segmentation_id": 633, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf5bb4ca5-90", "ovs_interfaceid": "f5bb4ca5-90d8-4da8-a8c0-bbaf14254969", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1417.830384] env[62519]: DEBUG oslo_concurrency.lockutils [None req-36a89f82-8295-4590-9f5f-922976f5713d tempest-ServerDiagnosticsV248Test-635217058 tempest-ServerDiagnosticsV248Test-635217058-project-member] Acquiring lock "ceadcb5e-ee82-4441-b046-f79b973ec05e" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62519) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1417.830384] env[62519]: DEBUG oslo_concurrency.lockutils [None req-36a89f82-8295-4590-9f5f-922976f5713d tempest-ServerDiagnosticsV248Test-635217058 tempest-ServerDiagnosticsV248Test-635217058-project-member] Lock "ceadcb5e-ee82-4441-b046-f79b973ec05e" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1417.830384] env[62519]: DEBUG oslo_concurrency.lockutils [None req-36a89f82-8295-4590-9f5f-922976f5713d tempest-ServerDiagnosticsV248Test-635217058 tempest-ServerDiagnosticsV248Test-635217058-project-member] Acquiring lock "ceadcb5e-ee82-4441-b046-f79b973ec05e-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1417.830384] env[62519]: DEBUG oslo_concurrency.lockutils [None req-36a89f82-8295-4590-9f5f-922976f5713d tempest-ServerDiagnosticsV248Test-635217058 tempest-ServerDiagnosticsV248Test-635217058-project-member] Lock "ceadcb5e-ee82-4441-b046-f79b973ec05e-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1417.830736] env[62519]: DEBUG oslo_concurrency.lockutils [None req-36a89f82-8295-4590-9f5f-922976f5713d tempest-ServerDiagnosticsV248Test-635217058 tempest-ServerDiagnosticsV248Test-635217058-project-member] Lock "ceadcb5e-ee82-4441-b046-f79b973ec05e-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1417.831922] env[62519]: INFO nova.compute.manager [None req-36a89f82-8295-4590-9f5f-922976f5713d tempest-ServerDiagnosticsV248Test-635217058 tempest-ServerDiagnosticsV248Test-635217058-project-member] [instance: ceadcb5e-ee82-4441-b046-f79b973ec05e] Terminating instance [ 1417.835932] env[62519]: DEBUG nova.network.neutron [None req-b63f15ff-942a-4ceb-acce-0511e4a5332d tempest-ServersWithSpecificFlavorTestJSON-2102225660 tempest-ServersWithSpecificFlavorTestJSON-2102225660-project-member] [instance: 029ee07c-705d-452b-9b14-385d69f2fbbb] Instance cache missing network info. 
{{(pid=62519) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1417.853236] env[62519]: DEBUG oslo_concurrency.lockutils [None req-3dc13758-4a00-4637-af78-ec1a85cd10bf tempest-ServersAdmin275Test-1657935187 tempest-ServersAdmin275Test-1657935187-project-member] Lock "c07e4d30-44bc-417b-8137-97f974aec932" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 27.942s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1417.965539] env[62519]: INFO nova.compute.rpcapi [None req-62794cdc-f128-48b3-a56b-aae18e2e0aa4 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Automatically selected compute RPC version 6.4 from minimum service version 68 [ 1417.966515] env[62519]: DEBUG oslo_concurrency.lockutils [None req-62794cdc-f128-48b3-a56b-aae18e2e0aa4 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Releasing lock "compute-rpcapi-router" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1418.036253] env[62519]: DEBUG oslo_vmware.api [None req-21ea1173-3fc6-4081-929e-331a7f1267ad tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Task: {'id': task-1801997, 'name': ReconfigVM_Task, 'duration_secs': 0.559548} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1418.036733] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-21ea1173-3fc6-4081-929e-331a7f1267ad tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] [instance: 31f55ece-82e6-40ad-ad7c-1af645f307bf] Reconfigured VM instance instance-0000000d to attach disk [datastore1] 31f55ece-82e6-40ad-ad7c-1af645f307bf/31f55ece-82e6-40ad-ad7c-1af645f307bf.vmdk or device None with type sparse {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1418.038103] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-28f3bf4f-1107-44d1-87e6-8b48eda0a49e {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1418.045925] env[62519]: DEBUG oslo_vmware.api [None req-21ea1173-3fc6-4081-929e-331a7f1267ad tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Waiting for the task: (returnval){ [ 1418.045925] env[62519]: value = "task-1801998" [ 1418.045925] env[62519]: _type = "Task" [ 1418.045925] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1418.057835] env[62519]: DEBUG oslo_vmware.api [None req-21ea1173-3fc6-4081-929e-331a7f1267ad tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Task: {'id': task-1801998, 'name': Rename_Task} progress is 5%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1418.079278] env[62519]: DEBUG oslo_concurrency.lockutils [None req-e0b428ce-832c-48b3-9b41-b8eaaec78edb tempest-ServerDiagnosticsTest-1027319288 tempest-ServerDiagnosticsTest-1027319288-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1418.112302] env[62519]: DEBUG nova.network.neutron [None req-b63f15ff-942a-4ceb-acce-0511e4a5332d tempest-ServersWithSpecificFlavorTestJSON-2102225660 tempest-ServersWithSpecificFlavorTestJSON-2102225660-project-member] [instance: 029ee07c-705d-452b-9b14-385d69f2fbbb] Updating instance_info_cache with network_info: [{"id": "d260fe42-dd87-4c6b-a292-ce14b9314974", "address": "fa:16:3e:84:c6:c7", "network": {"id": "87dc972b-a043-4db2-b67a-2efe6d40d1aa", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-1993987139-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8929cfd4892c4e399062483665500dd9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6b8137fc-f23d-49b1-b19c-3123a5588f34", "external-id": "nsx-vlan-transportzone-709", "segmentation_id": 709, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd260fe42-dd", "ovs_interfaceid": "d260fe42-dd87-4c6b-a292-ce14b9314974", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1418.189648] env[62519]: DEBUG nova.compute.manager [None req-070141c4-76cd-4253-8f36-904e93010092 tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] [instance: 56790738-4759-468a-9f43-f9c2bc2de23a] Starting instance... 
{{(pid=62519) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2441}} [ 1418.238463] env[62519]: DEBUG oslo_concurrency.lockutils [req-1fc67a81-a467-4cf7-90b3-428c222deafe req-7e2c745a-253d-475a-95ca-5236bc3ff6de service nova] Releasing lock "refresh_cache-31f55ece-82e6-40ad-ad7c-1af645f307bf" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1418.340715] env[62519]: DEBUG oslo_concurrency.lockutils [None req-36a89f82-8295-4590-9f5f-922976f5713d tempest-ServerDiagnosticsV248Test-635217058 tempest-ServerDiagnosticsV248Test-635217058-project-member] Acquiring lock "refresh_cache-ceadcb5e-ee82-4441-b046-f79b973ec05e" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1418.340715] env[62519]: DEBUG oslo_concurrency.lockutils [None req-36a89f82-8295-4590-9f5f-922976f5713d tempest-ServerDiagnosticsV248Test-635217058 tempest-ServerDiagnosticsV248Test-635217058-project-member] Acquired lock "refresh_cache-ceadcb5e-ee82-4441-b046-f79b973ec05e" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1418.340715] env[62519]: DEBUG nova.network.neutron [None req-36a89f82-8295-4590-9f5f-922976f5713d tempest-ServerDiagnosticsV248Test-635217058 tempest-ServerDiagnosticsV248Test-635217058-project-member] [instance: ceadcb5e-ee82-4441-b046-f79b973ec05e] Building network info cache for instance {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1418.355817] env[62519]: DEBUG nova.compute.manager [None req-070141c4-76cd-4253-8f36-904e93010092 tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] [instance: 3aab3adb-b28b-45dd-880f-b1cfbaeeed0c] Starting instance... {{(pid=62519) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2441}} [ 1418.426256] env[62519]: DEBUG nova.compute.manager [req-d222d303-f399-47c3-8ddd-69cc4a3dfa71 req-7133b039-a0b1-4907-b35d-17767d3ab9f2 service nova] [instance: 2b391628-18a2-4606-8c59-58ba642cee50] Received event network-changed-ac832bcb-7bb4-49de-be18-a8fd0d8ee16e {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1418.426256] env[62519]: DEBUG nova.compute.manager [req-d222d303-f399-47c3-8ddd-69cc4a3dfa71 req-7133b039-a0b1-4907-b35d-17767d3ab9f2 service nova] [instance: 2b391628-18a2-4606-8c59-58ba642cee50] Refreshing instance network info cache due to event network-changed-ac832bcb-7bb4-49de-be18-a8fd0d8ee16e. 
{{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11628}} [ 1418.426256] env[62519]: DEBUG oslo_concurrency.lockutils [req-d222d303-f399-47c3-8ddd-69cc4a3dfa71 req-7133b039-a0b1-4907-b35d-17767d3ab9f2 service nova] Acquiring lock "refresh_cache-2b391628-18a2-4606-8c59-58ba642cee50" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1418.426256] env[62519]: DEBUG oslo_concurrency.lockutils [req-d222d303-f399-47c3-8ddd-69cc4a3dfa71 req-7133b039-a0b1-4907-b35d-17767d3ab9f2 service nova] Acquired lock "refresh_cache-2b391628-18a2-4606-8c59-58ba642cee50" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1418.426256] env[62519]: DEBUG nova.network.neutron [req-d222d303-f399-47c3-8ddd-69cc4a3dfa71 req-7133b039-a0b1-4907-b35d-17767d3ab9f2 service nova] [instance: 2b391628-18a2-4606-8c59-58ba642cee50] Refreshing network info cache for port ac832bcb-7bb4-49de-be18-a8fd0d8ee16e {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1418.497894] env[62519]: DEBUG nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Applying migration context for instance fe350d30-6fbd-4813-9634-ed05984fecfd as it has an incoming, in-progress migration f90a3c1a-0ac3-49d8-8f4b-3ad7b5d43ffd. Migration status is pre-migrating {{(pid=62519) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1016}} [ 1418.499659] env[62519]: INFO nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] [instance: fe350d30-6fbd-4813-9634-ed05984fecfd] Updating resource usage from migration f90a3c1a-0ac3-49d8-8f4b-3ad7b5d43ffd [ 1418.504551] env[62519]: DEBUG oslo_concurrency.lockutils [None req-62794cdc-f128-48b3-a56b-aae18e2e0aa4 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Acquiring lock "refresh_cache-fe350d30-6fbd-4813-9634-ed05984fecfd" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1418.504551] env[62519]: DEBUG oslo_concurrency.lockutils [None req-62794cdc-f128-48b3-a56b-aae18e2e0aa4 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Acquired lock "refresh_cache-fe350d30-6fbd-4813-9634-ed05984fecfd" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1418.504551] env[62519]: DEBUG nova.network.neutron [None req-62794cdc-f128-48b3-a56b-aae18e2e0aa4 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] [instance: fe350d30-6fbd-4813-9634-ed05984fecfd] Building network info cache for instance {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1418.563950] env[62519]: DEBUG oslo_vmware.api [None req-21ea1173-3fc6-4081-929e-331a7f1267ad tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Task: {'id': task-1801998, 'name': Rename_Task, 'duration_secs': 0.186558} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1418.565238] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-21ea1173-3fc6-4081-929e-331a7f1267ad tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] [instance: 31f55ece-82e6-40ad-ad7c-1af645f307bf] Powering on the VM {{(pid=62519) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1418.565238] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-0f80118f-16f0-4de5-951f-b8d0186e8680 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1418.574278] env[62519]: DEBUG oslo_vmware.api [None req-21ea1173-3fc6-4081-929e-331a7f1267ad tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Waiting for the task: (returnval){ [ 1418.574278] env[62519]: value = "task-1801999" [ 1418.574278] env[62519]: _type = "Task" [ 1418.574278] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1418.582387] env[62519]: DEBUG oslo_vmware.api [None req-21ea1173-3fc6-4081-929e-331a7f1267ad tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Task: {'id': task-1801999, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1418.617043] env[62519]: DEBUG oslo_concurrency.lockutils [None req-b63f15ff-942a-4ceb-acce-0511e4a5332d tempest-ServersWithSpecificFlavorTestJSON-2102225660 tempest-ServersWithSpecificFlavorTestJSON-2102225660-project-member] Releasing lock "refresh_cache-029ee07c-705d-452b-9b14-385d69f2fbbb" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1418.617155] env[62519]: DEBUG nova.compute.manager [None req-b63f15ff-942a-4ceb-acce-0511e4a5332d tempest-ServersWithSpecificFlavorTestJSON-2102225660 tempest-ServersWithSpecificFlavorTestJSON-2102225660-project-member] [instance: 029ee07c-705d-452b-9b14-385d69f2fbbb] Instance network_info: |[{"id": "d260fe42-dd87-4c6b-a292-ce14b9314974", "address": "fa:16:3e:84:c6:c7", "network": {"id": "87dc972b-a043-4db2-b67a-2efe6d40d1aa", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-1993987139-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8929cfd4892c4e399062483665500dd9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6b8137fc-f23d-49b1-b19c-3123a5588f34", "external-id": "nsx-vlan-transportzone-709", "segmentation_id": 709, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd260fe42-dd", "ovs_interfaceid": "d260fe42-dd87-4c6b-a292-ce14b9314974", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62519) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2004}} [ 1418.617619] env[62519]: 
DEBUG nova.virt.vmwareapi.vmops [None req-b63f15ff-942a-4ceb-acce-0511e4a5332d tempest-ServersWithSpecificFlavorTestJSON-2102225660 tempest-ServersWithSpecificFlavorTestJSON-2102225660-project-member] [instance: 029ee07c-705d-452b-9b14-385d69f2fbbb] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:84:c6:c7', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '6b8137fc-f23d-49b1-b19c-3123a5588f34', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'd260fe42-dd87-4c6b-a292-ce14b9314974', 'vif_model': 'vmxnet3'}] {{(pid=62519) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1418.625246] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-b63f15ff-942a-4ceb-acce-0511e4a5332d tempest-ServersWithSpecificFlavorTestJSON-2102225660 tempest-ServersWithSpecificFlavorTestJSON-2102225660-project-member] Creating folder: Project (8929cfd4892c4e399062483665500dd9). Parent ref: group-v373567. {{(pid=62519) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1418.625612] env[62519]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-45e3e719-1c4d-4225-b9cd-04235d392d5a {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1418.635827] env[62519]: INFO nova.virt.vmwareapi.vm_util [None req-b63f15ff-942a-4ceb-acce-0511e4a5332d tempest-ServersWithSpecificFlavorTestJSON-2102225660 tempest-ServersWithSpecificFlavorTestJSON-2102225660-project-member] Created folder: Project (8929cfd4892c4e399062483665500dd9) in parent group-v373567. [ 1418.636060] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-b63f15ff-942a-4ceb-acce-0511e4a5332d tempest-ServersWithSpecificFlavorTestJSON-2102225660 tempest-ServersWithSpecificFlavorTestJSON-2102225660-project-member] Creating folder: Instances. Parent ref: group-v373607. {{(pid=62519) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1418.636386] env[62519]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-59741ae4-18e0-4471-a918-f5fd54fd5587 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1418.644834] env[62519]: INFO nova.virt.vmwareapi.vm_util [None req-b63f15ff-942a-4ceb-acce-0511e4a5332d tempest-ServersWithSpecificFlavorTestJSON-2102225660 tempest-ServersWithSpecificFlavorTestJSON-2102225660-project-member] Created folder: Instances in parent group-v373607. [ 1418.645084] env[62519]: DEBUG oslo.service.loopingcall [None req-b63f15ff-942a-4ceb-acce-0511e4a5332d tempest-ServersWithSpecificFlavorTestJSON-2102225660 tempest-ServersWithSpecificFlavorTestJSON-2102225660-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62519) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1418.645286] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 029ee07c-705d-452b-9b14-385d69f2fbbb] Creating VM on the ESX host {{(pid=62519) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1418.645504] env[62519]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-33195aa1-993e-4b79-b07c-d1ece6a7710b {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1418.667246] env[62519]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1418.667246] env[62519]: value = "task-1802002" [ 1418.667246] env[62519]: _type = "Task" [ 1418.667246] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1418.679612] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1802002, 'name': CreateVM_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1418.720553] env[62519]: DEBUG oslo_concurrency.lockutils [None req-070141c4-76cd-4253-8f36-904e93010092 tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1418.838658] env[62519]: DEBUG oslo_concurrency.lockutils [None req-583dd43c-6768-4d96-8b92-a6ff282828ba tempest-DeleteServersAdminTestJSON-456585889 tempest-DeleteServersAdminTestJSON-456585889-project-member] Acquiring lock "40c7a9b8-d541-464a-ba87-76cfc183ae31" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1418.839058] env[62519]: DEBUG oslo_concurrency.lockutils [None req-583dd43c-6768-4d96-8b92-a6ff282828ba tempest-DeleteServersAdminTestJSON-456585889 tempest-DeleteServersAdminTestJSON-456585889-project-member] Lock "40c7a9b8-d541-464a-ba87-76cfc183ae31" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1418.861175] env[62519]: DEBUG nova.network.neutron [None req-36a89f82-8295-4590-9f5f-922976f5713d tempest-ServerDiagnosticsV248Test-635217058 tempest-ServerDiagnosticsV248Test-635217058-project-member] [instance: ceadcb5e-ee82-4441-b046-f79b973ec05e] Instance cache missing network info. 
{{(pid=62519) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1418.887773] env[62519]: DEBUG oslo_concurrency.lockutils [None req-070141c4-76cd-4253-8f36-904e93010092 tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1418.951529] env[62519]: DEBUG nova.network.neutron [None req-36a89f82-8295-4590-9f5f-922976f5713d tempest-ServerDiagnosticsV248Test-635217058 tempest-ServerDiagnosticsV248Test-635217058-project-member] [instance: ceadcb5e-ee82-4441-b046-f79b973ec05e] Updating instance_info_cache with network_info: [] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1419.035184] env[62519]: DEBUG nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Instance 42497ab5-cce9-4614-a6d1-dffbf6764d7b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62519) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1419.086433] env[62519]: DEBUG oslo_vmware.api [None req-21ea1173-3fc6-4081-929e-331a7f1267ad tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Task: {'id': task-1801999, 'name': PowerOnVM_Task} progress is 88%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1419.109621] env[62519]: DEBUG oslo_concurrency.lockutils [None req-6d590639-68af-4fac-aa10-a83b048f703c tempest-ServersTestMultiNic-797441404 tempest-ServersTestMultiNic-797441404-project-member] Acquiring lock "cfefa7c8-4986-4ad0-ac20-8784ee44a737" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1419.109888] env[62519]: DEBUG oslo_concurrency.lockutils [None req-6d590639-68af-4fac-aa10-a83b048f703c tempest-ServersTestMultiNic-797441404 tempest-ServersTestMultiNic-797441404-project-member] Lock "cfefa7c8-4986-4ad0-ac20-8784ee44a737" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1419.178899] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1802002, 'name': CreateVM_Task} progress is 99%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1419.456260] env[62519]: DEBUG oslo_concurrency.lockutils [None req-36a89f82-8295-4590-9f5f-922976f5713d tempest-ServerDiagnosticsV248Test-635217058 tempest-ServerDiagnosticsV248Test-635217058-project-member] Releasing lock "refresh_cache-ceadcb5e-ee82-4441-b046-f79b973ec05e" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1419.456260] env[62519]: DEBUG nova.compute.manager [None req-36a89f82-8295-4590-9f5f-922976f5713d tempest-ServerDiagnosticsV248Test-635217058 tempest-ServerDiagnosticsV248Test-635217058-project-member] [instance: ceadcb5e-ee82-4441-b046-f79b973ec05e] Start destroying the instance on the hypervisor. {{(pid=62519) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3166}} [ 1419.456260] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-36a89f82-8295-4590-9f5f-922976f5713d tempest-ServerDiagnosticsV248Test-635217058 tempest-ServerDiagnosticsV248Test-635217058-project-member] [instance: ceadcb5e-ee82-4441-b046-f79b973ec05e] Destroying instance {{(pid=62519) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1419.456959] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14ce3bf9-4a67-4162-8f7f-e917614b502e {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1419.473351] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-36a89f82-8295-4590-9f5f-922976f5713d tempest-ServerDiagnosticsV248Test-635217058 tempest-ServerDiagnosticsV248Test-635217058-project-member] [instance: ceadcb5e-ee82-4441-b046-f79b973ec05e] Powering off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1419.473630] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-275f7849-4648-4d11-9578-769ae5c48857 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1419.482999] env[62519]: DEBUG oslo_vmware.api [None req-36a89f82-8295-4590-9f5f-922976f5713d tempest-ServerDiagnosticsV248Test-635217058 tempest-ServerDiagnosticsV248Test-635217058-project-member] Waiting for the task: (returnval){ [ 1419.482999] env[62519]: value = "task-1802003" [ 1419.482999] env[62519]: _type = "Task" [ 1419.482999] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1419.497651] env[62519]: DEBUG oslo_vmware.api [None req-36a89f82-8295-4590-9f5f-922976f5713d tempest-ServerDiagnosticsV248Test-635217058 tempest-ServerDiagnosticsV248Test-635217058-project-member] Task: {'id': task-1802003, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1419.521476] env[62519]: DEBUG nova.network.neutron [req-d222d303-f399-47c3-8ddd-69cc4a3dfa71 req-7133b039-a0b1-4907-b35d-17767d3ab9f2 service nova] [instance: 2b391628-18a2-4606-8c59-58ba642cee50] Updated VIF entry in instance network info cache for port ac832bcb-7bb4-49de-be18-a8fd0d8ee16e. 
{{(pid=62519) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1419.521836] env[62519]: DEBUG nova.network.neutron [req-d222d303-f399-47c3-8ddd-69cc4a3dfa71 req-7133b039-a0b1-4907-b35d-17767d3ab9f2 service nova] [instance: 2b391628-18a2-4606-8c59-58ba642cee50] Updating instance_info_cache with network_info: [{"id": "ac832bcb-7bb4-49de-be18-a8fd0d8ee16e", "address": "fa:16:3e:b2:f6:9d", "network": {"id": "0413ee0f-d5e1-4c09-b98e-01bbb1481053", "bridge": "br-int", "label": "tempest-ServersV294TestFqdnHostnames-1023814213-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.135", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "998210eef9e04b64a426b66c965130a1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6685c85e-be1e-4b7b-a6cc-3e50e59b6567", "external-id": "nsx-vlan-transportzone-129", "segmentation_id": 129, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapac832bcb-7b", "ovs_interfaceid": "ac832bcb-7bb4-49de-be18-a8fd0d8ee16e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1419.541643] env[62519]: DEBUG nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Instance 49221ea3-d457-4cf5-97a9-9ae74c4e86fb has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62519) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1419.542229] env[62519]: DEBUG nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Instance 11d4a010-959f-4f53-94dc-7499007612ad actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62519) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1419.544171] env[62519]: DEBUG nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Instance 099112ae-569b-4853-bc47-b0b8b97d2525 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62519) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1419.544171] env[62519]: DEBUG nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Instance 34d2991e-b6df-473d-8994-e45ff57ef131 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62519) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1419.550841] env[62519]: INFO nova.compute.manager [None req-d788e678-2770-4a2b-8ed0-e8cc74b5f814 tempest-ServersAdmin275Test-1657935187 tempest-ServersAdmin275Test-1657935187-project-member] [instance: c07e4d30-44bc-417b-8137-97f974aec932] Rebuilding instance [ 1419.570673] env[62519]: DEBUG nova.network.neutron [None req-62794cdc-f128-48b3-a56b-aae18e2e0aa4 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] [instance: fe350d30-6fbd-4813-9634-ed05984fecfd] Updating instance_info_cache with network_info: [{"id": "6cce4a48-b732-4bd4-a39e-bbc701b31b3b", "address": "fa:16:3e:21:41:9e", "network": {"id": "b4c20e4e-b4f0-4757-9ab0-b74eef10b678", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.181", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "4069337684d348e0a1ab4eb6a1a2b14d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fe99da4f-5630-4afd-918b-b327193d8489", "external-id": "nsx-vlan-transportzone-688", "segmentation_id": 688, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6cce4a48-b7", "ovs_interfaceid": "6cce4a48-b732-4bd4-a39e-bbc701b31b3b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1419.592174] env[62519]: DEBUG oslo_vmware.api [None req-21ea1173-3fc6-4081-929e-331a7f1267ad tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Task: {'id': task-1801999, 'name': PowerOnVM_Task, 'duration_secs': 0.731598} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1419.592493] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-21ea1173-3fc6-4081-929e-331a7f1267ad tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] [instance: 31f55ece-82e6-40ad-ad7c-1af645f307bf] Powered on the VM {{(pid=62519) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1419.592768] env[62519]: INFO nova.compute.manager [None req-21ea1173-3fc6-4081-929e-331a7f1267ad tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] [instance: 31f55ece-82e6-40ad-ad7c-1af645f307bf] Took 8.93 seconds to spawn the instance on the hypervisor. 
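[editor's note] The entries above trace a vSphere power-on driven through oslo.vmware: the driver invokes VirtualMachine.PowerOnVM_Task and then wait_for_task polls the returned task ("progress is 0%" … "88%" … "completed successfully"). Below is a minimal sketch of that invoke/poll pattern, not Nova's own code; the vCenter host, credentials, and the assumption that at least one VM is visible to the session are placeholders.

```python
# Hedged sketch of the oslo.vmware task pattern reflected in the log lines above.
from oslo_vmware import api, vim_util

# Placeholder endpoint and credentials (assumptions, not values from the log).
session = api.VMwareAPISession(
    'vc1.example.test',              # vCenter host
    'administrator@vsphere.local',   # user
    'secret',                        # password
    10,                              # api_retry_count
    0.5)                             # task_poll_interval (seconds)

# Retrieve some VirtualMachine managed object references via the PropertyCollector,
# the same RetrievePropertiesEx calls that appear throughout the log.
result = session.invoke_api(vim_util, 'get_objects',
                            session.vim, 'VirtualMachine', 100)
vm_ref = result.objects[0].obj   # assumes at least one VM was returned

# Start the long-running PowerOnVM_Task and poll it until it finishes,
# which is what produces the wait_for_task / _poll_task progress lines.
task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
task_info = session.wait_for_task(task)
print(task_info.state)           # 'success' once the power-on completes
```

wait_for_task polls the task at the configured task_poll_interval, which is why each task in the log is followed by a series of progress entries before the "completed successfully" line.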
[ 1419.595033] env[62519]: DEBUG nova.compute.manager [None req-21ea1173-3fc6-4081-929e-331a7f1267ad tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] [instance: 31f55ece-82e6-40ad-ad7c-1af645f307bf] Checking state {{(pid=62519) _get_power_state /opt/stack/nova/nova/compute/manager.py:1799}} [ 1419.595033] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23bf0768-ea68-4650-9990-e1122917d7e1 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1419.616623] env[62519]: DEBUG nova.compute.manager [None req-d788e678-2770-4a2b-8ed0-e8cc74b5f814 tempest-ServersAdmin275Test-1657935187 tempest-ServersAdmin275Test-1657935187-project-member] [instance: c07e4d30-44bc-417b-8137-97f974aec932] Checking state {{(pid=62519) _get_power_state /opt/stack/nova/nova/compute/manager.py:1799}} [ 1419.617537] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2f1933a-6160-4770-a0a3-bc5290b20e86 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1419.681705] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1802002, 'name': CreateVM_Task, 'duration_secs': 0.517259} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1419.682523] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 029ee07c-705d-452b-9b14-385d69f2fbbb] Created VM on the ESX host {{(pid=62519) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1419.684311] env[62519]: DEBUG oslo_concurrency.lockutils [None req-b63f15ff-942a-4ceb-acce-0511e4a5332d tempest-ServersWithSpecificFlavorTestJSON-2102225660 tempest-ServersWithSpecificFlavorTestJSON-2102225660-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1419.684311] env[62519]: DEBUG oslo_concurrency.lockutils [None req-b63f15ff-942a-4ceb-acce-0511e4a5332d tempest-ServersWithSpecificFlavorTestJSON-2102225660 tempest-ServersWithSpecificFlavorTestJSON-2102225660-project-member] Acquired lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1419.685123] env[62519]: DEBUG oslo_concurrency.lockutils [None req-b63f15ff-942a-4ceb-acce-0511e4a5332d tempest-ServersWithSpecificFlavorTestJSON-2102225660 tempest-ServersWithSpecificFlavorTestJSON-2102225660-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1419.685867] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-31387c27-5392-4e5a-9147-663e46c13017 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1419.692586] env[62519]: DEBUG oslo_vmware.api [None req-b63f15ff-942a-4ceb-acce-0511e4a5332d tempest-ServersWithSpecificFlavorTestJSON-2102225660 tempest-ServersWithSpecificFlavorTestJSON-2102225660-project-member] Waiting for the task: (returnval){ [ 1419.692586] env[62519]: value = 
"session[520dffeb-48b6-6e75-74d3-efedab77eb43]5293e1f8-e328-b075-fa9e-afdb246e05c7" [ 1419.692586] env[62519]: _type = "Task" [ 1419.692586] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1419.703596] env[62519]: DEBUG oslo_vmware.api [None req-b63f15ff-942a-4ceb-acce-0511e4a5332d tempest-ServersWithSpecificFlavorTestJSON-2102225660 tempest-ServersWithSpecificFlavorTestJSON-2102225660-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]5293e1f8-e328-b075-fa9e-afdb246e05c7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1420.000836] env[62519]: DEBUG oslo_vmware.api [None req-36a89f82-8295-4590-9f5f-922976f5713d tempest-ServerDiagnosticsV248Test-635217058 tempest-ServerDiagnosticsV248Test-635217058-project-member] Task: {'id': task-1802003, 'name': PowerOffVM_Task, 'duration_secs': 0.171364} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1420.001204] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-36a89f82-8295-4590-9f5f-922976f5713d tempest-ServerDiagnosticsV248Test-635217058 tempest-ServerDiagnosticsV248Test-635217058-project-member] [instance: ceadcb5e-ee82-4441-b046-f79b973ec05e] Powered off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1420.001204] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-36a89f82-8295-4590-9f5f-922976f5713d tempest-ServerDiagnosticsV248Test-635217058 tempest-ServerDiagnosticsV248Test-635217058-project-member] [instance: ceadcb5e-ee82-4441-b046-f79b973ec05e] Unregistering the VM {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1420.001426] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-97e5d0d1-bef9-439b-930a-bcb1adadd038 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1420.028055] env[62519]: DEBUG oslo_concurrency.lockutils [req-d222d303-f399-47c3-8ddd-69cc4a3dfa71 req-7133b039-a0b1-4907-b35d-17767d3ab9f2 service nova] Releasing lock "refresh_cache-2b391628-18a2-4606-8c59-58ba642cee50" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1420.028055] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-36a89f82-8295-4590-9f5f-922976f5713d tempest-ServerDiagnosticsV248Test-635217058 tempest-ServerDiagnosticsV248Test-635217058-project-member] [instance: ceadcb5e-ee82-4441-b046-f79b973ec05e] Unregistered the VM {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1420.028055] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-36a89f82-8295-4590-9f5f-922976f5713d tempest-ServerDiagnosticsV248Test-635217058 tempest-ServerDiagnosticsV248Test-635217058-project-member] [instance: ceadcb5e-ee82-4441-b046-f79b973ec05e] Deleting contents of the VM from datastore datastore1 {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1420.028055] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-36a89f82-8295-4590-9f5f-922976f5713d tempest-ServerDiagnosticsV248Test-635217058 tempest-ServerDiagnosticsV248Test-635217058-project-member] Deleting the datastore file [datastore1] ceadcb5e-ee82-4441-b046-f79b973ec05e {{(pid=62519) file_delete 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1420.029276] env[62519]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e7f3abc8-8fbd-4c9f-82c7-39a6562f2150 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1420.038938] env[62519]: DEBUG oslo_vmware.api [None req-36a89f82-8295-4590-9f5f-922976f5713d tempest-ServerDiagnosticsV248Test-635217058 tempest-ServerDiagnosticsV248Test-635217058-project-member] Waiting for the task: (returnval){ [ 1420.038938] env[62519]: value = "task-1802005" [ 1420.038938] env[62519]: _type = "Task" [ 1420.038938] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1420.052683] env[62519]: DEBUG nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Instance 4c336ad1-8ce6-4f89-843e-0baae0d0dbda has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62519) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1420.052777] env[62519]: DEBUG nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Instance ceadcb5e-ee82-4441-b046-f79b973ec05e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62519) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1420.054117] env[62519]: DEBUG oslo_vmware.api [None req-36a89f82-8295-4590-9f5f-922976f5713d tempest-ServerDiagnosticsV248Test-635217058 tempest-ServerDiagnosticsV248Test-635217058-project-member] Task: {'id': task-1802005, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1420.073892] env[62519]: DEBUG oslo_concurrency.lockutils [None req-62794cdc-f128-48b3-a56b-aae18e2e0aa4 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Releasing lock "refresh_cache-fe350d30-6fbd-4813-9634-ed05984fecfd" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1420.115887] env[62519]: INFO nova.compute.manager [None req-21ea1173-3fc6-4081-929e-331a7f1267ad tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] [instance: 31f55ece-82e6-40ad-ad7c-1af645f307bf] Took 23.13 seconds to build instance. 
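[editor's note] The 'Acquiring lock … / Lock … acquired … waited / "released" … held' entries throughout this section come from oslo.concurrency's lockutils, used both for shared names such as "compute_resources" and for per-instance build locks. The sketch below shows the two usage patterns that produce that logging, with illustrative function and lock names only (an assumption, not Nova's implementation).

```python
# Hedged sketch of the oslo.concurrency locking patterns behind the
# lockutils DEBUG lines in this log.
import time

from oslo_concurrency import lockutils


@lockutils.synchronized('compute_resources')
def claim_resources(instance_uuid):
    # The synchronized decorator serializes callers on the named lock and
    # logs the acquire/release together with the waited/held durations.
    time.sleep(0.1)  # stand-in for resource-tracker bookkeeping
    return instance_uuid


def build_instance(instance_uuid):
    # The lock() context manager gives the same serialization inline and
    # logs the Acquiring/Acquired/Releasing lines seen for the
    # per-instance build locks.
    with lockutils.lock(instance_uuid):
        pass  # build-and-run work would happen here
```

Holding such a lock for the whole build is what yields lines like the one above reporting the instance lock held for 26.472 seconds after a 23-second build.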
[ 1420.163190] env[62519]: DEBUG oslo_concurrency.lockutils [None req-a1c40e59-33e7-4dc4-932b-b210029267f9 tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Acquiring lock "f59a31e4-7fb9-4de7-b35f-da811a305f85" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1420.163190] env[62519]: DEBUG oslo_concurrency.lockutils [None req-a1c40e59-33e7-4dc4-932b-b210029267f9 tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Lock "f59a31e4-7fb9-4de7-b35f-da811a305f85" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1420.189686] env[62519]: DEBUG nova.compute.manager [req-0c518c0d-bf95-4a0c-ad33-473500dce7dc req-31115c58-96d3-4e2c-84ee-50ec1821b951 service nova] [instance: 11d4a010-959f-4f53-94dc-7499007612ad] Received event network-changed-ca1a3bbf-3f10-4a96-a67d-b77464ab25e7 {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1420.189742] env[62519]: DEBUG nova.compute.manager [req-0c518c0d-bf95-4a0c-ad33-473500dce7dc req-31115c58-96d3-4e2c-84ee-50ec1821b951 service nova] [instance: 11d4a010-959f-4f53-94dc-7499007612ad] Refreshing instance network info cache due to event network-changed-ca1a3bbf-3f10-4a96-a67d-b77464ab25e7. {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11628}} [ 1420.190095] env[62519]: DEBUG oslo_concurrency.lockutils [req-0c518c0d-bf95-4a0c-ad33-473500dce7dc req-31115c58-96d3-4e2c-84ee-50ec1821b951 service nova] Acquiring lock "refresh_cache-11d4a010-959f-4f53-94dc-7499007612ad" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1420.190179] env[62519]: DEBUG oslo_concurrency.lockutils [req-0c518c0d-bf95-4a0c-ad33-473500dce7dc req-31115c58-96d3-4e2c-84ee-50ec1821b951 service nova] Acquired lock "refresh_cache-11d4a010-959f-4f53-94dc-7499007612ad" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1420.190278] env[62519]: DEBUG nova.network.neutron [req-0c518c0d-bf95-4a0c-ad33-473500dce7dc req-31115c58-96d3-4e2c-84ee-50ec1821b951 service nova] [instance: 11d4a010-959f-4f53-94dc-7499007612ad] Refreshing network info cache for port ca1a3bbf-3f10-4a96-a67d-b77464ab25e7 {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1420.207480] env[62519]: DEBUG oslo_vmware.api [None req-b63f15ff-942a-4ceb-acce-0511e4a5332d tempest-ServersWithSpecificFlavorTestJSON-2102225660 tempest-ServersWithSpecificFlavorTestJSON-2102225660-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]5293e1f8-e328-b075-fa9e-afdb246e05c7, 'name': SearchDatastore_Task, 'duration_secs': 0.021955} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1420.207877] env[62519]: DEBUG oslo_concurrency.lockutils [None req-b63f15ff-942a-4ceb-acce-0511e4a5332d tempest-ServersWithSpecificFlavorTestJSON-2102225660 tempest-ServersWithSpecificFlavorTestJSON-2102225660-project-member] Releasing lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1420.208089] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-b63f15ff-942a-4ceb-acce-0511e4a5332d tempest-ServersWithSpecificFlavorTestJSON-2102225660 tempest-ServersWithSpecificFlavorTestJSON-2102225660-project-member] [instance: 029ee07c-705d-452b-9b14-385d69f2fbbb] Processing image 15793716-f1d9-4a86-9030-717adf498693 {{(pid=62519) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1420.208354] env[62519]: DEBUG oslo_concurrency.lockutils [None req-b63f15ff-942a-4ceb-acce-0511e4a5332d tempest-ServersWithSpecificFlavorTestJSON-2102225660 tempest-ServersWithSpecificFlavorTestJSON-2102225660-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1420.208691] env[62519]: DEBUG oslo_concurrency.lockutils [None req-b63f15ff-942a-4ceb-acce-0511e4a5332d tempest-ServersWithSpecificFlavorTestJSON-2102225660 tempest-ServersWithSpecificFlavorTestJSON-2102225660-project-member] Acquired lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1420.208691] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-b63f15ff-942a-4ceb-acce-0511e4a5332d tempest-ServersWithSpecificFlavorTestJSON-2102225660 tempest-ServersWithSpecificFlavorTestJSON-2102225660-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62519) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1420.213248] env[62519]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e4b6ce93-99b6-4a1c-a8b5-e6b94c7ff4b0 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1420.318590] env[62519]: DEBUG oslo_concurrency.lockutils [None req-d532afdb-73c2-4922-91b8-52d65f7c0d83 tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] Acquiring lock "1d4b14d3-8832-457e-aaed-462236555f57" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1420.318845] env[62519]: DEBUG oslo_concurrency.lockutils [None req-d532afdb-73c2-4922-91b8-52d65f7c0d83 tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] Lock "1d4b14d3-8832-457e-aaed-462236555f57" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1420.550455] env[62519]: DEBUG oslo_vmware.api [None 
req-36a89f82-8295-4590-9f5f-922976f5713d tempest-ServerDiagnosticsV248Test-635217058 tempest-ServerDiagnosticsV248Test-635217058-project-member] Task: {'id': task-1802005, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1420.561830] env[62519]: DEBUG nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Instance 91902e7f-8c15-447b-a3a8-04433434b1b6 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62519) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1420.561993] env[62519]: DEBUG nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Instance 31f55ece-82e6-40ad-ad7c-1af645f307bf actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62519) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1420.612777] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-b63f15ff-942a-4ceb-acce-0511e4a5332d tempest-ServersWithSpecificFlavorTestJSON-2102225660 tempest-ServersWithSpecificFlavorTestJSON-2102225660-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62519) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1420.612777] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-b63f15ff-942a-4ceb-acce-0511e4a5332d tempest-ServersWithSpecificFlavorTestJSON-2102225660 tempest-ServersWithSpecificFlavorTestJSON-2102225660-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62519) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1420.612777] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2cd01673-a01c-4ebb-ba40-f5c050bb6e90 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1420.621711] env[62519]: DEBUG oslo_vmware.api [None req-b63f15ff-942a-4ceb-acce-0511e4a5332d tempest-ServersWithSpecificFlavorTestJSON-2102225660 tempest-ServersWithSpecificFlavorTestJSON-2102225660-project-member] Waiting for the task: (returnval){ [ 1420.621711] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]522ca1d2-ef58-0ba1-e5b9-d2d009911995" [ 1420.621711] env[62519]: _type = "Task" [ 1420.621711] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1420.634094] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-d788e678-2770-4a2b-8ed0-e8cc74b5f814 tempest-ServersAdmin275Test-1657935187 tempest-ServersAdmin275Test-1657935187-project-member] [instance: c07e4d30-44bc-417b-8137-97f974aec932] Powering off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1420.634602] env[62519]: DEBUG oslo_vmware.api [None req-b63f15ff-942a-4ceb-acce-0511e4a5332d tempest-ServersWithSpecificFlavorTestJSON-2102225660 tempest-ServersWithSpecificFlavorTestJSON-2102225660-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]522ca1d2-ef58-0ba1-e5b9-d2d009911995, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1420.634957] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-5524a8ce-d2d2-41b6-a8ec-1be922da7938 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1420.643743] env[62519]: DEBUG oslo_vmware.api [None req-d788e678-2770-4a2b-8ed0-e8cc74b5f814 tempest-ServersAdmin275Test-1657935187 tempest-ServersAdmin275Test-1657935187-project-member] Waiting for the task: (returnval){ [ 1420.643743] env[62519]: value = "task-1802006" [ 1420.643743] env[62519]: _type = "Task" [ 1420.643743] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1420.655611] env[62519]: DEBUG oslo_vmware.api [None req-d788e678-2770-4a2b-8ed0-e8cc74b5f814 tempest-ServersAdmin275Test-1657935187 tempest-ServersAdmin275Test-1657935187-project-member] Task: {'id': task-1802006, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1421.033942] env[62519]: DEBUG nova.network.neutron [req-0c518c0d-bf95-4a0c-ad33-473500dce7dc req-31115c58-96d3-4e2c-84ee-50ec1821b951 service nova] [instance: 11d4a010-959f-4f53-94dc-7499007612ad] Updated VIF entry in instance network info cache for port ca1a3bbf-3f10-4a96-a67d-b77464ab25e7. {{(pid=62519) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1421.033942] env[62519]: DEBUG nova.network.neutron [req-0c518c0d-bf95-4a0c-ad33-473500dce7dc req-31115c58-96d3-4e2c-84ee-50ec1821b951 service nova] [instance: 11d4a010-959f-4f53-94dc-7499007612ad] Updating instance_info_cache with network_info: [{"id": "ca1a3bbf-3f10-4a96-a67d-b77464ab25e7", "address": "fa:16:3e:cc:ef:21", "network": {"id": "ddacabe4-9723-4a0b-9642-4c3ca54cd05f", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1355796152-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.215", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "549cc35f5ff249f6bf22c67872883db0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bc1e16db-ad3b-4b7f-ab64-4609c87abac0", "external-id": "nsx-vlan-transportzone-500", "segmentation_id": 500, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapca1a3bbf-3f", "ovs_interfaceid": "ca1a3bbf-3f10-4a96-a67d-b77464ab25e7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1421.053265] env[62519]: DEBUG oslo_vmware.api [None req-36a89f82-8295-4590-9f5f-922976f5713d tempest-ServerDiagnosticsV248Test-635217058 tempest-ServerDiagnosticsV248Test-635217058-project-member] Task: {'id': task-1802005, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.610938} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1421.053265] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-36a89f82-8295-4590-9f5f-922976f5713d tempest-ServerDiagnosticsV248Test-635217058 tempest-ServerDiagnosticsV248Test-635217058-project-member] Deleted the datastore file {{(pid=62519) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1421.053265] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-36a89f82-8295-4590-9f5f-922976f5713d tempest-ServerDiagnosticsV248Test-635217058 tempest-ServerDiagnosticsV248Test-635217058-project-member] [instance: ceadcb5e-ee82-4441-b046-f79b973ec05e] Deleted contents of the VM from datastore datastore1 {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1421.053265] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-36a89f82-8295-4590-9f5f-922976f5713d tempest-ServerDiagnosticsV248Test-635217058 tempest-ServerDiagnosticsV248Test-635217058-project-member] [instance: ceadcb5e-ee82-4441-b046-f79b973ec05e] Instance destroyed {{(pid=62519) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1421.053265] env[62519]: INFO nova.compute.manager [None req-36a89f82-8295-4590-9f5f-922976f5713d tempest-ServerDiagnosticsV248Test-635217058 tempest-ServerDiagnosticsV248Test-635217058-project-member] [instance: ceadcb5e-ee82-4441-b046-f79b973ec05e] Took 1.60 seconds to destroy the instance on the hypervisor. [ 1421.053454] env[62519]: DEBUG oslo.service.loopingcall [None req-36a89f82-8295-4590-9f5f-922976f5713d tempest-ServerDiagnosticsV248Test-635217058 tempest-ServerDiagnosticsV248Test-635217058-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62519) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1421.053454] env[62519]: DEBUG nova.compute.manager [-] [instance: ceadcb5e-ee82-4441-b046-f79b973ec05e] Deallocating network for instance {{(pid=62519) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2297}} [ 1421.053733] env[62519]: DEBUG nova.network.neutron [-] [instance: ceadcb5e-ee82-4441-b046-f79b973ec05e] deallocate_for_instance() {{(pid=62519) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1421.068285] env[62519]: DEBUG nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Instance 1462d213-3f9a-4c60-8056-0b68f20a4939 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62519) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1421.068285] env[62519]: DEBUG nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Instance c07e4d30-44bc-417b-8137-97f974aec932 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62519) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1421.068285] env[62519]: DEBUG nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Instance 029ee07c-705d-452b-9b14-385d69f2fbbb actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62519) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1421.071424] env[62519]: DEBUG nova.network.neutron [-] [instance: ceadcb5e-ee82-4441-b046-f79b973ec05e] Instance cache missing network info. {{(pid=62519) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1421.144259] env[62519]: DEBUG oslo_vmware.api [None req-b63f15ff-942a-4ceb-acce-0511e4a5332d tempest-ServersWithSpecificFlavorTestJSON-2102225660 tempest-ServersWithSpecificFlavorTestJSON-2102225660-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]522ca1d2-ef58-0ba1-e5b9-d2d009911995, 'name': SearchDatastore_Task, 'duration_secs': 0.017561} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1421.148795] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-32506bbc-2053-44be-80ad-cbd6be038f76 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1421.160269] env[62519]: DEBUG oslo_vmware.api [None req-b63f15ff-942a-4ceb-acce-0511e4a5332d tempest-ServersWithSpecificFlavorTestJSON-2102225660 tempest-ServersWithSpecificFlavorTestJSON-2102225660-project-member] Waiting for the task: (returnval){ [ 1421.160269] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]52618dd1-e232-29a2-ea6e-7cbc0ce5d2f0" [ 1421.160269] env[62519]: _type = "Task" [ 1421.160269] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1421.164309] env[62519]: DEBUG oslo_vmware.api [None req-d788e678-2770-4a2b-8ed0-e8cc74b5f814 tempest-ServersAdmin275Test-1657935187 tempest-ServersAdmin275Test-1657935187-project-member] Task: {'id': task-1802006, 'name': PowerOffVM_Task, 'duration_secs': 0.186384} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1421.168128] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-d788e678-2770-4a2b-8ed0-e8cc74b5f814 tempest-ServersAdmin275Test-1657935187 tempest-ServersAdmin275Test-1657935187-project-member] [instance: c07e4d30-44bc-417b-8137-97f974aec932] Powered off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1421.168397] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-d788e678-2770-4a2b-8ed0-e8cc74b5f814 tempest-ServersAdmin275Test-1657935187 tempest-ServersAdmin275Test-1657935187-project-member] [instance: c07e4d30-44bc-417b-8137-97f974aec932] Destroying instance {{(pid=62519) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1421.169771] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b53d4a1-b02e-4453-90b9-46a693a34a67 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1421.178787] env[62519]: DEBUG oslo_vmware.api [None req-b63f15ff-942a-4ceb-acce-0511e4a5332d tempest-ServersWithSpecificFlavorTestJSON-2102225660 tempest-ServersWithSpecificFlavorTestJSON-2102225660-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52618dd1-e232-29a2-ea6e-7cbc0ce5d2f0, 'name': SearchDatastore_Task, 'duration_secs': 0.014129} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1421.181013] env[62519]: DEBUG oslo_concurrency.lockutils [None req-b63f15ff-942a-4ceb-acce-0511e4a5332d tempest-ServersWithSpecificFlavorTestJSON-2102225660 tempest-ServersWithSpecificFlavorTestJSON-2102225660-project-member] Releasing lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1421.181308] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-b63f15ff-942a-4ceb-acce-0511e4a5332d tempest-ServersWithSpecificFlavorTestJSON-2102225660 tempest-ServersWithSpecificFlavorTestJSON-2102225660-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk to [datastore1] 029ee07c-705d-452b-9b14-385d69f2fbbb/029ee07c-705d-452b-9b14-385d69f2fbbb.vmdk {{(pid=62519) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1421.181604] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-d788e678-2770-4a2b-8ed0-e8cc74b5f814 tempest-ServersAdmin275Test-1657935187 tempest-ServersAdmin275Test-1657935187-project-member] [instance: c07e4d30-44bc-417b-8137-97f974aec932] Unregistering the VM {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1421.181825] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-473df2ab-b39d-46b4-bc54-754f339b9bcd {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1421.183817] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-a308b8b1-b3a2-4d82-9b70-102e225b9356 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1421.190973] env[62519]: DEBUG oslo_vmware.api [None 
req-b63f15ff-942a-4ceb-acce-0511e4a5332d tempest-ServersWithSpecificFlavorTestJSON-2102225660 tempest-ServersWithSpecificFlavorTestJSON-2102225660-project-member] Waiting for the task: (returnval){ [ 1421.190973] env[62519]: value = "task-1802007" [ 1421.190973] env[62519]: _type = "Task" [ 1421.190973] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1421.200470] env[62519]: DEBUG oslo_vmware.api [None req-b63f15ff-942a-4ceb-acce-0511e4a5332d tempest-ServersWithSpecificFlavorTestJSON-2102225660 tempest-ServersWithSpecificFlavorTestJSON-2102225660-project-member] Task: {'id': task-1802007, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1421.208348] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-d788e678-2770-4a2b-8ed0-e8cc74b5f814 tempest-ServersAdmin275Test-1657935187 tempest-ServersAdmin275Test-1657935187-project-member] [instance: c07e4d30-44bc-417b-8137-97f974aec932] Unregistered the VM {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1421.208348] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-d788e678-2770-4a2b-8ed0-e8cc74b5f814 tempest-ServersAdmin275Test-1657935187 tempest-ServersAdmin275Test-1657935187-project-member] [instance: c07e4d30-44bc-417b-8137-97f974aec932] Deleting contents of the VM from datastore datastore1 {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1421.208348] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-d788e678-2770-4a2b-8ed0-e8cc74b5f814 tempest-ServersAdmin275Test-1657935187 tempest-ServersAdmin275Test-1657935187-project-member] Deleting the datastore file [datastore1] c07e4d30-44bc-417b-8137-97f974aec932 {{(pid=62519) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1421.208614] env[62519]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-31bdefbe-de0a-43a8-9389-ae75f82ce7e8 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1421.215760] env[62519]: DEBUG oslo_vmware.api [None req-d788e678-2770-4a2b-8ed0-e8cc74b5f814 tempest-ServersAdmin275Test-1657935187 tempest-ServersAdmin275Test-1657935187-project-member] Waiting for the task: (returnval){ [ 1421.215760] env[62519]: value = "task-1802009" [ 1421.215760] env[62519]: _type = "Task" [ 1421.215760] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1421.229934] env[62519]: DEBUG oslo_vmware.api [None req-d788e678-2770-4a2b-8ed0-e8cc74b5f814 tempest-ServersAdmin275Test-1657935187 tempest-ServersAdmin275Test-1657935187-project-member] Task: {'id': task-1802009, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1421.239273] env[62519]: DEBUG oslo_concurrency.lockutils [None req-1d8c6fef-464e-42e4-b682-66f718a2e8f1 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Acquiring lock "51eaac08-75fd-49f9-9b1a-cc2a2d799634" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1421.239533] env[62519]: DEBUG oslo_concurrency.lockutils [None req-1d8c6fef-464e-42e4-b682-66f718a2e8f1 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Lock "51eaac08-75fd-49f9-9b1a-cc2a2d799634" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1421.539308] env[62519]: DEBUG oslo_concurrency.lockutils [req-0c518c0d-bf95-4a0c-ad33-473500dce7dc req-31115c58-96d3-4e2c-84ee-50ec1821b951 service nova] Releasing lock "refresh_cache-11d4a010-959f-4f53-94dc-7499007612ad" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1421.539602] env[62519]: DEBUG nova.compute.manager [req-0c518c0d-bf95-4a0c-ad33-473500dce7dc req-31115c58-96d3-4e2c-84ee-50ec1821b951 service nova] [instance: 029ee07c-705d-452b-9b14-385d69f2fbbb] Received event network-vif-plugged-d260fe42-dd87-4c6b-a292-ce14b9314974 {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1421.539795] env[62519]: DEBUG oslo_concurrency.lockutils [req-0c518c0d-bf95-4a0c-ad33-473500dce7dc req-31115c58-96d3-4e2c-84ee-50ec1821b951 service nova] Acquiring lock "029ee07c-705d-452b-9b14-385d69f2fbbb-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1421.540060] env[62519]: DEBUG oslo_concurrency.lockutils [req-0c518c0d-bf95-4a0c-ad33-473500dce7dc req-31115c58-96d3-4e2c-84ee-50ec1821b951 service nova] Lock "029ee07c-705d-452b-9b14-385d69f2fbbb-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1421.540270] env[62519]: DEBUG oslo_concurrency.lockutils [req-0c518c0d-bf95-4a0c-ad33-473500dce7dc req-31115c58-96d3-4e2c-84ee-50ec1821b951 service nova] Lock "029ee07c-705d-452b-9b14-385d69f2fbbb-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1421.540479] env[62519]: DEBUG nova.compute.manager [req-0c518c0d-bf95-4a0c-ad33-473500dce7dc req-31115c58-96d3-4e2c-84ee-50ec1821b951 service nova] [instance: 029ee07c-705d-452b-9b14-385d69f2fbbb] No waiting events found dispatching network-vif-plugged-d260fe42-dd87-4c6b-a292-ce14b9314974 {{(pid=62519) pop_instance_event /opt/stack/nova/nova/compute/manager.py:323}} [ 1421.540661] env[62519]: WARNING nova.compute.manager [req-0c518c0d-bf95-4a0c-ad33-473500dce7dc req-31115c58-96d3-4e2c-84ee-50ec1821b951 service nova] [instance: 029ee07c-705d-452b-9b14-385d69f2fbbb] Received 
unexpected event network-vif-plugged-d260fe42-dd87-4c6b-a292-ce14b9314974 for instance with vm_state building and task_state spawning. [ 1421.540823] env[62519]: DEBUG nova.compute.manager [req-0c518c0d-bf95-4a0c-ad33-473500dce7dc req-31115c58-96d3-4e2c-84ee-50ec1821b951 service nova] [instance: 1118c1e5-1aa8-4f52-9fb9-e86531bf83d1] Received event network-vif-deleted-03319779-1123-4b6b-899b-834771303eff {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1421.541033] env[62519]: DEBUG nova.compute.manager [req-0c518c0d-bf95-4a0c-ad33-473500dce7dc req-31115c58-96d3-4e2c-84ee-50ec1821b951 service nova] [instance: 029ee07c-705d-452b-9b14-385d69f2fbbb] Received event network-changed-d260fe42-dd87-4c6b-a292-ce14b9314974 {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1421.543202] env[62519]: DEBUG nova.compute.manager [req-0c518c0d-bf95-4a0c-ad33-473500dce7dc req-31115c58-96d3-4e2c-84ee-50ec1821b951 service nova] [instance: 029ee07c-705d-452b-9b14-385d69f2fbbb] Refreshing instance network info cache due to event network-changed-d260fe42-dd87-4c6b-a292-ce14b9314974. {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11628}} [ 1421.543202] env[62519]: DEBUG oslo_concurrency.lockutils [req-0c518c0d-bf95-4a0c-ad33-473500dce7dc req-31115c58-96d3-4e2c-84ee-50ec1821b951 service nova] Acquiring lock "refresh_cache-029ee07c-705d-452b-9b14-385d69f2fbbb" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1421.543202] env[62519]: DEBUG oslo_concurrency.lockutils [req-0c518c0d-bf95-4a0c-ad33-473500dce7dc req-31115c58-96d3-4e2c-84ee-50ec1821b951 service nova] Acquired lock "refresh_cache-029ee07c-705d-452b-9b14-385d69f2fbbb" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1421.543202] env[62519]: DEBUG nova.network.neutron [req-0c518c0d-bf95-4a0c-ad33-473500dce7dc req-31115c58-96d3-4e2c-84ee-50ec1821b951 service nova] [instance: 029ee07c-705d-452b-9b14-385d69f2fbbb] Refreshing network info cache for port d260fe42-dd87-4c6b-a292-ce14b9314974 {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1421.571966] env[62519]: DEBUG nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Instance c61c893f-826b-4874-b253-de6fbffa9e5a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62519) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1421.574611] env[62519]: DEBUG nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Instance 2b391628-18a2-4606-8c59-58ba642cee50 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62519) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1421.575967] env[62519]: DEBUG nova.network.neutron [-] [instance: ceadcb5e-ee82-4441-b046-f79b973ec05e] Updating instance_info_cache with network_info: [] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1421.597934] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb572bdc-4f86-4a7e-af2b-515a6903aeac {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1421.621822] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-62794cdc-f128-48b3-a56b-aae18e2e0aa4 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] [instance: fe350d30-6fbd-4813-9634-ed05984fecfd] Updating instance 'fe350d30-6fbd-4813-9634-ed05984fecfd' progress to 0 {{(pid=62519) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1421.636033] env[62519]: DEBUG oslo_concurrency.lockutils [None req-21ea1173-3fc6-4081-929e-331a7f1267ad tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Lock "31f55ece-82e6-40ad-ad7c-1af645f307bf" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 26.472s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1421.705931] env[62519]: DEBUG oslo_vmware.api [None req-b63f15ff-942a-4ceb-acce-0511e4a5332d tempest-ServersWithSpecificFlavorTestJSON-2102225660 tempest-ServersWithSpecificFlavorTestJSON-2102225660-project-member] Task: {'id': task-1802007, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1421.727667] env[62519]: DEBUG oslo_vmware.api [None req-d788e678-2770-4a2b-8ed0-e8cc74b5f814 tempest-ServersAdmin275Test-1657935187 tempest-ServersAdmin275Test-1657935187-project-member] Task: {'id': task-1802009, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.299836} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1421.729230] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-d788e678-2770-4a2b-8ed0-e8cc74b5f814 tempest-ServersAdmin275Test-1657935187 tempest-ServersAdmin275Test-1657935187-project-member] Deleted the datastore file {{(pid=62519) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1421.729438] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-d788e678-2770-4a2b-8ed0-e8cc74b5f814 tempest-ServersAdmin275Test-1657935187 tempest-ServersAdmin275Test-1657935187-project-member] [instance: c07e4d30-44bc-417b-8137-97f974aec932] Deleted contents of the VM from datastore datastore1 {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1421.729609] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-d788e678-2770-4a2b-8ed0-e8cc74b5f814 tempest-ServersAdmin275Test-1657935187 tempest-ServersAdmin275Test-1657935187-project-member] [instance: c07e4d30-44bc-417b-8137-97f974aec932] Instance destroyed {{(pid=62519) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1421.747111] env[62519]: DEBUG oslo_concurrency.lockutils [None req-0c4994c0-96a4-4874-90b8-74f17da23551 tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] Acquiring lock "2b68e95a-1d93-43ee-b0a6-996c9042f5c7" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1421.747786] env[62519]: DEBUG oslo_concurrency.lockutils [None req-0c4994c0-96a4-4874-90b8-74f17da23551 tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] Lock "2b68e95a-1d93-43ee-b0a6-996c9042f5c7" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1421.773193] env[62519]: DEBUG oslo_vmware.rw_handles [None req-120ef937-d998-4979-a2ea-db598aea78e6 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52773c87-f717-b6e8-3480-e8574de4125a/disk-0.vmdk. {{(pid=62519) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1421.774606] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a76145ef-152b-4e29-8482-20b84416642f {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1421.781783] env[62519]: DEBUG oslo_vmware.rw_handles [None req-120ef937-d998-4979-a2ea-db598aea78e6 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52773c87-f717-b6e8-3480-e8574de4125a/disk-0.vmdk is in state: ready. {{(pid=62519) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1421.781982] env[62519]: ERROR oslo_vmware.rw_handles [None req-120ef937-d998-4979-a2ea-db598aea78e6 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Aborting lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52773c87-f717-b6e8-3480-e8574de4125a/disk-0.vmdk due to incomplete transfer. 
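The recurring "Waiting for the task: (returnval){ value = "task-..." }" and "Task: {...} progress is N%" records above are produced by oslo.vmware's session/task-polling helper, which Nova's vmwareapi driver uses for every vCenter *_Task call. A minimal sketch of that pattern follows, assuming a reachable vCenter; the host, credentials and VM lookup below are placeholders for illustration, not values from this log.

    # Minimal sketch of the oslo.vmware session/task pattern seen in these logs.
    # Host, credentials and the VM lookup are placeholders, not values from this log.
    from oslo_vmware import api
    from oslo_vmware import vim_util

    session = api.VMwareAPISession(
        'vc.example.test',                 # placeholder vCenter host
        'administrator@vsphere.local',     # placeholder username
        'password',                        # placeholder password
        api_retry_count=10,
        task_poll_interval=0.5)            # how often the poller logs "progress is N%"

    # Fetch some VirtualMachine managed-object reference (simplified; Nova
    # resolves the instance's moref through its own vm_util helpers).
    result = session.invoke_api(vim_util, 'get_objects', session.vim,
                                'VirtualMachine', 100)
    vm_ref = result.objects[0].obj

    # Invoking a *_Task vim method returns a task moref; wait_for_task() polls
    # it (emitting the "Task: {'id': task-..., 'name': ...} progress is N%"
    # DEBUG lines) and returns, or raises, once the task completes.
    task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)
    session.wait_for_task(task)

The same polling loop is what reports the CopyVirtualDisk_Task, DeleteDatastoreFile_Task and Destroy_Task progress percentages interleaved through the surrounding records.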
[ 1421.782320] env[62519]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-82084b1e-d523-42db-938f-f994d2c1f732 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1421.791403] env[62519]: DEBUG oslo_vmware.rw_handles [None req-120ef937-d998-4979-a2ea-db598aea78e6 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Closed VMDK read handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52773c87-f717-b6e8-3480-e8574de4125a/disk-0.vmdk. {{(pid=62519) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1421.791648] env[62519]: DEBUG nova.virt.vmwareapi.images [None req-120ef937-d998-4979-a2ea-db598aea78e6 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] [instance: 8b178cc0-db79-4ec2-8962-f31b936f8eff] Uploaded image f89b79a3-eee9-4e62-ac73-98c46676969e to the Glance image server {{(pid=62519) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1421.795831] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-120ef937-d998-4979-a2ea-db598aea78e6 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] [instance: 8b178cc0-db79-4ec2-8962-f31b936f8eff] Destroying the VM {{(pid=62519) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1421.796183] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-d920124b-fda7-48eb-90ee-9a48bc119dba {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1421.804930] env[62519]: DEBUG oslo_vmware.api [None req-120ef937-d998-4979-a2ea-db598aea78e6 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Waiting for the task: (returnval){ [ 1421.804930] env[62519]: value = "task-1802010" [ 1421.804930] env[62519]: _type = "Task" [ 1421.804930] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1421.817314] env[62519]: DEBUG oslo_vmware.api [None req-120ef937-d998-4979-a2ea-db598aea78e6 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Task: {'id': task-1802010, 'name': Destroy_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1422.033381] env[62519]: DEBUG oslo_concurrency.lockutils [None req-e8c6971a-d3e1-4194-9146-e4c243998a24 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Acquiring lock "31f55ece-82e6-40ad-ad7c-1af645f307bf" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1422.033381] env[62519]: DEBUG oslo_concurrency.lockutils [None req-e8c6971a-d3e1-4194-9146-e4c243998a24 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Lock "31f55ece-82e6-40ad-ad7c-1af645f307bf" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1422.033381] env[62519]: DEBUG oslo_concurrency.lockutils [None req-e8c6971a-d3e1-4194-9146-e4c243998a24 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Acquiring lock "31f55ece-82e6-40ad-ad7c-1af645f307bf-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1422.033381] env[62519]: DEBUG oslo_concurrency.lockutils [None req-e8c6971a-d3e1-4194-9146-e4c243998a24 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Lock "31f55ece-82e6-40ad-ad7c-1af645f307bf-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1422.033697] env[62519]: DEBUG oslo_concurrency.lockutils [None req-e8c6971a-d3e1-4194-9146-e4c243998a24 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Lock "31f55ece-82e6-40ad-ad7c-1af645f307bf-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1422.034350] env[62519]: INFO nova.compute.manager [None req-e8c6971a-d3e1-4194-9146-e4c243998a24 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] [instance: 31f55ece-82e6-40ad-ad7c-1af645f307bf] Terminating instance [ 1422.078513] env[62519]: DEBUG nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Instance c612a1be-fb39-416d-a9d2-d206582e5aeb has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62519) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1422.078711] env[62519]: DEBUG nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Instance 8b178cc0-db79-4ec2-8962-f31b936f8eff actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62519) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1422.081539] env[62519]: INFO nova.compute.manager [-] [instance: ceadcb5e-ee82-4441-b046-f79b973ec05e] Took 1.03 seconds to deallocate network for instance. [ 1422.130140] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-62794cdc-f128-48b3-a56b-aae18e2e0aa4 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] [instance: fe350d30-6fbd-4813-9634-ed05984fecfd] Powering off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1422.132728] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-760281f9-4213-44ab-be38-bb8551b7652f {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1422.138588] env[62519]: DEBUG oslo_vmware.api [None req-62794cdc-f128-48b3-a56b-aae18e2e0aa4 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Waiting for the task: (returnval){ [ 1422.138588] env[62519]: value = "task-1802011" [ 1422.138588] env[62519]: _type = "Task" [ 1422.138588] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1422.138840] env[62519]: DEBUG nova.compute.manager [None req-fd469e3e-0f07-46c3-9403-6ec88e595d61 tempest-ServersTestJSON-192126600 tempest-ServersTestJSON-192126600-project-member] [instance: 4c336ad1-8ce6-4f89-843e-0baae0d0dbda] Starting instance... {{(pid=62519) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2441}} [ 1422.159308] env[62519]: DEBUG oslo_vmware.api [None req-62794cdc-f128-48b3-a56b-aae18e2e0aa4 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Task: {'id': task-1802011, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1422.209823] env[62519]: DEBUG oslo_vmware.api [None req-b63f15ff-942a-4ceb-acce-0511e4a5332d tempest-ServersWithSpecificFlavorTestJSON-2102225660 tempest-ServersWithSpecificFlavorTestJSON-2102225660-project-member] Task: {'id': task-1802007, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1422.316500] env[62519]: DEBUG oslo_vmware.api [None req-120ef937-d998-4979-a2ea-db598aea78e6 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Task: {'id': task-1802010, 'name': Destroy_Task} progress is 33%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1422.397193] env[62519]: DEBUG nova.network.neutron [req-0c518c0d-bf95-4a0c-ad33-473500dce7dc req-31115c58-96d3-4e2c-84ee-50ec1821b951 service nova] [instance: 029ee07c-705d-452b-9b14-385d69f2fbbb] Updated VIF entry in instance network info cache for port d260fe42-dd87-4c6b-a292-ce14b9314974. 
{{(pid=62519) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1422.397709] env[62519]: DEBUG nova.network.neutron [req-0c518c0d-bf95-4a0c-ad33-473500dce7dc req-31115c58-96d3-4e2c-84ee-50ec1821b951 service nova] [instance: 029ee07c-705d-452b-9b14-385d69f2fbbb] Updating instance_info_cache with network_info: [{"id": "d260fe42-dd87-4c6b-a292-ce14b9314974", "address": "fa:16:3e:84:c6:c7", "network": {"id": "87dc972b-a043-4db2-b67a-2efe6d40d1aa", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-1993987139-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8929cfd4892c4e399062483665500dd9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6b8137fc-f23d-49b1-b19c-3123a5588f34", "external-id": "nsx-vlan-transportzone-709", "segmentation_id": 709, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd260fe42-dd", "ovs_interfaceid": "d260fe42-dd87-4c6b-a292-ce14b9314974", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1422.541519] env[62519]: DEBUG nova.compute.manager [None req-e8c6971a-d3e1-4194-9146-e4c243998a24 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] [instance: 31f55ece-82e6-40ad-ad7c-1af645f307bf] Start destroying the instance on the hypervisor. 
{{(pid=62519) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3166}} [ 1422.541794] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-e8c6971a-d3e1-4194-9146-e4c243998a24 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] [instance: 31f55ece-82e6-40ad-ad7c-1af645f307bf] Destroying instance {{(pid=62519) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1422.543223] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27330109-b9b5-4a3e-bfad-437f69ec92f6 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1422.554617] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-e8c6971a-d3e1-4194-9146-e4c243998a24 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] [instance: 31f55ece-82e6-40ad-ad7c-1af645f307bf] Powering off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1422.554922] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-0a1689ea-1b43-4a6d-8bcc-93b2e06f35ed {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1422.562031] env[62519]: DEBUG oslo_vmware.api [None req-e8c6971a-d3e1-4194-9146-e4c243998a24 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Waiting for the task: (returnval){ [ 1422.562031] env[62519]: value = "task-1802012" [ 1422.562031] env[62519]: _type = "Task" [ 1422.562031] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1422.571846] env[62519]: DEBUG oslo_vmware.api [None req-e8c6971a-d3e1-4194-9146-e4c243998a24 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Task: {'id': task-1802012, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1422.583958] env[62519]: DEBUG nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Instance f7d5c77d-6c78-4969-b511-2b03ab624c84 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62519) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1422.584263] env[62519]: DEBUG nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Migration f90a3c1a-0ac3-49d8-8f4b-3ad7b5d43ffd is active on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62519) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1712}} [ 1422.584417] env[62519]: DEBUG nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Instance fe350d30-6fbd-4813-9634-ed05984fecfd actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. 
{{(pid=62519) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1422.593076] env[62519]: DEBUG oslo_concurrency.lockutils [None req-36a89f82-8295-4590-9f5f-922976f5713d tempest-ServerDiagnosticsV248Test-635217058 tempest-ServerDiagnosticsV248Test-635217058-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1422.659045] env[62519]: DEBUG oslo_vmware.api [None req-62794cdc-f128-48b3-a56b-aae18e2e0aa4 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Task: {'id': task-1802011, 'name': PowerOffVM_Task, 'duration_secs': 0.423219} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1422.662719] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-62794cdc-f128-48b3-a56b-aae18e2e0aa4 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] [instance: fe350d30-6fbd-4813-9634-ed05984fecfd] Powered off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1422.663760] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-62794cdc-f128-48b3-a56b-aae18e2e0aa4 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] [instance: fe350d30-6fbd-4813-9634-ed05984fecfd] Updating instance 'fe350d30-6fbd-4813-9634-ed05984fecfd' progress to 17 {{(pid=62519) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1422.711245] env[62519]: DEBUG oslo_vmware.api [None req-b63f15ff-942a-4ceb-acce-0511e4a5332d tempest-ServersWithSpecificFlavorTestJSON-2102225660 tempest-ServersWithSpecificFlavorTestJSON-2102225660-project-member] Task: {'id': task-1802007, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.230893} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1422.711245] env[62519]: DEBUG oslo_concurrency.lockutils [None req-fd469e3e-0f07-46c3-9403-6ec88e595d61 tempest-ServersTestJSON-192126600 tempest-ServersTestJSON-192126600-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1422.711245] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-b63f15ff-942a-4ceb-acce-0511e4a5332d tempest-ServersWithSpecificFlavorTestJSON-2102225660 tempest-ServersWithSpecificFlavorTestJSON-2102225660-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk to [datastore1] 029ee07c-705d-452b-9b14-385d69f2fbbb/029ee07c-705d-452b-9b14-385d69f2fbbb.vmdk {{(pid=62519) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1422.711245] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-b63f15ff-942a-4ceb-acce-0511e4a5332d tempest-ServersWithSpecificFlavorTestJSON-2102225660 tempest-ServersWithSpecificFlavorTestJSON-2102225660-project-member] [instance: 029ee07c-705d-452b-9b14-385d69f2fbbb] Extending root virtual disk to 1048576 {{(pid=62519) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1422.711643] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-c0b00079-89e1-4ee8-b09e-0cbc311e4d05 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1422.718593] env[62519]: DEBUG oslo_vmware.api [None req-b63f15ff-942a-4ceb-acce-0511e4a5332d tempest-ServersWithSpecificFlavorTestJSON-2102225660 tempest-ServersWithSpecificFlavorTestJSON-2102225660-project-member] Waiting for the task: (returnval){ [ 1422.718593] env[62519]: value = "task-1802013" [ 1422.718593] env[62519]: _type = "Task" [ 1422.718593] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1422.728299] env[62519]: DEBUG oslo_vmware.api [None req-b63f15ff-942a-4ceb-acce-0511e4a5332d tempest-ServersWithSpecificFlavorTestJSON-2102225660 tempest-ServersWithSpecificFlavorTestJSON-2102225660-project-member] Task: {'id': task-1802013, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1422.775034] env[62519]: DEBUG nova.virt.hardware [None req-d788e678-2770-4a2b-8ed0-e8cc74b5f814 tempest-ServersAdmin275Test-1657935187 tempest-ServersAdmin275Test-1657935187-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T07:56:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T07:55:55Z,direct_url=,disk_format='vmdk',id=15793716-f1d9-4a86-9030-717adf498693,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4069337684d348e0a1ab4eb6a1a2b14d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T07:55:56Z,virtual_size=,visibility=), allow threads: False {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1422.775034] env[62519]: DEBUG nova.virt.hardware [None req-d788e678-2770-4a2b-8ed0-e8cc74b5f814 tempest-ServersAdmin275Test-1657935187 tempest-ServersAdmin275Test-1657935187-project-member] Flavor limits 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1422.775034] env[62519]: DEBUG nova.virt.hardware [None req-d788e678-2770-4a2b-8ed0-e8cc74b5f814 tempest-ServersAdmin275Test-1657935187 tempest-ServersAdmin275Test-1657935187-project-member] Image limits 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1422.775034] env[62519]: DEBUG nova.virt.hardware [None req-d788e678-2770-4a2b-8ed0-e8cc74b5f814 tempest-ServersAdmin275Test-1657935187 tempest-ServersAdmin275Test-1657935187-project-member] Flavor pref 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1422.775607] env[62519]: DEBUG nova.virt.hardware [None req-d788e678-2770-4a2b-8ed0-e8cc74b5f814 tempest-ServersAdmin275Test-1657935187 tempest-ServersAdmin275Test-1657935187-project-member] Image pref 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1422.775607] env[62519]: DEBUG nova.virt.hardware [None req-d788e678-2770-4a2b-8ed0-e8cc74b5f814 tempest-ServersAdmin275Test-1657935187 tempest-ServersAdmin275Test-1657935187-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1422.775607] env[62519]: DEBUG nova.virt.hardware [None req-d788e678-2770-4a2b-8ed0-e8cc74b5f814 tempest-ServersAdmin275Test-1657935187 tempest-ServersAdmin275Test-1657935187-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1422.775607] env[62519]: DEBUG nova.virt.hardware [None req-d788e678-2770-4a2b-8ed0-e8cc74b5f814 tempest-ServersAdmin275Test-1657935187 tempest-ServersAdmin275Test-1657935187-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62519) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1422.775607] env[62519]: DEBUG nova.virt.hardware [None 
req-d788e678-2770-4a2b-8ed0-e8cc74b5f814 tempest-ServersAdmin275Test-1657935187 tempest-ServersAdmin275Test-1657935187-project-member] Got 1 possible topologies {{(pid=62519) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1422.775793] env[62519]: DEBUG nova.virt.hardware [None req-d788e678-2770-4a2b-8ed0-e8cc74b5f814 tempest-ServersAdmin275Test-1657935187 tempest-ServersAdmin275Test-1657935187-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1422.775793] env[62519]: DEBUG nova.virt.hardware [None req-d788e678-2770-4a2b-8ed0-e8cc74b5f814 tempest-ServersAdmin275Test-1657935187 tempest-ServersAdmin275Test-1657935187-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1422.775793] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1153ced2-b947-4437-81a0-10d78d35b075 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1422.787740] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe626cd9-a50e-4271-98bb-f164357ffff6 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1422.803267] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-d788e678-2770-4a2b-8ed0-e8cc74b5f814 tempest-ServersAdmin275Test-1657935187 tempest-ServersAdmin275Test-1657935187-project-member] [instance: c07e4d30-44bc-417b-8137-97f974aec932] Instance VIF info [] {{(pid=62519) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1422.808346] env[62519]: DEBUG oslo.service.loopingcall [None req-d788e678-2770-4a2b-8ed0-e8cc74b5f814 tempest-ServersAdmin275Test-1657935187 tempest-ServersAdmin275Test-1657935187-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62519) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1422.808721] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c07e4d30-44bc-417b-8137-97f974aec932] Creating VM on the ESX host {{(pid=62519) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1422.811916] env[62519]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-13dbb736-e838-4123-8c74-b3a6277851c5 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1422.830178] env[62519]: DEBUG oslo_vmware.api [None req-120ef937-d998-4979-a2ea-db598aea78e6 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Task: {'id': task-1802010, 'name': Destroy_Task, 'duration_secs': 0.688609} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1422.831552] env[62519]: INFO nova.virt.vmwareapi.vm_util [None req-120ef937-d998-4979-a2ea-db598aea78e6 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] [instance: 8b178cc0-db79-4ec2-8962-f31b936f8eff] Destroyed the VM [ 1422.831711] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-120ef937-d998-4979-a2ea-db598aea78e6 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] [instance: 8b178cc0-db79-4ec2-8962-f31b936f8eff] Deleting Snapshot of the VM instance {{(pid=62519) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1422.831893] env[62519]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1422.831893] env[62519]: value = "task-1802014" [ 1422.831893] env[62519]: _type = "Task" [ 1422.831893] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1422.832101] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-9f2be469-a507-4350-91de-7c89d9c94dad {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1422.843387] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1802014, 'name': CreateVM_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1422.846033] env[62519]: DEBUG oslo_vmware.api [None req-120ef937-d998-4979-a2ea-db598aea78e6 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Waiting for the task: (returnval){ [ 1422.846033] env[62519]: value = "task-1802015" [ 1422.846033] env[62519]: _type = "Task" [ 1422.846033] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1422.854278] env[62519]: DEBUG oslo_vmware.api [None req-120ef937-d998-4979-a2ea-db598aea78e6 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Task: {'id': task-1802015, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1422.903751] env[62519]: DEBUG oslo_concurrency.lockutils [req-0c518c0d-bf95-4a0c-ad33-473500dce7dc req-31115c58-96d3-4e2c-84ee-50ec1821b951 service nova] Releasing lock "refresh_cache-029ee07c-705d-452b-9b14-385d69f2fbbb" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1423.075572] env[62519]: DEBUG oslo_vmware.api [None req-e8c6971a-d3e1-4194-9146-e4c243998a24 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Task: {'id': task-1802012, 'name': PowerOffVM_Task, 'duration_secs': 0.221992} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1423.076594] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-e8c6971a-d3e1-4194-9146-e4c243998a24 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] [instance: 31f55ece-82e6-40ad-ad7c-1af645f307bf] Powered off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1423.076594] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-e8c6971a-d3e1-4194-9146-e4c243998a24 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] [instance: 31f55ece-82e6-40ad-ad7c-1af645f307bf] Unregistering the VM {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1423.076594] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-bd403110-d20c-45ff-b189-65f8ae90bcf4 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1423.095166] env[62519]: DEBUG nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Instance 021c3287-5887-457e-9b3a-233308fb9b23 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62519) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1423.172581] env[62519]: DEBUG nova.virt.hardware [None req-62794cdc-f128-48b3-a56b-aae18e2e0aa4 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T07:56:15Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=15793716-f1d9-4a86-9030-717adf498693,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1423.172903] env[62519]: DEBUG nova.virt.hardware [None req-62794cdc-f128-48b3-a56b-aae18e2e0aa4 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Flavor limits 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1423.173049] env[62519]: DEBUG nova.virt.hardware [None req-62794cdc-f128-48b3-a56b-aae18e2e0aa4 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Image limits 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1423.173212] env[62519]: DEBUG nova.virt.hardware [None req-62794cdc-f128-48b3-a56b-aae18e2e0aa4 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Flavor pref 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1423.173368] env[62519]: DEBUG nova.virt.hardware [None req-62794cdc-f128-48b3-a56b-aae18e2e0aa4 tempest-MigrationsAdminTest-1925339216 
tempest-MigrationsAdminTest-1925339216-project-member] Image pref 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1423.173600] env[62519]: DEBUG nova.virt.hardware [None req-62794cdc-f128-48b3-a56b-aae18e2e0aa4 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1423.173835] env[62519]: DEBUG nova.virt.hardware [None req-62794cdc-f128-48b3-a56b-aae18e2e0aa4 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1423.174026] env[62519]: DEBUG nova.virt.hardware [None req-62794cdc-f128-48b3-a56b-aae18e2e0aa4 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62519) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1423.174216] env[62519]: DEBUG nova.virt.hardware [None req-62794cdc-f128-48b3-a56b-aae18e2e0aa4 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Got 1 possible topologies {{(pid=62519) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1423.174401] env[62519]: DEBUG nova.virt.hardware [None req-62794cdc-f128-48b3-a56b-aae18e2e0aa4 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1423.174601] env[62519]: DEBUG nova.virt.hardware [None req-62794cdc-f128-48b3-a56b-aae18e2e0aa4 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1423.180049] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e2a619a5-24ad-4555-a6df-6710506a096e {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1423.202668] env[62519]: DEBUG oslo_vmware.api [None req-62794cdc-f128-48b3-a56b-aae18e2e0aa4 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Waiting for the task: (returnval){ [ 1423.202668] env[62519]: value = "task-1802017" [ 1423.202668] env[62519]: _type = "Task" [ 1423.202668] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1423.213049] env[62519]: DEBUG oslo_vmware.api [None req-62794cdc-f128-48b3-a56b-aae18e2e0aa4 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Task: {'id': task-1802017, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1423.229836] env[62519]: DEBUG oslo_vmware.api [None req-b63f15ff-942a-4ceb-acce-0511e4a5332d tempest-ServersWithSpecificFlavorTestJSON-2102225660 tempest-ServersWithSpecificFlavorTestJSON-2102225660-project-member] Task: {'id': task-1802013, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.186244} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1423.230875] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-b63f15ff-942a-4ceb-acce-0511e4a5332d tempest-ServersWithSpecificFlavorTestJSON-2102225660 tempest-ServersWithSpecificFlavorTestJSON-2102225660-project-member] [instance: 029ee07c-705d-452b-9b14-385d69f2fbbb] Extended root virtual disk {{(pid=62519) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1423.231471] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69e6f143-1e44-48da-94a8-0c62f27b0a09 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1423.262393] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-b63f15ff-942a-4ceb-acce-0511e4a5332d tempest-ServersWithSpecificFlavorTestJSON-2102225660 tempest-ServersWithSpecificFlavorTestJSON-2102225660-project-member] [instance: 029ee07c-705d-452b-9b14-385d69f2fbbb] Reconfiguring VM instance instance-0000000e to attach disk [datastore1] 029ee07c-705d-452b-9b14-385d69f2fbbb/029ee07c-705d-452b-9b14-385d69f2fbbb.vmdk or device None with type sparse {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1423.263170] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-820ad21a-97e4-4ec3-866b-259fe59b7ec4 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1423.285379] env[62519]: DEBUG oslo_vmware.api [None req-b63f15ff-942a-4ceb-acce-0511e4a5332d tempest-ServersWithSpecificFlavorTestJSON-2102225660 tempest-ServersWithSpecificFlavorTestJSON-2102225660-project-member] Waiting for the task: (returnval){ [ 1423.285379] env[62519]: value = "task-1802018" [ 1423.285379] env[62519]: _type = "Task" [ 1423.285379] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1423.299930] env[62519]: DEBUG oslo_vmware.api [None req-b63f15ff-942a-4ceb-acce-0511e4a5332d tempest-ServersWithSpecificFlavorTestJSON-2102225660 tempest-ServersWithSpecificFlavorTestJSON-2102225660-project-member] Task: {'id': task-1802018, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1423.345446] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1802014, 'name': CreateVM_Task, 'duration_secs': 0.331009} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1423.345446] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c07e4d30-44bc-417b-8137-97f974aec932] Created VM on the ESX host {{(pid=62519) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1423.345932] env[62519]: DEBUG oslo_concurrency.lockutils [None req-d788e678-2770-4a2b-8ed0-e8cc74b5f814 tempest-ServersAdmin275Test-1657935187 tempest-ServersAdmin275Test-1657935187-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1423.346119] env[62519]: DEBUG oslo_concurrency.lockutils [None req-d788e678-2770-4a2b-8ed0-e8cc74b5f814 tempest-ServersAdmin275Test-1657935187 tempest-ServersAdmin275Test-1657935187-project-member] Acquired lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1423.346524] env[62519]: DEBUG oslo_concurrency.lockutils [None req-d788e678-2770-4a2b-8ed0-e8cc74b5f814 tempest-ServersAdmin275Test-1657935187 tempest-ServersAdmin275Test-1657935187-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1423.346864] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-897f9dc0-143d-4516-ba9e-27f30345e734 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1423.365728] env[62519]: DEBUG oslo_vmware.api [None req-d788e678-2770-4a2b-8ed0-e8cc74b5f814 tempest-ServersAdmin275Test-1657935187 tempest-ServersAdmin275Test-1657935187-project-member] Waiting for the task: (returnval){ [ 1423.365728] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]521066fb-b76f-f224-1ee1-a858134efad2" [ 1423.365728] env[62519]: _type = "Task" [ 1423.365728] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1423.365728] env[62519]: DEBUG oslo_vmware.api [None req-120ef937-d998-4979-a2ea-db598aea78e6 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Task: {'id': task-1802015, 'name': RemoveSnapshot_Task, 'duration_secs': 0.415994} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1423.366515] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-120ef937-d998-4979-a2ea-db598aea78e6 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] [instance: 8b178cc0-db79-4ec2-8962-f31b936f8eff] Deleted Snapshot of the VM instance {{(pid=62519) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1423.366515] env[62519]: INFO nova.compute.manager [None req-120ef937-d998-4979-a2ea-db598aea78e6 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] [instance: 8b178cc0-db79-4ec2-8962-f31b936f8eff] Took 16.14 seconds to snapshot the instance on the hypervisor. 
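The "Acquiring lock ... by ...", "Lock ... acquired by ... :: waited Ns" and "Lock ... \"released\" by ... :: held Ns" records throughout this log come from oslo.concurrency's lock helpers (the lockutils.py line numbers in the records point at its inner wrapper and lock() context manager). A minimal sketch of that pattern, using the generic oslo.concurrency form with hypothetical lock names; Nova layers its own prefixed helpers on top of these primitives.

    # Minimal sketch of the oslo.concurrency locking pattern behind the
    # "Acquiring lock ... / acquired ... waited / released ... held" DEBUG lines.
    # The lock names and functions below are illustrative, not taken from Nova.
    from oslo_concurrency import lockutils

    @lockutils.synchronized('build-instance-51eaac08', external=False)
    def build_instance():
        # Runs while holding the named in-process lock; the decorator's
        # wrapping "inner" function logs acquire/wait/hold timings at DEBUG.
        pass

    # The same primitive is also available as a context manager:
    with lockutils.lock('refresh_cache-029ee07c', external=False):
        # e.g. refreshing one instance's cached network-info entry
        pass

    build_instance()

With external=False these are in-process semaphores, which matches the sub-millisecond "waited 0.000s" timings seen above; only external (file-based) locks coordinate across processes.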
[ 1423.379519] env[62519]: DEBUG oslo_vmware.api [None req-d788e678-2770-4a2b-8ed0-e8cc74b5f814 tempest-ServersAdmin275Test-1657935187 tempest-ServersAdmin275Test-1657935187-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]521066fb-b76f-f224-1ee1-a858134efad2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1423.598766] env[62519]: DEBUG nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Instance 3aab3adb-b28b-45dd-880f-b1cfbaeeed0c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62519) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1423.711829] env[62519]: DEBUG oslo_vmware.api [None req-62794cdc-f128-48b3-a56b-aae18e2e0aa4 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Task: {'id': task-1802017, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1423.795900] env[62519]: DEBUG oslo_vmware.api [None req-b63f15ff-942a-4ceb-acce-0511e4a5332d tempest-ServersWithSpecificFlavorTestJSON-2102225660 tempest-ServersWithSpecificFlavorTestJSON-2102225660-project-member] Task: {'id': task-1802018, 'name': ReconfigVM_Task, 'duration_secs': 0.300569} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1423.797779] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-b63f15ff-942a-4ceb-acce-0511e4a5332d tempest-ServersWithSpecificFlavorTestJSON-2102225660 tempest-ServersWithSpecificFlavorTestJSON-2102225660-project-member] [instance: 029ee07c-705d-452b-9b14-385d69f2fbbb] Reconfigured VM instance instance-0000000e to attach disk [datastore1] 029ee07c-705d-452b-9b14-385d69f2fbbb/029ee07c-705d-452b-9b14-385d69f2fbbb.vmdk or device None with type sparse {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1423.797779] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-8a57ac79-723e-436d-bbc6-5bca95342467 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1423.805502] env[62519]: DEBUG oslo_vmware.api [None req-b63f15ff-942a-4ceb-acce-0511e4a5332d tempest-ServersWithSpecificFlavorTestJSON-2102225660 tempest-ServersWithSpecificFlavorTestJSON-2102225660-project-member] Waiting for the task: (returnval){ [ 1423.805502] env[62519]: value = "task-1802019" [ 1423.805502] env[62519]: _type = "Task" [ 1423.805502] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1423.815503] env[62519]: DEBUG oslo_vmware.api [None req-b63f15ff-942a-4ceb-acce-0511e4a5332d tempest-ServersWithSpecificFlavorTestJSON-2102225660 tempest-ServersWithSpecificFlavorTestJSON-2102225660-project-member] Task: {'id': task-1802019, 'name': Rename_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1423.880347] env[62519]: DEBUG oslo_vmware.api [None req-d788e678-2770-4a2b-8ed0-e8cc74b5f814 tempest-ServersAdmin275Test-1657935187 tempest-ServersAdmin275Test-1657935187-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]521066fb-b76f-f224-1ee1-a858134efad2, 'name': SearchDatastore_Task, 'duration_secs': 0.076522} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1423.881814] env[62519]: DEBUG oslo_concurrency.lockutils [None req-d788e678-2770-4a2b-8ed0-e8cc74b5f814 tempest-ServersAdmin275Test-1657935187 tempest-ServersAdmin275Test-1657935187-project-member] Releasing lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1423.882449] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-d788e678-2770-4a2b-8ed0-e8cc74b5f814 tempest-ServersAdmin275Test-1657935187 tempest-ServersAdmin275Test-1657935187-project-member] [instance: c07e4d30-44bc-417b-8137-97f974aec932] Processing image 15793716-f1d9-4a86-9030-717adf498693 {{(pid=62519) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1423.882449] env[62519]: DEBUG oslo_concurrency.lockutils [None req-d788e678-2770-4a2b-8ed0-e8cc74b5f814 tempest-ServersAdmin275Test-1657935187 tempest-ServersAdmin275Test-1657935187-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1423.882449] env[62519]: DEBUG oslo_concurrency.lockutils [None req-d788e678-2770-4a2b-8ed0-e8cc74b5f814 tempest-ServersAdmin275Test-1657935187 tempest-ServersAdmin275Test-1657935187-project-member] Acquired lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1423.882697] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-d788e678-2770-4a2b-8ed0-e8cc74b5f814 tempest-ServersAdmin275Test-1657935187 tempest-ServersAdmin275Test-1657935187-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62519) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1423.882928] env[62519]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-1caec3ec-bff6-4ca5-a97a-ad815f31557a {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1423.895487] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-d788e678-2770-4a2b-8ed0-e8cc74b5f814 tempest-ServersAdmin275Test-1657935187 tempest-ServersAdmin275Test-1657935187-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62519) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1423.896277] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-d788e678-2770-4a2b-8ed0-e8cc74b5f814 tempest-ServersAdmin275Test-1657935187 tempest-ServersAdmin275Test-1657935187-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62519) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1423.897304] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-35092dd4-68c2-47b5-b80c-cd2f0b240e50 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1423.905514] env[62519]: DEBUG oslo_vmware.api [None req-d788e678-2770-4a2b-8ed0-e8cc74b5f814 tempest-ServersAdmin275Test-1657935187 tempest-ServersAdmin275Test-1657935187-project-member] Waiting for the task: (returnval){ [ 1423.905514] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]5275e790-8b7e-b537-0883-4bdbd773299b" [ 1423.905514] env[62519]: _type = "Task" [ 1423.905514] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1423.917975] env[62519]: DEBUG oslo_vmware.api [None req-d788e678-2770-4a2b-8ed0-e8cc74b5f814 tempest-ServersAdmin275Test-1657935187 tempest-ServersAdmin275Test-1657935187-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]5275e790-8b7e-b537-0883-4bdbd773299b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1424.017646] env[62519]: DEBUG oslo_concurrency.lockutils [None req-7d5c54f5-351b-4d76-a79b-bca198a22919 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Acquiring lock "8b178cc0-db79-4ec2-8962-f31b936f8eff" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1424.018051] env[62519]: DEBUG oslo_concurrency.lockutils [None req-7d5c54f5-351b-4d76-a79b-bca198a22919 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Lock "8b178cc0-db79-4ec2-8962-f31b936f8eff" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1424.018815] env[62519]: DEBUG oslo_concurrency.lockutils [None req-7d5c54f5-351b-4d76-a79b-bca198a22919 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Acquiring lock "8b178cc0-db79-4ec2-8962-f31b936f8eff-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1424.018815] env[62519]: DEBUG oslo_concurrency.lockutils [None req-7d5c54f5-351b-4d76-a79b-bca198a22919 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Lock "8b178cc0-db79-4ec2-8962-f31b936f8eff-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1424.018815] env[62519]: DEBUG oslo_concurrency.lockutils [None req-7d5c54f5-351b-4d76-a79b-bca198a22919 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Lock "8b178cc0-db79-4ec2-8962-f31b936f8eff-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62519) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1424.021596] env[62519]: INFO nova.compute.manager [None req-7d5c54f5-351b-4d76-a79b-bca198a22919 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] [instance: 8b178cc0-db79-4ec2-8962-f31b936f8eff] Terminating instance [ 1424.101721] env[62519]: DEBUG nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Instance 56790738-4759-468a-9f43-f9c2bc2de23a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62519) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1424.216496] env[62519]: DEBUG oslo_vmware.api [None req-62794cdc-f128-48b3-a56b-aae18e2e0aa4 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Task: {'id': task-1802017, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1424.321437] env[62519]: DEBUG oslo_vmware.api [None req-b63f15ff-942a-4ceb-acce-0511e4a5332d tempest-ServersWithSpecificFlavorTestJSON-2102225660 tempest-ServersWithSpecificFlavorTestJSON-2102225660-project-member] Task: {'id': task-1802019, 'name': Rename_Task, 'duration_secs': 0.160528} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1424.321437] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-b63f15ff-942a-4ceb-acce-0511e4a5332d tempest-ServersWithSpecificFlavorTestJSON-2102225660 tempest-ServersWithSpecificFlavorTestJSON-2102225660-project-member] [instance: 029ee07c-705d-452b-9b14-385d69f2fbbb] Powering on the VM {{(pid=62519) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1424.321437] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-975448a1-9d86-4d97-8953-7427d2c7a434 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1424.332275] env[62519]: DEBUG oslo_vmware.api [None req-b63f15ff-942a-4ceb-acce-0511e4a5332d tempest-ServersWithSpecificFlavorTestJSON-2102225660 tempest-ServersWithSpecificFlavorTestJSON-2102225660-project-member] Waiting for the task: (returnval){ [ 1424.332275] env[62519]: value = "task-1802020" [ 1424.332275] env[62519]: _type = "Task" [ 1424.332275] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1424.344610] env[62519]: DEBUG oslo_vmware.api [None req-b63f15ff-942a-4ceb-acce-0511e4a5332d tempest-ServersWithSpecificFlavorTestJSON-2102225660 tempest-ServersWithSpecificFlavorTestJSON-2102225660-project-member] Task: {'id': task-1802020, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1424.419928] env[62519]: DEBUG oslo_vmware.api [None req-d788e678-2770-4a2b-8ed0-e8cc74b5f814 tempest-ServersAdmin275Test-1657935187 tempest-ServersAdmin275Test-1657935187-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]5275e790-8b7e-b537-0883-4bdbd773299b, 'name': SearchDatastore_Task, 'duration_secs': 0.013373} completed successfully. 
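The Acquiring/Acquired/"released" lock entries with their waited/held timings come from oslo.concurrency. Nova uses lockutils both as a context manager (the lockutils.py:310/313/331 lines) and through the synchronized decorator (the inner lockutils.py:402/407/421 lines). A minimal sketch of both forms; the lock names are taken from the log purely for illustration:

from oslo_concurrency import lockutils

# Context-manager form: logs "Acquiring lock ... / Acquired lock ... /
# Releasing lock ..." around the critical section.
with lockutils.lock('refresh_cache-029ee07c-705d-452b-9b14-385d69f2fbbb'):
    pass  # e.g. refresh the instance network info cache

# Decorator form: logs 'Lock "..." acquired by "..." :: waited N' and
# '"released" by "..." :: held N' around each call.
@lockutils.synchronized('compute_resources')
def update_usage():
    pass

update_usage()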
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1424.421160] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e8b8a18a-ea86-441e-8679-e7fca256bd56 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1424.428047] env[62519]: DEBUG oslo_vmware.api [None req-d788e678-2770-4a2b-8ed0-e8cc74b5f814 tempest-ServersAdmin275Test-1657935187 tempest-ServersAdmin275Test-1657935187-project-member] Waiting for the task: (returnval){ [ 1424.428047] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]523e3f63-27f7-f439-a544-8319cdf11d6a" [ 1424.428047] env[62519]: _type = "Task" [ 1424.428047] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1424.440084] env[62519]: DEBUG oslo_vmware.api [None req-d788e678-2770-4a2b-8ed0-e8cc74b5f814 tempest-ServersAdmin275Test-1657935187 tempest-ServersAdmin275Test-1657935187-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]523e3f63-27f7-f439-a544-8319cdf11d6a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1424.525766] env[62519]: DEBUG nova.compute.manager [None req-7d5c54f5-351b-4d76-a79b-bca198a22919 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] [instance: 8b178cc0-db79-4ec2-8962-f31b936f8eff] Start destroying the instance on the hypervisor. {{(pid=62519) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3166}} [ 1424.526126] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-7d5c54f5-351b-4d76-a79b-bca198a22919 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] [instance: 8b178cc0-db79-4ec2-8962-f31b936f8eff] Destroying instance {{(pid=62519) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1424.527252] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f98e21e-eaac-4ff9-94ec-24d16892add9 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1424.543858] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-7d5c54f5-351b-4d76-a79b-bca198a22919 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] [instance: 8b178cc0-db79-4ec2-8962-f31b936f8eff] Unregistering the VM {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1424.547019] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-feb73ade-41a7-4e02-8ff1-f55813298439 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1424.606299] env[62519]: DEBUG nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Instance 4a29bff8-050a-4ad5-9d06-3a59c40b97ee has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62519) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1424.607323] env[62519]: WARNING nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Instance c616d8ec-f28a-4430-a336-1ea4790fd511 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1424.713822] env[62519]: DEBUG oslo_vmware.api [None req-62794cdc-f128-48b3-a56b-aae18e2e0aa4 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Task: {'id': task-1802017, 'name': ReconfigVM_Task, 'duration_secs': 1.126382} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1424.714192] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-62794cdc-f128-48b3-a56b-aae18e2e0aa4 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] [instance: fe350d30-6fbd-4813-9634-ed05984fecfd] Updating instance 'fe350d30-6fbd-4813-9634-ed05984fecfd' progress to 33 {{(pid=62519) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1424.845976] env[62519]: DEBUG oslo_vmware.api [None req-b63f15ff-942a-4ceb-acce-0511e4a5332d tempest-ServersWithSpecificFlavorTestJSON-2102225660 tempest-ServersWithSpecificFlavorTestJSON-2102225660-project-member] Task: {'id': task-1802020, 'name': PowerOnVM_Task, 'duration_secs': 0.50277} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1424.846409] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-b63f15ff-942a-4ceb-acce-0511e4a5332d tempest-ServersWithSpecificFlavorTestJSON-2102225660 tempest-ServersWithSpecificFlavorTestJSON-2102225660-project-member] [instance: 029ee07c-705d-452b-9b14-385d69f2fbbb] Powered on the VM {{(pid=62519) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1424.846796] env[62519]: INFO nova.compute.manager [None req-b63f15ff-942a-4ceb-acce-0511e4a5332d tempest-ServersWithSpecificFlavorTestJSON-2102225660 tempest-ServersWithSpecificFlavorTestJSON-2102225660-project-member] [instance: 029ee07c-705d-452b-9b14-385d69f2fbbb] Took 10.28 seconds to spawn the instance on the hypervisor. [ 1424.846873] env[62519]: DEBUG nova.compute.manager [None req-b63f15ff-942a-4ceb-acce-0511e4a5332d tempest-ServersWithSpecificFlavorTestJSON-2102225660 tempest-ServersWithSpecificFlavorTestJSON-2102225660-project-member] [instance: 029ee07c-705d-452b-9b14-385d69f2fbbb] Checking state {{(pid=62519) _get_power_state /opt/stack/nova/nova/compute/manager.py:1799}} [ 1424.847904] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b66bdd2-b3e1-4c6e-b6f5-5c9c89e3b754 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1424.951228] env[62519]: DEBUG oslo_vmware.api [None req-d788e678-2770-4a2b-8ed0-e8cc74b5f814 tempest-ServersAdmin275Test-1657935187 tempest-ServersAdmin275Test-1657935187-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]523e3f63-27f7-f439-a544-8319cdf11d6a, 'name': SearchDatastore_Task, 'duration_secs': 0.014573} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1424.951536] env[62519]: DEBUG oslo_concurrency.lockutils [None req-d788e678-2770-4a2b-8ed0-e8cc74b5f814 tempest-ServersAdmin275Test-1657935187 tempest-ServersAdmin275Test-1657935187-project-member] Releasing lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1424.951806] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-d788e678-2770-4a2b-8ed0-e8cc74b5f814 tempest-ServersAdmin275Test-1657935187 tempest-ServersAdmin275Test-1657935187-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk to [datastore1] c07e4d30-44bc-417b-8137-97f974aec932/c07e4d30-44bc-417b-8137-97f974aec932.vmdk {{(pid=62519) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1424.952109] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-daba96d8-3019-4f53-a0b1-07724b8f3642 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1424.963055] env[62519]: DEBUG oslo_vmware.api [None req-d788e678-2770-4a2b-8ed0-e8cc74b5f814 tempest-ServersAdmin275Test-1657935187 tempest-ServersAdmin275Test-1657935187-project-member] Waiting for the task: (returnval){ [ 1424.963055] env[62519]: value = "task-1802022" [ 1424.963055] env[62519]: _type = "Task" [ 1424.963055] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1424.980037] env[62519]: DEBUG oslo_vmware.api [None req-d788e678-2770-4a2b-8ed0-e8cc74b5f814 tempest-ServersAdmin275Test-1657935187 tempest-ServersAdmin275Test-1657935187-project-member] Task: {'id': task-1802022, 'name': CopyVirtualDisk_Task} progress is 0%. 
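The sequence above (SearchDatastore_Task on devstack-image-cache_base, MakeDirectory, then CopyVirtualDisk_Task into the instance folder) is the vmwareapi spawn path reusing the cached image disk. A rough sketch of the copy step through oslo.vmware; the datastore paths are copied from the log, while the session details and datacenter moref are placeholders:

from oslo_vmware import api
from oslo_vmware import vim_util

session = api.VMwareAPISession('vcenter.example.org', 'admin', 'secret', 10, 0.5)
dc_ref = vim_util.get_moref('datacenter-2', 'Datacenter')   # hypothetical moref

src = ('[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/'
       '15793716-f1d9-4a86-9030-717adf498693.vmdk')
dst = ('[datastore1] c07e4d30-44bc-417b-8137-97f974aec932/'
       'c07e4d30-44bc-417b-8137-97f974aec932.vmdk')

# VirtualDiskManager.CopyVirtualDisk_Task copies the cached VMDK into the
# instance directory; the returned task is then polled like any other.
disk_mgr = session.vim.service_content.virtualDiskManager
task = session.invoke_api(session.vim, 'CopyVirtualDisk_Task', disk_mgr,
                          sourceName=src, sourceDatacenter=dc_ref,
                          destName=dst, destDatacenter=dc_ref)
session.wait_for_task(task)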
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1425.000941] env[62519]: DEBUG oslo_concurrency.lockutils [None req-d2114a68-6388-4c74-8f9c-6fba6cdea6e5 tempest-ServersTestFqdnHostnames-1137700598 tempest-ServersTestFqdnHostnames-1137700598-project-member] Acquiring lock "a59be5e6-2316-4766-933a-4d01dfe4fec1" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1425.000941] env[62519]: DEBUG oslo_concurrency.lockutils [None req-d2114a68-6388-4c74-8f9c-6fba6cdea6e5 tempest-ServersTestFqdnHostnames-1137700598 tempest-ServersTestFqdnHostnames-1137700598-project-member] Lock "a59be5e6-2316-4766-933a-4d01dfe4fec1" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1425.041657] env[62519]: DEBUG oslo_concurrency.lockutils [None req-384607cc-9c11-4f58-8b9f-4201a167011b tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Acquiring lock "3b506d10-a427-47b8-ab5f-c35e450b7eb1" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1425.042121] env[62519]: DEBUG oslo_concurrency.lockutils [None req-384607cc-9c11-4f58-8b9f-4201a167011b tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Lock "3b506d10-a427-47b8-ab5f-c35e450b7eb1" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1425.111049] env[62519]: DEBUG nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Instance 681ef7a9-3b24-450a-9034-6d30177995d7 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62519) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1425.111382] env[62519]: DEBUG nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Instance c8b7568b-ba07-4f65-818b-f84910209361 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62519) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1425.111382] env[62519]: DEBUG nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Instance 1118c1e5-1aa8-4f52-9fb9-e86531bf83d1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
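The resource_tracker lines ("has been scheduled to this compute host ... Skipping heal of allocation", "is not being actively managed ... Skipping heal of allocation because we do not know what to do", "actively managed on this compute host and has allocations in placement") are the periodic update_available_resource task reconciling placement allocations against locally tracked instances. A simplified, self-contained sketch of that decision; the data structures are illustrative stand-ins, not Nova's internals:

# Toy model of the allocation-heal check logged above.
def heal_allocations(allocations, tracked, scheduled_not_started):
    for uuid, resources in allocations.items():
        if uuid in scheduled_not_started:
            # Scheduler already claimed resources but the instance has not
            # started on this host yet: leave the allocation alone.
            print('skip heal (not started yet): %s %s' % (uuid, resources))
        elif uuid in tracked:
            print('actively managed, allocation consistent: %s' % uuid)
        else:
            # Allocation points at this node but we do not manage the
            # instance: warn and do nothing.
            print('WARNING: unmanaged instance with allocations: %s' % uuid)

heal_allocations(
    allocations={'3aab3adb': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1},
                 'c616d8ec': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1},
                 'c8b7568b': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}},
    tracked={'c8b7568b'},
    scheduled_not_started={'3aab3adb'})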
{{(pid=62519) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1425.227146] env[62519]: DEBUG nova.virt.hardware [None req-62794cdc-f128-48b3-a56b-aae18e2e0aa4 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T07:56:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=15793716-f1d9-4a86-9030-717adf498693,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1425.227304] env[62519]: DEBUG nova.virt.hardware [None req-62794cdc-f128-48b3-a56b-aae18e2e0aa4 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Flavor limits 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1425.227998] env[62519]: DEBUG nova.virt.hardware [None req-62794cdc-f128-48b3-a56b-aae18e2e0aa4 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Image limits 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1425.227998] env[62519]: DEBUG nova.virt.hardware [None req-62794cdc-f128-48b3-a56b-aae18e2e0aa4 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Flavor pref 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1425.227998] env[62519]: DEBUG nova.virt.hardware [None req-62794cdc-f128-48b3-a56b-aae18e2e0aa4 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Image pref 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1425.228261] env[62519]: DEBUG nova.virt.hardware [None req-62794cdc-f128-48b3-a56b-aae18e2e0aa4 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1425.228549] env[62519]: DEBUG nova.virt.hardware [None req-62794cdc-f128-48b3-a56b-aae18e2e0aa4 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1425.228789] env[62519]: DEBUG nova.virt.hardware [None req-62794cdc-f128-48b3-a56b-aae18e2e0aa4 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62519) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1425.228998] env[62519]: DEBUG nova.virt.hardware [None req-62794cdc-f128-48b3-a56b-aae18e2e0aa4 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Got 1 possible topologies {{(pid=62519) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1425.229716] env[62519]: DEBUG nova.virt.hardware [None req-62794cdc-f128-48b3-a56b-aae18e2e0aa4 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1425.229716] env[62519]: DEBUG nova.virt.hardware [None req-62794cdc-f128-48b3-a56b-aae18e2e0aa4 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1425.237455] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-62794cdc-f128-48b3-a56b-aae18e2e0aa4 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] [instance: fe350d30-6fbd-4813-9634-ed05984fecfd] Reconfiguring VM instance instance-00000002 to detach disk 2000 {{(pid=62519) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1425.238238] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6d16f4e7-dd41-44d2-b08d-afefc0f0317e {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1425.260086] env[62519]: DEBUG oslo_vmware.api [None req-62794cdc-f128-48b3-a56b-aae18e2e0aa4 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Waiting for the task: (returnval){ [ 1425.260086] env[62519]: value = "task-1802023" [ 1425.260086] env[62519]: _type = "Task" [ 1425.260086] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1425.272188] env[62519]: DEBUG oslo_vmware.api [None req-62794cdc-f128-48b3-a56b-aae18e2e0aa4 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Task: {'id': task-1802023, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1425.376449] env[62519]: INFO nova.compute.manager [None req-b63f15ff-942a-4ceb-acce-0511e4a5332d tempest-ServersWithSpecificFlavorTestJSON-2102225660 tempest-ServersWithSpecificFlavorTestJSON-2102225660-project-member] [instance: 029ee07c-705d-452b-9b14-385d69f2fbbb] Took 26.62 seconds to build instance. [ 1425.478364] env[62519]: DEBUG oslo_vmware.api [None req-d788e678-2770-4a2b-8ed0-e8cc74b5f814 tempest-ServersAdmin275Test-1657935187 tempest-ServersAdmin275Test-1657935187-project-member] Task: {'id': task-1802022, 'name': CopyVirtualDisk_Task} progress is 89%. 
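The nova.virt.hardware lines show the CPU-topology derivation for the m1.nano flavor: with no hw:cpu_* extra specs or image properties, limits and preferences are all zero, the per-dimension maximums default to 65536, and the only topology whose product equals 1 vCPU is 1 socket x 1 core x 1 thread. A toy re-derivation of that result (not Nova's implementation):

import itertools

def possible_topologies(vcpus, max_sockets=65536, max_cores=65536,
                        max_threads=65536):
    # Enumerate (sockets, cores, threads) triples whose product matches the
    # vCPU count and which respect the per-dimension maximums.
    for s, c, t in itertools.product(range(1, vcpus + 1), repeat=3):
        fits = s <= max_sockets and c <= max_cores and t <= max_threads
        if s * c * t == vcpus and fits:
            yield (s, c, t)

print(list(possible_topologies(1)))   # [(1, 1, 1)], as logged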
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1425.534662] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-e8c6971a-d3e1-4194-9146-e4c243998a24 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] [instance: 31f55ece-82e6-40ad-ad7c-1af645f307bf] Unregistered the VM {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1425.535522] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-e8c6971a-d3e1-4194-9146-e4c243998a24 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] [instance: 31f55ece-82e6-40ad-ad7c-1af645f307bf] Deleting contents of the VM from datastore datastore1 {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1425.535870] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-e8c6971a-d3e1-4194-9146-e4c243998a24 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Deleting the datastore file [datastore1] 31f55ece-82e6-40ad-ad7c-1af645f307bf {{(pid=62519) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1425.536747] env[62519]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d4303d1c-afd3-46b6-9c08-c2d8cdca182c {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1425.549267] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-7d5c54f5-351b-4d76-a79b-bca198a22919 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] [instance: 8b178cc0-db79-4ec2-8962-f31b936f8eff] Unregistered the VM {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1425.549465] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-7d5c54f5-351b-4d76-a79b-bca198a22919 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] [instance: 8b178cc0-db79-4ec2-8962-f31b936f8eff] Deleting contents of the VM from datastore datastore1 {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1425.549639] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-7d5c54f5-351b-4d76-a79b-bca198a22919 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Deleting the datastore file [datastore1] 8b178cc0-db79-4ec2-8962-f31b936f8eff {{(pid=62519) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1425.551047] env[62519]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c3ecd26a-8cc5-4d26-a56f-02ae54de154b {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1425.556300] env[62519]: DEBUG oslo_vmware.api [None req-e8c6971a-d3e1-4194-9146-e4c243998a24 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Waiting for the task: (returnval){ [ 1425.556300] env[62519]: value = "task-1802024" [ 1425.556300] env[62519]: _type = "Task" [ 1425.556300] env[62519]: } to complete. 
{{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1425.560816] env[62519]: DEBUG oslo_vmware.api [None req-7d5c54f5-351b-4d76-a79b-bca198a22919 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Waiting for the task: (returnval){ [ 1425.560816] env[62519]: value = "task-1802025" [ 1425.560816] env[62519]: _type = "Task" [ 1425.560816] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1425.567823] env[62519]: DEBUG oslo_vmware.api [None req-e8c6971a-d3e1-4194-9146-e4c243998a24 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Task: {'id': task-1802024, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1425.573158] env[62519]: DEBUG oslo_vmware.api [None req-7d5c54f5-351b-4d76-a79b-bca198a22919 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Task: {'id': task-1802025, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1425.618765] env[62519]: DEBUG nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Instance 40c7a9b8-d541-464a-ba87-76cfc183ae31 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62519) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1425.618765] env[62519]: DEBUG nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Total usable vcpus: 48, total allocated vcpus: 13 {{(pid=62519) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1425.618765] env[62519]: DEBUG nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=3072MB phys_disk=200GB used_disk=13GB total_vcpus=48 used_vcpus=13 pci_stats=[] {{(pid=62519) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1425.777129] env[62519]: DEBUG oslo_vmware.api [None req-62794cdc-f128-48b3-a56b-aae18e2e0aa4 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Task: {'id': task-1802023, 'name': ReconfigVM_Task, 'duration_secs': 0.323598} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1425.777341] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-62794cdc-f128-48b3-a56b-aae18e2e0aa4 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] [instance: fe350d30-6fbd-4813-9634-ed05984fecfd] Reconfigured VM instance instance-00000002 to detach disk 2000 {{(pid=62519) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1425.778177] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb19cdc2-ea8b-4ef4-b4ee-3e8219d4e9a3 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1425.809954] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-62794cdc-f128-48b3-a56b-aae18e2e0aa4 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] [instance: fe350d30-6fbd-4813-9634-ed05984fecfd] Reconfiguring VM instance instance-00000002 to attach disk [datastore1] fe350d30-6fbd-4813-9634-ed05984fecfd/fe350d30-6fbd-4813-9634-ed05984fecfd.vmdk or device None with type thin {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1425.813141] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4cb7697c-937a-4f7e-bfa1-9966a4aae076 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1425.838319] env[62519]: DEBUG oslo_vmware.api [None req-62794cdc-f128-48b3-a56b-aae18e2e0aa4 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Waiting for the task: (returnval){ [ 1425.838319] env[62519]: value = "task-1802026" [ 1425.838319] env[62519]: _type = "Task" [ 1425.838319] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1425.849333] env[62519]: DEBUG oslo_vmware.api [None req-62794cdc-f128-48b3-a56b-aae18e2e0aa4 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Task: {'id': task-1802026, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1425.978427] env[62519]: DEBUG oslo_vmware.api [None req-d788e678-2770-4a2b-8ed0-e8cc74b5f814 tempest-ServersAdmin275Test-1657935187 tempest-ServersAdmin275Test-1657935187-project-member] Task: {'id': task-1802022, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.525008} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1425.978545] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-d788e678-2770-4a2b-8ed0-e8cc74b5f814 tempest-ServersAdmin275Test-1657935187 tempest-ServersAdmin275Test-1657935187-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk to [datastore1] c07e4d30-44bc-417b-8137-97f974aec932/c07e4d30-44bc-417b-8137-97f974aec932.vmdk {{(pid=62519) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1425.978782] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-d788e678-2770-4a2b-8ed0-e8cc74b5f814 tempest-ServersAdmin275Test-1657935187 tempest-ServersAdmin275Test-1657935187-project-member] [instance: c07e4d30-44bc-417b-8137-97f974aec932] Extending root virtual disk to 1048576 {{(pid=62519) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1425.980230] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-9f226c29-b2ec-41c8-9b09-2e7679628fcb {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1425.993285] env[62519]: DEBUG oslo_vmware.api [None req-d788e678-2770-4a2b-8ed0-e8cc74b5f814 tempest-ServersAdmin275Test-1657935187 tempest-ServersAdmin275Test-1657935187-project-member] Waiting for the task: (returnval){ [ 1425.993285] env[62519]: value = "task-1802027" [ 1425.993285] env[62519]: _type = "Task" [ 1425.993285] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1426.003826] env[62519]: DEBUG oslo_vmware.api [None req-d788e678-2770-4a2b-8ed0-e8cc74b5f814 tempest-ServersAdmin275Test-1657935187 tempest-ServersAdmin275Test-1657935187-project-member] Task: {'id': task-1802027, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1426.071668] env[62519]: DEBUG oslo_vmware.api [None req-e8c6971a-d3e1-4194-9146-e4c243998a24 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Task: {'id': task-1802024, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.188713} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1426.073017] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-e8c6971a-d3e1-4194-9146-e4c243998a24 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Deleted the datastore file {{(pid=62519) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1426.073017] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-e8c6971a-d3e1-4194-9146-e4c243998a24 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] [instance: 31f55ece-82e6-40ad-ad7c-1af645f307bf] Deleted contents of the VM from datastore datastore1 {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1426.073017] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-e8c6971a-d3e1-4194-9146-e4c243998a24 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] [instance: 31f55ece-82e6-40ad-ad7c-1af645f307bf] Instance destroyed {{(pid=62519) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1426.073017] env[62519]: INFO nova.compute.manager [None req-e8c6971a-d3e1-4194-9146-e4c243998a24 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] [instance: 31f55ece-82e6-40ad-ad7c-1af645f307bf] Took 3.53 seconds to destroy the instance on the hypervisor. [ 1426.073228] env[62519]: DEBUG oslo.service.loopingcall [None req-e8c6971a-d3e1-4194-9146-e4c243998a24 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62519) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1426.073773] env[62519]: DEBUG nova.compute.manager [-] [instance: 31f55ece-82e6-40ad-ad7c-1af645f307bf] Deallocating network for instance {{(pid=62519) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2297}} [ 1426.073863] env[62519]: DEBUG nova.network.neutron [-] [instance: 31f55ece-82e6-40ad-ad7c-1af645f307bf] deallocate_for_instance() {{(pid=62519) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1426.088184] env[62519]: DEBUG oslo_vmware.api [None req-7d5c54f5-351b-4d76-a79b-bca198a22919 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Task: {'id': task-1802025, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.189452} completed successfully. 
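The destroy path logged for instances 31f55ece-... and 8b178cc0-... is: UnregisterVM, delete the instance directory with FileManager.DeleteDatastoreFile_Task, then deallocate the Neutron ports. A compressed sketch of the two vSphere calls via oslo.vmware, assuming a reachable vCenter; the VM and datacenter morefs are placeholders and the datastore path is taken from the log:

from oslo_vmware import api
from oslo_vmware import vim_util

session = api.VMwareAPISession('vcenter.example.org', 'admin', 'secret', 10, 0.5)
vm_ref = vim_util.get_moref('vm-67890', 'VirtualMachine')    # hypothetical
dc_ref = vim_util.get_moref('datacenter-2', 'Datacenter')    # hypothetical

# UnregisterVM is a plain call (no task); it only removes the VM from
# inventory, which is why the files still have to be deleted afterwards.
session.invoke_api(session.vim, 'UnregisterVM', vm_ref)

file_mgr = session.vim.service_content.fileManager
task = session.invoke_api(session.vim, 'DeleteDatastoreFile_Task', file_mgr,
                          name='[datastore1] 8b178cc0-db79-4ec2-8962-f31b936f8eff',
                          datacenter=dc_ref)
session.wait_for_task(task)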
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1426.089520] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-7d5c54f5-351b-4d76-a79b-bca198a22919 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Deleted the datastore file {{(pid=62519) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1426.089520] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-7d5c54f5-351b-4d76-a79b-bca198a22919 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] [instance: 8b178cc0-db79-4ec2-8962-f31b936f8eff] Deleted contents of the VM from datastore datastore1 {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1426.089740] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-7d5c54f5-351b-4d76-a79b-bca198a22919 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] [instance: 8b178cc0-db79-4ec2-8962-f31b936f8eff] Instance destroyed {{(pid=62519) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1426.090059] env[62519]: INFO nova.compute.manager [None req-7d5c54f5-351b-4d76-a79b-bca198a22919 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] [instance: 8b178cc0-db79-4ec2-8962-f31b936f8eff] Took 1.56 seconds to destroy the instance on the hypervisor. [ 1426.090366] env[62519]: DEBUG oslo.service.loopingcall [None req-7d5c54f5-351b-4d76-a79b-bca198a22919 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62519) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1426.090660] env[62519]: DEBUG nova.compute.manager [-] [instance: 8b178cc0-db79-4ec2-8962-f31b936f8eff] Deallocating network for instance {{(pid=62519) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2297}} [ 1426.090740] env[62519]: DEBUG nova.network.neutron [-] [instance: 8b178cc0-db79-4ec2-8962-f31b936f8eff] deallocate_for_instance() {{(pid=62519) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1426.286181] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec605f3c-fbc7-4052-b164-6baaded754e9 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1426.298289] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-157f2057-832e-4fc2-bbec-163f6c0c2a94 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1426.335159] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1706895b-6a75-4570-8f0c-50c87d496987 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1426.349805] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-099704e4-16ee-4d55-b554-23eb77ca97ee {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1426.356814] env[62519]: DEBUG oslo_vmware.api [None req-62794cdc-f128-48b3-a56b-aae18e2e0aa4 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Task: {'id': 
task-1802026, 'name': ReconfigVM_Task, 'duration_secs': 0.281171} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1426.357630] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-62794cdc-f128-48b3-a56b-aae18e2e0aa4 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] [instance: fe350d30-6fbd-4813-9634-ed05984fecfd] Reconfigured VM instance instance-00000002 to attach disk [datastore1] fe350d30-6fbd-4813-9634-ed05984fecfd/fe350d30-6fbd-4813-9634-ed05984fecfd.vmdk or device None with type thin {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1426.357850] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-62794cdc-f128-48b3-a56b-aae18e2e0aa4 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] [instance: fe350d30-6fbd-4813-9634-ed05984fecfd] Updating instance 'fe350d30-6fbd-4813-9634-ed05984fecfd' progress to 50 {{(pid=62519) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1426.371321] env[62519]: DEBUG nova.compute.provider_tree [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Inventory has not changed in ProviderTree for provider: f8ca0d98-9158-4b85-ae0e-b106f966dd44 {{(pid=62519) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1426.505524] env[62519]: DEBUG oslo_vmware.api [None req-d788e678-2770-4a2b-8ed0-e8cc74b5f814 tempest-ServersAdmin275Test-1657935187 tempest-ServersAdmin275Test-1657935187-project-member] Task: {'id': task-1802027, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.116346} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1426.505808] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-d788e678-2770-4a2b-8ed0-e8cc74b5f814 tempest-ServersAdmin275Test-1657935187 tempest-ServersAdmin275Test-1657935187-project-member] [instance: c07e4d30-44bc-417b-8137-97f974aec932] Extended root virtual disk {{(pid=62519) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1426.506703] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38780fde-3e4c-43c4-a8ed-443291ab83f0 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1426.532010] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-d788e678-2770-4a2b-8ed0-e8cc74b5f814 tempest-ServersAdmin275Test-1657935187 tempest-ServersAdmin275Test-1657935187-project-member] [instance: c07e4d30-44bc-417b-8137-97f974aec932] Reconfiguring VM instance instance-0000000c to attach disk [datastore1] c07e4d30-44bc-417b-8137-97f974aec932/c07e4d30-44bc-417b-8137-97f974aec932.vmdk or device None with type sparse {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1426.532543] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8850eff8-958d-48a8-93fe-d5386a29ffb6 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1426.556486] env[62519]: DEBUG oslo_vmware.api [None req-d788e678-2770-4a2b-8ed0-e8cc74b5f814 tempest-ServersAdmin275Test-1657935187 tempest-ServersAdmin275Test-1657935187-project-member] Waiting for the task: (returnval){ [ 1426.556486] env[62519]: value = "task-1802028" [ 1426.556486] env[62519]: _type = "Task" 
[ 1426.556486] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1426.569612] env[62519]: DEBUG oslo_vmware.api [None req-d788e678-2770-4a2b-8ed0-e8cc74b5f814 tempest-ServersAdmin275Test-1657935187 tempest-ServersAdmin275Test-1657935187-project-member] Task: {'id': task-1802028, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1426.876871] env[62519]: DEBUG nova.scheduler.client.report [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Inventory has not changed for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1426.884104] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-335b9d08-3d27-4283-b781-248669b2c3ae {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1426.889672] env[62519]: DEBUG oslo_concurrency.lockutils [None req-b63f15ff-942a-4ceb-acce-0511e4a5332d tempest-ServersWithSpecificFlavorTestJSON-2102225660 tempest-ServersWithSpecificFlavorTestJSON-2102225660-project-member] Lock "029ee07c-705d-452b-9b14-385d69f2fbbb" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 31.596s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1426.921721] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2e037b4-545b-415b-928b-1deca11f3699 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1426.945780] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-62794cdc-f128-48b3-a56b-aae18e2e0aa4 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] [instance: fe350d30-6fbd-4813-9634-ed05984fecfd] Updating instance 'fe350d30-6fbd-4813-9634-ed05984fecfd' progress to 67 {{(pid=62519) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1427.068424] env[62519]: DEBUG oslo_vmware.api [None req-d788e678-2770-4a2b-8ed0-e8cc74b5f814 tempest-ServersAdmin275Test-1657935187 tempest-ServersAdmin275Test-1657935187-project-member] Task: {'id': task-1802028, 'name': ReconfigVM_Task, 'duration_secs': 0.270196} completed successfully. 
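The "Inventory has not changed for provider ..." line prints the inventory payload the scheduler report client keeps in sync with Placement; when it does change, it is written with a PUT to the resource provider's inventories. A sketch of that call against the Placement HTTP API directly; the endpoint, token, microversion and provider generation are placeholders, and Nova itself goes through keystoneauth rather than requests:

import requests

inventories = {
    'VCPU':      {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16,
                  'step_size': 1, 'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1,
                  'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0},
    'DISK_GB':   {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158,
                  'step_size': 1, 'allocation_ratio': 1.0},
}

resp = requests.put(
    'http://placement.example.org/resource_providers/'
    'f8ca0d98-9158-4b85-ae0e-b106f966dd44/inventories',
    headers={'X-Auth-Token': 'TOKEN',                     # placeholder
             'OpenStack-API-Version': 'placement 1.26'},  # assumed microversion
    json={'resource_provider_generation': 42,             # placeholder
          'inventories': inventories})
resp.raise_for_status()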
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1427.068424] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-d788e678-2770-4a2b-8ed0-e8cc74b5f814 tempest-ServersAdmin275Test-1657935187 tempest-ServersAdmin275Test-1657935187-project-member] [instance: c07e4d30-44bc-417b-8137-97f974aec932] Reconfigured VM instance instance-0000000c to attach disk [datastore1] c07e4d30-44bc-417b-8137-97f974aec932/c07e4d30-44bc-417b-8137-97f974aec932.vmdk or device None with type sparse {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1427.069400] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-aab47c5b-7308-47e6-bc28-40c41225837b {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1427.077470] env[62519]: DEBUG oslo_vmware.api [None req-d788e678-2770-4a2b-8ed0-e8cc74b5f814 tempest-ServersAdmin275Test-1657935187 tempest-ServersAdmin275Test-1657935187-project-member] Waiting for the task: (returnval){ [ 1427.077470] env[62519]: value = "task-1802029" [ 1427.077470] env[62519]: _type = "Task" [ 1427.077470] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1427.088023] env[62519]: DEBUG oslo_vmware.api [None req-d788e678-2770-4a2b-8ed0-e8cc74b5f814 tempest-ServersAdmin275Test-1657935187 tempest-ServersAdmin275Test-1657935187-project-member] Task: {'id': task-1802029, 'name': Rename_Task} progress is 5%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1427.382761] env[62519]: DEBUG nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62519) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1427.383074] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 9.925s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1427.383424] env[62519]: DEBUG oslo_concurrency.lockutils [None req-39a021c3-8855-476c-8305-d806d48e02a7 tempest-VolumesAssistedSnapshotsTest-904167312 tempest-VolumesAssistedSnapshotsTest-904167312-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 24.049s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1427.385139] env[62519]: INFO nova.compute.claims [None req-39a021c3-8855-476c-8305-d806d48e02a7 tempest-VolumesAssistedSnapshotsTest-904167312 tempest-VolumesAssistedSnapshotsTest-904167312-project-member] [instance: 91902e7f-8c15-447b-a3a8-04433434b1b6] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1427.394021] env[62519]: DEBUG nova.compute.manager [None req-d6584dc1-dea2-47ef-848a-6ee291fa58db tempest-ServersAdminNegativeTestJSON-2096466474 tempest-ServersAdminNegativeTestJSON-2096466474-project-member] [instance: 681ef7a9-3b24-450a-9034-6d30177995d7] Starting instance... 
{{(pid=62519) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2441}} [ 1427.425679] env[62519]: DEBUG nova.network.neutron [-] [instance: 31f55ece-82e6-40ad-ad7c-1af645f307bf] Updating instance_info_cache with network_info: [] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1427.428287] env[62519]: DEBUG nova.network.neutron [-] [instance: 8b178cc0-db79-4ec2-8962-f31b936f8eff] Updating instance_info_cache with network_info: [] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1427.516864] env[62519]: DEBUG nova.network.neutron [None req-62794cdc-f128-48b3-a56b-aae18e2e0aa4 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] [instance: fe350d30-6fbd-4813-9634-ed05984fecfd] Port 6cce4a48-b732-4bd4-a39e-bbc701b31b3b binding to destination host cpu-1 is already ACTIVE {{(pid=62519) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 1427.593672] env[62519]: DEBUG oslo_vmware.api [None req-d788e678-2770-4a2b-8ed0-e8cc74b5f814 tempest-ServersAdmin275Test-1657935187 tempest-ServersAdmin275Test-1657935187-project-member] Task: {'id': task-1802029, 'name': Rename_Task, 'duration_secs': 0.135315} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1427.593672] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-d788e678-2770-4a2b-8ed0-e8cc74b5f814 tempest-ServersAdmin275Test-1657935187 tempest-ServersAdmin275Test-1657935187-project-member] [instance: c07e4d30-44bc-417b-8137-97f974aec932] Powering on the VM {{(pid=62519) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1427.594697] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-0ddb8ca9-93b0-4b3f-84c6-dd66af6fdb45 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1427.607277] env[62519]: DEBUG oslo_vmware.api [None req-d788e678-2770-4a2b-8ed0-e8cc74b5f814 tempest-ServersAdmin275Test-1657935187 tempest-ServersAdmin275Test-1657935187-project-member] Waiting for the task: (returnval){ [ 1427.607277] env[62519]: value = "task-1802030" [ 1427.607277] env[62519]: _type = "Task" [ 1427.607277] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1427.615872] env[62519]: DEBUG oslo_vmware.api [None req-d788e678-2770-4a2b-8ed0-e8cc74b5f814 tempest-ServersAdmin275Test-1657935187 tempest-ServersAdmin275Test-1657935187-project-member] Task: {'id': task-1802030, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1427.618526] env[62519]: DEBUG nova.compute.manager [req-55382eab-6152-42ec-a9c9-3b8e9e19b48d req-a8fdf019-4cb7-4413-966c-672534ab509f service nova] [instance: 31f55ece-82e6-40ad-ad7c-1af645f307bf] Received event network-vif-deleted-f5bb4ca5-90d8-4da8-a8c0-bbaf14254969 {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1427.921955] env[62519]: DEBUG oslo_concurrency.lockutils [None req-d6584dc1-dea2-47ef-848a-6ee291fa58db tempest-ServersAdminNegativeTestJSON-2096466474 tempest-ServersAdminNegativeTestJSON-2096466474-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1427.931210] env[62519]: INFO nova.compute.manager [-] [instance: 31f55ece-82e6-40ad-ad7c-1af645f307bf] Took 1.86 seconds to deallocate network for instance. [ 1427.931624] env[62519]: INFO nova.compute.manager [-] [instance: 8b178cc0-db79-4ec2-8962-f31b936f8eff] Took 1.84 seconds to deallocate network for instance. [ 1428.121974] env[62519]: DEBUG oslo_vmware.api [None req-d788e678-2770-4a2b-8ed0-e8cc74b5f814 tempest-ServersAdmin275Test-1657935187 tempest-ServersAdmin275Test-1657935187-project-member] Task: {'id': task-1802030, 'name': PowerOnVM_Task, 'duration_secs': 0.472466} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1428.122165] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-d788e678-2770-4a2b-8ed0-e8cc74b5f814 tempest-ServersAdmin275Test-1657935187 tempest-ServersAdmin275Test-1657935187-project-member] [instance: c07e4d30-44bc-417b-8137-97f974aec932] Powered on the VM {{(pid=62519) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1428.122515] env[62519]: DEBUG nova.compute.manager [None req-d788e678-2770-4a2b-8ed0-e8cc74b5f814 tempest-ServersAdmin275Test-1657935187 tempest-ServersAdmin275Test-1657935187-project-member] [instance: c07e4d30-44bc-417b-8137-97f974aec932] Checking state {{(pid=62519) _get_power_state /opt/stack/nova/nova/compute/manager.py:1799}} [ 1428.123791] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c36b971-0fbf-44bd-812a-542fe73fef7e {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1428.160431] env[62519]: DEBUG nova.compute.manager [req-812a132c-be36-4674-9bc5-92ca92acbdf9 req-b5203608-5e47-4256-81fd-fbf9c64ca71a service nova] [instance: 029ee07c-705d-452b-9b14-385d69f2fbbb] Received event network-changed-d260fe42-dd87-4c6b-a292-ce14b9314974 {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1428.160619] env[62519]: DEBUG nova.compute.manager [req-812a132c-be36-4674-9bc5-92ca92acbdf9 req-b5203608-5e47-4256-81fd-fbf9c64ca71a service nova] [instance: 029ee07c-705d-452b-9b14-385d69f2fbbb] Refreshing instance network info cache due to event network-changed-d260fe42-dd87-4c6b-a292-ce14b9314974. 
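The "Received event network-changed-<port>" entries are Neutron telling Nova about a port update through the os-server-external-events API; the compute manager reacts by refreshing the instance's network info cache for that port, which is what the following refresh_cache lock and refresh lines show. A minimal sketch of the notification itself; the endpoint and token are placeholders, while the server and port UUIDs are the ones from this log:

import requests

payload = {'events': [{
    'name': 'network-changed',
    'server_uuid': '029ee07c-705d-452b-9b14-385d69f2fbbb',
    'tag': 'd260fe42-dd87-4c6b-a292-ce14b9314974',   # the Neutron port id
}]}

resp = requests.post(
    'http://nova-api.example.org/v2.1/os-server-external-events',
    headers={'X-Auth-Token': 'TOKEN'},   # placeholder service token
    json=payload)
print(resp.status_code, resp.json())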
{{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11628}} [ 1428.160872] env[62519]: DEBUG oslo_concurrency.lockutils [req-812a132c-be36-4674-9bc5-92ca92acbdf9 req-b5203608-5e47-4256-81fd-fbf9c64ca71a service nova] Acquiring lock "refresh_cache-029ee07c-705d-452b-9b14-385d69f2fbbb" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1428.160961] env[62519]: DEBUG oslo_concurrency.lockutils [req-812a132c-be36-4674-9bc5-92ca92acbdf9 req-b5203608-5e47-4256-81fd-fbf9c64ca71a service nova] Acquired lock "refresh_cache-029ee07c-705d-452b-9b14-385d69f2fbbb" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1428.161129] env[62519]: DEBUG nova.network.neutron [req-812a132c-be36-4674-9bc5-92ca92acbdf9 req-b5203608-5e47-4256-81fd-fbf9c64ca71a service nova] [instance: 029ee07c-705d-452b-9b14-385d69f2fbbb] Refreshing network info cache for port d260fe42-dd87-4c6b-a292-ce14b9314974 {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1428.447793] env[62519]: DEBUG oslo_concurrency.lockutils [None req-e8c6971a-d3e1-4194-9146-e4c243998a24 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1428.462489] env[62519]: DEBUG oslo_concurrency.lockutils [None req-7d5c54f5-351b-4d76-a79b-bca198a22919 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1428.547917] env[62519]: DEBUG oslo_concurrency.lockutils [None req-62794cdc-f128-48b3-a56b-aae18e2e0aa4 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Acquiring lock "fe350d30-6fbd-4813-9634-ed05984fecfd-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1428.548193] env[62519]: DEBUG oslo_concurrency.lockutils [None req-62794cdc-f128-48b3-a56b-aae18e2e0aa4 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Lock "fe350d30-6fbd-4813-9634-ed05984fecfd-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1428.548337] env[62519]: DEBUG oslo_concurrency.lockutils [None req-62794cdc-f128-48b3-a56b-aae18e2e0aa4 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Lock "fe350d30-6fbd-4813-9634-ed05984fecfd-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1428.648018] env[62519]: DEBUG oslo_concurrency.lockutils [None req-d788e678-2770-4a2b-8ed0-e8cc74b5f814 tempest-ServersAdmin275Test-1657935187 tempest-ServersAdmin275Test-1657935187-project-member] Acquiring lock "compute_resources" by 
"nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1428.806257] env[62519]: DEBUG oslo_concurrency.lockutils [None req-6acc3e5a-9e65-48ca-804d-bd19df3814de tempest-FloatingIPsAssociationTestJSON-131832216 tempest-FloatingIPsAssociationTestJSON-131832216-project-member] Acquiring lock "09eefc1a-011b-4d2c-ab75-a1fcee740907" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1428.806550] env[62519]: DEBUG oslo_concurrency.lockutils [None req-6acc3e5a-9e65-48ca-804d-bd19df3814de tempest-FloatingIPsAssociationTestJSON-131832216 tempest-FloatingIPsAssociationTestJSON-131832216-project-member] Lock "09eefc1a-011b-4d2c-ab75-a1fcee740907" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1429.116465] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94cfcc01-e524-4334-95d3-f678defe9164 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1429.130051] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e8398fb-a178-44f9-b449-19542143e0aa {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1429.170512] env[62519]: DEBUG nova.network.neutron [req-812a132c-be36-4674-9bc5-92ca92acbdf9 req-b5203608-5e47-4256-81fd-fbf9c64ca71a service nova] [instance: 029ee07c-705d-452b-9b14-385d69f2fbbb] Updated VIF entry in instance network info cache for port d260fe42-dd87-4c6b-a292-ce14b9314974. 
{{(pid=62519) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1429.171019] env[62519]: DEBUG nova.network.neutron [req-812a132c-be36-4674-9bc5-92ca92acbdf9 req-b5203608-5e47-4256-81fd-fbf9c64ca71a service nova] [instance: 029ee07c-705d-452b-9b14-385d69f2fbbb] Updating instance_info_cache with network_info: [{"id": "d260fe42-dd87-4c6b-a292-ce14b9314974", "address": "fa:16:3e:84:c6:c7", "network": {"id": "87dc972b-a043-4db2-b67a-2efe6d40d1aa", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-1993987139-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.162", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8929cfd4892c4e399062483665500dd9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6b8137fc-f23d-49b1-b19c-3123a5588f34", "external-id": "nsx-vlan-transportzone-709", "segmentation_id": 709, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd260fe42-dd", "ovs_interfaceid": "d260fe42-dd87-4c6b-a292-ce14b9314974", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1429.172888] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7cfc848-fab8-40dd-8f78-48e55d761904 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1429.183537] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0166a37-ea5b-4033-b430-884546845a75 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1429.198369] env[62519]: DEBUG nova.compute.provider_tree [None req-39a021c3-8855-476c-8305-d806d48e02a7 tempest-VolumesAssistedSnapshotsTest-904167312 tempest-VolumesAssistedSnapshotsTest-904167312-project-member] Inventory has not changed in ProviderTree for provider: f8ca0d98-9158-4b85-ae0e-b106f966dd44 {{(pid=62519) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1429.382207] env[62519]: DEBUG oslo_service.periodic_task [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62519) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1429.382574] env[62519]: DEBUG oslo_service.periodic_task [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=62519) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1429.663414] env[62519]: DEBUG oslo_concurrency.lockutils [None req-62794cdc-f128-48b3-a56b-aae18e2e0aa4 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Acquiring lock "refresh_cache-fe350d30-6fbd-4813-9634-ed05984fecfd" {{(pid=62519) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1429.663669] env[62519]: DEBUG oslo_concurrency.lockutils [None req-62794cdc-f128-48b3-a56b-aae18e2e0aa4 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Acquired lock "refresh_cache-fe350d30-6fbd-4813-9634-ed05984fecfd" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1429.663765] env[62519]: DEBUG nova.network.neutron [None req-62794cdc-f128-48b3-a56b-aae18e2e0aa4 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] [instance: fe350d30-6fbd-4813-9634-ed05984fecfd] Building network info cache for instance {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1429.676881] env[62519]: DEBUG oslo_concurrency.lockutils [req-812a132c-be36-4674-9bc5-92ca92acbdf9 req-b5203608-5e47-4256-81fd-fbf9c64ca71a service nova] Releasing lock "refresh_cache-029ee07c-705d-452b-9b14-385d69f2fbbb" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1429.701919] env[62519]: DEBUG nova.scheduler.client.report [None req-39a021c3-8855-476c-8305-d806d48e02a7 tempest-VolumesAssistedSnapshotsTest-904167312 tempest-VolumesAssistedSnapshotsTest-904167312-project-member] Inventory has not changed for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1429.894989] env[62519]: DEBUG oslo_concurrency.lockutils [None req-8bccbc50-fc90-49af-b28a-2ab70ae51360 tempest-ServerAddressesNegativeTestJSON-801003583 tempest-ServerAddressesNegativeTestJSON-801003583-project-member] Acquiring lock "22380aef-c725-43a0-a957-06ced9518c21" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1429.895633] env[62519]: DEBUG oslo_concurrency.lockutils [None req-8bccbc50-fc90-49af-b28a-2ab70ae51360 tempest-ServerAddressesNegativeTestJSON-801003583 tempest-ServerAddressesNegativeTestJSON-801003583-project-member] Lock "22380aef-c725-43a0-a957-06ced9518c21" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1429.897057] env[62519]: DEBUG oslo_service.periodic_task [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62519) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1429.897340] env[62519]: DEBUG nova.compute.manager [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Starting heal instance info cache {{(pid=62519) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10418}} [ 1429.897506] env[62519]: DEBUG nova.compute.manager [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] 
Rebuilding the list of instances to heal {{(pid=62519) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10422}} [ 1430.104394] env[62519]: DEBUG nova.compute.manager [req-69e0bb37-8874-4b82-9fc5-05302ab39ee1 req-28dd69bb-3d6a-438d-b97e-35ce9720dae0 service nova] [instance: 8b178cc0-db79-4ec2-8962-f31b936f8eff] Received event network-vif-deleted-f1b8639e-9539-46c0-8663-e7017bf77486 {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1430.107034] env[62519]: INFO nova.compute.manager [None req-4ab4b288-3acf-467a-a329-77277bf523db tempest-ServersAdmin275Test-1501358753 tempest-ServersAdmin275Test-1501358753-project-admin] [instance: c07e4d30-44bc-417b-8137-97f974aec932] Rebuilding instance [ 1430.171931] env[62519]: DEBUG nova.compute.manager [None req-4ab4b288-3acf-467a-a329-77277bf523db tempest-ServersAdmin275Test-1501358753 tempest-ServersAdmin275Test-1501358753-project-admin] [instance: c07e4d30-44bc-417b-8137-97f974aec932] Checking state {{(pid=62519) _get_power_state /opt/stack/nova/nova/compute/manager.py:1799}} [ 1430.174376] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7d49220-e7b7-4a00-a4c9-4cc10e3d201e {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1430.207203] env[62519]: DEBUG oslo_concurrency.lockutils [None req-39a021c3-8855-476c-8305-d806d48e02a7 tempest-VolumesAssistedSnapshotsTest-904167312 tempest-VolumesAssistedSnapshotsTest-904167312-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.824s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1430.207709] env[62519]: DEBUG nova.compute.manager [None req-39a021c3-8855-476c-8305-d806d48e02a7 tempest-VolumesAssistedSnapshotsTest-904167312 tempest-VolumesAssistedSnapshotsTest-904167312-project-member] [instance: 91902e7f-8c15-447b-a3a8-04433434b1b6] Start building networks asynchronously for instance. {{(pid=62519) _build_resources /opt/stack/nova/nova/compute/manager.py:2838}} [ 1430.211319] env[62519]: DEBUG oslo_concurrency.lockutils [None req-a045cbfb-ac93-4591-9bfc-366cc2fb264e tempest-VolumesAdminNegativeTest-694306887 tempest-VolumesAdminNegativeTest-694306887-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 24.794s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1430.212324] env[62519]: INFO nova.compute.claims [None req-a045cbfb-ac93-4591-9bfc-366cc2fb264e tempest-VolumesAdminNegativeTest-694306887 tempest-VolumesAdminNegativeTest-694306887-project-member] [instance: c61c893f-826b-4874-b253-de6fbffa9e5a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1430.406204] env[62519]: DEBUG nova.compute.manager [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] [instance: 91902e7f-8c15-447b-a3a8-04433434b1b6] Skipping network cache update for instance because it is Building. 
{{(pid=62519) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10431}} [ 1430.406574] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Acquiring lock "refresh_cache-fe350d30-6fbd-4813-9634-ed05984fecfd" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1430.599998] env[62519]: DEBUG nova.network.neutron [None req-62794cdc-f128-48b3-a56b-aae18e2e0aa4 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] [instance: fe350d30-6fbd-4813-9634-ed05984fecfd] Updating instance_info_cache with network_info: [{"id": "6cce4a48-b732-4bd4-a39e-bbc701b31b3b", "address": "fa:16:3e:21:41:9e", "network": {"id": "b4c20e4e-b4f0-4757-9ab0-b74eef10b678", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.181", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "4069337684d348e0a1ab4eb6a1a2b14d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fe99da4f-5630-4afd-918b-b327193d8489", "external-id": "nsx-vlan-transportzone-688", "segmentation_id": 688, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6cce4a48-b7", "ovs_interfaceid": "6cce4a48-b732-4bd4-a39e-bbc701b31b3b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1430.722279] env[62519]: DEBUG nova.compute.utils [None req-39a021c3-8855-476c-8305-d806d48e02a7 tempest-VolumesAssistedSnapshotsTest-904167312 tempest-VolumesAssistedSnapshotsTest-904167312-project-member] Using /dev/sd instead of None {{(pid=62519) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1430.727886] env[62519]: DEBUG nova.compute.manager [None req-39a021c3-8855-476c-8305-d806d48e02a7 tempest-VolumesAssistedSnapshotsTest-904167312 tempest-VolumesAssistedSnapshotsTest-904167312-project-member] [instance: 91902e7f-8c15-447b-a3a8-04433434b1b6] Allocating IP information in the background. 
{{(pid=62519) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1989}} [ 1430.728166] env[62519]: DEBUG nova.network.neutron [None req-39a021c3-8855-476c-8305-d806d48e02a7 tempest-VolumesAssistedSnapshotsTest-904167312 tempest-VolumesAssistedSnapshotsTest-904167312-project-member] [instance: 91902e7f-8c15-447b-a3a8-04433434b1b6] allocate_for_instance() {{(pid=62519) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1430.778548] env[62519]: DEBUG nova.policy [None req-39a021c3-8855-476c-8305-d806d48e02a7 tempest-VolumesAssistedSnapshotsTest-904167312 tempest-VolumesAssistedSnapshotsTest-904167312-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ab749b128a9b40ef86efca1af2909cc3', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '679c16f2d0ab479ab1e7395dab280c96', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62519) authorize /opt/stack/nova/nova/policy.py:192}} [ 1431.103598] env[62519]: DEBUG oslo_concurrency.lockutils [None req-62794cdc-f128-48b3-a56b-aae18e2e0aa4 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Releasing lock "refresh_cache-fe350d30-6fbd-4813-9634-ed05984fecfd" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1431.108643] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Acquired lock "refresh_cache-fe350d30-6fbd-4813-9634-ed05984fecfd" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1431.109021] env[62519]: DEBUG nova.network.neutron [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] [instance: fe350d30-6fbd-4813-9634-ed05984fecfd] Forcefully refreshing network info cache for instance {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2061}} [ 1431.109235] env[62519]: DEBUG nova.objects.instance [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Lazy-loading 'info_cache' on Instance uuid fe350d30-6fbd-4813-9634-ed05984fecfd {{(pid=62519) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1431.195050] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-4ab4b288-3acf-467a-a329-77277bf523db tempest-ServersAdmin275Test-1501358753 tempest-ServersAdmin275Test-1501358753-project-admin] [instance: c07e4d30-44bc-417b-8137-97f974aec932] Powering off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1431.195673] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ab449bbd-c8a0-4aa5-8084-c6d5e4a901ab {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1431.205393] env[62519]: DEBUG oslo_vmware.api [None req-4ab4b288-3acf-467a-a329-77277bf523db tempest-ServersAdmin275Test-1501358753 tempest-ServersAdmin275Test-1501358753-project-admin] Waiting for the task: (returnval){ [ 1431.205393] env[62519]: value = "task-1802031" [ 1431.205393] env[62519]: _type = "Task" [ 1431.205393] env[62519]: } to complete. 
{{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1431.220343] env[62519]: DEBUG oslo_vmware.api [None req-4ab4b288-3acf-467a-a329-77277bf523db tempest-ServersAdmin275Test-1501358753 tempest-ServersAdmin275Test-1501358753-project-admin] Task: {'id': task-1802031, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1431.228824] env[62519]: DEBUG nova.compute.manager [None req-39a021c3-8855-476c-8305-d806d48e02a7 tempest-VolumesAssistedSnapshotsTest-904167312 tempest-VolumesAssistedSnapshotsTest-904167312-project-member] [instance: 91902e7f-8c15-447b-a3a8-04433434b1b6] Start building block device mappings for instance. {{(pid=62519) _build_resources /opt/stack/nova/nova/compute/manager.py:2873}} [ 1431.264320] env[62519]: DEBUG nova.network.neutron [None req-39a021c3-8855-476c-8305-d806d48e02a7 tempest-VolumesAssistedSnapshotsTest-904167312 tempest-VolumesAssistedSnapshotsTest-904167312-project-member] [instance: 91902e7f-8c15-447b-a3a8-04433434b1b6] Successfully created port: 57934d0f-3be6-4e2d-9e86-b5500fae4b3b {{(pid=62519) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1431.637792] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61c64fe6-adc3-46a0-a4fc-b78e4adeb86a {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1431.660752] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b92f26d5-fd9b-4902-99a9-1a3e2dbe2a3b {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1431.670143] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-62794cdc-f128-48b3-a56b-aae18e2e0aa4 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] [instance: fe350d30-6fbd-4813-9634-ed05984fecfd] Updating instance 'fe350d30-6fbd-4813-9634-ed05984fecfd' progress to 83 {{(pid=62519) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1431.718637] env[62519]: DEBUG oslo_vmware.api [None req-4ab4b288-3acf-467a-a329-77277bf523db tempest-ServersAdmin275Test-1501358753 tempest-ServersAdmin275Test-1501358753-project-admin] Task: {'id': task-1802031, 'name': PowerOffVM_Task, 'duration_secs': 0.14388} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1431.718879] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-4ab4b288-3acf-467a-a329-77277bf523db tempest-ServersAdmin275Test-1501358753 tempest-ServersAdmin275Test-1501358753-project-admin] [instance: c07e4d30-44bc-417b-8137-97f974aec932] Powered off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1431.719380] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-4ab4b288-3acf-467a-a329-77277bf523db tempest-ServersAdmin275Test-1501358753 tempest-ServersAdmin275Test-1501358753-project-admin] [instance: c07e4d30-44bc-417b-8137-97f974aec932] Destroying instance {{(pid=62519) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1431.720492] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7dc187db-f76c-448c-9b82-133cbb52a8c3 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1431.733502] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-4ab4b288-3acf-467a-a329-77277bf523db tempest-ServersAdmin275Test-1501358753 tempest-ServersAdmin275Test-1501358753-project-admin] [instance: c07e4d30-44bc-417b-8137-97f974aec932] Unregistering the VM {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1431.733502] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-7829214d-1b64-4895-b7d8-361517c281e8 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1431.772315] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-4ab4b288-3acf-467a-a329-77277bf523db tempest-ServersAdmin275Test-1501358753 tempest-ServersAdmin275Test-1501358753-project-admin] [instance: c07e4d30-44bc-417b-8137-97f974aec932] Unregistered the VM {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1431.772315] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-4ab4b288-3acf-467a-a329-77277bf523db tempest-ServersAdmin275Test-1501358753 tempest-ServersAdmin275Test-1501358753-project-admin] [instance: c07e4d30-44bc-417b-8137-97f974aec932] Deleting contents of the VM from datastore datastore1 {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1431.772315] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-4ab4b288-3acf-467a-a329-77277bf523db tempest-ServersAdmin275Test-1501358753 tempest-ServersAdmin275Test-1501358753-project-admin] Deleting the datastore file [datastore1] c07e4d30-44bc-417b-8137-97f974aec932 {{(pid=62519) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1431.772315] env[62519]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-76cf78a4-a0b8-4af1-af71-356581ed2113 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1431.782811] env[62519]: DEBUG oslo_vmware.api [None req-4ab4b288-3acf-467a-a329-77277bf523db tempest-ServersAdmin275Test-1501358753 tempest-ServersAdmin275Test-1501358753-project-admin] Waiting for the task: (returnval){ [ 1431.782811] env[62519]: value = "task-1802033" [ 1431.782811] env[62519]: _type = "Task" [ 1431.782811] env[62519]: } to complete. 
{{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1431.792734] env[62519]: DEBUG oslo_vmware.api [None req-4ab4b288-3acf-467a-a329-77277bf523db tempest-ServersAdmin275Test-1501358753 tempest-ServersAdmin275Test-1501358753-project-admin] Task: {'id': task-1802033, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1431.861988] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c836023c-84a9-46b8-9c05-9cc16b624516 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1431.872836] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c0cfa95-83ca-4a9d-abb4-e4afd6dc2df0 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1431.915145] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1f7dac0-ec64-4b1b-9dc3-87b83b871fe4 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1431.924687] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a3d8fcf7-d748-49d7-8912-944053dfa75f {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1431.941230] env[62519]: DEBUG nova.compute.provider_tree [None req-a045cbfb-ac93-4591-9bfc-366cc2fb264e tempest-VolumesAdminNegativeTest-694306887 tempest-VolumesAdminNegativeTest-694306887-project-member] Inventory has not changed in ProviderTree for provider: f8ca0d98-9158-4b85-ae0e-b106f966dd44 {{(pid=62519) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1432.178697] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-62794cdc-f128-48b3-a56b-aae18e2e0aa4 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] [instance: fe350d30-6fbd-4813-9634-ed05984fecfd] Powering on the VM {{(pid=62519) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1432.179191] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f00bea20-caff-427b-b731-2808fd3a73db {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1432.191465] env[62519]: DEBUG oslo_vmware.api [None req-62794cdc-f128-48b3-a56b-aae18e2e0aa4 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Waiting for the task: (returnval){ [ 1432.191465] env[62519]: value = "task-1802034" [ 1432.191465] env[62519]: _type = "Task" [ 1432.191465] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1432.203746] env[62519]: DEBUG oslo_vmware.api [None req-62794cdc-f128-48b3-a56b-aae18e2e0aa4 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Task: {'id': task-1802034, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1432.250081] env[62519]: DEBUG nova.compute.manager [None req-39a021c3-8855-476c-8305-d806d48e02a7 tempest-VolumesAssistedSnapshotsTest-904167312 tempest-VolumesAssistedSnapshotsTest-904167312-project-member] [instance: 91902e7f-8c15-447b-a3a8-04433434b1b6] Start spawning the instance on the hypervisor. {{(pid=62519) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2647}} [ 1432.282784] env[62519]: DEBUG nova.virt.hardware [None req-39a021c3-8855-476c-8305-d806d48e02a7 tempest-VolumesAssistedSnapshotsTest-904167312 tempest-VolumesAssistedSnapshotsTest-904167312-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T07:56:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T07:55:55Z,direct_url=,disk_format='vmdk',id=15793716-f1d9-4a86-9030-717adf498693,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4069337684d348e0a1ab4eb6a1a2b14d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T07:55:56Z,virtual_size=,visibility=), allow threads: False {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1432.283278] env[62519]: DEBUG nova.virt.hardware [None req-39a021c3-8855-476c-8305-d806d48e02a7 tempest-VolumesAssistedSnapshotsTest-904167312 tempest-VolumesAssistedSnapshotsTest-904167312-project-member] Flavor limits 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1432.283629] env[62519]: DEBUG nova.virt.hardware [None req-39a021c3-8855-476c-8305-d806d48e02a7 tempest-VolumesAssistedSnapshotsTest-904167312 tempest-VolumesAssistedSnapshotsTest-904167312-project-member] Image limits 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1432.283967] env[62519]: DEBUG nova.virt.hardware [None req-39a021c3-8855-476c-8305-d806d48e02a7 tempest-VolumesAssistedSnapshotsTest-904167312 tempest-VolumesAssistedSnapshotsTest-904167312-project-member] Flavor pref 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1432.287018] env[62519]: DEBUG nova.virt.hardware [None req-39a021c3-8855-476c-8305-d806d48e02a7 tempest-VolumesAssistedSnapshotsTest-904167312 tempest-VolumesAssistedSnapshotsTest-904167312-project-member] Image pref 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1432.287018] env[62519]: DEBUG nova.virt.hardware [None req-39a021c3-8855-476c-8305-d806d48e02a7 tempest-VolumesAssistedSnapshotsTest-904167312 tempest-VolumesAssistedSnapshotsTest-904167312-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1432.287018] env[62519]: DEBUG nova.virt.hardware [None req-39a021c3-8855-476c-8305-d806d48e02a7 tempest-VolumesAssistedSnapshotsTest-904167312 tempest-VolumesAssistedSnapshotsTest-904167312-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum 
VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1432.287018] env[62519]: DEBUG nova.virt.hardware [None req-39a021c3-8855-476c-8305-d806d48e02a7 tempest-VolumesAssistedSnapshotsTest-904167312 tempest-VolumesAssistedSnapshotsTest-904167312-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62519) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1432.287018] env[62519]: DEBUG nova.virt.hardware [None req-39a021c3-8855-476c-8305-d806d48e02a7 tempest-VolumesAssistedSnapshotsTest-904167312 tempest-VolumesAssistedSnapshotsTest-904167312-project-member] Got 1 possible topologies {{(pid=62519) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1432.287375] env[62519]: DEBUG nova.virt.hardware [None req-39a021c3-8855-476c-8305-d806d48e02a7 tempest-VolumesAssistedSnapshotsTest-904167312 tempest-VolumesAssistedSnapshotsTest-904167312-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1432.287375] env[62519]: DEBUG nova.virt.hardware [None req-39a021c3-8855-476c-8305-d806d48e02a7 tempest-VolumesAssistedSnapshotsTest-904167312 tempest-VolumesAssistedSnapshotsTest-904167312-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1432.287375] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ec61654-a268-48b0-8491-578e8f5aadc9 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1432.301589] env[62519]: DEBUG oslo_vmware.api [None req-4ab4b288-3acf-467a-a329-77277bf523db tempest-ServersAdmin275Test-1501358753 tempest-ServersAdmin275Test-1501358753-project-admin] Task: {'id': task-1802033, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.104569} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1432.303979] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-4ab4b288-3acf-467a-a329-77277bf523db tempest-ServersAdmin275Test-1501358753 tempest-ServersAdmin275Test-1501358753-project-admin] Deleted the datastore file {{(pid=62519) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1432.304350] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-4ab4b288-3acf-467a-a329-77277bf523db tempest-ServersAdmin275Test-1501358753 tempest-ServersAdmin275Test-1501358753-project-admin] [instance: c07e4d30-44bc-417b-8137-97f974aec932] Deleted contents of the VM from datastore datastore1 {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1432.304659] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-4ab4b288-3acf-467a-a329-77277bf523db tempest-ServersAdmin275Test-1501358753 tempest-ServersAdmin275Test-1501358753-project-admin] [instance: c07e4d30-44bc-417b-8137-97f974aec932] Instance destroyed {{(pid=62519) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1432.313042] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23a82b11-584d-4a4f-8317-3cfd13f5c026 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1432.444807] env[62519]: DEBUG nova.scheduler.client.report [None req-a045cbfb-ac93-4591-9bfc-366cc2fb264e tempest-VolumesAdminNegativeTest-694306887 tempest-VolumesAdminNegativeTest-694306887-project-member] Inventory has not changed for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1432.705066] env[62519]: DEBUG oslo_vmware.api [None req-62794cdc-f128-48b3-a56b-aae18e2e0aa4 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Task: {'id': task-1802034, 'name': PowerOnVM_Task} progress is 100%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1432.950247] env[62519]: DEBUG oslo_concurrency.lockutils [None req-a045cbfb-ac93-4591-9bfc-366cc2fb264e tempest-VolumesAdminNegativeTest-694306887 tempest-VolumesAdminNegativeTest-694306887-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.739s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1432.952108] env[62519]: DEBUG nova.compute.manager [None req-a045cbfb-ac93-4591-9bfc-366cc2fb264e tempest-VolumesAdminNegativeTest-694306887 tempest-VolumesAdminNegativeTest-694306887-project-member] [instance: c61c893f-826b-4874-b253-de6fbffa9e5a] Start building networks asynchronously for instance. 
{{(pid=62519) _build_resources /opt/stack/nova/nova/compute/manager.py:2838}} [ 1432.955674] env[62519]: DEBUG oslo_concurrency.lockutils [None req-34a2cefc-4314-45a8-b8ba-97221121738f tempest-ServerDiagnosticsNegativeTest-1844110598 tempest-ServerDiagnosticsNegativeTest-1844110598-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 27.463s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1432.955874] env[62519]: DEBUG oslo_concurrency.lockutils [None req-34a2cefc-4314-45a8-b8ba-97221121738f tempest-ServerDiagnosticsNegativeTest-1844110598 tempest-ServerDiagnosticsNegativeTest-1844110598-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1432.960832] env[62519]: DEBUG oslo_concurrency.lockutils [None req-f54a75ce-3152-4fde-8da2-c76420839723 tempest-InstanceActionsV221TestJSON-1566764076 tempest-InstanceActionsV221TestJSON-1566764076-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 21.500s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1432.960832] env[62519]: INFO nova.compute.claims [None req-f54a75ce-3152-4fde-8da2-c76420839723 tempest-InstanceActionsV221TestJSON-1566764076 tempest-InstanceActionsV221TestJSON-1566764076-project-member] [instance: c612a1be-fb39-416d-a9d2-d206582e5aeb] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1432.989906] env[62519]: DEBUG nova.network.neutron [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] [instance: fe350d30-6fbd-4813-9634-ed05984fecfd] Updating instance_info_cache with network_info: [{"id": "6cce4a48-b732-4bd4-a39e-bbc701b31b3b", "address": "fa:16:3e:21:41:9e", "network": {"id": "b4c20e4e-b4f0-4757-9ab0-b74eef10b678", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.181", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "4069337684d348e0a1ab4eb6a1a2b14d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fe99da4f-5630-4afd-918b-b327193d8489", "external-id": "nsx-vlan-transportzone-688", "segmentation_id": 688, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6cce4a48-b7", "ovs_interfaceid": "6cce4a48-b732-4bd4-a39e-bbc701b31b3b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1432.999993] env[62519]: INFO nova.scheduler.client.report [None req-34a2cefc-4314-45a8-b8ba-97221121738f tempest-ServerDiagnosticsNegativeTest-1844110598 tempest-ServerDiagnosticsNegativeTest-1844110598-project-member] Deleted allocations for instance c616d8ec-f28a-4430-a336-1ea4790fd511 [ 1433.211028] env[62519]: DEBUG oslo_vmware.api [None 
req-62794cdc-f128-48b3-a56b-aae18e2e0aa4 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Task: {'id': task-1802034, 'name': PowerOnVM_Task, 'duration_secs': 0.530012} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1433.211028] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-62794cdc-f128-48b3-a56b-aae18e2e0aa4 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] [instance: fe350d30-6fbd-4813-9634-ed05984fecfd] Powered on the VM {{(pid=62519) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1433.211166] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-62794cdc-f128-48b3-a56b-aae18e2e0aa4 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] [instance: fe350d30-6fbd-4813-9634-ed05984fecfd] Updating instance 'fe350d30-6fbd-4813-9634-ed05984fecfd' progress to 100 {{(pid=62519) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1433.276196] env[62519]: DEBUG nova.network.neutron [None req-39a021c3-8855-476c-8305-d806d48e02a7 tempest-VolumesAssistedSnapshotsTest-904167312 tempest-VolumesAssistedSnapshotsTest-904167312-project-member] [instance: 91902e7f-8c15-447b-a3a8-04433434b1b6] Successfully updated port: 57934d0f-3be6-4e2d-9e86-b5500fae4b3b {{(pid=62519) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1433.365016] env[62519]: DEBUG nova.virt.hardware [None req-4ab4b288-3acf-467a-a329-77277bf523db tempest-ServersAdmin275Test-1501358753 tempest-ServersAdmin275Test-1501358753-project-admin] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T07:56:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T07:55:55Z,direct_url=,disk_format='vmdk',id=15793716-f1d9-4a86-9030-717adf498693,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4069337684d348e0a1ab4eb6a1a2b14d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T07:55:56Z,virtual_size=,visibility=), allow threads: False {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1433.365016] env[62519]: DEBUG nova.virt.hardware [None req-4ab4b288-3acf-467a-a329-77277bf523db tempest-ServersAdmin275Test-1501358753 tempest-ServersAdmin275Test-1501358753-project-admin] Flavor limits 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1433.365016] env[62519]: DEBUG nova.virt.hardware [None req-4ab4b288-3acf-467a-a329-77277bf523db tempest-ServersAdmin275Test-1501358753 tempest-ServersAdmin275Test-1501358753-project-admin] Image limits 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1433.365016] env[62519]: DEBUG nova.virt.hardware [None req-4ab4b288-3acf-467a-a329-77277bf523db tempest-ServersAdmin275Test-1501358753 tempest-ServersAdmin275Test-1501358753-project-admin] Flavor pref 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1433.365326] env[62519]: DEBUG nova.virt.hardware [None 
req-4ab4b288-3acf-467a-a329-77277bf523db tempest-ServersAdmin275Test-1501358753 tempest-ServersAdmin275Test-1501358753-project-admin] Image pref 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1433.365326] env[62519]: DEBUG nova.virt.hardware [None req-4ab4b288-3acf-467a-a329-77277bf523db tempest-ServersAdmin275Test-1501358753 tempest-ServersAdmin275Test-1501358753-project-admin] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1433.365326] env[62519]: DEBUG nova.virt.hardware [None req-4ab4b288-3acf-467a-a329-77277bf523db tempest-ServersAdmin275Test-1501358753 tempest-ServersAdmin275Test-1501358753-project-admin] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1433.366130] env[62519]: DEBUG nova.virt.hardware [None req-4ab4b288-3acf-467a-a329-77277bf523db tempest-ServersAdmin275Test-1501358753 tempest-ServersAdmin275Test-1501358753-project-admin] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62519) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1433.366502] env[62519]: DEBUG nova.virt.hardware [None req-4ab4b288-3acf-467a-a329-77277bf523db tempest-ServersAdmin275Test-1501358753 tempest-ServersAdmin275Test-1501358753-project-admin] Got 1 possible topologies {{(pid=62519) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1433.366802] env[62519]: DEBUG nova.virt.hardware [None req-4ab4b288-3acf-467a-a329-77277bf523db tempest-ServersAdmin275Test-1501358753 tempest-ServersAdmin275Test-1501358753-project-admin] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1433.370017] env[62519]: DEBUG nova.virt.hardware [None req-4ab4b288-3acf-467a-a329-77277bf523db tempest-ServersAdmin275Test-1501358753 tempest-ServersAdmin275Test-1501358753-project-admin] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1433.370017] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2547df61-d3fd-40e7-9665-e3ebe619461c {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1433.377465] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5671f467-a777-4e44-aa0f-c3f61bccbfe5 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1433.394541] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-4ab4b288-3acf-467a-a329-77277bf523db tempest-ServersAdmin275Test-1501358753 tempest-ServersAdmin275Test-1501358753-project-admin] [instance: c07e4d30-44bc-417b-8137-97f974aec932] Instance VIF info [] {{(pid=62519) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1433.401097] env[62519]: DEBUG oslo.service.loopingcall [None req-4ab4b288-3acf-467a-a329-77277bf523db tempest-ServersAdmin275Test-1501358753 tempest-ServersAdmin275Test-1501358753-project-admin] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62519) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1433.401526] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c07e4d30-44bc-417b-8137-97f974aec932] Creating VM on the ESX host {{(pid=62519) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1433.401909] env[62519]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-73570e3e-a0c3-4383-88fb-b1a55daf31be {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1433.418022] env[62519]: DEBUG nova.compute.manager [req-f4244607-97e1-4db5-866d-270f9e3d0704 req-625966e9-6ce7-4566-809f-eb80d611b2d7 service nova] [instance: 91902e7f-8c15-447b-a3a8-04433434b1b6] Received event network-vif-plugged-57934d0f-3be6-4e2d-9e86-b5500fae4b3b {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1433.418022] env[62519]: DEBUG oslo_concurrency.lockutils [req-f4244607-97e1-4db5-866d-270f9e3d0704 req-625966e9-6ce7-4566-809f-eb80d611b2d7 service nova] Acquiring lock "91902e7f-8c15-447b-a3a8-04433434b1b6-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1433.418022] env[62519]: DEBUG oslo_concurrency.lockutils [req-f4244607-97e1-4db5-866d-270f9e3d0704 req-625966e9-6ce7-4566-809f-eb80d611b2d7 service nova] Lock "91902e7f-8c15-447b-a3a8-04433434b1b6-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1433.418022] env[62519]: DEBUG oslo_concurrency.lockutils [req-f4244607-97e1-4db5-866d-270f9e3d0704 req-625966e9-6ce7-4566-809f-eb80d611b2d7 service nova] Lock "91902e7f-8c15-447b-a3a8-04433434b1b6-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1433.418272] env[62519]: DEBUG nova.compute.manager [req-f4244607-97e1-4db5-866d-270f9e3d0704 req-625966e9-6ce7-4566-809f-eb80d611b2d7 service nova] [instance: 91902e7f-8c15-447b-a3a8-04433434b1b6] No waiting events found dispatching network-vif-plugged-57934d0f-3be6-4e2d-9e86-b5500fae4b3b {{(pid=62519) pop_instance_event /opt/stack/nova/nova/compute/manager.py:323}} [ 1433.418318] env[62519]: WARNING nova.compute.manager [req-f4244607-97e1-4db5-866d-270f9e3d0704 req-625966e9-6ce7-4566-809f-eb80d611b2d7 service nova] [instance: 91902e7f-8c15-447b-a3a8-04433434b1b6] Received unexpected event network-vif-plugged-57934d0f-3be6-4e2d-9e86-b5500fae4b3b for instance with vm_state building and task_state spawning. [ 1433.427063] env[62519]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1433.427063] env[62519]: value = "task-1802035" [ 1433.427063] env[62519]: _type = "Task" [ 1433.427063] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1433.434752] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1802035, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1433.471382] env[62519]: DEBUG nova.compute.utils [None req-a045cbfb-ac93-4591-9bfc-366cc2fb264e tempest-VolumesAdminNegativeTest-694306887 tempest-VolumesAdminNegativeTest-694306887-project-member] Using /dev/sd instead of None {{(pid=62519) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1433.475503] env[62519]: DEBUG nova.compute.manager [None req-a045cbfb-ac93-4591-9bfc-366cc2fb264e tempest-VolumesAdminNegativeTest-694306887 tempest-VolumesAdminNegativeTest-694306887-project-member] [instance: c61c893f-826b-4874-b253-de6fbffa9e5a] Allocating IP information in the background. {{(pid=62519) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1989}} [ 1433.475503] env[62519]: DEBUG nova.network.neutron [None req-a045cbfb-ac93-4591-9bfc-366cc2fb264e tempest-VolumesAdminNegativeTest-694306887 tempest-VolumesAdminNegativeTest-694306887-project-member] [instance: c61c893f-826b-4874-b253-de6fbffa9e5a] allocate_for_instance() {{(pid=62519) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1433.498031] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Releasing lock "refresh_cache-fe350d30-6fbd-4813-9634-ed05984fecfd" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1433.498277] env[62519]: DEBUG nova.compute.manager [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] [instance: fe350d30-6fbd-4813-9634-ed05984fecfd] Updated the network info_cache for instance {{(pid=62519) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10489}} [ 1433.498880] env[62519]: DEBUG oslo_service.periodic_task [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62519) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1433.500389] env[62519]: DEBUG oslo_service.periodic_task [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62519) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1433.500521] env[62519]: DEBUG oslo_service.periodic_task [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62519) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1433.500662] env[62519]: DEBUG nova.compute.manager [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=62519) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11037}} [ 1433.512063] env[62519]: DEBUG oslo_concurrency.lockutils [None req-34a2cefc-4314-45a8-b8ba-97221121738f tempest-ServerDiagnosticsNegativeTest-1844110598 tempest-ServerDiagnosticsNegativeTest-1844110598-project-member] Lock "c616d8ec-f28a-4430-a336-1ea4790fd511" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 32.356s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1433.569928] env[62519]: DEBUG nova.policy [None req-a045cbfb-ac93-4591-9bfc-366cc2fb264e tempest-VolumesAdminNegativeTest-694306887 tempest-VolumesAdminNegativeTest-694306887-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '8dc0b2589af74b5f9602732295e8e228', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '92b737895c7c42f78fbc5d0fff165dc8', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62519) authorize /opt/stack/nova/nova/policy.py:192}} [ 1433.777763] env[62519]: DEBUG oslo_concurrency.lockutils [None req-39a021c3-8855-476c-8305-d806d48e02a7 tempest-VolumesAssistedSnapshotsTest-904167312 tempest-VolumesAssistedSnapshotsTest-904167312-project-member] Acquiring lock "refresh_cache-91902e7f-8c15-447b-a3a8-04433434b1b6" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1433.777897] env[62519]: DEBUG oslo_concurrency.lockutils [None req-39a021c3-8855-476c-8305-d806d48e02a7 tempest-VolumesAssistedSnapshotsTest-904167312 tempest-VolumesAssistedSnapshotsTest-904167312-project-member] Acquired lock "refresh_cache-91902e7f-8c15-447b-a3a8-04433434b1b6" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1433.778054] env[62519]: DEBUG nova.network.neutron [None req-39a021c3-8855-476c-8305-d806d48e02a7 tempest-VolumesAssistedSnapshotsTest-904167312 tempest-VolumesAssistedSnapshotsTest-904167312-project-member] [instance: 91902e7f-8c15-447b-a3a8-04433434b1b6] Building network info cache for instance {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1433.922110] env[62519]: DEBUG nova.network.neutron [None req-a045cbfb-ac93-4591-9bfc-366cc2fb264e tempest-VolumesAdminNegativeTest-694306887 tempest-VolumesAdminNegativeTest-694306887-project-member] [instance: c61c893f-826b-4874-b253-de6fbffa9e5a] Successfully created port: 8a1a2471-fe2e-4bb8-917a-135d0d5f4859 {{(pid=62519) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1433.940607] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1802035, 'name': CreateVM_Task, 'duration_secs': 0.326627} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1433.941289] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c07e4d30-44bc-417b-8137-97f974aec932] Created VM on the ESX host {{(pid=62519) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1433.941922] env[62519]: DEBUG oslo_concurrency.lockutils [None req-4ab4b288-3acf-467a-a329-77277bf523db tempest-ServersAdmin275Test-1501358753 tempest-ServersAdmin275Test-1501358753-project-admin] Acquiring lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1433.943101] env[62519]: DEBUG oslo_concurrency.lockutils [None req-4ab4b288-3acf-467a-a329-77277bf523db tempest-ServersAdmin275Test-1501358753 tempest-ServersAdmin275Test-1501358753-project-admin] Acquired lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1433.943101] env[62519]: DEBUG oslo_concurrency.lockutils [None req-4ab4b288-3acf-467a-a329-77277bf523db tempest-ServersAdmin275Test-1501358753 tempest-ServersAdmin275Test-1501358753-project-admin] Acquired external semaphore "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1433.943958] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6ebe31be-f7d0-48f2-b9cd-6faa0f6ab1e5 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1433.949865] env[62519]: DEBUG oslo_vmware.api [None req-4ab4b288-3acf-467a-a329-77277bf523db tempest-ServersAdmin275Test-1501358753 tempest-ServersAdmin275Test-1501358753-project-admin] Waiting for the task: (returnval){ [ 1433.949865] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]5280bd16-9e35-9e9f-81b9-90d3d75a153e" [ 1433.949865] env[62519]: _type = "Task" [ 1433.949865] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1433.961458] env[62519]: DEBUG oslo_vmware.api [None req-4ab4b288-3acf-467a-a329-77277bf523db tempest-ServersAdmin275Test-1501358753 tempest-ServersAdmin275Test-1501358753-project-admin] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]5280bd16-9e35-9e9f-81b9-90d3d75a153e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1433.978952] env[62519]: DEBUG nova.compute.manager [None req-a045cbfb-ac93-4591-9bfc-366cc2fb264e tempest-VolumesAdminNegativeTest-694306887 tempest-VolumesAdminNegativeTest-694306887-project-member] [instance: c61c893f-826b-4874-b253-de6fbffa9e5a] Start building block device mappings for instance. {{(pid=62519) _build_resources /opt/stack/nova/nova/compute/manager.py:2873}} [ 1434.322427] env[62519]: DEBUG nova.network.neutron [None req-39a021c3-8855-476c-8305-d806d48e02a7 tempest-VolumesAssistedSnapshotsTest-904167312 tempest-VolumesAssistedSnapshotsTest-904167312-project-member] [instance: 91902e7f-8c15-447b-a3a8-04433434b1b6] Instance cache missing network info. 
{{(pid=62519) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1434.460855] env[62519]: DEBUG oslo_vmware.api [None req-4ab4b288-3acf-467a-a329-77277bf523db tempest-ServersAdmin275Test-1501358753 tempest-ServersAdmin275Test-1501358753-project-admin] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]5280bd16-9e35-9e9f-81b9-90d3d75a153e, 'name': SearchDatastore_Task, 'duration_secs': 0.011135} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1434.461660] env[62519]: DEBUG oslo_concurrency.lockutils [None req-4ab4b288-3acf-467a-a329-77277bf523db tempest-ServersAdmin275Test-1501358753 tempest-ServersAdmin275Test-1501358753-project-admin] Releasing lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1434.461660] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-4ab4b288-3acf-467a-a329-77277bf523db tempest-ServersAdmin275Test-1501358753 tempest-ServersAdmin275Test-1501358753-project-admin] [instance: c07e4d30-44bc-417b-8137-97f974aec932] Processing image 15793716-f1d9-4a86-9030-717adf498693 {{(pid=62519) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1434.461660] env[62519]: DEBUG oslo_concurrency.lockutils [None req-4ab4b288-3acf-467a-a329-77277bf523db tempest-ServersAdmin275Test-1501358753 tempest-ServersAdmin275Test-1501358753-project-admin] Acquiring lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1434.461900] env[62519]: DEBUG oslo_concurrency.lockutils [None req-4ab4b288-3acf-467a-a329-77277bf523db tempest-ServersAdmin275Test-1501358753 tempest-ServersAdmin275Test-1501358753-project-admin] Acquired lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1434.461972] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-4ab4b288-3acf-467a-a329-77277bf523db tempest-ServersAdmin275Test-1501358753 tempest-ServersAdmin275Test-1501358753-project-admin] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62519) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1434.462283] env[62519]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-8d59255f-a2c1-461d-ac6f-c7ae39a84f5b {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1434.473885] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-4ab4b288-3acf-467a-a329-77277bf523db tempest-ServersAdmin275Test-1501358753 tempest-ServersAdmin275Test-1501358753-project-admin] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62519) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1434.473993] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-4ab4b288-3acf-467a-a329-77277bf523db tempest-ServersAdmin275Test-1501358753 tempest-ServersAdmin275Test-1501358753-project-admin] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62519) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1434.475978] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0abe0e6a-3285-4fe8-924e-72043541b8f2 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1434.492576] env[62519]: DEBUG oslo_vmware.api [None req-4ab4b288-3acf-467a-a329-77277bf523db tempest-ServersAdmin275Test-1501358753 tempest-ServersAdmin275Test-1501358753-project-admin] Waiting for the task: (returnval){ [ 1434.492576] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]527e5829-858e-3b57-9053-9f1d383e30a3" [ 1434.492576] env[62519]: _type = "Task" [ 1434.492576] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1434.510342] env[62519]: DEBUG oslo_vmware.api [None req-4ab4b288-3acf-467a-a329-77277bf523db tempest-ServersAdmin275Test-1501358753 tempest-ServersAdmin275Test-1501358753-project-admin] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]527e5829-858e-3b57-9053-9f1d383e30a3, 'name': SearchDatastore_Task, 'duration_secs': 0.01101} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1434.511180] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c21d0c69-f6f5-4276-a0d2-02896b778ba1 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1434.523367] env[62519]: DEBUG oslo_vmware.api [None req-4ab4b288-3acf-467a-a329-77277bf523db tempest-ServersAdmin275Test-1501358753 tempest-ServersAdmin275Test-1501358753-project-admin] Waiting for the task: (returnval){ [ 1434.523367] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]524d5378-35ee-cc2a-9cad-bdcaa6097cce" [ 1434.523367] env[62519]: _type = "Task" [ 1434.523367] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1434.537691] env[62519]: DEBUG oslo_vmware.api [None req-4ab4b288-3acf-467a-a329-77277bf523db tempest-ServersAdmin275Test-1501358753 tempest-ServersAdmin275Test-1501358753-project-admin] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]524d5378-35ee-cc2a-9cad-bdcaa6097cce, 'name': SearchDatastore_Task, 'duration_secs': 0.010272} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1434.538678] env[62519]: DEBUG oslo_concurrency.lockutils [None req-4ab4b288-3acf-467a-a329-77277bf523db tempest-ServersAdmin275Test-1501358753 tempest-ServersAdmin275Test-1501358753-project-admin] Releasing lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1434.538971] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-4ab4b288-3acf-467a-a329-77277bf523db tempest-ServersAdmin275Test-1501358753 tempest-ServersAdmin275Test-1501358753-project-admin] Copying Virtual Disk [datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk to [datastore1] c07e4d30-44bc-417b-8137-97f974aec932/c07e4d30-44bc-417b-8137-97f974aec932.vmdk {{(pid=62519) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1434.539995] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-abf1f133-b614-4412-b2b6-9b1b4a050f7b {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1434.554407] env[62519]: DEBUG oslo_vmware.api [None req-4ab4b288-3acf-467a-a329-77277bf523db tempest-ServersAdmin275Test-1501358753 tempest-ServersAdmin275Test-1501358753-project-admin] Waiting for the task: (returnval){ [ 1434.554407] env[62519]: value = "task-1802036" [ 1434.554407] env[62519]: _type = "Task" [ 1434.554407] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1434.563154] env[62519]: DEBUG oslo_vmware.api [None req-4ab4b288-3acf-467a-a329-77277bf523db tempest-ServersAdmin275Test-1501358753 tempest-ServersAdmin275Test-1501358753-project-admin] Task: {'id': task-1802036, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1434.568806] env[62519]: DEBUG nova.network.neutron [None req-39a021c3-8855-476c-8305-d806d48e02a7 tempest-VolumesAssistedSnapshotsTest-904167312 tempest-VolumesAssistedSnapshotsTest-904167312-project-member] [instance: 91902e7f-8c15-447b-a3a8-04433434b1b6] Updating instance_info_cache with network_info: [{"id": "57934d0f-3be6-4e2d-9e86-b5500fae4b3b", "address": "fa:16:3e:f5:c6:8e", "network": {"id": "562e50fb-c1d1-4596-a481-657942fd7331", "bridge": "br-int", "label": "tempest-VolumesAssistedSnapshotsTest-962652241-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "679c16f2d0ab479ab1e7395dab280c96", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "16f09e8c-5240-4839-80cc-62ec29700bd2", "external-id": "nsx-vlan-transportzone-720", "segmentation_id": 720, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap57934d0f-3b", "ovs_interfaceid": "57934d0f-3be6-4e2d-9e86-b5500fae4b3b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1434.647143] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f3cf4b3-769a-423a-a814-d2a32b1e0727 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1434.654772] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-076563e5-da4e-42a9-8813-60e71c1a82ce {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1435.451045] env[62519]: DEBUG nova.compute.manager [None req-a045cbfb-ac93-4591-9bfc-366cc2fb264e tempest-VolumesAdminNegativeTest-694306887 tempest-VolumesAdminNegativeTest-694306887-project-member] [instance: c61c893f-826b-4874-b253-de6fbffa9e5a] Start spawning the instance on the hypervisor. 
{{(pid=62519) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2647}} [ 1435.453599] env[62519]: DEBUG oslo_concurrency.lockutils [None req-39a021c3-8855-476c-8305-d806d48e02a7 tempest-VolumesAssistedSnapshotsTest-904167312 tempest-VolumesAssistedSnapshotsTest-904167312-project-member] Releasing lock "refresh_cache-91902e7f-8c15-447b-a3a8-04433434b1b6" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1435.453875] env[62519]: DEBUG nova.compute.manager [None req-39a021c3-8855-476c-8305-d806d48e02a7 tempest-VolumesAssistedSnapshotsTest-904167312 tempest-VolumesAssistedSnapshotsTest-904167312-project-member] [instance: 91902e7f-8c15-447b-a3a8-04433434b1b6] Instance network_info: |[{"id": "57934d0f-3be6-4e2d-9e86-b5500fae4b3b", "address": "fa:16:3e:f5:c6:8e", "network": {"id": "562e50fb-c1d1-4596-a481-657942fd7331", "bridge": "br-int", "label": "tempest-VolumesAssistedSnapshotsTest-962652241-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "679c16f2d0ab479ab1e7395dab280c96", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "16f09e8c-5240-4839-80cc-62ec29700bd2", "external-id": "nsx-vlan-transportzone-720", "segmentation_id": 720, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap57934d0f-3b", "ovs_interfaceid": "57934d0f-3be6-4e2d-9e86-b5500fae4b3b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62519) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2004}} [ 1435.461375] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-39a021c3-8855-476c-8305-d806d48e02a7 tempest-VolumesAssistedSnapshotsTest-904167312 tempest-VolumesAssistedSnapshotsTest-904167312-project-member] [instance: 91902e7f-8c15-447b-a3a8-04433434b1b6] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:f5:c6:8e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '16f09e8c-5240-4839-80cc-62ec29700bd2', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '57934d0f-3be6-4e2d-9e86-b5500fae4b3b', 'vif_model': 'vmxnet3'}] {{(pid=62519) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1435.469428] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-39a021c3-8855-476c-8305-d806d48e02a7 tempest-VolumesAssistedSnapshotsTest-904167312 tempest-VolumesAssistedSnapshotsTest-904167312-project-member] Creating folder: Project (679c16f2d0ab479ab1e7395dab280c96). Parent ref: group-v373567. 
{{(pid=62519) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1435.470976] env[62519]: DEBUG oslo_concurrency.lockutils [None req-b9c19239-83b3-40a1-b398-a911c392dcc2 tempest-InstanceActionsTestJSON-366970633 tempest-InstanceActionsTestJSON-366970633-project-member] Acquiring lock "f19c860f-736a-4783-8ef5-8262040e53a3" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1435.470976] env[62519]: DEBUG oslo_concurrency.lockutils [None req-b9c19239-83b3-40a1-b398-a911c392dcc2 tempest-InstanceActionsTestJSON-366970633 tempest-InstanceActionsTestJSON-366970633-project-member] Lock "f19c860f-736a-4783-8ef5-8262040e53a3" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1435.472040] env[62519]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ff460155-24bd-4373-be81-c48f0fe7691c {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1435.503562] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95509665-0cd5-4e05-8e5d-773120f4e3d7 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1435.509101] env[62519]: DEBUG oslo_vmware.api [None req-4ab4b288-3acf-467a-a329-77277bf523db tempest-ServersAdmin275Test-1501358753 tempest-ServersAdmin275Test-1501358753-project-admin] Task: {'id': task-1802036, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.516153} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1435.510213] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-4ab4b288-3acf-467a-a329-77277bf523db tempest-ServersAdmin275Test-1501358753 tempest-ServersAdmin275Test-1501358753-project-admin] Copied Virtual Disk [datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk to [datastore1] c07e4d30-44bc-417b-8137-97f974aec932/c07e4d30-44bc-417b-8137-97f974aec932.vmdk {{(pid=62519) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1435.510213] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-4ab4b288-3acf-467a-a329-77277bf523db tempest-ServersAdmin275Test-1501358753 tempest-ServersAdmin275Test-1501358753-project-admin] [instance: c07e4d30-44bc-417b-8137-97f974aec932] Extending root virtual disk to 1048576 {{(pid=62519) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1435.513485] env[62519]: DEBUG nova.virt.hardware [None req-a045cbfb-ac93-4591-9bfc-366cc2fb264e tempest-VolumesAdminNegativeTest-694306887 tempest-VolumesAdminNegativeTest-694306887-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T07:56:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T07:55:55Z,direct_url=,disk_format='vmdk',id=15793716-f1d9-4a86-9030-717adf498693,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4069337684d348e0a1ab4eb6a1a2b14d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T07:55:56Z,virtual_size=,visibility=), allow threads: False {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1435.513735] env[62519]: DEBUG nova.virt.hardware [None req-a045cbfb-ac93-4591-9bfc-366cc2fb264e tempest-VolumesAdminNegativeTest-694306887 tempest-VolumesAdminNegativeTest-694306887-project-member] Flavor limits 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1435.513908] env[62519]: DEBUG nova.virt.hardware [None req-a045cbfb-ac93-4591-9bfc-366cc2fb264e tempest-VolumesAdminNegativeTest-694306887 tempest-VolumesAdminNegativeTest-694306887-project-member] Image limits 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1435.514087] env[62519]: DEBUG nova.virt.hardware [None req-a045cbfb-ac93-4591-9bfc-366cc2fb264e tempest-VolumesAdminNegativeTest-694306887 tempest-VolumesAdminNegativeTest-694306887-project-member] Flavor pref 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1435.514236] env[62519]: DEBUG nova.virt.hardware [None req-a045cbfb-ac93-4591-9bfc-366cc2fb264e tempest-VolumesAdminNegativeTest-694306887 tempest-VolumesAdminNegativeTest-694306887-project-member] Image pref 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1435.514374] env[62519]: DEBUG nova.virt.hardware [None req-a045cbfb-ac93-4591-9bfc-366cc2fb264e tempest-VolumesAdminNegativeTest-694306887 tempest-VolumesAdminNegativeTest-694306887-project-member] Chose sockets=0, cores=0, threads=0; limits 
were sockets=65536, cores=65536, threads=65536 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1435.514573] env[62519]: DEBUG nova.virt.hardware [None req-a045cbfb-ac93-4591-9bfc-366cc2fb264e tempest-VolumesAdminNegativeTest-694306887 tempest-VolumesAdminNegativeTest-694306887-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1435.514741] env[62519]: DEBUG nova.virt.hardware [None req-a045cbfb-ac93-4591-9bfc-366cc2fb264e tempest-VolumesAdminNegativeTest-694306887 tempest-VolumesAdminNegativeTest-694306887-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62519) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1435.514877] env[62519]: DEBUG nova.virt.hardware [None req-a045cbfb-ac93-4591-9bfc-366cc2fb264e tempest-VolumesAdminNegativeTest-694306887 tempest-VolumesAdminNegativeTest-694306887-project-member] Got 1 possible topologies {{(pid=62519) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1435.515041] env[62519]: DEBUG nova.virt.hardware [None req-a045cbfb-ac93-4591-9bfc-366cc2fb264e tempest-VolumesAdminNegativeTest-694306887 tempest-VolumesAdminNegativeTest-694306887-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1435.515213] env[62519]: DEBUG nova.virt.hardware [None req-a045cbfb-ac93-4591-9bfc-366cc2fb264e tempest-VolumesAdminNegativeTest-694306887 tempest-VolumesAdminNegativeTest-694306887-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1435.515527] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-492ffe13-1d22-426a-a20e-d0f4e165a9fe {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1435.517620] env[62519]: INFO nova.virt.vmwareapi.vm_util [None req-39a021c3-8855-476c-8305-d806d48e02a7 tempest-VolumesAssistedSnapshotsTest-904167312 tempest-VolumesAssistedSnapshotsTest-904167312-project-member] Created folder: Project (679c16f2d0ab479ab1e7395dab280c96) in parent group-v373567. [ 1435.517790] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-39a021c3-8855-476c-8305-d806d48e02a7 tempest-VolumesAssistedSnapshotsTest-904167312 tempest-VolumesAssistedSnapshotsTest-904167312-project-member] Creating folder: Instances. Parent ref: group-v373612. 
{{(pid=62519) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1435.518885] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd24eb1b-af43-42ed-bba3-5e098be13ecb {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1435.523736] env[62519]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-6cd51718-84f1-4ff0-a410-b31bdb08d145 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1435.527357] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-690b64a8-9ccd-4537-8fce-5a9a92364ada {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1435.535858] env[62519]: DEBUG oslo_vmware.api [None req-4ab4b288-3acf-467a-a329-77277bf523db tempest-ServersAdmin275Test-1501358753 tempest-ServersAdmin275Test-1501358753-project-admin] Waiting for the task: (returnval){ [ 1435.535858] env[62519]: value = "task-1802038" [ 1435.535858] env[62519]: _type = "Task" [ 1435.535858] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1435.540201] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc294cdf-3118-486a-8aaa-571726638469 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1435.557027] env[62519]: DEBUG nova.compute.provider_tree [None req-f54a75ce-3152-4fde-8da2-c76420839723 tempest-InstanceActionsV221TestJSON-1566764076 tempest-InstanceActionsV221TestJSON-1566764076-project-member] Inventory has not changed in ProviderTree for provider: f8ca0d98-9158-4b85-ae0e-b106f966dd44 {{(pid=62519) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1435.558453] env[62519]: INFO nova.virt.vmwareapi.vm_util [None req-39a021c3-8855-476c-8305-d806d48e02a7 tempest-VolumesAssistedSnapshotsTest-904167312 tempest-VolumesAssistedSnapshotsTest-904167312-project-member] Created folder: Instances in parent group-v373612. [ 1435.558694] env[62519]: DEBUG oslo.service.loopingcall [None req-39a021c3-8855-476c-8305-d806d48e02a7 tempest-VolumesAssistedSnapshotsTest-904167312 tempest-VolumesAssistedSnapshotsTest-904167312-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62519) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1435.559784] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 91902e7f-8c15-447b-a3a8-04433434b1b6] Creating VM on the ESX host {{(pid=62519) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1435.560380] env[62519]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d34d989b-67f4-45c6-9965-b9e78db7190b {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1435.589730] env[62519]: DEBUG oslo_vmware.api [None req-4ab4b288-3acf-467a-a329-77277bf523db tempest-ServersAdmin275Test-1501358753 tempest-ServersAdmin275Test-1501358753-project-admin] Task: {'id': task-1802038, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1435.595112] env[62519]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1435.595112] env[62519]: value = "task-1802040" [ 1435.595112] env[62519]: _type = "Task" [ 1435.595112] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1435.604568] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1802040, 'name': CreateVM_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1435.608182] env[62519]: DEBUG nova.network.neutron [None req-a045cbfb-ac93-4591-9bfc-366cc2fb264e tempest-VolumesAdminNegativeTest-694306887 tempest-VolumesAdminNegativeTest-694306887-project-member] [instance: c61c893f-826b-4874-b253-de6fbffa9e5a] Successfully updated port: 8a1a2471-fe2e-4bb8-917a-135d0d5f4859 {{(pid=62519) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1435.740785] env[62519]: DEBUG nova.compute.manager [req-6951d96f-ef96-4f24-92e0-28deb23f367d req-dc48c095-e3d5-4aa8-9f25-19b69d40a856 service nova] [instance: 91902e7f-8c15-447b-a3a8-04433434b1b6] Received event network-changed-57934d0f-3be6-4e2d-9e86-b5500fae4b3b {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1435.740989] env[62519]: DEBUG nova.compute.manager [req-6951d96f-ef96-4f24-92e0-28deb23f367d req-dc48c095-e3d5-4aa8-9f25-19b69d40a856 service nova] [instance: 91902e7f-8c15-447b-a3a8-04433434b1b6] Refreshing instance network info cache due to event network-changed-57934d0f-3be6-4e2d-9e86-b5500fae4b3b. {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11628}} [ 1435.741260] env[62519]: DEBUG oslo_concurrency.lockutils [req-6951d96f-ef96-4f24-92e0-28deb23f367d req-dc48c095-e3d5-4aa8-9f25-19b69d40a856 service nova] Acquiring lock "refresh_cache-91902e7f-8c15-447b-a3a8-04433434b1b6" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1435.741463] env[62519]: DEBUG oslo_concurrency.lockutils [req-6951d96f-ef96-4f24-92e0-28deb23f367d req-dc48c095-e3d5-4aa8-9f25-19b69d40a856 service nova] Acquired lock "refresh_cache-91902e7f-8c15-447b-a3a8-04433434b1b6" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1435.741631] env[62519]: DEBUG nova.network.neutron [req-6951d96f-ef96-4f24-92e0-28deb23f367d req-dc48c095-e3d5-4aa8-9f25-19b69d40a856 service nova] [instance: 91902e7f-8c15-447b-a3a8-04433434b1b6] Refreshing network info cache for port 57934d0f-3be6-4e2d-9e86-b5500fae4b3b {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1436.049835] env[62519]: DEBUG oslo_vmware.api [None req-4ab4b288-3acf-467a-a329-77277bf523db tempest-ServersAdmin275Test-1501358753 tempest-ServersAdmin275Test-1501358753-project-admin] Task: {'id': task-1802038, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.369033} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1436.050185] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-4ab4b288-3acf-467a-a329-77277bf523db tempest-ServersAdmin275Test-1501358753 tempest-ServersAdmin275Test-1501358753-project-admin] [instance: c07e4d30-44bc-417b-8137-97f974aec932] Extended root virtual disk {{(pid=62519) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1436.051058] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63771a24-9abc-489c-b030-d9f079432291 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1436.065579] env[62519]: DEBUG nova.scheduler.client.report [None req-f54a75ce-3152-4fde-8da2-c76420839723 tempest-InstanceActionsV221TestJSON-1566764076 tempest-InstanceActionsV221TestJSON-1566764076-project-member] Inventory has not changed for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1436.079355] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-4ab4b288-3acf-467a-a329-77277bf523db tempest-ServersAdmin275Test-1501358753 tempest-ServersAdmin275Test-1501358753-project-admin] [instance: c07e4d30-44bc-417b-8137-97f974aec932] Reconfiguring VM instance instance-0000000c to attach disk [datastore1] c07e4d30-44bc-417b-8137-97f974aec932/c07e4d30-44bc-417b-8137-97f974aec932.vmdk or device None with type sparse {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1436.079932] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-593dae06-fb8e-4a51-aeab-c1a0c316e7d6 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1436.110687] env[62519]: DEBUG oslo_concurrency.lockutils [None req-a045cbfb-ac93-4591-9bfc-366cc2fb264e tempest-VolumesAdminNegativeTest-694306887 tempest-VolumesAdminNegativeTest-694306887-project-member] Acquiring lock "refresh_cache-c61c893f-826b-4874-b253-de6fbffa9e5a" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1436.111281] env[62519]: DEBUG oslo_concurrency.lockutils [None req-a045cbfb-ac93-4591-9bfc-366cc2fb264e tempest-VolumesAdminNegativeTest-694306887 tempest-VolumesAdminNegativeTest-694306887-project-member] Acquired lock "refresh_cache-c61c893f-826b-4874-b253-de6fbffa9e5a" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1436.111281] env[62519]: DEBUG nova.network.neutron [None req-a045cbfb-ac93-4591-9bfc-366cc2fb264e tempest-VolumesAdminNegativeTest-694306887 tempest-VolumesAdminNegativeTest-694306887-project-member] [instance: c61c893f-826b-4874-b253-de6fbffa9e5a] Building network info cache for instance {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1436.112363] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1802040, 'name': CreateVM_Task} progress is 25%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1436.114640] env[62519]: DEBUG oslo_vmware.api [None req-4ab4b288-3acf-467a-a329-77277bf523db tempest-ServersAdmin275Test-1501358753 tempest-ServersAdmin275Test-1501358753-project-admin] Waiting for the task: (returnval){ [ 1436.114640] env[62519]: value = "task-1802041" [ 1436.114640] env[62519]: _type = "Task" [ 1436.114640] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1436.126673] env[62519]: DEBUG oslo_vmware.api [None req-4ab4b288-3acf-467a-a329-77277bf523db tempest-ServersAdmin275Test-1501358753 tempest-ServersAdmin275Test-1501358753-project-admin] Task: {'id': task-1802041, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1436.151078] env[62519]: DEBUG oslo_concurrency.lockutils [None req-b98629d6-2be9-4962-a917-529126d46bed tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Acquiring lock "fe350d30-6fbd-4813-9634-ed05984fecfd" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1436.151078] env[62519]: DEBUG oslo_concurrency.lockutils [None req-b98629d6-2be9-4962-a917-529126d46bed tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Lock "fe350d30-6fbd-4813-9634-ed05984fecfd" acquired by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: waited 0.001s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1436.152678] env[62519]: DEBUG nova.compute.manager [None req-b98629d6-2be9-4962-a917-529126d46bed tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] [instance: fe350d30-6fbd-4813-9634-ed05984fecfd] Going to confirm migration 1 {{(pid=62519) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:5235}} [ 1436.581368] env[62519]: DEBUG oslo_concurrency.lockutils [None req-f54a75ce-3152-4fde-8da2-c76420839723 tempest-InstanceActionsV221TestJSON-1566764076 tempest-InstanceActionsV221TestJSON-1566764076-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.622s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1436.582136] env[62519]: DEBUG nova.compute.manager [None req-f54a75ce-3152-4fde-8da2-c76420839723 tempest-InstanceActionsV221TestJSON-1566764076 tempest-InstanceActionsV221TestJSON-1566764076-project-member] [instance: c612a1be-fb39-416d-a9d2-d206582e5aeb] Start building networks asynchronously for instance. 
{{(pid=62519) _build_resources /opt/stack/nova/nova/compute/manager.py:2838}} [ 1436.585525] env[62519]: DEBUG oslo_concurrency.lockutils [None req-fcf29d77-c706-4a19-9381-3a93a4c3aaf6 tempest-ServerShowV247Test-1912217433 tempest-ServerShowV247Test-1912217433-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 22.005s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1436.587180] env[62519]: INFO nova.compute.claims [None req-fcf29d77-c706-4a19-9381-3a93a4c3aaf6 tempest-ServerShowV247Test-1912217433 tempest-ServerShowV247Test-1912217433-project-member] [instance: 42497ab5-cce9-4614-a6d1-dffbf6764d7b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1436.595196] env[62519]: DEBUG nova.network.neutron [req-6951d96f-ef96-4f24-92e0-28deb23f367d req-dc48c095-e3d5-4aa8-9f25-19b69d40a856 service nova] [instance: 91902e7f-8c15-447b-a3a8-04433434b1b6] Updated VIF entry in instance network info cache for port 57934d0f-3be6-4e2d-9e86-b5500fae4b3b. {{(pid=62519) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1436.595545] env[62519]: DEBUG nova.network.neutron [req-6951d96f-ef96-4f24-92e0-28deb23f367d req-dc48c095-e3d5-4aa8-9f25-19b69d40a856 service nova] [instance: 91902e7f-8c15-447b-a3a8-04433434b1b6] Updating instance_info_cache with network_info: [{"id": "57934d0f-3be6-4e2d-9e86-b5500fae4b3b", "address": "fa:16:3e:f5:c6:8e", "network": {"id": "562e50fb-c1d1-4596-a481-657942fd7331", "bridge": "br-int", "label": "tempest-VolumesAssistedSnapshotsTest-962652241-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "679c16f2d0ab479ab1e7395dab280c96", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "16f09e8c-5240-4839-80cc-62ec29700bd2", "external-id": "nsx-vlan-transportzone-720", "segmentation_id": 720, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap57934d0f-3b", "ovs_interfaceid": "57934d0f-3be6-4e2d-9e86-b5500fae4b3b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1436.614136] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1802040, 'name': CreateVM_Task, 'duration_secs': 0.91369} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1436.614316] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 91902e7f-8c15-447b-a3a8-04433434b1b6] Created VM on the ESX host {{(pid=62519) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1436.616434] env[62519]: DEBUG oslo_concurrency.lockutils [None req-39a021c3-8855-476c-8305-d806d48e02a7 tempest-VolumesAssistedSnapshotsTest-904167312 tempest-VolumesAssistedSnapshotsTest-904167312-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1436.616634] env[62519]: DEBUG oslo_concurrency.lockutils [None req-39a021c3-8855-476c-8305-d806d48e02a7 tempest-VolumesAssistedSnapshotsTest-904167312 tempest-VolumesAssistedSnapshotsTest-904167312-project-member] Acquired lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1436.616949] env[62519]: DEBUG oslo_concurrency.lockutils [None req-39a021c3-8855-476c-8305-d806d48e02a7 tempest-VolumesAssistedSnapshotsTest-904167312 tempest-VolumesAssistedSnapshotsTest-904167312-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1436.620042] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-959dd1c4-764e-46fb-a7ff-d7394f7b3fd4 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1436.634180] env[62519]: DEBUG oslo_vmware.api [None req-39a021c3-8855-476c-8305-d806d48e02a7 tempest-VolumesAssistedSnapshotsTest-904167312 tempest-VolumesAssistedSnapshotsTest-904167312-project-member] Waiting for the task: (returnval){ [ 1436.634180] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]5205e093-e97e-87bf-e185-35a30c793334" [ 1436.634180] env[62519]: _type = "Task" [ 1436.634180] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1436.637378] env[62519]: DEBUG oslo_vmware.api [None req-4ab4b288-3acf-467a-a329-77277bf523db tempest-ServersAdmin275Test-1501358753 tempest-ServersAdmin275Test-1501358753-project-admin] Task: {'id': task-1802041, 'name': ReconfigVM_Task, 'duration_secs': 0.32474} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1436.641501] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-4ab4b288-3acf-467a-a329-77277bf523db tempest-ServersAdmin275Test-1501358753 tempest-ServersAdmin275Test-1501358753-project-admin] [instance: c07e4d30-44bc-417b-8137-97f974aec932] Reconfigured VM instance instance-0000000c to attach disk [datastore1] c07e4d30-44bc-417b-8137-97f974aec932/c07e4d30-44bc-417b-8137-97f974aec932.vmdk or device None with type sparse {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1436.642421] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-204df10b-676f-4e27-adfb-4177399d626b {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1436.651718] env[62519]: DEBUG oslo_vmware.api [None req-39a021c3-8855-476c-8305-d806d48e02a7 tempest-VolumesAssistedSnapshotsTest-904167312 tempest-VolumesAssistedSnapshotsTest-904167312-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]5205e093-e97e-87bf-e185-35a30c793334, 'name': SearchDatastore_Task, 'duration_secs': 0.010412} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1436.653076] env[62519]: DEBUG oslo_concurrency.lockutils [None req-39a021c3-8855-476c-8305-d806d48e02a7 tempest-VolumesAssistedSnapshotsTest-904167312 tempest-VolumesAssistedSnapshotsTest-904167312-project-member] Releasing lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1436.653441] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-39a021c3-8855-476c-8305-d806d48e02a7 tempest-VolumesAssistedSnapshotsTest-904167312 tempest-VolumesAssistedSnapshotsTest-904167312-project-member] [instance: 91902e7f-8c15-447b-a3a8-04433434b1b6] Processing image 15793716-f1d9-4a86-9030-717adf498693 {{(pid=62519) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1436.653681] env[62519]: DEBUG oslo_concurrency.lockutils [None req-39a021c3-8855-476c-8305-d806d48e02a7 tempest-VolumesAssistedSnapshotsTest-904167312 tempest-VolumesAssistedSnapshotsTest-904167312-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1436.653825] env[62519]: DEBUG oslo_concurrency.lockutils [None req-39a021c3-8855-476c-8305-d806d48e02a7 tempest-VolumesAssistedSnapshotsTest-904167312 tempest-VolumesAssistedSnapshotsTest-904167312-project-member] Acquired lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1436.653997] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-39a021c3-8855-476c-8305-d806d48e02a7 tempest-VolumesAssistedSnapshotsTest-904167312 tempest-VolumesAssistedSnapshotsTest-904167312-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62519) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1436.655661] env[62519]: DEBUG oslo_vmware.api [None req-4ab4b288-3acf-467a-a329-77277bf523db 
tempest-ServersAdmin275Test-1501358753 tempest-ServersAdmin275Test-1501358753-project-admin] Waiting for the task: (returnval){ [ 1436.655661] env[62519]: value = "task-1802042" [ 1436.655661] env[62519]: _type = "Task" [ 1436.655661] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1436.655867] env[62519]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-00786269-1ebe-4de4-861b-acc84afa3d80 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1436.683709] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-39a021c3-8855-476c-8305-d806d48e02a7 tempest-VolumesAssistedSnapshotsTest-904167312 tempest-VolumesAssistedSnapshotsTest-904167312-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62519) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1436.684207] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-39a021c3-8855-476c-8305-d806d48e02a7 tempest-VolumesAssistedSnapshotsTest-904167312 tempest-VolumesAssistedSnapshotsTest-904167312-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62519) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1436.684661] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a0f0a709-3b9d-4f48-a2b0-a27c6e6b36dd {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1436.690593] env[62519]: DEBUG oslo_vmware.api [None req-39a021c3-8855-476c-8305-d806d48e02a7 tempest-VolumesAssistedSnapshotsTest-904167312 tempest-VolumesAssistedSnapshotsTest-904167312-project-member] Waiting for the task: (returnval){ [ 1436.690593] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]5257f6c6-ac45-b027-9e57-25a707001eee" [ 1436.690593] env[62519]: _type = "Task" [ 1436.690593] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1436.699392] env[62519]: DEBUG oslo_vmware.api [None req-39a021c3-8855-476c-8305-d806d48e02a7 tempest-VolumesAssistedSnapshotsTest-904167312 tempest-VolumesAssistedSnapshotsTest-904167312-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]5257f6c6-ac45-b027-9e57-25a707001eee, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1436.713942] env[62519]: DEBUG nova.network.neutron [None req-a045cbfb-ac93-4591-9bfc-366cc2fb264e tempest-VolumesAdminNegativeTest-694306887 tempest-VolumesAdminNegativeTest-694306887-project-member] [instance: c61c893f-826b-4874-b253-de6fbffa9e5a] Instance cache missing network info. 
{{(pid=62519) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1436.849411] env[62519]: DEBUG oslo_concurrency.lockutils [None req-b98629d6-2be9-4962-a917-529126d46bed tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Acquiring lock "refresh_cache-fe350d30-6fbd-4813-9634-ed05984fecfd" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1436.849624] env[62519]: DEBUG oslo_concurrency.lockutils [None req-b98629d6-2be9-4962-a917-529126d46bed tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Acquired lock "refresh_cache-fe350d30-6fbd-4813-9634-ed05984fecfd" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1436.850072] env[62519]: DEBUG nova.network.neutron [None req-b98629d6-2be9-4962-a917-529126d46bed tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] [instance: fe350d30-6fbd-4813-9634-ed05984fecfd] Building network info cache for instance {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1436.850072] env[62519]: DEBUG nova.objects.instance [None req-b98629d6-2be9-4962-a917-529126d46bed tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Lazy-loading 'info_cache' on Instance uuid fe350d30-6fbd-4813-9634-ed05984fecfd {{(pid=62519) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1436.988010] env[62519]: DEBUG nova.network.neutron [None req-a045cbfb-ac93-4591-9bfc-366cc2fb264e tempest-VolumesAdminNegativeTest-694306887 tempest-VolumesAdminNegativeTest-694306887-project-member] [instance: c61c893f-826b-4874-b253-de6fbffa9e5a] Updating instance_info_cache with network_info: [{"id": "8a1a2471-fe2e-4bb8-917a-135d0d5f4859", "address": "fa:16:3e:cc:22:35", "network": {"id": "58441f17-708d-4a4c-9e78-8a8d403416ae", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-132258554-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "92b737895c7c42f78fbc5d0fff165dc8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b5c34919-7d52-4a52-bab1-81af4c8182ef", "external-id": "nsx-vlan-transportzone-458", "segmentation_id": 458, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8a1a2471-fe", "ovs_interfaceid": "8a1a2471-fe2e-4bb8-917a-135d0d5f4859", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1437.092066] env[62519]: DEBUG nova.compute.utils [None req-f54a75ce-3152-4fde-8da2-c76420839723 tempest-InstanceActionsV221TestJSON-1566764076 tempest-InstanceActionsV221TestJSON-1566764076-project-member] Using /dev/sd instead of None {{(pid=62519) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1437.093734] env[62519]: DEBUG nova.compute.manager [None req-f54a75ce-3152-4fde-8da2-c76420839723 
tempest-InstanceActionsV221TestJSON-1566764076 tempest-InstanceActionsV221TestJSON-1566764076-project-member] [instance: c612a1be-fb39-416d-a9d2-d206582e5aeb] Allocating IP information in the background. {{(pid=62519) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1989}} [ 1437.093963] env[62519]: DEBUG nova.network.neutron [None req-f54a75ce-3152-4fde-8da2-c76420839723 tempest-InstanceActionsV221TestJSON-1566764076 tempest-InstanceActionsV221TestJSON-1566764076-project-member] [instance: c612a1be-fb39-416d-a9d2-d206582e5aeb] allocate_for_instance() {{(pid=62519) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1437.100393] env[62519]: DEBUG oslo_concurrency.lockutils [req-6951d96f-ef96-4f24-92e0-28deb23f367d req-dc48c095-e3d5-4aa8-9f25-19b69d40a856 service nova] Releasing lock "refresh_cache-91902e7f-8c15-447b-a3a8-04433434b1b6" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1437.174278] env[62519]: DEBUG oslo_vmware.api [None req-4ab4b288-3acf-467a-a329-77277bf523db tempest-ServersAdmin275Test-1501358753 tempest-ServersAdmin275Test-1501358753-project-admin] Task: {'id': task-1802042, 'name': Rename_Task, 'duration_secs': 0.190474} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1437.174507] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-4ab4b288-3acf-467a-a329-77277bf523db tempest-ServersAdmin275Test-1501358753 tempest-ServersAdmin275Test-1501358753-project-admin] [instance: c07e4d30-44bc-417b-8137-97f974aec932] Powering on the VM {{(pid=62519) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1437.176280] env[62519]: DEBUG nova.policy [None req-f54a75ce-3152-4fde-8da2-c76420839723 tempest-InstanceActionsV221TestJSON-1566764076 tempest-InstanceActionsV221TestJSON-1566764076-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ab1a0434cdf44cda9ccac27452b1ffb7', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '9d19c8963364423880f5c2d61fb5800f', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62519) authorize /opt/stack/nova/nova/policy.py:192}} [ 1437.178911] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c98a114f-a51f-4337-8654-a800fb26a4f7 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1437.187999] env[62519]: DEBUG oslo_vmware.api [None req-4ab4b288-3acf-467a-a329-77277bf523db tempest-ServersAdmin275Test-1501358753 tempest-ServersAdmin275Test-1501358753-project-admin] Waiting for the task: (returnval){ [ 1437.187999] env[62519]: value = "task-1802043" [ 1437.187999] env[62519]: _type = "Task" [ 1437.187999] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1437.201196] env[62519]: DEBUG oslo_vmware.api [None req-4ab4b288-3acf-467a-a329-77277bf523db tempest-ServersAdmin275Test-1501358753 tempest-ServersAdmin275Test-1501358753-project-admin] Task: {'id': task-1802043, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1437.207355] env[62519]: DEBUG oslo_vmware.api [None req-39a021c3-8855-476c-8305-d806d48e02a7 tempest-VolumesAssistedSnapshotsTest-904167312 tempest-VolumesAssistedSnapshotsTest-904167312-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]5257f6c6-ac45-b027-9e57-25a707001eee, 'name': SearchDatastore_Task, 'duration_secs': 0.012506} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1437.207355] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b56772dd-0771-4741-b472-ba88399b3c78 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1437.214082] env[62519]: DEBUG oslo_vmware.api [None req-39a021c3-8855-476c-8305-d806d48e02a7 tempest-VolumesAssistedSnapshotsTest-904167312 tempest-VolumesAssistedSnapshotsTest-904167312-project-member] Waiting for the task: (returnval){ [ 1437.214082] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]5219f083-6007-f918-a0d1-e1f2543bd4b1" [ 1437.214082] env[62519]: _type = "Task" [ 1437.214082] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1437.224889] env[62519]: DEBUG oslo_vmware.api [None req-39a021c3-8855-476c-8305-d806d48e02a7 tempest-VolumesAssistedSnapshotsTest-904167312 tempest-VolumesAssistedSnapshotsTest-904167312-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]5219f083-6007-f918-a0d1-e1f2543bd4b1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1437.490344] env[62519]: DEBUG oslo_concurrency.lockutils [None req-a045cbfb-ac93-4591-9bfc-366cc2fb264e tempest-VolumesAdminNegativeTest-694306887 tempest-VolumesAdminNegativeTest-694306887-project-member] Releasing lock "refresh_cache-c61c893f-826b-4874-b253-de6fbffa9e5a" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1437.490958] env[62519]: DEBUG nova.compute.manager [None req-a045cbfb-ac93-4591-9bfc-366cc2fb264e tempest-VolumesAdminNegativeTest-694306887 tempest-VolumesAdminNegativeTest-694306887-project-member] [instance: c61c893f-826b-4874-b253-de6fbffa9e5a] Instance network_info: |[{"id": "8a1a2471-fe2e-4bb8-917a-135d0d5f4859", "address": "fa:16:3e:cc:22:35", "network": {"id": "58441f17-708d-4a4c-9e78-8a8d403416ae", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-132258554-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "92b737895c7c42f78fbc5d0fff165dc8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b5c34919-7d52-4a52-bab1-81af4c8182ef", "external-id": "nsx-vlan-transportzone-458", "segmentation_id": 458, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8a1a2471-fe", "ovs_interfaceid": "8a1a2471-fe2e-4bb8-917a-135d0d5f4859", "qbh_params": null, 
"qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62519) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2004}} [ 1437.491422] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-a045cbfb-ac93-4591-9bfc-366cc2fb264e tempest-VolumesAdminNegativeTest-694306887 tempest-VolumesAdminNegativeTest-694306887-project-member] [instance: c61c893f-826b-4874-b253-de6fbffa9e5a] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:cc:22:35', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'b5c34919-7d52-4a52-bab1-81af4c8182ef', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '8a1a2471-fe2e-4bb8-917a-135d0d5f4859', 'vif_model': 'vmxnet3'}] {{(pid=62519) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1437.502232] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-a045cbfb-ac93-4591-9bfc-366cc2fb264e tempest-VolumesAdminNegativeTest-694306887 tempest-VolumesAdminNegativeTest-694306887-project-member] Creating folder: Project (92b737895c7c42f78fbc5d0fff165dc8). Parent ref: group-v373567. {{(pid=62519) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1437.503647] env[62519]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-1f0701c7-d693-4e4a-89d6-fce8fd842d33 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1437.518151] env[62519]: INFO nova.virt.vmwareapi.vm_util [None req-a045cbfb-ac93-4591-9bfc-366cc2fb264e tempest-VolumesAdminNegativeTest-694306887 tempest-VolumesAdminNegativeTest-694306887-project-member] Created folder: Project (92b737895c7c42f78fbc5d0fff165dc8) in parent group-v373567. [ 1437.518409] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-a045cbfb-ac93-4591-9bfc-366cc2fb264e tempest-VolumesAdminNegativeTest-694306887 tempest-VolumesAdminNegativeTest-694306887-project-member] Creating folder: Instances. Parent ref: group-v373615. {{(pid=62519) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1437.518729] env[62519]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-6efd1087-c129-405d-87f1-d77fc8ce1b7f {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1437.533264] env[62519]: INFO nova.virt.vmwareapi.vm_util [None req-a045cbfb-ac93-4591-9bfc-366cc2fb264e tempest-VolumesAdminNegativeTest-694306887 tempest-VolumesAdminNegativeTest-694306887-project-member] Created folder: Instances in parent group-v373615. [ 1437.533264] env[62519]: DEBUG oslo.service.loopingcall [None req-a045cbfb-ac93-4591-9bfc-366cc2fb264e tempest-VolumesAdminNegativeTest-694306887 tempest-VolumesAdminNegativeTest-694306887-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62519) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1437.533264] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c61c893f-826b-4874-b253-de6fbffa9e5a] Creating VM on the ESX host {{(pid=62519) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1437.533264] env[62519]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d5683616-1627-49e8-a647-14f4a4e3bc08 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1437.562022] env[62519]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1437.562022] env[62519]: value = "task-1802046" [ 1437.562022] env[62519]: _type = "Task" [ 1437.562022] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1437.571206] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1802046, 'name': CreateVM_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1437.600813] env[62519]: DEBUG nova.compute.manager [None req-f54a75ce-3152-4fde-8da2-c76420839723 tempest-InstanceActionsV221TestJSON-1566764076 tempest-InstanceActionsV221TestJSON-1566764076-project-member] [instance: c612a1be-fb39-416d-a9d2-d206582e5aeb] Start building block device mappings for instance. {{(pid=62519) _build_resources /opt/stack/nova/nova/compute/manager.py:2873}} [ 1437.701635] env[62519]: DEBUG oslo_vmware.api [None req-4ab4b288-3acf-467a-a329-77277bf523db tempest-ServersAdmin275Test-1501358753 tempest-ServersAdmin275Test-1501358753-project-admin] Task: {'id': task-1802043, 'name': PowerOnVM_Task, 'duration_secs': 0.474594} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1437.701635] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-4ab4b288-3acf-467a-a329-77277bf523db tempest-ServersAdmin275Test-1501358753 tempest-ServersAdmin275Test-1501358753-project-admin] [instance: c07e4d30-44bc-417b-8137-97f974aec932] Powered on the VM {{(pid=62519) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1437.701635] env[62519]: DEBUG nova.compute.manager [None req-4ab4b288-3acf-467a-a329-77277bf523db tempest-ServersAdmin275Test-1501358753 tempest-ServersAdmin275Test-1501358753-project-admin] [instance: c07e4d30-44bc-417b-8137-97f974aec932] Checking state {{(pid=62519) _get_power_state /opt/stack/nova/nova/compute/manager.py:1799}} [ 1437.702558] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8519a47c-4bb8-412f-84ce-f47c2a6261f1 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1437.725905] env[62519]: DEBUG oslo_vmware.api [None req-39a021c3-8855-476c-8305-d806d48e02a7 tempest-VolumesAssistedSnapshotsTest-904167312 tempest-VolumesAssistedSnapshotsTest-904167312-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]5219f083-6007-f918-a0d1-e1f2543bd4b1, 'name': SearchDatastore_Task, 'duration_secs': 0.022706} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1437.726316] env[62519]: DEBUG oslo_concurrency.lockutils [None req-39a021c3-8855-476c-8305-d806d48e02a7 tempest-VolumesAssistedSnapshotsTest-904167312 tempest-VolumesAssistedSnapshotsTest-904167312-project-member] Releasing lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1437.726577] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-39a021c3-8855-476c-8305-d806d48e02a7 tempest-VolumesAssistedSnapshotsTest-904167312 tempest-VolumesAssistedSnapshotsTest-904167312-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk to [datastore1] 91902e7f-8c15-447b-a3a8-04433434b1b6/91902e7f-8c15-447b-a3a8-04433434b1b6.vmdk {{(pid=62519) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1437.727352] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-552d1c82-306b-422c-a6b8-f82918a59b05 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1437.739355] env[62519]: DEBUG oslo_vmware.api [None req-39a021c3-8855-476c-8305-d806d48e02a7 tempest-VolumesAssistedSnapshotsTest-904167312 tempest-VolumesAssistedSnapshotsTest-904167312-project-member] Waiting for the task: (returnval){ [ 1437.739355] env[62519]: value = "task-1802047" [ 1437.739355] env[62519]: _type = "Task" [ 1437.739355] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1437.753147] env[62519]: DEBUG oslo_vmware.api [None req-39a021c3-8855-476c-8305-d806d48e02a7 tempest-VolumesAssistedSnapshotsTest-904167312 tempest-VolumesAssistedSnapshotsTest-904167312-project-member] Task: {'id': task-1802047, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1437.806733] env[62519]: DEBUG nova.network.neutron [None req-f54a75ce-3152-4fde-8da2-c76420839723 tempest-InstanceActionsV221TestJSON-1566764076 tempest-InstanceActionsV221TestJSON-1566764076-project-member] [instance: c612a1be-fb39-416d-a9d2-d206582e5aeb] Successfully created port: c5d604f1-fa0e-433c-af5c-5e2116499fe1 {{(pid=62519) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1438.082412] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1802046, 'name': CreateVM_Task, 'duration_secs': 0.35324} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1438.082412] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c61c893f-826b-4874-b253-de6fbffa9e5a] Created VM on the ESX host {{(pid=62519) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1438.082412] env[62519]: DEBUG oslo_concurrency.lockutils [None req-a045cbfb-ac93-4591-9bfc-366cc2fb264e tempest-VolumesAdminNegativeTest-694306887 tempest-VolumesAdminNegativeTest-694306887-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1438.082412] env[62519]: DEBUG oslo_concurrency.lockutils [None req-a045cbfb-ac93-4591-9bfc-366cc2fb264e tempest-VolumesAdminNegativeTest-694306887 tempest-VolumesAdminNegativeTest-694306887-project-member] Acquired lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1438.083647] env[62519]: DEBUG oslo_concurrency.lockutils [None req-a045cbfb-ac93-4591-9bfc-366cc2fb264e tempest-VolumesAdminNegativeTest-694306887 tempest-VolumesAdminNegativeTest-694306887-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1438.083647] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e9bd7886-b99d-4a69-bace-14c1d3733a73 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1438.091872] env[62519]: DEBUG oslo_vmware.api [None req-a045cbfb-ac93-4591-9bfc-366cc2fb264e tempest-VolumesAdminNegativeTest-694306887 tempest-VolumesAdminNegativeTest-694306887-project-member] Waiting for the task: (returnval){ [ 1438.091872] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]52f63ed3-3684-2666-b25c-ba9f260eb312" [ 1438.091872] env[62519]: _type = "Task" [ 1438.091872] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1438.112903] env[62519]: DEBUG oslo_vmware.api [None req-a045cbfb-ac93-4591-9bfc-366cc2fb264e tempest-VolumesAdminNegativeTest-694306887 tempest-VolumesAdminNegativeTest-694306887-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52f63ed3-3684-2666-b25c-ba9f260eb312, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1438.221462] env[62519]: DEBUG oslo_concurrency.lockutils [None req-4ab4b288-3acf-467a-a329-77277bf523db tempest-ServersAdmin275Test-1501358753 tempest-ServersAdmin275Test-1501358753-project-admin] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1438.255045] env[62519]: DEBUG oslo_vmware.api [None req-39a021c3-8855-476c-8305-d806d48e02a7 tempest-VolumesAssistedSnapshotsTest-904167312 tempest-VolumesAssistedSnapshotsTest-904167312-project-member] Task: {'id': task-1802047, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.484361} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1438.255045] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-39a021c3-8855-476c-8305-d806d48e02a7 tempest-VolumesAssistedSnapshotsTest-904167312 tempest-VolumesAssistedSnapshotsTest-904167312-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk to [datastore1] 91902e7f-8c15-447b-a3a8-04433434b1b6/91902e7f-8c15-447b-a3a8-04433434b1b6.vmdk {{(pid=62519) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1438.255045] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-39a021c3-8855-476c-8305-d806d48e02a7 tempest-VolumesAssistedSnapshotsTest-904167312 tempest-VolumesAssistedSnapshotsTest-904167312-project-member] [instance: 91902e7f-8c15-447b-a3a8-04433434b1b6] Extending root virtual disk to 1048576 {{(pid=62519) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1438.256229] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-67198bbd-e184-47b3-8fc8-eb82d343c651 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1438.266295] env[62519]: DEBUG oslo_vmware.api [None req-39a021c3-8855-476c-8305-d806d48e02a7 tempest-VolumesAssistedSnapshotsTest-904167312 tempest-VolumesAssistedSnapshotsTest-904167312-project-member] Waiting for the task: (returnval){ [ 1438.266295] env[62519]: value = "task-1802048" [ 1438.266295] env[62519]: _type = "Task" [ 1438.266295] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1438.277973] env[62519]: DEBUG oslo_vmware.api [None req-39a021c3-8855-476c-8305-d806d48e02a7 tempest-VolumesAssistedSnapshotsTest-904167312 tempest-VolumesAssistedSnapshotsTest-904167312-project-member] Task: {'id': task-1802048, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1438.291113] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3faa648a-1b82-4075-8ef2-6f0074247649 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1438.302453] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e60e81b0-9e3f-4a4d-a08b-7dcb0691b57d {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1438.339859] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1814f10e-50a7-4a2f-ac6f-b35efe25e022 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1438.351118] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2dd11803-bfde-487d-8229-8fdd1464740e {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1438.364620] env[62519]: DEBUG nova.compute.provider_tree [None req-fcf29d77-c706-4a19-9381-3a93a4c3aaf6 tempest-ServerShowV247Test-1912217433 tempest-ServerShowV247Test-1912217433-project-member] Inventory has not changed in ProviderTree for provider: f8ca0d98-9158-4b85-ae0e-b106f966dd44 {{(pid=62519) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1438.375357] env[62519]: DEBUG nova.network.neutron [None req-b98629d6-2be9-4962-a917-529126d46bed tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] [instance: fe350d30-6fbd-4813-9634-ed05984fecfd] Updating instance_info_cache with network_info: [{"id": "6cce4a48-b732-4bd4-a39e-bbc701b31b3b", "address": "fa:16:3e:21:41:9e", "network": {"id": "b4c20e4e-b4f0-4757-9ab0-b74eef10b678", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.181", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "4069337684d348e0a1ab4eb6a1a2b14d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fe99da4f-5630-4afd-918b-b327193d8489", "external-id": "nsx-vlan-transportzone-688", "segmentation_id": 688, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6cce4a48-b7", "ovs_interfaceid": "6cce4a48-b732-4bd4-a39e-bbc701b31b3b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1438.432605] env[62519]: DEBUG nova.compute.manager [req-09fc638a-b876-4381-8545-71108304de6e req-5085e3ce-7ac1-4b11-a9d2-3d6e85830a88 service nova] [instance: c61c893f-826b-4874-b253-de6fbffa9e5a] Received event network-vif-plugged-8a1a2471-fe2e-4bb8-917a-135d0d5f4859 {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1438.432886] env[62519]: DEBUG oslo_concurrency.lockutils [req-09fc638a-b876-4381-8545-71108304de6e 
req-5085e3ce-7ac1-4b11-a9d2-3d6e85830a88 service nova] Acquiring lock "c61c893f-826b-4874-b253-de6fbffa9e5a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1438.433623] env[62519]: DEBUG oslo_concurrency.lockutils [req-09fc638a-b876-4381-8545-71108304de6e req-5085e3ce-7ac1-4b11-a9d2-3d6e85830a88 service nova] Lock "c61c893f-826b-4874-b253-de6fbffa9e5a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1438.433807] env[62519]: DEBUG oslo_concurrency.lockutils [req-09fc638a-b876-4381-8545-71108304de6e req-5085e3ce-7ac1-4b11-a9d2-3d6e85830a88 service nova] Lock "c61c893f-826b-4874-b253-de6fbffa9e5a-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.001s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1438.434173] env[62519]: DEBUG nova.compute.manager [req-09fc638a-b876-4381-8545-71108304de6e req-5085e3ce-7ac1-4b11-a9d2-3d6e85830a88 service nova] [instance: c61c893f-826b-4874-b253-de6fbffa9e5a] No waiting events found dispatching network-vif-plugged-8a1a2471-fe2e-4bb8-917a-135d0d5f4859 {{(pid=62519) pop_instance_event /opt/stack/nova/nova/compute/manager.py:323}} [ 1438.434173] env[62519]: WARNING nova.compute.manager [req-09fc638a-b876-4381-8545-71108304de6e req-5085e3ce-7ac1-4b11-a9d2-3d6e85830a88 service nova] [instance: c61c893f-826b-4874-b253-de6fbffa9e5a] Received unexpected event network-vif-plugged-8a1a2471-fe2e-4bb8-917a-135d0d5f4859 for instance with vm_state building and task_state spawning. [ 1438.434443] env[62519]: DEBUG nova.compute.manager [req-09fc638a-b876-4381-8545-71108304de6e req-5085e3ce-7ac1-4b11-a9d2-3d6e85830a88 service nova] [instance: c61c893f-826b-4874-b253-de6fbffa9e5a] Received event network-changed-8a1a2471-fe2e-4bb8-917a-135d0d5f4859 {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1438.434667] env[62519]: DEBUG nova.compute.manager [req-09fc638a-b876-4381-8545-71108304de6e req-5085e3ce-7ac1-4b11-a9d2-3d6e85830a88 service nova] [instance: c61c893f-826b-4874-b253-de6fbffa9e5a] Refreshing instance network info cache due to event network-changed-8a1a2471-fe2e-4bb8-917a-135d0d5f4859. 
{{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11628}} [ 1438.434932] env[62519]: DEBUG oslo_concurrency.lockutils [req-09fc638a-b876-4381-8545-71108304de6e req-5085e3ce-7ac1-4b11-a9d2-3d6e85830a88 service nova] Acquiring lock "refresh_cache-c61c893f-826b-4874-b253-de6fbffa9e5a" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1438.435257] env[62519]: DEBUG oslo_concurrency.lockutils [req-09fc638a-b876-4381-8545-71108304de6e req-5085e3ce-7ac1-4b11-a9d2-3d6e85830a88 service nova] Acquired lock "refresh_cache-c61c893f-826b-4874-b253-de6fbffa9e5a" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1438.435507] env[62519]: DEBUG nova.network.neutron [req-09fc638a-b876-4381-8545-71108304de6e req-5085e3ce-7ac1-4b11-a9d2-3d6e85830a88 service nova] [instance: c61c893f-826b-4874-b253-de6fbffa9e5a] Refreshing network info cache for port 8a1a2471-fe2e-4bb8-917a-135d0d5f4859 {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1438.608477] env[62519]: DEBUG oslo_vmware.api [None req-a045cbfb-ac93-4591-9bfc-366cc2fb264e tempest-VolumesAdminNegativeTest-694306887 tempest-VolumesAdminNegativeTest-694306887-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52f63ed3-3684-2666-b25c-ba9f260eb312, 'name': SearchDatastore_Task, 'duration_secs': 0.06196} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1438.608717] env[62519]: DEBUG oslo_concurrency.lockutils [None req-a045cbfb-ac93-4591-9bfc-366cc2fb264e tempest-VolumesAdminNegativeTest-694306887 tempest-VolumesAdminNegativeTest-694306887-project-member] Releasing lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1438.608817] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-a045cbfb-ac93-4591-9bfc-366cc2fb264e tempest-VolumesAdminNegativeTest-694306887 tempest-VolumesAdminNegativeTest-694306887-project-member] [instance: c61c893f-826b-4874-b253-de6fbffa9e5a] Processing image 15793716-f1d9-4a86-9030-717adf498693 {{(pid=62519) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1438.609053] env[62519]: DEBUG oslo_concurrency.lockutils [None req-a045cbfb-ac93-4591-9bfc-366cc2fb264e tempest-VolumesAdminNegativeTest-694306887 tempest-VolumesAdminNegativeTest-694306887-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1438.609195] env[62519]: DEBUG oslo_concurrency.lockutils [None req-a045cbfb-ac93-4591-9bfc-366cc2fb264e tempest-VolumesAdminNegativeTest-694306887 tempest-VolumesAdminNegativeTest-694306887-project-member] Acquired lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1438.609405] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-a045cbfb-ac93-4591-9bfc-366cc2fb264e tempest-VolumesAdminNegativeTest-694306887 tempest-VolumesAdminNegativeTest-694306887-project-member] Creating directory with path [datastore1] 
devstack-image-cache_base {{(pid=62519) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1438.609618] env[62519]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-68cb218c-f51a-4ad0-a0ca-b6dea46f9bff {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1438.614081] env[62519]: DEBUG nova.compute.manager [None req-f54a75ce-3152-4fde-8da2-c76420839723 tempest-InstanceActionsV221TestJSON-1566764076 tempest-InstanceActionsV221TestJSON-1566764076-project-member] [instance: c612a1be-fb39-416d-a9d2-d206582e5aeb] Start spawning the instance on the hypervisor. {{(pid=62519) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2647}} [ 1438.628447] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-a045cbfb-ac93-4591-9bfc-366cc2fb264e tempest-VolumesAdminNegativeTest-694306887 tempest-VolumesAdminNegativeTest-694306887-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62519) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1438.628447] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-a045cbfb-ac93-4591-9bfc-366cc2fb264e tempest-VolumesAdminNegativeTest-694306887 tempest-VolumesAdminNegativeTest-694306887-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62519) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1438.630396] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1a2e88c2-b9b5-4133-8fa7-0b68c6b40267 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1438.639195] env[62519]: DEBUG oslo_vmware.api [None req-a045cbfb-ac93-4591-9bfc-366cc2fb264e tempest-VolumesAdminNegativeTest-694306887 tempest-VolumesAdminNegativeTest-694306887-project-member] Waiting for the task: (returnval){ [ 1438.639195] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]52afe44a-dda0-6a2e-03d6-d18ee6389ad3" [ 1438.639195] env[62519]: _type = "Task" [ 1438.639195] env[62519]: } to complete. 
{{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1438.646178] env[62519]: DEBUG nova.virt.hardware [None req-f54a75ce-3152-4fde-8da2-c76420839723 tempest-InstanceActionsV221TestJSON-1566764076 tempest-InstanceActionsV221TestJSON-1566764076-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T07:56:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T07:55:55Z,direct_url=,disk_format='vmdk',id=15793716-f1d9-4a86-9030-717adf498693,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4069337684d348e0a1ab4eb6a1a2b14d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T07:55:56Z,virtual_size=,visibility=), allow threads: False {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1438.646369] env[62519]: DEBUG nova.virt.hardware [None req-f54a75ce-3152-4fde-8da2-c76420839723 tempest-InstanceActionsV221TestJSON-1566764076 tempest-InstanceActionsV221TestJSON-1566764076-project-member] Flavor limits 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1438.646437] env[62519]: DEBUG nova.virt.hardware [None req-f54a75ce-3152-4fde-8da2-c76420839723 tempest-InstanceActionsV221TestJSON-1566764076 tempest-InstanceActionsV221TestJSON-1566764076-project-member] Image limits 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1438.646620] env[62519]: DEBUG nova.virt.hardware [None req-f54a75ce-3152-4fde-8da2-c76420839723 tempest-InstanceActionsV221TestJSON-1566764076 tempest-InstanceActionsV221TestJSON-1566764076-project-member] Flavor pref 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1438.646759] env[62519]: DEBUG nova.virt.hardware [None req-f54a75ce-3152-4fde-8da2-c76420839723 tempest-InstanceActionsV221TestJSON-1566764076 tempest-InstanceActionsV221TestJSON-1566764076-project-member] Image pref 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1438.646903] env[62519]: DEBUG nova.virt.hardware [None req-f54a75ce-3152-4fde-8da2-c76420839723 tempest-InstanceActionsV221TestJSON-1566764076 tempest-InstanceActionsV221TestJSON-1566764076-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1438.647119] env[62519]: DEBUG nova.virt.hardware [None req-f54a75ce-3152-4fde-8da2-c76420839723 tempest-InstanceActionsV221TestJSON-1566764076 tempest-InstanceActionsV221TestJSON-1566764076-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1438.647277] env[62519]: DEBUG nova.virt.hardware [None req-f54a75ce-3152-4fde-8da2-c76420839723 tempest-InstanceActionsV221TestJSON-1566764076 tempest-InstanceActionsV221TestJSON-1566764076-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62519) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1438.647439] env[62519]: DEBUG nova.virt.hardware [None req-f54a75ce-3152-4fde-8da2-c76420839723 tempest-InstanceActionsV221TestJSON-1566764076 tempest-InstanceActionsV221TestJSON-1566764076-project-member] Got 1 possible topologies {{(pid=62519) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1438.647602] env[62519]: DEBUG nova.virt.hardware [None req-f54a75ce-3152-4fde-8da2-c76420839723 tempest-InstanceActionsV221TestJSON-1566764076 tempest-InstanceActionsV221TestJSON-1566764076-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1438.647774] env[62519]: DEBUG nova.virt.hardware [None req-f54a75ce-3152-4fde-8da2-c76420839723 tempest-InstanceActionsV221TestJSON-1566764076 tempest-InstanceActionsV221TestJSON-1566764076-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1438.649799] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce5a0753-49a1-49b2-8673-7506c3dfba59 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1438.654459] env[62519]: DEBUG oslo_vmware.api [None req-a045cbfb-ac93-4591-9bfc-366cc2fb264e tempest-VolumesAdminNegativeTest-694306887 tempest-VolumesAdminNegativeTest-694306887-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52afe44a-dda0-6a2e-03d6-d18ee6389ad3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1438.660057] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7e06330-9c6e-40f3-becc-c7500509cd7e {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1438.777684] env[62519]: DEBUG oslo_vmware.api [None req-39a021c3-8855-476c-8305-d806d48e02a7 tempest-VolumesAssistedSnapshotsTest-904167312 tempest-VolumesAssistedSnapshotsTest-904167312-project-member] Task: {'id': task-1802048, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.068275} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1438.777966] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-39a021c3-8855-476c-8305-d806d48e02a7 tempest-VolumesAssistedSnapshotsTest-904167312 tempest-VolumesAssistedSnapshotsTest-904167312-project-member] [instance: 91902e7f-8c15-447b-a3a8-04433434b1b6] Extended root virtual disk {{(pid=62519) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1438.778756] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a66e7ce-24ff-431f-b75a-6b5187d2137a {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1438.807448] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-39a021c3-8855-476c-8305-d806d48e02a7 tempest-VolumesAssistedSnapshotsTest-904167312 tempest-VolumesAssistedSnapshotsTest-904167312-project-member] [instance: 91902e7f-8c15-447b-a3a8-04433434b1b6] Reconfiguring VM instance instance-0000000f to attach disk [datastore1] 91902e7f-8c15-447b-a3a8-04433434b1b6/91902e7f-8c15-447b-a3a8-04433434b1b6.vmdk or device None with type sparse {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1438.807794] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-87f3c0ff-a5bc-49e1-bf61-d6d57f8dfe3e {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1438.829460] env[62519]: DEBUG oslo_vmware.api [None req-39a021c3-8855-476c-8305-d806d48e02a7 tempest-VolumesAssistedSnapshotsTest-904167312 tempest-VolumesAssistedSnapshotsTest-904167312-project-member] Waiting for the task: (returnval){ [ 1438.829460] env[62519]: value = "task-1802049" [ 1438.829460] env[62519]: _type = "Task" [ 1438.829460] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1438.837794] env[62519]: DEBUG oslo_vmware.api [None req-39a021c3-8855-476c-8305-d806d48e02a7 tempest-VolumesAssistedSnapshotsTest-904167312 tempest-VolumesAssistedSnapshotsTest-904167312-project-member] Task: {'id': task-1802049, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1438.870499] env[62519]: DEBUG nova.scheduler.client.report [None req-fcf29d77-c706-4a19-9381-3a93a4c3aaf6 tempest-ServerShowV247Test-1912217433 tempest-ServerShowV247Test-1912217433-project-member] Inventory has not changed for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1438.877258] env[62519]: DEBUG oslo_concurrency.lockutils [None req-b98629d6-2be9-4962-a917-529126d46bed tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Releasing lock "refresh_cache-fe350d30-6fbd-4813-9634-ed05984fecfd" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1438.877450] env[62519]: DEBUG nova.objects.instance [None req-b98629d6-2be9-4962-a917-529126d46bed tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Lazy-loading 'migration_context' on Instance uuid fe350d30-6fbd-4813-9634-ed05984fecfd {{(pid=62519) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1439.152329] env[62519]: DEBUG oslo_vmware.api [None req-a045cbfb-ac93-4591-9bfc-366cc2fb264e tempest-VolumesAdminNegativeTest-694306887 tempest-VolumesAdminNegativeTest-694306887-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52afe44a-dda0-6a2e-03d6-d18ee6389ad3, 'name': SearchDatastore_Task, 'duration_secs': 0.049897} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1439.153366] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cafae301-fa33-4394-80c0-64102f1b5463 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1439.159578] env[62519]: DEBUG oslo_vmware.api [None req-a045cbfb-ac93-4591-9bfc-366cc2fb264e tempest-VolumesAdminNegativeTest-694306887 tempest-VolumesAdminNegativeTest-694306887-project-member] Waiting for the task: (returnval){ [ 1439.159578] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]5213ad17-5cc2-d59a-5f65-eb48459d5972" [ 1439.159578] env[62519]: _type = "Task" [ 1439.159578] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1439.169180] env[62519]: DEBUG oslo_vmware.api [None req-a045cbfb-ac93-4591-9bfc-366cc2fb264e tempest-VolumesAdminNegativeTest-694306887 tempest-VolumesAdminNegativeTest-694306887-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]5213ad17-5cc2-d59a-5f65-eb48459d5972, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1439.182095] env[62519]: DEBUG nova.network.neutron [req-09fc638a-b876-4381-8545-71108304de6e req-5085e3ce-7ac1-4b11-a9d2-3d6e85830a88 service nova] [instance: c61c893f-826b-4874-b253-de6fbffa9e5a] Updated VIF entry in instance network info cache for port 8a1a2471-fe2e-4bb8-917a-135d0d5f4859. {{(pid=62519) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1439.182613] env[62519]: DEBUG nova.network.neutron [req-09fc638a-b876-4381-8545-71108304de6e req-5085e3ce-7ac1-4b11-a9d2-3d6e85830a88 service nova] [instance: c61c893f-826b-4874-b253-de6fbffa9e5a] Updating instance_info_cache with network_info: [{"id": "8a1a2471-fe2e-4bb8-917a-135d0d5f4859", "address": "fa:16:3e:cc:22:35", "network": {"id": "58441f17-708d-4a4c-9e78-8a8d403416ae", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-132258554-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "92b737895c7c42f78fbc5d0fff165dc8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b5c34919-7d52-4a52-bab1-81af4c8182ef", "external-id": "nsx-vlan-transportzone-458", "segmentation_id": 458, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8a1a2471-fe", "ovs_interfaceid": "8a1a2471-fe2e-4bb8-917a-135d0d5f4859", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1439.340880] env[62519]: DEBUG oslo_vmware.api [None req-39a021c3-8855-476c-8305-d806d48e02a7 tempest-VolumesAssistedSnapshotsTest-904167312 tempest-VolumesAssistedSnapshotsTest-904167312-project-member] Task: {'id': task-1802049, 'name': ReconfigVM_Task, 'duration_secs': 0.507585} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1439.341384] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-39a021c3-8855-476c-8305-d806d48e02a7 tempest-VolumesAssistedSnapshotsTest-904167312 tempest-VolumesAssistedSnapshotsTest-904167312-project-member] [instance: 91902e7f-8c15-447b-a3a8-04433434b1b6] Reconfigured VM instance instance-0000000f to attach disk [datastore1] 91902e7f-8c15-447b-a3a8-04433434b1b6/91902e7f-8c15-447b-a3a8-04433434b1b6.vmdk or device None with type sparse {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1439.342208] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-b2c16c12-c800-4d51-b8b7-43593736c45f {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1439.349999] env[62519]: DEBUG oslo_vmware.api [None req-39a021c3-8855-476c-8305-d806d48e02a7 tempest-VolumesAssistedSnapshotsTest-904167312 tempest-VolumesAssistedSnapshotsTest-904167312-project-member] Waiting for the task: (returnval){ [ 1439.349999] env[62519]: value = "task-1802050" [ 1439.349999] env[62519]: _type = "Task" [ 1439.349999] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1439.360356] env[62519]: DEBUG oslo_vmware.api [None req-39a021c3-8855-476c-8305-d806d48e02a7 tempest-VolumesAssistedSnapshotsTest-904167312 tempest-VolumesAssistedSnapshotsTest-904167312-project-member] Task: {'id': task-1802050, 'name': Rename_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1439.374793] env[62519]: DEBUG oslo_concurrency.lockutils [None req-fcf29d77-c706-4a19-9381-3a93a4c3aaf6 tempest-ServerShowV247Test-1912217433 tempest-ServerShowV247Test-1912217433-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.788s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1439.374793] env[62519]: DEBUG nova.compute.manager [None req-fcf29d77-c706-4a19-9381-3a93a4c3aaf6 tempest-ServerShowV247Test-1912217433 tempest-ServerShowV247Test-1912217433-project-member] [instance: 42497ab5-cce9-4614-a6d1-dffbf6764d7b] Start building networks asynchronously for instance. 
{{(pid=62519) _build_resources /opt/stack/nova/nova/compute/manager.py:2838}} [ 1439.381028] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2cdb3cee-efd6-4022-b37b-5545123251f0 tempest-ServerShowV247Test-1912217433 tempest-ServerShowV247Test-1912217433-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 22.952s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1439.381028] env[62519]: INFO nova.compute.claims [None req-2cdb3cee-efd6-4022-b37b-5545123251f0 tempest-ServerShowV247Test-1912217433 tempest-ServerShowV247Test-1912217433-project-member] [instance: 49221ea3-d457-4cf5-97a9-9ae74c4e86fb] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1439.384971] env[62519]: DEBUG nova.objects.base [None req-b98629d6-2be9-4962-a917-529126d46bed tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Object Instance lazy-loaded attributes: info_cache,migration_context {{(pid=62519) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1439.386428] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-698ba15e-be28-4300-b462-0c7e2d9327b5 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1439.412015] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-197ef30c-28f1-4ca8-8766-d807f95ed4a1 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1439.419957] env[62519]: DEBUG oslo_vmware.api [None req-b98629d6-2be9-4962-a917-529126d46bed tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Waiting for the task: (returnval){ [ 1439.419957] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]5256b0bf-eca3-2c60-4599-c424d5702eea" [ 1439.419957] env[62519]: _type = "Task" [ 1439.419957] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1439.437799] env[62519]: DEBUG oslo_vmware.api [None req-b98629d6-2be9-4962-a917-529126d46bed tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]5256b0bf-eca3-2c60-4599-c424d5702eea, 'name': SearchDatastore_Task, 'duration_secs': 0.013501} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1439.438767] env[62519]: DEBUG oslo_concurrency.lockutils [None req-b98629d6-2be9-4962-a917-529126d46bed tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1439.626999] env[62519]: DEBUG oslo_concurrency.lockutils [None req-beeaee40-aeb0-4929-99b9-82dcecee9d8f tempest-ServersAdmin275Test-1657935187 tempest-ServersAdmin275Test-1657935187-project-member] Acquiring lock "c07e4d30-44bc-417b-8137-97f974aec932" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1439.627369] env[62519]: DEBUG oslo_concurrency.lockutils [None req-beeaee40-aeb0-4929-99b9-82dcecee9d8f tempest-ServersAdmin275Test-1657935187 tempest-ServersAdmin275Test-1657935187-project-member] Lock "c07e4d30-44bc-417b-8137-97f974aec932" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1439.627624] env[62519]: DEBUG oslo_concurrency.lockutils [None req-beeaee40-aeb0-4929-99b9-82dcecee9d8f tempest-ServersAdmin275Test-1657935187 tempest-ServersAdmin275Test-1657935187-project-member] Acquiring lock "c07e4d30-44bc-417b-8137-97f974aec932-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1439.627827] env[62519]: DEBUG oslo_concurrency.lockutils [None req-beeaee40-aeb0-4929-99b9-82dcecee9d8f tempest-ServersAdmin275Test-1657935187 tempest-ServersAdmin275Test-1657935187-project-member] Lock "c07e4d30-44bc-417b-8137-97f974aec932-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1439.627993] env[62519]: DEBUG oslo_concurrency.lockutils [None req-beeaee40-aeb0-4929-99b9-82dcecee9d8f tempest-ServersAdmin275Test-1657935187 tempest-ServersAdmin275Test-1657935187-project-member] Lock "c07e4d30-44bc-417b-8137-97f974aec932-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1439.630088] env[62519]: INFO nova.compute.manager [None req-beeaee40-aeb0-4929-99b9-82dcecee9d8f tempest-ServersAdmin275Test-1657935187 tempest-ServersAdmin275Test-1657935187-project-member] [instance: c07e4d30-44bc-417b-8137-97f974aec932] Terminating instance [ 1439.669930] env[62519]: DEBUG oslo_vmware.api [None req-a045cbfb-ac93-4591-9bfc-366cc2fb264e tempest-VolumesAdminNegativeTest-694306887 tempest-VolumesAdminNegativeTest-694306887-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]5213ad17-5cc2-d59a-5f65-eb48459d5972, 'name': SearchDatastore_Task, 'duration_secs': 0.018598} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1439.670563] env[62519]: DEBUG oslo_concurrency.lockutils [None req-a045cbfb-ac93-4591-9bfc-366cc2fb264e tempest-VolumesAdminNegativeTest-694306887 tempest-VolumesAdminNegativeTest-694306887-project-member] Releasing lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1439.670563] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-a045cbfb-ac93-4591-9bfc-366cc2fb264e tempest-VolumesAdminNegativeTest-694306887 tempest-VolumesAdminNegativeTest-694306887-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk to [datastore1] c61c893f-826b-4874-b253-de6fbffa9e5a/c61c893f-826b-4874-b253-de6fbffa9e5a.vmdk {{(pid=62519) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1439.670728] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-2e749134-35ed-4cd2-abca-bcb2409eee6e {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1439.678804] env[62519]: DEBUG oslo_vmware.api [None req-a045cbfb-ac93-4591-9bfc-366cc2fb264e tempest-VolumesAdminNegativeTest-694306887 tempest-VolumesAdminNegativeTest-694306887-project-member] Waiting for the task: (returnval){ [ 1439.678804] env[62519]: value = "task-1802051" [ 1439.678804] env[62519]: _type = "Task" [ 1439.678804] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1439.685599] env[62519]: DEBUG oslo_concurrency.lockutils [req-09fc638a-b876-4381-8545-71108304de6e req-5085e3ce-7ac1-4b11-a9d2-3d6e85830a88 service nova] Releasing lock "refresh_cache-c61c893f-826b-4874-b253-de6fbffa9e5a" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1439.690161] env[62519]: DEBUG oslo_vmware.api [None req-a045cbfb-ac93-4591-9bfc-366cc2fb264e tempest-VolumesAdminNegativeTest-694306887 tempest-VolumesAdminNegativeTest-694306887-project-member] Task: {'id': task-1802051, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1439.710664] env[62519]: DEBUG nova.network.neutron [None req-f54a75ce-3152-4fde-8da2-c76420839723 tempest-InstanceActionsV221TestJSON-1566764076 tempest-InstanceActionsV221TestJSON-1566764076-project-member] [instance: c612a1be-fb39-416d-a9d2-d206582e5aeb] Successfully updated port: c5d604f1-fa0e-433c-af5c-5e2116499fe1 {{(pid=62519) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1439.863036] env[62519]: DEBUG oslo_vmware.api [None req-39a021c3-8855-476c-8305-d806d48e02a7 tempest-VolumesAssistedSnapshotsTest-904167312 tempest-VolumesAssistedSnapshotsTest-904167312-project-member] Task: {'id': task-1802050, 'name': Rename_Task, 'duration_secs': 0.154989} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1439.863412] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-39a021c3-8855-476c-8305-d806d48e02a7 tempest-VolumesAssistedSnapshotsTest-904167312 tempest-VolumesAssistedSnapshotsTest-904167312-project-member] [instance: 91902e7f-8c15-447b-a3a8-04433434b1b6] Powering on the VM {{(pid=62519) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1439.863742] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a7479777-0f62-474f-b8ca-83e6d83075cc {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1439.871557] env[62519]: DEBUG oslo_vmware.api [None req-39a021c3-8855-476c-8305-d806d48e02a7 tempest-VolumesAssistedSnapshotsTest-904167312 tempest-VolumesAssistedSnapshotsTest-904167312-project-member] Waiting for the task: (returnval){ [ 1439.871557] env[62519]: value = "task-1802052" [ 1439.871557] env[62519]: _type = "Task" [ 1439.871557] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1439.881893] env[62519]: DEBUG oslo_vmware.api [None req-39a021c3-8855-476c-8305-d806d48e02a7 tempest-VolumesAssistedSnapshotsTest-904167312 tempest-VolumesAssistedSnapshotsTest-904167312-project-member] Task: {'id': task-1802052, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1439.889286] env[62519]: DEBUG nova.compute.utils [None req-fcf29d77-c706-4a19-9381-3a93a4c3aaf6 tempest-ServerShowV247Test-1912217433 tempest-ServerShowV247Test-1912217433-project-member] Using /dev/sd instead of None {{(pid=62519) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1439.891493] env[62519]: DEBUG nova.compute.manager [None req-fcf29d77-c706-4a19-9381-3a93a4c3aaf6 tempest-ServerShowV247Test-1912217433 tempest-ServerShowV247Test-1912217433-project-member] [instance: 42497ab5-cce9-4614-a6d1-dffbf6764d7b] Not allocating networking since 'none' was specified. 
{{(pid=62519) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1985}} [ 1440.134472] env[62519]: DEBUG oslo_concurrency.lockutils [None req-beeaee40-aeb0-4929-99b9-82dcecee9d8f tempest-ServersAdmin275Test-1657935187 tempest-ServersAdmin275Test-1657935187-project-member] Acquiring lock "refresh_cache-c07e4d30-44bc-417b-8137-97f974aec932" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1440.134629] env[62519]: DEBUG oslo_concurrency.lockutils [None req-beeaee40-aeb0-4929-99b9-82dcecee9d8f tempest-ServersAdmin275Test-1657935187 tempest-ServersAdmin275Test-1657935187-project-member] Acquired lock "refresh_cache-c07e4d30-44bc-417b-8137-97f974aec932" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1440.134931] env[62519]: DEBUG nova.network.neutron [None req-beeaee40-aeb0-4929-99b9-82dcecee9d8f tempest-ServersAdmin275Test-1657935187 tempest-ServersAdmin275Test-1657935187-project-member] [instance: c07e4d30-44bc-417b-8137-97f974aec932] Building network info cache for instance {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1440.196032] env[62519]: DEBUG oslo_vmware.api [None req-a045cbfb-ac93-4591-9bfc-366cc2fb264e tempest-VolumesAdminNegativeTest-694306887 tempest-VolumesAdminNegativeTest-694306887-project-member] Task: {'id': task-1802051, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1440.215928] env[62519]: DEBUG oslo_concurrency.lockutils [None req-f54a75ce-3152-4fde-8da2-c76420839723 tempest-InstanceActionsV221TestJSON-1566764076 tempest-InstanceActionsV221TestJSON-1566764076-project-member] Acquiring lock "refresh_cache-c612a1be-fb39-416d-a9d2-d206582e5aeb" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1440.215928] env[62519]: DEBUG oslo_concurrency.lockutils [None req-f54a75ce-3152-4fde-8da2-c76420839723 tempest-InstanceActionsV221TestJSON-1566764076 tempest-InstanceActionsV221TestJSON-1566764076-project-member] Acquired lock "refresh_cache-c612a1be-fb39-416d-a9d2-d206582e5aeb" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1440.216052] env[62519]: DEBUG nova.network.neutron [None req-f54a75ce-3152-4fde-8da2-c76420839723 tempest-InstanceActionsV221TestJSON-1566764076 tempest-InstanceActionsV221TestJSON-1566764076-project-member] [instance: c612a1be-fb39-416d-a9d2-d206582e5aeb] Building network info cache for instance {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1440.384387] env[62519]: DEBUG oslo_vmware.api [None req-39a021c3-8855-476c-8305-d806d48e02a7 tempest-VolumesAssistedSnapshotsTest-904167312 tempest-VolumesAssistedSnapshotsTest-904167312-project-member] Task: {'id': task-1802052, 'name': PowerOnVM_Task} progress is 100%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1440.395154] env[62519]: DEBUG nova.compute.manager [None req-fcf29d77-c706-4a19-9381-3a93a4c3aaf6 tempest-ServerShowV247Test-1912217433 tempest-ServerShowV247Test-1912217433-project-member] [instance: 42497ab5-cce9-4614-a6d1-dffbf6764d7b] Start building block device mappings for instance. 
{{(pid=62519) _build_resources /opt/stack/nova/nova/compute/manager.py:2873}} [ 1440.601974] env[62519]: DEBUG nova.compute.manager [req-8ceeee9d-380a-4a0f-881d-b6efb8aaa37a req-115a42cb-4ded-440f-9a3b-15eb14c0621b service nova] [instance: c612a1be-fb39-416d-a9d2-d206582e5aeb] Received event network-vif-plugged-c5d604f1-fa0e-433c-af5c-5e2116499fe1 {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1440.604042] env[62519]: DEBUG oslo_concurrency.lockutils [req-8ceeee9d-380a-4a0f-881d-b6efb8aaa37a req-115a42cb-4ded-440f-9a3b-15eb14c0621b service nova] Acquiring lock "c612a1be-fb39-416d-a9d2-d206582e5aeb-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1440.604042] env[62519]: DEBUG oslo_concurrency.lockutils [req-8ceeee9d-380a-4a0f-881d-b6efb8aaa37a req-115a42cb-4ded-440f-9a3b-15eb14c0621b service nova] Lock "c612a1be-fb39-416d-a9d2-d206582e5aeb-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1440.604042] env[62519]: DEBUG oslo_concurrency.lockutils [req-8ceeee9d-380a-4a0f-881d-b6efb8aaa37a req-115a42cb-4ded-440f-9a3b-15eb14c0621b service nova] Lock "c612a1be-fb39-416d-a9d2-d206582e5aeb-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1440.604042] env[62519]: DEBUG nova.compute.manager [req-8ceeee9d-380a-4a0f-881d-b6efb8aaa37a req-115a42cb-4ded-440f-9a3b-15eb14c0621b service nova] [instance: c612a1be-fb39-416d-a9d2-d206582e5aeb] No waiting events found dispatching network-vif-plugged-c5d604f1-fa0e-433c-af5c-5e2116499fe1 {{(pid=62519) pop_instance_event /opt/stack/nova/nova/compute/manager.py:323}} [ 1440.604042] env[62519]: WARNING nova.compute.manager [req-8ceeee9d-380a-4a0f-881d-b6efb8aaa37a req-115a42cb-4ded-440f-9a3b-15eb14c0621b service nova] [instance: c612a1be-fb39-416d-a9d2-d206582e5aeb] Received unexpected event network-vif-plugged-c5d604f1-fa0e-433c-af5c-5e2116499fe1 for instance with vm_state building and task_state spawning. [ 1440.604291] env[62519]: DEBUG nova.compute.manager [req-8ceeee9d-380a-4a0f-881d-b6efb8aaa37a req-115a42cb-4ded-440f-9a3b-15eb14c0621b service nova] [instance: c612a1be-fb39-416d-a9d2-d206582e5aeb] Received event network-changed-c5d604f1-fa0e-433c-af5c-5e2116499fe1 {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1440.604291] env[62519]: DEBUG nova.compute.manager [req-8ceeee9d-380a-4a0f-881d-b6efb8aaa37a req-115a42cb-4ded-440f-9a3b-15eb14c0621b service nova] [instance: c612a1be-fb39-416d-a9d2-d206582e5aeb] Refreshing instance network info cache due to event network-changed-c5d604f1-fa0e-433c-af5c-5e2116499fe1. 
{{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11628}} [ 1440.604462] env[62519]: DEBUG oslo_concurrency.lockutils [req-8ceeee9d-380a-4a0f-881d-b6efb8aaa37a req-115a42cb-4ded-440f-9a3b-15eb14c0621b service nova] Acquiring lock "refresh_cache-c612a1be-fb39-416d-a9d2-d206582e5aeb" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1440.658167] env[62519]: DEBUG nova.network.neutron [None req-beeaee40-aeb0-4929-99b9-82dcecee9d8f tempest-ServersAdmin275Test-1657935187 tempest-ServersAdmin275Test-1657935187-project-member] [instance: c07e4d30-44bc-417b-8137-97f974aec932] Instance cache missing network info. {{(pid=62519) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1440.692716] env[62519]: DEBUG oslo_vmware.api [None req-a045cbfb-ac93-4591-9bfc-366cc2fb264e tempest-VolumesAdminNegativeTest-694306887 tempest-VolumesAdminNegativeTest-694306887-project-member] Task: {'id': task-1802051, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.551122} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1440.693536] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-a045cbfb-ac93-4591-9bfc-366cc2fb264e tempest-VolumesAdminNegativeTest-694306887 tempest-VolumesAdminNegativeTest-694306887-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk to [datastore1] c61c893f-826b-4874-b253-de6fbffa9e5a/c61c893f-826b-4874-b253-de6fbffa9e5a.vmdk {{(pid=62519) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1440.693781] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-a045cbfb-ac93-4591-9bfc-366cc2fb264e tempest-VolumesAdminNegativeTest-694306887 tempest-VolumesAdminNegativeTest-694306887-project-member] [instance: c61c893f-826b-4874-b253-de6fbffa9e5a] Extending root virtual disk to 1048576 {{(pid=62519) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1440.694218] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-8ddabfe4-b3d4-4ae7-b453-fdf9c94000c0 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1440.703464] env[62519]: DEBUG oslo_vmware.api [None req-a045cbfb-ac93-4591-9bfc-366cc2fb264e tempest-VolumesAdminNegativeTest-694306887 tempest-VolumesAdminNegativeTest-694306887-project-member] Waiting for the task: (returnval){ [ 1440.703464] env[62519]: value = "task-1802053" [ 1440.703464] env[62519]: _type = "Task" [ 1440.703464] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1440.712735] env[62519]: DEBUG oslo_vmware.api [None req-a045cbfb-ac93-4591-9bfc-366cc2fb264e tempest-VolumesAdminNegativeTest-694306887 tempest-VolumesAdminNegativeTest-694306887-project-member] Task: {'id': task-1802053, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1440.731794] env[62519]: DEBUG nova.network.neutron [None req-beeaee40-aeb0-4929-99b9-82dcecee9d8f tempest-ServersAdmin275Test-1657935187 tempest-ServersAdmin275Test-1657935187-project-member] [instance: c07e4d30-44bc-417b-8137-97f974aec932] Updating instance_info_cache with network_info: [] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1440.758367] env[62519]: DEBUG nova.network.neutron [None req-f54a75ce-3152-4fde-8da2-c76420839723 tempest-InstanceActionsV221TestJSON-1566764076 tempest-InstanceActionsV221TestJSON-1566764076-project-member] [instance: c612a1be-fb39-416d-a9d2-d206582e5aeb] Instance cache missing network info. {{(pid=62519) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1440.882962] env[62519]: DEBUG oslo_vmware.api [None req-39a021c3-8855-476c-8305-d806d48e02a7 tempest-VolumesAssistedSnapshotsTest-904167312 tempest-VolumesAssistedSnapshotsTest-904167312-project-member] Task: {'id': task-1802052, 'name': PowerOnVM_Task, 'duration_secs': 0.519196} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1440.885720] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-39a021c3-8855-476c-8305-d806d48e02a7 tempest-VolumesAssistedSnapshotsTest-904167312 tempest-VolumesAssistedSnapshotsTest-904167312-project-member] [instance: 91902e7f-8c15-447b-a3a8-04433434b1b6] Powered on the VM {{(pid=62519) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1440.885939] env[62519]: INFO nova.compute.manager [None req-39a021c3-8855-476c-8305-d806d48e02a7 tempest-VolumesAssistedSnapshotsTest-904167312 tempest-VolumesAssistedSnapshotsTest-904167312-project-member] [instance: 91902e7f-8c15-447b-a3a8-04433434b1b6] Took 8.64 seconds to spawn the instance on the hypervisor. 
[ 1440.886139] env[62519]: DEBUG nova.compute.manager [None req-39a021c3-8855-476c-8305-d806d48e02a7 tempest-VolumesAssistedSnapshotsTest-904167312 tempest-VolumesAssistedSnapshotsTest-904167312-project-member] [instance: 91902e7f-8c15-447b-a3a8-04433434b1b6] Checking state {{(pid=62519) _get_power_state /opt/stack/nova/nova/compute/manager.py:1799}} [ 1440.887212] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-561e3fbf-5756-4da6-bafa-0abe29d85654 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1440.919673] env[62519]: DEBUG nova.network.neutron [None req-f54a75ce-3152-4fde-8da2-c76420839723 tempest-InstanceActionsV221TestJSON-1566764076 tempest-InstanceActionsV221TestJSON-1566764076-project-member] [instance: c612a1be-fb39-416d-a9d2-d206582e5aeb] Updating instance_info_cache with network_info: [{"id": "c5d604f1-fa0e-433c-af5c-5e2116499fe1", "address": "fa:16:3e:32:02:d4", "network": {"id": "b5e5e611-6414-4f2e-8c30-0a0921562f95", "bridge": "br-int", "label": "tempest-InstanceActionsV221TestJSON-1439677770-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9d19c8963364423880f5c2d61fb5800f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e85cbc56-fee0-41f7-bc70-64f31775ce92", "external-id": "nsx-vlan-transportzone-793", "segmentation_id": 793, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc5d604f1-fa", "ovs_interfaceid": "c5d604f1-fa0e-433c-af5c-5e2116499fe1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1440.973099] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02c57d67-cab7-4c68-910d-e23d3b5ba4b0 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1440.982538] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fcbd2211-7881-4798-943a-f3cda2608452 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1441.012573] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eaaca841-d8f2-4cfe-b1b0-4fe0195d91da {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1441.021614] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5c7a3e6-3ac1-419c-993b-9677ab913c74 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1441.036558] env[62519]: DEBUG nova.compute.provider_tree [None req-2cdb3cee-efd6-4022-b37b-5545123251f0 tempest-ServerShowV247Test-1912217433 tempest-ServerShowV247Test-1912217433-project-member] Inventory has not changed in ProviderTree for 
provider: f8ca0d98-9158-4b85-ae0e-b106f966dd44 {{(pid=62519) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1441.214335] env[62519]: DEBUG oslo_vmware.api [None req-a045cbfb-ac93-4591-9bfc-366cc2fb264e tempest-VolumesAdminNegativeTest-694306887 tempest-VolumesAdminNegativeTest-694306887-project-member] Task: {'id': task-1802053, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.075423} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1441.214621] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-a045cbfb-ac93-4591-9bfc-366cc2fb264e tempest-VolumesAdminNegativeTest-694306887 tempest-VolumesAdminNegativeTest-694306887-project-member] [instance: c61c893f-826b-4874-b253-de6fbffa9e5a] Extended root virtual disk {{(pid=62519) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1441.215425] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9b885d0-cabc-4d5f-b40f-b78c5d1542b8 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1441.238370] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-a045cbfb-ac93-4591-9bfc-366cc2fb264e tempest-VolumesAdminNegativeTest-694306887 tempest-VolumesAdminNegativeTest-694306887-project-member] [instance: c61c893f-826b-4874-b253-de6fbffa9e5a] Reconfiguring VM instance instance-00000010 to attach disk [datastore1] c61c893f-826b-4874-b253-de6fbffa9e5a/c61c893f-826b-4874-b253-de6fbffa9e5a.vmdk or device None with type sparse {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1441.238906] env[62519]: DEBUG oslo_concurrency.lockutils [None req-beeaee40-aeb0-4929-99b9-82dcecee9d8f tempest-ServersAdmin275Test-1657935187 tempest-ServersAdmin275Test-1657935187-project-member] Releasing lock "refresh_cache-c07e4d30-44bc-417b-8137-97f974aec932" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1441.239304] env[62519]: DEBUG nova.compute.manager [None req-beeaee40-aeb0-4929-99b9-82dcecee9d8f tempest-ServersAdmin275Test-1657935187 tempest-ServersAdmin275Test-1657935187-project-member] [instance: c07e4d30-44bc-417b-8137-97f974aec932] Start destroying the instance on the hypervisor. 
{{(pid=62519) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3166}} [ 1441.239494] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-beeaee40-aeb0-4929-99b9-82dcecee9d8f tempest-ServersAdmin275Test-1657935187 tempest-ServersAdmin275Test-1657935187-project-member] [instance: c07e4d30-44bc-417b-8137-97f974aec932] Destroying instance {{(pid=62519) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1441.239724] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-45a4c639-c078-4487-bcde-2d1bf110256b {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1441.254463] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2388d3a4-cc69-433a-bf14-22140569514e {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1441.263547] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-beeaee40-aeb0-4929-99b9-82dcecee9d8f tempest-ServersAdmin275Test-1657935187 tempest-ServersAdmin275Test-1657935187-project-member] [instance: c07e4d30-44bc-417b-8137-97f974aec932] Powering off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1441.264777] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ed30e9ab-fe37-4390-be92-7c2edb410892 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1441.266347] env[62519]: DEBUG oslo_vmware.api [None req-a045cbfb-ac93-4591-9bfc-366cc2fb264e tempest-VolumesAdminNegativeTest-694306887 tempest-VolumesAdminNegativeTest-694306887-project-member] Waiting for the task: (returnval){ [ 1441.266347] env[62519]: value = "task-1802054" [ 1441.266347] env[62519]: _type = "Task" [ 1441.266347] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1441.271770] env[62519]: DEBUG oslo_vmware.api [None req-beeaee40-aeb0-4929-99b9-82dcecee9d8f tempest-ServersAdmin275Test-1657935187 tempest-ServersAdmin275Test-1657935187-project-member] Waiting for the task: (returnval){ [ 1441.271770] env[62519]: value = "task-1802055" [ 1441.271770] env[62519]: _type = "Task" [ 1441.271770] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1441.274948] env[62519]: DEBUG oslo_vmware.api [None req-a045cbfb-ac93-4591-9bfc-366cc2fb264e tempest-VolumesAdminNegativeTest-694306887 tempest-VolumesAdminNegativeTest-694306887-project-member] Task: {'id': task-1802054, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1441.285603] env[62519]: DEBUG oslo_vmware.api [None req-beeaee40-aeb0-4929-99b9-82dcecee9d8f tempest-ServersAdmin275Test-1657935187 tempest-ServersAdmin275Test-1657935187-project-member] Task: {'id': task-1802055, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1441.412113] env[62519]: DEBUG nova.compute.manager [None req-fcf29d77-c706-4a19-9381-3a93a4c3aaf6 tempest-ServerShowV247Test-1912217433 tempest-ServerShowV247Test-1912217433-project-member] [instance: 42497ab5-cce9-4614-a6d1-dffbf6764d7b] Start spawning the instance on the hypervisor. 
{{(pid=62519) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2647}} [ 1441.420552] env[62519]: INFO nova.compute.manager [None req-39a021c3-8855-476c-8305-d806d48e02a7 tempest-VolumesAssistedSnapshotsTest-904167312 tempest-VolumesAssistedSnapshotsTest-904167312-project-member] [instance: 91902e7f-8c15-447b-a3a8-04433434b1b6] Took 38.12 seconds to build instance. [ 1441.421718] env[62519]: DEBUG oslo_concurrency.lockutils [None req-f54a75ce-3152-4fde-8da2-c76420839723 tempest-InstanceActionsV221TestJSON-1566764076 tempest-InstanceActionsV221TestJSON-1566764076-project-member] Releasing lock "refresh_cache-c612a1be-fb39-416d-a9d2-d206582e5aeb" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1441.422010] env[62519]: DEBUG nova.compute.manager [None req-f54a75ce-3152-4fde-8da2-c76420839723 tempest-InstanceActionsV221TestJSON-1566764076 tempest-InstanceActionsV221TestJSON-1566764076-project-member] [instance: c612a1be-fb39-416d-a9d2-d206582e5aeb] Instance network_info: |[{"id": "c5d604f1-fa0e-433c-af5c-5e2116499fe1", "address": "fa:16:3e:32:02:d4", "network": {"id": "b5e5e611-6414-4f2e-8c30-0a0921562f95", "bridge": "br-int", "label": "tempest-InstanceActionsV221TestJSON-1439677770-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9d19c8963364423880f5c2d61fb5800f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e85cbc56-fee0-41f7-bc70-64f31775ce92", "external-id": "nsx-vlan-transportzone-793", "segmentation_id": 793, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc5d604f1-fa", "ovs_interfaceid": "c5d604f1-fa0e-433c-af5c-5e2116499fe1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62519) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2004}} [ 1441.422315] env[62519]: DEBUG oslo_concurrency.lockutils [req-8ceeee9d-380a-4a0f-881d-b6efb8aaa37a req-115a42cb-4ded-440f-9a3b-15eb14c0621b service nova] Acquired lock "refresh_cache-c612a1be-fb39-416d-a9d2-d206582e5aeb" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1441.422625] env[62519]: DEBUG nova.network.neutron [req-8ceeee9d-380a-4a0f-881d-b6efb8aaa37a req-115a42cb-4ded-440f-9a3b-15eb14c0621b service nova] [instance: c612a1be-fb39-416d-a9d2-d206582e5aeb] Refreshing network info cache for port c5d604f1-fa0e-433c-af5c-5e2116499fe1 {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1441.424487] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-f54a75ce-3152-4fde-8da2-c76420839723 tempest-InstanceActionsV221TestJSON-1566764076 tempest-InstanceActionsV221TestJSON-1566764076-project-member] [instance: c612a1be-fb39-416d-a9d2-d206582e5aeb] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:32:02:d4', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'e85cbc56-fee0-41f7-bc70-64f31775ce92', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 
'c5d604f1-fa0e-433c-af5c-5e2116499fe1', 'vif_model': 'vmxnet3'}] {{(pid=62519) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1441.435504] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-f54a75ce-3152-4fde-8da2-c76420839723 tempest-InstanceActionsV221TestJSON-1566764076 tempest-InstanceActionsV221TestJSON-1566764076-project-member] Creating folder: Project (9d19c8963364423880f5c2d61fb5800f). Parent ref: group-v373567. {{(pid=62519) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1441.441290] env[62519]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-9a863633-d61c-4819-b201-806ba3607705 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1441.459200] env[62519]: DEBUG nova.virt.hardware [None req-fcf29d77-c706-4a19-9381-3a93a4c3aaf6 tempest-ServerShowV247Test-1912217433 tempest-ServerShowV247Test-1912217433-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T07:56:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T07:55:55Z,direct_url=,disk_format='vmdk',id=15793716-f1d9-4a86-9030-717adf498693,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4069337684d348e0a1ab4eb6a1a2b14d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T07:55:56Z,virtual_size=,visibility=), allow threads: False {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1441.459444] env[62519]: DEBUG nova.virt.hardware [None req-fcf29d77-c706-4a19-9381-3a93a4c3aaf6 tempest-ServerShowV247Test-1912217433 tempest-ServerShowV247Test-1912217433-project-member] Flavor limits 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1441.459644] env[62519]: DEBUG nova.virt.hardware [None req-fcf29d77-c706-4a19-9381-3a93a4c3aaf6 tempest-ServerShowV247Test-1912217433 tempest-ServerShowV247Test-1912217433-project-member] Image limits 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1441.459763] env[62519]: DEBUG nova.virt.hardware [None req-fcf29d77-c706-4a19-9381-3a93a4c3aaf6 tempest-ServerShowV247Test-1912217433 tempest-ServerShowV247Test-1912217433-project-member] Flavor pref 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1441.459905] env[62519]: DEBUG nova.virt.hardware [None req-fcf29d77-c706-4a19-9381-3a93a4c3aaf6 tempest-ServerShowV247Test-1912217433 tempest-ServerShowV247Test-1912217433-project-member] Image pref 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1441.460068] env[62519]: DEBUG nova.virt.hardware [None req-fcf29d77-c706-4a19-9381-3a93a4c3aaf6 tempest-ServerShowV247Test-1912217433 tempest-ServerShowV247Test-1912217433-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1441.460281] env[62519]: DEBUG nova.virt.hardware [None req-fcf29d77-c706-4a19-9381-3a93a4c3aaf6 
tempest-ServerShowV247Test-1912217433 tempest-ServerShowV247Test-1912217433-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1441.460438] env[62519]: DEBUG nova.virt.hardware [None req-fcf29d77-c706-4a19-9381-3a93a4c3aaf6 tempest-ServerShowV247Test-1912217433 tempest-ServerShowV247Test-1912217433-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62519) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1441.460595] env[62519]: DEBUG nova.virt.hardware [None req-fcf29d77-c706-4a19-9381-3a93a4c3aaf6 tempest-ServerShowV247Test-1912217433 tempest-ServerShowV247Test-1912217433-project-member] Got 1 possible topologies {{(pid=62519) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1441.460776] env[62519]: DEBUG nova.virt.hardware [None req-fcf29d77-c706-4a19-9381-3a93a4c3aaf6 tempest-ServerShowV247Test-1912217433 tempest-ServerShowV247Test-1912217433-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1441.460946] env[62519]: DEBUG nova.virt.hardware [None req-fcf29d77-c706-4a19-9381-3a93a4c3aaf6 tempest-ServerShowV247Test-1912217433 tempest-ServerShowV247Test-1912217433-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1441.462280] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66891cf5-94e2-455f-b1c1-f70790db522c {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1441.469588] env[62519]: INFO nova.virt.vmwareapi.vm_util [None req-f54a75ce-3152-4fde-8da2-c76420839723 tempest-InstanceActionsV221TestJSON-1566764076 tempest-InstanceActionsV221TestJSON-1566764076-project-member] Created folder: Project (9d19c8963364423880f5c2d61fb5800f) in parent group-v373567. [ 1441.469884] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-f54a75ce-3152-4fde-8da2-c76420839723 tempest-InstanceActionsV221TestJSON-1566764076 tempest-InstanceActionsV221TestJSON-1566764076-project-member] Creating folder: Instances. Parent ref: group-v373618. {{(pid=62519) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1441.471084] env[62519]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-88b5f929-63d2-43c9-a64e-308267471d9a {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1441.477545] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67bc46ec-8bbd-4202-ad99-5efa66b94440 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1441.487765] env[62519]: INFO nova.virt.vmwareapi.vm_util [None req-f54a75ce-3152-4fde-8da2-c76420839723 tempest-InstanceActionsV221TestJSON-1566764076 tempest-InstanceActionsV221TestJSON-1566764076-project-member] Created folder: Instances in parent group-v373618. 
[ 1441.488233] env[62519]: DEBUG oslo.service.loopingcall [None req-f54a75ce-3152-4fde-8da2-c76420839723 tempest-InstanceActionsV221TestJSON-1566764076 tempest-InstanceActionsV221TestJSON-1566764076-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62519) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1441.496469] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c612a1be-fb39-416d-a9d2-d206582e5aeb] Creating VM on the ESX host {{(pid=62519) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1441.496965] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-fcf29d77-c706-4a19-9381-3a93a4c3aaf6 tempest-ServerShowV247Test-1912217433 tempest-ServerShowV247Test-1912217433-project-member] [instance: 42497ab5-cce9-4614-a6d1-dffbf6764d7b] Instance VIF info [] {{(pid=62519) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1441.502637] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-fcf29d77-c706-4a19-9381-3a93a4c3aaf6 tempest-ServerShowV247Test-1912217433 tempest-ServerShowV247Test-1912217433-project-member] Creating folder: Project (0baa7c415d7847e69101a75619ea519f). Parent ref: group-v373567. {{(pid=62519) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1441.505337] env[62519]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-0fff209a-88ab-4c6a-9a5b-e1f4979e6226 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1441.519514] env[62519]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e8eb92be-af19-455f-800a-d7927bf42870 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1441.528402] env[62519]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1441.528402] env[62519]: value = "task-1802059" [ 1441.528402] env[62519]: _type = "Task" [ 1441.528402] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1441.532973] env[62519]: INFO nova.virt.vmwareapi.vm_util [None req-fcf29d77-c706-4a19-9381-3a93a4c3aaf6 tempest-ServerShowV247Test-1912217433 tempest-ServerShowV247Test-1912217433-project-member] Created folder: Project (0baa7c415d7847e69101a75619ea519f) in parent group-v373567. [ 1441.533668] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-fcf29d77-c706-4a19-9381-3a93a4c3aaf6 tempest-ServerShowV247Test-1912217433 tempest-ServerShowV247Test-1912217433-project-member] Creating folder: Instances. Parent ref: group-v373620. 
{{(pid=62519) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1441.534359] env[62519]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d4ef76a7-f99e-4a67-87a8-ca317f9f4839 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1441.541294] env[62519]: DEBUG nova.scheduler.client.report [None req-2cdb3cee-efd6-4022-b37b-5545123251f0 tempest-ServerShowV247Test-1912217433 tempest-ServerShowV247Test-1912217433-project-member] Inventory has not changed for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1441.544423] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1802059, 'name': CreateVM_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1441.554774] env[62519]: INFO nova.virt.vmwareapi.vm_util [None req-fcf29d77-c706-4a19-9381-3a93a4c3aaf6 tempest-ServerShowV247Test-1912217433 tempest-ServerShowV247Test-1912217433-project-member] Created folder: Instances in parent group-v373620. [ 1441.554774] env[62519]: DEBUG oslo.service.loopingcall [None req-fcf29d77-c706-4a19-9381-3a93a4c3aaf6 tempest-ServerShowV247Test-1912217433 tempest-ServerShowV247Test-1912217433-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62519) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1441.554974] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 42497ab5-cce9-4614-a6d1-dffbf6764d7b] Creating VM on the ESX host {{(pid=62519) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1441.555615] env[62519]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c02a3743-8e7e-40cb-a508-eb1a2cc82bdc {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1441.573839] env[62519]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1441.573839] env[62519]: value = "task-1802061" [ 1441.573839] env[62519]: _type = "Task" [ 1441.573839] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1441.583268] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1802061, 'name': CreateVM_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1441.743690] env[62519]: DEBUG nova.network.neutron [req-8ceeee9d-380a-4a0f-881d-b6efb8aaa37a req-115a42cb-4ded-440f-9a3b-15eb14c0621b service nova] [instance: c612a1be-fb39-416d-a9d2-d206582e5aeb] Updated VIF entry in instance network info cache for port c5d604f1-fa0e-433c-af5c-5e2116499fe1. 
{{(pid=62519) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1441.744164] env[62519]: DEBUG nova.network.neutron [req-8ceeee9d-380a-4a0f-881d-b6efb8aaa37a req-115a42cb-4ded-440f-9a3b-15eb14c0621b service nova] [instance: c612a1be-fb39-416d-a9d2-d206582e5aeb] Updating instance_info_cache with network_info: [{"id": "c5d604f1-fa0e-433c-af5c-5e2116499fe1", "address": "fa:16:3e:32:02:d4", "network": {"id": "b5e5e611-6414-4f2e-8c30-0a0921562f95", "bridge": "br-int", "label": "tempest-InstanceActionsV221TestJSON-1439677770-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9d19c8963364423880f5c2d61fb5800f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e85cbc56-fee0-41f7-bc70-64f31775ce92", "external-id": "nsx-vlan-transportzone-793", "segmentation_id": 793, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc5d604f1-fa", "ovs_interfaceid": "c5d604f1-fa0e-433c-af5c-5e2116499fe1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1441.776710] env[62519]: DEBUG oslo_vmware.api [None req-a045cbfb-ac93-4591-9bfc-366cc2fb264e tempest-VolumesAdminNegativeTest-694306887 tempest-VolumesAdminNegativeTest-694306887-project-member] Task: {'id': task-1802054, 'name': ReconfigVM_Task, 'duration_secs': 0.475554} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1441.779811] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-a045cbfb-ac93-4591-9bfc-366cc2fb264e tempest-VolumesAdminNegativeTest-694306887 tempest-VolumesAdminNegativeTest-694306887-project-member] [instance: c61c893f-826b-4874-b253-de6fbffa9e5a] Reconfigured VM instance instance-00000010 to attach disk [datastore1] c61c893f-826b-4874-b253-de6fbffa9e5a/c61c893f-826b-4874-b253-de6fbffa9e5a.vmdk or device None with type sparse {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1441.780460] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-7b7d9cf1-272f-475a-b5e5-acfab462d232 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1441.788626] env[62519]: DEBUG oslo_vmware.api [None req-beeaee40-aeb0-4929-99b9-82dcecee9d8f tempest-ServersAdmin275Test-1657935187 tempest-ServersAdmin275Test-1657935187-project-member] Task: {'id': task-1802055, 'name': PowerOffVM_Task, 'duration_secs': 0.226151} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1441.790015] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-beeaee40-aeb0-4929-99b9-82dcecee9d8f tempest-ServersAdmin275Test-1657935187 tempest-ServersAdmin275Test-1657935187-project-member] [instance: c07e4d30-44bc-417b-8137-97f974aec932] Powered off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1441.790254] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-beeaee40-aeb0-4929-99b9-82dcecee9d8f tempest-ServersAdmin275Test-1657935187 tempest-ServersAdmin275Test-1657935187-project-member] [instance: c07e4d30-44bc-417b-8137-97f974aec932] Unregistering the VM {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1441.790574] env[62519]: DEBUG oslo_vmware.api [None req-a045cbfb-ac93-4591-9bfc-366cc2fb264e tempest-VolumesAdminNegativeTest-694306887 tempest-VolumesAdminNegativeTest-694306887-project-member] Waiting for the task: (returnval){ [ 1441.790574] env[62519]: value = "task-1802062" [ 1441.790574] env[62519]: _type = "Task" [ 1441.790574] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1441.790767] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-5022ebd9-6488-4265-a93b-f6fd333bd1c1 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1441.803142] env[62519]: DEBUG oslo_vmware.api [None req-a045cbfb-ac93-4591-9bfc-366cc2fb264e tempest-VolumesAdminNegativeTest-694306887 tempest-VolumesAdminNegativeTest-694306887-project-member] Task: {'id': task-1802062, 'name': Rename_Task} progress is 5%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1441.824062] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-beeaee40-aeb0-4929-99b9-82dcecee9d8f tempest-ServersAdmin275Test-1657935187 tempest-ServersAdmin275Test-1657935187-project-member] [instance: c07e4d30-44bc-417b-8137-97f974aec932] Unregistered the VM {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1441.824428] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-beeaee40-aeb0-4929-99b9-82dcecee9d8f tempest-ServersAdmin275Test-1657935187 tempest-ServersAdmin275Test-1657935187-project-member] [instance: c07e4d30-44bc-417b-8137-97f974aec932] Deleting contents of the VM from datastore datastore1 {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1441.824732] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-beeaee40-aeb0-4929-99b9-82dcecee9d8f tempest-ServersAdmin275Test-1657935187 tempest-ServersAdmin275Test-1657935187-project-member] Deleting the datastore file [datastore1] c07e4d30-44bc-417b-8137-97f974aec932 {{(pid=62519) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1441.825145] env[62519]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-7132358d-eb82-44f7-984d-3026ca838962 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1441.833808] env[62519]: DEBUG oslo_vmware.api [None req-beeaee40-aeb0-4929-99b9-82dcecee9d8f tempest-ServersAdmin275Test-1657935187 tempest-ServersAdmin275Test-1657935187-project-member] Waiting for the task: (returnval){ [ 1441.833808] env[62519]: value = "task-1802064" [ 1441.833808] env[62519]: _type = "Task" [ 1441.833808] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1441.845152] env[62519]: DEBUG oslo_vmware.api [None req-beeaee40-aeb0-4929-99b9-82dcecee9d8f tempest-ServersAdmin275Test-1657935187 tempest-ServersAdmin275Test-1657935187-project-member] Task: {'id': task-1802064, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1442.039638] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1802059, 'name': CreateVM_Task, 'duration_secs': 0.41875} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1442.040307] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c612a1be-fb39-416d-a9d2-d206582e5aeb] Created VM on the ESX host {{(pid=62519) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1442.042278] env[62519]: DEBUG oslo_concurrency.lockutils [None req-f54a75ce-3152-4fde-8da2-c76420839723 tempest-InstanceActionsV221TestJSON-1566764076 tempest-InstanceActionsV221TestJSON-1566764076-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1442.042278] env[62519]: DEBUG oslo_concurrency.lockutils [None req-f54a75ce-3152-4fde-8da2-c76420839723 tempest-InstanceActionsV221TestJSON-1566764076 tempest-InstanceActionsV221TestJSON-1566764076-project-member] Acquired lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1442.042278] env[62519]: DEBUG oslo_concurrency.lockutils [None req-f54a75ce-3152-4fde-8da2-c76420839723 tempest-InstanceActionsV221TestJSON-1566764076 tempest-InstanceActionsV221TestJSON-1566764076-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1442.042278] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-141cdc22-0998-46c7-8178-fa030e22a0a6 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1442.048287] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2cdb3cee-efd6-4022-b37b-5545123251f0 tempest-ServerShowV247Test-1912217433 tempest-ServerShowV247Test-1912217433-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.668s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1442.048287] env[62519]: DEBUG nova.compute.manager [None req-2cdb3cee-efd6-4022-b37b-5545123251f0 tempest-ServerShowV247Test-1912217433 tempest-ServerShowV247Test-1912217433-project-member] [instance: 49221ea3-d457-4cf5-97a9-9ae74c4e86fb] Start building networks asynchronously for instance. {{(pid=62519) _build_resources /opt/stack/nova/nova/compute/manager.py:2838}} [ 1442.049966] env[62519]: DEBUG oslo_vmware.api [None req-f54a75ce-3152-4fde-8da2-c76420839723 tempest-InstanceActionsV221TestJSON-1566764076 tempest-InstanceActionsV221TestJSON-1566764076-project-member] Waiting for the task: (returnval){ [ 1442.049966] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]524660a6-f0c1-0a48-74bc-daf3e0b56cb7" [ 1442.049966] env[62519]: _type = "Task" [ 1442.049966] env[62519]: } to complete. 
{{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1442.050246] env[62519]: DEBUG oslo_concurrency.lockutils [None req-070141c4-76cd-4253-8f36-904e93010092 tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 24.722s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1442.052365] env[62519]: INFO nova.compute.claims [None req-070141c4-76cd-4253-8f36-904e93010092 tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] [instance: 021c3287-5887-457e-9b3a-233308fb9b23] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1442.066769] env[62519]: DEBUG oslo_vmware.api [None req-f54a75ce-3152-4fde-8da2-c76420839723 tempest-InstanceActionsV221TestJSON-1566764076 tempest-InstanceActionsV221TestJSON-1566764076-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]524660a6-f0c1-0a48-74bc-daf3e0b56cb7, 'name': SearchDatastore_Task, 'duration_secs': 0.012229} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1442.067129] env[62519]: DEBUG oslo_concurrency.lockutils [None req-f54a75ce-3152-4fde-8da2-c76420839723 tempest-InstanceActionsV221TestJSON-1566764076 tempest-InstanceActionsV221TestJSON-1566764076-project-member] Releasing lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1442.067236] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-f54a75ce-3152-4fde-8da2-c76420839723 tempest-InstanceActionsV221TestJSON-1566764076 tempest-InstanceActionsV221TestJSON-1566764076-project-member] [instance: c612a1be-fb39-416d-a9d2-d206582e5aeb] Processing image 15793716-f1d9-4a86-9030-717adf498693 {{(pid=62519) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1442.067535] env[62519]: DEBUG oslo_concurrency.lockutils [None req-f54a75ce-3152-4fde-8da2-c76420839723 tempest-InstanceActionsV221TestJSON-1566764076 tempest-InstanceActionsV221TestJSON-1566764076-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1442.067705] env[62519]: DEBUG oslo_concurrency.lockutils [None req-f54a75ce-3152-4fde-8da2-c76420839723 tempest-InstanceActionsV221TestJSON-1566764076 tempest-InstanceActionsV221TestJSON-1566764076-project-member] Acquired lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1442.067885] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-f54a75ce-3152-4fde-8da2-c76420839723 tempest-InstanceActionsV221TestJSON-1566764076 tempest-InstanceActionsV221TestJSON-1566764076-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62519) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1442.068155] env[62519]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with 
opID=oslo.vmware-76aaaf4f-33ef-4239-9046-90d46ff96f43 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1442.079312] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-f54a75ce-3152-4fde-8da2-c76420839723 tempest-InstanceActionsV221TestJSON-1566764076 tempest-InstanceActionsV221TestJSON-1566764076-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62519) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1442.079919] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-f54a75ce-3152-4fde-8da2-c76420839723 tempest-InstanceActionsV221TestJSON-1566764076 tempest-InstanceActionsV221TestJSON-1566764076-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62519) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1442.081293] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fdbde0c3-e820-496f-8fd8-9895db8dfe73 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1442.088048] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1802061, 'name': CreateVM_Task, 'duration_secs': 0.339215} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1442.088209] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 42497ab5-cce9-4614-a6d1-dffbf6764d7b] Created VM on the ESX host {{(pid=62519) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1442.089307] env[62519]: DEBUG oslo_concurrency.lockutils [None req-fcf29d77-c706-4a19-9381-3a93a4c3aaf6 tempest-ServerShowV247Test-1912217433 tempest-ServerShowV247Test-1912217433-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1442.089307] env[62519]: DEBUG oslo_concurrency.lockutils [None req-fcf29d77-c706-4a19-9381-3a93a4c3aaf6 tempest-ServerShowV247Test-1912217433 tempest-ServerShowV247Test-1912217433-project-member] Acquired lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1442.089503] env[62519]: DEBUG oslo_concurrency.lockutils [None req-fcf29d77-c706-4a19-9381-3a93a4c3aaf6 tempest-ServerShowV247Test-1912217433 tempest-ServerShowV247Test-1912217433-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1442.089695] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-931f59da-9b6f-459c-ae75-34dfc3ae08b9 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1442.093960] env[62519]: DEBUG oslo_vmware.api [None req-f54a75ce-3152-4fde-8da2-c76420839723 tempest-InstanceActionsV221TestJSON-1566764076 tempest-InstanceActionsV221TestJSON-1566764076-project-member] Waiting for the task: (returnval){ [ 1442.093960] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]52de06e7-780b-b511-cb5a-b4b2f3d42a8a" [ 1442.093960] env[62519]: _type = "Task" [ 1442.093960] env[62519]: } to 
complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1442.099957] env[62519]: DEBUG oslo_vmware.api [None req-fcf29d77-c706-4a19-9381-3a93a4c3aaf6 tempest-ServerShowV247Test-1912217433 tempest-ServerShowV247Test-1912217433-project-member] Waiting for the task: (returnval){ [ 1442.099957] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]52ada4e7-00c7-5a81-22e0-9300db13ed23" [ 1442.099957] env[62519]: _type = "Task" [ 1442.099957] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1442.108526] env[62519]: DEBUG oslo_vmware.api [None req-f54a75ce-3152-4fde-8da2-c76420839723 tempest-InstanceActionsV221TestJSON-1566764076 tempest-InstanceActionsV221TestJSON-1566764076-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52de06e7-780b-b511-cb5a-b4b2f3d42a8a, 'name': SearchDatastore_Task, 'duration_secs': 0.011535} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1442.109591] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-92e34b16-114b-4bf4-8a47-99c62a722410 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1442.116600] env[62519]: DEBUG oslo_vmware.api [None req-fcf29d77-c706-4a19-9381-3a93a4c3aaf6 tempest-ServerShowV247Test-1912217433 tempest-ServerShowV247Test-1912217433-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52ada4e7-00c7-5a81-22e0-9300db13ed23, 'name': SearchDatastore_Task, 'duration_secs': 0.01028} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1442.117420] env[62519]: DEBUG oslo_concurrency.lockutils [None req-fcf29d77-c706-4a19-9381-3a93a4c3aaf6 tempest-ServerShowV247Test-1912217433 tempest-ServerShowV247Test-1912217433-project-member] Releasing lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1442.118204] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-fcf29d77-c706-4a19-9381-3a93a4c3aaf6 tempest-ServerShowV247Test-1912217433 tempest-ServerShowV247Test-1912217433-project-member] [instance: 42497ab5-cce9-4614-a6d1-dffbf6764d7b] Processing image 15793716-f1d9-4a86-9030-717adf498693 {{(pid=62519) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1442.118204] env[62519]: DEBUG oslo_concurrency.lockutils [None req-fcf29d77-c706-4a19-9381-3a93a4c3aaf6 tempest-ServerShowV247Test-1912217433 tempest-ServerShowV247Test-1912217433-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1442.121191] env[62519]: DEBUG oslo_vmware.api [None req-f54a75ce-3152-4fde-8da2-c76420839723 tempest-InstanceActionsV221TestJSON-1566764076 tempest-InstanceActionsV221TestJSON-1566764076-project-member] Waiting for the task: (returnval){ [ 1442.121191] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]52ae31f7-97dc-0fbd-7bd9-76cd0ee5a23f" [ 1442.121191] env[62519]: _type = "Task" [ 1442.121191] env[62519]: } to complete. 
{{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1442.130250] env[62519]: DEBUG oslo_vmware.api [None req-f54a75ce-3152-4fde-8da2-c76420839723 tempest-InstanceActionsV221TestJSON-1566764076 tempest-InstanceActionsV221TestJSON-1566764076-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52ae31f7-97dc-0fbd-7bd9-76cd0ee5a23f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1442.247715] env[62519]: DEBUG oslo_concurrency.lockutils [req-8ceeee9d-380a-4a0f-881d-b6efb8aaa37a req-115a42cb-4ded-440f-9a3b-15eb14c0621b service nova] Releasing lock "refresh_cache-c612a1be-fb39-416d-a9d2-d206582e5aeb" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1442.306163] env[62519]: DEBUG oslo_vmware.api [None req-a045cbfb-ac93-4591-9bfc-366cc2fb264e tempest-VolumesAdminNegativeTest-694306887 tempest-VolumesAdminNegativeTest-694306887-project-member] Task: {'id': task-1802062, 'name': Rename_Task, 'duration_secs': 0.165056} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1442.306163] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-a045cbfb-ac93-4591-9bfc-366cc2fb264e tempest-VolumesAdminNegativeTest-694306887 tempest-VolumesAdminNegativeTest-694306887-project-member] [instance: c61c893f-826b-4874-b253-de6fbffa9e5a] Powering on the VM {{(pid=62519) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1442.306375] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c33e43fe-bdee-4e34-b75b-256000f17eb4 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1442.314210] env[62519]: DEBUG oslo_vmware.api [None req-a045cbfb-ac93-4591-9bfc-366cc2fb264e tempest-VolumesAdminNegativeTest-694306887 tempest-VolumesAdminNegativeTest-694306887-project-member] Waiting for the task: (returnval){ [ 1442.314210] env[62519]: value = "task-1802065" [ 1442.314210] env[62519]: _type = "Task" [ 1442.314210] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1442.323754] env[62519]: DEBUG oslo_vmware.api [None req-a045cbfb-ac93-4591-9bfc-366cc2fb264e tempest-VolumesAdminNegativeTest-694306887 tempest-VolumesAdminNegativeTest-694306887-project-member] Task: {'id': task-1802065, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1442.349678] env[62519]: DEBUG oslo_vmware.api [None req-beeaee40-aeb0-4929-99b9-82dcecee9d8f tempest-ServersAdmin275Test-1657935187 tempest-ServersAdmin275Test-1657935187-project-member] Task: {'id': task-1802064, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.09807} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1442.349678] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-beeaee40-aeb0-4929-99b9-82dcecee9d8f tempest-ServersAdmin275Test-1657935187 tempest-ServersAdmin275Test-1657935187-project-member] Deleted the datastore file {{(pid=62519) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1442.349678] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-beeaee40-aeb0-4929-99b9-82dcecee9d8f tempest-ServersAdmin275Test-1657935187 tempest-ServersAdmin275Test-1657935187-project-member] [instance: c07e4d30-44bc-417b-8137-97f974aec932] Deleted contents of the VM from datastore datastore1 {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1442.349678] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-beeaee40-aeb0-4929-99b9-82dcecee9d8f tempest-ServersAdmin275Test-1657935187 tempest-ServersAdmin275Test-1657935187-project-member] [instance: c07e4d30-44bc-417b-8137-97f974aec932] Instance destroyed {{(pid=62519) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1442.349678] env[62519]: INFO nova.compute.manager [None req-beeaee40-aeb0-4929-99b9-82dcecee9d8f tempest-ServersAdmin275Test-1657935187 tempest-ServersAdmin275Test-1657935187-project-member] [instance: c07e4d30-44bc-417b-8137-97f974aec932] Took 1.11 seconds to destroy the instance on the hypervisor. [ 1442.349970] env[62519]: DEBUG oslo.service.loopingcall [None req-beeaee40-aeb0-4929-99b9-82dcecee9d8f tempest-ServersAdmin275Test-1657935187 tempest-ServersAdmin275Test-1657935187-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62519) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1442.349970] env[62519]: DEBUG nova.compute.manager [-] [instance: c07e4d30-44bc-417b-8137-97f974aec932] Deallocating network for instance {{(pid=62519) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2297}} [ 1442.349970] env[62519]: DEBUG nova.network.neutron [-] [instance: c07e4d30-44bc-417b-8137-97f974aec932] deallocate_for_instance() {{(pid=62519) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1442.370772] env[62519]: DEBUG nova.network.neutron [-] [instance: c07e4d30-44bc-417b-8137-97f974aec932] Instance cache missing network info. {{(pid=62519) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1442.557763] env[62519]: DEBUG nova.compute.utils [None req-2cdb3cee-efd6-4022-b37b-5545123251f0 tempest-ServerShowV247Test-1912217433 tempest-ServerShowV247Test-1912217433-project-member] Using /dev/sd instead of None {{(pid=62519) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1442.559600] env[62519]: DEBUG nova.compute.manager [None req-2cdb3cee-efd6-4022-b37b-5545123251f0 tempest-ServerShowV247Test-1912217433 tempest-ServerShowV247Test-1912217433-project-member] [instance: 49221ea3-d457-4cf5-97a9-9ae74c4e86fb] Not allocating networking since 'none' was specified. 
{{(pid=62519) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1985}} [ 1442.633902] env[62519]: DEBUG oslo_vmware.api [None req-f54a75ce-3152-4fde-8da2-c76420839723 tempest-InstanceActionsV221TestJSON-1566764076 tempest-InstanceActionsV221TestJSON-1566764076-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52ae31f7-97dc-0fbd-7bd9-76cd0ee5a23f, 'name': SearchDatastore_Task, 'duration_secs': 0.010502} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1442.634056] env[62519]: DEBUG oslo_concurrency.lockutils [None req-f54a75ce-3152-4fde-8da2-c76420839723 tempest-InstanceActionsV221TestJSON-1566764076 tempest-InstanceActionsV221TestJSON-1566764076-project-member] Releasing lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1442.634430] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-f54a75ce-3152-4fde-8da2-c76420839723 tempest-InstanceActionsV221TestJSON-1566764076 tempest-InstanceActionsV221TestJSON-1566764076-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk to [datastore1] c612a1be-fb39-416d-a9d2-d206582e5aeb/c612a1be-fb39-416d-a9d2-d206582e5aeb.vmdk {{(pid=62519) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1442.634744] env[62519]: DEBUG oslo_concurrency.lockutils [None req-fcf29d77-c706-4a19-9381-3a93a4c3aaf6 tempest-ServerShowV247Test-1912217433 tempest-ServerShowV247Test-1912217433-project-member] Acquired lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1442.635031] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-fcf29d77-c706-4a19-9381-3a93a4c3aaf6 tempest-ServerShowV247Test-1912217433 tempest-ServerShowV247Test-1912217433-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62519) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1442.635545] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-0099006e-04cd-420f-bfd9-79ee48175c8e {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1442.638556] env[62519]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-847c0a07-fd72-4447-9814-0b043e3a0cab {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1442.648096] env[62519]: DEBUG oslo_vmware.api [None req-f54a75ce-3152-4fde-8da2-c76420839723 tempest-InstanceActionsV221TestJSON-1566764076 tempest-InstanceActionsV221TestJSON-1566764076-project-member] Waiting for the task: (returnval){ [ 1442.648096] env[62519]: value = "task-1802066" [ 1442.648096] env[62519]: _type = "Task" [ 1442.648096] env[62519]: } to complete. 
{{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1442.654846] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-fcf29d77-c706-4a19-9381-3a93a4c3aaf6 tempest-ServerShowV247Test-1912217433 tempest-ServerShowV247Test-1912217433-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62519) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1442.655432] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-fcf29d77-c706-4a19-9381-3a93a4c3aaf6 tempest-ServerShowV247Test-1912217433 tempest-ServerShowV247Test-1912217433-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62519) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1442.656885] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e4b02d32-6af5-4048-9b78-9162679a5b6a {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1442.664787] env[62519]: DEBUG oslo_vmware.api [None req-f54a75ce-3152-4fde-8da2-c76420839723 tempest-InstanceActionsV221TestJSON-1566764076 tempest-InstanceActionsV221TestJSON-1566764076-project-member] Task: {'id': task-1802066, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1442.669411] env[62519]: DEBUG oslo_vmware.api [None req-fcf29d77-c706-4a19-9381-3a93a4c3aaf6 tempest-ServerShowV247Test-1912217433 tempest-ServerShowV247Test-1912217433-project-member] Waiting for the task: (returnval){ [ 1442.669411] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]52848598-2c2e-ba5e-b6dd-0eb9aacc61df" [ 1442.669411] env[62519]: _type = "Task" [ 1442.669411] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1442.679231] env[62519]: DEBUG oslo_vmware.api [None req-fcf29d77-c706-4a19-9381-3a93a4c3aaf6 tempest-ServerShowV247Test-1912217433 tempest-ServerShowV247Test-1912217433-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52848598-2c2e-ba5e-b6dd-0eb9aacc61df, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1442.825710] env[62519]: DEBUG oslo_vmware.api [None req-a045cbfb-ac93-4591-9bfc-366cc2fb264e tempest-VolumesAdminNegativeTest-694306887 tempest-VolumesAdminNegativeTest-694306887-project-member] Task: {'id': task-1802065, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1442.873886] env[62519]: DEBUG nova.network.neutron [-] [instance: c07e4d30-44bc-417b-8137-97f974aec932] Updating instance_info_cache with network_info: [] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1442.947903] env[62519]: DEBUG oslo_concurrency.lockutils [None req-39a021c3-8855-476c-8305-d806d48e02a7 tempest-VolumesAssistedSnapshotsTest-904167312 tempest-VolumesAssistedSnapshotsTest-904167312-project-member] Lock "91902e7f-8c15-447b-a3a8-04433434b1b6" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 46.983s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1443.061212] env[62519]: DEBUG nova.compute.manager [None req-2cdb3cee-efd6-4022-b37b-5545123251f0 tempest-ServerShowV247Test-1912217433 tempest-ServerShowV247Test-1912217433-project-member] [instance: 49221ea3-d457-4cf5-97a9-9ae74c4e86fb] Start building block device mappings for instance. {{(pid=62519) _build_resources /opt/stack/nova/nova/compute/manager.py:2873}} [ 1443.161818] env[62519]: DEBUG oslo_vmware.api [None req-f54a75ce-3152-4fde-8da2-c76420839723 tempest-InstanceActionsV221TestJSON-1566764076 tempest-InstanceActionsV221TestJSON-1566764076-project-member] Task: {'id': task-1802066, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.502321} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1443.162109] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-f54a75ce-3152-4fde-8da2-c76420839723 tempest-InstanceActionsV221TestJSON-1566764076 tempest-InstanceActionsV221TestJSON-1566764076-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk to [datastore1] c612a1be-fb39-416d-a9d2-d206582e5aeb/c612a1be-fb39-416d-a9d2-d206582e5aeb.vmdk {{(pid=62519) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1443.162328] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-f54a75ce-3152-4fde-8da2-c76420839723 tempest-InstanceActionsV221TestJSON-1566764076 tempest-InstanceActionsV221TestJSON-1566764076-project-member] [instance: c612a1be-fb39-416d-a9d2-d206582e5aeb] Extending root virtual disk to 1048576 {{(pid=62519) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1443.162607] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-0fa87eb2-fee3-411d-b60e-d533756c5301 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1443.173985] env[62519]: DEBUG oslo_vmware.api [None req-f54a75ce-3152-4fde-8da2-c76420839723 tempest-InstanceActionsV221TestJSON-1566764076 tempest-InstanceActionsV221TestJSON-1566764076-project-member] Waiting for the task: (returnval){ [ 1443.173985] env[62519]: value = "task-1802067" [ 1443.173985] env[62519]: _type = "Task" [ 1443.173985] env[62519]: } to complete. 
{{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1443.194566] env[62519]: DEBUG oslo_vmware.api [None req-fcf29d77-c706-4a19-9381-3a93a4c3aaf6 tempest-ServerShowV247Test-1912217433 tempest-ServerShowV247Test-1912217433-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52848598-2c2e-ba5e-b6dd-0eb9aacc61df, 'name': SearchDatastore_Task, 'duration_secs': 0.009162} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1443.195190] env[62519]: DEBUG oslo_vmware.api [None req-f54a75ce-3152-4fde-8da2-c76420839723 tempest-InstanceActionsV221TestJSON-1566764076 tempest-InstanceActionsV221TestJSON-1566764076-project-member] Task: {'id': task-1802067, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1443.199544] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a412a0fe-9f5b-43cc-9eae-eed85172ebfa {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1443.204813] env[62519]: DEBUG oslo_vmware.api [None req-fcf29d77-c706-4a19-9381-3a93a4c3aaf6 tempest-ServerShowV247Test-1912217433 tempest-ServerShowV247Test-1912217433-project-member] Waiting for the task: (returnval){ [ 1443.204813] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]52c665a7-4790-c615-619d-a9b27b6cf562" [ 1443.204813] env[62519]: _type = "Task" [ 1443.204813] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1443.213101] env[62519]: DEBUG oslo_vmware.api [None req-fcf29d77-c706-4a19-9381-3a93a4c3aaf6 tempest-ServerShowV247Test-1912217433 tempest-ServerShowV247Test-1912217433-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52c665a7-4790-c615-619d-a9b27b6cf562, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1443.327479] env[62519]: DEBUG oslo_vmware.api [None req-a045cbfb-ac93-4591-9bfc-366cc2fb264e tempest-VolumesAdminNegativeTest-694306887 tempest-VolumesAdminNegativeTest-694306887-project-member] Task: {'id': task-1802065, 'name': PowerOnVM_Task, 'duration_secs': 0.736257} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1443.327803] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-a045cbfb-ac93-4591-9bfc-366cc2fb264e tempest-VolumesAdminNegativeTest-694306887 tempest-VolumesAdminNegativeTest-694306887-project-member] [instance: c61c893f-826b-4874-b253-de6fbffa9e5a] Powered on the VM {{(pid=62519) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1443.328077] env[62519]: INFO nova.compute.manager [None req-a045cbfb-ac93-4591-9bfc-366cc2fb264e tempest-VolumesAdminNegativeTest-694306887 tempest-VolumesAdminNegativeTest-694306887-project-member] [instance: c61c893f-826b-4874-b253-de6fbffa9e5a] Took 7.88 seconds to spawn the instance on the hypervisor. 
[ 1443.328353] env[62519]: DEBUG nova.compute.manager [None req-a045cbfb-ac93-4591-9bfc-366cc2fb264e tempest-VolumesAdminNegativeTest-694306887 tempest-VolumesAdminNegativeTest-694306887-project-member] [instance: c61c893f-826b-4874-b253-de6fbffa9e5a] Checking state {{(pid=62519) _get_power_state /opt/stack/nova/nova/compute/manager.py:1799}} [ 1443.329174] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7eece10-e3a8-4607-abcc-a722318d716a {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1443.379892] env[62519]: INFO nova.compute.manager [-] [instance: c07e4d30-44bc-417b-8137-97f974aec932] Took 1.03 seconds to deallocate network for instance. [ 1443.451444] env[62519]: DEBUG nova.compute.manager [None req-702f15b2-0754-4c5b-b9ec-b97a88471fff tempest-SecurityGroupsTestJSON-38868017 tempest-SecurityGroupsTestJSON-38868017-project-member] [instance: 4a29bff8-050a-4ad5-9d06-3a59c40b97ee] Starting instance... {{(pid=62519) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2441}} [ 1443.615108] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1520e322-bf79-4600-8bdb-810bfdcb92d2 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1443.624838] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a8316b9-6d9b-4c24-8e52-1cb685166e2a {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1443.663882] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e5d8a8d-fe49-4f5b-9a36-8817fd9f3b22 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1443.669284] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8e6e61c-1af0-42a3-a33d-d389b9b0d2bc {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1443.686150] env[62519]: DEBUG nova.compute.provider_tree [None req-070141c4-76cd-4253-8f36-904e93010092 tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] Inventory has not changed in ProviderTree for provider: f8ca0d98-9158-4b85-ae0e-b106f966dd44 {{(pid=62519) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1443.698081] env[62519]: DEBUG oslo_vmware.api [None req-f54a75ce-3152-4fde-8da2-c76420839723 tempest-InstanceActionsV221TestJSON-1566764076 tempest-InstanceActionsV221TestJSON-1566764076-project-member] Task: {'id': task-1802067, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.068031} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1443.699369] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-f54a75ce-3152-4fde-8da2-c76420839723 tempest-InstanceActionsV221TestJSON-1566764076 tempest-InstanceActionsV221TestJSON-1566764076-project-member] [instance: c612a1be-fb39-416d-a9d2-d206582e5aeb] Extended root virtual disk {{(pid=62519) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1443.701122] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fdc81986-7337-42e6-8335-2f7352e2551b {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1443.728898] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-f54a75ce-3152-4fde-8da2-c76420839723 tempest-InstanceActionsV221TestJSON-1566764076 tempest-InstanceActionsV221TestJSON-1566764076-project-member] [instance: c612a1be-fb39-416d-a9d2-d206582e5aeb] Reconfiguring VM instance instance-00000011 to attach disk [datastore1] c612a1be-fb39-416d-a9d2-d206582e5aeb/c612a1be-fb39-416d-a9d2-d206582e5aeb.vmdk or device None with type sparse {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1443.729951] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-39beaafe-78cd-4853-95c1-b26900e9f623 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1443.748815] env[62519]: DEBUG oslo_vmware.api [None req-fcf29d77-c706-4a19-9381-3a93a4c3aaf6 tempest-ServerShowV247Test-1912217433 tempest-ServerShowV247Test-1912217433-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52c665a7-4790-c615-619d-a9b27b6cf562, 'name': SearchDatastore_Task, 'duration_secs': 0.009651} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1443.749291] env[62519]: DEBUG oslo_concurrency.lockutils [None req-fcf29d77-c706-4a19-9381-3a93a4c3aaf6 tempest-ServerShowV247Test-1912217433 tempest-ServerShowV247Test-1912217433-project-member] Releasing lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1443.749549] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-fcf29d77-c706-4a19-9381-3a93a4c3aaf6 tempest-ServerShowV247Test-1912217433 tempest-ServerShowV247Test-1912217433-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk to [datastore1] 42497ab5-cce9-4614-a6d1-dffbf6764d7b/42497ab5-cce9-4614-a6d1-dffbf6764d7b.vmdk {{(pid=62519) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1443.749795] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e7661eca-517d-4d13-9cb6-69e304213877 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1443.754223] env[62519]: DEBUG oslo_vmware.api [None req-f54a75ce-3152-4fde-8da2-c76420839723 tempest-InstanceActionsV221TestJSON-1566764076 tempest-InstanceActionsV221TestJSON-1566764076-project-member] Waiting for the task: (returnval){ [ 1443.754223] env[62519]: value = "task-1802068" [ 1443.754223] env[62519]: _type = "Task" [ 1443.754223] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1443.758521] env[62519]: DEBUG oslo_vmware.api [None req-fcf29d77-c706-4a19-9381-3a93a4c3aaf6 tempest-ServerShowV247Test-1912217433 tempest-ServerShowV247Test-1912217433-project-member] Waiting for the task: (returnval){ [ 1443.758521] env[62519]: value = "task-1802069" [ 1443.758521] env[62519]: _type = "Task" [ 1443.758521] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1443.765477] env[62519]: DEBUG oslo_vmware.api [None req-f54a75ce-3152-4fde-8da2-c76420839723 tempest-InstanceActionsV221TestJSON-1566764076 tempest-InstanceActionsV221TestJSON-1566764076-project-member] Task: {'id': task-1802068, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1443.770941] env[62519]: DEBUG oslo_vmware.api [None req-fcf29d77-c706-4a19-9381-3a93a4c3aaf6 tempest-ServerShowV247Test-1912217433 tempest-ServerShowV247Test-1912217433-project-member] Task: {'id': task-1802069, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1443.851760] env[62519]: INFO nova.compute.manager [None req-a045cbfb-ac93-4591-9bfc-366cc2fb264e tempest-VolumesAdminNegativeTest-694306887 tempest-VolumesAdminNegativeTest-694306887-project-member] [instance: c61c893f-826b-4874-b253-de6fbffa9e5a] Took 38.46 seconds to build instance. 
[ 1443.869216] env[62519]: DEBUG nova.compute.manager [req-27813b76-97b0-4ea4-8187-a3fbd2140ce3 req-11c4ef88-06ea-45bd-af61-edd2b6af56c1 service nova] [instance: 91902e7f-8c15-447b-a3a8-04433434b1b6] Received event network-changed-57934d0f-3be6-4e2d-9e86-b5500fae4b3b {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1443.869326] env[62519]: DEBUG nova.compute.manager [req-27813b76-97b0-4ea4-8187-a3fbd2140ce3 req-11c4ef88-06ea-45bd-af61-edd2b6af56c1 service nova] [instance: 91902e7f-8c15-447b-a3a8-04433434b1b6] Refreshing instance network info cache due to event network-changed-57934d0f-3be6-4e2d-9e86-b5500fae4b3b. {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11628}} [ 1443.869536] env[62519]: DEBUG oslo_concurrency.lockutils [req-27813b76-97b0-4ea4-8187-a3fbd2140ce3 req-11c4ef88-06ea-45bd-af61-edd2b6af56c1 service nova] Acquiring lock "refresh_cache-91902e7f-8c15-447b-a3a8-04433434b1b6" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1443.869628] env[62519]: DEBUG oslo_concurrency.lockutils [req-27813b76-97b0-4ea4-8187-a3fbd2140ce3 req-11c4ef88-06ea-45bd-af61-edd2b6af56c1 service nova] Acquired lock "refresh_cache-91902e7f-8c15-447b-a3a8-04433434b1b6" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1443.870174] env[62519]: DEBUG nova.network.neutron [req-27813b76-97b0-4ea4-8187-a3fbd2140ce3 req-11c4ef88-06ea-45bd-af61-edd2b6af56c1 service nova] [instance: 91902e7f-8c15-447b-a3a8-04433434b1b6] Refreshing network info cache for port 57934d0f-3be6-4e2d-9e86-b5500fae4b3b {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1443.885953] env[62519]: DEBUG oslo_concurrency.lockutils [None req-beeaee40-aeb0-4929-99b9-82dcecee9d8f tempest-ServersAdmin275Test-1657935187 tempest-ServersAdmin275Test-1657935187-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1443.979583] env[62519]: DEBUG oslo_concurrency.lockutils [None req-702f15b2-0754-4c5b-b9ec-b97a88471fff tempest-SecurityGroupsTestJSON-38868017 tempest-SecurityGroupsTestJSON-38868017-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1444.074481] env[62519]: DEBUG nova.compute.manager [None req-2cdb3cee-efd6-4022-b37b-5545123251f0 tempest-ServerShowV247Test-1912217433 tempest-ServerShowV247Test-1912217433-project-member] [instance: 49221ea3-d457-4cf5-97a9-9ae74c4e86fb] Start spawning the instance on the hypervisor. 
{{(pid=62519) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2647}} [ 1444.115018] env[62519]: DEBUG nova.virt.hardware [None req-2cdb3cee-efd6-4022-b37b-5545123251f0 tempest-ServerShowV247Test-1912217433 tempest-ServerShowV247Test-1912217433-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T07:56:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T07:55:55Z,direct_url=,disk_format='vmdk',id=15793716-f1d9-4a86-9030-717adf498693,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4069337684d348e0a1ab4eb6a1a2b14d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T07:55:56Z,virtual_size=,visibility=), allow threads: False {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1444.115018] env[62519]: DEBUG nova.virt.hardware [None req-2cdb3cee-efd6-4022-b37b-5545123251f0 tempest-ServerShowV247Test-1912217433 tempest-ServerShowV247Test-1912217433-project-member] Flavor limits 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1444.115018] env[62519]: DEBUG nova.virt.hardware [None req-2cdb3cee-efd6-4022-b37b-5545123251f0 tempest-ServerShowV247Test-1912217433 tempest-ServerShowV247Test-1912217433-project-member] Image limits 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1444.115018] env[62519]: DEBUG nova.virt.hardware [None req-2cdb3cee-efd6-4022-b37b-5545123251f0 tempest-ServerShowV247Test-1912217433 tempest-ServerShowV247Test-1912217433-project-member] Flavor pref 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1444.115348] env[62519]: DEBUG nova.virt.hardware [None req-2cdb3cee-efd6-4022-b37b-5545123251f0 tempest-ServerShowV247Test-1912217433 tempest-ServerShowV247Test-1912217433-project-member] Image pref 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1444.115348] env[62519]: DEBUG nova.virt.hardware [None req-2cdb3cee-efd6-4022-b37b-5545123251f0 tempest-ServerShowV247Test-1912217433 tempest-ServerShowV247Test-1912217433-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1444.115348] env[62519]: DEBUG nova.virt.hardware [None req-2cdb3cee-efd6-4022-b37b-5545123251f0 tempest-ServerShowV247Test-1912217433 tempest-ServerShowV247Test-1912217433-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1444.115348] env[62519]: DEBUG nova.virt.hardware [None req-2cdb3cee-efd6-4022-b37b-5545123251f0 tempest-ServerShowV247Test-1912217433 tempest-ServerShowV247Test-1912217433-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62519) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1444.115566] env[62519]: DEBUG nova.virt.hardware [None req-2cdb3cee-efd6-4022-b37b-5545123251f0 
tempest-ServerShowV247Test-1912217433 tempest-ServerShowV247Test-1912217433-project-member] Got 1 possible topologies {{(pid=62519) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1444.115608] env[62519]: DEBUG nova.virt.hardware [None req-2cdb3cee-efd6-4022-b37b-5545123251f0 tempest-ServerShowV247Test-1912217433 tempest-ServerShowV247Test-1912217433-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1444.115759] env[62519]: DEBUG nova.virt.hardware [None req-2cdb3cee-efd6-4022-b37b-5545123251f0 tempest-ServerShowV247Test-1912217433 tempest-ServerShowV247Test-1912217433-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1444.116764] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95163a0c-e268-4ab4-b578-d4412be8a1a0 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1444.126493] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a79c8325-3d37-492b-9994-280c00ed80d5 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1444.144459] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-2cdb3cee-efd6-4022-b37b-5545123251f0 tempest-ServerShowV247Test-1912217433 tempest-ServerShowV247Test-1912217433-project-member] [instance: 49221ea3-d457-4cf5-97a9-9ae74c4e86fb] Instance VIF info [] {{(pid=62519) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1444.152024] env[62519]: DEBUG oslo.service.loopingcall [None req-2cdb3cee-efd6-4022-b37b-5545123251f0 tempest-ServerShowV247Test-1912217433 tempest-ServerShowV247Test-1912217433-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62519) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1444.152024] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 49221ea3-d457-4cf5-97a9-9ae74c4e86fb] Creating VM on the ESX host {{(pid=62519) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1444.152024] env[62519]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-0500e62b-8639-439b-8e4c-250daf38bd98 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1444.174659] env[62519]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1444.174659] env[62519]: value = "task-1802070" [ 1444.174659] env[62519]: _type = "Task" [ 1444.174659] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1444.185290] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1802070, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1444.193494] env[62519]: DEBUG nova.scheduler.client.report [None req-070141c4-76cd-4253-8f36-904e93010092 tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] Inventory has not changed for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1444.278208] env[62519]: DEBUG oslo_vmware.api [None req-f54a75ce-3152-4fde-8da2-c76420839723 tempest-InstanceActionsV221TestJSON-1566764076 tempest-InstanceActionsV221TestJSON-1566764076-project-member] Task: {'id': task-1802068, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1444.279246] env[62519]: DEBUG oslo_vmware.api [None req-fcf29d77-c706-4a19-9381-3a93a4c3aaf6 tempest-ServerShowV247Test-1912217433 tempest-ServerShowV247Test-1912217433-project-member] Task: {'id': task-1802069, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1444.686624] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1802070, 'name': CreateVM_Task} progress is 99%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1444.702025] env[62519]: DEBUG oslo_concurrency.lockutils [None req-070141c4-76cd-4253-8f36-904e93010092 tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.649s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1444.702025] env[62519]: DEBUG nova.compute.manager [None req-070141c4-76cd-4253-8f36-904e93010092 tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] [instance: 021c3287-5887-457e-9b3a-233308fb9b23] Start building networks asynchronously for instance. 
{{(pid=62519) _build_resources /opt/stack/nova/nova/compute/manager.py:2838}} [ 1444.702777] env[62519]: DEBUG oslo_concurrency.lockutils [None req-e0b428ce-832c-48b3-9b41-b8eaaec78edb tempest-ServerDiagnosticsTest-1027319288 tempest-ServerDiagnosticsTest-1027319288-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 26.624s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1444.704457] env[62519]: DEBUG nova.objects.instance [None req-e0b428ce-832c-48b3-9b41-b8eaaec78edb tempest-ServerDiagnosticsTest-1027319288 tempest-ServerDiagnosticsTest-1027319288-project-member] Lazy-loading 'resources' on Instance uuid 1118c1e5-1aa8-4f52-9fb9-e86531bf83d1 {{(pid=62519) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1444.763176] env[62519]: DEBUG nova.network.neutron [req-27813b76-97b0-4ea4-8187-a3fbd2140ce3 req-11c4ef88-06ea-45bd-af61-edd2b6af56c1 service nova] [instance: 91902e7f-8c15-447b-a3a8-04433434b1b6] Updated VIF entry in instance network info cache for port 57934d0f-3be6-4e2d-9e86-b5500fae4b3b. {{(pid=62519) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1444.763774] env[62519]: DEBUG nova.network.neutron [req-27813b76-97b0-4ea4-8187-a3fbd2140ce3 req-11c4ef88-06ea-45bd-af61-edd2b6af56c1 service nova] [instance: 91902e7f-8c15-447b-a3a8-04433434b1b6] Updating instance_info_cache with network_info: [{"id": "57934d0f-3be6-4e2d-9e86-b5500fae4b3b", "address": "fa:16:3e:f5:c6:8e", "network": {"id": "562e50fb-c1d1-4596-a481-657942fd7331", "bridge": "br-int", "label": "tempest-VolumesAssistedSnapshotsTest-962652241-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.246", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "679c16f2d0ab479ab1e7395dab280c96", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "16f09e8c-5240-4839-80cc-62ec29700bd2", "external-id": "nsx-vlan-transportzone-720", "segmentation_id": 720, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap57934d0f-3b", "ovs_interfaceid": "57934d0f-3be6-4e2d-9e86-b5500fae4b3b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1444.765667] env[62519]: DEBUG oslo_service.periodic_task [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=62519) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1444.777229] env[62519]: DEBUG oslo_vmware.api [None req-f54a75ce-3152-4fde-8da2-c76420839723 tempest-InstanceActionsV221TestJSON-1566764076 tempest-InstanceActionsV221TestJSON-1566764076-project-member] Task: {'id': task-1802068, 'name': ReconfigVM_Task, 'duration_secs': 0.56289} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1444.781121] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-f54a75ce-3152-4fde-8da2-c76420839723 tempest-InstanceActionsV221TestJSON-1566764076 tempest-InstanceActionsV221TestJSON-1566764076-project-member] [instance: c612a1be-fb39-416d-a9d2-d206582e5aeb] Reconfigured VM instance instance-00000011 to attach disk [datastore1] c612a1be-fb39-416d-a9d2-d206582e5aeb/c612a1be-fb39-416d-a9d2-d206582e5aeb.vmdk or device None with type sparse {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1444.781121] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-5bc854e5-5770-4ee8-9309-41e6e42d60e4 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1444.788645] env[62519]: DEBUG oslo_vmware.api [None req-fcf29d77-c706-4a19-9381-3a93a4c3aaf6 tempest-ServerShowV247Test-1912217433 tempest-ServerShowV247Test-1912217433-project-member] Task: {'id': task-1802069, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1444.793631] env[62519]: DEBUG oslo_vmware.api [None req-f54a75ce-3152-4fde-8da2-c76420839723 tempest-InstanceActionsV221TestJSON-1566764076 tempest-InstanceActionsV221TestJSON-1566764076-project-member] Waiting for the task: (returnval){ [ 1444.793631] env[62519]: value = "task-1802071" [ 1444.793631] env[62519]: _type = "Task" [ 1444.793631] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1444.801743] env[62519]: DEBUG oslo_vmware.api [None req-f54a75ce-3152-4fde-8da2-c76420839723 tempest-InstanceActionsV221TestJSON-1566764076 tempest-InstanceActionsV221TestJSON-1566764076-project-member] Task: {'id': task-1802071, 'name': Rename_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1445.027404] env[62519]: DEBUG nova.compute.manager [req-7f1f9afe-c8f9-46e9-9891-08c3bf6ee60c req-f51ac235-fff0-4438-ac2a-e694c595bffc service nova] [instance: c61c893f-826b-4874-b253-de6fbffa9e5a] Received event network-changed-8a1a2471-fe2e-4bb8-917a-135d0d5f4859 {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1445.027404] env[62519]: DEBUG nova.compute.manager [req-7f1f9afe-c8f9-46e9-9891-08c3bf6ee60c req-f51ac235-fff0-4438-ac2a-e694c595bffc service nova] [instance: c61c893f-826b-4874-b253-de6fbffa9e5a] Refreshing instance network info cache due to event network-changed-8a1a2471-fe2e-4bb8-917a-135d0d5f4859. 
{{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11628}} [ 1445.027885] env[62519]: DEBUG oslo_concurrency.lockutils [req-7f1f9afe-c8f9-46e9-9891-08c3bf6ee60c req-f51ac235-fff0-4438-ac2a-e694c595bffc service nova] Acquiring lock "refresh_cache-c61c893f-826b-4874-b253-de6fbffa9e5a" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1445.027885] env[62519]: DEBUG oslo_concurrency.lockutils [req-7f1f9afe-c8f9-46e9-9891-08c3bf6ee60c req-f51ac235-fff0-4438-ac2a-e694c595bffc service nova] Acquired lock "refresh_cache-c61c893f-826b-4874-b253-de6fbffa9e5a" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1445.027885] env[62519]: DEBUG nova.network.neutron [req-7f1f9afe-c8f9-46e9-9891-08c3bf6ee60c req-f51ac235-fff0-4438-ac2a-e694c595bffc service nova] [instance: c61c893f-826b-4874-b253-de6fbffa9e5a] Refreshing network info cache for port 8a1a2471-fe2e-4bb8-917a-135d0d5f4859 {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1445.187375] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1802070, 'name': CreateVM_Task, 'duration_secs': 0.618753} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1445.187549] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 49221ea3-d457-4cf5-97a9-9ae74c4e86fb] Created VM on the ESX host {{(pid=62519) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1445.188018] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2cdb3cee-efd6-4022-b37b-5545123251f0 tempest-ServerShowV247Test-1912217433 tempest-ServerShowV247Test-1912217433-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1445.188190] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2cdb3cee-efd6-4022-b37b-5545123251f0 tempest-ServerShowV247Test-1912217433 tempest-ServerShowV247Test-1912217433-project-member] Acquired lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1445.188515] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2cdb3cee-efd6-4022-b37b-5545123251f0 tempest-ServerShowV247Test-1912217433 tempest-ServerShowV247Test-1912217433-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1445.188771] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d7f10947-4d5d-42d7-a711-88c66483599a {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1445.194559] env[62519]: DEBUG oslo_vmware.api [None req-2cdb3cee-efd6-4022-b37b-5545123251f0 tempest-ServerShowV247Test-1912217433 tempest-ServerShowV247Test-1912217433-project-member] Waiting for the task: (returnval){ [ 1445.194559] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]52995cdf-fe6f-ceef-2843-ecc2aaa42ed8" [ 1445.194559] env[62519]: _type = "Task" [ 1445.194559] env[62519]: } to complete. 
{{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1445.209080] env[62519]: DEBUG nova.compute.utils [None req-070141c4-76cd-4253-8f36-904e93010092 tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] Using /dev/sd instead of None {{(pid=62519) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1445.213883] env[62519]: DEBUG oslo_vmware.api [None req-2cdb3cee-efd6-4022-b37b-5545123251f0 tempest-ServerShowV247Test-1912217433 tempest-ServerShowV247Test-1912217433-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52995cdf-fe6f-ceef-2843-ecc2aaa42ed8, 'name': SearchDatastore_Task} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1445.213883] env[62519]: DEBUG nova.compute.manager [None req-070141c4-76cd-4253-8f36-904e93010092 tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] [instance: 021c3287-5887-457e-9b3a-233308fb9b23] Allocating IP information in the background. {{(pid=62519) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1989}} [ 1445.213883] env[62519]: DEBUG nova.network.neutron [None req-070141c4-76cd-4253-8f36-904e93010092 tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] [instance: 021c3287-5887-457e-9b3a-233308fb9b23] allocate_for_instance() {{(pid=62519) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1445.216179] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2cdb3cee-efd6-4022-b37b-5545123251f0 tempest-ServerShowV247Test-1912217433 tempest-ServerShowV247Test-1912217433-project-member] Releasing lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1445.216179] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-2cdb3cee-efd6-4022-b37b-5545123251f0 tempest-ServerShowV247Test-1912217433 tempest-ServerShowV247Test-1912217433-project-member] [instance: 49221ea3-d457-4cf5-97a9-9ae74c4e86fb] Processing image 15793716-f1d9-4a86-9030-717adf498693 {{(pid=62519) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1445.216179] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2cdb3cee-efd6-4022-b37b-5545123251f0 tempest-ServerShowV247Test-1912217433 tempest-ServerShowV247Test-1912217433-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1445.216179] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2cdb3cee-efd6-4022-b37b-5545123251f0 tempest-ServerShowV247Test-1912217433 tempest-ServerShowV247Test-1912217433-project-member] Acquired lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1445.216344] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-2cdb3cee-efd6-4022-b37b-5545123251f0 tempest-ServerShowV247Test-1912217433 tempest-ServerShowV247Test-1912217433-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62519) 
mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1445.217111] env[62519]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c2e23266-da00-4a67-a64b-b03bd666b33f {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1445.226028] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-2cdb3cee-efd6-4022-b37b-5545123251f0 tempest-ServerShowV247Test-1912217433 tempest-ServerShowV247Test-1912217433-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62519) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1445.226695] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-2cdb3cee-efd6-4022-b37b-5545123251f0 tempest-ServerShowV247Test-1912217433 tempest-ServerShowV247Test-1912217433-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62519) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1445.227742] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2385e498-db6c-42b6-82ee-29c81c89dbd5 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1445.233949] env[62519]: DEBUG oslo_vmware.api [None req-2cdb3cee-efd6-4022-b37b-5545123251f0 tempest-ServerShowV247Test-1912217433 tempest-ServerShowV247Test-1912217433-project-member] Waiting for the task: (returnval){ [ 1445.233949] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]521559df-5561-14d1-893c-a160efa90c89" [ 1445.233949] env[62519]: _type = "Task" [ 1445.233949] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1445.250042] env[62519]: DEBUG oslo_vmware.api [None req-2cdb3cee-efd6-4022-b37b-5545123251f0 tempest-ServerShowV247Test-1912217433 tempest-ServerShowV247Test-1912217433-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]521559df-5561-14d1-893c-a160efa90c89, 'name': SearchDatastore_Task, 'duration_secs': 0.008648} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1445.250042] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ebe114ed-836f-448a-8178-39901f013618 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1445.254131] env[62519]: DEBUG oslo_vmware.api [None req-2cdb3cee-efd6-4022-b37b-5545123251f0 tempest-ServerShowV247Test-1912217433 tempest-ServerShowV247Test-1912217433-project-member] Waiting for the task: (returnval){ [ 1445.254131] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]5212e2a1-325e-fadd-617c-dce5ddd1ab42" [ 1445.254131] env[62519]: _type = "Task" [ 1445.254131] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1445.265027] env[62519]: DEBUG oslo_vmware.api [None req-2cdb3cee-efd6-4022-b37b-5545123251f0 tempest-ServerShowV247Test-1912217433 tempest-ServerShowV247Test-1912217433-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]5212e2a1-325e-fadd-617c-dce5ddd1ab42, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1445.268289] env[62519]: DEBUG oslo_concurrency.lockutils [req-27813b76-97b0-4ea4-8187-a3fbd2140ce3 req-11c4ef88-06ea-45bd-af61-edd2b6af56c1 service nova] Releasing lock "refresh_cache-91902e7f-8c15-447b-a3a8-04433434b1b6" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1445.277175] env[62519]: DEBUG nova.policy [None req-070141c4-76cd-4253-8f36-904e93010092 tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '05b2585b9472460ab5d30c3451ff31ca', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'bde061b2dd1945bdae6c789325741ade', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62519) authorize /opt/stack/nova/nova/policy.py:192}} [ 1445.283501] env[62519]: DEBUG oslo_vmware.api [None req-fcf29d77-c706-4a19-9381-3a93a4c3aaf6 tempest-ServerShowV247Test-1912217433 tempest-ServerShowV247Test-1912217433-project-member] Task: {'id': task-1802069, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.504} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1445.284144] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-fcf29d77-c706-4a19-9381-3a93a4c3aaf6 tempest-ServerShowV247Test-1912217433 tempest-ServerShowV247Test-1912217433-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk to [datastore1] 42497ab5-cce9-4614-a6d1-dffbf6764d7b/42497ab5-cce9-4614-a6d1-dffbf6764d7b.vmdk {{(pid=62519) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1445.284144] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-fcf29d77-c706-4a19-9381-3a93a4c3aaf6 tempest-ServerShowV247Test-1912217433 tempest-ServerShowV247Test-1912217433-project-member] [instance: 42497ab5-cce9-4614-a6d1-dffbf6764d7b] Extending root virtual disk to 1048576 {{(pid=62519) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1445.284316] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-1480fd1a-13e1-4ba3-82be-65fa717f9a5b {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1445.291898] env[62519]: DEBUG oslo_vmware.api [None req-fcf29d77-c706-4a19-9381-3a93a4c3aaf6 tempest-ServerShowV247Test-1912217433 tempest-ServerShowV247Test-1912217433-project-member] Waiting for the task: (returnval){ [ 1445.291898] env[62519]: value = "task-1802072" [ 1445.291898] env[62519]: _type = "Task" [ 1445.291898] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1445.305782] env[62519]: DEBUG oslo_vmware.api [None req-fcf29d77-c706-4a19-9381-3a93a4c3aaf6 tempest-ServerShowV247Test-1912217433 tempest-ServerShowV247Test-1912217433-project-member] Task: {'id': task-1802072, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1445.312407] env[62519]: DEBUG oslo_vmware.api [None req-f54a75ce-3152-4fde-8da2-c76420839723 tempest-InstanceActionsV221TestJSON-1566764076 tempest-InstanceActionsV221TestJSON-1566764076-project-member] Task: {'id': task-1802071, 'name': Rename_Task, 'duration_secs': 0.443183} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1445.312407] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-f54a75ce-3152-4fde-8da2-c76420839723 tempest-InstanceActionsV221TestJSON-1566764076 tempest-InstanceActionsV221TestJSON-1566764076-project-member] [instance: c612a1be-fb39-416d-a9d2-d206582e5aeb] Powering on the VM {{(pid=62519) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1445.312689] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-27482595-2836-4818-9841-0bdcd425dce2 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1445.319292] env[62519]: DEBUG oslo_vmware.api [None req-f54a75ce-3152-4fde-8da2-c76420839723 tempest-InstanceActionsV221TestJSON-1566764076 tempest-InstanceActionsV221TestJSON-1566764076-project-member] Waiting for the task: (returnval){ [ 1445.319292] env[62519]: value = "task-1802073" [ 1445.319292] env[62519]: _type = "Task" [ 1445.319292] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1445.330197] env[62519]: DEBUG oslo_vmware.api [None req-f54a75ce-3152-4fde-8da2-c76420839723 tempest-InstanceActionsV221TestJSON-1566764076 tempest-InstanceActionsV221TestJSON-1566764076-project-member] Task: {'id': task-1802073, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1445.357982] env[62519]: DEBUG oslo_concurrency.lockutils [None req-a045cbfb-ac93-4591-9bfc-366cc2fb264e tempest-VolumesAdminNegativeTest-694306887 tempest-VolumesAdminNegativeTest-694306887-project-member] Lock "c61c893f-826b-4874-b253-de6fbffa9e5a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 48.332s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1445.711052] env[62519]: DEBUG nova.network.neutron [None req-070141c4-76cd-4253-8f36-904e93010092 tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] [instance: 021c3287-5887-457e-9b3a-233308fb9b23] Successfully created port: 60084240-cc60-4e07-9cae-3f7d36559e40 {{(pid=62519) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1445.715931] env[62519]: DEBUG nova.compute.manager [None req-070141c4-76cd-4253-8f36-904e93010092 tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] [instance: 021c3287-5887-457e-9b3a-233308fb9b23] Start building block device mappings for instance. 
{{(pid=62519) _build_resources /opt/stack/nova/nova/compute/manager.py:2873}} [ 1445.767851] env[62519]: DEBUG oslo_vmware.api [None req-2cdb3cee-efd6-4022-b37b-5545123251f0 tempest-ServerShowV247Test-1912217433 tempest-ServerShowV247Test-1912217433-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]5212e2a1-325e-fadd-617c-dce5ddd1ab42, 'name': SearchDatastore_Task, 'duration_secs': 0.014019} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1445.768171] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2cdb3cee-efd6-4022-b37b-5545123251f0 tempest-ServerShowV247Test-1912217433 tempest-ServerShowV247Test-1912217433-project-member] Releasing lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1445.768382] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-2cdb3cee-efd6-4022-b37b-5545123251f0 tempest-ServerShowV247Test-1912217433 tempest-ServerShowV247Test-1912217433-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk to [datastore1] 49221ea3-d457-4cf5-97a9-9ae74c4e86fb/49221ea3-d457-4cf5-97a9-9ae74c4e86fb.vmdk {{(pid=62519) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1445.768630] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-da643457-0ded-4a88-afef-40833c83ef7a {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1445.778035] env[62519]: DEBUG oslo_vmware.api [None req-2cdb3cee-efd6-4022-b37b-5545123251f0 tempest-ServerShowV247Test-1912217433 tempest-ServerShowV247Test-1912217433-project-member] Waiting for the task: (returnval){ [ 1445.778035] env[62519]: value = "task-1802074" [ 1445.778035] env[62519]: _type = "Task" [ 1445.778035] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1445.790339] env[62519]: DEBUG oslo_vmware.api [None req-2cdb3cee-efd6-4022-b37b-5545123251f0 tempest-ServerShowV247Test-1912217433 tempest-ServerShowV247Test-1912217433-project-member] Task: {'id': task-1802074, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1445.801151] env[62519]: DEBUG oslo_vmware.api [None req-fcf29d77-c706-4a19-9381-3a93a4c3aaf6 tempest-ServerShowV247Test-1912217433 tempest-ServerShowV247Test-1912217433-project-member] Task: {'id': task-1802072, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.067384} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1445.801454] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-fcf29d77-c706-4a19-9381-3a93a4c3aaf6 tempest-ServerShowV247Test-1912217433 tempest-ServerShowV247Test-1912217433-project-member] [instance: 42497ab5-cce9-4614-a6d1-dffbf6764d7b] Extended root virtual disk {{(pid=62519) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1445.802249] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8c05305-b4c9-4379-bc07-2b82861e43b6 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1445.826834] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-fcf29d77-c706-4a19-9381-3a93a4c3aaf6 tempest-ServerShowV247Test-1912217433 tempest-ServerShowV247Test-1912217433-project-member] [instance: 42497ab5-cce9-4614-a6d1-dffbf6764d7b] Reconfiguring VM instance instance-00000012 to attach disk [datastore1] 42497ab5-cce9-4614-a6d1-dffbf6764d7b/42497ab5-cce9-4614-a6d1-dffbf6764d7b.vmdk or device None with type sparse {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1445.832287] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3fb12bc2-f7ed-45a8-aa69-06ca49c2fe29 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1445.858924] env[62519]: DEBUG oslo_vmware.api [None req-f54a75ce-3152-4fde-8da2-c76420839723 tempest-InstanceActionsV221TestJSON-1566764076 tempest-InstanceActionsV221TestJSON-1566764076-project-member] Task: {'id': task-1802073, 'name': PowerOnVM_Task, 'duration_secs': 0.445816} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1445.862368] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-f54a75ce-3152-4fde-8da2-c76420839723 tempest-InstanceActionsV221TestJSON-1566764076 tempest-InstanceActionsV221TestJSON-1566764076-project-member] [instance: c612a1be-fb39-416d-a9d2-d206582e5aeb] Powered on the VM {{(pid=62519) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1445.862679] env[62519]: INFO nova.compute.manager [None req-f54a75ce-3152-4fde-8da2-c76420839723 tempest-InstanceActionsV221TestJSON-1566764076 tempest-InstanceActionsV221TestJSON-1566764076-project-member] [instance: c612a1be-fb39-416d-a9d2-d206582e5aeb] Took 7.25 seconds to spawn the instance on the hypervisor. [ 1445.862876] env[62519]: DEBUG nova.compute.manager [None req-f54a75ce-3152-4fde-8da2-c76420839723 tempest-InstanceActionsV221TestJSON-1566764076 tempest-InstanceActionsV221TestJSON-1566764076-project-member] [instance: c612a1be-fb39-416d-a9d2-d206582e5aeb] Checking state {{(pid=62519) _get_power_state /opt/stack/nova/nova/compute/manager.py:1799}} [ 1445.863254] env[62519]: DEBUG nova.compute.manager [None req-7abad8d6-8e42-4549-a192-8cdae3b16ee9 tempest-ServerExternalEventsTest-627262165 tempest-ServerExternalEventsTest-627262165-project-member] [instance: f7d5c77d-6c78-4969-b511-2b03ab624c84] Starting instance... 
{{(pid=62519) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2441}} [ 1445.865898] env[62519]: DEBUG oslo_vmware.api [None req-fcf29d77-c706-4a19-9381-3a93a4c3aaf6 tempest-ServerShowV247Test-1912217433 tempest-ServerShowV247Test-1912217433-project-member] Waiting for the task: (returnval){ [ 1445.865898] env[62519]: value = "task-1802075" [ 1445.865898] env[62519]: _type = "Task" [ 1445.865898] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1445.867268] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-edf451f7-be8b-4916-99da-c46d025d8d7f {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1445.883061] env[62519]: DEBUG oslo_vmware.api [None req-fcf29d77-c706-4a19-9381-3a93a4c3aaf6 tempest-ServerShowV247Test-1912217433 tempest-ServerShowV247Test-1912217433-project-member] Task: {'id': task-1802075, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1445.914071] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c460a40-e21b-4687-9bf4-dcd8d643b6ec {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1445.922783] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-662180ea-26cd-41ec-b384-44b0d3234a71 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1445.957537] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55778fa6-265c-48f3-9d1c-8109e1e00c59 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1445.974533] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7efb0e78-347b-4e2e-874d-549bfe3e4380 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1445.989656] env[62519]: DEBUG nova.compute.provider_tree [None req-e0b428ce-832c-48b3-9b41-b8eaaec78edb tempest-ServerDiagnosticsTest-1027319288 tempest-ServerDiagnosticsTest-1027319288-project-member] Inventory has not changed in ProviderTree for provider: f8ca0d98-9158-4b85-ae0e-b106f966dd44 {{(pid=62519) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1446.061612] env[62519]: DEBUG nova.network.neutron [req-7f1f9afe-c8f9-46e9-9891-08c3bf6ee60c req-f51ac235-fff0-4438-ac2a-e694c595bffc service nova] [instance: c61c893f-826b-4874-b253-de6fbffa9e5a] Updated VIF entry in instance network info cache for port 8a1a2471-fe2e-4bb8-917a-135d0d5f4859. 
{{(pid=62519) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1446.062017] env[62519]: DEBUG nova.network.neutron [req-7f1f9afe-c8f9-46e9-9891-08c3bf6ee60c req-f51ac235-fff0-4438-ac2a-e694c595bffc service nova] [instance: c61c893f-826b-4874-b253-de6fbffa9e5a] Updating instance_info_cache with network_info: [{"id": "8a1a2471-fe2e-4bb8-917a-135d0d5f4859", "address": "fa:16:3e:cc:22:35", "network": {"id": "58441f17-708d-4a4c-9e78-8a8d403416ae", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-132258554-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.247", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "92b737895c7c42f78fbc5d0fff165dc8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b5c34919-7d52-4a52-bab1-81af4c8182ef", "external-id": "nsx-vlan-transportzone-458", "segmentation_id": 458, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8a1a2471-fe", "ovs_interfaceid": "8a1a2471-fe2e-4bb8-917a-135d0d5f4859", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1446.289334] env[62519]: DEBUG oslo_vmware.api [None req-2cdb3cee-efd6-4022-b37b-5545123251f0 tempest-ServerShowV247Test-1912217433 tempest-ServerShowV247Test-1912217433-project-member] Task: {'id': task-1802074, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.468655} completed successfully. 
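The instance_info_cache entry above is a list of VIF dicts: port id, MAC address, subnets with fixed and floating IPs, and the OVS/NSX binding details. A small sketch of pulling the addresses out of one such entry; the dict literal below is trimmed from the log record and keeps only the fields the code touches.

    vif = {
        "id": "8a1a2471-fe2e-4bb8-917a-135d0d5f4859",
        "address": "fa:16:3e:cc:22:35",
        "network": {
            "subnets": [{
                "cidr": "192.168.128.0/28",
                "ips": [{
                    "address": "192.168.128.9",
                    "type": "fixed",
                    "floating_ips": [{"address": "10.180.180.247", "type": "floating"}],
                }],
            }],
        },
        "devname": "tap8a1a2471-fe",
    }

    def summarize_vif(vif):
        """Return (devname, mac, fixed IPs, floating IPs) for one cached VIF entry."""
        fixed, floating = [], []
        for subnet in vif["network"]["subnets"]:
            for ip in subnet["ips"]:
                fixed.append(ip["address"])
                floating.extend(f["address"] for f in ip.get("floating_ips", []))
        return vif["devname"], vif["address"], fixed, floating

    print(summarize_vif(vif))
    # ('tap8a1a2471-fe', 'fa:16:3e:cc:22:35', ['192.168.128.9'], ['10.180.180.247'])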
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1446.289579] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-2cdb3cee-efd6-4022-b37b-5545123251f0 tempest-ServerShowV247Test-1912217433 tempest-ServerShowV247Test-1912217433-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk to [datastore1] 49221ea3-d457-4cf5-97a9-9ae74c4e86fb/49221ea3-d457-4cf5-97a9-9ae74c4e86fb.vmdk {{(pid=62519) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1446.289792] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-2cdb3cee-efd6-4022-b37b-5545123251f0 tempest-ServerShowV247Test-1912217433 tempest-ServerShowV247Test-1912217433-project-member] [instance: 49221ea3-d457-4cf5-97a9-9ae74c4e86fb] Extending root virtual disk to 1048576 {{(pid=62519) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1446.290048] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-76cc7f9f-6a22-4560-bdfb-4672a5fc46a2 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1446.297027] env[62519]: DEBUG oslo_vmware.api [None req-2cdb3cee-efd6-4022-b37b-5545123251f0 tempest-ServerShowV247Test-1912217433 tempest-ServerShowV247Test-1912217433-project-member] Waiting for the task: (returnval){ [ 1446.297027] env[62519]: value = "task-1802076" [ 1446.297027] env[62519]: _type = "Task" [ 1446.297027] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1446.305358] env[62519]: DEBUG oslo_vmware.api [None req-2cdb3cee-efd6-4022-b37b-5545123251f0 tempest-ServerShowV247Test-1912217433 tempest-ServerShowV247Test-1912217433-project-member] Task: {'id': task-1802076, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1446.381518] env[62519]: DEBUG oslo_vmware.api [None req-fcf29d77-c706-4a19-9381-3a93a4c3aaf6 tempest-ServerShowV247Test-1912217433 tempest-ServerShowV247Test-1912217433-project-member] Task: {'id': task-1802075, 'name': ReconfigVM_Task, 'duration_secs': 0.32652} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1446.381518] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-fcf29d77-c706-4a19-9381-3a93a4c3aaf6 tempest-ServerShowV247Test-1912217433 tempest-ServerShowV247Test-1912217433-project-member] [instance: 42497ab5-cce9-4614-a6d1-dffbf6764d7b] Reconfigured VM instance instance-00000012 to attach disk [datastore1] 42497ab5-cce9-4614-a6d1-dffbf6764d7b/42497ab5-cce9-4614-a6d1-dffbf6764d7b.vmdk or device None with type sparse {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1446.381763] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-0e077789-03d4-4d83-9a75-5d6db52373db {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1446.392465] env[62519]: DEBUG oslo_vmware.api [None req-fcf29d77-c706-4a19-9381-3a93a4c3aaf6 tempest-ServerShowV247Test-1912217433 tempest-ServerShowV247Test-1912217433-project-member] Waiting for the task: (returnval){ [ 1446.392465] env[62519]: value = "task-1802077" [ 1446.392465] env[62519]: _type = "Task" [ 1446.392465] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1446.400572] env[62519]: DEBUG oslo_concurrency.lockutils [None req-7abad8d6-8e42-4549-a192-8cdae3b16ee9 tempest-ServerExternalEventsTest-627262165 tempest-ServerExternalEventsTest-627262165-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1446.401869] env[62519]: INFO nova.compute.manager [None req-f54a75ce-3152-4fde-8da2-c76420839723 tempest-InstanceActionsV221TestJSON-1566764076 tempest-InstanceActionsV221TestJSON-1566764076-project-member] [instance: c612a1be-fb39-416d-a9d2-d206582e5aeb] Took 34.97 seconds to build instance. [ 1446.411661] env[62519]: DEBUG oslo_vmware.api [None req-fcf29d77-c706-4a19-9381-3a93a4c3aaf6 tempest-ServerShowV247Test-1912217433 tempest-ServerShowV247Test-1912217433-project-member] Task: {'id': task-1802077, 'name': Rename_Task} progress is 14%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1446.492951] env[62519]: DEBUG nova.scheduler.client.report [None req-e0b428ce-832c-48b3-9b41-b8eaaec78edb tempest-ServerDiagnosticsTest-1027319288 tempest-ServerDiagnosticsTest-1027319288-project-member] Inventory has not changed for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1446.567881] env[62519]: DEBUG oslo_concurrency.lockutils [req-7f1f9afe-c8f9-46e9-9891-08c3bf6ee60c req-f51ac235-fff0-4438-ac2a-e694c595bffc service nova] Releasing lock "refresh_cache-c61c893f-826b-4874-b253-de6fbffa9e5a" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1446.729062] env[62519]: DEBUG nova.compute.manager [None req-070141c4-76cd-4253-8f36-904e93010092 tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] [instance: 021c3287-5887-457e-9b3a-233308fb9b23] Start spawning the instance on the hypervisor. {{(pid=62519) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2647}} [ 1446.758639] env[62519]: DEBUG nova.virt.hardware [None req-070141c4-76cd-4253-8f36-904e93010092 tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T07:56:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T07:55:55Z,direct_url=,disk_format='vmdk',id=15793716-f1d9-4a86-9030-717adf498693,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4069337684d348e0a1ab4eb6a1a2b14d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T07:55:56Z,virtual_size=,visibility=), allow threads: False {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1446.758880] env[62519]: DEBUG nova.virt.hardware [None req-070141c4-76cd-4253-8f36-904e93010092 tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] Flavor limits 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1446.759040] env[62519]: DEBUG nova.virt.hardware [None req-070141c4-76cd-4253-8f36-904e93010092 tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] Image limits 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1446.759235] env[62519]: DEBUG nova.virt.hardware [None req-070141c4-76cd-4253-8f36-904e93010092 tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] Flavor pref 0:0:0 {{(pid=62519) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:392}} [ 1446.759358] env[62519]: DEBUG nova.virt.hardware [None req-070141c4-76cd-4253-8f36-904e93010092 tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] Image pref 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1446.759499] env[62519]: DEBUG nova.virt.hardware [None req-070141c4-76cd-4253-8f36-904e93010092 tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1446.759701] env[62519]: DEBUG nova.virt.hardware [None req-070141c4-76cd-4253-8f36-904e93010092 tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1446.759854] env[62519]: DEBUG nova.virt.hardware [None req-070141c4-76cd-4253-8f36-904e93010092 tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62519) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1446.760127] env[62519]: DEBUG nova.virt.hardware [None req-070141c4-76cd-4253-8f36-904e93010092 tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] Got 1 possible topologies {{(pid=62519) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1446.760363] env[62519]: DEBUG nova.virt.hardware [None req-070141c4-76cd-4253-8f36-904e93010092 tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1446.760547] env[62519]: DEBUG nova.virt.hardware [None req-070141c4-76cd-4253-8f36-904e93010092 tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1446.761426] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3960490b-629f-4775-9f10-59fe4daffd47 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1446.772114] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c201028b-1c35-4dbd-bef7-a264b263929d {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1446.807644] env[62519]: DEBUG oslo_vmware.api [None req-2cdb3cee-efd6-4022-b37b-5545123251f0 tempest-ServerShowV247Test-1912217433 tempest-ServerShowV247Test-1912217433-project-member] Task: {'id': task-1802076, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.062612} completed successfully. 
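The "Build topologies for 1 vcpu(s)" / "Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)]" records reflect Nova enumerating every (sockets, cores, threads) split of the flavor's vCPU count that fits the limits (65536 each here, since neither flavor nor image constrains the topology). The sketch below is a compact re-implementation of that enumeration idea, not Nova's actual code, which additionally applies preference ordering from flavor and image properties.

    from collections import namedtuple

    VirtCPUTopology = namedtuple("VirtCPUTopology", "sockets cores threads")

    def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
        """Yield every sockets*cores*threads factorization that equals vcpus."""
        for sockets in range(1, min(vcpus, max_sockets) + 1):
            if vcpus % sockets:
                continue
            for cores in range(1, min(vcpus // sockets, max_cores) + 1):
                if (vcpus // sockets) % cores:
                    continue
                threads = vcpus // (sockets * cores)
                if threads <= max_threads:
                    yield VirtCPUTopology(sockets, cores, threads)

    print(list(possible_topologies(1)))  # [VirtCPUTopology(sockets=1, cores=1, threads=1)]
    print(list(possible_topologies(4)))  # every factorization of 4 vCPUs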
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1446.808092] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-2cdb3cee-efd6-4022-b37b-5545123251f0 tempest-ServerShowV247Test-1912217433 tempest-ServerShowV247Test-1912217433-project-member] [instance: 49221ea3-d457-4cf5-97a9-9ae74c4e86fb] Extended root virtual disk {{(pid=62519) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1446.808994] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-510332c6-0460-4c9b-b6bd-b182bf01538d {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1446.833237] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-2cdb3cee-efd6-4022-b37b-5545123251f0 tempest-ServerShowV247Test-1912217433 tempest-ServerShowV247Test-1912217433-project-member] [instance: 49221ea3-d457-4cf5-97a9-9ae74c4e86fb] Reconfiguring VM instance instance-00000013 to attach disk [datastore1] 49221ea3-d457-4cf5-97a9-9ae74c4e86fb/49221ea3-d457-4cf5-97a9-9ae74c4e86fb.vmdk or device None with type sparse {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1446.833812] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3ce379af-9417-43db-8f33-61a9dfdf2441 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1446.857664] env[62519]: DEBUG oslo_vmware.api [None req-2cdb3cee-efd6-4022-b37b-5545123251f0 tempest-ServerShowV247Test-1912217433 tempest-ServerShowV247Test-1912217433-project-member] Waiting for the task: (returnval){ [ 1446.857664] env[62519]: value = "task-1802078" [ 1446.857664] env[62519]: _type = "Task" [ 1446.857664] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1446.866070] env[62519]: DEBUG oslo_vmware.api [None req-2cdb3cee-efd6-4022-b37b-5545123251f0 tempest-ServerShowV247Test-1912217433 tempest-ServerShowV247Test-1912217433-project-member] Task: {'id': task-1802078, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1446.903162] env[62519]: DEBUG oslo_vmware.api [None req-fcf29d77-c706-4a19-9381-3a93a4c3aaf6 tempest-ServerShowV247Test-1912217433 tempest-ServerShowV247Test-1912217433-project-member] Task: {'id': task-1802077, 'name': Rename_Task, 'duration_secs': 0.421566} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1446.905364] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-fcf29d77-c706-4a19-9381-3a93a4c3aaf6 tempest-ServerShowV247Test-1912217433 tempest-ServerShowV247Test-1912217433-project-member] [instance: 42497ab5-cce9-4614-a6d1-dffbf6764d7b] Powering on the VM {{(pid=62519) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1446.906044] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-90bb71d5-2e65-4ff2-b329-d04bb22d6280 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1446.913735] env[62519]: DEBUG oslo_vmware.api [None req-fcf29d77-c706-4a19-9381-3a93a4c3aaf6 tempest-ServerShowV247Test-1912217433 tempest-ServerShowV247Test-1912217433-project-member] Waiting for the task: (returnval){ [ 1446.913735] env[62519]: value = "task-1802079" [ 1446.913735] env[62519]: _type = "Task" [ 1446.913735] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1446.922085] env[62519]: DEBUG oslo_vmware.api [None req-fcf29d77-c706-4a19-9381-3a93a4c3aaf6 tempest-ServerShowV247Test-1912217433 tempest-ServerShowV247Test-1912217433-project-member] Task: {'id': task-1802079, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1447.000641] env[62519]: DEBUG oslo_concurrency.lockutils [None req-e0b428ce-832c-48b3-9b41-b8eaaec78edb tempest-ServerDiagnosticsTest-1027319288 tempest-ServerDiagnosticsTest-1027319288-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.298s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1447.003339] env[62519]: DEBUG oslo_concurrency.lockutils [None req-070141c4-76cd-4253-8f36-904e93010092 tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 28.283s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1447.005602] env[62519]: INFO nova.compute.claims [None req-070141c4-76cd-4253-8f36-904e93010092 tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] [instance: 56790738-4759-468a-9f43-f9c2bc2de23a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1447.021147] env[62519]: INFO nova.scheduler.client.report [None req-e0b428ce-832c-48b3-9b41-b8eaaec78edb tempest-ServerDiagnosticsTest-1027319288 tempest-ServerDiagnosticsTest-1027319288-project-member] Deleted allocations for instance 1118c1e5-1aa8-4f52-9fb9-e86531bf83d1 [ 1447.370859] env[62519]: DEBUG oslo_vmware.api [None req-2cdb3cee-efd6-4022-b37b-5545123251f0 tempest-ServerShowV247Test-1912217433 tempest-ServerShowV247Test-1912217433-project-member] Task: {'id': task-1802078, 'name': ReconfigVM_Task} progress is 14%. 
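Both ServerShowV247Test instances follow the same spawn sequence in the records above: copy the cached image VMDK into the instance directory, extend the root disk to the flavor size (1048576 KiB, i.e. the 1 GiB root disk of m1.nano), attach the disk via a ReconfigVM task, rename the VM, then power it on, polling each vCenter task in turn. The following is a hypothetical outline of that ordering only; the stub function stands in for submitting and waiting on the real tasks and is not a Nova method.

    def run_task(name, duration):
        # Stand-in for submitting a vCenter task and waiting for it to complete.
        print(f"{name}_Task completed successfully (duration_secs={duration})")

    def spawn_from_cached_image(instance_uuid, image_uuid, root_size_kb):
        """Hypothetical outline of the spawn steps visible in the log above."""
        cache = f"[datastore1] devstack-image-cache_base/{image_uuid}/{image_uuid}.vmdk"
        disk = f"[datastore1] {instance_uuid}/{instance_uuid}.vmdk"
        run_task("CopyVirtualDisk", 0.468)    # cache -> instance datastore path
        run_task("ExtendVirtualDisk", 0.067)  # grow root disk to root_size_kb
        run_task("ReconfigVM", 0.579)         # attach the disk to the VM
        run_task("Rename", 0.165)             # give the VM its display name
        run_task("PowerOnVM", 0.813)          # boot it
        print(f"Spawned {instance_uuid}: {cache} -> {disk} ({root_size_kb} KiB root)")

    spawn_from_cached_image("49221ea3-d457-4cf5-97a9-9ae74c4e86fb",
                            "15793716-f1d9-4a86-9030-717adf498693", 1048576)

The durations in the stub calls are the ones logged for instance 49221ea3 above.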
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1447.423924] env[62519]: DEBUG oslo_vmware.api [None req-fcf29d77-c706-4a19-9381-3a93a4c3aaf6 tempest-ServerShowV247Test-1912217433 tempest-ServerShowV247Test-1912217433-project-member] Task: {'id': task-1802079, 'name': PowerOnVM_Task} progress is 88%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1447.481286] env[62519]: DEBUG nova.network.neutron [None req-070141c4-76cd-4253-8f36-904e93010092 tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] [instance: 021c3287-5887-457e-9b3a-233308fb9b23] Successfully updated port: 60084240-cc60-4e07-9cae-3f7d36559e40 {{(pid=62519) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1447.529722] env[62519]: DEBUG oslo_concurrency.lockutils [None req-e0b428ce-832c-48b3-9b41-b8eaaec78edb tempest-ServerDiagnosticsTest-1027319288 tempest-ServerDiagnosticsTest-1027319288-project-member] Lock "1118c1e5-1aa8-4f52-9fb9-e86531bf83d1" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 33.821s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1447.559079] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2b9c37cf-110c-4ed1-b062-9198747e913f tempest-InstanceActionsV221TestJSON-1566764076 tempest-InstanceActionsV221TestJSON-1566764076-project-member] Acquiring lock "c612a1be-fb39-416d-a9d2-d206582e5aeb" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1447.719976] env[62519]: DEBUG nova.compute.manager [req-dbac63bf-844a-468b-ac4d-4c940c3664d2 req-d2a3963e-1e49-463e-9bde-ab096fba8d4d service nova] [instance: 021c3287-5887-457e-9b3a-233308fb9b23] Received event network-vif-plugged-60084240-cc60-4e07-9cae-3f7d36559e40 {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1447.749788] env[62519]: DEBUG oslo_concurrency.lockutils [req-dbac63bf-844a-468b-ac4d-4c940c3664d2 req-d2a3963e-1e49-463e-9bde-ab096fba8d4d service nova] Acquiring lock "021c3287-5887-457e-9b3a-233308fb9b23-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1447.749788] env[62519]: DEBUG oslo_concurrency.lockutils [req-dbac63bf-844a-468b-ac4d-4c940c3664d2 req-d2a3963e-1e49-463e-9bde-ab096fba8d4d service nova] Lock "021c3287-5887-457e-9b3a-233308fb9b23-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1447.749788] env[62519]: DEBUG oslo_concurrency.lockutils [req-dbac63bf-844a-468b-ac4d-4c940c3664d2 req-d2a3963e-1e49-463e-9bde-ab096fba8d4d service nova] Lock "021c3287-5887-457e-9b3a-233308fb9b23-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1447.749788] env[62519]: DEBUG nova.compute.manager [req-dbac63bf-844a-468b-ac4d-4c940c3664d2 req-d2a3963e-1e49-463e-9bde-ab096fba8d4d service nova] [instance: 021c3287-5887-457e-9b3a-233308fb9b23] No 
waiting events found dispatching network-vif-plugged-60084240-cc60-4e07-9cae-3f7d36559e40 {{(pid=62519) pop_instance_event /opt/stack/nova/nova/compute/manager.py:323}} [ 1447.749788] env[62519]: WARNING nova.compute.manager [req-dbac63bf-844a-468b-ac4d-4c940c3664d2 req-d2a3963e-1e49-463e-9bde-ab096fba8d4d service nova] [instance: 021c3287-5887-457e-9b3a-233308fb9b23] Received unexpected event network-vif-plugged-60084240-cc60-4e07-9cae-3f7d36559e40 for instance with vm_state building and task_state spawning. [ 1447.870884] env[62519]: DEBUG oslo_vmware.api [None req-2cdb3cee-efd6-4022-b37b-5545123251f0 tempest-ServerShowV247Test-1912217433 tempest-ServerShowV247Test-1912217433-project-member] Task: {'id': task-1802078, 'name': ReconfigVM_Task, 'duration_secs': 0.579257} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1447.870884] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-2cdb3cee-efd6-4022-b37b-5545123251f0 tempest-ServerShowV247Test-1912217433 tempest-ServerShowV247Test-1912217433-project-member] [instance: 49221ea3-d457-4cf5-97a9-9ae74c4e86fb] Reconfigured VM instance instance-00000013 to attach disk [datastore1] 49221ea3-d457-4cf5-97a9-9ae74c4e86fb/49221ea3-d457-4cf5-97a9-9ae74c4e86fb.vmdk or device None with type sparse {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1447.871016] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-dc70eaeb-a79f-4549-8350-3e058b9fc183 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1447.877943] env[62519]: DEBUG oslo_vmware.api [None req-2cdb3cee-efd6-4022-b37b-5545123251f0 tempest-ServerShowV247Test-1912217433 tempest-ServerShowV247Test-1912217433-project-member] Waiting for the task: (returnval){ [ 1447.877943] env[62519]: value = "task-1802080" [ 1447.877943] env[62519]: _type = "Task" [ 1447.877943] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1447.886163] env[62519]: DEBUG oslo_vmware.api [None req-2cdb3cee-efd6-4022-b37b-5545123251f0 tempest-ServerShowV247Test-1912217433 tempest-ServerShowV247Test-1912217433-project-member] Task: {'id': task-1802080, 'name': Rename_Task} progress is 0%. 
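The WARNING about an "unexpected event network-vif-plugged" is the compute manager's per-instance event registry at work: Neutron reports the port as plugged before the spawn path has registered a waiter for it, so the event is popped with nobody waiting and is merely logged. A toy version of that pop-or-warn behaviour, using a plain dict and threading.Event; class and method names are illustrative, not Nova's.

    import threading

    class InstanceEvents:
        """Toy per-instance event registry: spawn registers a waiter, Neutron notifies."""
        def __init__(self):
            self._lock = threading.Lock()
            self._waiters = {}  # (instance_uuid, event_name) -> threading.Event

        def prepare(self, instance_uuid, event_name):
            ev = threading.Event()
            with self._lock:
                self._waiters[(instance_uuid, event_name)] = ev
            return ev

        def pop(self, instance_uuid, event_name):
            with self._lock:
                ev = self._waiters.pop((instance_uuid, event_name), None)
            if ev is None:
                print(f"WARNING: received unexpected event {event_name} for {instance_uuid}")
            else:
                ev.set()

    events = InstanceEvents()
    # Event arrives before anyone registered a waiter -> warning, as in the log.
    events.pop("021c3287-5887-457e-9b3a-233308fb9b23",
               "network-vif-plugged-60084240-cc60-4e07-9cae-3f7d36559e40")
    # Normal path: register first, then the notification releases the waiter.
    waiter = events.prepare("021c3287-5887-457e-9b3a-233308fb9b23", "network-vif-plugged")
    events.pop("021c3287-5887-457e-9b3a-233308fb9b23", "network-vif-plugged")
    waiter.wait(timeout=1)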
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1447.909222] env[62519]: DEBUG oslo_concurrency.lockutils [None req-f54a75ce-3152-4fde-8da2-c76420839723 tempest-InstanceActionsV221TestJSON-1566764076 tempest-InstanceActionsV221TestJSON-1566764076-project-member] Lock "c612a1be-fb39-416d-a9d2-d206582e5aeb" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 45.903s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1447.910866] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2b9c37cf-110c-4ed1-b062-9198747e913f tempest-InstanceActionsV221TestJSON-1566764076 tempest-InstanceActionsV221TestJSON-1566764076-project-member] Lock "c612a1be-fb39-416d-a9d2-d206582e5aeb" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.352s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1447.912646] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2b9c37cf-110c-4ed1-b062-9198747e913f tempest-InstanceActionsV221TestJSON-1566764076 tempest-InstanceActionsV221TestJSON-1566764076-project-member] Acquiring lock "c612a1be-fb39-416d-a9d2-d206582e5aeb-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1447.912646] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2b9c37cf-110c-4ed1-b062-9198747e913f tempest-InstanceActionsV221TestJSON-1566764076 tempest-InstanceActionsV221TestJSON-1566764076-project-member] Lock "c612a1be-fb39-416d-a9d2-d206582e5aeb-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1447.912646] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2b9c37cf-110c-4ed1-b062-9198747e913f tempest-InstanceActionsV221TestJSON-1566764076 tempest-InstanceActionsV221TestJSON-1566764076-project-member] Lock "c612a1be-fb39-416d-a9d2-d206582e5aeb-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1447.914894] env[62519]: INFO nova.compute.manager [None req-2b9c37cf-110c-4ed1-b062-9198747e913f tempest-InstanceActionsV221TestJSON-1566764076 tempest-InstanceActionsV221TestJSON-1566764076-project-member] [instance: c612a1be-fb39-416d-a9d2-d206582e5aeb] Terminating instance [ 1447.930853] env[62519]: DEBUG oslo_vmware.api [None req-fcf29d77-c706-4a19-9381-3a93a4c3aaf6 tempest-ServerShowV247Test-1912217433 tempest-ServerShowV247Test-1912217433-project-member] Task: {'id': task-1802079, 'name': PowerOnVM_Task, 'duration_secs': 0.80494} completed successfully. 
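The Acquiring/acquired/released records with "waited X s / held Y s" are emitted by oslo.concurrency's lockutils, which serializes work per lock name: the instance UUID for build/terminate, "<uuid>-events" for the event registry, and "compute_resources" for the resource tracker (hence the 28-second wait logged above while another build held it). A minimal sketch of the two usual forms, assuming oslo.concurrency is installed; the lock names mirror the ones in the log and the function bodies are placeholders.

    from oslo_concurrency import lockutils

    INSTANCE = "c612a1be-fb39-416d-a9d2-d206582e5aeb"

    @lockutils.synchronized("compute_resources")
    def claim_resources():
        # Serialized with every other claim/usage update on this host.
        print("claiming resources")

    def terminate_instance():
        # Explicit context-manager form, keyed on the instance UUID.
        with lockutils.lock(INSTANCE):
            with lockutils.lock(f"{INSTANCE}-events"):
                print("clearing pending events")
            print("terminating instance")

    claim_resources()
    terminate_instance()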
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1447.931561] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-fcf29d77-c706-4a19-9381-3a93a4c3aaf6 tempest-ServerShowV247Test-1912217433 tempest-ServerShowV247Test-1912217433-project-member] [instance: 42497ab5-cce9-4614-a6d1-dffbf6764d7b] Powered on the VM {{(pid=62519) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1447.931827] env[62519]: INFO nova.compute.manager [None req-fcf29d77-c706-4a19-9381-3a93a4c3aaf6 tempest-ServerShowV247Test-1912217433 tempest-ServerShowV247Test-1912217433-project-member] [instance: 42497ab5-cce9-4614-a6d1-dffbf6764d7b] Took 6.52 seconds to spawn the instance on the hypervisor. [ 1447.933126] env[62519]: DEBUG nova.compute.manager [None req-fcf29d77-c706-4a19-9381-3a93a4c3aaf6 tempest-ServerShowV247Test-1912217433 tempest-ServerShowV247Test-1912217433-project-member] [instance: 42497ab5-cce9-4614-a6d1-dffbf6764d7b] Checking state {{(pid=62519) _get_power_state /opt/stack/nova/nova/compute/manager.py:1799}} [ 1447.933996] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68ce13c2-21a0-4efc-a819-962d2aee0198 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1447.991359] env[62519]: DEBUG oslo_concurrency.lockutils [None req-070141c4-76cd-4253-8f36-904e93010092 tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] Acquiring lock "refresh_cache-021c3287-5887-457e-9b3a-233308fb9b23" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1447.991359] env[62519]: DEBUG oslo_concurrency.lockutils [None req-070141c4-76cd-4253-8f36-904e93010092 tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] Acquired lock "refresh_cache-021c3287-5887-457e-9b3a-233308fb9b23" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1447.991359] env[62519]: DEBUG nova.network.neutron [None req-070141c4-76cd-4253-8f36-904e93010092 tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] [instance: 021c3287-5887-457e-9b3a-233308fb9b23] Building network info cache for instance {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1448.389752] env[62519]: DEBUG oslo_vmware.api [None req-2cdb3cee-efd6-4022-b37b-5545123251f0 tempest-ServerShowV247Test-1912217433 tempest-ServerShowV247Test-1912217433-project-member] Task: {'id': task-1802080, 'name': Rename_Task, 'duration_secs': 0.165578} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1448.393531] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-2cdb3cee-efd6-4022-b37b-5545123251f0 tempest-ServerShowV247Test-1912217433 tempest-ServerShowV247Test-1912217433-project-member] [instance: 49221ea3-d457-4cf5-97a9-9ae74c4e86fb] Powering on the VM {{(pid=62519) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1448.394901] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ec6441a5-6ddf-47b7-b39a-397e0e806cf7 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1448.407825] env[62519]: DEBUG oslo_vmware.api [None req-2cdb3cee-efd6-4022-b37b-5545123251f0 tempest-ServerShowV247Test-1912217433 tempest-ServerShowV247Test-1912217433-project-member] Waiting for the task: (returnval){ [ 1448.407825] env[62519]: value = "task-1802081" [ 1448.407825] env[62519]: _type = "Task" [ 1448.407825] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1448.418046] env[62519]: DEBUG nova.compute.manager [None req-7fddec09-f5a6-4eb5-bfe2-235b0e99801c tempest-ServerShowV254Test-1623148250 tempest-ServerShowV254Test-1623148250-project-member] [instance: 1462d213-3f9a-4c60-8056-0b68f20a4939] Starting instance... {{(pid=62519) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2441}} [ 1448.421013] env[62519]: DEBUG oslo_vmware.api [None req-2cdb3cee-efd6-4022-b37b-5545123251f0 tempest-ServerShowV247Test-1912217433 tempest-ServerShowV247Test-1912217433-project-member] Task: {'id': task-1802081, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1448.425205] env[62519]: DEBUG nova.compute.manager [None req-2b9c37cf-110c-4ed1-b062-9198747e913f tempest-InstanceActionsV221TestJSON-1566764076 tempest-InstanceActionsV221TestJSON-1566764076-project-member] [instance: c612a1be-fb39-416d-a9d2-d206582e5aeb] Start destroying the instance on the hypervisor. 
{{(pid=62519) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3166}} [ 1448.425581] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-2b9c37cf-110c-4ed1-b062-9198747e913f tempest-InstanceActionsV221TestJSON-1566764076 tempest-InstanceActionsV221TestJSON-1566764076-project-member] [instance: c612a1be-fb39-416d-a9d2-d206582e5aeb] Destroying instance {{(pid=62519) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1448.426774] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-395be7f9-cdd6-456b-9533-570b516017c5 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1448.435456] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-2b9c37cf-110c-4ed1-b062-9198747e913f tempest-InstanceActionsV221TestJSON-1566764076 tempest-InstanceActionsV221TestJSON-1566764076-project-member] [instance: c612a1be-fb39-416d-a9d2-d206582e5aeb] Powering off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1448.436028] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-09ea41f2-b435-4e94-a5f6-66fd05b36201 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1448.449022] env[62519]: DEBUG oslo_vmware.api [None req-2b9c37cf-110c-4ed1-b062-9198747e913f tempest-InstanceActionsV221TestJSON-1566764076 tempest-InstanceActionsV221TestJSON-1566764076-project-member] Waiting for the task: (returnval){ [ 1448.449022] env[62519]: value = "task-1802082" [ 1448.449022] env[62519]: _type = "Task" [ 1448.449022] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1448.463099] env[62519]: DEBUG oslo_vmware.api [None req-2b9c37cf-110c-4ed1-b062-9198747e913f tempest-InstanceActionsV221TestJSON-1566764076 tempest-InstanceActionsV221TestJSON-1566764076-project-member] Task: {'id': task-1802082, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1448.463737] env[62519]: INFO nova.compute.manager [None req-fcf29d77-c706-4a19-9381-3a93a4c3aaf6 tempest-ServerShowV247Test-1912217433 tempest-ServerShowV247Test-1912217433-project-member] [instance: 42497ab5-cce9-4614-a6d1-dffbf6764d7b] Took 33.91 seconds to build instance. [ 1448.525839] env[62519]: DEBUG nova.network.neutron [None req-070141c4-76cd-4253-8f36-904e93010092 tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] [instance: 021c3287-5887-457e-9b3a-233308fb9b23] Instance cache missing network info. 
{{(pid=62519) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1448.656251] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7cf0b97-f138-411a-83df-fc9c35263ac4 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1448.666111] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f992024-bbb6-4454-bffd-75de1da1706f {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1448.701822] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65f8f15a-3829-4759-8698-42cdc72f396c {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1448.710747] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6be9dc2-bc1e-4ca9-9864-6c68a7f34681 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1448.724803] env[62519]: DEBUG nova.compute.provider_tree [None req-070141c4-76cd-4253-8f36-904e93010092 tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] Inventory has not changed in ProviderTree for provider: f8ca0d98-9158-4b85-ae0e-b106f966dd44 {{(pid=62519) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1448.918642] env[62519]: DEBUG oslo_vmware.api [None req-2cdb3cee-efd6-4022-b37b-5545123251f0 tempest-ServerShowV247Test-1912217433 tempest-ServerShowV247Test-1912217433-project-member] Task: {'id': task-1802081, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1448.919672] env[62519]: DEBUG nova.network.neutron [None req-070141c4-76cd-4253-8f36-904e93010092 tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] [instance: 021c3287-5887-457e-9b3a-233308fb9b23] Updating instance_info_cache with network_info: [{"id": "60084240-cc60-4e07-9cae-3f7d36559e40", "address": "fa:16:3e:af:49:51", "network": {"id": "61c38727-028c-44d6-ad51-8b8fe4c3b789", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-1533919301-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bde061b2dd1945bdae6c789325741ade", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5c7821ea-f92f-4f06-a4cb-05e1186a9d22", "external-id": "nsx-vlan-transportzone-69", "segmentation_id": 69, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap60084240-cc", "ovs_interfaceid": "60084240-cc60-4e07-9cae-3f7d36559e40", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1448.940494] env[62519]: DEBUG oslo_concurrency.lockutils [None req-7fddec09-f5a6-4eb5-bfe2-235b0e99801c tempest-ServerShowV254Test-1623148250 tempest-ServerShowV254Test-1623148250-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1448.958183] env[62519]: DEBUG oslo_vmware.api [None req-2b9c37cf-110c-4ed1-b062-9198747e913f tempest-InstanceActionsV221TestJSON-1566764076 tempest-InstanceActionsV221TestJSON-1566764076-project-member] Task: {'id': task-1802082, 'name': PowerOffVM_Task, 'duration_secs': 0.272687} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1448.958183] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-2b9c37cf-110c-4ed1-b062-9198747e913f tempest-InstanceActionsV221TestJSON-1566764076 tempest-InstanceActionsV221TestJSON-1566764076-project-member] [instance: c612a1be-fb39-416d-a9d2-d206582e5aeb] Powered off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1448.958183] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-2b9c37cf-110c-4ed1-b062-9198747e913f tempest-InstanceActionsV221TestJSON-1566764076 tempest-InstanceActionsV221TestJSON-1566764076-project-member] [instance: c612a1be-fb39-416d-a9d2-d206582e5aeb] Unregistering the VM {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1448.958183] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-6244e6a1-aee1-4371-b4b1-17f92b52e1df {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1449.043163] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-2b9c37cf-110c-4ed1-b062-9198747e913f tempest-InstanceActionsV221TestJSON-1566764076 tempest-InstanceActionsV221TestJSON-1566764076-project-member] [instance: c612a1be-fb39-416d-a9d2-d206582e5aeb] Unregistered the VM {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1449.043271] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-2b9c37cf-110c-4ed1-b062-9198747e913f tempest-InstanceActionsV221TestJSON-1566764076 tempest-InstanceActionsV221TestJSON-1566764076-project-member] [instance: c612a1be-fb39-416d-a9d2-d206582e5aeb] Deleting contents of the VM from datastore datastore1 {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1449.043517] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-2b9c37cf-110c-4ed1-b062-9198747e913f tempest-InstanceActionsV221TestJSON-1566764076 tempest-InstanceActionsV221TestJSON-1566764076-project-member] Deleting the datastore file [datastore1] c612a1be-fb39-416d-a9d2-d206582e5aeb {{(pid=62519) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1449.043784] env[62519]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-fd757c22-99c6-4a80-9ec6-f51b91f58ae8 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1449.050016] env[62519]: DEBUG oslo_vmware.api [None req-2b9c37cf-110c-4ed1-b062-9198747e913f tempest-InstanceActionsV221TestJSON-1566764076 tempest-InstanceActionsV221TestJSON-1566764076-project-member] Waiting for the task: (returnval){ [ 1449.050016] env[62519]: value = "task-1802084" [ 1449.050016] env[62519]: _type = "Task" [ 1449.050016] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1449.058734] env[62519]: DEBUG oslo_vmware.api [None req-2b9c37cf-110c-4ed1-b062-9198747e913f tempest-InstanceActionsV221TestJSON-1566764076 tempest-InstanceActionsV221TestJSON-1566764076-project-member] Task: {'id': task-1802084, 'name': DeleteDatastoreFile_Task} progress is 0%. 
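Destroying c612a1be follows the inverse of spawn in the records above: power the VM off, unregister it from the vCenter inventory, then delete the instance's directory from the datastore with a FileManager task. A hypothetical stub showing only that ordering; these are not the driver's method names.

    def destroy_instance(instance_uuid, datastore="datastore1"):
        """Hypothetical teardown order matching the log: power off, unregister, delete files."""
        steps = [
            ("PowerOffVM_Task", f"power off {instance_uuid}"),
            ("UnregisterVM", f"unregister {instance_uuid} from the vCenter inventory"),
            ("DeleteDatastoreFile_Task", f"delete [{datastore}] {instance_uuid}"),
        ]
        for task_name, action in steps:
            print(f"{task_name}: {action}")

    destroy_instance("c612a1be-fb39-416d-a9d2-d206582e5aeb")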
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1449.228737] env[62519]: DEBUG nova.scheduler.client.report [None req-070141c4-76cd-4253-8f36-904e93010092 tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] Inventory has not changed for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1449.427054] env[62519]: DEBUG oslo_concurrency.lockutils [None req-070141c4-76cd-4253-8f36-904e93010092 tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] Releasing lock "refresh_cache-021c3287-5887-457e-9b3a-233308fb9b23" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1449.427345] env[62519]: DEBUG nova.compute.manager [None req-070141c4-76cd-4253-8f36-904e93010092 tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] [instance: 021c3287-5887-457e-9b3a-233308fb9b23] Instance network_info: |[{"id": "60084240-cc60-4e07-9cae-3f7d36559e40", "address": "fa:16:3e:af:49:51", "network": {"id": "61c38727-028c-44d6-ad51-8b8fe4c3b789", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-1533919301-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bde061b2dd1945bdae6c789325741ade", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5c7821ea-f92f-4f06-a4cb-05e1186a9d22", "external-id": "nsx-vlan-transportzone-69", "segmentation_id": 69, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap60084240-cc", "ovs_interfaceid": "60084240-cc60-4e07-9cae-3f7d36559e40", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62519) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2004}} [ 1449.428079] env[62519]: DEBUG oslo_vmware.api [None req-2cdb3cee-efd6-4022-b37b-5545123251f0 tempest-ServerShowV247Test-1912217433 tempest-ServerShowV247Test-1912217433-project-member] Task: {'id': task-1802081, 'name': PowerOnVM_Task, 'duration_secs': 0.813799} completed successfully. 
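The "Inventory has not changed" records carry the resource provider's full inventory; the usable capacity per resource class is (total - reserved) * allocation_ratio, which is what the scheduler's allocations are compared against, while max_unit caps any single allocation. A quick computation over the exact inventory dict from the log:

    inventory = {
        "VCPU":      {"total": 48,     "reserved": 0,   "allocation_ratio": 4.0, "max_unit": 16},
        "MEMORY_MB": {"total": 196590, "reserved": 512, "allocation_ratio": 1.0, "max_unit": 65530},
        "DISK_GB":   {"total": 400,    "reserved": 0,   "allocation_ratio": 1.0, "max_unit": 158},
    }

    for rc, inv in inventory.items():
        capacity = (inv["total"] - inv["reserved"]) * inv["allocation_ratio"]
        print(f"{rc}: capacity={capacity:.0f}, max per allocation={inv['max_unit']}")
    # VCPU: capacity=192, MEMORY_MB: capacity=196078, DISK_GB: capacity=400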
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1449.429480] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-070141c4-76cd-4253-8f36-904e93010092 tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] [instance: 021c3287-5887-457e-9b3a-233308fb9b23] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:af:49:51', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '5c7821ea-f92f-4f06-a4cb-05e1186a9d22', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '60084240-cc60-4e07-9cae-3f7d36559e40', 'vif_model': 'vmxnet3'}] {{(pid=62519) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1449.443412] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-070141c4-76cd-4253-8f36-904e93010092 tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] Creating folder: Project (bde061b2dd1945bdae6c789325741ade). Parent ref: group-v373567. {{(pid=62519) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1449.443412] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-2cdb3cee-efd6-4022-b37b-5545123251f0 tempest-ServerShowV247Test-1912217433 tempest-ServerShowV247Test-1912217433-project-member] [instance: 49221ea3-d457-4cf5-97a9-9ae74c4e86fb] Powered on the VM {{(pid=62519) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1449.443835] env[62519]: INFO nova.compute.manager [None req-2cdb3cee-efd6-4022-b37b-5545123251f0 tempest-ServerShowV247Test-1912217433 tempest-ServerShowV247Test-1912217433-project-member] [instance: 49221ea3-d457-4cf5-97a9-9ae74c4e86fb] Took 5.37 seconds to spawn the instance on the hypervisor. [ 1449.444271] env[62519]: DEBUG nova.compute.manager [None req-2cdb3cee-efd6-4022-b37b-5545123251f0 tempest-ServerShowV247Test-1912217433 tempest-ServerShowV247Test-1912217433-project-member] [instance: 49221ea3-d457-4cf5-97a9-9ae74c4e86fb] Checking state {{(pid=62519) _get_power_state /opt/stack/nova/nova/compute/manager.py:1799}} [ 1449.444579] env[62519]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-66a125a6-a974-4a74-b4ff-88577537e46a {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1449.447011] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3942b890-52fa-4101-84e5-83fc9ca5b8b6 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1449.460373] env[62519]: INFO nova.virt.vmwareapi.vm_util [None req-070141c4-76cd-4253-8f36-904e93010092 tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] Created folder: Project (bde061b2dd1945bdae6c789325741ade) in parent group-v373567. [ 1449.460616] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-070141c4-76cd-4253-8f36-904e93010092 tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] Creating folder: Instances. Parent ref: group-v373625. 
{{(pid=62519) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1449.460777] env[62519]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-60d265b8-414a-40b4-a513-7278aa3eb24a {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1449.470055] env[62519]: INFO nova.virt.vmwareapi.vm_util [None req-070141c4-76cd-4253-8f36-904e93010092 tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] Created folder: Instances in parent group-v373625. [ 1449.470419] env[62519]: DEBUG oslo.service.loopingcall [None req-070141c4-76cd-4253-8f36-904e93010092 tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62519) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1449.470781] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 021c3287-5887-457e-9b3a-233308fb9b23] Creating VM on the ESX host {{(pid=62519) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1449.471040] env[62519]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-48170205-555f-4c21-b515-94b025f9d4ed {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1449.492251] env[62519]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1449.492251] env[62519]: value = "task-1802087" [ 1449.492251] env[62519]: _type = "Task" [ 1449.492251] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1449.500967] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1802087, 'name': CreateVM_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1449.560859] env[62519]: DEBUG oslo_vmware.api [None req-2b9c37cf-110c-4ed1-b062-9198747e913f tempest-InstanceActionsV221TestJSON-1566764076 tempest-InstanceActionsV221TestJSON-1566764076-project-member] Task: {'id': task-1802084, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.147031} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1449.561160] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-2b9c37cf-110c-4ed1-b062-9198747e913f tempest-InstanceActionsV221TestJSON-1566764076 tempest-InstanceActionsV221TestJSON-1566764076-project-member] Deleted the datastore file {{(pid=62519) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1449.561349] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-2b9c37cf-110c-4ed1-b062-9198747e913f tempest-InstanceActionsV221TestJSON-1566764076 tempest-InstanceActionsV221TestJSON-1566764076-project-member] [instance: c612a1be-fb39-416d-a9d2-d206582e5aeb] Deleted contents of the VM from datastore datastore1 {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1449.561529] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-2b9c37cf-110c-4ed1-b062-9198747e913f tempest-InstanceActionsV221TestJSON-1566764076 tempest-InstanceActionsV221TestJSON-1566764076-project-member] [instance: c612a1be-fb39-416d-a9d2-d206582e5aeb] Instance destroyed {{(pid=62519) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1449.561689] env[62519]: INFO nova.compute.manager [None req-2b9c37cf-110c-4ed1-b062-9198747e913f tempest-InstanceActionsV221TestJSON-1566764076 tempest-InstanceActionsV221TestJSON-1566764076-project-member] [instance: c612a1be-fb39-416d-a9d2-d206582e5aeb] Took 1.14 seconds to destroy the instance on the hypervisor. [ 1449.561923] env[62519]: DEBUG oslo.service.loopingcall [None req-2b9c37cf-110c-4ed1-b062-9198747e913f tempest-InstanceActionsV221TestJSON-1566764076 tempest-InstanceActionsV221TestJSON-1566764076-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62519) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1449.562122] env[62519]: DEBUG nova.compute.manager [-] [instance: c612a1be-fb39-416d-a9d2-d206582e5aeb] Deallocating network for instance {{(pid=62519) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2297}} [ 1449.562222] env[62519]: DEBUG nova.network.neutron [-] [instance: c612a1be-fb39-416d-a9d2-d206582e5aeb] deallocate_for_instance() {{(pid=62519) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1449.736819] env[62519]: DEBUG oslo_concurrency.lockutils [None req-070141c4-76cd-4253-8f36-904e93010092 tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.733s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1449.737453] env[62519]: DEBUG nova.compute.manager [None req-070141c4-76cd-4253-8f36-904e93010092 tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] [instance: 56790738-4759-468a-9f43-f9c2bc2de23a] Start building networks asynchronously for instance. 
{{(pid=62519) _build_resources /opt/stack/nova/nova/compute/manager.py:2838}} [ 1449.740138] env[62519]: DEBUG oslo_concurrency.lockutils [None req-070141c4-76cd-4253-8f36-904e93010092 tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 30.853s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1449.741644] env[62519]: INFO nova.compute.claims [None req-070141c4-76cd-4253-8f36-904e93010092 tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] [instance: 3aab3adb-b28b-45dd-880f-b1cfbaeeed0c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1449.777118] env[62519]: DEBUG nova.compute.manager [req-29f17e2f-6482-4674-98bb-2fe008a6e4cc req-48cb40ba-7092-454f-9965-05e5e8b4f63e service nova] [instance: 021c3287-5887-457e-9b3a-233308fb9b23] Received event network-changed-60084240-cc60-4e07-9cae-3f7d36559e40 {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1449.777305] env[62519]: DEBUG nova.compute.manager [req-29f17e2f-6482-4674-98bb-2fe008a6e4cc req-48cb40ba-7092-454f-9965-05e5e8b4f63e service nova] [instance: 021c3287-5887-457e-9b3a-233308fb9b23] Refreshing instance network info cache due to event network-changed-60084240-cc60-4e07-9cae-3f7d36559e40. {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11628}} [ 1449.777604] env[62519]: DEBUG oslo_concurrency.lockutils [req-29f17e2f-6482-4674-98bb-2fe008a6e4cc req-48cb40ba-7092-454f-9965-05e5e8b4f63e service nova] Acquiring lock "refresh_cache-021c3287-5887-457e-9b3a-233308fb9b23" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1449.777715] env[62519]: DEBUG oslo_concurrency.lockutils [req-29f17e2f-6482-4674-98bb-2fe008a6e4cc req-48cb40ba-7092-454f-9965-05e5e8b4f63e service nova] Acquired lock "refresh_cache-021c3287-5887-457e-9b3a-233308fb9b23" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1449.777809] env[62519]: DEBUG nova.network.neutron [req-29f17e2f-6482-4674-98bb-2fe008a6e4cc req-48cb40ba-7092-454f-9965-05e5e8b4f63e service nova] [instance: 021c3287-5887-457e-9b3a-233308fb9b23] Refreshing network info cache for port 60084240-cc60-4e07-9cae-3f7d36559e40 {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1449.968599] env[62519]: INFO nova.compute.manager [None req-2cdb3cee-efd6-4022-b37b-5545123251f0 tempest-ServerShowV247Test-1912217433 tempest-ServerShowV247Test-1912217433-project-member] [instance: 49221ea3-d457-4cf5-97a9-9ae74c4e86fb] Took 33.56 seconds to build instance. [ 1449.973526] env[62519]: DEBUG oslo_concurrency.lockutils [None req-fcf29d77-c706-4a19-9381-3a93a4c3aaf6 tempest-ServerShowV247Test-1912217433 tempest-ServerShowV247Test-1912217433-project-member] Lock "42497ab5-cce9-4614-a6d1-dffbf6764d7b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 44.894s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1450.003203] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1802087, 'name': CreateVM_Task, 'duration_secs': 0.438103} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1450.003560] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 021c3287-5887-457e-9b3a-233308fb9b23] Created VM on the ESX host {{(pid=62519) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1450.004335] env[62519]: DEBUG oslo_concurrency.lockutils [None req-070141c4-76cd-4253-8f36-904e93010092 tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1450.004644] env[62519]: DEBUG oslo_concurrency.lockutils [None req-070141c4-76cd-4253-8f36-904e93010092 tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] Acquired lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1450.005073] env[62519]: DEBUG oslo_concurrency.lockutils [None req-070141c4-76cd-4253-8f36-904e93010092 tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1450.005511] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-af3ed8a9-91bc-4bf4-8183-27329ecd419d {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1450.013334] env[62519]: DEBUG oslo_vmware.api [None req-070141c4-76cd-4253-8f36-904e93010092 tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] Waiting for the task: (returnval){ [ 1450.013334] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]523ceb9d-8017-ac14-36da-40cce5152f52" [ 1450.013334] env[62519]: _type = "Task" [ 1450.013334] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1450.022246] env[62519]: DEBUG oslo_vmware.api [None req-070141c4-76cd-4253-8f36-904e93010092 tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]523ceb9d-8017-ac14-36da-40cce5152f52, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1450.249178] env[62519]: DEBUG nova.compute.utils [None req-070141c4-76cd-4253-8f36-904e93010092 tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] Using /dev/sd instead of None {{(pid=62519) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1450.251967] env[62519]: DEBUG nova.compute.manager [None req-070141c4-76cd-4253-8f36-904e93010092 tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] [instance: 56790738-4759-468a-9f43-f9c2bc2de23a] Allocating IP information in the background. 
{{(pid=62519) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1989}} [ 1450.257069] env[62519]: DEBUG nova.network.neutron [None req-070141c4-76cd-4253-8f36-904e93010092 tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] [instance: 56790738-4759-468a-9f43-f9c2bc2de23a] allocate_for_instance() {{(pid=62519) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1450.372666] env[62519]: DEBUG nova.policy [None req-070141c4-76cd-4253-8f36-904e93010092 tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '05b2585b9472460ab5d30c3451ff31ca', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'bde061b2dd1945bdae6c789325741ade', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62519) authorize /opt/stack/nova/nova/policy.py:192}} [ 1450.476020] env[62519]: DEBUG nova.compute.manager [None req-583dd43c-6768-4d96-8b92-a6ff282828ba tempest-DeleteServersAdminTestJSON-456585889 tempest-DeleteServersAdminTestJSON-456585889-project-member] [instance: 40c7a9b8-d541-464a-ba87-76cfc183ae31] Starting instance... {{(pid=62519) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2441}} [ 1450.524794] env[62519]: DEBUG oslo_vmware.api [None req-070141c4-76cd-4253-8f36-904e93010092 tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]523ceb9d-8017-ac14-36da-40cce5152f52, 'name': SearchDatastore_Task, 'duration_secs': 0.016858} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1450.529619] env[62519]: DEBUG oslo_concurrency.lockutils [None req-070141c4-76cd-4253-8f36-904e93010092 tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] Releasing lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1450.529619] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-070141c4-76cd-4253-8f36-904e93010092 tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] [instance: 021c3287-5887-457e-9b3a-233308fb9b23] Processing image 15793716-f1d9-4a86-9030-717adf498693 {{(pid=62519) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1450.529619] env[62519]: DEBUG oslo_concurrency.lockutils [None req-070141c4-76cd-4253-8f36-904e93010092 tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1450.529619] env[62519]: DEBUG oslo_concurrency.lockutils [None req-070141c4-76cd-4253-8f36-904e93010092 tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] Acquired lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1450.530640] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-070141c4-76cd-4253-8f36-904e93010092 tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62519) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1450.530640] env[62519]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-06e3a421-6eca-4c2c-96cb-c1d563410156 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1450.539200] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-070141c4-76cd-4253-8f36-904e93010092 tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62519) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1450.540027] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-070141c4-76cd-4253-8f36-904e93010092 tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62519) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1450.540567] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2e46e3ee-2591-4482-a7dd-831510fbbab1 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1450.549695] env[62519]: DEBUG oslo_vmware.api [None req-070141c4-76cd-4253-8f36-904e93010092 tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] Waiting for the task: (returnval){ [ 1450.549695] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]5206f415-e0ea-e1e0-46b7-c42e49cad3fc" [ 1450.549695] env[62519]: _type = "Task" [ 1450.549695] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1450.562258] env[62519]: DEBUG oslo_vmware.api [None req-070141c4-76cd-4253-8f36-904e93010092 tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]5206f415-e0ea-e1e0-46b7-c42e49cad3fc, 'name': SearchDatastore_Task, 'duration_secs': 0.009202} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1450.564900] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-676ff3c8-c12c-491b-8296-16c8e5a5b9a7 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1450.570018] env[62519]: DEBUG oslo_vmware.api [None req-070141c4-76cd-4253-8f36-904e93010092 tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] Waiting for the task: (returnval){ [ 1450.570018] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]5282573b-8feb-01b5-5b84-73628e2ec43a" [ 1450.570018] env[62519]: _type = "Task" [ 1450.570018] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1450.575216] env[62519]: DEBUG nova.network.neutron [-] [instance: c612a1be-fb39-416d-a9d2-d206582e5aeb] Updating instance_info_cache with network_info: [] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1450.583511] env[62519]: DEBUG oslo_vmware.api [None req-070141c4-76cd-4253-8f36-904e93010092 tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]5282573b-8feb-01b5-5b84-73628e2ec43a, 'name': SearchDatastore_Task, 'duration_secs': 0.010312} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1450.584838] env[62519]: DEBUG oslo_concurrency.lockutils [None req-070141c4-76cd-4253-8f36-904e93010092 tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] Releasing lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1450.585165] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-070141c4-76cd-4253-8f36-904e93010092 tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk to [datastore1] 021c3287-5887-457e-9b3a-233308fb9b23/021c3287-5887-457e-9b3a-233308fb9b23.vmdk {{(pid=62519) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1450.585493] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-96342ab4-b615-429d-92b5-4b34d361e387 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1450.593640] env[62519]: DEBUG oslo_vmware.api [None req-070141c4-76cd-4253-8f36-904e93010092 tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] Waiting for the task: (returnval){ [ 1450.593640] env[62519]: value = "task-1802088" [ 1450.593640] env[62519]: _type = "Task" [ 1450.593640] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1450.602938] env[62519]: DEBUG oslo_vmware.api [None req-070141c4-76cd-4253-8f36-904e93010092 tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] Task: {'id': task-1802088, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1450.753376] env[62519]: DEBUG nova.compute.manager [None req-070141c4-76cd-4253-8f36-904e93010092 tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] [instance: 56790738-4759-468a-9f43-f9c2bc2de23a] Start building block device mappings for instance. {{(pid=62519) _build_resources /opt/stack/nova/nova/compute/manager.py:2873}} [ 1450.827121] env[62519]: DEBUG nova.network.neutron [req-29f17e2f-6482-4674-98bb-2fe008a6e4cc req-48cb40ba-7092-454f-9965-05e5e8b4f63e service nova] [instance: 021c3287-5887-457e-9b3a-233308fb9b23] Updated VIF entry in instance network info cache for port 60084240-cc60-4e07-9cae-3f7d36559e40. 
{{(pid=62519) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1450.827619] env[62519]: DEBUG nova.network.neutron [req-29f17e2f-6482-4674-98bb-2fe008a6e4cc req-48cb40ba-7092-454f-9965-05e5e8b4f63e service nova] [instance: 021c3287-5887-457e-9b3a-233308fb9b23] Updating instance_info_cache with network_info: [{"id": "60084240-cc60-4e07-9cae-3f7d36559e40", "address": "fa:16:3e:af:49:51", "network": {"id": "61c38727-028c-44d6-ad51-8b8fe4c3b789", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-1533919301-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bde061b2dd1945bdae6c789325741ade", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5c7821ea-f92f-4f06-a4cb-05e1186a9d22", "external-id": "nsx-vlan-transportzone-69", "segmentation_id": 69, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap60084240-cc", "ovs_interfaceid": "60084240-cc60-4e07-9cae-3f7d36559e40", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1450.834193] env[62519]: INFO nova.compute.manager [None req-d99a69b3-9d56-41a5-a9b3-088dd2dc5654 tempest-ServerShowV247Test-1912217433 tempest-ServerShowV247Test-1912217433-project-member] [instance: 49221ea3-d457-4cf5-97a9-9ae74c4e86fb] Rebuilding instance [ 1450.910760] env[62519]: DEBUG nova.compute.manager [None req-d99a69b3-9d56-41a5-a9b3-088dd2dc5654 tempest-ServerShowV247Test-1912217433 tempest-ServerShowV247Test-1912217433-project-member] [instance: 49221ea3-d457-4cf5-97a9-9ae74c4e86fb] Checking state {{(pid=62519) _get_power_state /opt/stack/nova/nova/compute/manager.py:1799}} [ 1450.910760] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d29b2aa4-afab-4016-b3fe-11106e0ff1e1 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1451.009636] env[62519]: DEBUG oslo_concurrency.lockutils [None req-583dd43c-6768-4d96-8b92-a6ff282828ba tempest-DeleteServersAdminTestJSON-456585889 tempest-DeleteServersAdminTestJSON-456585889-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1451.088208] env[62519]: INFO nova.compute.manager [-] [instance: c612a1be-fb39-416d-a9d2-d206582e5aeb] Took 1.52 seconds to deallocate network for instance. [ 1451.107936] env[62519]: DEBUG oslo_vmware.api [None req-070141c4-76cd-4253-8f36-904e93010092 tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] Task: {'id': task-1802088, 'name': CopyVirtualDisk_Task} progress is 77%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1451.109152] env[62519]: DEBUG nova.network.neutron [None req-070141c4-76cd-4253-8f36-904e93010092 tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] [instance: 56790738-4759-468a-9f43-f9c2bc2de23a] Successfully created port: a8a61a1e-f910-4564-b6af-8507d8141e45 {{(pid=62519) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1451.330372] env[62519]: DEBUG oslo_concurrency.lockutils [req-29f17e2f-6482-4674-98bb-2fe008a6e4cc req-48cb40ba-7092-454f-9965-05e5e8b4f63e service nova] Releasing lock "refresh_cache-021c3287-5887-457e-9b3a-233308fb9b23" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1451.415594] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5b5bafa-f524-4d46-b6bc-90843d9aa86d {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1451.422067] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48ae359c-fa39-4285-b1bb-ffe29ac0bf2e {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1451.461335] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-d99a69b3-9d56-41a5-a9b3-088dd2dc5654 tempest-ServerShowV247Test-1912217433 tempest-ServerShowV247Test-1912217433-project-member] [instance: 49221ea3-d457-4cf5-97a9-9ae74c4e86fb] Powering off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1451.462132] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cef63463-0e47-4dd7-b8e3-b526b36260c0 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1451.465136] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-747b3795-ad19-486b-abf7-a02615ddd50f {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1451.478030] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18efed01-e52b-41d1-915d-102c9e67cbe1 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1451.479945] env[62519]: DEBUG oslo_vmware.api [None req-d99a69b3-9d56-41a5-a9b3-088dd2dc5654 tempest-ServerShowV247Test-1912217433 tempest-ServerShowV247Test-1912217433-project-member] Waiting for the task: (returnval){ [ 1451.479945] env[62519]: value = "task-1802089" [ 1451.479945] env[62519]: _type = "Task" [ 1451.479945] env[62519]: } to complete. 
{{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1451.480574] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2cdb3cee-efd6-4022-b37b-5545123251f0 tempest-ServerShowV247Test-1912217433 tempest-ServerShowV247Test-1912217433-project-member] Lock "49221ea3-d457-4cf5-97a9-9ae74c4e86fb" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 43.156s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1451.499232] env[62519]: DEBUG nova.compute.provider_tree [None req-070141c4-76cd-4253-8f36-904e93010092 tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] Inventory has not changed in ProviderTree for provider: f8ca0d98-9158-4b85-ae0e-b106f966dd44 {{(pid=62519) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1451.505580] env[62519]: DEBUG oslo_vmware.api [None req-d99a69b3-9d56-41a5-a9b3-088dd2dc5654 tempest-ServerShowV247Test-1912217433 tempest-ServerShowV247Test-1912217433-project-member] Task: {'id': task-1802089, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1451.593518] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2b9c37cf-110c-4ed1-b062-9198747e913f tempest-InstanceActionsV221TestJSON-1566764076 tempest-InstanceActionsV221TestJSON-1566764076-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1451.607422] env[62519]: DEBUG oslo_vmware.api [None req-070141c4-76cd-4253-8f36-904e93010092 tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] Task: {'id': task-1802088, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.543719} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1451.607710] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-070141c4-76cd-4253-8f36-904e93010092 tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk to [datastore1] 021c3287-5887-457e-9b3a-233308fb9b23/021c3287-5887-457e-9b3a-233308fb9b23.vmdk {{(pid=62519) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1451.607936] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-070141c4-76cd-4253-8f36-904e93010092 tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] [instance: 021c3287-5887-457e-9b3a-233308fb9b23] Extending root virtual disk to 1048576 {{(pid=62519) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1451.608147] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-2cf105a9-abe7-4111-ba25-881a5e8d6605 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1451.614344] env[62519]: DEBUG oslo_vmware.api [None req-070141c4-76cd-4253-8f36-904e93010092 tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] Waiting for the task: (returnval){ [ 1451.614344] env[62519]: value = "task-1802090" [ 1451.614344] env[62519]: _type = "Task" [ 1451.614344] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1451.623023] env[62519]: DEBUG oslo_vmware.api [None req-070141c4-76cd-4253-8f36-904e93010092 tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] Task: {'id': task-1802090, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1451.768078] env[62519]: DEBUG nova.compute.manager [None req-070141c4-76cd-4253-8f36-904e93010092 tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] [instance: 56790738-4759-468a-9f43-f9c2bc2de23a] Start spawning the instance on the hypervisor. 
{{(pid=62519) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2647}} [ 1451.808599] env[62519]: DEBUG nova.virt.hardware [None req-070141c4-76cd-4253-8f36-904e93010092 tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T07:56:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T07:55:55Z,direct_url=,disk_format='vmdk',id=15793716-f1d9-4a86-9030-717adf498693,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4069337684d348e0a1ab4eb6a1a2b14d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T07:55:56Z,virtual_size=,visibility=), allow threads: False {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1451.808838] env[62519]: DEBUG nova.virt.hardware [None req-070141c4-76cd-4253-8f36-904e93010092 tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] Flavor limits 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1451.808989] env[62519]: DEBUG nova.virt.hardware [None req-070141c4-76cd-4253-8f36-904e93010092 tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] Image limits 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1451.810354] env[62519]: DEBUG nova.virt.hardware [None req-070141c4-76cd-4253-8f36-904e93010092 tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] Flavor pref 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1451.810508] env[62519]: DEBUG nova.virt.hardware [None req-070141c4-76cd-4253-8f36-904e93010092 tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] Image pref 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1451.810655] env[62519]: DEBUG nova.virt.hardware [None req-070141c4-76cd-4253-8f36-904e93010092 tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1451.810858] env[62519]: DEBUG nova.virt.hardware [None req-070141c4-76cd-4253-8f36-904e93010092 tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1451.811085] env[62519]: DEBUG nova.virt.hardware [None req-070141c4-76cd-4253-8f36-904e93010092 tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62519) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} 
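Editor's note: the trace above repeatedly shows the same vCenter task pattern — a task-returning method is invoked ("Invoking FileManager.DeleteDatastoreFile_Task ...", "Invoking Folder.CreateVM_Task ...", "Invoking VirtualDiskManager.CopyVirtualDisk_Task ..."), the returned task handle is passed to oslo_vmware's wait_for_task, and _poll_task emits the "progress is N%" / "completed successfully" entries until the task finishes. Below is a minimal sketch of that invoke-and-wait pattern, modelled on the destroy sequence logged for instance c612a1be-fb39-416d-a9d2-d206582e5aeb (power off, unregister, delete the datastore directory). It is illustrative only and is not Nova's actual vm_util/ds_util code: it assumes an already-created oslo_vmware.api.VMwareAPISession ("session"), and vm_ref, datastore_path and datacenter_ref are placeholder arguments, not values taken from this log.

from oslo_vmware import exceptions as vexc


def destroy_sequence(session, vm_ref, datastore_path, datacenter_ref):
    # Power off the VM; vCenter returns a task object that wait_for_task
    # polls until completion (these polls are the "progress is N%" lines).
    try:
        task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)
        session.wait_for_task(task)
    except vexc.InvalidPowerStateException:
        pass  # VM was already powered off

    # UnregisterVM removes the VM from the vCenter inventory; it does not
    # return a task, so there is nothing to poll.
    session.invoke_api(session.vim, 'UnregisterVM', vm_ref)

    # Delete the instance directory from the datastore, e.g.
    # "[datastore1] c612a1be-fb39-416d-a9d2-d206582e5aeb".
    file_manager = session.vim.service_content.fileManager
    task = session.invoke_api(session.vim, 'DeleteDatastoreFile_Task',
                              file_manager,
                              name=datastore_path,
                              datacenter=datacenter_ref)
    session.wait_for_task(task)

The spawn side of the trace (CreateVM_Task, CopyVirtualDisk_Task, ExtendVirtualDisk_Task, ReconfigVM_Task, Rename_Task, PowerOnVM_Task) follows the same invoke-and-wait shape, which is why each step above is bracketed by a "Waiting for the task: (returnval){ ... }" entry and a corresponding completion line.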
[ 1451.811334] env[62519]: DEBUG nova.virt.hardware [None req-070141c4-76cd-4253-8f36-904e93010092 tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] Got 1 possible topologies {{(pid=62519) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1451.811571] env[62519]: DEBUG nova.virt.hardware [None req-070141c4-76cd-4253-8f36-904e93010092 tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1451.811696] env[62519]: DEBUG nova.virt.hardware [None req-070141c4-76cd-4253-8f36-904e93010092 tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1451.812588] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8df5f88c-62d1-4e23-8257-a5e94411a8de {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1451.821646] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7cc09d2-b0ca-4923-a078-e62fc37f371f {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1451.941641] env[62519]: DEBUG nova.compute.manager [req-5a26616d-ae9a-4871-b38f-5f30c12eeb5b req-01df74b6-0bea-49f2-b795-b5c9a742fd64 service nova] [instance: c612a1be-fb39-416d-a9d2-d206582e5aeb] Received event network-vif-deleted-c5d604f1-fa0e-433c-af5c-5e2116499fe1 {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1451.988328] env[62519]: DEBUG nova.compute.manager [None req-6d590639-68af-4fac-aa10-a83b048f703c tempest-ServersTestMultiNic-797441404 tempest-ServersTestMultiNic-797441404-project-member] [instance: cfefa7c8-4986-4ad0-ac20-8784ee44a737] Starting instance... {{(pid=62519) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2441}} [ 1451.995204] env[62519]: DEBUG oslo_vmware.api [None req-d99a69b3-9d56-41a5-a9b3-088dd2dc5654 tempest-ServerShowV247Test-1912217433 tempest-ServerShowV247Test-1912217433-project-member] Task: {'id': task-1802089, 'name': PowerOffVM_Task, 'duration_secs': 0.258064} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1451.995693] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-d99a69b3-9d56-41a5-a9b3-088dd2dc5654 tempest-ServerShowV247Test-1912217433 tempest-ServerShowV247Test-1912217433-project-member] [instance: 49221ea3-d457-4cf5-97a9-9ae74c4e86fb] Powered off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1451.996021] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-d99a69b3-9d56-41a5-a9b3-088dd2dc5654 tempest-ServerShowV247Test-1912217433 tempest-ServerShowV247Test-1912217433-project-member] [instance: 49221ea3-d457-4cf5-97a9-9ae74c4e86fb] Destroying instance {{(pid=62519) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1451.996825] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dbd988d5-9547-4896-ba86-d291a3ab4edd {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1452.004806] env[62519]: DEBUG nova.scheduler.client.report [None req-070141c4-76cd-4253-8f36-904e93010092 tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] Inventory has not changed for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1452.008366] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-d99a69b3-9d56-41a5-a9b3-088dd2dc5654 tempest-ServerShowV247Test-1912217433 tempest-ServerShowV247Test-1912217433-project-member] [instance: 49221ea3-d457-4cf5-97a9-9ae74c4e86fb] Unregistering the VM {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1452.009046] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-4daebd4e-427c-4968-b51e-ffce87347ab1 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1452.033825] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-d99a69b3-9d56-41a5-a9b3-088dd2dc5654 tempest-ServerShowV247Test-1912217433 tempest-ServerShowV247Test-1912217433-project-member] [instance: 49221ea3-d457-4cf5-97a9-9ae74c4e86fb] Unregistered the VM {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1452.033825] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-d99a69b3-9d56-41a5-a9b3-088dd2dc5654 tempest-ServerShowV247Test-1912217433 tempest-ServerShowV247Test-1912217433-project-member] [instance: 49221ea3-d457-4cf5-97a9-9ae74c4e86fb] Deleting contents of the VM from datastore datastore1 {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1452.033825] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-d99a69b3-9d56-41a5-a9b3-088dd2dc5654 tempest-ServerShowV247Test-1912217433 tempest-ServerShowV247Test-1912217433-project-member] Deleting the datastore file [datastore1] 49221ea3-d457-4cf5-97a9-9ae74c4e86fb {{(pid=62519) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1452.034089] 
env[62519]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-1367040a-7a69-4e51-bd5c-6e9fd130870a {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1452.040562] env[62519]: DEBUG oslo_vmware.api [None req-d99a69b3-9d56-41a5-a9b3-088dd2dc5654 tempest-ServerShowV247Test-1912217433 tempest-ServerShowV247Test-1912217433-project-member] Waiting for the task: (returnval){ [ 1452.040562] env[62519]: value = "task-1802092" [ 1452.040562] env[62519]: _type = "Task" [ 1452.040562] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1452.051208] env[62519]: DEBUG oslo_vmware.api [None req-d99a69b3-9d56-41a5-a9b3-088dd2dc5654 tempest-ServerShowV247Test-1912217433 tempest-ServerShowV247Test-1912217433-project-member] Task: {'id': task-1802092, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1452.125025] env[62519]: DEBUG oslo_vmware.api [None req-070141c4-76cd-4253-8f36-904e93010092 tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] Task: {'id': task-1802090, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.164125} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1452.125122] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-070141c4-76cd-4253-8f36-904e93010092 tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] [instance: 021c3287-5887-457e-9b3a-233308fb9b23] Extended root virtual disk {{(pid=62519) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1452.125967] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-590f4b76-5757-4c68-99ff-0abdbb5d3559 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1452.149756] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-070141c4-76cd-4253-8f36-904e93010092 tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] [instance: 021c3287-5887-457e-9b3a-233308fb9b23] Reconfiguring VM instance instance-00000014 to attach disk [datastore1] 021c3287-5887-457e-9b3a-233308fb9b23/021c3287-5887-457e-9b3a-233308fb9b23.vmdk or device None with type sparse {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1452.150948] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2db8a27a-8fe6-4a34-a97a-fe558c09f534 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1452.174174] env[62519]: DEBUG oslo_vmware.api [None req-070141c4-76cd-4253-8f36-904e93010092 tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] Waiting for the task: (returnval){ [ 1452.174174] env[62519]: value = "task-1802093" [ 1452.174174] env[62519]: _type = "Task" [ 1452.174174] env[62519]: } to complete. 
{{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1452.182459] env[62519]: DEBUG oslo_vmware.api [None req-070141c4-76cd-4253-8f36-904e93010092 tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] Task: {'id': task-1802093, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1452.419898] env[62519]: DEBUG oslo_concurrency.lockutils [None req-51a5c574-31b5-4e20-8d04-338349c55e60 tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] Acquiring lock "8070aa59-3547-460a-b914-0e84620023d0" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1452.420189] env[62519]: DEBUG oslo_concurrency.lockutils [None req-51a5c574-31b5-4e20-8d04-338349c55e60 tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] Lock "8070aa59-3547-460a-b914-0e84620023d0" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1452.513718] env[62519]: DEBUG oslo_concurrency.lockutils [None req-070141c4-76cd-4253-8f36-904e93010092 tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.773s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1452.514368] env[62519]: DEBUG nova.compute.manager [None req-070141c4-76cd-4253-8f36-904e93010092 tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] [instance: 3aab3adb-b28b-45dd-880f-b1cfbaeeed0c] Start building networks asynchronously for instance. 
{{(pid=62519) _build_resources /opt/stack/nova/nova/compute/manager.py:2838}} [ 1452.518190] env[62519]: DEBUG oslo_concurrency.lockutils [None req-36a89f82-8295-4590-9f5f-922976f5713d tempest-ServerDiagnosticsV248Test-635217058 tempest-ServerDiagnosticsV248Test-635217058-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 29.924s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1452.518190] env[62519]: DEBUG nova.objects.instance [None req-36a89f82-8295-4590-9f5f-922976f5713d tempest-ServerDiagnosticsV248Test-635217058 tempest-ServerDiagnosticsV248Test-635217058-project-member] Lazy-loading 'resources' on Instance uuid ceadcb5e-ee82-4441-b046-f79b973ec05e {{(pid=62519) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1452.523605] env[62519]: DEBUG oslo_concurrency.lockutils [None req-6d590639-68af-4fac-aa10-a83b048f703c tempest-ServersTestMultiNic-797441404 tempest-ServersTestMultiNic-797441404-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1452.551650] env[62519]: DEBUG oslo_vmware.api [None req-d99a69b3-9d56-41a5-a9b3-088dd2dc5654 tempest-ServerShowV247Test-1912217433 tempest-ServerShowV247Test-1912217433-project-member] Task: {'id': task-1802092, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.115709} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1452.554623] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-d99a69b3-9d56-41a5-a9b3-088dd2dc5654 tempest-ServerShowV247Test-1912217433 tempest-ServerShowV247Test-1912217433-project-member] Deleted the datastore file {{(pid=62519) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1452.554623] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-d99a69b3-9d56-41a5-a9b3-088dd2dc5654 tempest-ServerShowV247Test-1912217433 tempest-ServerShowV247Test-1912217433-project-member] [instance: 49221ea3-d457-4cf5-97a9-9ae74c4e86fb] Deleted contents of the VM from datastore datastore1 {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1452.554623] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-d99a69b3-9d56-41a5-a9b3-088dd2dc5654 tempest-ServerShowV247Test-1912217433 tempest-ServerShowV247Test-1912217433-project-member] [instance: 49221ea3-d457-4cf5-97a9-9ae74c4e86fb] Instance destroyed {{(pid=62519) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1452.683916] env[62519]: DEBUG oslo_vmware.api [None req-070141c4-76cd-4253-8f36-904e93010092 tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] Task: {'id': task-1802093, 'name': ReconfigVM_Task, 'duration_secs': 0.340784} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1452.684513] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-070141c4-76cd-4253-8f36-904e93010092 tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] [instance: 021c3287-5887-457e-9b3a-233308fb9b23] Reconfigured VM instance instance-00000014 to attach disk [datastore1] 021c3287-5887-457e-9b3a-233308fb9b23/021c3287-5887-457e-9b3a-233308fb9b23.vmdk or device None with type sparse {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1452.685166] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-df94a0de-e0cc-4a0f-b1a2-723c78d25ea9 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1452.691647] env[62519]: DEBUG oslo_vmware.api [None req-070141c4-76cd-4253-8f36-904e93010092 tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] Waiting for the task: (returnval){ [ 1452.691647] env[62519]: value = "task-1802094" [ 1452.691647] env[62519]: _type = "Task" [ 1452.691647] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1452.703360] env[62519]: DEBUG oslo_vmware.api [None req-070141c4-76cd-4253-8f36-904e93010092 tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] Task: {'id': task-1802094, 'name': Rename_Task} progress is 5%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1452.850576] env[62519]: DEBUG oslo_concurrency.lockutils [None req-d5d00165-6954-4f25-bfe4-c298fa7922a8 tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] Acquiring lock "27f9e890-4733-43aa-9bf1-351d42d75418" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1452.850816] env[62519]: DEBUG oslo_concurrency.lockutils [None req-d5d00165-6954-4f25-bfe4-c298fa7922a8 tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] Lock "27f9e890-4733-43aa-9bf1-351d42d75418" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1452.867766] env[62519]: DEBUG nova.network.neutron [None req-070141c4-76cd-4253-8f36-904e93010092 tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] [instance: 56790738-4759-468a-9f43-f9c2bc2de23a] Successfully updated port: a8a61a1e-f910-4564-b6af-8507d8141e45 {{(pid=62519) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1453.023286] env[62519]: DEBUG nova.compute.utils [None req-070141c4-76cd-4253-8f36-904e93010092 tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] Using /dev/sd instead of None {{(pid=62519) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1453.032516] env[62519]: DEBUG nova.compute.manager [None req-070141c4-76cd-4253-8f36-904e93010092 tempest-ListServersNegativeTestJSON-11413003 
tempest-ListServersNegativeTestJSON-11413003-project-member] [instance: 3aab3adb-b28b-45dd-880f-b1cfbaeeed0c] Allocating IP information in the background. {{(pid=62519) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1989}} [ 1453.034746] env[62519]: DEBUG nova.network.neutron [None req-070141c4-76cd-4253-8f36-904e93010092 tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] [instance: 3aab3adb-b28b-45dd-880f-b1cfbaeeed0c] allocate_for_instance() {{(pid=62519) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1453.104760] env[62519]: DEBUG nova.policy [None req-070141c4-76cd-4253-8f36-904e93010092 tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '05b2585b9472460ab5d30c3451ff31ca', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'bde061b2dd1945bdae6c789325741ade', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62519) authorize /opt/stack/nova/nova/policy.py:192}} [ 1453.134903] env[62519]: DEBUG oslo_concurrency.lockutils [None req-069fe864-a843-4aad-b4f0-161d16fbc86a tempest-ImagesOneServerTestJSON-1959164190 tempest-ImagesOneServerTestJSON-1959164190-project-member] Acquiring lock "4e3dee19-b99a-4257-88da-1b0531e2c0f9" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1453.135833] env[62519]: DEBUG oslo_concurrency.lockutils [None req-069fe864-a843-4aad-b4f0-161d16fbc86a tempest-ImagesOneServerTestJSON-1959164190 tempest-ImagesOneServerTestJSON-1959164190-project-member] Lock "4e3dee19-b99a-4257-88da-1b0531e2c0f9" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1453.201866] env[62519]: DEBUG oslo_vmware.api [None req-070141c4-76cd-4253-8f36-904e93010092 tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] Task: {'id': task-1802094, 'name': Rename_Task, 'duration_secs': 0.147638} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1453.204438] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-070141c4-76cd-4253-8f36-904e93010092 tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] [instance: 021c3287-5887-457e-9b3a-233308fb9b23] Powering on the VM {{(pid=62519) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1453.204925] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-6c019179-1497-4a8e-a486-54585d06710c {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1453.211403] env[62519]: DEBUG oslo_vmware.api [None req-070141c4-76cd-4253-8f36-904e93010092 tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] Waiting for the task: (returnval){ [ 1453.211403] env[62519]: value = "task-1802095" [ 1453.211403] env[62519]: _type = "Task" [ 1453.211403] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1453.220291] env[62519]: DEBUG oslo_vmware.api [None req-070141c4-76cd-4253-8f36-904e93010092 tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] Task: {'id': task-1802095, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1453.269184] env[62519]: DEBUG oslo_service.periodic_task [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62519) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1453.370655] env[62519]: DEBUG oslo_concurrency.lockutils [None req-070141c4-76cd-4253-8f36-904e93010092 tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] Acquiring lock "refresh_cache-56790738-4759-468a-9f43-f9c2bc2de23a" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1453.370655] env[62519]: DEBUG oslo_concurrency.lockutils [None req-070141c4-76cd-4253-8f36-904e93010092 tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] Acquired lock "refresh_cache-56790738-4759-468a-9f43-f9c2bc2de23a" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1453.371613] env[62519]: DEBUG nova.network.neutron [None req-070141c4-76cd-4253-8f36-904e93010092 tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] [instance: 56790738-4759-468a-9f43-f9c2bc2de23a] Building network info cache for instance {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1453.379323] env[62519]: DEBUG nova.network.neutron [None req-070141c4-76cd-4253-8f36-904e93010092 tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] [instance: 3aab3adb-b28b-45dd-880f-b1cfbaeeed0c] Successfully created port: 45a2d062-302a-4749-81e8-8a16c4726fe2 {{(pid=62519) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1453.532898] env[62519]: DEBUG nova.compute.manager [None req-070141c4-76cd-4253-8f36-904e93010092 
tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] [instance: 3aab3adb-b28b-45dd-880f-b1cfbaeeed0c] Start building block device mappings for instance. {{(pid=62519) _build_resources /opt/stack/nova/nova/compute/manager.py:2873}} [ 1453.599886] env[62519]: DEBUG nova.virt.hardware [None req-d99a69b3-9d56-41a5-a9b3-088dd2dc5654 tempest-ServerShowV247Test-1912217433 tempest-ServerShowV247Test-1912217433-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T07:56:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T07:55:55Z,direct_url=,disk_format='vmdk',id=15793716-f1d9-4a86-9030-717adf498693,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4069337684d348e0a1ab4eb6a1a2b14d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T07:55:56Z,virtual_size=,visibility=), allow threads: False {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1453.600804] env[62519]: DEBUG nova.virt.hardware [None req-d99a69b3-9d56-41a5-a9b3-088dd2dc5654 tempest-ServerShowV247Test-1912217433 tempest-ServerShowV247Test-1912217433-project-member] Flavor limits 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1453.600804] env[62519]: DEBUG nova.virt.hardware [None req-d99a69b3-9d56-41a5-a9b3-088dd2dc5654 tempest-ServerShowV247Test-1912217433 tempest-ServerShowV247Test-1912217433-project-member] Image limits 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1453.600804] env[62519]: DEBUG nova.virt.hardware [None req-d99a69b3-9d56-41a5-a9b3-088dd2dc5654 tempest-ServerShowV247Test-1912217433 tempest-ServerShowV247Test-1912217433-project-member] Flavor pref 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1453.600804] env[62519]: DEBUG nova.virt.hardware [None req-d99a69b3-9d56-41a5-a9b3-088dd2dc5654 tempest-ServerShowV247Test-1912217433 tempest-ServerShowV247Test-1912217433-project-member] Image pref 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1453.600804] env[62519]: DEBUG nova.virt.hardware [None req-d99a69b3-9d56-41a5-a9b3-088dd2dc5654 tempest-ServerShowV247Test-1912217433 tempest-ServerShowV247Test-1912217433-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1453.600804] env[62519]: DEBUG nova.virt.hardware [None req-d99a69b3-9d56-41a5-a9b3-088dd2dc5654 tempest-ServerShowV247Test-1912217433 tempest-ServerShowV247Test-1912217433-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1453.601142] env[62519]: DEBUG nova.virt.hardware [None req-d99a69b3-9d56-41a5-a9b3-088dd2dc5654 tempest-ServerShowV247Test-1912217433 tempest-ServerShowV247Test-1912217433-project-member] Build topologies for 1 vcpu(s) 1:1:1 
{{(pid=62519) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1453.601142] env[62519]: DEBUG nova.virt.hardware [None req-d99a69b3-9d56-41a5-a9b3-088dd2dc5654 tempest-ServerShowV247Test-1912217433 tempest-ServerShowV247Test-1912217433-project-member] Got 1 possible topologies {{(pid=62519) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1453.601299] env[62519]: DEBUG nova.virt.hardware [None req-d99a69b3-9d56-41a5-a9b3-088dd2dc5654 tempest-ServerShowV247Test-1912217433 tempest-ServerShowV247Test-1912217433-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1453.601517] env[62519]: DEBUG nova.virt.hardware [None req-d99a69b3-9d56-41a5-a9b3-088dd2dc5654 tempest-ServerShowV247Test-1912217433 tempest-ServerShowV247Test-1912217433-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1453.603864] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60811aa8-9141-4afb-bf37-0d9e9eb8e35e {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1453.611793] env[62519]: DEBUG oslo_concurrency.lockutils [None req-d66de93a-aada-4f3a-931f-5f8ddbb5f1e6 tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] Acquiring lock "24cb49c8-b2ef-4ede-aea6-6e34081beca1" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1453.611877] env[62519]: DEBUG oslo_concurrency.lockutils [None req-d66de93a-aada-4f3a-931f-5f8ddbb5f1e6 tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] Lock "24cb49c8-b2ef-4ede-aea6-6e34081beca1" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1453.618319] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d709a98-59b4-4ebd-90f7-8b075560a59c {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1453.639406] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-d99a69b3-9d56-41a5-a9b3-088dd2dc5654 tempest-ServerShowV247Test-1912217433 tempest-ServerShowV247Test-1912217433-project-member] [instance: 49221ea3-d457-4cf5-97a9-9ae74c4e86fb] Instance VIF info [] {{(pid=62519) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1453.645424] env[62519]: DEBUG oslo.service.loopingcall [None req-d99a69b3-9d56-41a5-a9b3-088dd2dc5654 tempest-ServerShowV247Test-1912217433 tempest-ServerShowV247Test-1912217433-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62519) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1453.646725] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 49221ea3-d457-4cf5-97a9-9ae74c4e86fb] Creating VM on the ESX host {{(pid=62519) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1453.647465] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4237760-e7ef-454e-9af9-64934920652b {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1453.650266] env[62519]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-3af9a74f-f999-467b-a59f-1ab872189771 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1453.672348] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b718771c-042c-4784-aaa6-ba96178769ca {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1453.677098] env[62519]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1453.677098] env[62519]: value = "task-1802096" [ 1453.677098] env[62519]: _type = "Task" [ 1453.677098] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1453.705237] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aba8a264-c8bf-4634-adf4-6b983ec013ae {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1453.710275] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1802096, 'name': CreateVM_Task} progress is 15%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1453.718170] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2cc779ce-48eb-43dd-bdde-9a39544c51ba {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1453.728412] env[62519]: DEBUG oslo_vmware.api [None req-070141c4-76cd-4253-8f36-904e93010092 tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] Task: {'id': task-1802095, 'name': PowerOnVM_Task, 'duration_secs': 0.488328} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1453.736099] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-070141c4-76cd-4253-8f36-904e93010092 tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] [instance: 021c3287-5887-457e-9b3a-233308fb9b23] Powered on the VM {{(pid=62519) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1453.736282] env[62519]: INFO nova.compute.manager [None req-070141c4-76cd-4253-8f36-904e93010092 tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] [instance: 021c3287-5887-457e-9b3a-233308fb9b23] Took 7.01 seconds to spawn the instance on the hypervisor. 
[ 1453.736470] env[62519]: DEBUG nova.compute.manager [None req-070141c4-76cd-4253-8f36-904e93010092 tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] [instance: 021c3287-5887-457e-9b3a-233308fb9b23] Checking state {{(pid=62519) _get_power_state /opt/stack/nova/nova/compute/manager.py:1799}} [ 1453.741155] env[62519]: DEBUG nova.compute.provider_tree [None req-36a89f82-8295-4590-9f5f-922976f5713d tempest-ServerDiagnosticsV248Test-635217058 tempest-ServerDiagnosticsV248Test-635217058-project-member] Inventory has not changed in ProviderTree for provider: f8ca0d98-9158-4b85-ae0e-b106f966dd44 {{(pid=62519) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1453.741155] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9a74be6-ca4e-4391-b83b-76a73cb219af {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1454.068453] env[62519]: DEBUG nova.network.neutron [None req-070141c4-76cd-4253-8f36-904e93010092 tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] [instance: 56790738-4759-468a-9f43-f9c2bc2de23a] Instance cache missing network info. {{(pid=62519) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1454.092378] env[62519]: DEBUG nova.compute.manager [req-d48a5ab4-b4a1-4398-b7d7-bfddd9c99390 req-f6c7e44d-ab16-4e4e-8a24-4243db38c370 service nova] [instance: 56790738-4759-468a-9f43-f9c2bc2de23a] Received event network-vif-plugged-a8a61a1e-f910-4564-b6af-8507d8141e45 {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1454.092580] env[62519]: DEBUG oslo_concurrency.lockutils [req-d48a5ab4-b4a1-4398-b7d7-bfddd9c99390 req-f6c7e44d-ab16-4e4e-8a24-4243db38c370 service nova] Acquiring lock "56790738-4759-468a-9f43-f9c2bc2de23a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1454.092783] env[62519]: DEBUG oslo_concurrency.lockutils [req-d48a5ab4-b4a1-4398-b7d7-bfddd9c99390 req-f6c7e44d-ab16-4e4e-8a24-4243db38c370 service nova] Lock "56790738-4759-468a-9f43-f9c2bc2de23a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1454.092942] env[62519]: DEBUG oslo_concurrency.lockutils [req-d48a5ab4-b4a1-4398-b7d7-bfddd9c99390 req-f6c7e44d-ab16-4e4e-8a24-4243db38c370 service nova] Lock "56790738-4759-468a-9f43-f9c2bc2de23a-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1454.095062] env[62519]: DEBUG nova.compute.manager [req-d48a5ab4-b4a1-4398-b7d7-bfddd9c99390 req-f6c7e44d-ab16-4e4e-8a24-4243db38c370 service nova] [instance: 56790738-4759-468a-9f43-f9c2bc2de23a] No waiting events found dispatching network-vif-plugged-a8a61a1e-f910-4564-b6af-8507d8141e45 {{(pid=62519) pop_instance_event /opt/stack/nova/nova/compute/manager.py:323}} [ 1454.095062] env[62519]: WARNING nova.compute.manager [req-d48a5ab4-b4a1-4398-b7d7-bfddd9c99390 req-f6c7e44d-ab16-4e4e-8a24-4243db38c370 service nova] [instance: 56790738-4759-468a-9f43-f9c2bc2de23a] Received 
unexpected event network-vif-plugged-a8a61a1e-f910-4564-b6af-8507d8141e45 for instance with vm_state building and task_state spawning. [ 1454.095062] env[62519]: DEBUG nova.compute.manager [req-d48a5ab4-b4a1-4398-b7d7-bfddd9c99390 req-f6c7e44d-ab16-4e4e-8a24-4243db38c370 service nova] [instance: 56790738-4759-468a-9f43-f9c2bc2de23a] Received event network-changed-a8a61a1e-f910-4564-b6af-8507d8141e45 {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1454.095062] env[62519]: DEBUG nova.compute.manager [req-d48a5ab4-b4a1-4398-b7d7-bfddd9c99390 req-f6c7e44d-ab16-4e4e-8a24-4243db38c370 service nova] [instance: 56790738-4759-468a-9f43-f9c2bc2de23a] Refreshing instance network info cache due to event network-changed-a8a61a1e-f910-4564-b6af-8507d8141e45. {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11628}} [ 1454.095062] env[62519]: DEBUG oslo_concurrency.lockutils [req-d48a5ab4-b4a1-4398-b7d7-bfddd9c99390 req-f6c7e44d-ab16-4e4e-8a24-4243db38c370 service nova] Acquiring lock "refresh_cache-56790738-4759-468a-9f43-f9c2bc2de23a" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1454.187277] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1802096, 'name': CreateVM_Task} progress is 99%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1454.223042] env[62519]: DEBUG nova.network.neutron [None req-070141c4-76cd-4253-8f36-904e93010092 tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] [instance: 56790738-4759-468a-9f43-f9c2bc2de23a] Updating instance_info_cache with network_info: [{"id": "a8a61a1e-f910-4564-b6af-8507d8141e45", "address": "fa:16:3e:86:a0:8d", "network": {"id": "61c38727-028c-44d6-ad51-8b8fe4c3b789", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-1533919301-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bde061b2dd1945bdae6c789325741ade", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5c7821ea-f92f-4f06-a4cb-05e1186a9d22", "external-id": "nsx-vlan-transportzone-69", "segmentation_id": 69, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa8a61a1e-f9", "ovs_interfaceid": "a8a61a1e-f910-4564-b6af-8507d8141e45", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1454.243258] env[62519]: DEBUG nova.scheduler.client.report [None req-36a89f82-8295-4590-9f5f-922976f5713d tempest-ServerDiagnosticsV248Test-635217058 tempest-ServerDiagnosticsV248Test-635217058-project-member] Inventory has not changed for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 
'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1454.256559] env[62519]: INFO nova.compute.manager [None req-070141c4-76cd-4253-8f36-904e93010092 tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] [instance: 021c3287-5887-457e-9b3a-233308fb9b23] Took 36.96 seconds to build instance. [ 1454.545027] env[62519]: DEBUG nova.compute.manager [None req-070141c4-76cd-4253-8f36-904e93010092 tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] [instance: 3aab3adb-b28b-45dd-880f-b1cfbaeeed0c] Start spawning the instance on the hypervisor. {{(pid=62519) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2647}} [ 1454.570273] env[62519]: DEBUG nova.virt.hardware [None req-070141c4-76cd-4253-8f36-904e93010092 tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T07:56:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T07:55:55Z,direct_url=,disk_format='vmdk',id=15793716-f1d9-4a86-9030-717adf498693,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4069337684d348e0a1ab4eb6a1a2b14d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T07:55:56Z,virtual_size=,visibility=), allow threads: False {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1454.570513] env[62519]: DEBUG nova.virt.hardware [None req-070141c4-76cd-4253-8f36-904e93010092 tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] Flavor limits 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1454.570666] env[62519]: DEBUG nova.virt.hardware [None req-070141c4-76cd-4253-8f36-904e93010092 tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] Image limits 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1454.570842] env[62519]: DEBUG nova.virt.hardware [None req-070141c4-76cd-4253-8f36-904e93010092 tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] Flavor pref 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1454.570984] env[62519]: DEBUG nova.virt.hardware [None req-070141c4-76cd-4253-8f36-904e93010092 tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] Image pref 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1454.574682] env[62519]: DEBUG nova.virt.hardware [None req-070141c4-76cd-4253-8f36-904e93010092 tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 
{{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1454.575044] env[62519]: DEBUG nova.virt.hardware [None req-070141c4-76cd-4253-8f36-904e93010092 tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1454.575438] env[62519]: DEBUG nova.virt.hardware [None req-070141c4-76cd-4253-8f36-904e93010092 tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62519) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1454.575438] env[62519]: DEBUG nova.virt.hardware [None req-070141c4-76cd-4253-8f36-904e93010092 tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] Got 1 possible topologies {{(pid=62519) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1454.575616] env[62519]: DEBUG nova.virt.hardware [None req-070141c4-76cd-4253-8f36-904e93010092 tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1454.575788] env[62519]: DEBUG nova.virt.hardware [None req-070141c4-76cd-4253-8f36-904e93010092 tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1454.577266] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5dead809-0138-4ff7-bf4e-d9a479e6d19e {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1454.587304] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c2ed826-102b-448d-b0a9-580d4e9b972f {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1454.688395] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1802096, 'name': CreateVM_Task, 'duration_secs': 0.593685} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1454.689021] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 49221ea3-d457-4cf5-97a9-9ae74c4e86fb] Created VM on the ESX host {{(pid=62519) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1454.689218] env[62519]: DEBUG oslo_concurrency.lockutils [None req-d99a69b3-9d56-41a5-a9b3-088dd2dc5654 tempest-ServerShowV247Test-1912217433 tempest-ServerShowV247Test-1912217433-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1454.689552] env[62519]: DEBUG oslo_concurrency.lockutils [None req-d99a69b3-9d56-41a5-a9b3-088dd2dc5654 tempest-ServerShowV247Test-1912217433 tempest-ServerShowV247Test-1912217433-project-member] Acquired lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1454.689927] env[62519]: DEBUG oslo_concurrency.lockutils [None req-d99a69b3-9d56-41a5-a9b3-088dd2dc5654 tempest-ServerShowV247Test-1912217433 tempest-ServerShowV247Test-1912217433-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1454.692109] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-927417b8-4a6f-4046-ad47-f621a717deae {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1454.695215] env[62519]: DEBUG oslo_vmware.api [None req-d99a69b3-9d56-41a5-a9b3-088dd2dc5654 tempest-ServerShowV247Test-1912217433 tempest-ServerShowV247Test-1912217433-project-member] Waiting for the task: (returnval){ [ 1454.695215] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]52e10786-b153-a40d-1f6c-56a5157371a3" [ 1454.695215] env[62519]: _type = "Task" [ 1454.695215] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1454.704192] env[62519]: DEBUG oslo_vmware.api [None req-d99a69b3-9d56-41a5-a9b3-088dd2dc5654 tempest-ServerShowV247Test-1912217433 tempest-ServerShowV247Test-1912217433-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52e10786-b153-a40d-1f6c-56a5157371a3, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1454.726370] env[62519]: DEBUG oslo_concurrency.lockutils [None req-070141c4-76cd-4253-8f36-904e93010092 tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] Releasing lock "refresh_cache-56790738-4759-468a-9f43-f9c2bc2de23a" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1454.727129] env[62519]: DEBUG nova.compute.manager [None req-070141c4-76cd-4253-8f36-904e93010092 tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] [instance: 56790738-4759-468a-9f43-f9c2bc2de23a] Instance network_info: |[{"id": "a8a61a1e-f910-4564-b6af-8507d8141e45", "address": "fa:16:3e:86:a0:8d", "network": {"id": "61c38727-028c-44d6-ad51-8b8fe4c3b789", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-1533919301-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bde061b2dd1945bdae6c789325741ade", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5c7821ea-f92f-4f06-a4cb-05e1186a9d22", "external-id": "nsx-vlan-transportzone-69", "segmentation_id": 69, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa8a61a1e-f9", "ovs_interfaceid": "a8a61a1e-f910-4564-b6af-8507d8141e45", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62519) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2004}} [ 1454.727129] env[62519]: DEBUG oslo_concurrency.lockutils [req-d48a5ab4-b4a1-4398-b7d7-bfddd9c99390 req-f6c7e44d-ab16-4e4e-8a24-4243db38c370 service nova] Acquired lock "refresh_cache-56790738-4759-468a-9f43-f9c2bc2de23a" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1454.727317] env[62519]: DEBUG nova.network.neutron [req-d48a5ab4-b4a1-4398-b7d7-bfddd9c99390 req-f6c7e44d-ab16-4e4e-8a24-4243db38c370 service nova] [instance: 56790738-4759-468a-9f43-f9c2bc2de23a] Refreshing network info cache for port a8a61a1e-f910-4564-b6af-8507d8141e45 {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1454.728582] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-070141c4-76cd-4253-8f36-904e93010092 tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] [instance: 56790738-4759-468a-9f43-f9c2bc2de23a] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:86:a0:8d', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '5c7821ea-f92f-4f06-a4cb-05e1186a9d22', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'a8a61a1e-f910-4564-b6af-8507d8141e45', 'vif_model': 'vmxnet3'}] {{(pid=62519) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1454.739598] env[62519]: DEBUG oslo.service.loopingcall [None req-070141c4-76cd-4253-8f36-904e93010092 tempest-ListServersNegativeTestJSON-11413003 
tempest-ListServersNegativeTestJSON-11413003-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62519) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1454.739598] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 56790738-4759-468a-9f43-f9c2bc2de23a] Creating VM on the ESX host {{(pid=62519) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1454.739598] env[62519]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a7784777-c699-4d43-86fa-897037c84795 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1454.757615] env[62519]: DEBUG oslo_concurrency.lockutils [None req-36a89f82-8295-4590-9f5f-922976f5713d tempest-ServerDiagnosticsV248Test-635217058 tempest-ServerDiagnosticsV248Test-635217058-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.240s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1454.760263] env[62519]: DEBUG oslo_concurrency.lockutils [None req-fd469e3e-0f07-46c3-9403-6ec88e595d61 tempest-ServersTestJSON-192126600 tempest-ServersTestJSON-192126600-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 32.051s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1454.762796] env[62519]: INFO nova.compute.claims [None req-fd469e3e-0f07-46c3-9403-6ec88e595d61 tempest-ServersTestJSON-192126600 tempest-ServersTestJSON-192126600-project-member] [instance: 4c336ad1-8ce6-4f89-843e-0baae0d0dbda] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1454.767981] env[62519]: DEBUG oslo_service.periodic_task [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62519) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1454.768291] env[62519]: DEBUG oslo_service.periodic_task [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62519) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1454.776055] env[62519]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1454.776055] env[62519]: value = "task-1802097" [ 1454.776055] env[62519]: _type = "Task" [ 1454.776055] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1454.788023] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1802097, 'name': CreateVM_Task} progress is 5%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1454.794892] env[62519]: INFO nova.scheduler.client.report [None req-36a89f82-8295-4590-9f5f-922976f5713d tempest-ServerDiagnosticsV248Test-635217058 tempest-ServerDiagnosticsV248Test-635217058-project-member] Deleted allocations for instance ceadcb5e-ee82-4441-b046-f79b973ec05e [ 1454.919474] env[62519]: DEBUG nova.network.neutron [None req-070141c4-76cd-4253-8f36-904e93010092 tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] [instance: 3aab3adb-b28b-45dd-880f-b1cfbaeeed0c] Successfully updated port: 45a2d062-302a-4749-81e8-8a16c4726fe2 {{(pid=62519) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1455.206205] env[62519]: DEBUG oslo_vmware.api [None req-d99a69b3-9d56-41a5-a9b3-088dd2dc5654 tempest-ServerShowV247Test-1912217433 tempest-ServerShowV247Test-1912217433-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52e10786-b153-a40d-1f6c-56a5157371a3, 'name': SearchDatastore_Task, 'duration_secs': 0.030141} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1455.206555] env[62519]: DEBUG oslo_concurrency.lockutils [None req-d99a69b3-9d56-41a5-a9b3-088dd2dc5654 tempest-ServerShowV247Test-1912217433 tempest-ServerShowV247Test-1912217433-project-member] Releasing lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1455.206793] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-d99a69b3-9d56-41a5-a9b3-088dd2dc5654 tempest-ServerShowV247Test-1912217433 tempest-ServerShowV247Test-1912217433-project-member] [instance: 49221ea3-d457-4cf5-97a9-9ae74c4e86fb] Processing image 15793716-f1d9-4a86-9030-717adf498693 {{(pid=62519) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1455.207035] env[62519]: DEBUG oslo_concurrency.lockutils [None req-d99a69b3-9d56-41a5-a9b3-088dd2dc5654 tempest-ServerShowV247Test-1912217433 tempest-ServerShowV247Test-1912217433-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1455.207223] env[62519]: DEBUG oslo_concurrency.lockutils [None req-d99a69b3-9d56-41a5-a9b3-088dd2dc5654 tempest-ServerShowV247Test-1912217433 tempest-ServerShowV247Test-1912217433-project-member] Acquired lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1455.207421] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-d99a69b3-9d56-41a5-a9b3-088dd2dc5654 tempest-ServerShowV247Test-1912217433 tempest-ServerShowV247Test-1912217433-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62519) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1455.207678] env[62519]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e076858b-c0f0-4049-8408-d79dc2603ce9 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1455.216837] env[62519]: DEBUG 
nova.virt.vmwareapi.ds_util [None req-d99a69b3-9d56-41a5-a9b3-088dd2dc5654 tempest-ServerShowV247Test-1912217433 tempest-ServerShowV247Test-1912217433-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62519) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1455.217015] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-d99a69b3-9d56-41a5-a9b3-088dd2dc5654 tempest-ServerShowV247Test-1912217433 tempest-ServerShowV247Test-1912217433-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62519) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1455.217806] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c457dd2f-adb2-4bbd-b8e2-2cf6dad09716 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1455.223169] env[62519]: DEBUG oslo_vmware.api [None req-d99a69b3-9d56-41a5-a9b3-088dd2dc5654 tempest-ServerShowV247Test-1912217433 tempest-ServerShowV247Test-1912217433-project-member] Waiting for the task: (returnval){ [ 1455.223169] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]52191e16-f8fa-f1fd-47ff-228967978eb1" [ 1455.223169] env[62519]: _type = "Task" [ 1455.223169] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1455.230752] env[62519]: DEBUG oslo_vmware.api [None req-d99a69b3-9d56-41a5-a9b3-088dd2dc5654 tempest-ServerShowV247Test-1912217433 tempest-ServerShowV247Test-1912217433-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52191e16-f8fa-f1fd-47ff-228967978eb1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1455.292076] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1802097, 'name': CreateVM_Task, 'duration_secs': 0.395569} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1455.292245] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 56790738-4759-468a-9f43-f9c2bc2de23a] Created VM on the ESX host {{(pid=62519) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1455.292934] env[62519]: DEBUG oslo_concurrency.lockutils [None req-070141c4-76cd-4253-8f36-904e93010092 tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1455.293140] env[62519]: DEBUG oslo_concurrency.lockutils [None req-070141c4-76cd-4253-8f36-904e93010092 tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] Acquired lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1455.293412] env[62519]: DEBUG oslo_concurrency.lockutils [None req-070141c4-76cd-4253-8f36-904e93010092 tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1455.293654] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-31b8a262-72dc-42bd-a4dd-805f9b5bfe5d {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1455.301327] env[62519]: DEBUG oslo_vmware.api [None req-070141c4-76cd-4253-8f36-904e93010092 tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] Waiting for the task: (returnval){ [ 1455.301327] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]525d1402-170c-83f1-f05f-283d29c6d060" [ 1455.301327] env[62519]: _type = "Task" [ 1455.301327] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1455.301727] env[62519]: DEBUG oslo_concurrency.lockutils [None req-36a89f82-8295-4590-9f5f-922976f5713d tempest-ServerDiagnosticsV248Test-635217058 tempest-ServerDiagnosticsV248Test-635217058-project-member] Lock "ceadcb5e-ee82-4441-b046-f79b973ec05e" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 37.473s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1455.310909] env[62519]: DEBUG oslo_vmware.api [None req-070141c4-76cd-4253-8f36-904e93010092 tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]525d1402-170c-83f1-f05f-283d29c6d060, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1455.424845] env[62519]: DEBUG oslo_concurrency.lockutils [None req-070141c4-76cd-4253-8f36-904e93010092 tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] Acquiring lock "refresh_cache-3aab3adb-b28b-45dd-880f-b1cfbaeeed0c" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1455.426333] env[62519]: DEBUG oslo_concurrency.lockutils [None req-070141c4-76cd-4253-8f36-904e93010092 tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] Acquired lock "refresh_cache-3aab3adb-b28b-45dd-880f-b1cfbaeeed0c" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1455.426333] env[62519]: DEBUG nova.network.neutron [None req-070141c4-76cd-4253-8f36-904e93010092 tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] [instance: 3aab3adb-b28b-45dd-880f-b1cfbaeeed0c] Building network info cache for instance {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1455.496766] env[62519]: DEBUG nova.network.neutron [req-d48a5ab4-b4a1-4398-b7d7-bfddd9c99390 req-f6c7e44d-ab16-4e4e-8a24-4243db38c370 service nova] [instance: 56790738-4759-468a-9f43-f9c2bc2de23a] Updated VIF entry in instance network info cache for port a8a61a1e-f910-4564-b6af-8507d8141e45. {{(pid=62519) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1455.497192] env[62519]: DEBUG nova.network.neutron [req-d48a5ab4-b4a1-4398-b7d7-bfddd9c99390 req-f6c7e44d-ab16-4e4e-8a24-4243db38c370 service nova] [instance: 56790738-4759-468a-9f43-f9c2bc2de23a] Updating instance_info_cache with network_info: [{"id": "a8a61a1e-f910-4564-b6af-8507d8141e45", "address": "fa:16:3e:86:a0:8d", "network": {"id": "61c38727-028c-44d6-ad51-8b8fe4c3b789", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-1533919301-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bde061b2dd1945bdae6c789325741ade", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5c7821ea-f92f-4f06-a4cb-05e1186a9d22", "external-id": "nsx-vlan-transportzone-69", "segmentation_id": 69, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa8a61a1e-f9", "ovs_interfaceid": "a8a61a1e-f910-4564-b6af-8507d8141e45", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1455.734899] env[62519]: DEBUG oslo_vmware.api [None req-d99a69b3-9d56-41a5-a9b3-088dd2dc5654 tempest-ServerShowV247Test-1912217433 tempest-ServerShowV247Test-1912217433-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52191e16-f8fa-f1fd-47ff-228967978eb1, 'name': SearchDatastore_Task, 'duration_secs': 0.033147} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1455.735523] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d45d3dcd-bd18-429d-ad95-764b5dba54b0 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1455.741427] env[62519]: DEBUG oslo_vmware.api [None req-d99a69b3-9d56-41a5-a9b3-088dd2dc5654 tempest-ServerShowV247Test-1912217433 tempest-ServerShowV247Test-1912217433-project-member] Waiting for the task: (returnval){ [ 1455.741427] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]5233bcbf-f8c3-82f7-3fab-7d0407c32554" [ 1455.741427] env[62519]: _type = "Task" [ 1455.741427] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1455.749923] env[62519]: DEBUG oslo_vmware.api [None req-d99a69b3-9d56-41a5-a9b3-088dd2dc5654 tempest-ServerShowV247Test-1912217433 tempest-ServerShowV247Test-1912217433-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]5233bcbf-f8c3-82f7-3fab-7d0407c32554, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1455.761467] env[62519]: DEBUG oslo_service.periodic_task [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Running periodic task ComputeManager.update_available_resource {{(pid=62519) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1455.773466] env[62519]: DEBUG oslo_concurrency.lockutils [None req-070141c4-76cd-4253-8f36-904e93010092 tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] Lock "021c3287-5887-457e-9b3a-233308fb9b23" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 47.219s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1455.817530] env[62519]: DEBUG oslo_vmware.api [None req-070141c4-76cd-4253-8f36-904e93010092 tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]525d1402-170c-83f1-f05f-283d29c6d060, 'name': SearchDatastore_Task, 'duration_secs': 0.012666} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1455.818130] env[62519]: DEBUG oslo_concurrency.lockutils [None req-070141c4-76cd-4253-8f36-904e93010092 tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] Releasing lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1455.818369] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-070141c4-76cd-4253-8f36-904e93010092 tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] [instance: 56790738-4759-468a-9f43-f9c2bc2de23a] Processing image 15793716-f1d9-4a86-9030-717adf498693 {{(pid=62519) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1455.819026] env[62519]: DEBUG oslo_concurrency.lockutils [None req-070141c4-76cd-4253-8f36-904e93010092 tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1455.961310] env[62519]: DEBUG nova.network.neutron [None req-070141c4-76cd-4253-8f36-904e93010092 tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] [instance: 3aab3adb-b28b-45dd-880f-b1cfbaeeed0c] Instance cache missing network info. {{(pid=62519) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1456.005251] env[62519]: DEBUG oslo_concurrency.lockutils [req-d48a5ab4-b4a1-4398-b7d7-bfddd9c99390 req-f6c7e44d-ab16-4e4e-8a24-4243db38c370 service nova] Releasing lock "refresh_cache-56790738-4759-468a-9f43-f9c2bc2de23a" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1456.112876] env[62519]: DEBUG nova.network.neutron [None req-070141c4-76cd-4253-8f36-904e93010092 tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] [instance: 3aab3adb-b28b-45dd-880f-b1cfbaeeed0c] Updating instance_info_cache with network_info: [{"id": "45a2d062-302a-4749-81e8-8a16c4726fe2", "address": "fa:16:3e:8d:26:8f", "network": {"id": "61c38727-028c-44d6-ad51-8b8fe4c3b789", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-1533919301-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bde061b2dd1945bdae6c789325741ade", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5c7821ea-f92f-4f06-a4cb-05e1186a9d22", "external-id": "nsx-vlan-transportzone-69", "segmentation_id": 69, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap45a2d062-30", "ovs_interfaceid": "45a2d062-302a-4749-81e8-8a16c4726fe2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, 
"meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1456.126348] env[62519]: DEBUG nova.compute.manager [req-7bf0ff24-22a1-4dd0-981b-e7889b38895d req-dcbc9af5-4de3-484d-9c73-48efae58da0a service nova] [instance: 3aab3adb-b28b-45dd-880f-b1cfbaeeed0c] Received event network-vif-plugged-45a2d062-302a-4749-81e8-8a16c4726fe2 {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1456.126607] env[62519]: DEBUG oslo_concurrency.lockutils [req-7bf0ff24-22a1-4dd0-981b-e7889b38895d req-dcbc9af5-4de3-484d-9c73-48efae58da0a service nova] Acquiring lock "3aab3adb-b28b-45dd-880f-b1cfbaeeed0c-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1456.126822] env[62519]: DEBUG oslo_concurrency.lockutils [req-7bf0ff24-22a1-4dd0-981b-e7889b38895d req-dcbc9af5-4de3-484d-9c73-48efae58da0a service nova] Lock "3aab3adb-b28b-45dd-880f-b1cfbaeeed0c-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1456.126990] env[62519]: DEBUG oslo_concurrency.lockutils [req-7bf0ff24-22a1-4dd0-981b-e7889b38895d req-dcbc9af5-4de3-484d-9c73-48efae58da0a service nova] Lock "3aab3adb-b28b-45dd-880f-b1cfbaeeed0c-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1456.127181] env[62519]: DEBUG nova.compute.manager [req-7bf0ff24-22a1-4dd0-981b-e7889b38895d req-dcbc9af5-4de3-484d-9c73-48efae58da0a service nova] [instance: 3aab3adb-b28b-45dd-880f-b1cfbaeeed0c] No waiting events found dispatching network-vif-plugged-45a2d062-302a-4749-81e8-8a16c4726fe2 {{(pid=62519) pop_instance_event /opt/stack/nova/nova/compute/manager.py:323}} [ 1456.127350] env[62519]: WARNING nova.compute.manager [req-7bf0ff24-22a1-4dd0-981b-e7889b38895d req-dcbc9af5-4de3-484d-9c73-48efae58da0a service nova] [instance: 3aab3adb-b28b-45dd-880f-b1cfbaeeed0c] Received unexpected event network-vif-plugged-45a2d062-302a-4749-81e8-8a16c4726fe2 for instance with vm_state building and task_state spawning. [ 1456.127516] env[62519]: DEBUG nova.compute.manager [req-7bf0ff24-22a1-4dd0-981b-e7889b38895d req-dcbc9af5-4de3-484d-9c73-48efae58da0a service nova] [instance: 3aab3adb-b28b-45dd-880f-b1cfbaeeed0c] Received event network-changed-45a2d062-302a-4749-81e8-8a16c4726fe2 {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1456.127676] env[62519]: DEBUG nova.compute.manager [req-7bf0ff24-22a1-4dd0-981b-e7889b38895d req-dcbc9af5-4de3-484d-9c73-48efae58da0a service nova] [instance: 3aab3adb-b28b-45dd-880f-b1cfbaeeed0c] Refreshing instance network info cache due to event network-changed-45a2d062-302a-4749-81e8-8a16c4726fe2. 
{{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11628}} [ 1456.127842] env[62519]: DEBUG oslo_concurrency.lockutils [req-7bf0ff24-22a1-4dd0-981b-e7889b38895d req-dcbc9af5-4de3-484d-9c73-48efae58da0a service nova] Acquiring lock "refresh_cache-3aab3adb-b28b-45dd-880f-b1cfbaeeed0c" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1456.254486] env[62519]: DEBUG oslo_vmware.api [None req-d99a69b3-9d56-41a5-a9b3-088dd2dc5654 tempest-ServerShowV247Test-1912217433 tempest-ServerShowV247Test-1912217433-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]5233bcbf-f8c3-82f7-3fab-7d0407c32554, 'name': SearchDatastore_Task, 'duration_secs': 0.029835} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1456.254783] env[62519]: DEBUG oslo_concurrency.lockutils [None req-d99a69b3-9d56-41a5-a9b3-088dd2dc5654 tempest-ServerShowV247Test-1912217433 tempest-ServerShowV247Test-1912217433-project-member] Releasing lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1456.255034] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-d99a69b3-9d56-41a5-a9b3-088dd2dc5654 tempest-ServerShowV247Test-1912217433 tempest-ServerShowV247Test-1912217433-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk to [datastore1] 49221ea3-d457-4cf5-97a9-9ae74c4e86fb/49221ea3-d457-4cf5-97a9-9ae74c4e86fb.vmdk {{(pid=62519) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1456.255314] env[62519]: DEBUG oslo_concurrency.lockutils [None req-070141c4-76cd-4253-8f36-904e93010092 tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] Acquired lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1456.255534] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-070141c4-76cd-4253-8f36-904e93010092 tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62519) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1456.256112] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f2e0dcf7-ae21-4bfc-afe7-886568bbcd97 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1456.260166] env[62519]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-8f5f72ac-8d3f-4cef-8f7c-ac97c028aed4 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1456.264964] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1456.268280] env[62519]: DEBUG 
oslo_vmware.api [None req-d99a69b3-9d56-41a5-a9b3-088dd2dc5654 tempest-ServerShowV247Test-1912217433 tempest-ServerShowV247Test-1912217433-project-member] Waiting for the task: (returnval){ [ 1456.268280] env[62519]: value = "task-1802098" [ 1456.268280] env[62519]: _type = "Task" [ 1456.268280] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1456.273366] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-070141c4-76cd-4253-8f36-904e93010092 tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62519) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1456.273472] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-070141c4-76cd-4253-8f36-904e93010092 tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62519) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1456.277133] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-432f19bf-55ae-41cc-a5e0-f2b1d01993f2 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1456.279581] env[62519]: DEBUG nova.compute.manager [None req-a1c40e59-33e7-4dc4-932b-b210029267f9 tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] [instance: f59a31e4-7fb9-4de7-b35f-da811a305f85] Starting instance... {{(pid=62519) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2441}} [ 1456.287079] env[62519]: DEBUG oslo_vmware.api [None req-d99a69b3-9d56-41a5-a9b3-088dd2dc5654 tempest-ServerShowV247Test-1912217433 tempest-ServerShowV247Test-1912217433-project-member] Task: {'id': task-1802098, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1456.291312] env[62519]: DEBUG oslo_vmware.api [None req-070141c4-76cd-4253-8f36-904e93010092 tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] Waiting for the task: (returnval){ [ 1456.291312] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]521e7be2-53ab-f956-4e0b-a61114cefd90" [ 1456.291312] env[62519]: _type = "Task" [ 1456.291312] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1456.303844] env[62519]: DEBUG oslo_vmware.api [None req-070141c4-76cd-4253-8f36-904e93010092 tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]521e7be2-53ab-f956-4e0b-a61114cefd90, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1456.336278] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3473396c-65ea-4aca-bd1e-936ca88da024 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1456.346101] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-133279ea-a62e-4dcb-80e6-00faeba96bda {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1456.380827] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19024aee-b474-4024-86ed-e3cee0b8bce1 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1456.389304] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07c6281a-1434-42df-9404-1b4ab5343f9e {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1456.405681] env[62519]: DEBUG nova.compute.provider_tree [None req-fd469e3e-0f07-46c3-9403-6ec88e595d61 tempest-ServersTestJSON-192126600 tempest-ServersTestJSON-192126600-project-member] Inventory has not changed in ProviderTree for provider: f8ca0d98-9158-4b85-ae0e-b106f966dd44 {{(pid=62519) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1456.615298] env[62519]: DEBUG oslo_concurrency.lockutils [None req-070141c4-76cd-4253-8f36-904e93010092 tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] Releasing lock "refresh_cache-3aab3adb-b28b-45dd-880f-b1cfbaeeed0c" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1456.615730] env[62519]: DEBUG nova.compute.manager [None req-070141c4-76cd-4253-8f36-904e93010092 tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] [instance: 3aab3adb-b28b-45dd-880f-b1cfbaeeed0c] Instance network_info: |[{"id": "45a2d062-302a-4749-81e8-8a16c4726fe2", "address": "fa:16:3e:8d:26:8f", "network": {"id": "61c38727-028c-44d6-ad51-8b8fe4c3b789", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-1533919301-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bde061b2dd1945bdae6c789325741ade", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5c7821ea-f92f-4f06-a4cb-05e1186a9d22", "external-id": "nsx-vlan-transportzone-69", "segmentation_id": 69, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap45a2d062-30", "ovs_interfaceid": "45a2d062-302a-4749-81e8-8a16c4726fe2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62519) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2004}} [ 1456.616072] env[62519]: DEBUG 
oslo_concurrency.lockutils [req-7bf0ff24-22a1-4dd0-981b-e7889b38895d req-dcbc9af5-4de3-484d-9c73-48efae58da0a service nova] Acquired lock "refresh_cache-3aab3adb-b28b-45dd-880f-b1cfbaeeed0c" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1456.616260] env[62519]: DEBUG nova.network.neutron [req-7bf0ff24-22a1-4dd0-981b-e7889b38895d req-dcbc9af5-4de3-484d-9c73-48efae58da0a service nova] [instance: 3aab3adb-b28b-45dd-880f-b1cfbaeeed0c] Refreshing network info cache for port 45a2d062-302a-4749-81e8-8a16c4726fe2 {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1456.618100] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-070141c4-76cd-4253-8f36-904e93010092 tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] [instance: 3aab3adb-b28b-45dd-880f-b1cfbaeeed0c] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:8d:26:8f', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '5c7821ea-f92f-4f06-a4cb-05e1186a9d22', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '45a2d062-302a-4749-81e8-8a16c4726fe2', 'vif_model': 'vmxnet3'}] {{(pid=62519) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1456.626954] env[62519]: DEBUG oslo.service.loopingcall [None req-070141c4-76cd-4253-8f36-904e93010092 tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62519) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1456.627774] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3aab3adb-b28b-45dd-880f-b1cfbaeeed0c] Creating VM on the ESX host {{(pid=62519) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1456.628339] env[62519]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-ba27d04b-0c09-475b-8db6-0f6636a11c74 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1456.649868] env[62519]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1456.649868] env[62519]: value = "task-1802099" [ 1456.649868] env[62519]: _type = "Task" [ 1456.649868] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1456.659330] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1802099, 'name': CreateVM_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1456.780150] env[62519]: DEBUG oslo_vmware.api [None req-d99a69b3-9d56-41a5-a9b3-088dd2dc5654 tempest-ServerShowV247Test-1912217433 tempest-ServerShowV247Test-1912217433-project-member] Task: {'id': task-1802098, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1456.804287] env[62519]: DEBUG oslo_vmware.api [None req-070141c4-76cd-4253-8f36-904e93010092 tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]521e7be2-53ab-f956-4e0b-a61114cefd90, 'name': SearchDatastore_Task, 'duration_secs': 0.010576} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1456.805013] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b153d382-4446-4154-87f7-c1f567bda36c {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1456.811140] env[62519]: DEBUG oslo_vmware.api [None req-070141c4-76cd-4253-8f36-904e93010092 tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] Waiting for the task: (returnval){ [ 1456.811140] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]520ebc78-dc67-8c79-a9f4-73171d19a33d" [ 1456.811140] env[62519]: _type = "Task" [ 1456.811140] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1456.817315] env[62519]: DEBUG oslo_concurrency.lockutils [None req-a1c40e59-33e7-4dc4-932b-b210029267f9 tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1456.820549] env[62519]: DEBUG oslo_vmware.api [None req-070141c4-76cd-4253-8f36-904e93010092 tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]520ebc78-dc67-8c79-a9f4-73171d19a33d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1456.909433] env[62519]: DEBUG nova.scheduler.client.report [None req-fd469e3e-0f07-46c3-9403-6ec88e595d61 tempest-ServersTestJSON-192126600 tempest-ServersTestJSON-192126600-project-member] Inventory has not changed for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1457.161297] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1802099, 'name': CreateVM_Task} progress is 25%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1457.279828] env[62519]: DEBUG oslo_vmware.api [None req-d99a69b3-9d56-41a5-a9b3-088dd2dc5654 tempest-ServerShowV247Test-1912217433 tempest-ServerShowV247Test-1912217433-project-member] Task: {'id': task-1802098, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.69501} completed successfully. 
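The "Inventory has not changed" line above means the scheduler report client compared the freshly gathered provider inventory with what it last pushed to Placement and found nothing to update. Below is a minimal, purely illustrative comparison of that decision; the function and variable names are hypothetical, not Nova's, and the inventory dict is copied from the log line above.

    # Hypothetical sketch: decide whether an inventory update needs to be sent
    # to Placement by comparing the last-reported data with the current data.
    def changed_resource_classes(reported, current):
        """Return the resource classes whose inventory fields differ."""
        classes = set(reported) | set(current)
        return sorted(rc for rc in classes if reported.get(rc) != current.get(rc))

    reported = {
        'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16,
                 'step_size': 1, 'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1,
                      'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0},
        'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158,
                    'step_size': 1, 'allocation_ratio': 1.0},
    }
    current = {rc: dict(fields) for rc, fields in reported.items()}  # unchanged snapshot

    if not changed_resource_classes(reported, current):
        print('Inventory has not changed for provider '
              'f8ca0d98-9158-4b85-ae0e-b106f966dd44; skipping update')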
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1457.280100] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-d99a69b3-9d56-41a5-a9b3-088dd2dc5654 tempest-ServerShowV247Test-1912217433 tempest-ServerShowV247Test-1912217433-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk to [datastore1] 49221ea3-d457-4cf5-97a9-9ae74c4e86fb/49221ea3-d457-4cf5-97a9-9ae74c4e86fb.vmdk {{(pid=62519) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1457.280309] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-d99a69b3-9d56-41a5-a9b3-088dd2dc5654 tempest-ServerShowV247Test-1912217433 tempest-ServerShowV247Test-1912217433-project-member] [instance: 49221ea3-d457-4cf5-97a9-9ae74c4e86fb] Extending root virtual disk to 1048576 {{(pid=62519) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1457.280547] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-49eb7935-82bb-46e1-915b-2719e701e933 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1457.287024] env[62519]: DEBUG oslo_vmware.api [None req-d99a69b3-9d56-41a5-a9b3-088dd2dc5654 tempest-ServerShowV247Test-1912217433 tempest-ServerShowV247Test-1912217433-project-member] Waiting for the task: (returnval){ [ 1457.287024] env[62519]: value = "task-1802100" [ 1457.287024] env[62519]: _type = "Task" [ 1457.287024] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1457.295675] env[62519]: DEBUG oslo_vmware.api [None req-d99a69b3-9d56-41a5-a9b3-088dd2dc5654 tempest-ServerShowV247Test-1912217433 tempest-ServerShowV247Test-1912217433-project-member] Task: {'id': task-1802100, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1457.322386] env[62519]: DEBUG oslo_vmware.api [None req-070141c4-76cd-4253-8f36-904e93010092 tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]520ebc78-dc67-8c79-a9f4-73171d19a33d, 'name': SearchDatastore_Task, 'duration_secs': 0.079968} completed successfully. 
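The CopyVirtualDisk_Task / ExtendVirtualDisk_Task sequence above (copy the cached image VMDK into the instance directory, then grow the root disk to the flavor size) follows the usual oslo.vmware task pattern: invoke the vSphere task, then block until it completes, which is what produces the repeated "progress is N%" polling lines. The sketch below is an assumption-laden illustration of that pattern, not Nova's vm_util code: the vCenter address and credentials are placeholders, the datacenter lookup assumes a single-datacenter devstack setup, and the datastore paths and 1048576 KB target size are taken from the log.

    # Illustrative only: drive the disk copy/extend tasks with oslo.vmware and
    # block on completion the same way wait_for_task polls "progress is N%".
    from oslo_vmware import api as vmware_api
    from oslo_vmware import vim_util

    session = vmware_api.VMwareAPISession(
        'vc.example.org', 'admin', 'secret',          # placeholder vCenter/creds
        api_retry_count=10, task_poll_interval=0.5)

    disk_mgr = session.vim.service_content.virtualDiskManager
    # Grab the first Datacenter managed object reference (single-DC assumption).
    dc_ref = session.invoke_api(vim_util, 'get_objects', session.vim,
                                'Datacenter', 1).objects[0].obj

    src = ('[datastore1] devstack-image-cache_base/'
           '15793716-f1d9-4a86-9030-717adf498693/'
           '15793716-f1d9-4a86-9030-717adf498693.vmdk')
    dst = ('[datastore1] 56790738-4759-468a-9f43-f9c2bc2de23a/'
           '56790738-4759-468a-9f43-f9c2bc2de23a.vmdk')

    # Server-side copy of the cached image to the instance's disk.
    copy_task = session.invoke_api(
        session.vim, 'CopyVirtualDisk_Task', disk_mgr,
        sourceName=src, sourceDatacenter=dc_ref,
        destName=dst, destDatacenter=dc_ref)
    session.wait_for_task(copy_task)

    # Grow the copied root disk; root_gb=1 corresponds to 1048576 KB in the log.
    extend_task = session.invoke_api(
        session.vim, 'ExtendVirtualDisk_Task', disk_mgr,
        name=dst, datacenter=dc_ref,
        newCapacityKb=1048576, eagerZero=False)
    session.wait_for_task(extend_task)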
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1457.322386] env[62519]: DEBUG oslo_concurrency.lockutils [None req-070141c4-76cd-4253-8f36-904e93010092 tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] Releasing lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1457.322567] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-070141c4-76cd-4253-8f36-904e93010092 tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk to [datastore1] 56790738-4759-468a-9f43-f9c2bc2de23a/56790738-4759-468a-9f43-f9c2bc2de23a.vmdk {{(pid=62519) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1457.322850] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-0a4a00a8-572c-4276-b27b-876e4718a99f {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1457.330298] env[62519]: DEBUG oslo_vmware.api [None req-070141c4-76cd-4253-8f36-904e93010092 tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] Waiting for the task: (returnval){ [ 1457.330298] env[62519]: value = "task-1802101" [ 1457.330298] env[62519]: _type = "Task" [ 1457.330298] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1457.340116] env[62519]: DEBUG oslo_vmware.api [None req-070141c4-76cd-4253-8f36-904e93010092 tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] Task: {'id': task-1802101, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1457.370540] env[62519]: DEBUG nova.network.neutron [req-7bf0ff24-22a1-4dd0-981b-e7889b38895d req-dcbc9af5-4de3-484d-9c73-48efae58da0a service nova] [instance: 3aab3adb-b28b-45dd-880f-b1cfbaeeed0c] Updated VIF entry in instance network info cache for port 45a2d062-302a-4749-81e8-8a16c4726fe2. 
{{(pid=62519) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1457.370927] env[62519]: DEBUG nova.network.neutron [req-7bf0ff24-22a1-4dd0-981b-e7889b38895d req-dcbc9af5-4de3-484d-9c73-48efae58da0a service nova] [instance: 3aab3adb-b28b-45dd-880f-b1cfbaeeed0c] Updating instance_info_cache with network_info: [{"id": "45a2d062-302a-4749-81e8-8a16c4726fe2", "address": "fa:16:3e:8d:26:8f", "network": {"id": "61c38727-028c-44d6-ad51-8b8fe4c3b789", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-1533919301-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bde061b2dd1945bdae6c789325741ade", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5c7821ea-f92f-4f06-a4cb-05e1186a9d22", "external-id": "nsx-vlan-transportzone-69", "segmentation_id": 69, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap45a2d062-30", "ovs_interfaceid": "45a2d062-302a-4749-81e8-8a16c4726fe2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1457.415326] env[62519]: DEBUG oslo_concurrency.lockutils [None req-fd469e3e-0f07-46c3-9403-6ec88e595d61 tempest-ServersTestJSON-192126600 tempest-ServersTestJSON-192126600-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.655s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1457.415963] env[62519]: DEBUG nova.compute.manager [None req-fd469e3e-0f07-46c3-9403-6ec88e595d61 tempest-ServersTestJSON-192126600 tempest-ServersTestJSON-192126600-project-member] [instance: 4c336ad1-8ce6-4f89-843e-0baae0d0dbda] Start building networks asynchronously for instance. {{(pid=62519) _build_resources /opt/stack/nova/nova/compute/manager.py:2838}} [ 1457.419388] env[62519]: DEBUG oslo_concurrency.lockutils [None req-d6584dc1-dea2-47ef-848a-6ee291fa58db tempest-ServersAdminNegativeTestJSON-2096466474 tempest-ServersAdminNegativeTestJSON-2096466474-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 29.498s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1457.423226] env[62519]: INFO nova.compute.claims [None req-d6584dc1-dea2-47ef-848a-6ee291fa58db tempest-ServersAdminNegativeTestJSON-2096466474 tempest-ServersAdminNegativeTestJSON-2096466474-project-member] [instance: 681ef7a9-3b24-450a-9034-6d30177995d7] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1457.664009] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1802099, 'name': CreateVM_Task} progress is 99%. 
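The instance_info_cache entries above are plain JSON-serializable VIF dicts. The short standalone helper below (not Nova code) shows where the fields the build steps rely on, such as the MAC address, fixed IPs, device name and NSX logical-switch id, sit inside one of those entries; the sample dict is a trimmed copy of the port logged above.

    # Standalone illustration (not part of Nova): pull the commonly used fields
    # out of one cached network_info entry.
    def summarize_vif(vif: dict) -> dict:
        ips = [ip['address']
               for subnet in vif['network']['subnets']
               for ip in subnet['ips']]
        return {
            'port_id': vif['id'],
            'mac': vif['address'],
            'devname': vif['devname'],
            'fixed_ips': ips,
            'nsx_switch': vif['details'].get('nsx-logical-switch-id'),
        }

    # Trimmed copy of the entry logged above for instance 3aab3adb-....
    vif = {
        'id': '45a2d062-302a-4749-81e8-8a16c4726fe2',
        'address': 'fa:16:3e:8d:26:8f',
        'devname': 'tap45a2d062-30',
        'details': {'nsx-logical-switch-id': '5c7821ea-f92f-4f06-a4cb-05e1186a9d22'},
        'network': {'subnets': [{'ips': [{'address': '192.168.128.12'}]}]},
    }
    print(summarize_vif(vif))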
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1457.797554] env[62519]: DEBUG oslo_vmware.api [None req-d99a69b3-9d56-41a5-a9b3-088dd2dc5654 tempest-ServerShowV247Test-1912217433 tempest-ServerShowV247Test-1912217433-project-member] Task: {'id': task-1802100, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.067297} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1457.797871] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-d99a69b3-9d56-41a5-a9b3-088dd2dc5654 tempest-ServerShowV247Test-1912217433 tempest-ServerShowV247Test-1912217433-project-member] [instance: 49221ea3-d457-4cf5-97a9-9ae74c4e86fb] Extended root virtual disk {{(pid=62519) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1457.798786] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94902a64-50a7-4f69-be00-9500f1f125e1 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1457.827922] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-d99a69b3-9d56-41a5-a9b3-088dd2dc5654 tempest-ServerShowV247Test-1912217433 tempest-ServerShowV247Test-1912217433-project-member] [instance: 49221ea3-d457-4cf5-97a9-9ae74c4e86fb] Reconfiguring VM instance instance-00000013 to attach disk [datastore1] 49221ea3-d457-4cf5-97a9-9ae74c4e86fb/49221ea3-d457-4cf5-97a9-9ae74c4e86fb.vmdk or device None with type sparse {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1457.828255] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-eea5cc1a-faad-414b-85f5-d8666772f6dd {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1457.852977] env[62519]: DEBUG oslo_vmware.api [None req-070141c4-76cd-4253-8f36-904e93010092 tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] Task: {'id': task-1802101, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1457.854294] env[62519]: DEBUG oslo_vmware.api [None req-d99a69b3-9d56-41a5-a9b3-088dd2dc5654 tempest-ServerShowV247Test-1912217433 tempest-ServerShowV247Test-1912217433-project-member] Waiting for the task: (returnval){ [ 1457.854294] env[62519]: value = "task-1802102" [ 1457.854294] env[62519]: _type = "Task" [ 1457.854294] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1457.861730] env[62519]: DEBUG oslo_vmware.api [None req-d99a69b3-9d56-41a5-a9b3-088dd2dc5654 tempest-ServerShowV247Test-1912217433 tempest-ServerShowV247Test-1912217433-project-member] Task: {'id': task-1802102, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1457.873552] env[62519]: DEBUG oslo_concurrency.lockutils [req-7bf0ff24-22a1-4dd0-981b-e7889b38895d req-dcbc9af5-4de3-484d-9c73-48efae58da0a service nova] Releasing lock "refresh_cache-3aab3adb-b28b-45dd-880f-b1cfbaeeed0c" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1457.929574] env[62519]: DEBUG nova.compute.utils [None req-fd469e3e-0f07-46c3-9403-6ec88e595d61 tempest-ServersTestJSON-192126600 tempest-ServersTestJSON-192126600-project-member] Using /dev/sd instead of None {{(pid=62519) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1457.931917] env[62519]: DEBUG nova.compute.manager [None req-fd469e3e-0f07-46c3-9403-6ec88e595d61 tempest-ServersTestJSON-192126600 tempest-ServersTestJSON-192126600-project-member] [instance: 4c336ad1-8ce6-4f89-843e-0baae0d0dbda] Allocating IP information in the background. {{(pid=62519) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1989}} [ 1457.932275] env[62519]: DEBUG nova.network.neutron [None req-fd469e3e-0f07-46c3-9403-6ec88e595d61 tempest-ServersTestJSON-192126600 tempest-ServersTestJSON-192126600-project-member] [instance: 4c336ad1-8ce6-4f89-843e-0baae0d0dbda] allocate_for_instance() {{(pid=62519) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1457.980110] env[62519]: DEBUG nova.policy [None req-fd469e3e-0f07-46c3-9403-6ec88e595d61 tempest-ServersTestJSON-192126600 tempest-ServersTestJSON-192126600-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'a2801374c70c44a0b8d850798278595a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '55bcb317741740ab86883dc9d0409fa5', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62519) authorize /opt/stack/nova/nova/policy.py:192}} [ 1458.168687] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1802099, 'name': CreateVM_Task} progress is 99%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1458.266154] env[62519]: DEBUG nova.network.neutron [None req-fd469e3e-0f07-46c3-9403-6ec88e595d61 tempest-ServersTestJSON-192126600 tempest-ServersTestJSON-192126600-project-member] [instance: 4c336ad1-8ce6-4f89-843e-0baae0d0dbda] Successfully created port: 57170226-5721-470d-8c8b-652ddff02ff6 {{(pid=62519) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1458.354171] env[62519]: DEBUG oslo_vmware.api [None req-070141c4-76cd-4253-8f36-904e93010092 tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] Task: {'id': task-1802101, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1458.363244] env[62519]: DEBUG oslo_vmware.api [None req-d99a69b3-9d56-41a5-a9b3-088dd2dc5654 tempest-ServerShowV247Test-1912217433 tempest-ServerShowV247Test-1912217433-project-member] Task: {'id': task-1802102, 'name': ReconfigVM_Task, 'duration_secs': 0.28321} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1458.366086] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-d99a69b3-9d56-41a5-a9b3-088dd2dc5654 tempest-ServerShowV247Test-1912217433 tempest-ServerShowV247Test-1912217433-project-member] [instance: 49221ea3-d457-4cf5-97a9-9ae74c4e86fb] Reconfigured VM instance instance-00000013 to attach disk [datastore1] 49221ea3-d457-4cf5-97a9-9ae74c4e86fb/49221ea3-d457-4cf5-97a9-9ae74c4e86fb.vmdk or device None with type sparse {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1458.366086] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-5b10f77c-4152-46cc-b264-1179d281ebf0 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1458.370628] env[62519]: DEBUG oslo_vmware.api [None req-d99a69b3-9d56-41a5-a9b3-088dd2dc5654 tempest-ServerShowV247Test-1912217433 tempest-ServerShowV247Test-1912217433-project-member] Waiting for the task: (returnval){ [ 1458.370628] env[62519]: value = "task-1802103" [ 1458.370628] env[62519]: _type = "Task" [ 1458.370628] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1458.381899] env[62519]: DEBUG oslo_vmware.api [None req-d99a69b3-9d56-41a5-a9b3-088dd2dc5654 tempest-ServerShowV247Test-1912217433 tempest-ServerShowV247Test-1912217433-project-member] Task: {'id': task-1802103, 'name': Rename_Task} progress is 5%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1458.432782] env[62519]: DEBUG nova.compute.manager [None req-fd469e3e-0f07-46c3-9403-6ec88e595d61 tempest-ServersTestJSON-192126600 tempest-ServersTestJSON-192126600-project-member] [instance: 4c336ad1-8ce6-4f89-843e-0baae0d0dbda] Start building block device mappings for instance. {{(pid=62519) _build_resources /opt/stack/nova/nova/compute/manager.py:2873}} [ 1458.665333] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1802099, 'name': CreateVM_Task, 'duration_secs': 1.726511} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1458.665554] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3aab3adb-b28b-45dd-880f-b1cfbaeeed0c] Created VM on the ESX host {{(pid=62519) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1458.666570] env[62519]: DEBUG oslo_concurrency.lockutils [None req-070141c4-76cd-4253-8f36-904e93010092 tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1458.666730] env[62519]: DEBUG oslo_concurrency.lockutils [None req-070141c4-76cd-4253-8f36-904e93010092 tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] Acquired lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1458.667080] env[62519]: DEBUG oslo_concurrency.lockutils [None req-070141c4-76cd-4253-8f36-904e93010092 tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1458.667358] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b52357ef-d7e4-4ad0-a2ab-5042d0786dd8 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1458.679239] env[62519]: DEBUG oslo_vmware.api [None req-070141c4-76cd-4253-8f36-904e93010092 tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] Waiting for the task: (returnval){ [ 1458.679239] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]52e88e65-5ba5-b9e7-40af-41f998ac057b" [ 1458.679239] env[62519]: _type = "Task" [ 1458.679239] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1458.691271] env[62519]: DEBUG oslo_vmware.api [None req-070141c4-76cd-4253-8f36-904e93010092 tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52e88e65-5ba5-b9e7-40af-41f998ac057b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1458.854409] env[62519]: DEBUG oslo_vmware.api [None req-070141c4-76cd-4253-8f36-904e93010092 tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] Task: {'id': task-1802101, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1458.883380] env[62519]: DEBUG oslo_vmware.api [None req-d99a69b3-9d56-41a5-a9b3-088dd2dc5654 tempest-ServerShowV247Test-1912217433 tempest-ServerShowV247Test-1912217433-project-member] Task: {'id': task-1802103, 'name': Rename_Task, 'duration_secs': 0.127148} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1458.883707] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-d99a69b3-9d56-41a5-a9b3-088dd2dc5654 tempest-ServerShowV247Test-1912217433 tempest-ServerShowV247Test-1912217433-project-member] [instance: 49221ea3-d457-4cf5-97a9-9ae74c4e86fb] Powering on the VM {{(pid=62519) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1458.883967] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-dcaed4b2-590c-4433-89ca-0ac4b8eaf6d8 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1458.894376] env[62519]: DEBUG oslo_vmware.api [None req-d99a69b3-9d56-41a5-a9b3-088dd2dc5654 tempest-ServerShowV247Test-1912217433 tempest-ServerShowV247Test-1912217433-project-member] Waiting for the task: (returnval){ [ 1458.894376] env[62519]: value = "task-1802104" [ 1458.894376] env[62519]: _type = "Task" [ 1458.894376] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1458.906392] env[62519]: DEBUG oslo_vmware.api [None req-d99a69b3-9d56-41a5-a9b3-088dd2dc5654 tempest-ServerShowV247Test-1912217433 tempest-ServerShowV247Test-1912217433-project-member] Task: {'id': task-1802104, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1459.087235] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28d8d34d-4cae-4211-81a8-950a8b22a9d3 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1459.095513] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c88e432e-25b4-41f1-a580-27e9f6722a17 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1459.135667] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a96b9820-c5f6-41e0-84e1-5b7841cb8603 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1459.143691] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7dd60b8-0a54-4625-8e53-686f70aeccac {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1459.163114] env[62519]: DEBUG nova.compute.provider_tree [None req-d6584dc1-dea2-47ef-848a-6ee291fa58db tempest-ServersAdminNegativeTestJSON-2096466474 tempest-ServersAdminNegativeTestJSON-2096466474-project-member] Inventory has not changed in ProviderTree for provider: f8ca0d98-9158-4b85-ae0e-b106f966dd44 {{(pid=62519) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1459.189428] env[62519]: DEBUG oslo_vmware.api [None req-070141c4-76cd-4253-8f36-904e93010092 tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52e88e65-5ba5-b9e7-40af-41f998ac057b, 'name': SearchDatastore_Task, 'duration_secs': 0.091682} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1459.189832] env[62519]: DEBUG oslo_concurrency.lockutils [None req-070141c4-76cd-4253-8f36-904e93010092 tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] Releasing lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1459.190122] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-070141c4-76cd-4253-8f36-904e93010092 tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] [instance: 3aab3adb-b28b-45dd-880f-b1cfbaeeed0c] Processing image 15793716-f1d9-4a86-9030-717adf498693 {{(pid=62519) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1459.190474] env[62519]: DEBUG oslo_concurrency.lockutils [None req-070141c4-76cd-4253-8f36-904e93010092 tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1459.190579] env[62519]: DEBUG oslo_concurrency.lockutils [None req-070141c4-76cd-4253-8f36-904e93010092 tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] Acquired lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1459.190798] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-070141c4-76cd-4253-8f36-904e93010092 tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62519) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1459.191120] env[62519]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-95536854-3aa6-4a7b-9840-f6ac990fbab9 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1459.200521] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-070141c4-76cd-4253-8f36-904e93010092 tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62519) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1459.200756] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-070141c4-76cd-4253-8f36-904e93010092 tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] Folder [datastore1] devstack-image-cache_base created. 
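The Acquiring/Acquired/Releasing lines around the image-cache path come from oslo.concurrency's lockutils, which serializes the fetch-or-reuse of the cached VMDK so concurrent builds on the same node do not copy the same image twice. A minimal sketch of that pattern follows; the lock name mirrors the log, while ensure_cached_image() and fetch_image_to_cache() are hypothetical stand-ins rather than Nova functions.

    # Hedged sketch: serialize work on the per-image cache path with an
    # in-process lock. lockutils logs the Acquiring/Acquired/Releasing DEBUG
    # lines seen above when debug logging is enabled.
    from oslo_concurrency import lockutils

    CACHE_LOCK = ('[datastore1] devstack-image-cache_base/'
                  '15793716-f1d9-4a86-9030-717adf498693')

    def fetch_image_to_cache():
        print('image fetched/copied once per node')  # placeholder body

    def ensure_cached_image():
        with lockutils.lock(CACHE_LOCK):
            fetch_image_to_cache()

    ensure_cached_image()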
{{(pid=62519) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1459.201615] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-90d0134d-88fc-409c-b3db-b1b639739502 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1459.207985] env[62519]: DEBUG oslo_vmware.api [None req-070141c4-76cd-4253-8f36-904e93010092 tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] Waiting for the task: (returnval){ [ 1459.207985] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]5213851e-6b05-62a7-bcab-0c23e7e6b62d" [ 1459.207985] env[62519]: _type = "Task" [ 1459.207985] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1459.221403] env[62519]: DEBUG oslo_vmware.api [None req-070141c4-76cd-4253-8f36-904e93010092 tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]5213851e-6b05-62a7-bcab-0c23e7e6b62d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1459.358881] env[62519]: DEBUG oslo_vmware.api [None req-070141c4-76cd-4253-8f36-904e93010092 tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] Task: {'id': task-1802101, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.64659} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1459.358881] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-070141c4-76cd-4253-8f36-904e93010092 tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk to [datastore1] 56790738-4759-468a-9f43-f9c2bc2de23a/56790738-4759-468a-9f43-f9c2bc2de23a.vmdk {{(pid=62519) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1459.358881] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-070141c4-76cd-4253-8f36-904e93010092 tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] [instance: 56790738-4759-468a-9f43-f9c2bc2de23a] Extending root virtual disk to 1048576 {{(pid=62519) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1459.358881] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-ac828561-925a-4778-84c8-e9293dbdcf8d {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1459.366217] env[62519]: DEBUG oslo_vmware.api [None req-070141c4-76cd-4253-8f36-904e93010092 tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] Waiting for the task: (returnval){ [ 1459.366217] env[62519]: value = "task-1802105" [ 1459.366217] env[62519]: _type = "Task" [ 1459.366217] env[62519]: } to complete. 
{{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1459.375162] env[62519]: DEBUG oslo_vmware.api [None req-070141c4-76cd-4253-8f36-904e93010092 tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] Task: {'id': task-1802105, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1459.404575] env[62519]: DEBUG oslo_vmware.api [None req-d99a69b3-9d56-41a5-a9b3-088dd2dc5654 tempest-ServerShowV247Test-1912217433 tempest-ServerShowV247Test-1912217433-project-member] Task: {'id': task-1802104, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1459.447028] env[62519]: DEBUG nova.compute.manager [None req-fd469e3e-0f07-46c3-9403-6ec88e595d61 tempest-ServersTestJSON-192126600 tempest-ServersTestJSON-192126600-project-member] [instance: 4c336ad1-8ce6-4f89-843e-0baae0d0dbda] Start spawning the instance on the hypervisor. {{(pid=62519) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2647}} [ 1459.473107] env[62519]: DEBUG nova.virt.hardware [None req-fd469e3e-0f07-46c3-9403-6ec88e595d61 tempest-ServersTestJSON-192126600 tempest-ServersTestJSON-192126600-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T07:56:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T07:55:55Z,direct_url=,disk_format='vmdk',id=15793716-f1d9-4a86-9030-717adf498693,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4069337684d348e0a1ab4eb6a1a2b14d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T07:55:56Z,virtual_size=,visibility=), allow threads: False {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1459.473360] env[62519]: DEBUG nova.virt.hardware [None req-fd469e3e-0f07-46c3-9403-6ec88e595d61 tempest-ServersTestJSON-192126600 tempest-ServersTestJSON-192126600-project-member] Flavor limits 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1459.473516] env[62519]: DEBUG nova.virt.hardware [None req-fd469e3e-0f07-46c3-9403-6ec88e595d61 tempest-ServersTestJSON-192126600 tempest-ServersTestJSON-192126600-project-member] Image limits 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1459.473702] env[62519]: DEBUG nova.virt.hardware [None req-fd469e3e-0f07-46c3-9403-6ec88e595d61 tempest-ServersTestJSON-192126600 tempest-ServersTestJSON-192126600-project-member] Flavor pref 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1459.473845] env[62519]: DEBUG nova.virt.hardware [None req-fd469e3e-0f07-46c3-9403-6ec88e595d61 tempest-ServersTestJSON-192126600 tempest-ServersTestJSON-192126600-project-member] Image pref 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1459.473989] env[62519]: DEBUG nova.virt.hardware [None req-fd469e3e-0f07-46c3-9403-6ec88e595d61 
tempest-ServersTestJSON-192126600 tempest-ServersTestJSON-192126600-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1459.475231] env[62519]: DEBUG nova.virt.hardware [None req-fd469e3e-0f07-46c3-9403-6ec88e595d61 tempest-ServersTestJSON-192126600 tempest-ServersTestJSON-192126600-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1459.475411] env[62519]: DEBUG nova.virt.hardware [None req-fd469e3e-0f07-46c3-9403-6ec88e595d61 tempest-ServersTestJSON-192126600 tempest-ServersTestJSON-192126600-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62519) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1459.475808] env[62519]: DEBUG nova.virt.hardware [None req-fd469e3e-0f07-46c3-9403-6ec88e595d61 tempest-ServersTestJSON-192126600 tempest-ServersTestJSON-192126600-project-member] Got 1 possible topologies {{(pid=62519) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1459.475986] env[62519]: DEBUG nova.virt.hardware [None req-fd469e3e-0f07-46c3-9403-6ec88e595d61 tempest-ServersTestJSON-192126600 tempest-ServersTestJSON-192126600-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1459.476184] env[62519]: DEBUG nova.virt.hardware [None req-fd469e3e-0f07-46c3-9403-6ec88e595d61 tempest-ServersTestJSON-192126600 tempest-ServersTestJSON-192126600-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1459.477114] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28599a81-376b-4142-b8d8-405ca01022c6 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1459.485635] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29512b5b-92b8-40ab-b3d6-3785ec04b8ac {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1459.666566] env[62519]: DEBUG nova.scheduler.client.report [None req-d6584dc1-dea2-47ef-848a-6ee291fa58db tempest-ServersAdminNegativeTestJSON-2096466474 tempest-ServersAdminNegativeTestJSON-2096466474-project-member] Inventory has not changed for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1459.722625] env[62519]: DEBUG oslo_vmware.api [None req-070141c4-76cd-4253-8f36-904e93010092 tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] Task: {'id': 
session[520dffeb-48b6-6e75-74d3-efedab77eb43]5213851e-6b05-62a7-bcab-0c23e7e6b62d, 'name': SearchDatastore_Task, 'duration_secs': 0.010759} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1459.723469] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-dc644a4c-5313-42ad-bdf0-87a0ba28f96e {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1459.730188] env[62519]: DEBUG oslo_vmware.api [None req-070141c4-76cd-4253-8f36-904e93010092 tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] Waiting for the task: (returnval){ [ 1459.730188] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]52d2196f-29ef-06cf-9f17-37ce95b64259" [ 1459.730188] env[62519]: _type = "Task" [ 1459.730188] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1459.739617] env[62519]: DEBUG oslo_vmware.api [None req-070141c4-76cd-4253-8f36-904e93010092 tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52d2196f-29ef-06cf-9f17-37ce95b64259, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1459.879979] env[62519]: DEBUG oslo_vmware.api [None req-070141c4-76cd-4253-8f36-904e93010092 tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] Task: {'id': task-1802105, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.069905} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1459.881186] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-070141c4-76cd-4253-8f36-904e93010092 tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] [instance: 56790738-4759-468a-9f43-f9c2bc2de23a] Extended root virtual disk {{(pid=62519) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1459.881186] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9458749e-f922-4036-9895-5edd83cf4c15 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1459.904495] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-070141c4-76cd-4253-8f36-904e93010092 tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] [instance: 56790738-4759-468a-9f43-f9c2bc2de23a] Reconfiguring VM instance instance-00000015 to attach disk [datastore1] 56790738-4759-468a-9f43-f9c2bc2de23a/56790738-4759-468a-9f43-f9c2bc2de23a.vmdk or device None with type sparse {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1459.907638] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-dce667fa-f564-4b03-a6f9-b6e47c5bd790 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1459.931208] env[62519]: DEBUG oslo_vmware.api [None req-d99a69b3-9d56-41a5-a9b3-088dd2dc5654 tempest-ServerShowV247Test-1912217433 tempest-ServerShowV247Test-1912217433-project-member] Task: {'id': task-1802104, 'name': PowerOnVM_Task, 'duration_secs': 0.726285} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1459.932613] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-d99a69b3-9d56-41a5-a9b3-088dd2dc5654 tempest-ServerShowV247Test-1912217433 tempest-ServerShowV247Test-1912217433-project-member] [instance: 49221ea3-d457-4cf5-97a9-9ae74c4e86fb] Powered on the VM {{(pid=62519) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1459.932875] env[62519]: DEBUG nova.compute.manager [None req-d99a69b3-9d56-41a5-a9b3-088dd2dc5654 tempest-ServerShowV247Test-1912217433 tempest-ServerShowV247Test-1912217433-project-member] [instance: 49221ea3-d457-4cf5-97a9-9ae74c4e86fb] Checking state {{(pid=62519) _get_power_state /opt/stack/nova/nova/compute/manager.py:1799}} [ 1459.933231] env[62519]: DEBUG oslo_vmware.api [None req-070141c4-76cd-4253-8f36-904e93010092 tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] Waiting for the task: (returnval){ [ 1459.933231] env[62519]: value = "task-1802106" [ 1459.933231] env[62519]: _type = "Task" [ 1459.933231] env[62519]: } to complete. 
{{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1459.933928] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3484a23-653d-494d-aa9f-6a8bdbb79776 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1459.944009] env[62519]: DEBUG nova.compute.manager [req-1aa28b99-d515-4be6-83d7-83de6a25c5c5 req-ceceea09-93fd-41e6-8142-21c2308c3497 service nova] [instance: 4c336ad1-8ce6-4f89-843e-0baae0d0dbda] Received event network-vif-plugged-57170226-5721-470d-8c8b-652ddff02ff6 {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1459.946561] env[62519]: DEBUG oslo_concurrency.lockutils [req-1aa28b99-d515-4be6-83d7-83de6a25c5c5 req-ceceea09-93fd-41e6-8142-21c2308c3497 service nova] Acquiring lock "4c336ad1-8ce6-4f89-843e-0baae0d0dbda-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1459.946803] env[62519]: DEBUG oslo_concurrency.lockutils [req-1aa28b99-d515-4be6-83d7-83de6a25c5c5 req-ceceea09-93fd-41e6-8142-21c2308c3497 service nova] Lock "4c336ad1-8ce6-4f89-843e-0baae0d0dbda-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1459.947429] env[62519]: DEBUG oslo_concurrency.lockutils [req-1aa28b99-d515-4be6-83d7-83de6a25c5c5 req-ceceea09-93fd-41e6-8142-21c2308c3497 service nova] Lock "4c336ad1-8ce6-4f89-843e-0baae0d0dbda-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1459.947429] env[62519]: DEBUG nova.compute.manager [req-1aa28b99-d515-4be6-83d7-83de6a25c5c5 req-ceceea09-93fd-41e6-8142-21c2308c3497 service nova] [instance: 4c336ad1-8ce6-4f89-843e-0baae0d0dbda] No waiting events found dispatching network-vif-plugged-57170226-5721-470d-8c8b-652ddff02ff6 {{(pid=62519) pop_instance_event /opt/stack/nova/nova/compute/manager.py:323}} [ 1459.947429] env[62519]: WARNING nova.compute.manager [req-1aa28b99-d515-4be6-83d7-83de6a25c5c5 req-ceceea09-93fd-41e6-8142-21c2308c3497 service nova] [instance: 4c336ad1-8ce6-4f89-843e-0baae0d0dbda] Received unexpected event network-vif-plugged-57170226-5721-470d-8c8b-652ddff02ff6 for instance with vm_state building and task_state spawning. [ 1459.957606] env[62519]: DEBUG oslo_vmware.api [None req-070141c4-76cd-4253-8f36-904e93010092 tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] Task: {'id': task-1802106, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1460.040590] env[62519]: DEBUG nova.network.neutron [None req-fd469e3e-0f07-46c3-9403-6ec88e595d61 tempest-ServersTestJSON-192126600 tempest-ServersTestJSON-192126600-project-member] [instance: 4c336ad1-8ce6-4f89-843e-0baae0d0dbda] Successfully updated port: 57170226-5721-470d-8c8b-652ddff02ff6 {{(pid=62519) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1460.172544] env[62519]: DEBUG oslo_concurrency.lockutils [None req-d6584dc1-dea2-47ef-848a-6ee291fa58db tempest-ServersAdminNegativeTestJSON-2096466474 tempest-ServersAdminNegativeTestJSON-2096466474-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.753s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1460.173107] env[62519]: DEBUG nova.compute.manager [None req-d6584dc1-dea2-47ef-848a-6ee291fa58db tempest-ServersAdminNegativeTestJSON-2096466474 tempest-ServersAdminNegativeTestJSON-2096466474-project-member] [instance: 681ef7a9-3b24-450a-9034-6d30177995d7] Start building networks asynchronously for instance. {{(pid=62519) _build_resources /opt/stack/nova/nova/compute/manager.py:2838}} [ 1460.177372] env[62519]: DEBUG oslo_concurrency.lockutils [None req-e8c6971a-d3e1-4194-9146-e4c243998a24 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 31.729s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1460.177372] env[62519]: DEBUG nova.objects.instance [None req-e8c6971a-d3e1-4194-9146-e4c243998a24 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Lazy-loading 'resources' on Instance uuid 31f55ece-82e6-40ad-ad7c-1af645f307bf {{(pid=62519) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1460.241402] env[62519]: DEBUG oslo_vmware.api [None req-070141c4-76cd-4253-8f36-904e93010092 tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52d2196f-29ef-06cf-9f17-37ce95b64259, 'name': SearchDatastore_Task, 'duration_secs': 0.043796} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1460.241655] env[62519]: DEBUG oslo_concurrency.lockutils [None req-070141c4-76cd-4253-8f36-904e93010092 tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] Releasing lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1460.241912] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-070141c4-76cd-4253-8f36-904e93010092 tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk to [datastore1] 3aab3adb-b28b-45dd-880f-b1cfbaeeed0c/3aab3adb-b28b-45dd-880f-b1cfbaeeed0c.vmdk {{(pid=62519) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1460.242193] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-122cf4ea-04af-4c84-9e4d-4e135b9807bd {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1460.249799] env[62519]: DEBUG oslo_vmware.api [None req-070141c4-76cd-4253-8f36-904e93010092 tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] Waiting for the task: (returnval){ [ 1460.249799] env[62519]: value = "task-1802107" [ 1460.249799] env[62519]: _type = "Task" [ 1460.249799] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1460.257727] env[62519]: DEBUG oslo_vmware.api [None req-070141c4-76cd-4253-8f36-904e93010092 tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] Task: {'id': task-1802107, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1460.446705] env[62519]: DEBUG oslo_vmware.api [None req-070141c4-76cd-4253-8f36-904e93010092 tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] Task: {'id': task-1802106, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1460.467092] env[62519]: DEBUG oslo_concurrency.lockutils [None req-d99a69b3-9d56-41a5-a9b3-088dd2dc5654 tempest-ServerShowV247Test-1912217433 tempest-ServerShowV247Test-1912217433-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1460.545164] env[62519]: DEBUG oslo_concurrency.lockutils [None req-fd469e3e-0f07-46c3-9403-6ec88e595d61 tempest-ServersTestJSON-192126600 tempest-ServersTestJSON-192126600-project-member] Acquiring lock "refresh_cache-4c336ad1-8ce6-4f89-843e-0baae0d0dbda" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1460.545297] env[62519]: DEBUG oslo_concurrency.lockutils [None req-fd469e3e-0f07-46c3-9403-6ec88e595d61 tempest-ServersTestJSON-192126600 tempest-ServersTestJSON-192126600-project-member] Acquired lock "refresh_cache-4c336ad1-8ce6-4f89-843e-0baae0d0dbda" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1460.545496] env[62519]: DEBUG nova.network.neutron [None req-fd469e3e-0f07-46c3-9403-6ec88e595d61 tempest-ServersTestJSON-192126600 tempest-ServersTestJSON-192126600-project-member] [instance: 4c336ad1-8ce6-4f89-843e-0baae0d0dbda] Building network info cache for instance {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1460.680662] env[62519]: DEBUG nova.compute.utils [None req-d6584dc1-dea2-47ef-848a-6ee291fa58db tempest-ServersAdminNegativeTestJSON-2096466474 tempest-ServersAdminNegativeTestJSON-2096466474-project-member] Using /dev/sd instead of None {{(pid=62519) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1460.689794] env[62519]: DEBUG nova.compute.manager [None req-d6584dc1-dea2-47ef-848a-6ee291fa58db tempest-ServersAdminNegativeTestJSON-2096466474 tempest-ServersAdminNegativeTestJSON-2096466474-project-member] [instance: 681ef7a9-3b24-450a-9034-6d30177995d7] Allocating IP information in the background. {{(pid=62519) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1989}} [ 1460.690489] env[62519]: DEBUG nova.network.neutron [None req-d6584dc1-dea2-47ef-848a-6ee291fa58db tempest-ServersAdminNegativeTestJSON-2096466474 tempest-ServersAdminNegativeTestJSON-2096466474-project-member] [instance: 681ef7a9-3b24-450a-9034-6d30177995d7] allocate_for_instance() {{(pid=62519) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1460.761033] env[62519]: DEBUG oslo_vmware.api [None req-070141c4-76cd-4253-8f36-904e93010092 tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] Task: {'id': task-1802107, 'name': CopyVirtualDisk_Task} progress is 25%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1460.771405] env[62519]: DEBUG nova.policy [None req-d6584dc1-dea2-47ef-848a-6ee291fa58db tempest-ServersAdminNegativeTestJSON-2096466474 tempest-ServersAdminNegativeTestJSON-2096466474-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ba6b3804236d423cb5b9590468e6bf89', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'edcdc78fe2504bebb5c834930b20d32e', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62519) authorize /opt/stack/nova/nova/policy.py:192}} [ 1460.952969] env[62519]: DEBUG oslo_vmware.api [None req-070141c4-76cd-4253-8f36-904e93010092 tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] Task: {'id': task-1802106, 'name': ReconfigVM_Task, 'duration_secs': 0.842818} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1460.957446] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-070141c4-76cd-4253-8f36-904e93010092 tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] [instance: 56790738-4759-468a-9f43-f9c2bc2de23a] Reconfigured VM instance instance-00000015 to attach disk [datastore1] 56790738-4759-468a-9f43-f9c2bc2de23a/56790738-4759-468a-9f43-f9c2bc2de23a.vmdk or device None with type sparse {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1460.959185] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-8d66aa77-b3e7-4e5e-8d92-83112b3f5366 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1460.970547] env[62519]: DEBUG oslo_vmware.api [None req-070141c4-76cd-4253-8f36-904e93010092 tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] Waiting for the task: (returnval){ [ 1460.970547] env[62519]: value = "task-1802108" [ 1460.970547] env[62519]: _type = "Task" [ 1460.970547] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1460.990916] env[62519]: DEBUG oslo_vmware.api [None req-070141c4-76cd-4253-8f36-904e93010092 tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] Task: {'id': task-1802108, 'name': Rename_Task} progress is 6%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1461.100301] env[62519]: DEBUG nova.network.neutron [None req-fd469e3e-0f07-46c3-9403-6ec88e595d61 tempest-ServersTestJSON-192126600 tempest-ServersTestJSON-192126600-project-member] [instance: 4c336ad1-8ce6-4f89-843e-0baae0d0dbda] Instance cache missing network info. 
{{(pid=62519) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1461.146245] env[62519]: DEBUG nova.network.neutron [None req-d6584dc1-dea2-47ef-848a-6ee291fa58db tempest-ServersAdminNegativeTestJSON-2096466474 tempest-ServersAdminNegativeTestJSON-2096466474-project-member] [instance: 681ef7a9-3b24-450a-9034-6d30177995d7] Successfully created port: 0c70e934-3a08-4103-95df-ba8f3db80e91 {{(pid=62519) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1461.190299] env[62519]: DEBUG nova.compute.manager [None req-d6584dc1-dea2-47ef-848a-6ee291fa58db tempest-ServersAdminNegativeTestJSON-2096466474 tempest-ServersAdminNegativeTestJSON-2096466474-project-member] [instance: 681ef7a9-3b24-450a-9034-6d30177995d7] Start building block device mappings for instance. {{(pid=62519) _build_resources /opt/stack/nova/nova/compute/manager.py:2873}} [ 1461.265484] env[62519]: DEBUG oslo_vmware.api [None req-070141c4-76cd-4253-8f36-904e93010092 tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] Task: {'id': task-1802107, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.771155} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1461.266106] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-070141c4-76cd-4253-8f36-904e93010092 tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk to [datastore1] 3aab3adb-b28b-45dd-880f-b1cfbaeeed0c/3aab3adb-b28b-45dd-880f-b1cfbaeeed0c.vmdk {{(pid=62519) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1461.266575] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-070141c4-76cd-4253-8f36-904e93010092 tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] [instance: 3aab3adb-b28b-45dd-880f-b1cfbaeeed0c] Extending root virtual disk to 1048576 {{(pid=62519) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1461.267700] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-7a98a828-42df-41c5-92e9-488e32cec345 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1461.274609] env[62519]: DEBUG oslo_vmware.api [None req-070141c4-76cd-4253-8f36-904e93010092 tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] Waiting for the task: (returnval){ [ 1461.274609] env[62519]: value = "task-1802109" [ 1461.274609] env[62519]: _type = "Task" [ 1461.274609] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1461.287464] env[62519]: DEBUG oslo_vmware.api [None req-070141c4-76cd-4253-8f36-904e93010092 tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] Task: {'id': task-1802109, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1461.378198] env[62519]: DEBUG nova.network.neutron [None req-fd469e3e-0f07-46c3-9403-6ec88e595d61 tempest-ServersTestJSON-192126600 tempest-ServersTestJSON-192126600-project-member] [instance: 4c336ad1-8ce6-4f89-843e-0baae0d0dbda] Updating instance_info_cache with network_info: [{"id": "57170226-5721-470d-8c8b-652ddff02ff6", "address": "fa:16:3e:bd:61:12", "network": {"id": "aa9fff60-aac4-4eb8-8e44-f3d41384fdd4", "bridge": "br-int", "label": "tempest-ServersTestJSON-287335508-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "55bcb317741740ab86883dc9d0409fa5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6d62c1cf-f39a-4626-9552-f1e13c692636", "external-id": "nsx-vlan-transportzone-748", "segmentation_id": 748, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap57170226-57", "ovs_interfaceid": "57170226-5721-470d-8c8b-652ddff02ff6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1461.387114] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79595a1e-23a1-434e-b3d5-e5d0bc1d695f {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1461.398918] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a064cd60-dc37-4265-a412-500ebcab6887 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1461.431353] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c8d6089-4a2e-44f0-a67a-cafc614013df {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1461.440128] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2ead1b3-fa3a-4d16-9dcd-14647f41ff4d {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1461.443981] env[62519]: DEBUG oslo_concurrency.lockutils [None req-c79e11bf-bb5b-47ac-8984-42477fd61aad tempest-ServerShowV247Test-1912217433 tempest-ServerShowV247Test-1912217433-project-member] Acquiring lock "49221ea3-d457-4cf5-97a9-9ae74c4e86fb" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1461.444321] env[62519]: DEBUG oslo_concurrency.lockutils [None req-c79e11bf-bb5b-47ac-8984-42477fd61aad tempest-ServerShowV247Test-1912217433 tempest-ServerShowV247Test-1912217433-project-member] Lock "49221ea3-d457-4cf5-97a9-9ae74c4e86fb" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62519) 
inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1461.444485] env[62519]: DEBUG oslo_concurrency.lockutils [None req-c79e11bf-bb5b-47ac-8984-42477fd61aad tempest-ServerShowV247Test-1912217433 tempest-ServerShowV247Test-1912217433-project-member] Acquiring lock "49221ea3-d457-4cf5-97a9-9ae74c4e86fb-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1461.444667] env[62519]: DEBUG oslo_concurrency.lockutils [None req-c79e11bf-bb5b-47ac-8984-42477fd61aad tempest-ServerShowV247Test-1912217433 tempest-ServerShowV247Test-1912217433-project-member] Lock "49221ea3-d457-4cf5-97a9-9ae74c4e86fb-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1461.444827] env[62519]: DEBUG oslo_concurrency.lockutils [None req-c79e11bf-bb5b-47ac-8984-42477fd61aad tempest-ServerShowV247Test-1912217433 tempest-ServerShowV247Test-1912217433-project-member] Lock "49221ea3-d457-4cf5-97a9-9ae74c4e86fb-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1461.447978] env[62519]: INFO nova.compute.manager [None req-c79e11bf-bb5b-47ac-8984-42477fd61aad tempest-ServerShowV247Test-1912217433 tempest-ServerShowV247Test-1912217433-project-member] [instance: 49221ea3-d457-4cf5-97a9-9ae74c4e86fb] Terminating instance [ 1461.457800] env[62519]: DEBUG nova.compute.provider_tree [None req-e8c6971a-d3e1-4194-9146-e4c243998a24 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Inventory has not changed in ProviderTree for provider: f8ca0d98-9158-4b85-ae0e-b106f966dd44 {{(pid=62519) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1461.480860] env[62519]: DEBUG oslo_vmware.api [None req-070141c4-76cd-4253-8f36-904e93010092 tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] Task: {'id': task-1802108, 'name': Rename_Task, 'duration_secs': 0.17365} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1461.483356] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-070141c4-76cd-4253-8f36-904e93010092 tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] [instance: 56790738-4759-468a-9f43-f9c2bc2de23a] Powering on the VM {{(pid=62519) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1461.483356] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c8d2ad3b-64b6-4568-82a3-ab52c0d6615a {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1461.488440] env[62519]: DEBUG oslo_vmware.api [None req-070141c4-76cd-4253-8f36-904e93010092 tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] Waiting for the task: (returnval){ [ 1461.488440] env[62519]: value = "task-1802113" [ 1461.488440] env[62519]: _type = "Task" [ 1461.488440] env[62519]: } to complete. 
{{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1461.498790] env[62519]: DEBUG oslo_vmware.api [None req-070141c4-76cd-4253-8f36-904e93010092 tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] Task: {'id': task-1802113, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1461.788511] env[62519]: DEBUG oslo_vmware.api [None req-070141c4-76cd-4253-8f36-904e93010092 tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] Task: {'id': task-1802109, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.073446} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1461.788782] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-070141c4-76cd-4253-8f36-904e93010092 tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] [instance: 3aab3adb-b28b-45dd-880f-b1cfbaeeed0c] Extended root virtual disk {{(pid=62519) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1461.792746] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2dd137ec-ffc5-4376-88e5-b3ff6bc0110d {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1461.814728] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-070141c4-76cd-4253-8f36-904e93010092 tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] [instance: 3aab3adb-b28b-45dd-880f-b1cfbaeeed0c] Reconfiguring VM instance instance-00000016 to attach disk [datastore1] 3aab3adb-b28b-45dd-880f-b1cfbaeeed0c/3aab3adb-b28b-45dd-880f-b1cfbaeeed0c.vmdk or device None with type sparse {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1461.815018] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-bc27b30a-ee8a-4b05-a574-316fcc072f5b {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1461.835253] env[62519]: DEBUG oslo_vmware.api [None req-070141c4-76cd-4253-8f36-904e93010092 tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] Waiting for the task: (returnval){ [ 1461.835253] env[62519]: value = "task-1802115" [ 1461.835253] env[62519]: _type = "Task" [ 1461.835253] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1461.843972] env[62519]: DEBUG oslo_vmware.api [None req-070141c4-76cd-4253-8f36-904e93010092 tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] Task: {'id': task-1802115, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1461.881317] env[62519]: DEBUG oslo_concurrency.lockutils [None req-fd469e3e-0f07-46c3-9403-6ec88e595d61 tempest-ServersTestJSON-192126600 tempest-ServersTestJSON-192126600-project-member] Releasing lock "refresh_cache-4c336ad1-8ce6-4f89-843e-0baae0d0dbda" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1461.881672] env[62519]: DEBUG nova.compute.manager [None req-fd469e3e-0f07-46c3-9403-6ec88e595d61 tempest-ServersTestJSON-192126600 tempest-ServersTestJSON-192126600-project-member] [instance: 4c336ad1-8ce6-4f89-843e-0baae0d0dbda] Instance network_info: |[{"id": "57170226-5721-470d-8c8b-652ddff02ff6", "address": "fa:16:3e:bd:61:12", "network": {"id": "aa9fff60-aac4-4eb8-8e44-f3d41384fdd4", "bridge": "br-int", "label": "tempest-ServersTestJSON-287335508-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "55bcb317741740ab86883dc9d0409fa5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6d62c1cf-f39a-4626-9552-f1e13c692636", "external-id": "nsx-vlan-transportzone-748", "segmentation_id": 748, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap57170226-57", "ovs_interfaceid": "57170226-5721-470d-8c8b-652ddff02ff6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62519) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2004}} [ 1461.882111] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-fd469e3e-0f07-46c3-9403-6ec88e595d61 tempest-ServersTestJSON-192126600 tempest-ServersTestJSON-192126600-project-member] [instance: 4c336ad1-8ce6-4f89-843e-0baae0d0dbda] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:bd:61:12', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '6d62c1cf-f39a-4626-9552-f1e13c692636', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '57170226-5721-470d-8c8b-652ddff02ff6', 'vif_model': 'vmxnet3'}] {{(pid=62519) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1461.889882] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-fd469e3e-0f07-46c3-9403-6ec88e595d61 tempest-ServersTestJSON-192126600 tempest-ServersTestJSON-192126600-project-member] Creating folder: Project (55bcb317741740ab86883dc9d0409fa5). Parent ref: group-v373567. {{(pid=62519) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1461.890196] env[62519]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-929de73c-2708-4e59-9b1b-581c9c2ecb2e {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1461.900632] env[62519]: INFO nova.virt.vmwareapi.vm_util [None req-fd469e3e-0f07-46c3-9403-6ec88e595d61 tempest-ServersTestJSON-192126600 tempest-ServersTestJSON-192126600-project-member] Created folder: Project (55bcb317741740ab86883dc9d0409fa5) in parent group-v373567. 
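The repeated "Invoking <object>.<method> with opID=oslo.vmware-…" and "Waiting for the task: (returnval){…} to complete" records above all come from the same oslo.vmware invoke-and-poll pattern. Below is a minimal sketch of that pattern, assuming only the public oslo.vmware API (VMwareAPISession.invoke_api / wait_for_task); the connection details, managed-object id, and reconfigure spec are illustrative placeholders, not values taken from this log.

```python
# Sketch of the oslo.vmware invoke-and-wait pattern that produces the
# "Invoking ... with opID=oslo.vmware-<uuid>" and "Waiting for the task ...
# to complete" / "progress is N%" lines above. All connection parameters and
# object ids here are hypothetical placeholders.
from oslo_vmware import api, vim_util

session = api.VMwareAPISession(
    'vc.example.test',               # vCenter host (placeholder)
    'administrator@vsphere.local',   # username (placeholder)
    'secret',                        # password (placeholder)
    api_retry_count=10,
    task_poll_interval=0.5,          # drives the frequent "progress is N%" polls
)

# Build a managed-object reference for a VM (placeholder moref value).
vm_ref = vim_util.get_moref('vm-12345', 'VirtualMachine')

# Assumes the suds client factory exposed by the session; an empty
# VirtualMachineConfigSpec stands in for the disk-attach spec built by Nova.
spec = session.vim.client.factory.create('ns0:VirtualMachineConfigSpec')
spec.annotation = 'reconfigured by example'

# Each invoke_api call is logged as "Invoking VirtualMachine.ReconfigVM_Task
# with opID=oslo.vmware-<uuid>" and returns a Task managed object.
task = session.invoke_api(session.vim, 'ReconfigVM_Task', vm_ref, spec=spec)

# wait_for_task polls the task (logging "Task: {...} progress is N%") until
# vCenter reports success, then returns the final TaskInfo.
task_info = session.wait_for_task(task)
```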
[ 1461.900863] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-fd469e3e-0f07-46c3-9403-6ec88e595d61 tempest-ServersTestJSON-192126600 tempest-ServersTestJSON-192126600-project-member] Creating folder: Instances. Parent ref: group-v373634. {{(pid=62519) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1461.901154] env[62519]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-3c3324bb-5646-4b05-8e4d-365dc01371c6 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1461.914393] env[62519]: INFO nova.virt.vmwareapi.vm_util [None req-fd469e3e-0f07-46c3-9403-6ec88e595d61 tempest-ServersTestJSON-192126600 tempest-ServersTestJSON-192126600-project-member] Created folder: Instances in parent group-v373634. [ 1461.914671] env[62519]: DEBUG oslo.service.loopingcall [None req-fd469e3e-0f07-46c3-9403-6ec88e595d61 tempest-ServersTestJSON-192126600 tempest-ServersTestJSON-192126600-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62519) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1461.914903] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4c336ad1-8ce6-4f89-843e-0baae0d0dbda] Creating VM on the ESX host {{(pid=62519) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1461.915155] env[62519]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-1100237e-8c5c-4093-a0d0-17fe2be9f9e0 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1461.934912] env[62519]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1461.934912] env[62519]: value = "task-1802118" [ 1461.934912] env[62519]: _type = "Task" [ 1461.934912] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1461.943114] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1802118, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1461.961423] env[62519]: DEBUG nova.scheduler.client.report [None req-e8c6971a-d3e1-4194-9146-e4c243998a24 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Inventory has not changed for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1461.965336] env[62519]: DEBUG oslo_concurrency.lockutils [None req-c79e11bf-bb5b-47ac-8984-42477fd61aad tempest-ServerShowV247Test-1912217433 tempest-ServerShowV247Test-1912217433-project-member] Acquiring lock "refresh_cache-49221ea3-d457-4cf5-97a9-9ae74c4e86fb" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1461.965608] env[62519]: DEBUG oslo_concurrency.lockutils [None req-c79e11bf-bb5b-47ac-8984-42477fd61aad tempest-ServerShowV247Test-1912217433 tempest-ServerShowV247Test-1912217433-project-member] Acquired lock "refresh_cache-49221ea3-d457-4cf5-97a9-9ae74c4e86fb" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1461.965906] env[62519]: DEBUG nova.network.neutron [None req-c79e11bf-bb5b-47ac-8984-42477fd61aad tempest-ServerShowV247Test-1912217433 tempest-ServerShowV247Test-1912217433-project-member] [instance: 49221ea3-d457-4cf5-97a9-9ae74c4e86fb] Building network info cache for instance {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1461.969581] env[62519]: DEBUG nova.compute.manager [req-13e18680-8ffe-45af-ab1a-16b3655c2238 req-368c6f5a-2e89-4672-9d61-42eb39f82947 service nova] [instance: 4c336ad1-8ce6-4f89-843e-0baae0d0dbda] Received event network-changed-57170226-5721-470d-8c8b-652ddff02ff6 {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1461.969772] env[62519]: DEBUG nova.compute.manager [req-13e18680-8ffe-45af-ab1a-16b3655c2238 req-368c6f5a-2e89-4672-9d61-42eb39f82947 service nova] [instance: 4c336ad1-8ce6-4f89-843e-0baae0d0dbda] Refreshing instance network info cache due to event network-changed-57170226-5721-470d-8c8b-652ddff02ff6. 
{{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11628}} [ 1461.970783] env[62519]: DEBUG oslo_concurrency.lockutils [req-13e18680-8ffe-45af-ab1a-16b3655c2238 req-368c6f5a-2e89-4672-9d61-42eb39f82947 service nova] Acquiring lock "refresh_cache-4c336ad1-8ce6-4f89-843e-0baae0d0dbda" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1461.970783] env[62519]: DEBUG oslo_concurrency.lockutils [req-13e18680-8ffe-45af-ab1a-16b3655c2238 req-368c6f5a-2e89-4672-9d61-42eb39f82947 service nova] Acquired lock "refresh_cache-4c336ad1-8ce6-4f89-843e-0baae0d0dbda" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1461.970783] env[62519]: DEBUG nova.network.neutron [req-13e18680-8ffe-45af-ab1a-16b3655c2238 req-368c6f5a-2e89-4672-9d61-42eb39f82947 service nova] [instance: 4c336ad1-8ce6-4f89-843e-0baae0d0dbda] Refreshing network info cache for port 57170226-5721-470d-8c8b-652ddff02ff6 {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1461.999183] env[62519]: DEBUG oslo_vmware.api [None req-070141c4-76cd-4253-8f36-904e93010092 tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] Task: {'id': task-1802113, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1462.200747] env[62519]: DEBUG nova.compute.manager [None req-d6584dc1-dea2-47ef-848a-6ee291fa58db tempest-ServersAdminNegativeTestJSON-2096466474 tempest-ServersAdminNegativeTestJSON-2096466474-project-member] [instance: 681ef7a9-3b24-450a-9034-6d30177995d7] Start spawning the instance on the hypervisor. 
{{(pid=62519) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2647}} [ 1462.229772] env[62519]: DEBUG nova.virt.hardware [None req-d6584dc1-dea2-47ef-848a-6ee291fa58db tempest-ServersAdminNegativeTestJSON-2096466474 tempest-ServersAdminNegativeTestJSON-2096466474-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T07:56:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T07:55:55Z,direct_url=,disk_format='vmdk',id=15793716-f1d9-4a86-9030-717adf498693,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4069337684d348e0a1ab4eb6a1a2b14d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T07:55:56Z,virtual_size=,visibility=), allow threads: False {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1462.230075] env[62519]: DEBUG nova.virt.hardware [None req-d6584dc1-dea2-47ef-848a-6ee291fa58db tempest-ServersAdminNegativeTestJSON-2096466474 tempest-ServersAdminNegativeTestJSON-2096466474-project-member] Flavor limits 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1462.230256] env[62519]: DEBUG nova.virt.hardware [None req-d6584dc1-dea2-47ef-848a-6ee291fa58db tempest-ServersAdminNegativeTestJSON-2096466474 tempest-ServersAdminNegativeTestJSON-2096466474-project-member] Image limits 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1462.230446] env[62519]: DEBUG nova.virt.hardware [None req-d6584dc1-dea2-47ef-848a-6ee291fa58db tempest-ServersAdminNegativeTestJSON-2096466474 tempest-ServersAdminNegativeTestJSON-2096466474-project-member] Flavor pref 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1462.230592] env[62519]: DEBUG nova.virt.hardware [None req-d6584dc1-dea2-47ef-848a-6ee291fa58db tempest-ServersAdminNegativeTestJSON-2096466474 tempest-ServersAdminNegativeTestJSON-2096466474-project-member] Image pref 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1462.230736] env[62519]: DEBUG nova.virt.hardware [None req-d6584dc1-dea2-47ef-848a-6ee291fa58db tempest-ServersAdminNegativeTestJSON-2096466474 tempest-ServersAdminNegativeTestJSON-2096466474-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1462.230938] env[62519]: DEBUG nova.virt.hardware [None req-d6584dc1-dea2-47ef-848a-6ee291fa58db tempest-ServersAdminNegativeTestJSON-2096466474 tempest-ServersAdminNegativeTestJSON-2096466474-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1462.231123] env[62519]: DEBUG nova.virt.hardware [None req-d6584dc1-dea2-47ef-848a-6ee291fa58db tempest-ServersAdminNegativeTestJSON-2096466474 tempest-ServersAdminNegativeTestJSON-2096466474-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62519) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1462.231333] env[62519]: DEBUG nova.virt.hardware [None req-d6584dc1-dea2-47ef-848a-6ee291fa58db tempest-ServersAdminNegativeTestJSON-2096466474 tempest-ServersAdminNegativeTestJSON-2096466474-project-member] Got 1 possible topologies {{(pid=62519) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1462.231596] env[62519]: DEBUG nova.virt.hardware [None req-d6584dc1-dea2-47ef-848a-6ee291fa58db tempest-ServersAdminNegativeTestJSON-2096466474 tempest-ServersAdminNegativeTestJSON-2096466474-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1462.231817] env[62519]: DEBUG nova.virt.hardware [None req-d6584dc1-dea2-47ef-848a-6ee291fa58db tempest-ServersAdminNegativeTestJSON-2096466474 tempest-ServersAdminNegativeTestJSON-2096466474-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1462.233247] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8e06561-73c2-4179-b550-69d0cfada937 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1462.243975] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-adddc8a2-3000-4292-ac5c-25f90bdf4a21 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1462.345098] env[62519]: DEBUG oslo_vmware.api [None req-070141c4-76cd-4253-8f36-904e93010092 tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] Task: {'id': task-1802115, 'name': ReconfigVM_Task, 'duration_secs': 0.489888} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1462.345381] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-070141c4-76cd-4253-8f36-904e93010092 tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] [instance: 3aab3adb-b28b-45dd-880f-b1cfbaeeed0c] Reconfigured VM instance instance-00000016 to attach disk [datastore1] 3aab3adb-b28b-45dd-880f-b1cfbaeeed0c/3aab3adb-b28b-45dd-880f-b1cfbaeeed0c.vmdk or device None with type sparse {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1462.346140] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-ae2710ed-0120-46a9-925a-d5f7aada6332 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1462.353332] env[62519]: DEBUG oslo_vmware.api [None req-070141c4-76cd-4253-8f36-904e93010092 tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] Waiting for the task: (returnval){ [ 1462.353332] env[62519]: value = "task-1802119" [ 1462.353332] env[62519]: _type = "Task" [ 1462.353332] env[62519]: } to complete. 
{{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1462.367195] env[62519]: DEBUG oslo_vmware.api [None req-070141c4-76cd-4253-8f36-904e93010092 tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] Task: {'id': task-1802119, 'name': Rename_Task} progress is 5%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1462.444763] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1802118, 'name': CreateVM_Task, 'duration_secs': 0.36383} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1462.444984] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4c336ad1-8ce6-4f89-843e-0baae0d0dbda] Created VM on the ESX host {{(pid=62519) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1462.445776] env[62519]: DEBUG oslo_concurrency.lockutils [None req-fd469e3e-0f07-46c3-9403-6ec88e595d61 tempest-ServersTestJSON-192126600 tempest-ServersTestJSON-192126600-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1462.445954] env[62519]: DEBUG oslo_concurrency.lockutils [None req-fd469e3e-0f07-46c3-9403-6ec88e595d61 tempest-ServersTestJSON-192126600 tempest-ServersTestJSON-192126600-project-member] Acquired lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1462.446316] env[62519]: DEBUG oslo_concurrency.lockutils [None req-fd469e3e-0f07-46c3-9403-6ec88e595d61 tempest-ServersTestJSON-192126600 tempest-ServersTestJSON-192126600-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1462.446577] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9d508484-26fa-4bfc-bc98-454ef446913f {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1462.451044] env[62519]: DEBUG oslo_vmware.api [None req-fd469e3e-0f07-46c3-9403-6ec88e595d61 tempest-ServersTestJSON-192126600 tempest-ServersTestJSON-192126600-project-member] Waiting for the task: (returnval){ [ 1462.451044] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]52add74b-1c50-c255-2e9e-8ecb8efa328f" [ 1462.451044] env[62519]: _type = "Task" [ 1462.451044] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1462.458703] env[62519]: DEBUG oslo_vmware.api [None req-fd469e3e-0f07-46c3-9403-6ec88e595d61 tempest-ServersTestJSON-192126600 tempest-ServersTestJSON-192126600-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52add74b-1c50-c255-2e9e-8ecb8efa328f, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1462.468430] env[62519]: DEBUG oslo_concurrency.lockutils [None req-e8c6971a-d3e1-4194-9146-e4c243998a24 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.292s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1462.471028] env[62519]: DEBUG oslo_concurrency.lockutils [None req-7d5c54f5-351b-4d76-a79b-bca198a22919 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 34.008s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1462.471028] env[62519]: DEBUG nova.objects.instance [None req-7d5c54f5-351b-4d76-a79b-bca198a22919 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Lazy-loading 'resources' on Instance uuid 8b178cc0-db79-4ec2-8962-f31b936f8eff {{(pid=62519) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1462.493319] env[62519]: DEBUG nova.network.neutron [None req-c79e11bf-bb5b-47ac-8984-42477fd61aad tempest-ServerShowV247Test-1912217433 tempest-ServerShowV247Test-1912217433-project-member] [instance: 49221ea3-d457-4cf5-97a9-9ae74c4e86fb] Instance cache missing network info. {{(pid=62519) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1462.496025] env[62519]: INFO nova.scheduler.client.report [None req-e8c6971a-d3e1-4194-9146-e4c243998a24 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Deleted allocations for instance 31f55ece-82e6-40ad-ad7c-1af645f307bf [ 1462.504073] env[62519]: DEBUG oslo_vmware.api [None req-070141c4-76cd-4253-8f36-904e93010092 tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] Task: {'id': task-1802113, 'name': PowerOnVM_Task, 'duration_secs': 0.797288} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1462.504580] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-070141c4-76cd-4253-8f36-904e93010092 tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] [instance: 56790738-4759-468a-9f43-f9c2bc2de23a] Powered on the VM {{(pid=62519) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1462.504809] env[62519]: INFO nova.compute.manager [None req-070141c4-76cd-4253-8f36-904e93010092 tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] [instance: 56790738-4759-468a-9f43-f9c2bc2de23a] Took 10.74 seconds to spawn the instance on the hypervisor. 
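The lock bookkeeping lines above ("Acquiring lock …", "Lock … acquired by … :: waited 34.008s", "Lock … released … :: held 2.292s") are emitted by oslo.concurrency around the resource-tracker and network-cache critical sections. A minimal sketch of the two usages visible in this log, assuming only the public oslo_concurrency.lockutils decorator and context manager; the guarded work is a placeholder.

```python
# Sketch of the locking pattern behind the "Acquiring lock / acquired /
# released" DEBUG lines above. Lock names mirror the ones seen in this log;
# the work done under each lock is a stand-in.
import time
from oslo_concurrency import lockutils


@lockutils.synchronized('compute_resources')
def claim_resources(instance_uuid):
    # Runs with the "compute_resources" lock held; lockutils logs how long
    # the caller waited for the lock and how long it was held.
    time.sleep(0.1)  # stand-in for resource-tracker bookkeeping
    return instance_uuid


def refresh_network_cache(instance_uuid):
    # The refresh_cache-<uuid> locks are taken explicitly as a context manager.
    with lockutils.lock('refresh_cache-%s' % instance_uuid):
        pass  # stand-in for rebuilding the instance network info cache


claim_resources('4c336ad1-8ce6-4f89-843e-0baae0d0dbda')
refresh_network_cache('4c336ad1-8ce6-4f89-843e-0baae0d0dbda')
```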
[ 1462.504957] env[62519]: DEBUG nova.compute.manager [None req-070141c4-76cd-4253-8f36-904e93010092 tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] [instance: 56790738-4759-468a-9f43-f9c2bc2de23a] Checking state {{(pid=62519) _get_power_state /opt/stack/nova/nova/compute/manager.py:1799}} [ 1462.505782] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e00eb0b6-1c9e-4c39-8e50-2544f71622da {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1462.588157] env[62519]: DEBUG nova.network.neutron [None req-c79e11bf-bb5b-47ac-8984-42477fd61aad tempest-ServerShowV247Test-1912217433 tempest-ServerShowV247Test-1912217433-project-member] [instance: 49221ea3-d457-4cf5-97a9-9ae74c4e86fb] Updating instance_info_cache with network_info: [] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1462.810104] env[62519]: DEBUG nova.network.neutron [None req-d6584dc1-dea2-47ef-848a-6ee291fa58db tempest-ServersAdminNegativeTestJSON-2096466474 tempest-ServersAdminNegativeTestJSON-2096466474-project-member] [instance: 681ef7a9-3b24-450a-9034-6d30177995d7] Successfully updated port: 0c70e934-3a08-4103-95df-ba8f3db80e91 {{(pid=62519) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1462.865503] env[62519]: DEBUG oslo_vmware.api [None req-070141c4-76cd-4253-8f36-904e93010092 tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] Task: {'id': task-1802119, 'name': Rename_Task} progress is 99%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1462.960848] env[62519]: DEBUG oslo_vmware.api [None req-fd469e3e-0f07-46c3-9403-6ec88e595d61 tempest-ServersTestJSON-192126600 tempest-ServersTestJSON-192126600-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52add74b-1c50-c255-2e9e-8ecb8efa328f, 'name': SearchDatastore_Task, 'duration_secs': 0.033412} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1462.961170] env[62519]: DEBUG oslo_concurrency.lockutils [None req-fd469e3e-0f07-46c3-9403-6ec88e595d61 tempest-ServersTestJSON-192126600 tempest-ServersTestJSON-192126600-project-member] Releasing lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1462.961397] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-fd469e3e-0f07-46c3-9403-6ec88e595d61 tempest-ServersTestJSON-192126600 tempest-ServersTestJSON-192126600-project-member] [instance: 4c336ad1-8ce6-4f89-843e-0baae0d0dbda] Processing image 15793716-f1d9-4a86-9030-717adf498693 {{(pid=62519) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1462.961620] env[62519]: DEBUG oslo_concurrency.lockutils [None req-fd469e3e-0f07-46c3-9403-6ec88e595d61 tempest-ServersTestJSON-192126600 tempest-ServersTestJSON-192126600-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1462.961836] env[62519]: DEBUG oslo_concurrency.lockutils [None req-fd469e3e-0f07-46c3-9403-6ec88e595d61 tempest-ServersTestJSON-192126600 tempest-ServersTestJSON-192126600-project-member] Acquired lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1462.961931] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-fd469e3e-0f07-46c3-9403-6ec88e595d61 tempest-ServersTestJSON-192126600 tempest-ServersTestJSON-192126600-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62519) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1462.962202] env[62519]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-fb862cf8-ef75-47ec-b0ac-d9b3037a1d03 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1462.977975] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-fd469e3e-0f07-46c3-9403-6ec88e595d61 tempest-ServersTestJSON-192126600 tempest-ServersTestJSON-192126600-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62519) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1462.978244] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-fd469e3e-0f07-46c3-9403-6ec88e595d61 tempest-ServersTestJSON-192126600 tempest-ServersTestJSON-192126600-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62519) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1462.978995] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8f0d2b42-0da1-4eee-9f1b-6c9429f4efec {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1462.984108] env[62519]: DEBUG oslo_vmware.api [None req-fd469e3e-0f07-46c3-9403-6ec88e595d61 tempest-ServersTestJSON-192126600 tempest-ServersTestJSON-192126600-project-member] Waiting for the task: (returnval){ [ 1462.984108] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]5248b19c-6693-5597-bdec-37dbc85da838" [ 1462.984108] env[62519]: _type = "Task" [ 1462.984108] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1462.991534] env[62519]: DEBUG oslo_vmware.api [None req-fd469e3e-0f07-46c3-9403-6ec88e595d61 tempest-ServersTestJSON-192126600 tempest-ServersTestJSON-192126600-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]5248b19c-6693-5597-bdec-37dbc85da838, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1463.007836] env[62519]: DEBUG oslo_concurrency.lockutils [None req-e8c6971a-d3e1-4194-9146-e4c243998a24 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Lock "31f55ece-82e6-40ad-ad7c-1af645f307bf" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 40.977s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1463.024241] env[62519]: INFO nova.compute.manager [None req-070141c4-76cd-4253-8f36-904e93010092 tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] [instance: 56790738-4759-468a-9f43-f9c2bc2de23a] Took 44.33 seconds to build instance. [ 1463.042657] env[62519]: DEBUG nova.network.neutron [req-13e18680-8ffe-45af-ab1a-16b3655c2238 req-368c6f5a-2e89-4672-9d61-42eb39f82947 service nova] [instance: 4c336ad1-8ce6-4f89-843e-0baae0d0dbda] Updated VIF entry in instance network info cache for port 57170226-5721-470d-8c8b-652ddff02ff6. 
{{(pid=62519) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1463.044078] env[62519]: DEBUG nova.network.neutron [req-13e18680-8ffe-45af-ab1a-16b3655c2238 req-368c6f5a-2e89-4672-9d61-42eb39f82947 service nova] [instance: 4c336ad1-8ce6-4f89-843e-0baae0d0dbda] Updating instance_info_cache with network_info: [{"id": "57170226-5721-470d-8c8b-652ddff02ff6", "address": "fa:16:3e:bd:61:12", "network": {"id": "aa9fff60-aac4-4eb8-8e44-f3d41384fdd4", "bridge": "br-int", "label": "tempest-ServersTestJSON-287335508-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "55bcb317741740ab86883dc9d0409fa5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6d62c1cf-f39a-4626-9552-f1e13c692636", "external-id": "nsx-vlan-transportzone-748", "segmentation_id": 748, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap57170226-57", "ovs_interfaceid": "57170226-5721-470d-8c8b-652ddff02ff6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1463.093480] env[62519]: DEBUG oslo_concurrency.lockutils [None req-c79e11bf-bb5b-47ac-8984-42477fd61aad tempest-ServerShowV247Test-1912217433 tempest-ServerShowV247Test-1912217433-project-member] Releasing lock "refresh_cache-49221ea3-d457-4cf5-97a9-9ae74c4e86fb" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1463.094518] env[62519]: DEBUG nova.compute.manager [None req-c79e11bf-bb5b-47ac-8984-42477fd61aad tempest-ServerShowV247Test-1912217433 tempest-ServerShowV247Test-1912217433-project-member] [instance: 49221ea3-d457-4cf5-97a9-9ae74c4e86fb] Start destroying the instance on the hypervisor. 
{{(pid=62519) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3166}} [ 1463.094518] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-c79e11bf-bb5b-47ac-8984-42477fd61aad tempest-ServerShowV247Test-1912217433 tempest-ServerShowV247Test-1912217433-project-member] [instance: 49221ea3-d457-4cf5-97a9-9ae74c4e86fb] Destroying instance {{(pid=62519) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1463.094965] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c0cd368-e7cb-461c-ae8d-eec12391f642 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1463.106419] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-c79e11bf-bb5b-47ac-8984-42477fd61aad tempest-ServerShowV247Test-1912217433 tempest-ServerShowV247Test-1912217433-project-member] [instance: 49221ea3-d457-4cf5-97a9-9ae74c4e86fb] Powering off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1463.106661] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ca19111b-b13a-4492-9032-3f312faad0cc {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1463.114894] env[62519]: DEBUG oslo_vmware.api [None req-c79e11bf-bb5b-47ac-8984-42477fd61aad tempest-ServerShowV247Test-1912217433 tempest-ServerShowV247Test-1912217433-project-member] Waiting for the task: (returnval){ [ 1463.114894] env[62519]: value = "task-1802120" [ 1463.114894] env[62519]: _type = "Task" [ 1463.114894] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1463.122768] env[62519]: DEBUG oslo_vmware.api [None req-c79e11bf-bb5b-47ac-8984-42477fd61aad tempest-ServerShowV247Test-1912217433 tempest-ServerShowV247Test-1912217433-project-member] Task: {'id': task-1802120, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1463.315093] env[62519]: DEBUG oslo_concurrency.lockutils [None req-d6584dc1-dea2-47ef-848a-6ee291fa58db tempest-ServersAdminNegativeTestJSON-2096466474 tempest-ServersAdminNegativeTestJSON-2096466474-project-member] Acquiring lock "refresh_cache-681ef7a9-3b24-450a-9034-6d30177995d7" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1463.318061] env[62519]: DEBUG oslo_concurrency.lockutils [None req-d6584dc1-dea2-47ef-848a-6ee291fa58db tempest-ServersAdminNegativeTestJSON-2096466474 tempest-ServersAdminNegativeTestJSON-2096466474-project-member] Acquired lock "refresh_cache-681ef7a9-3b24-450a-9034-6d30177995d7" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1463.318061] env[62519]: DEBUG nova.network.neutron [None req-d6584dc1-dea2-47ef-848a-6ee291fa58db tempest-ServersAdminNegativeTestJSON-2096466474 tempest-ServersAdminNegativeTestJSON-2096466474-project-member] [instance: 681ef7a9-3b24-450a-9034-6d30177995d7] Building network info cache for instance {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1463.365710] env[62519]: DEBUG oslo_vmware.api [None req-070141c4-76cd-4253-8f36-904e93010092 tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] Task: {'id': task-1802119, 'name': Rename_Task} progress is 99%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1463.484591] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5cb7ea0-407e-4085-a2c3-1a1f7442b37b {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1463.498232] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ddcc1904-347b-421e-aedf-db25cd2114b3 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1463.501346] env[62519]: DEBUG oslo_vmware.api [None req-fd469e3e-0f07-46c3-9403-6ec88e595d61 tempest-ServersTestJSON-192126600 tempest-ServersTestJSON-192126600-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]5248b19c-6693-5597-bdec-37dbc85da838, 'name': SearchDatastore_Task, 'duration_secs': 0.022501} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1463.502434] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-69a546c5-ecc5-4440-b7ae-7ea40bccac74 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1463.534029] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-deae5208-d74a-49c3-aca6-9ee320160c3c {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1463.536935] env[62519]: DEBUG oslo_vmware.api [None req-fd469e3e-0f07-46c3-9403-6ec88e595d61 tempest-ServersTestJSON-192126600 tempest-ServersTestJSON-192126600-project-member] Waiting for the task: (returnval){ [ 1463.536935] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]524301fc-9a7f-3fe0-9111-8d4853b97d24" [ 1463.536935] env[62519]: _type = "Task" [ 1463.536935] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1463.545797] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-163183b4-33e0-43af-ab52-a869c01064f1 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1463.553423] env[62519]: DEBUG oslo_concurrency.lockutils [req-13e18680-8ffe-45af-ab1a-16b3655c2238 req-368c6f5a-2e89-4672-9d61-42eb39f82947 service nova] Releasing lock "refresh_cache-4c336ad1-8ce6-4f89-843e-0baae0d0dbda" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1463.553801] env[62519]: DEBUG oslo_vmware.api [None req-fd469e3e-0f07-46c3-9403-6ec88e595d61 tempest-ServersTestJSON-192126600 tempest-ServersTestJSON-192126600-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]524301fc-9a7f-3fe0-9111-8d4853b97d24, 'name': SearchDatastore_Task, 'duration_secs': 0.009985} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1463.554414] env[62519]: DEBUG oslo_concurrency.lockutils [None req-fd469e3e-0f07-46c3-9403-6ec88e595d61 tempest-ServersTestJSON-192126600 tempest-ServersTestJSON-192126600-project-member] Releasing lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1463.554657] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-fd469e3e-0f07-46c3-9403-6ec88e595d61 tempest-ServersTestJSON-192126600 tempest-ServersTestJSON-192126600-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk to [datastore1] 4c336ad1-8ce6-4f89-843e-0baae0d0dbda/4c336ad1-8ce6-4f89-843e-0baae0d0dbda.vmdk {{(pid=62519) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1463.554904] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-b68521cc-002d-40c3-951f-e0b556ac8aea {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1463.564595] env[62519]: DEBUG nova.compute.provider_tree [None req-7d5c54f5-351b-4d76-a79b-bca198a22919 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Inventory has not changed in ProviderTree for provider: f8ca0d98-9158-4b85-ae0e-b106f966dd44 {{(pid=62519) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1463.571188] env[62519]: DEBUG oslo_vmware.api [None req-fd469e3e-0f07-46c3-9403-6ec88e595d61 tempest-ServersTestJSON-192126600 tempest-ServersTestJSON-192126600-project-member] Waiting for the task: (returnval){ [ 1463.571188] env[62519]: value = "task-1802121" [ 1463.571188] env[62519]: _type = "Task" [ 1463.571188] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1463.582057] env[62519]: DEBUG oslo_vmware.api [None req-fd469e3e-0f07-46c3-9403-6ec88e595d61 tempest-ServersTestJSON-192126600 tempest-ServersTestJSON-192126600-project-member] Task: {'id': task-1802121, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1463.627210] env[62519]: DEBUG oslo_vmware.api [None req-c79e11bf-bb5b-47ac-8984-42477fd61aad tempest-ServerShowV247Test-1912217433 tempest-ServerShowV247Test-1912217433-project-member] Task: {'id': task-1802120, 'name': PowerOffVM_Task, 'duration_secs': 0.125688} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1463.627478] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-c79e11bf-bb5b-47ac-8984-42477fd61aad tempest-ServerShowV247Test-1912217433 tempest-ServerShowV247Test-1912217433-project-member] [instance: 49221ea3-d457-4cf5-97a9-9ae74c4e86fb] Powered off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1463.627645] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-c79e11bf-bb5b-47ac-8984-42477fd61aad tempest-ServerShowV247Test-1912217433 tempest-ServerShowV247Test-1912217433-project-member] [instance: 49221ea3-d457-4cf5-97a9-9ae74c4e86fb] Unregistering the VM {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1463.627905] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-0dcfb709-48f2-4963-9163-4777153c9914 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1463.657098] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-c79e11bf-bb5b-47ac-8984-42477fd61aad tempest-ServerShowV247Test-1912217433 tempest-ServerShowV247Test-1912217433-project-member] [instance: 49221ea3-d457-4cf5-97a9-9ae74c4e86fb] Unregistered the VM {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1463.657098] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-c79e11bf-bb5b-47ac-8984-42477fd61aad tempest-ServerShowV247Test-1912217433 tempest-ServerShowV247Test-1912217433-project-member] [instance: 49221ea3-d457-4cf5-97a9-9ae74c4e86fb] Deleting contents of the VM from datastore datastore1 {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1463.657241] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-c79e11bf-bb5b-47ac-8984-42477fd61aad tempest-ServerShowV247Test-1912217433 tempest-ServerShowV247Test-1912217433-project-member] Deleting the datastore file [datastore1] 49221ea3-d457-4cf5-97a9-9ae74c4e86fb {{(pid=62519) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1463.657482] env[62519]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-85ace800-7d20-4832-9fbf-d253a23ac9e0 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1463.663925] env[62519]: DEBUG oslo_vmware.api [None req-c79e11bf-bb5b-47ac-8984-42477fd61aad tempest-ServerShowV247Test-1912217433 tempest-ServerShowV247Test-1912217433-project-member] Waiting for the task: (returnval){ [ 1463.663925] env[62519]: value = "task-1802123" [ 1463.663925] env[62519]: _type = "Task" [ 1463.663925] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1463.672244] env[62519]: DEBUG oslo_vmware.api [None req-c79e11bf-bb5b-47ac-8984-42477fd61aad tempest-ServerShowV247Test-1912217433 tempest-ServerShowV247Test-1912217433-project-member] Task: {'id': task-1802123, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1463.846023] env[62519]: DEBUG nova.network.neutron [None req-d6584dc1-dea2-47ef-848a-6ee291fa58db tempest-ServersAdminNegativeTestJSON-2096466474 tempest-ServersAdminNegativeTestJSON-2096466474-project-member] [instance: 681ef7a9-3b24-450a-9034-6d30177995d7] Instance cache missing network info. {{(pid=62519) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1463.864199] env[62519]: DEBUG oslo_vmware.api [None req-070141c4-76cd-4253-8f36-904e93010092 tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] Task: {'id': task-1802119, 'name': Rename_Task, 'duration_secs': 1.485368} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1463.866693] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-070141c4-76cd-4253-8f36-904e93010092 tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] [instance: 3aab3adb-b28b-45dd-880f-b1cfbaeeed0c] Powering on the VM {{(pid=62519) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1463.866981] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-29a01ff2-7014-4217-a9ab-ad82e0f2cb7b {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1463.874229] env[62519]: DEBUG oslo_vmware.api [None req-070141c4-76cd-4253-8f36-904e93010092 tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] Waiting for the task: (returnval){ [ 1463.874229] env[62519]: value = "task-1802124" [ 1463.874229] env[62519]: _type = "Task" [ 1463.874229] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1463.881815] env[62519]: DEBUG oslo_vmware.api [None req-070141c4-76cd-4253-8f36-904e93010092 tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] Task: {'id': task-1802124, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1464.072527] env[62519]: DEBUG nova.scheduler.client.report [None req-7d5c54f5-351b-4d76-a79b-bca198a22919 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Inventory has not changed for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1464.090569] env[62519]: DEBUG oslo_vmware.api [None req-fd469e3e-0f07-46c3-9403-6ec88e595d61 tempest-ServersTestJSON-192126600 tempest-ServersTestJSON-192126600-project-member] Task: {'id': task-1802121, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1464.100427] env[62519]: DEBUG nova.compute.manager [req-36c3bd30-ea66-4fc9-a110-90d7313c5e26 req-45809164-f47a-4b9a-b5e6-ca1043c57932 service nova] [instance: 681ef7a9-3b24-450a-9034-6d30177995d7] Received event network-vif-plugged-0c70e934-3a08-4103-95df-ba8f3db80e91 {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1464.101135] env[62519]: DEBUG oslo_concurrency.lockutils [req-36c3bd30-ea66-4fc9-a110-90d7313c5e26 req-45809164-f47a-4b9a-b5e6-ca1043c57932 service nova] Acquiring lock "681ef7a9-3b24-450a-9034-6d30177995d7-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1464.101535] env[62519]: DEBUG oslo_concurrency.lockutils [req-36c3bd30-ea66-4fc9-a110-90d7313c5e26 req-45809164-f47a-4b9a-b5e6-ca1043c57932 service nova] Lock "681ef7a9-3b24-450a-9034-6d30177995d7-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1464.101838] env[62519]: DEBUG oslo_concurrency.lockutils [req-36c3bd30-ea66-4fc9-a110-90d7313c5e26 req-45809164-f47a-4b9a-b5e6-ca1043c57932 service nova] Lock "681ef7a9-3b24-450a-9034-6d30177995d7-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.001s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1464.102035] env[62519]: DEBUG nova.compute.manager [req-36c3bd30-ea66-4fc9-a110-90d7313c5e26 req-45809164-f47a-4b9a-b5e6-ca1043c57932 service nova] [instance: 681ef7a9-3b24-450a-9034-6d30177995d7] No waiting events found dispatching network-vif-plugged-0c70e934-3a08-4103-95df-ba8f3db80e91 {{(pid=62519) pop_instance_event /opt/stack/nova/nova/compute/manager.py:323}} [ 1464.102218] env[62519]: WARNING nova.compute.manager [req-36c3bd30-ea66-4fc9-a110-90d7313c5e26 req-45809164-f47a-4b9a-b5e6-ca1043c57932 service nova] [instance: 681ef7a9-3b24-450a-9034-6d30177995d7] Received unexpected event network-vif-plugged-0c70e934-3a08-4103-95df-ba8f3db80e91 for instance with vm_state building and task_state spawning. [ 1464.102477] env[62519]: DEBUG nova.compute.manager [req-36c3bd30-ea66-4fc9-a110-90d7313c5e26 req-45809164-f47a-4b9a-b5e6-ca1043c57932 service nova] [instance: 681ef7a9-3b24-450a-9034-6d30177995d7] Received event network-changed-0c70e934-3a08-4103-95df-ba8f3db80e91 {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1464.102727] env[62519]: DEBUG nova.compute.manager [req-36c3bd30-ea66-4fc9-a110-90d7313c5e26 req-45809164-f47a-4b9a-b5e6-ca1043c57932 service nova] [instance: 681ef7a9-3b24-450a-9034-6d30177995d7] Refreshing instance network info cache due to event network-changed-0c70e934-3a08-4103-95df-ba8f3db80e91. 
{{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11628}} [ 1464.102865] env[62519]: DEBUG oslo_concurrency.lockutils [req-36c3bd30-ea66-4fc9-a110-90d7313c5e26 req-45809164-f47a-4b9a-b5e6-ca1043c57932 service nova] Acquiring lock "refresh_cache-681ef7a9-3b24-450a-9034-6d30177995d7" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1464.164405] env[62519]: DEBUG nova.network.neutron [None req-d6584dc1-dea2-47ef-848a-6ee291fa58db tempest-ServersAdminNegativeTestJSON-2096466474 tempest-ServersAdminNegativeTestJSON-2096466474-project-member] [instance: 681ef7a9-3b24-450a-9034-6d30177995d7] Updating instance_info_cache with network_info: [{"id": "0c70e934-3a08-4103-95df-ba8f3db80e91", "address": "fa:16:3e:a6:18:38", "network": {"id": "b4c20e4e-b4f0-4757-9ab0-b74eef10b678", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.22", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "4069337684d348e0a1ab4eb6a1a2b14d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fe99da4f-5630-4afd-918b-b327193d8489", "external-id": "nsx-vlan-transportzone-688", "segmentation_id": 688, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0c70e934-3a", "ovs_interfaceid": "0c70e934-3a08-4103-95df-ba8f3db80e91", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1464.176691] env[62519]: DEBUG oslo_vmware.api [None req-c79e11bf-bb5b-47ac-8984-42477fd61aad tempest-ServerShowV247Test-1912217433 tempest-ServerShowV247Test-1912217433-project-member] Task: {'id': task-1802123, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.095673} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1464.177025] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-c79e11bf-bb5b-47ac-8984-42477fd61aad tempest-ServerShowV247Test-1912217433 tempest-ServerShowV247Test-1912217433-project-member] Deleted the datastore file {{(pid=62519) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1464.177276] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-c79e11bf-bb5b-47ac-8984-42477fd61aad tempest-ServerShowV247Test-1912217433 tempest-ServerShowV247Test-1912217433-project-member] [instance: 49221ea3-d457-4cf5-97a9-9ae74c4e86fb] Deleted contents of the VM from datastore datastore1 {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1464.177353] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-c79e11bf-bb5b-47ac-8984-42477fd61aad tempest-ServerShowV247Test-1912217433 tempest-ServerShowV247Test-1912217433-project-member] [instance: 49221ea3-d457-4cf5-97a9-9ae74c4e86fb] Instance destroyed {{(pid=62519) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1464.177490] env[62519]: INFO nova.compute.manager [None req-c79e11bf-bb5b-47ac-8984-42477fd61aad tempest-ServerShowV247Test-1912217433 tempest-ServerShowV247Test-1912217433-project-member] [instance: 49221ea3-d457-4cf5-97a9-9ae74c4e86fb] Took 1.08 seconds to destroy the instance on the hypervisor. [ 1464.177742] env[62519]: DEBUG oslo.service.loopingcall [None req-c79e11bf-bb5b-47ac-8984-42477fd61aad tempest-ServerShowV247Test-1912217433 tempest-ServerShowV247Test-1912217433-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62519) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1464.177953] env[62519]: DEBUG nova.compute.manager [-] [instance: 49221ea3-d457-4cf5-97a9-9ae74c4e86fb] Deallocating network for instance {{(pid=62519) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2297}} [ 1464.178064] env[62519]: DEBUG nova.network.neutron [-] [instance: 49221ea3-d457-4cf5-97a9-9ae74c4e86fb] deallocate_for_instance() {{(pid=62519) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1464.204845] env[62519]: DEBUG nova.network.neutron [-] [instance: 49221ea3-d457-4cf5-97a9-9ae74c4e86fb] Instance cache missing network info. 
{{(pid=62519) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1464.241724] env[62519]: DEBUG oslo_concurrency.lockutils [None req-7525886b-c35e-4300-bd7c-cb7c69519285 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Acquiring lock "d8780c40-0099-4ccc-84ae-72fbb14fa1ee" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1464.241964] env[62519]: DEBUG oslo_concurrency.lockutils [None req-7525886b-c35e-4300-bd7c-cb7c69519285 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Lock "d8780c40-0099-4ccc-84ae-72fbb14fa1ee" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1464.384453] env[62519]: DEBUG oslo_vmware.api [None req-070141c4-76cd-4253-8f36-904e93010092 tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] Task: {'id': task-1802124, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1464.540376] env[62519]: DEBUG oslo_concurrency.lockutils [None req-070141c4-76cd-4253-8f36-904e93010092 tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] Lock "56790738-4759-468a-9f43-f9c2bc2de23a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 55.946s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1464.579039] env[62519]: DEBUG oslo_concurrency.lockutils [None req-7d5c54f5-351b-4d76-a79b-bca198a22919 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.108s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1464.585902] env[62519]: DEBUG oslo_concurrency.lockutils [None req-d788e678-2770-4a2b-8ed0-e8cc74b5f814 tempest-ServersAdmin275Test-1657935187 tempest-ServersAdmin275Test-1657935187-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 35.941s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1464.586284] env[62519]: DEBUG nova.objects.instance [None req-d788e678-2770-4a2b-8ed0-e8cc74b5f814 tempest-ServersAdmin275Test-1657935187 tempest-ServersAdmin275Test-1657935187-project-member] [instance: c07e4d30-44bc-417b-8137-97f974aec932] Trying to apply a migration context that does not seem to be set for this instance {{(pid=62519) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1464.605899] env[62519]: DEBUG oslo_vmware.api [None req-fd469e3e-0f07-46c3-9403-6ec88e595d61 tempest-ServersTestJSON-192126600 tempest-ServersTestJSON-192126600-project-member] Task: {'id': task-1802121, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.678304} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1464.607053] env[62519]: INFO nova.scheduler.client.report [None req-7d5c54f5-351b-4d76-a79b-bca198a22919 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Deleted allocations for instance 8b178cc0-db79-4ec2-8962-f31b936f8eff [ 1464.608166] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-fd469e3e-0f07-46c3-9403-6ec88e595d61 tempest-ServersTestJSON-192126600 tempest-ServersTestJSON-192126600-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk to [datastore1] 4c336ad1-8ce6-4f89-843e-0baae0d0dbda/4c336ad1-8ce6-4f89-843e-0baae0d0dbda.vmdk {{(pid=62519) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1464.608479] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-fd469e3e-0f07-46c3-9403-6ec88e595d61 tempest-ServersTestJSON-192126600 tempest-ServersTestJSON-192126600-project-member] [instance: 4c336ad1-8ce6-4f89-843e-0baae0d0dbda] Extending root virtual disk to 1048576 {{(pid=62519) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1464.611069] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-2b63c3a8-f3f4-4353-9bbd-a24c702ca143 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1464.631503] env[62519]: DEBUG oslo_vmware.api [None req-fd469e3e-0f07-46c3-9403-6ec88e595d61 tempest-ServersTestJSON-192126600 tempest-ServersTestJSON-192126600-project-member] Waiting for the task: (returnval){ [ 1464.631503] env[62519]: value = "task-1802125" [ 1464.631503] env[62519]: _type = "Task" [ 1464.631503] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1464.645962] env[62519]: DEBUG oslo_vmware.api [None req-fd469e3e-0f07-46c3-9403-6ec88e595d61 tempest-ServersTestJSON-192126600 tempest-ServersTestJSON-192126600-project-member] Task: {'id': task-1802125, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1464.671187] env[62519]: DEBUG oslo_concurrency.lockutils [None req-d6584dc1-dea2-47ef-848a-6ee291fa58db tempest-ServersAdminNegativeTestJSON-2096466474 tempest-ServersAdminNegativeTestJSON-2096466474-project-member] Releasing lock "refresh_cache-681ef7a9-3b24-450a-9034-6d30177995d7" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1464.671522] env[62519]: DEBUG nova.compute.manager [None req-d6584dc1-dea2-47ef-848a-6ee291fa58db tempest-ServersAdminNegativeTestJSON-2096466474 tempest-ServersAdminNegativeTestJSON-2096466474-project-member] [instance: 681ef7a9-3b24-450a-9034-6d30177995d7] Instance network_info: |[{"id": "0c70e934-3a08-4103-95df-ba8f3db80e91", "address": "fa:16:3e:a6:18:38", "network": {"id": "b4c20e4e-b4f0-4757-9ab0-b74eef10b678", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.22", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "4069337684d348e0a1ab4eb6a1a2b14d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fe99da4f-5630-4afd-918b-b327193d8489", "external-id": "nsx-vlan-transportzone-688", "segmentation_id": 688, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0c70e934-3a", "ovs_interfaceid": "0c70e934-3a08-4103-95df-ba8f3db80e91", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62519) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2004}} [ 1464.671823] env[62519]: DEBUG oslo_concurrency.lockutils [req-36c3bd30-ea66-4fc9-a110-90d7313c5e26 req-45809164-f47a-4b9a-b5e6-ca1043c57932 service nova] Acquired lock "refresh_cache-681ef7a9-3b24-450a-9034-6d30177995d7" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1464.671994] env[62519]: DEBUG nova.network.neutron [req-36c3bd30-ea66-4fc9-a110-90d7313c5e26 req-45809164-f47a-4b9a-b5e6-ca1043c57932 service nova] [instance: 681ef7a9-3b24-450a-9034-6d30177995d7] Refreshing network info cache for port 0c70e934-3a08-4103-95df-ba8f3db80e91 {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1464.673170] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-d6584dc1-dea2-47ef-848a-6ee291fa58db tempest-ServersAdminNegativeTestJSON-2096466474 tempest-ServersAdminNegativeTestJSON-2096466474-project-member] [instance: 681ef7a9-3b24-450a-9034-6d30177995d7] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:a6:18:38', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'fe99da4f-5630-4afd-918b-b327193d8489', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '0c70e934-3a08-4103-95df-ba8f3db80e91', 'vif_model': 'vmxnet3'}] {{(pid=62519) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1464.681799] env[62519]: DEBUG oslo.service.loopingcall [None req-d6584dc1-dea2-47ef-848a-6ee291fa58db tempest-ServersAdminNegativeTestJSON-2096466474 
tempest-ServersAdminNegativeTestJSON-2096466474-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62519) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1464.682787] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 681ef7a9-3b24-450a-9034-6d30177995d7] Creating VM on the ESX host {{(pid=62519) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1464.683046] env[62519]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-0b3c86b4-1c5c-48a5-b06f-03e8f50289b3 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1464.704026] env[62519]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1464.704026] env[62519]: value = "task-1802126" [ 1464.704026] env[62519]: _type = "Task" [ 1464.704026] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1464.707428] env[62519]: DEBUG nova.network.neutron [-] [instance: 49221ea3-d457-4cf5-97a9-9ae74c4e86fb] Updating instance_info_cache with network_info: [] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1464.715011] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1802126, 'name': CreateVM_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1464.884598] env[62519]: DEBUG oslo_vmware.api [None req-070141c4-76cd-4253-8f36-904e93010092 tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] Task: {'id': task-1802124, 'name': PowerOnVM_Task, 'duration_secs': 0.744581} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1464.884909] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-070141c4-76cd-4253-8f36-904e93010092 tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] [instance: 3aab3adb-b28b-45dd-880f-b1cfbaeeed0c] Powered on the VM {{(pid=62519) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1464.885147] env[62519]: INFO nova.compute.manager [None req-070141c4-76cd-4253-8f36-904e93010092 tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] [instance: 3aab3adb-b28b-45dd-880f-b1cfbaeeed0c] Took 10.34 seconds to spawn the instance on the hypervisor. [ 1464.885333] env[62519]: DEBUG nova.compute.manager [None req-070141c4-76cd-4253-8f36-904e93010092 tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] [instance: 3aab3adb-b28b-45dd-880f-b1cfbaeeed0c] Checking state {{(pid=62519) _get_power_state /opt/stack/nova/nova/compute/manager.py:1799}} [ 1464.886229] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18769ce4-0c65-4608-8374-a0ab94c6d762 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1465.043896] env[62519]: DEBUG nova.compute.manager [None req-d532afdb-73c2-4922-91b8-52d65f7c0d83 tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] [instance: 1d4b14d3-8832-457e-aaed-462236555f57] Starting instance... 
{{(pid=62519) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2441}} [ 1465.117208] env[62519]: DEBUG oslo_concurrency.lockutils [None req-7d5c54f5-351b-4d76-a79b-bca198a22919 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Lock "8b178cc0-db79-4ec2-8962-f31b936f8eff" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 41.099s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1465.143369] env[62519]: DEBUG oslo_vmware.api [None req-fd469e3e-0f07-46c3-9403-6ec88e595d61 tempest-ServersTestJSON-192126600 tempest-ServersTestJSON-192126600-project-member] Task: {'id': task-1802125, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.080155} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1465.143582] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-fd469e3e-0f07-46c3-9403-6ec88e595d61 tempest-ServersTestJSON-192126600 tempest-ServersTestJSON-192126600-project-member] [instance: 4c336ad1-8ce6-4f89-843e-0baae0d0dbda] Extended root virtual disk {{(pid=62519) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1465.144404] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89cfb920-0340-41ed-9da7-39fd6cca627b {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1465.167557] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-fd469e3e-0f07-46c3-9403-6ec88e595d61 tempest-ServersTestJSON-192126600 tempest-ServersTestJSON-192126600-project-member] [instance: 4c336ad1-8ce6-4f89-843e-0baae0d0dbda] Reconfiguring VM instance instance-00000017 to attach disk [datastore1] 4c336ad1-8ce6-4f89-843e-0baae0d0dbda/4c336ad1-8ce6-4f89-843e-0baae0d0dbda.vmdk or device None with type sparse {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1465.168207] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e84a63b3-ba58-48a0-81f8-0881ff0ea5d5 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1465.190102] env[62519]: DEBUG oslo_vmware.api [None req-fd469e3e-0f07-46c3-9403-6ec88e595d61 tempest-ServersTestJSON-192126600 tempest-ServersTestJSON-192126600-project-member] Waiting for the task: (returnval){ [ 1465.190102] env[62519]: value = "task-1802128" [ 1465.190102] env[62519]: _type = "Task" [ 1465.190102] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1465.198537] env[62519]: DEBUG oslo_vmware.api [None req-fd469e3e-0f07-46c3-9403-6ec88e595d61 tempest-ServersTestJSON-192126600 tempest-ServersTestJSON-192126600-project-member] Task: {'id': task-1802128, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1465.212567] env[62519]: INFO nova.compute.manager [-] [instance: 49221ea3-d457-4cf5-97a9-9ae74c4e86fb] Took 1.03 seconds to deallocate network for instance. [ 1465.212882] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1802126, 'name': CreateVM_Task, 'duration_secs': 0.363048} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1465.214452] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 681ef7a9-3b24-450a-9034-6d30177995d7] Created VM on the ESX host {{(pid=62519) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1465.215104] env[62519]: DEBUG oslo_concurrency.lockutils [None req-d6584dc1-dea2-47ef-848a-6ee291fa58db tempest-ServersAdminNegativeTestJSON-2096466474 tempest-ServersAdminNegativeTestJSON-2096466474-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1465.215266] env[62519]: DEBUG oslo_concurrency.lockutils [None req-d6584dc1-dea2-47ef-848a-6ee291fa58db tempest-ServersAdminNegativeTestJSON-2096466474 tempest-ServersAdminNegativeTestJSON-2096466474-project-member] Acquired lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1465.215615] env[62519]: DEBUG oslo_concurrency.lockutils [None req-d6584dc1-dea2-47ef-848a-6ee291fa58db tempest-ServersAdminNegativeTestJSON-2096466474 tempest-ServersAdminNegativeTestJSON-2096466474-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1465.218569] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-dceb3840-3070-47ff-82aa-288754eeb9d1 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1465.223733] env[62519]: DEBUG oslo_vmware.api [None req-d6584dc1-dea2-47ef-848a-6ee291fa58db tempest-ServersAdminNegativeTestJSON-2096466474 tempest-ServersAdminNegativeTestJSON-2096466474-project-member] Waiting for the task: (returnval){ [ 1465.223733] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]52684af0-f069-aa27-a413-2808f28d1afe" [ 1465.223733] env[62519]: _type = "Task" [ 1465.223733] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1465.231903] env[62519]: DEBUG oslo_vmware.api [None req-d6584dc1-dea2-47ef-848a-6ee291fa58db tempest-ServersAdminNegativeTestJSON-2096466474 tempest-ServersAdminNegativeTestJSON-2096466474-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52684af0-f069-aa27-a413-2808f28d1afe, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1465.412951] env[62519]: INFO nova.compute.manager [None req-070141c4-76cd-4253-8f36-904e93010092 tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] [instance: 3aab3adb-b28b-45dd-880f-b1cfbaeeed0c] Took 46.55 seconds to build instance. [ 1465.554521] env[62519]: DEBUG nova.network.neutron [req-36c3bd30-ea66-4fc9-a110-90d7313c5e26 req-45809164-f47a-4b9a-b5e6-ca1043c57932 service nova] [instance: 681ef7a9-3b24-450a-9034-6d30177995d7] Updated VIF entry in instance network info cache for port 0c70e934-3a08-4103-95df-ba8f3db80e91. 
{{(pid=62519) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1465.554933] env[62519]: DEBUG nova.network.neutron [req-36c3bd30-ea66-4fc9-a110-90d7313c5e26 req-45809164-f47a-4b9a-b5e6-ca1043c57932 service nova] [instance: 681ef7a9-3b24-450a-9034-6d30177995d7] Updating instance_info_cache with network_info: [{"id": "0c70e934-3a08-4103-95df-ba8f3db80e91", "address": "fa:16:3e:a6:18:38", "network": {"id": "b4c20e4e-b4f0-4757-9ab0-b74eef10b678", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.22", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "4069337684d348e0a1ab4eb6a1a2b14d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fe99da4f-5630-4afd-918b-b327193d8489", "external-id": "nsx-vlan-transportzone-688", "segmentation_id": 688, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0c70e934-3a", "ovs_interfaceid": "0c70e934-3a08-4103-95df-ba8f3db80e91", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1465.577820] env[62519]: DEBUG oslo_concurrency.lockutils [None req-d532afdb-73c2-4922-91b8-52d65f7c0d83 tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1465.596679] env[62519]: DEBUG oslo_concurrency.lockutils [None req-d788e678-2770-4a2b-8ed0-e8cc74b5f814 tempest-ServersAdmin275Test-1657935187 tempest-ServersAdmin275Test-1657935187-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.011s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1465.598760] env[62519]: DEBUG oslo_concurrency.lockutils [None req-4ab4b288-3acf-467a-a329-77277bf523db tempest-ServersAdmin275Test-1501358753 tempest-ServersAdmin275Test-1501358753-project-admin] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 27.377s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1465.598976] env[62519]: DEBUG nova.objects.instance [None req-4ab4b288-3acf-467a-a329-77277bf523db tempest-ServersAdmin275Test-1501358753 tempest-ServersAdmin275Test-1501358753-project-admin] [instance: c07e4d30-44bc-417b-8137-97f974aec932] Trying to apply a migration context that does not seem to be set for this instance {{(pid=62519) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1465.701309] env[62519]: DEBUG oslo_vmware.api [None req-fd469e3e-0f07-46c3-9403-6ec88e595d61 tempest-ServersTestJSON-192126600 tempest-ServersTestJSON-192126600-project-member] Task: {'id': task-1802128, 'name': ReconfigVM_Task, 'duration_secs': 0.351459} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1465.701747] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-fd469e3e-0f07-46c3-9403-6ec88e595d61 tempest-ServersTestJSON-192126600 tempest-ServersTestJSON-192126600-project-member] [instance: 4c336ad1-8ce6-4f89-843e-0baae0d0dbda] Reconfigured VM instance instance-00000017 to attach disk [datastore1] 4c336ad1-8ce6-4f89-843e-0baae0d0dbda/4c336ad1-8ce6-4f89-843e-0baae0d0dbda.vmdk or device None with type sparse {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1465.702509] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-c2c2e7df-7884-4df3-8de3-bb46d1cf6b1d {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1465.712032] env[62519]: DEBUG oslo_vmware.api [None req-fd469e3e-0f07-46c3-9403-6ec88e595d61 tempest-ServersTestJSON-192126600 tempest-ServersTestJSON-192126600-project-member] Waiting for the task: (returnval){ [ 1465.712032] env[62519]: value = "task-1802129" [ 1465.712032] env[62519]: _type = "Task" [ 1465.712032] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1465.718923] env[62519]: DEBUG oslo_vmware.api [None req-fd469e3e-0f07-46c3-9403-6ec88e595d61 tempest-ServersTestJSON-192126600 tempest-ServersTestJSON-192126600-project-member] Task: {'id': task-1802129, 'name': Rename_Task} progress is 5%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1465.722671] env[62519]: DEBUG oslo_concurrency.lockutils [None req-c79e11bf-bb5b-47ac-8984-42477fd61aad tempest-ServerShowV247Test-1912217433 tempest-ServerShowV247Test-1912217433-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1465.733590] env[62519]: DEBUG oslo_vmware.api [None req-d6584dc1-dea2-47ef-848a-6ee291fa58db tempest-ServersAdminNegativeTestJSON-2096466474 tempest-ServersAdminNegativeTestJSON-2096466474-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52684af0-f069-aa27-a413-2808f28d1afe, 'name': SearchDatastore_Task, 'duration_secs': 0.029775} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1465.733940] env[62519]: DEBUG oslo_concurrency.lockutils [None req-d6584dc1-dea2-47ef-848a-6ee291fa58db tempest-ServersAdminNegativeTestJSON-2096466474 tempest-ServersAdminNegativeTestJSON-2096466474-project-member] Releasing lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1465.734199] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-d6584dc1-dea2-47ef-848a-6ee291fa58db tempest-ServersAdminNegativeTestJSON-2096466474 tempest-ServersAdminNegativeTestJSON-2096466474-project-member] [instance: 681ef7a9-3b24-450a-9034-6d30177995d7] Processing image 15793716-f1d9-4a86-9030-717adf498693 {{(pid=62519) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1465.734438] env[62519]: DEBUG oslo_concurrency.lockutils [None req-d6584dc1-dea2-47ef-848a-6ee291fa58db tempest-ServersAdminNegativeTestJSON-2096466474 tempest-ServersAdminNegativeTestJSON-2096466474-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1465.734607] env[62519]: DEBUG oslo_concurrency.lockutils [None req-d6584dc1-dea2-47ef-848a-6ee291fa58db tempest-ServersAdminNegativeTestJSON-2096466474 tempest-ServersAdminNegativeTestJSON-2096466474-project-member] Acquired lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1465.734804] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-d6584dc1-dea2-47ef-848a-6ee291fa58db tempest-ServersAdminNegativeTestJSON-2096466474 tempest-ServersAdminNegativeTestJSON-2096466474-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62519) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1465.735108] env[62519]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-964f83e9-db27-4063-9278-d3d8d0fa59aa {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1465.761024] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-d6584dc1-dea2-47ef-848a-6ee291fa58db tempest-ServersAdminNegativeTestJSON-2096466474 tempest-ServersAdminNegativeTestJSON-2096466474-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62519) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1465.761024] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-d6584dc1-dea2-47ef-848a-6ee291fa58db tempest-ServersAdminNegativeTestJSON-2096466474 tempest-ServersAdminNegativeTestJSON-2096466474-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62519) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1465.761698] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8aaa2945-334b-4afa-abce-657ab86c0326 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1465.767970] env[62519]: DEBUG oslo_vmware.api [None req-d6584dc1-dea2-47ef-848a-6ee291fa58db tempest-ServersAdminNegativeTestJSON-2096466474 tempest-ServersAdminNegativeTestJSON-2096466474-project-member] Waiting for the task: (returnval){ [ 1465.767970] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]529736de-e5dd-56b2-e304-1ddd845096ed" [ 1465.767970] env[62519]: _type = "Task" [ 1465.767970] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1465.777196] env[62519]: DEBUG oslo_vmware.api [None req-d6584dc1-dea2-47ef-848a-6ee291fa58db tempest-ServersAdminNegativeTestJSON-2096466474 tempest-ServersAdminNegativeTestJSON-2096466474-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]529736de-e5dd-56b2-e304-1ddd845096ed, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1466.058161] env[62519]: DEBUG oslo_concurrency.lockutils [req-36c3bd30-ea66-4fc9-a110-90d7313c5e26 req-45809164-f47a-4b9a-b5e6-ca1043c57932 service nova] Releasing lock "refresh_cache-681ef7a9-3b24-450a-9034-6d30177995d7" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1466.219565] env[62519]: DEBUG oslo_vmware.api [None req-fd469e3e-0f07-46c3-9403-6ec88e595d61 tempest-ServersTestJSON-192126600 tempest-ServersTestJSON-192126600-project-member] Task: {'id': task-1802129, 'name': Rename_Task, 'duration_secs': 0.153812} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1466.219930] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-fd469e3e-0f07-46c3-9403-6ec88e595d61 tempest-ServersTestJSON-192126600 tempest-ServersTestJSON-192126600-project-member] [instance: 4c336ad1-8ce6-4f89-843e-0baae0d0dbda] Powering on the VM {{(pid=62519) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1466.220159] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-316c02b3-05ff-434c-8a01-c0603b3ef53c {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1466.227071] env[62519]: DEBUG oslo_vmware.api [None req-fd469e3e-0f07-46c3-9403-6ec88e595d61 tempest-ServersTestJSON-192126600 tempest-ServersTestJSON-192126600-project-member] Waiting for the task: (returnval){ [ 1466.227071] env[62519]: value = "task-1802130" [ 1466.227071] env[62519]: _type = "Task" [ 1466.227071] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1466.234400] env[62519]: DEBUG oslo_vmware.api [None req-fd469e3e-0f07-46c3-9403-6ec88e595d61 tempest-ServersTestJSON-192126600 tempest-ServersTestJSON-192126600-project-member] Task: {'id': task-1802130, 'name': PowerOnVM_Task} progress is 0%. 
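Note: powering the renamed VM on is the same task pattern again, and the "Checking state" step that follows a successful spawn is a property-collector read of runtime.powerState. A sketch of both steps, reusing the session and vm_ref objects assumed in the first sketch.

# Sketch: power on a VM and read back its power state afterwards.
# Assumes the `session` and `vm_ref` objects from the first sketch.
from oslo_vmware import vim_util

power_on_task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
session.wait_for_task(power_on_task)  # the PowerOnVM_Task progress lines

# "Checking state": one RetrievePropertiesEx round trip for runtime.powerState.
state = session.invoke_api(vim_util, 'get_object_property',
                           session.vim, vm_ref, 'runtime.powerState')
print(state)  # e.g. 'poweredOn'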
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1466.278663] env[62519]: DEBUG oslo_vmware.api [None req-d6584dc1-dea2-47ef-848a-6ee291fa58db tempest-ServersAdminNegativeTestJSON-2096466474 tempest-ServersAdminNegativeTestJSON-2096466474-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]529736de-e5dd-56b2-e304-1ddd845096ed, 'name': SearchDatastore_Task, 'duration_secs': 0.0193} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1466.279460] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6af3ce18-4a9f-4501-badc-ce868cc7aea1 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1466.284691] env[62519]: DEBUG oslo_vmware.api [None req-d6584dc1-dea2-47ef-848a-6ee291fa58db tempest-ServersAdminNegativeTestJSON-2096466474 tempest-ServersAdminNegativeTestJSON-2096466474-project-member] Waiting for the task: (returnval){ [ 1466.284691] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]52a3c96a-d160-4ce1-f6ad-f307c4f41f4c" [ 1466.284691] env[62519]: _type = "Task" [ 1466.284691] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1466.293253] env[62519]: DEBUG oslo_vmware.api [None req-d6584dc1-dea2-47ef-848a-6ee291fa58db tempest-ServersAdminNegativeTestJSON-2096466474 tempest-ServersAdminNegativeTestJSON-2096466474-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52a3c96a-d160-4ce1-f6ad-f307c4f41f4c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1466.607944] env[62519]: DEBUG oslo_concurrency.lockutils [None req-4ab4b288-3acf-467a-a329-77277bf523db tempest-ServersAdmin275Test-1501358753 tempest-ServersAdmin275Test-1501358753-project-admin] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.009s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1466.609753] env[62519]: DEBUG oslo_concurrency.lockutils [None req-b98629d6-2be9-4962-a917-529126d46bed tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 27.171s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1466.618328] env[62519]: DEBUG oslo_concurrency.lockutils [None req-97f971da-2cc7-4dd4-b5fa-02fbf4a6a87f tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] Acquiring lock "021c3287-5887-457e-9b3a-233308fb9b23" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1466.618589] env[62519]: DEBUG oslo_concurrency.lockutils [None req-97f971da-2cc7-4dd4-b5fa-02fbf4a6a87f tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] Lock "021c3287-5887-457e-9b3a-233308fb9b23" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s 
{{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1466.618920] env[62519]: DEBUG oslo_concurrency.lockutils [None req-97f971da-2cc7-4dd4-b5fa-02fbf4a6a87f tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] Acquiring lock "021c3287-5887-457e-9b3a-233308fb9b23-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1466.618983] env[62519]: DEBUG oslo_concurrency.lockutils [None req-97f971da-2cc7-4dd4-b5fa-02fbf4a6a87f tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] Lock "021c3287-5887-457e-9b3a-233308fb9b23-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1466.619145] env[62519]: DEBUG oslo_concurrency.lockutils [None req-97f971da-2cc7-4dd4-b5fa-02fbf4a6a87f tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] Lock "021c3287-5887-457e-9b3a-233308fb9b23-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1466.621367] env[62519]: INFO nova.compute.manager [None req-97f971da-2cc7-4dd4-b5fa-02fbf4a6a87f tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] [instance: 021c3287-5887-457e-9b3a-233308fb9b23] Terminating instance [ 1466.737094] env[62519]: DEBUG oslo_vmware.api [None req-fd469e3e-0f07-46c3-9403-6ec88e595d61 tempest-ServersTestJSON-192126600 tempest-ServersTestJSON-192126600-project-member] Task: {'id': task-1802130, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1466.795873] env[62519]: DEBUG oslo_vmware.api [None req-d6584dc1-dea2-47ef-848a-6ee291fa58db tempest-ServersAdminNegativeTestJSON-2096466474 tempest-ServersAdminNegativeTestJSON-2096466474-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52a3c96a-d160-4ce1-f6ad-f307c4f41f4c, 'name': SearchDatastore_Task, 'duration_secs': 0.041418} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1466.796898] env[62519]: DEBUG oslo_concurrency.lockutils [None req-d6584dc1-dea2-47ef-848a-6ee291fa58db tempest-ServersAdminNegativeTestJSON-2096466474 tempest-ServersAdminNegativeTestJSON-2096466474-project-member] Releasing lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1466.796898] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-d6584dc1-dea2-47ef-848a-6ee291fa58db tempest-ServersAdminNegativeTestJSON-2096466474 tempest-ServersAdminNegativeTestJSON-2096466474-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk to [datastore1] 681ef7a9-3b24-450a-9034-6d30177995d7/681ef7a9-3b24-450a-9034-6d30177995d7.vmdk {{(pid=62519) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1466.796898] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ef31d509-a128-48ac-b95d-5947bdb8e5da {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1466.802782] env[62519]: DEBUG oslo_vmware.api [None req-d6584dc1-dea2-47ef-848a-6ee291fa58db tempest-ServersAdminNegativeTestJSON-2096466474 tempest-ServersAdminNegativeTestJSON-2096466474-project-member] Waiting for the task: (returnval){ [ 1466.802782] env[62519]: value = "task-1802132" [ 1466.802782] env[62519]: _type = "Task" [ 1466.802782] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1466.810523] env[62519]: DEBUG oslo_vmware.api [None req-d6584dc1-dea2-47ef-848a-6ee291fa58db tempest-ServersAdminNegativeTestJSON-2096466474 tempest-ServersAdminNegativeTestJSON-2096466474-project-member] Task: {'id': task-1802132, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1466.921456] env[62519]: DEBUG oslo_concurrency.lockutils [None req-070141c4-76cd-4253-8f36-904e93010092 tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] Lock "3aab3adb-b28b-45dd-880f-b1cfbaeeed0c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 58.285s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1467.125740] env[62519]: DEBUG nova.compute.manager [None req-97f971da-2cc7-4dd4-b5fa-02fbf4a6a87f tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] [instance: 021c3287-5887-457e-9b3a-233308fb9b23] Start destroying the instance on the hypervisor. 
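Note: the CopyVirtualDisk_Task entries above are the cache-to-instance copy: the cached image VMDK is copied into the new instance's directory on the same datastore through the VirtualDiskManager. A condensed sketch of that call, written as a helper that takes the oslo.vmware session and a datacenter moref as assumed inputs.

# Sketch: copy a cached image VMDK into an instance's datastore folder.
# `session` is an oslo.vmware VMwareAPISession and `dc_ref` a datacenter
# managed-object reference; both are assumed to exist already.
def copy_cached_image(session, dc_ref, image_id, instance_uuid):
    source = (f'[datastore1] devstack-image-cache_base/'
              f'{image_id}/{image_id}.vmdk')
    dest = f'[datastore1] {instance_uuid}/{instance_uuid}.vmdk'
    copy_task = session.invoke_api(
        session.vim, 'CopyVirtualDisk_Task',
        session.vim.service_content.virtualDiskManager,
        sourceName=source, sourceDatacenter=dc_ref,
        destName=dest, destDatacenter=dc_ref)
    # Blocking poll; this is what produces the CopyVirtualDisk_Task
    # "progress is N%" lines above.
    session.wait_for_task(copy_task)

# e.g. copy_cached_image(session, dc_ref,
#                        '15793716-f1d9-4a86-9030-717adf498693',
#                        '681ef7a9-3b24-450a-9034-6d30177995d7')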
{{(pid=62519) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3166}} [ 1467.126140] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-97f971da-2cc7-4dd4-b5fa-02fbf4a6a87f tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] [instance: 021c3287-5887-457e-9b3a-233308fb9b23] Destroying instance {{(pid=62519) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1467.128451] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0013f371-4204-4f15-9a7c-b7e34caad4d0 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1467.139768] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-97f971da-2cc7-4dd4-b5fa-02fbf4a6a87f tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] [instance: 021c3287-5887-457e-9b3a-233308fb9b23] Powering off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1467.140189] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e721b62e-71ba-47c2-beef-a33e83be68ee {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1467.154357] env[62519]: DEBUG oslo_vmware.api [None req-97f971da-2cc7-4dd4-b5fa-02fbf4a6a87f tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] Waiting for the task: (returnval){ [ 1467.154357] env[62519]: value = "task-1802133" [ 1467.154357] env[62519]: _type = "Task" [ 1467.154357] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1467.166543] env[62519]: DEBUG oslo_vmware.api [None req-97f971da-2cc7-4dd4-b5fa-02fbf4a6a87f tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] Task: {'id': task-1802133, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1467.242782] env[62519]: DEBUG oslo_vmware.api [None req-fd469e3e-0f07-46c3-9403-6ec88e595d61 tempest-ServersTestJSON-192126600 tempest-ServersTestJSON-192126600-project-member] Task: {'id': task-1802130, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1467.316410] env[62519]: DEBUG oslo_vmware.api [None req-d6584dc1-dea2-47ef-848a-6ee291fa58db tempest-ServersAdminNegativeTestJSON-2096466474 tempest-ServersAdminNegativeTestJSON-2096466474-project-member] Task: {'id': task-1802132, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1467.425690] env[62519]: DEBUG nova.compute.manager [None req-1d8c6fef-464e-42e4-b682-66f718a2e8f1 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] [instance: 51eaac08-75fd-49f9-9b1a-cc2a2d799634] Starting instance... {{(pid=62519) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2441}} [ 1467.664297] env[62519]: DEBUG oslo_vmware.api [None req-97f971da-2cc7-4dd4-b5fa-02fbf4a6a87f tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] Task: {'id': task-1802133, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1467.720430] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9563896e-fe42-4ed0-b7c9-3821c11ac71c {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1467.734193] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca1c9921-5c74-48ec-b961-337c3cd6dde6 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1467.743210] env[62519]: DEBUG oslo_vmware.api [None req-fd469e3e-0f07-46c3-9403-6ec88e595d61 tempest-ServersTestJSON-192126600 tempest-ServersTestJSON-192126600-project-member] Task: {'id': task-1802130, 'name': PowerOnVM_Task, 'duration_secs': 1.423383} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1467.767763] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-fd469e3e-0f07-46c3-9403-6ec88e595d61 tempest-ServersTestJSON-192126600 tempest-ServersTestJSON-192126600-project-member] [instance: 4c336ad1-8ce6-4f89-843e-0baae0d0dbda] Powered on the VM {{(pid=62519) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1467.768041] env[62519]: INFO nova.compute.manager [None req-fd469e3e-0f07-46c3-9403-6ec88e595d61 tempest-ServersTestJSON-192126600 tempest-ServersTestJSON-192126600-project-member] [instance: 4c336ad1-8ce6-4f89-843e-0baae0d0dbda] Took 8.32 seconds to spawn the instance on the hypervisor. [ 1467.768242] env[62519]: DEBUG nova.compute.manager [None req-fd469e3e-0f07-46c3-9403-6ec88e595d61 tempest-ServersTestJSON-192126600 tempest-ServersTestJSON-192126600-project-member] [instance: 4c336ad1-8ce6-4f89-843e-0baae0d0dbda] Checking state {{(pid=62519) _get_power_state /opt/stack/nova/nova/compute/manager.py:1799}} [ 1467.769705] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6575699e-73da-48ff-a5b0-9602197d6112 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1467.772997] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42580d1c-0170-4754-ab10-0e1f9fa901dc {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1467.788038] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53ae6bee-f068-4e45-a776-7edb33a029c0 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1467.802372] env[62519]: DEBUG nova.compute.provider_tree [None req-b98629d6-2be9-4962-a917-529126d46bed tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Updating inventory in ProviderTree for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 157, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 
1467.812506] env[62519]: DEBUG oslo_vmware.api [None req-d6584dc1-dea2-47ef-848a-6ee291fa58db tempest-ServersAdminNegativeTestJSON-2096466474 tempest-ServersAdminNegativeTestJSON-2096466474-project-member] Task: {'id': task-1802132, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.831984} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1467.812752] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-d6584dc1-dea2-47ef-848a-6ee291fa58db tempest-ServersAdminNegativeTestJSON-2096466474 tempest-ServersAdminNegativeTestJSON-2096466474-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk to [datastore1] 681ef7a9-3b24-450a-9034-6d30177995d7/681ef7a9-3b24-450a-9034-6d30177995d7.vmdk {{(pid=62519) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1467.812943] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-d6584dc1-dea2-47ef-848a-6ee291fa58db tempest-ServersAdminNegativeTestJSON-2096466474 tempest-ServersAdminNegativeTestJSON-2096466474-project-member] [instance: 681ef7a9-3b24-450a-9034-6d30177995d7] Extending root virtual disk to 1048576 {{(pid=62519) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1467.813211] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-7087b55b-dc9a-4d60-9ffd-d22aa7bd98c3 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1467.820891] env[62519]: DEBUG oslo_vmware.api [None req-d6584dc1-dea2-47ef-848a-6ee291fa58db tempest-ServersAdminNegativeTestJSON-2096466474 tempest-ServersAdminNegativeTestJSON-2096466474-project-member] Waiting for the task: (returnval){ [ 1467.820891] env[62519]: value = "task-1802134" [ 1467.820891] env[62519]: _type = "Task" [ 1467.820891] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1467.831336] env[62519]: DEBUG oslo_vmware.api [None req-d6584dc1-dea2-47ef-848a-6ee291fa58db tempest-ServersAdminNegativeTestJSON-2096466474 tempest-ServersAdminNegativeTestJSON-2096466474-project-member] Task: {'id': task-1802134, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1467.948438] env[62519]: DEBUG oslo_concurrency.lockutils [None req-1d8c6fef-464e-42e4-b682-66f718a2e8f1 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1468.165328] env[62519]: DEBUG oslo_vmware.api [None req-97f971da-2cc7-4dd4-b5fa-02fbf4a6a87f tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] Task: {'id': task-1802133, 'name': PowerOffVM_Task, 'duration_secs': 0.635799} completed successfully. 
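Note: once the copy finishes, the root disk is grown to the flavor size with ExtendVirtualDisk_Task; the "1048576" in the log is the requested capacity in KB, i.e. 1 GiB. A sketch under the same assumptions as the copy helper above.

# Sketch: extend a root VMDK to the flavor size. newCapacityKb is in KB,
# so 1048576 == 1 GiB. Assumes `session` and `dc_ref` as above.
def extend_root_disk(session, dc_ref, vmdk_path, new_capacity_kb):
    extend_task = session.invoke_api(
        session.vim, 'ExtendVirtualDisk_Task',
        session.vim.service_content.virtualDiskManager,
        name=vmdk_path, datacenter=dc_ref,
        newCapacityKb=new_capacity_kb, eagerZero=False)
    session.wait_for_task(extend_task)

# e.g. extend_root_disk(
#     session, dc_ref,
#     '[datastore1] 681ef7a9-3b24-450a-9034-6d30177995d7/'
#     '681ef7a9-3b24-450a-9034-6d30177995d7.vmdk',
#     1048576)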
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1468.165627] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-97f971da-2cc7-4dd4-b5fa-02fbf4a6a87f tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] [instance: 021c3287-5887-457e-9b3a-233308fb9b23] Powered off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1468.165805] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-97f971da-2cc7-4dd4-b5fa-02fbf4a6a87f tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] [instance: 021c3287-5887-457e-9b3a-233308fb9b23] Unregistering the VM {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1468.166094] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-37cd64f5-dace-4dd3-8e9b-7379a4c7d2ab {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1468.258797] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-97f971da-2cc7-4dd4-b5fa-02fbf4a6a87f tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] [instance: 021c3287-5887-457e-9b3a-233308fb9b23] Unregistered the VM {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1468.259146] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-97f971da-2cc7-4dd4-b5fa-02fbf4a6a87f tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] [instance: 021c3287-5887-457e-9b3a-233308fb9b23] Deleting contents of the VM from datastore datastore1 {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1468.259237] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-97f971da-2cc7-4dd4-b5fa-02fbf4a6a87f tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] Deleting the datastore file [datastore1] 021c3287-5887-457e-9b3a-233308fb9b23 {{(pid=62519) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1468.259454] env[62519]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f9d9005e-3291-45de-8fbf-71cfecea7f47 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1468.265824] env[62519]: DEBUG oslo_vmware.api [None req-97f971da-2cc7-4dd4-b5fa-02fbf4a6a87f tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] Waiting for the task: (returnval){ [ 1468.265824] env[62519]: value = "task-1802136" [ 1468.265824] env[62519]: _type = "Task" [ 1468.265824] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1468.275496] env[62519]: DEBUG oslo_vmware.api [None req-97f971da-2cc7-4dd4-b5fa-02fbf4a6a87f tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] Task: {'id': task-1802136, 'name': DeleteDatastoreFile_Task} progress is 0%. 
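Note: the terminate path for instance 021c3287 above is the usual three-step teardown: power the VM off, UnregisterVM (a plain method, not a task), then delete the instance's directory from the datastore through the FileManager. A condensed sketch, again assuming the session, vm_ref and dc_ref objects from the earlier sketches.

# Sketch: the destroy sequence seen above (power off -> unregister ->
# delete the instance's datastore directory). Same assumed objects.
def destroy_vm(session, vm_ref, dc_ref, instance_dir):
    off_task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)
    session.wait_for_task(off_task)

    # UnregisterVM only removes the VM from vCenter's inventory and does
    # not return a task, so there is nothing to poll here.
    session.invoke_api(session.vim, 'UnregisterVM', vm_ref)

    delete_task = session.invoke_api(
        session.vim, 'DeleteDatastoreFile_Task',
        session.vim.service_content.fileManager,
        name=instance_dir, datacenter=dc_ref)
    session.wait_for_task(delete_task)

# e.g. destroy_vm(session, vm_ref, dc_ref,
#                 '[datastore1] 021c3287-5887-457e-9b3a-233308fb9b23')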
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1468.293871] env[62519]: INFO nova.compute.manager [None req-fd469e3e-0f07-46c3-9403-6ec88e595d61 tempest-ServersTestJSON-192126600 tempest-ServersTestJSON-192126600-project-member] [instance: 4c336ad1-8ce6-4f89-843e-0baae0d0dbda] Took 45.64 seconds to build instance. [ 1468.332033] env[62519]: DEBUG oslo_vmware.api [None req-d6584dc1-dea2-47ef-848a-6ee291fa58db tempest-ServersAdminNegativeTestJSON-2096466474 tempest-ServersAdminNegativeTestJSON-2096466474-project-member] Task: {'id': task-1802134, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.179894} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1468.332953] env[62519]: ERROR nova.scheduler.client.report [None req-b98629d6-2be9-4962-a917-529126d46bed tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] [req-870cbc67-543d-473e-b473-876dce17aa81] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 157, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID f8ca0d98-9158-4b85-ae0e-b106f966dd44. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-870cbc67-543d-473e-b473-876dce17aa81"}]} [ 1468.333431] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-d6584dc1-dea2-47ef-848a-6ee291fa58db tempest-ServersAdminNegativeTestJSON-2096466474 tempest-ServersAdminNegativeTestJSON-2096466474-project-member] [instance: 681ef7a9-3b24-450a-9034-6d30177995d7] Extended root virtual disk {{(pid=62519) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1468.336344] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6f3886b-5e30-4e64-a1bd-9fb88c23eb72 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1468.359337] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-d6584dc1-dea2-47ef-848a-6ee291fa58db tempest-ServersAdminNegativeTestJSON-2096466474 tempest-ServersAdminNegativeTestJSON-2096466474-project-member] [instance: 681ef7a9-3b24-450a-9034-6d30177995d7] Reconfiguring VM instance instance-00000018 to attach disk [datastore1] 681ef7a9-3b24-450a-9034-6d30177995d7/681ef7a9-3b24-450a-9034-6d30177995d7.vmdk or device None with type sparse {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1468.360349] env[62519]: DEBUG nova.scheduler.client.report [None req-b98629d6-2be9-4962-a917-529126d46bed tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Refreshing inventories for resource provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 {{(pid=62519) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 1468.362852] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ea741266-191c-4bac-ba20-ff5c2585936f {{(pid=62519) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1468.384872] env[62519]: DEBUG oslo_vmware.api [None req-d6584dc1-dea2-47ef-848a-6ee291fa58db tempest-ServersAdminNegativeTestJSON-2096466474 tempest-ServersAdminNegativeTestJSON-2096466474-project-member] Waiting for the task: (returnval){ [ 1468.384872] env[62519]: value = "task-1802137" [ 1468.384872] env[62519]: _type = "Task" [ 1468.384872] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1468.391343] env[62519]: DEBUG nova.scheduler.client.report [None req-b98629d6-2be9-4962-a917-529126d46bed tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Updating ProviderTree inventory for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 1468.391757] env[62519]: DEBUG nova.compute.provider_tree [None req-b98629d6-2be9-4962-a917-529126d46bed tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Updating inventory in ProviderTree for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1468.400258] env[62519]: DEBUG oslo_vmware.api [None req-d6584dc1-dea2-47ef-848a-6ee291fa58db tempest-ServersAdminNegativeTestJSON-2096466474 tempest-ServersAdminNegativeTestJSON-2096466474-project-member] Task: {'id': task-1802137, 'name': ReconfigVM_Task} progress is 6%. 
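Note: the 409 from Placement two entries above is its optimistic concurrency control: every inventory PUT carries the resource provider generation it was computed against, a stale generation is rejected with code placement.concurrent_update, and the report client then re-reads the provider and retries, which is the "Refreshing inventories" / "Updating ProviderTree inventory" sequence that follows. A bare-bones sketch of that protocol with requests; the endpoint, token and microversion header are placeholders rather than values from this deployment.

# Sketch: generation-checked inventory update against the Placement API,
# retried on 409 "placement.concurrent_update". Endpoint and token are
# placeholders; the provider UUID is reused from the log for illustration.
import requests

PLACEMENT = 'http://placement.example.org/placement'
HEADERS = {'X-Auth-Token': 'ADMIN_TOKEN',
           'OpenStack-API-Version': 'placement 1.26'}

def set_inventory(rp_uuid, inventories, attempts=4):
    url = f'{PLACEMENT}/resource_providers/{rp_uuid}/inventories'
    for _ in range(attempts):
        current = requests.get(url, headers=HEADERS).json()
        body = {'resource_provider_generation':
                    current['resource_provider_generation'],
                'inventories': inventories}
        resp = requests.put(url, json=body, headers=HEADERS)
        if resp.status_code != 409:
            resp.raise_for_status()
            return resp.json()  # carries the bumped generation
        # 409: another writer bumped the generation first; re-read and retry.
    raise RuntimeError('gave up after repeated generation conflicts')

# e.g. set_inventory('f8ca0d98-9158-4b85-ae0e-b106f966dd44',
#                    {'VCPU': {'total': 48, 'max_unit': 16,
#                              'allocation_ratio': 4.0}})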
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1468.410978] env[62519]: DEBUG nova.scheduler.client.report [None req-b98629d6-2be9-4962-a917-529126d46bed tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Refreshing aggregate associations for resource provider f8ca0d98-9158-4b85-ae0e-b106f966dd44, aggregates: None {{(pid=62519) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 1468.433129] env[62519]: DEBUG nova.scheduler.client.report [None req-b98629d6-2be9-4962-a917-529126d46bed tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Refreshing trait associations for resource provider f8ca0d98-9158-4b85-ae0e-b106f966dd44, traits: COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NODE,COMPUTE_SAME_HOST_COLD_MIGRATE,HW_ARCH_X86_64,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_VMDK {{(pid=62519) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 1468.777084] env[62519]: DEBUG oslo_vmware.api [None req-97f971da-2cc7-4dd4-b5fa-02fbf4a6a87f tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] Task: {'id': task-1802136, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.353689} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1468.780987] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-97f971da-2cc7-4dd4-b5fa-02fbf4a6a87f tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] Deleted the datastore file {{(pid=62519) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1468.780987] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-97f971da-2cc7-4dd4-b5fa-02fbf4a6a87f tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] [instance: 021c3287-5887-457e-9b3a-233308fb9b23] Deleted contents of the VM from datastore datastore1 {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1468.780987] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-97f971da-2cc7-4dd4-b5fa-02fbf4a6a87f tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] [instance: 021c3287-5887-457e-9b3a-233308fb9b23] Instance destroyed {{(pid=62519) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1468.780987] env[62519]: INFO nova.compute.manager [None req-97f971da-2cc7-4dd4-b5fa-02fbf4a6a87f tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] [instance: 021c3287-5887-457e-9b3a-233308fb9b23] Took 1.65 seconds to destroy the instance on the hypervisor. [ 1468.780987] env[62519]: DEBUG oslo.service.loopingcall [None req-97f971da-2cc7-4dd4-b5fa-02fbf4a6a87f tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
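Note: the oslo.service loopingcall line shows the compute manager blocking while a retrying looping call drives network deallocation to completion. As a generic illustration of that mechanism (not Nova's exact _deallocate_network_with_retries wrapper), a FixedIntervalLoopingCall that retries an operation until it succeeds and then stops itself.

# Sketch: retry an operation with oslo.service's FixedIntervalLoopingCall,
# stopping via LoopingCallDone once it succeeds. A generic illustration of
# the mechanism named in the log, not Nova's exact wrapper.
from oslo_service import loopingcall

def deallocate():
    print('calling neutron to unbind/delete the instance ports ...')

def make_retrier(func, max_attempts=3):
    attempts = {'n': 0}

    def _tick():
        attempts['n'] += 1
        try:
            func()
        except Exception:
            if attempts['n'] >= max_attempts:
                raise                        # loop stops; wait() re-raises
            return                           # otherwise try again next tick
        raise loopingcall.LoopingCallDone()  # success: stop cleanly

    return _tick

timer = loopingcall.FixedIntervalLoopingCall(make_retrier(deallocate))
timer.start(interval=2).wait()  # blocks, like "Waiting for function ... to return"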
{{(pid=62519) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1468.781302] env[62519]: DEBUG nova.compute.manager [-] [instance: 021c3287-5887-457e-9b3a-233308fb9b23] Deallocating network for instance {{(pid=62519) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2297}} [ 1468.781302] env[62519]: DEBUG nova.network.neutron [-] [instance: 021c3287-5887-457e-9b3a-233308fb9b23] deallocate_for_instance() {{(pid=62519) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1468.897416] env[62519]: DEBUG oslo_vmware.api [None req-d6584dc1-dea2-47ef-848a-6ee291fa58db tempest-ServersAdminNegativeTestJSON-2096466474 tempest-ServersAdminNegativeTestJSON-2096466474-project-member] Task: {'id': task-1802137, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1469.007731] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87269fd9-1180-4656-9a82-36cd6455f825 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1469.016864] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d1fe77c-4235-43c0-a258-d6a98640675a {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1469.059579] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee71d199-23d1-4fe5-af91-3bf83ef44ff1 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1469.070682] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d2e0d94-ea5c-449e-93ad-6f5a04f9325c {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1469.092844] env[62519]: DEBUG nova.compute.provider_tree [None req-b98629d6-2be9-4962-a917-529126d46bed tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Updating inventory in ProviderTree for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 157, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1469.393609] env[62519]: DEBUG nova.compute.manager [req-5587a759-84a5-488d-8c16-563d808a0b8e req-4c350bc5-d518-4dc7-b090-14c2ed7486fa service nova] [instance: 021c3287-5887-457e-9b3a-233308fb9b23] Received event network-vif-deleted-60084240-cc60-4e07-9cae-3f7d36559e40 {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1469.393947] env[62519]: INFO nova.compute.manager [req-5587a759-84a5-488d-8c16-563d808a0b8e req-4c350bc5-d518-4dc7-b090-14c2ed7486fa service nova] [instance: 021c3287-5887-457e-9b3a-233308fb9b23] Neutron deleted interface 60084240-cc60-4e07-9cae-3f7d36559e40; detaching it from the instance and deleting it from the info cache [ 1469.394018] env[62519]: DEBUG nova.network.neutron [req-5587a759-84a5-488d-8c16-563d808a0b8e 
req-4c350bc5-d518-4dc7-b090-14c2ed7486fa service nova] [instance: 021c3287-5887-457e-9b3a-233308fb9b23] Updating instance_info_cache with network_info: [] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1469.402755] env[62519]: DEBUG oslo_vmware.api [None req-d6584dc1-dea2-47ef-848a-6ee291fa58db tempest-ServersAdminNegativeTestJSON-2096466474 tempest-ServersAdminNegativeTestJSON-2096466474-project-member] Task: {'id': task-1802137, 'name': ReconfigVM_Task, 'duration_secs': 0.804542} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1469.403259] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-d6584dc1-dea2-47ef-848a-6ee291fa58db tempest-ServersAdminNegativeTestJSON-2096466474 tempest-ServersAdminNegativeTestJSON-2096466474-project-member] [instance: 681ef7a9-3b24-450a-9034-6d30177995d7] Reconfigured VM instance instance-00000018 to attach disk [datastore1] 681ef7a9-3b24-450a-9034-6d30177995d7/681ef7a9-3b24-450a-9034-6d30177995d7.vmdk or device None with type sparse {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1469.403899] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-43d42b29-a4e8-4dc3-8464-2ef34133407a {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1469.409029] env[62519]: DEBUG nova.compute.manager [req-39898582-4c65-44d3-9b06-937247435dea req-113fcaf1-9eb9-4aef-8973-f6580655cc4f service nova] [instance: 4c336ad1-8ce6-4f89-843e-0baae0d0dbda] Received event network-changed-57170226-5721-470d-8c8b-652ddff02ff6 {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1469.409259] env[62519]: DEBUG nova.compute.manager [req-39898582-4c65-44d3-9b06-937247435dea req-113fcaf1-9eb9-4aef-8973-f6580655cc4f service nova] [instance: 4c336ad1-8ce6-4f89-843e-0baae0d0dbda] Refreshing instance network info cache due to event network-changed-57170226-5721-470d-8c8b-652ddff02ff6. 
{{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11628}} [ 1469.409471] env[62519]: DEBUG oslo_concurrency.lockutils [req-39898582-4c65-44d3-9b06-937247435dea req-113fcaf1-9eb9-4aef-8973-f6580655cc4f service nova] Acquiring lock "refresh_cache-4c336ad1-8ce6-4f89-843e-0baae0d0dbda" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1469.409548] env[62519]: DEBUG oslo_concurrency.lockutils [req-39898582-4c65-44d3-9b06-937247435dea req-113fcaf1-9eb9-4aef-8973-f6580655cc4f service nova] Acquired lock "refresh_cache-4c336ad1-8ce6-4f89-843e-0baae0d0dbda" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1469.409707] env[62519]: DEBUG nova.network.neutron [req-39898582-4c65-44d3-9b06-937247435dea req-113fcaf1-9eb9-4aef-8973-f6580655cc4f service nova] [instance: 4c336ad1-8ce6-4f89-843e-0baae0d0dbda] Refreshing network info cache for port 57170226-5721-470d-8c8b-652ddff02ff6 {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1469.413024] env[62519]: DEBUG oslo_vmware.api [None req-d6584dc1-dea2-47ef-848a-6ee291fa58db tempest-ServersAdminNegativeTestJSON-2096466474 tempest-ServersAdminNegativeTestJSON-2096466474-project-member] Waiting for the task: (returnval){ [ 1469.413024] env[62519]: value = "task-1802139" [ 1469.413024] env[62519]: _type = "Task" [ 1469.413024] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1469.422971] env[62519]: DEBUG oslo_vmware.api [None req-d6584dc1-dea2-47ef-848a-6ee291fa58db tempest-ServersAdminNegativeTestJSON-2096466474 tempest-ServersAdminNegativeTestJSON-2096466474-project-member] Task: {'id': task-1802139, 'name': Rename_Task} progress is 5%. 
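Note: the network-changed handling above takes a per-instance "refresh_cache-<uuid>" lock, re-queries Neutron for the port, and rewrites the cached network info. A stripped-down sketch of that pattern; fetch_port_from_neutron and save_info_cache are hypothetical stand-ins for the Neutron lookup and the instance_info_cache write, while the lock name mirrors the log.

# Sketch: refresh an instance's cached network info under the same
# "refresh_cache-<uuid>" lock used in the log. fetch_port_from_neutron()
# and save_info_cache() are hypothetical placeholders for the real
# Neutron query and the instance_info_cache DB write.
from oslo_concurrency import lockutils

def refresh_network_cache(instance_uuid, port_id,
                          fetch_port_from_neutron, save_info_cache):
    with lockutils.lock(f'refresh_cache-{instance_uuid}'):
        port = fetch_port_from_neutron(port_id)
        vif = {'id': port['id'],
               'address': port['mac_address'],
               'active': port['status'] == 'ACTIVE'}
        # network_info is stored as a list of VIF dicts like the blob below.
        save_info_cache(instance_uuid, [vif])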
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1469.628226] env[62519]: DEBUG nova.scheduler.client.report [None req-b98629d6-2be9-4962-a917-529126d46bed tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Updated inventory for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 with generation 54 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 157, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 1469.628544] env[62519]: DEBUG nova.compute.provider_tree [None req-b98629d6-2be9-4962-a917-529126d46bed tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Updating resource provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 generation from 54 to 55 during operation: update_inventory {{(pid=62519) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1469.628865] env[62519]: DEBUG nova.compute.provider_tree [None req-b98629d6-2be9-4962-a917-529126d46bed tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Updating inventory in ProviderTree for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 157, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1469.780251] env[62519]: DEBUG nova.network.neutron [-] [instance: 021c3287-5887-457e-9b3a-233308fb9b23] Updating instance_info_cache with network_info: [] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1469.800995] env[62519]: DEBUG oslo_concurrency.lockutils [None req-fd469e3e-0f07-46c3-9403-6ec88e595d61 tempest-ServersTestJSON-192126600 tempest-ServersTestJSON-192126600-project-member] Lock "4c336ad1-8ce6-4f89-843e-0baae0d0dbda" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 60.105s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1469.898168] env[62519]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-4eb2f04e-d8f0-44d8-9b11-cb82e588480b {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1469.906940] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32824e23-bfd2-4fda-bb93-c832cbb8656c {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1469.929881] env[62519]: DEBUG oslo_vmware.api [None req-d6584dc1-dea2-47ef-848a-6ee291fa58db tempest-ServersAdminNegativeTestJSON-2096466474 tempest-ServersAdminNegativeTestJSON-2096466474-project-member] Task: {'id': task-1802139, 'name': Rename_Task, 
'duration_secs': 0.485044} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1469.930107] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-d6584dc1-dea2-47ef-848a-6ee291fa58db tempest-ServersAdminNegativeTestJSON-2096466474 tempest-ServersAdminNegativeTestJSON-2096466474-project-member] [instance: 681ef7a9-3b24-450a-9034-6d30177995d7] Powering on the VM {{(pid=62519) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1469.930352] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-30a4d3c6-74ae-40b9-8c62-4ec2e4912138 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1469.945323] env[62519]: DEBUG nova.compute.manager [req-5587a759-84a5-488d-8c16-563d808a0b8e req-4c350bc5-d518-4dc7-b090-14c2ed7486fa service nova] [instance: 021c3287-5887-457e-9b3a-233308fb9b23] Detach interface failed, port_id=60084240-cc60-4e07-9cae-3f7d36559e40, reason: Instance 021c3287-5887-457e-9b3a-233308fb9b23 could not be found. {{(pid=62519) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11455}} [ 1469.947112] env[62519]: DEBUG oslo_vmware.api [None req-d6584dc1-dea2-47ef-848a-6ee291fa58db tempest-ServersAdminNegativeTestJSON-2096466474 tempest-ServersAdminNegativeTestJSON-2096466474-project-member] Waiting for the task: (returnval){ [ 1469.947112] env[62519]: value = "task-1802140" [ 1469.947112] env[62519]: _type = "Task" [ 1469.947112] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1469.954875] env[62519]: DEBUG oslo_vmware.api [None req-d6584dc1-dea2-47ef-848a-6ee291fa58db tempest-ServersAdminNegativeTestJSON-2096466474 tempest-ServersAdminNegativeTestJSON-2096466474-project-member] Task: {'id': task-1802140, 'name': PowerOnVM_Task} progress is 33%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1470.237941] env[62519]: DEBUG nova.network.neutron [req-39898582-4c65-44d3-9b06-937247435dea req-113fcaf1-9eb9-4aef-8973-f6580655cc4f service nova] [instance: 4c336ad1-8ce6-4f89-843e-0baae0d0dbda] Updated VIF entry in instance network info cache for port 57170226-5721-470d-8c8b-652ddff02ff6. 
{{(pid=62519) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1470.238552] env[62519]: DEBUG nova.network.neutron [req-39898582-4c65-44d3-9b06-937247435dea req-113fcaf1-9eb9-4aef-8973-f6580655cc4f service nova] [instance: 4c336ad1-8ce6-4f89-843e-0baae0d0dbda] Updating instance_info_cache with network_info: [{"id": "57170226-5721-470d-8c8b-652ddff02ff6", "address": "fa:16:3e:bd:61:12", "network": {"id": "aa9fff60-aac4-4eb8-8e44-f3d41384fdd4", "bridge": "br-int", "label": "tempest-ServersTestJSON-287335508-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.199", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "55bcb317741740ab86883dc9d0409fa5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6d62c1cf-f39a-4626-9552-f1e13c692636", "external-id": "nsx-vlan-transportzone-748", "segmentation_id": 748, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap57170226-57", "ovs_interfaceid": "57170226-5721-470d-8c8b-652ddff02ff6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1470.282314] env[62519]: INFO nova.compute.manager [-] [instance: 021c3287-5887-457e-9b3a-233308fb9b23] Took 1.50 seconds to deallocate network for instance. [ 1470.304248] env[62519]: DEBUG nova.compute.manager [None req-0c4994c0-96a4-4874-90b8-74f17da23551 tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] [instance: 2b68e95a-1d93-43ee-b0a6-996c9042f5c7] Starting instance... {{(pid=62519) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2441}} [ 1470.459834] env[62519]: DEBUG oslo_vmware.api [None req-d6584dc1-dea2-47ef-848a-6ee291fa58db tempest-ServersAdminNegativeTestJSON-2096466474 tempest-ServersAdminNegativeTestJSON-2096466474-project-member] Task: {'id': task-1802140, 'name': PowerOnVM_Task} progress is 66%. 
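Note: the large network_info blob above is that cached VIF model written out: a list of VIFs, each holding a network with subnets, whose fixed IPs may in turn carry floating IPs. A small sketch that walks the structure to collect the addresses; the sample data is abbreviated from the entry for port 57170226-5721-470d-8c8b-652ddff02ff6.

# Sketch: walk a cached network_info structure and collect the fixed and
# floating IPs per VIF. Sample data abbreviated from the log entry above.
network_info = [{
    "id": "57170226-5721-470d-8c8b-652ddff02ff6",
    "address": "fa:16:3e:bd:61:12",
    "network": {"subnets": [{
        "cidr": "192.168.128.0/28",
        "ips": [{"address": "192.168.128.12",
                 "floating_ips": [{"address": "10.180.180.199"}]}],
    }]},
}]

for vif in network_info:
    fixed, floating = [], []
    for subnet in vif["network"]["subnets"]:
        for ip in subnet["ips"]:
            fixed.append(ip["address"])
            floating.extend(f["address"] for f in ip.get("floating_ips", []))
    print(vif["id"], vif["address"], fixed, floating)
# -> 57170226-5721-470d-8c8b-652ddff02ff6 fa:16:3e:bd:61:12
#    ['192.168.128.12'] ['10.180.180.199']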
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1470.642101] env[62519]: DEBUG oslo_concurrency.lockutils [None req-b98629d6-2be9-4962-a917-529126d46bed tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 4.033s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1470.645092] env[62519]: DEBUG oslo_concurrency.lockutils [None req-beeaee40-aeb0-4929-99b9-82dcecee9d8f tempest-ServersAdmin275Test-1657935187 tempest-ServersAdmin275Test-1657935187-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 26.759s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1470.645334] env[62519]: DEBUG nova.objects.instance [None req-beeaee40-aeb0-4929-99b9-82dcecee9d8f tempest-ServersAdmin275Test-1657935187 tempest-ServersAdmin275Test-1657935187-project-member] Lazy-loading 'resources' on Instance uuid c07e4d30-44bc-417b-8137-97f974aec932 {{(pid=62519) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1470.740765] env[62519]: DEBUG oslo_concurrency.lockutils [req-39898582-4c65-44d3-9b06-937247435dea req-113fcaf1-9eb9-4aef-8973-f6580655cc4f service nova] Releasing lock "refresh_cache-4c336ad1-8ce6-4f89-843e-0baae0d0dbda" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1470.789517] env[62519]: DEBUG oslo_concurrency.lockutils [None req-97f971da-2cc7-4dd4-b5fa-02fbf4a6a87f tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1470.826530] env[62519]: DEBUG oslo_concurrency.lockutils [None req-0c4994c0-96a4-4874-90b8-74f17da23551 tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1470.960604] env[62519]: DEBUG oslo_vmware.api [None req-d6584dc1-dea2-47ef-848a-6ee291fa58db tempest-ServersAdminNegativeTestJSON-2096466474 tempest-ServersAdminNegativeTestJSON-2096466474-project-member] Task: {'id': task-1802140, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1471.204830] env[62519]: INFO nova.scheduler.client.report [None req-b98629d6-2be9-4962-a917-529126d46bed tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Deleted allocation for migration f90a3c1a-0ac3-49d8-8f4b-3ad7b5d43ffd [ 1471.461451] env[62519]: DEBUG oslo_vmware.api [None req-d6584dc1-dea2-47ef-848a-6ee291fa58db tempest-ServersAdminNegativeTestJSON-2096466474 tempest-ServersAdminNegativeTestJSON-2096466474-project-member] Task: {'id': task-1802140, 'name': PowerOnVM_Task, 'duration_secs': 1.154621} completed successfully. 
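Note: the "Deleted allocation for migration ..." line corresponds to confirm_resize dropping the source-node allocation that Placement holds against the migration's own consumer UUID. That is a single DELETE on the allocations resource; a sketch with the same placeholder endpoint and token as the inventory example above.

# Sketch: drop the allocations held by one consumer (here, a migration
# record) in Placement. Endpoint/token are placeholders as in the earlier
# Placement sketch; the migration UUID is reused from the log.
import requests

PLACEMENT = 'http://placement.example.org/placement'
HEADERS = {'X-Auth-Token': 'ADMIN_TOKEN',
           'OpenStack-API-Version': 'placement 1.26'}

resp = requests.delete(
    f'{PLACEMENT}/allocations/f90a3c1a-0ac3-49d8-8f4b-3ad7b5d43ffd',
    headers=HEADERS)
resp.raise_for_status()  # 204 No Content when the allocations are gone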
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1471.461747] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-d6584dc1-dea2-47ef-848a-6ee291fa58db tempest-ServersAdminNegativeTestJSON-2096466474 tempest-ServersAdminNegativeTestJSON-2096466474-project-member] [instance: 681ef7a9-3b24-450a-9034-6d30177995d7] Powered on the VM {{(pid=62519) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1471.461910] env[62519]: INFO nova.compute.manager [None req-d6584dc1-dea2-47ef-848a-6ee291fa58db tempest-ServersAdminNegativeTestJSON-2096466474 tempest-ServersAdminNegativeTestJSON-2096466474-project-member] [instance: 681ef7a9-3b24-450a-9034-6d30177995d7] Took 9.26 seconds to spawn the instance on the hypervisor. [ 1471.462099] env[62519]: DEBUG nova.compute.manager [None req-d6584dc1-dea2-47ef-848a-6ee291fa58db tempest-ServersAdminNegativeTestJSON-2096466474 tempest-ServersAdminNegativeTestJSON-2096466474-project-member] [instance: 681ef7a9-3b24-450a-9034-6d30177995d7] Checking state {{(pid=62519) _get_power_state /opt/stack/nova/nova/compute/manager.py:1799}} [ 1471.462882] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a369960-e1e1-4abf-ac77-97a3edeed272 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1471.651659] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9001933f-43ed-4368-a101-c8df7b88a2e2 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1471.661757] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78e0745f-d6b6-4dd8-a9c5-0af594f18fa4 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1471.693378] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24d59789-d472-4aee-b747-98c03956cf89 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1471.701259] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bcd5cce1-8030-4f65-8e36-2eb45993af48 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1471.715578] env[62519]: DEBUG oslo_concurrency.lockutils [None req-b98629d6-2be9-4962-a917-529126d46bed tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Lock "fe350d30-6fbd-4813-9634-ed05984fecfd" "released" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: held 35.565s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1471.716855] env[62519]: DEBUG nova.compute.provider_tree [None req-beeaee40-aeb0-4929-99b9-82dcecee9d8f tempest-ServersAdmin275Test-1657935187 tempest-ServersAdmin275Test-1657935187-project-member] Inventory has not changed in ProviderTree for provider: f8ca0d98-9158-4b85-ae0e-b106f966dd44 {{(pid=62519) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1471.982085] env[62519]: INFO nova.compute.manager [None req-d6584dc1-dea2-47ef-848a-6ee291fa58db tempest-ServersAdminNegativeTestJSON-2096466474 tempest-ServersAdminNegativeTestJSON-2096466474-project-member] 
[instance: 681ef7a9-3b24-450a-9034-6d30177995d7] Took 44.09 seconds to build instance. [ 1472.220556] env[62519]: DEBUG nova.scheduler.client.report [None req-beeaee40-aeb0-4929-99b9-82dcecee9d8f tempest-ServersAdmin275Test-1657935187 tempest-ServersAdmin275Test-1657935187-project-member] Inventory has not changed for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 157, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1472.688793] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab411c35-55c7-46de-b279-1796ad0b29ec {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1472.695770] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-7be5f658-d17f-46c8-a11e-9426a34d90cc tempest-ServersAdminNegativeTestJSON-1265756228 tempest-ServersAdminNegativeTestJSON-1265756228-project-admin] [instance: 681ef7a9-3b24-450a-9034-6d30177995d7] Suspending the VM {{(pid=62519) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1162}} [ 1472.696037] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.SuspendVM_Task with opID=oslo.vmware-400d6bd3-70b3-4eb1-a20d-1c30fcf9fa75 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1472.703092] env[62519]: DEBUG oslo_vmware.api [None req-7be5f658-d17f-46c8-a11e-9426a34d90cc tempest-ServersAdminNegativeTestJSON-1265756228 tempest-ServersAdminNegativeTestJSON-1265756228-project-admin] Waiting for the task: (returnval){ [ 1472.703092] env[62519]: value = "task-1802142" [ 1472.703092] env[62519]: _type = "Task" [ 1472.703092] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1472.711138] env[62519]: DEBUG oslo_vmware.api [None req-7be5f658-d17f-46c8-a11e-9426a34d90cc tempest-ServersAdminNegativeTestJSON-1265756228 tempest-ServersAdminNegativeTestJSON-1265756228-project-admin] Task: {'id': task-1802142, 'name': SuspendVM_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1472.732450] env[62519]: DEBUG oslo_concurrency.lockutils [None req-beeaee40-aeb0-4929-99b9-82dcecee9d8f tempest-ServersAdmin275Test-1657935187 tempest-ServersAdmin275Test-1657935187-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.087s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1472.735736] env[62519]: DEBUG oslo_concurrency.lockutils [None req-702f15b2-0754-4c5b-b9ec-b97a88471fff tempest-SecurityGroupsTestJSON-38868017 tempest-SecurityGroupsTestJSON-38868017-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 28.756s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1472.736819] env[62519]: INFO nova.compute.claims [None req-702f15b2-0754-4c5b-b9ec-b97a88471fff tempest-SecurityGroupsTestJSON-38868017 tempest-SecurityGroupsTestJSON-38868017-project-member] [instance: 4a29bff8-050a-4ad5-9d06-3a59c40b97ee] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1472.761988] env[62519]: INFO nova.scheduler.client.report [None req-beeaee40-aeb0-4929-99b9-82dcecee9d8f tempest-ServersAdmin275Test-1657935187 tempest-ServersAdmin275Test-1657935187-project-member] Deleted allocations for instance c07e4d30-44bc-417b-8137-97f974aec932 [ 1473.214539] env[62519]: DEBUG oslo_vmware.api [None req-7be5f658-d17f-46c8-a11e-9426a34d90cc tempest-ServersAdminNegativeTestJSON-1265756228 tempest-ServersAdminNegativeTestJSON-1265756228-project-admin] Task: {'id': task-1802142, 'name': SuspendVM_Task} progress is 12%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1473.272055] env[62519]: DEBUG oslo_concurrency.lockutils [None req-beeaee40-aeb0-4929-99b9-82dcecee9d8f tempest-ServersAdmin275Test-1657935187 tempest-ServersAdmin275Test-1657935187-project-member] Lock "c07e4d30-44bc-417b-8137-97f974aec932" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 33.644s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1473.494551] env[62519]: DEBUG oslo_concurrency.lockutils [None req-d6584dc1-dea2-47ef-848a-6ee291fa58db tempest-ServersAdminNegativeTestJSON-2096466474 tempest-ServersAdminNegativeTestJSON-2096466474-project-member] Lock "681ef7a9-3b24-450a-9034-6d30177995d7" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 63.732s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1473.717737] env[62519]: DEBUG oslo_vmware.api [None req-7be5f658-d17f-46c8-a11e-9426a34d90cc tempest-ServersAdminNegativeTestJSON-1265756228 tempest-ServersAdminNegativeTestJSON-1265756228-project-admin] Task: {'id': task-1802142, 'name': SuspendVM_Task, 'duration_secs': 0.942254} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1473.718132] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-7be5f658-d17f-46c8-a11e-9426a34d90cc tempest-ServersAdminNegativeTestJSON-1265756228 tempest-ServersAdminNegativeTestJSON-1265756228-project-admin] [instance: 681ef7a9-3b24-450a-9034-6d30177995d7] Suspended the VM {{(pid=62519) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1166}} [ 1473.718418] env[62519]: DEBUG nova.compute.manager [None req-7be5f658-d17f-46c8-a11e-9426a34d90cc tempest-ServersAdminNegativeTestJSON-1265756228 tempest-ServersAdminNegativeTestJSON-1265756228-project-admin] [instance: 681ef7a9-3b24-450a-9034-6d30177995d7] Checking state {{(pid=62519) _get_power_state /opt/stack/nova/nova/compute/manager.py:1799}} [ 1473.719439] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05fbdbe5-d151-469a-9cd1-e0e2f650b2ef {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1473.999442] env[62519]: DEBUG nova.compute.manager [None req-d2114a68-6388-4c74-8f9c-6fba6cdea6e5 tempest-ServersTestFqdnHostnames-1137700598 tempest-ServersTestFqdnHostnames-1137700598-project-member] [instance: a59be5e6-2316-4766-933a-4d01dfe4fec1] Starting instance... {{(pid=62519) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2441}} [ 1474.002264] env[62519]: DEBUG oslo_concurrency.lockutils [None req-6a3f141b-7806-43a2-8afd-6877e3266010 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Acquiring lock "a1551278-a306-4534-8d8d-3b3a003dde04" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1474.002483] env[62519]: DEBUG oslo_concurrency.lockutils [None req-6a3f141b-7806-43a2-8afd-6877e3266010 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Lock "a1551278-a306-4534-8d8d-3b3a003dde04" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1474.223790] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41749d81-9ec7-4fe4-9cef-44a7a2ac724d {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1474.233533] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1350d5aa-997d-4d65-b7fb-1fed47bd64d2 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1474.266612] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c71d4d0-4108-4ace-9031-e314937f1812 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1474.273509] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-916106de-3523-47f4-ac7a-492d8aa06ba7 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1474.288479] env[62519]: DEBUG nova.compute.provider_tree [None 
req-702f15b2-0754-4c5b-b9ec-b97a88471fff tempest-SecurityGroupsTestJSON-38868017 tempest-SecurityGroupsTestJSON-38868017-project-member] Updating inventory in ProviderTree for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1474.522974] env[62519]: DEBUG oslo_concurrency.lockutils [None req-d2114a68-6388-4c74-8f9c-6fba6cdea6e5 tempest-ServersTestFqdnHostnames-1137700598 tempest-ServersTestFqdnHostnames-1137700598-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1474.810363] env[62519]: ERROR nova.scheduler.client.report [None req-702f15b2-0754-4c5b-b9ec-b97a88471fff tempest-SecurityGroupsTestJSON-38868017 tempest-SecurityGroupsTestJSON-38868017-project-member] [req-e621e37d-27ac-45e3-a845-2fec650d3ab0] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID f8ca0d98-9158-4b85-ae0e-b106f966dd44. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-e621e37d-27ac-45e3-a845-2fec650d3ab0"}]} [ 1474.827857] env[62519]: DEBUG nova.scheduler.client.report [None req-702f15b2-0754-4c5b-b9ec-b97a88471fff tempest-SecurityGroupsTestJSON-38868017 tempest-SecurityGroupsTestJSON-38868017-project-member] Refreshing inventories for resource provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 {{(pid=62519) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 1474.842396] env[62519]: DEBUG nova.scheduler.client.report [None req-702f15b2-0754-4c5b-b9ec-b97a88471fff tempest-SecurityGroupsTestJSON-38868017 tempest-SecurityGroupsTestJSON-38868017-project-member] Updating ProviderTree inventory for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 157, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 1474.842396] env[62519]: DEBUG nova.compute.provider_tree [None req-702f15b2-0754-4c5b-b9ec-b97a88471fff tempest-SecurityGroupsTestJSON-38868017 tempest-SecurityGroupsTestJSON-38868017-project-member] Updating inventory in ProviderTree for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 157, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1474.853975] env[62519]: DEBUG nova.scheduler.client.report [None req-702f15b2-0754-4c5b-b9ec-b97a88471fff tempest-SecurityGroupsTestJSON-38868017 tempest-SecurityGroupsTestJSON-38868017-project-member] Refreshing aggregate associations for resource provider f8ca0d98-9158-4b85-ae0e-b106f966dd44, aggregates: None {{(pid=62519) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 1474.874711] env[62519]: DEBUG nova.scheduler.client.report [None req-702f15b2-0754-4c5b-b9ec-b97a88471fff tempest-SecurityGroupsTestJSON-38868017 tempest-SecurityGroupsTestJSON-38868017-project-member] Refreshing trait associations for resource provider f8ca0d98-9158-4b85-ae0e-b106f966dd44, traits: COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NODE,COMPUTE_SAME_HOST_COLD_MIGRATE,HW_ARCH_X86_64,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_VMDK {{(pid=62519) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 1475.382586] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-708d817d-5576-44bf-aab2-8945dea4820b {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1475.392831] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f922f43a-d4cf-42ba-86fe-162ab9fd6ebd {{(pid=62519) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1475.433444] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d618549c-59ad-43d2-b58d-637983557a55 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1475.446025] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67e7da99-8b7b-4f40-919f-8840009e8ea4 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1475.457640] env[62519]: DEBUG nova.compute.provider_tree [None req-702f15b2-0754-4c5b-b9ec-b97a88471fff tempest-SecurityGroupsTestJSON-38868017 tempest-SecurityGroupsTestJSON-38868017-project-member] Updating inventory in ProviderTree for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1475.999081] env[62519]: DEBUG nova.scheduler.client.report [None req-702f15b2-0754-4c5b-b9ec-b97a88471fff tempest-SecurityGroupsTestJSON-38868017 tempest-SecurityGroupsTestJSON-38868017-project-member] Updated inventory for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 with generation 57 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 1475.999368] env[62519]: DEBUG nova.compute.provider_tree [None req-702f15b2-0754-4c5b-b9ec-b97a88471fff tempest-SecurityGroupsTestJSON-38868017 tempest-SecurityGroupsTestJSON-38868017-project-member] Updating resource provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 generation from 57 to 58 during operation: update_inventory {{(pid=62519) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1475.999520] env[62519]: DEBUG nova.compute.provider_tree [None req-702f15b2-0754-4c5b-b9ec-b97a88471fff tempest-SecurityGroupsTestJSON-38868017 tempest-SecurityGroupsTestJSON-38868017-project-member] Updating inventory in ProviderTree for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1476.214306] env[62519]: DEBUG oslo_concurrency.lockutils [None req-ca8ca56a-35a2-4cb3-a5f6-65171c7c779d tempest-ServersAdminNegativeTestJSON-2096466474 tempest-ServersAdminNegativeTestJSON-2096466474-project-member] Acquiring lock 
"681ef7a9-3b24-450a-9034-6d30177995d7" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1476.214642] env[62519]: DEBUG oslo_concurrency.lockutils [None req-ca8ca56a-35a2-4cb3-a5f6-65171c7c779d tempest-ServersAdminNegativeTestJSON-2096466474 tempest-ServersAdminNegativeTestJSON-2096466474-project-member] Lock "681ef7a9-3b24-450a-9034-6d30177995d7" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1476.214858] env[62519]: DEBUG oslo_concurrency.lockutils [None req-ca8ca56a-35a2-4cb3-a5f6-65171c7c779d tempest-ServersAdminNegativeTestJSON-2096466474 tempest-ServersAdminNegativeTestJSON-2096466474-project-member] Acquiring lock "681ef7a9-3b24-450a-9034-6d30177995d7-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1476.215058] env[62519]: DEBUG oslo_concurrency.lockutils [None req-ca8ca56a-35a2-4cb3-a5f6-65171c7c779d tempest-ServersAdminNegativeTestJSON-2096466474 tempest-ServersAdminNegativeTestJSON-2096466474-project-member] Lock "681ef7a9-3b24-450a-9034-6d30177995d7-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1476.215219] env[62519]: DEBUG oslo_concurrency.lockutils [None req-ca8ca56a-35a2-4cb3-a5f6-65171c7c779d tempest-ServersAdminNegativeTestJSON-2096466474 tempest-ServersAdminNegativeTestJSON-2096466474-project-member] Lock "681ef7a9-3b24-450a-9034-6d30177995d7-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1476.217646] env[62519]: INFO nova.compute.manager [None req-ca8ca56a-35a2-4cb3-a5f6-65171c7c779d tempest-ServersAdminNegativeTestJSON-2096466474 tempest-ServersAdminNegativeTestJSON-2096466474-project-member] [instance: 681ef7a9-3b24-450a-9034-6d30177995d7] Terminating instance [ 1476.504817] env[62519]: DEBUG oslo_concurrency.lockutils [None req-702f15b2-0754-4c5b-b9ec-b97a88471fff tempest-SecurityGroupsTestJSON-38868017 tempest-SecurityGroupsTestJSON-38868017-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.769s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1476.505888] env[62519]: DEBUG nova.compute.manager [None req-702f15b2-0754-4c5b-b9ec-b97a88471fff tempest-SecurityGroupsTestJSON-38868017 tempest-SecurityGroupsTestJSON-38868017-project-member] [instance: 4a29bff8-050a-4ad5-9d06-3a59c40b97ee] Start building networks asynchronously for instance. 
{{(pid=62519) _build_resources /opt/stack/nova/nova/compute/manager.py:2838}} [ 1476.510008] env[62519]: DEBUG oslo_concurrency.lockutils [None req-7abad8d6-8e42-4549-a192-8cdae3b16ee9 tempest-ServerExternalEventsTest-627262165 tempest-ServerExternalEventsTest-627262165-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 30.111s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1476.512495] env[62519]: INFO nova.compute.claims [None req-7abad8d6-8e42-4549-a192-8cdae3b16ee9 tempest-ServerExternalEventsTest-627262165 tempest-ServerExternalEventsTest-627262165-project-member] [instance: f7d5c77d-6c78-4969-b511-2b03ab624c84] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1476.721581] env[62519]: DEBUG nova.compute.manager [None req-ca8ca56a-35a2-4cb3-a5f6-65171c7c779d tempest-ServersAdminNegativeTestJSON-2096466474 tempest-ServersAdminNegativeTestJSON-2096466474-project-member] [instance: 681ef7a9-3b24-450a-9034-6d30177995d7] Start destroying the instance on the hypervisor. {{(pid=62519) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3166}} [ 1476.721917] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-ca8ca56a-35a2-4cb3-a5f6-65171c7c779d tempest-ServersAdminNegativeTestJSON-2096466474 tempest-ServersAdminNegativeTestJSON-2096466474-project-member] [instance: 681ef7a9-3b24-450a-9034-6d30177995d7] Destroying instance {{(pid=62519) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1476.722690] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-568aa5f1-0cd7-4b8b-b7d1-e0ee2d179e38 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1476.731408] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-ca8ca56a-35a2-4cb3-a5f6-65171c7c779d tempest-ServersAdminNegativeTestJSON-2096466474 tempest-ServersAdminNegativeTestJSON-2096466474-project-member] [instance: 681ef7a9-3b24-450a-9034-6d30177995d7] Unregistering the VM {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1476.731693] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-cdf2f3fc-5c60-49f1-8fef-672157893551 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1477.020997] env[62519]: DEBUG nova.compute.utils [None req-702f15b2-0754-4c5b-b9ec-b97a88471fff tempest-SecurityGroupsTestJSON-38868017 tempest-SecurityGroupsTestJSON-38868017-project-member] Using /dev/sd instead of None {{(pid=62519) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1477.021481] env[62519]: DEBUG nova.compute.manager [None req-702f15b2-0754-4c5b-b9ec-b97a88471fff tempest-SecurityGroupsTestJSON-38868017 tempest-SecurityGroupsTestJSON-38868017-project-member] [instance: 4a29bff8-050a-4ad5-9d06-3a59c40b97ee] Allocating IP information in the background. 
{{(pid=62519) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1989}} [ 1477.021646] env[62519]: DEBUG nova.network.neutron [None req-702f15b2-0754-4c5b-b9ec-b97a88471fff tempest-SecurityGroupsTestJSON-38868017 tempest-SecurityGroupsTestJSON-38868017-project-member] [instance: 4a29bff8-050a-4ad5-9d06-3a59c40b97ee] allocate_for_instance() {{(pid=62519) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1477.111427] env[62519]: DEBUG nova.policy [None req-702f15b2-0754-4c5b-b9ec-b97a88471fff tempest-SecurityGroupsTestJSON-38868017 tempest-SecurityGroupsTestJSON-38868017-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '317da34374674698bc67ec498455a09d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'd044d408a8674580b0f5cd52ca6e756d', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62519) authorize /opt/stack/nova/nova/policy.py:192}} [ 1477.528387] env[62519]: DEBUG nova.compute.manager [None req-702f15b2-0754-4c5b-b9ec-b97a88471fff tempest-SecurityGroupsTestJSON-38868017 tempest-SecurityGroupsTestJSON-38868017-project-member] [instance: 4a29bff8-050a-4ad5-9d06-3a59c40b97ee] Start building block device mappings for instance. {{(pid=62519) _build_resources /opt/stack/nova/nova/compute/manager.py:2873}} [ 1477.581053] env[62519]: DEBUG nova.network.neutron [None req-702f15b2-0754-4c5b-b9ec-b97a88471fff tempest-SecurityGroupsTestJSON-38868017 tempest-SecurityGroupsTestJSON-38868017-project-member] [instance: 4a29bff8-050a-4ad5-9d06-3a59c40b97ee] Successfully created port: 53db4677-3e45-4941-aa9d-ab7dee4a3da6 {{(pid=62519) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1477.773710] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-ca8ca56a-35a2-4cb3-a5f6-65171c7c779d tempest-ServersAdminNegativeTestJSON-2096466474 tempest-ServersAdminNegativeTestJSON-2096466474-project-member] [instance: 681ef7a9-3b24-450a-9034-6d30177995d7] Unregistered the VM {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1477.773900] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-ca8ca56a-35a2-4cb3-a5f6-65171c7c779d tempest-ServersAdminNegativeTestJSON-2096466474 tempest-ServersAdminNegativeTestJSON-2096466474-project-member] [instance: 681ef7a9-3b24-450a-9034-6d30177995d7] Deleting contents of the VM from datastore datastore1 {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1477.774096] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-ca8ca56a-35a2-4cb3-a5f6-65171c7c779d tempest-ServersAdminNegativeTestJSON-2096466474 tempest-ServersAdminNegativeTestJSON-2096466474-project-member] Deleting the datastore file [datastore1] 681ef7a9-3b24-450a-9034-6d30177995d7 {{(pid=62519) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1477.776856] env[62519]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-497fccdb-a177-4aca-9de2-be5369e864ba {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1477.788593] env[62519]: DEBUG oslo_vmware.api [None req-ca8ca56a-35a2-4cb3-a5f6-65171c7c779d tempest-ServersAdminNegativeTestJSON-2096466474 
tempest-ServersAdminNegativeTestJSON-2096466474-project-member] Waiting for the task: (returnval){ [ 1477.788593] env[62519]: value = "task-1802147" [ 1477.788593] env[62519]: _type = "Task" [ 1477.788593] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1477.798454] env[62519]: DEBUG oslo_vmware.api [None req-ca8ca56a-35a2-4cb3-a5f6-65171c7c779d tempest-ServersAdminNegativeTestJSON-2096466474 tempest-ServersAdminNegativeTestJSON-2096466474-project-member] Task: {'id': task-1802147, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1478.057374] env[62519]: DEBUG oslo_concurrency.lockutils [None req-4073dc01-e5b2-4480-9b24-60e6c1491ad2 tempest-ImagesNegativeTestJSON-1194326905 tempest-ImagesNegativeTestJSON-1194326905-project-member] Acquiring lock "1b4efc0f-474f-4a32-b199-c14f27b183e2" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1478.058414] env[62519]: DEBUG oslo_concurrency.lockutils [None req-4073dc01-e5b2-4480-9b24-60e6c1491ad2 tempest-ImagesNegativeTestJSON-1194326905 tempest-ImagesNegativeTestJSON-1194326905-project-member] Lock "1b4efc0f-474f-4a32-b199-c14f27b183e2" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1478.139117] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2696cad7-5976-46c7-b9b3-f59d82eb8de1 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1478.148813] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e132078-fe20-48d2-a03e-6215c2829afb {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1478.184793] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66c7eea0-3dd9-4c2d-baf2-84f9d45c69f8 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1478.193571] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5fc3c491-b670-42cc-9eb6-75e6cde73e54 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1478.207685] env[62519]: DEBUG nova.compute.provider_tree [None req-7abad8d6-8e42-4549-a192-8cdae3b16ee9 tempest-ServerExternalEventsTest-627262165 tempest-ServerExternalEventsTest-627262165-project-member] Inventory has not changed in ProviderTree for provider: f8ca0d98-9158-4b85-ae0e-b106f966dd44 {{(pid=62519) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1478.300660] env[62519]: DEBUG oslo_vmware.api [None req-ca8ca56a-35a2-4cb3-a5f6-65171c7c779d tempest-ServersAdminNegativeTestJSON-2096466474 tempest-ServersAdminNegativeTestJSON-2096466474-project-member] Task: {'id': task-1802147, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1478.545718] env[62519]: DEBUG nova.compute.manager [None req-702f15b2-0754-4c5b-b9ec-b97a88471fff tempest-SecurityGroupsTestJSON-38868017 tempest-SecurityGroupsTestJSON-38868017-project-member] [instance: 4a29bff8-050a-4ad5-9d06-3a59c40b97ee] Start spawning the instance on the hypervisor. {{(pid=62519) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2647}} [ 1478.576685] env[62519]: DEBUG nova.virt.hardware [None req-702f15b2-0754-4c5b-b9ec-b97a88471fff tempest-SecurityGroupsTestJSON-38868017 tempest-SecurityGroupsTestJSON-38868017-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T07:56:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T07:55:55Z,direct_url=,disk_format='vmdk',id=15793716-f1d9-4a86-9030-717adf498693,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4069337684d348e0a1ab4eb6a1a2b14d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T07:55:56Z,virtual_size=,visibility=), allow threads: False {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1478.578204] env[62519]: DEBUG nova.virt.hardware [None req-702f15b2-0754-4c5b-b9ec-b97a88471fff tempest-SecurityGroupsTestJSON-38868017 tempest-SecurityGroupsTestJSON-38868017-project-member] Flavor limits 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1478.578417] env[62519]: DEBUG nova.virt.hardware [None req-702f15b2-0754-4c5b-b9ec-b97a88471fff tempest-SecurityGroupsTestJSON-38868017 tempest-SecurityGroupsTestJSON-38868017-project-member] Image limits 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1478.578854] env[62519]: DEBUG nova.virt.hardware [None req-702f15b2-0754-4c5b-b9ec-b97a88471fff tempest-SecurityGroupsTestJSON-38868017 tempest-SecurityGroupsTestJSON-38868017-project-member] Flavor pref 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1478.578854] env[62519]: DEBUG nova.virt.hardware [None req-702f15b2-0754-4c5b-b9ec-b97a88471fff tempest-SecurityGroupsTestJSON-38868017 tempest-SecurityGroupsTestJSON-38868017-project-member] Image pref 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1478.578954] env[62519]: DEBUG nova.virt.hardware [None req-702f15b2-0754-4c5b-b9ec-b97a88471fff tempest-SecurityGroupsTestJSON-38868017 tempest-SecurityGroupsTestJSON-38868017-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1478.579129] env[62519]: DEBUG nova.virt.hardware [None req-702f15b2-0754-4c5b-b9ec-b97a88471fff tempest-SecurityGroupsTestJSON-38868017 tempest-SecurityGroupsTestJSON-38868017-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 
1478.579302] env[62519]: DEBUG nova.virt.hardware [None req-702f15b2-0754-4c5b-b9ec-b97a88471fff tempest-SecurityGroupsTestJSON-38868017 tempest-SecurityGroupsTestJSON-38868017-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62519) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1478.579471] env[62519]: DEBUG nova.virt.hardware [None req-702f15b2-0754-4c5b-b9ec-b97a88471fff tempest-SecurityGroupsTestJSON-38868017 tempest-SecurityGroupsTestJSON-38868017-project-member] Got 1 possible topologies {{(pid=62519) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1478.579631] env[62519]: DEBUG nova.virt.hardware [None req-702f15b2-0754-4c5b-b9ec-b97a88471fff tempest-SecurityGroupsTestJSON-38868017 tempest-SecurityGroupsTestJSON-38868017-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1478.579797] env[62519]: DEBUG nova.virt.hardware [None req-702f15b2-0754-4c5b-b9ec-b97a88471fff tempest-SecurityGroupsTestJSON-38868017 tempest-SecurityGroupsTestJSON-38868017-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1478.581586] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1225803-196a-442a-8e02-8079ec98e527 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1478.592873] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58af3e30-dcc0-4ac0-90bc-1e5f840e3658 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1478.711970] env[62519]: DEBUG nova.scheduler.client.report [None req-7abad8d6-8e42-4549-a192-8cdae3b16ee9 tempest-ServerExternalEventsTest-627262165 tempest-ServerExternalEventsTest-627262165-project-member] Inventory has not changed for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1478.805230] env[62519]: DEBUG oslo_vmware.api [None req-ca8ca56a-35a2-4cb3-a5f6-65171c7c779d tempest-ServersAdminNegativeTestJSON-2096466474 tempest-ServersAdminNegativeTestJSON-2096466474-project-member] Task: {'id': task-1802147, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1479.222202] env[62519]: DEBUG oslo_concurrency.lockutils [None req-7abad8d6-8e42-4549-a192-8cdae3b16ee9 tempest-ServerExternalEventsTest-627262165 tempest-ServerExternalEventsTest-627262165-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.710s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1479.222202] env[62519]: DEBUG nova.compute.manager [None req-7abad8d6-8e42-4549-a192-8cdae3b16ee9 tempest-ServerExternalEventsTest-627262165 tempest-ServerExternalEventsTest-627262165-project-member] [instance: f7d5c77d-6c78-4969-b511-2b03ab624c84] Start building networks asynchronously for instance. {{(pid=62519) _build_resources /opt/stack/nova/nova/compute/manager.py:2838}} [ 1479.223735] env[62519]: DEBUG oslo_concurrency.lockutils [None req-7fddec09-f5a6-4eb5-bfe2-235b0e99801c tempest-ServerShowV254Test-1623148250 tempest-ServerShowV254Test-1623148250-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 30.283s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1479.226022] env[62519]: INFO nova.compute.claims [None req-7fddec09-f5a6-4eb5-bfe2-235b0e99801c tempest-ServerShowV254Test-1623148250 tempest-ServerShowV254Test-1623148250-project-member] [instance: 1462d213-3f9a-4c60-8056-0b68f20a4939] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1479.258540] env[62519]: DEBUG nova.compute.manager [req-54f22d08-bac9-439e-8a78-51354fd82c4c req-496bf4b0-929e-46e0-8729-d0ed5a54397d service nova] [instance: 4a29bff8-050a-4ad5-9d06-3a59c40b97ee] Received event network-vif-plugged-53db4677-3e45-4941-aa9d-ab7dee4a3da6 {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1479.258797] env[62519]: DEBUG oslo_concurrency.lockutils [req-54f22d08-bac9-439e-8a78-51354fd82c4c req-496bf4b0-929e-46e0-8729-d0ed5a54397d service nova] Acquiring lock "4a29bff8-050a-4ad5-9d06-3a59c40b97ee-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1479.259213] env[62519]: DEBUG oslo_concurrency.lockutils [req-54f22d08-bac9-439e-8a78-51354fd82c4c req-496bf4b0-929e-46e0-8729-d0ed5a54397d service nova] Lock "4a29bff8-050a-4ad5-9d06-3a59c40b97ee-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1479.260517] env[62519]: DEBUG oslo_concurrency.lockutils [req-54f22d08-bac9-439e-8a78-51354fd82c4c req-496bf4b0-929e-46e0-8729-d0ed5a54397d service nova] Lock "4a29bff8-050a-4ad5-9d06-3a59c40b97ee-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1479.260517] env[62519]: DEBUG nova.compute.manager [req-54f22d08-bac9-439e-8a78-51354fd82c4c req-496bf4b0-929e-46e0-8729-d0ed5a54397d service nova] [instance: 4a29bff8-050a-4ad5-9d06-3a59c40b97ee] No waiting events found dispatching network-vif-plugged-53db4677-3e45-4941-aa9d-ab7dee4a3da6 {{(pid=62519) pop_instance_event 
/opt/stack/nova/nova/compute/manager.py:323}} [ 1479.260517] env[62519]: WARNING nova.compute.manager [req-54f22d08-bac9-439e-8a78-51354fd82c4c req-496bf4b0-929e-46e0-8729-d0ed5a54397d service nova] [instance: 4a29bff8-050a-4ad5-9d06-3a59c40b97ee] Received unexpected event network-vif-plugged-53db4677-3e45-4941-aa9d-ab7dee4a3da6 for instance with vm_state building and task_state spawning. [ 1479.280941] env[62519]: DEBUG nova.network.neutron [None req-702f15b2-0754-4c5b-b9ec-b97a88471fff tempest-SecurityGroupsTestJSON-38868017 tempest-SecurityGroupsTestJSON-38868017-project-member] [instance: 4a29bff8-050a-4ad5-9d06-3a59c40b97ee] Successfully updated port: 53db4677-3e45-4941-aa9d-ab7dee4a3da6 {{(pid=62519) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1479.310590] env[62519]: DEBUG oslo_vmware.api [None req-ca8ca56a-35a2-4cb3-a5f6-65171c7c779d tempest-ServersAdminNegativeTestJSON-2096466474 tempest-ServersAdminNegativeTestJSON-2096466474-project-member] Task: {'id': task-1802147, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1479.730649] env[62519]: DEBUG nova.compute.utils [None req-7abad8d6-8e42-4549-a192-8cdae3b16ee9 tempest-ServerExternalEventsTest-627262165 tempest-ServerExternalEventsTest-627262165-project-member] Using /dev/sd instead of None {{(pid=62519) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1479.734349] env[62519]: DEBUG nova.compute.manager [None req-7abad8d6-8e42-4549-a192-8cdae3b16ee9 tempest-ServerExternalEventsTest-627262165 tempest-ServerExternalEventsTest-627262165-project-member] [instance: f7d5c77d-6c78-4969-b511-2b03ab624c84] Allocating IP information in the background. {{(pid=62519) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1989}} [ 1479.734958] env[62519]: DEBUG nova.network.neutron [None req-7abad8d6-8e42-4549-a192-8cdae3b16ee9 tempest-ServerExternalEventsTest-627262165 tempest-ServerExternalEventsTest-627262165-project-member] [instance: f7d5c77d-6c78-4969-b511-2b03ab624c84] allocate_for_instance() {{(pid=62519) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1479.783735] env[62519]: DEBUG oslo_concurrency.lockutils [None req-702f15b2-0754-4c5b-b9ec-b97a88471fff tempest-SecurityGroupsTestJSON-38868017 tempest-SecurityGroupsTestJSON-38868017-project-member] Acquiring lock "refresh_cache-4a29bff8-050a-4ad5-9d06-3a59c40b97ee" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1479.783883] env[62519]: DEBUG oslo_concurrency.lockutils [None req-702f15b2-0754-4c5b-b9ec-b97a88471fff tempest-SecurityGroupsTestJSON-38868017 tempest-SecurityGroupsTestJSON-38868017-project-member] Acquired lock "refresh_cache-4a29bff8-050a-4ad5-9d06-3a59c40b97ee" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1479.784040] env[62519]: DEBUG nova.network.neutron [None req-702f15b2-0754-4c5b-b9ec-b97a88471fff tempest-SecurityGroupsTestJSON-38868017 tempest-SecurityGroupsTestJSON-38868017-project-member] [instance: 4a29bff8-050a-4ad5-9d06-3a59c40b97ee] Building network info cache for instance {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1479.802459] env[62519]: DEBUG oslo_vmware.api [None req-ca8ca56a-35a2-4cb3-a5f6-65171c7c779d tempest-ServersAdminNegativeTestJSON-2096466474 tempest-ServersAdminNegativeTestJSON-2096466474-project-member] Task: {'id': 
task-1802147, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1479.807196] env[62519]: DEBUG nova.policy [None req-7abad8d6-8e42-4549-a192-8cdae3b16ee9 tempest-ServerExternalEventsTest-627262165 tempest-ServerExternalEventsTest-627262165-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '2c5237532edf4379b5399ffbb0520bad', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'a80d45ef751d4b00a4f96dc88918a088', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62519) authorize /opt/stack/nova/nova/policy.py:192}} [ 1480.235486] env[62519]: DEBUG nova.compute.manager [None req-7abad8d6-8e42-4549-a192-8cdae3b16ee9 tempest-ServerExternalEventsTest-627262165 tempest-ServerExternalEventsTest-627262165-project-member] [instance: f7d5c77d-6c78-4969-b511-2b03ab624c84] Start building block device mappings for instance. {{(pid=62519) _build_resources /opt/stack/nova/nova/compute/manager.py:2873}} [ 1480.306542] env[62519]: DEBUG oslo_vmware.api [None req-ca8ca56a-35a2-4cb3-a5f6-65171c7c779d tempest-ServersAdminNegativeTestJSON-2096466474 tempest-ServersAdminNegativeTestJSON-2096466474-project-member] Task: {'id': task-1802147, 'name': DeleteDatastoreFile_Task, 'duration_secs': 2.517307} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1480.306956] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-ca8ca56a-35a2-4cb3-a5f6-65171c7c779d tempest-ServersAdminNegativeTestJSON-2096466474 tempest-ServersAdminNegativeTestJSON-2096466474-project-member] Deleted the datastore file {{(pid=62519) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1480.307267] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-ca8ca56a-35a2-4cb3-a5f6-65171c7c779d tempest-ServersAdminNegativeTestJSON-2096466474 tempest-ServersAdminNegativeTestJSON-2096466474-project-member] [instance: 681ef7a9-3b24-450a-9034-6d30177995d7] Deleted contents of the VM from datastore datastore1 {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1480.307567] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-ca8ca56a-35a2-4cb3-a5f6-65171c7c779d tempest-ServersAdminNegativeTestJSON-2096466474 tempest-ServersAdminNegativeTestJSON-2096466474-project-member] [instance: 681ef7a9-3b24-450a-9034-6d30177995d7] Instance destroyed {{(pid=62519) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1480.307851] env[62519]: INFO nova.compute.manager [None req-ca8ca56a-35a2-4cb3-a5f6-65171c7c779d tempest-ServersAdminNegativeTestJSON-2096466474 tempest-ServersAdminNegativeTestJSON-2096466474-project-member] [instance: 681ef7a9-3b24-450a-9034-6d30177995d7] Took 3.59 seconds to destroy the instance on the hypervisor. [ 1480.308507] env[62519]: DEBUG oslo.service.loopingcall [None req-ca8ca56a-35a2-4cb3-a5f6-65171c7c779d tempest-ServersAdminNegativeTestJSON-2096466474 tempest-ServersAdminNegativeTestJSON-2096466474-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62519) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1480.311098] env[62519]: DEBUG nova.compute.manager [-] [instance: 681ef7a9-3b24-450a-9034-6d30177995d7] Deallocating network for instance {{(pid=62519) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2297}} [ 1480.311316] env[62519]: DEBUG nova.network.neutron [-] [instance: 681ef7a9-3b24-450a-9034-6d30177995d7] deallocate_for_instance() {{(pid=62519) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1480.339873] env[62519]: DEBUG nova.network.neutron [None req-702f15b2-0754-4c5b-b9ec-b97a88471fff tempest-SecurityGroupsTestJSON-38868017 tempest-SecurityGroupsTestJSON-38868017-project-member] [instance: 4a29bff8-050a-4ad5-9d06-3a59c40b97ee] Instance cache missing network info. {{(pid=62519) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1480.543825] env[62519]: DEBUG nova.network.neutron [None req-7abad8d6-8e42-4549-a192-8cdae3b16ee9 tempest-ServerExternalEventsTest-627262165 tempest-ServerExternalEventsTest-627262165-project-member] [instance: f7d5c77d-6c78-4969-b511-2b03ab624c84] Successfully created port: 11826675-9830-43c6-a3ed-cc8329005aa2 {{(pid=62519) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1480.565931] env[62519]: DEBUG nova.network.neutron [None req-702f15b2-0754-4c5b-b9ec-b97a88471fff tempest-SecurityGroupsTestJSON-38868017 tempest-SecurityGroupsTestJSON-38868017-project-member] [instance: 4a29bff8-050a-4ad5-9d06-3a59c40b97ee] Updating instance_info_cache with network_info: [{"id": "53db4677-3e45-4941-aa9d-ab7dee4a3da6", "address": "fa:16:3e:85:dd:21", "network": {"id": "9dc148fb-a4d7-47bd-b004-56a2552812eb", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-1607343584-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d044d408a8674580b0f5cd52ca6e756d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1f762954-6ca5-4da5-bf0a-5d31c51ec570", "external-id": "nsx-vlan-transportzone-930", "segmentation_id": 930, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap53db4677-3e", "ovs_interfaceid": "53db4677-3e45-4941-aa9d-ab7dee4a3da6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1480.858815] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-739b0257-4ddc-4e71-a3d5-2cf97c9bc26a {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1480.866873] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c005d29-2caf-4798-afdc-2d93c3fdede3 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1480.897849] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-bd535c24-99c1-455a-8175-99d08f91b769 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1480.904322] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee834aa2-c4d7-4eaf-ada3-f93e63f7f28c {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1480.917501] env[62519]: DEBUG nova.compute.provider_tree [None req-7fddec09-f5a6-4eb5-bfe2-235b0e99801c tempest-ServerShowV254Test-1623148250 tempest-ServerShowV254Test-1623148250-project-member] Inventory has not changed in ProviderTree for provider: f8ca0d98-9158-4b85-ae0e-b106f966dd44 {{(pid=62519) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1480.965355] env[62519]: DEBUG nova.compute.manager [req-e23c79e3-5a91-4c71-bc25-fcfe3c93c447 req-1c9cd185-22b5-4669-af43-614ae32ad42d service nova] [instance: 681ef7a9-3b24-450a-9034-6d30177995d7] Received event network-vif-deleted-0c70e934-3a08-4103-95df-ba8f3db80e91 {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1480.965542] env[62519]: INFO nova.compute.manager [req-e23c79e3-5a91-4c71-bc25-fcfe3c93c447 req-1c9cd185-22b5-4669-af43-614ae32ad42d service nova] [instance: 681ef7a9-3b24-450a-9034-6d30177995d7] Neutron deleted interface 0c70e934-3a08-4103-95df-ba8f3db80e91; detaching it from the instance and deleting it from the info cache [ 1480.965753] env[62519]: DEBUG nova.network.neutron [req-e23c79e3-5a91-4c71-bc25-fcfe3c93c447 req-1c9cd185-22b5-4669-af43-614ae32ad42d service nova] [instance: 681ef7a9-3b24-450a-9034-6d30177995d7] Updating instance_info_cache with network_info: [] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1481.071025] env[62519]: DEBUG oslo_concurrency.lockutils [None req-702f15b2-0754-4c5b-b9ec-b97a88471fff tempest-SecurityGroupsTestJSON-38868017 tempest-SecurityGroupsTestJSON-38868017-project-member] Releasing lock "refresh_cache-4a29bff8-050a-4ad5-9d06-3a59c40b97ee" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1481.071153] env[62519]: DEBUG nova.compute.manager [None req-702f15b2-0754-4c5b-b9ec-b97a88471fff tempest-SecurityGroupsTestJSON-38868017 tempest-SecurityGroupsTestJSON-38868017-project-member] [instance: 4a29bff8-050a-4ad5-9d06-3a59c40b97ee] Instance network_info: |[{"id": "53db4677-3e45-4941-aa9d-ab7dee4a3da6", "address": "fa:16:3e:85:dd:21", "network": {"id": "9dc148fb-a4d7-47bd-b004-56a2552812eb", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-1607343584-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d044d408a8674580b0f5cd52ca6e756d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1f762954-6ca5-4da5-bf0a-5d31c51ec570", "external-id": "nsx-vlan-transportzone-930", "segmentation_id": 930, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap53db4677-3e", "ovs_interfaceid": "53db4677-3e45-4941-aa9d-ab7dee4a3da6", "qbh_params": null, "qbg_params": null, 
"active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62519) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2004}} [ 1481.071772] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-702f15b2-0754-4c5b-b9ec-b97a88471fff tempest-SecurityGroupsTestJSON-38868017 tempest-SecurityGroupsTestJSON-38868017-project-member] [instance: 4a29bff8-050a-4ad5-9d06-3a59c40b97ee] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:85:dd:21', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '1f762954-6ca5-4da5-bf0a-5d31c51ec570', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '53db4677-3e45-4941-aa9d-ab7dee4a3da6', 'vif_model': 'vmxnet3'}] {{(pid=62519) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1481.080332] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-702f15b2-0754-4c5b-b9ec-b97a88471fff tempest-SecurityGroupsTestJSON-38868017 tempest-SecurityGroupsTestJSON-38868017-project-member] Creating folder: Project (d044d408a8674580b0f5cd52ca6e756d). Parent ref: group-v373567. {{(pid=62519) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1481.080647] env[62519]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ddc9c507-8454-46de-8f40-f680499ee3cf {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1481.091462] env[62519]: INFO nova.virt.vmwareapi.vm_util [None req-702f15b2-0754-4c5b-b9ec-b97a88471fff tempest-SecurityGroupsTestJSON-38868017 tempest-SecurityGroupsTestJSON-38868017-project-member] Created folder: Project (d044d408a8674580b0f5cd52ca6e756d) in parent group-v373567. [ 1481.091663] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-702f15b2-0754-4c5b-b9ec-b97a88471fff tempest-SecurityGroupsTestJSON-38868017 tempest-SecurityGroupsTestJSON-38868017-project-member] Creating folder: Instances. Parent ref: group-v373639. {{(pid=62519) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1481.091895] env[62519]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-66cba71f-3aef-46fe-8ff7-5271053715e4 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1481.100288] env[62519]: INFO nova.virt.vmwareapi.vm_util [None req-702f15b2-0754-4c5b-b9ec-b97a88471fff tempest-SecurityGroupsTestJSON-38868017 tempest-SecurityGroupsTestJSON-38868017-project-member] Created folder: Instances in parent group-v373639. [ 1481.100513] env[62519]: DEBUG oslo.service.loopingcall [None req-702f15b2-0754-4c5b-b9ec-b97a88471fff tempest-SecurityGroupsTestJSON-38868017 tempest-SecurityGroupsTestJSON-38868017-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62519) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1481.100698] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4a29bff8-050a-4ad5-9d06-3a59c40b97ee] Creating VM on the ESX host {{(pid=62519) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1481.100892] env[62519]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-67b3da64-6cb2-4e98-b27a-e9538dfb29e7 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1481.121424] env[62519]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1481.121424] env[62519]: value = "task-1802150" [ 1481.121424] env[62519]: _type = "Task" [ 1481.121424] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1481.128909] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1802150, 'name': CreateVM_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1481.206279] env[62519]: DEBUG nova.network.neutron [-] [instance: 681ef7a9-3b24-450a-9034-6d30177995d7] Updating instance_info_cache with network_info: [] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1481.248905] env[62519]: DEBUG nova.compute.manager [None req-7abad8d6-8e42-4549-a192-8cdae3b16ee9 tempest-ServerExternalEventsTest-627262165 tempest-ServerExternalEventsTest-627262165-project-member] [instance: f7d5c77d-6c78-4969-b511-2b03ab624c84] Start spawning the instance on the hypervisor. {{(pid=62519) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2647}} [ 1481.278486] env[62519]: DEBUG nova.virt.hardware [None req-7abad8d6-8e42-4549-a192-8cdae3b16ee9 tempest-ServerExternalEventsTest-627262165 tempest-ServerExternalEventsTest-627262165-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T07:56:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T07:55:55Z,direct_url=,disk_format='vmdk',id=15793716-f1d9-4a86-9030-717adf498693,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4069337684d348e0a1ab4eb6a1a2b14d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T07:55:56Z,virtual_size=,visibility=), allow threads: False {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1481.278729] env[62519]: DEBUG nova.virt.hardware [None req-7abad8d6-8e42-4549-a192-8cdae3b16ee9 tempest-ServerExternalEventsTest-627262165 tempest-ServerExternalEventsTest-627262165-project-member] Flavor limits 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1481.278890] env[62519]: DEBUG nova.virt.hardware [None req-7abad8d6-8e42-4549-a192-8cdae3b16ee9 tempest-ServerExternalEventsTest-627262165 tempest-ServerExternalEventsTest-627262165-project-member] Image limits 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1481.279058] env[62519]: DEBUG nova.virt.hardware [None 
req-7abad8d6-8e42-4549-a192-8cdae3b16ee9 tempest-ServerExternalEventsTest-627262165 tempest-ServerExternalEventsTest-627262165-project-member] Flavor pref 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1481.279196] env[62519]: DEBUG nova.virt.hardware [None req-7abad8d6-8e42-4549-a192-8cdae3b16ee9 tempest-ServerExternalEventsTest-627262165 tempest-ServerExternalEventsTest-627262165-project-member] Image pref 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1481.279339] env[62519]: DEBUG nova.virt.hardware [None req-7abad8d6-8e42-4549-a192-8cdae3b16ee9 tempest-ServerExternalEventsTest-627262165 tempest-ServerExternalEventsTest-627262165-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1481.279540] env[62519]: DEBUG nova.virt.hardware [None req-7abad8d6-8e42-4549-a192-8cdae3b16ee9 tempest-ServerExternalEventsTest-627262165 tempest-ServerExternalEventsTest-627262165-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1481.279690] env[62519]: DEBUG nova.virt.hardware [None req-7abad8d6-8e42-4549-a192-8cdae3b16ee9 tempest-ServerExternalEventsTest-627262165 tempest-ServerExternalEventsTest-627262165-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62519) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1481.279848] env[62519]: DEBUG nova.virt.hardware [None req-7abad8d6-8e42-4549-a192-8cdae3b16ee9 tempest-ServerExternalEventsTest-627262165 tempest-ServerExternalEventsTest-627262165-project-member] Got 1 possible topologies {{(pid=62519) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1481.280012] env[62519]: DEBUG nova.virt.hardware [None req-7abad8d6-8e42-4549-a192-8cdae3b16ee9 tempest-ServerExternalEventsTest-627262165 tempest-ServerExternalEventsTest-627262165-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1481.280186] env[62519]: DEBUG nova.virt.hardware [None req-7abad8d6-8e42-4549-a192-8cdae3b16ee9 tempest-ServerExternalEventsTest-627262165 tempest-ServerExternalEventsTest-627262165-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1481.281903] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9af3844f-52fb-48eb-8985-b4522ebfadd0 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1481.289796] env[62519]: DEBUG nova.compute.manager [req-5876e9f9-cbd9-47cf-b202-5ad19eb3b627 req-b0cb15c0-6656-41cc-b18c-c3c388683385 service nova] [instance: 4a29bff8-050a-4ad5-9d06-3a59c40b97ee] Received event network-changed-53db4677-3e45-4941-aa9d-ab7dee4a3da6 {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1481.289983] env[62519]: DEBUG nova.compute.manager [req-5876e9f9-cbd9-47cf-b202-5ad19eb3b627 req-b0cb15c0-6656-41cc-b18c-c3c388683385 service nova] [instance: 4a29bff8-050a-4ad5-9d06-3a59c40b97ee] Refreshing instance 
network info cache due to event network-changed-53db4677-3e45-4941-aa9d-ab7dee4a3da6. {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11628}} [ 1481.290208] env[62519]: DEBUG oslo_concurrency.lockutils [req-5876e9f9-cbd9-47cf-b202-5ad19eb3b627 req-b0cb15c0-6656-41cc-b18c-c3c388683385 service nova] Acquiring lock "refresh_cache-4a29bff8-050a-4ad5-9d06-3a59c40b97ee" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1481.290356] env[62519]: DEBUG oslo_concurrency.lockutils [req-5876e9f9-cbd9-47cf-b202-5ad19eb3b627 req-b0cb15c0-6656-41cc-b18c-c3c388683385 service nova] Acquired lock "refresh_cache-4a29bff8-050a-4ad5-9d06-3a59c40b97ee" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1481.290519] env[62519]: DEBUG nova.network.neutron [req-5876e9f9-cbd9-47cf-b202-5ad19eb3b627 req-b0cb15c0-6656-41cc-b18c-c3c388683385 service nova] [instance: 4a29bff8-050a-4ad5-9d06-3a59c40b97ee] Refreshing network info cache for port 53db4677-3e45-4941-aa9d-ab7dee4a3da6 {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1481.292168] env[62519]: DEBUG oslo_concurrency.lockutils [None req-df0df1dc-dc85-41ce-be1d-c03bdfcf0ead tempest-ServersTestBootFromVolume-845595414 tempest-ServersTestBootFromVolume-845595414-project-member] Acquiring lock "765cf18e-53a0-4cc6-ad0e-337a6f68915c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1481.292388] env[62519]: DEBUG oslo_concurrency.lockutils [None req-df0df1dc-dc85-41ce-be1d-c03bdfcf0ead tempest-ServersTestBootFromVolume-845595414 tempest-ServersTestBootFromVolume-845595414-project-member] Lock "765cf18e-53a0-4cc6-ad0e-337a6f68915c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1481.297462] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ba4bf38-85fa-46be-bfc1-41a14207d97d {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1481.422103] env[62519]: DEBUG nova.scheduler.client.report [None req-7fddec09-f5a6-4eb5-bfe2-235b0e99801c tempest-ServerShowV254Test-1623148250 tempest-ServerShowV254Test-1623148250-project-member] Inventory has not changed for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1481.469817] env[62519]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-2ec1a951-d1ae-4414-aadf-3aec722965a4 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1481.478298] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-00b0d43f-3063-4c25-a95e-12c802353fda {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1481.510249] env[62519]: DEBUG nova.compute.manager [req-e23c79e3-5a91-4c71-bc25-fcfe3c93c447 req-1c9cd185-22b5-4669-af43-614ae32ad42d service nova] [instance: 681ef7a9-3b24-450a-9034-6d30177995d7] Detach interface failed, port_id=0c70e934-3a08-4103-95df-ba8f3db80e91, reason: Instance 681ef7a9-3b24-450a-9034-6d30177995d7 could not be found. {{(pid=62519) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11455}} [ 1481.632747] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1802150, 'name': CreateVM_Task, 'duration_secs': 0.369764} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1481.632914] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4a29bff8-050a-4ad5-9d06-3a59c40b97ee] Created VM on the ESX host {{(pid=62519) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1481.633596] env[62519]: DEBUG oslo_concurrency.lockutils [None req-702f15b2-0754-4c5b-b9ec-b97a88471fff tempest-SecurityGroupsTestJSON-38868017 tempest-SecurityGroupsTestJSON-38868017-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1481.633770] env[62519]: DEBUG oslo_concurrency.lockutils [None req-702f15b2-0754-4c5b-b9ec-b97a88471fff tempest-SecurityGroupsTestJSON-38868017 tempest-SecurityGroupsTestJSON-38868017-project-member] Acquired lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1481.634096] env[62519]: DEBUG oslo_concurrency.lockutils [None req-702f15b2-0754-4c5b-b9ec-b97a88471fff tempest-SecurityGroupsTestJSON-38868017 tempest-SecurityGroupsTestJSON-38868017-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1481.634342] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1ea3c906-371e-498d-b5fa-86b73e08255f {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1481.639081] env[62519]: DEBUG oslo_vmware.api [None req-702f15b2-0754-4c5b-b9ec-b97a88471fff tempest-SecurityGroupsTestJSON-38868017 tempest-SecurityGroupsTestJSON-38868017-project-member] Waiting for the task: (returnval){ [ 1481.639081] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]52475e9c-7fd3-3d0b-4025-1dc0bfee828c" [ 1481.639081] env[62519]: _type = "Task" [ 1481.639081] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1481.646527] env[62519]: DEBUG oslo_vmware.api [None req-702f15b2-0754-4c5b-b9ec-b97a88471fff tempest-SecurityGroupsTestJSON-38868017 tempest-SecurityGroupsTestJSON-38868017-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52475e9c-7fd3-3d0b-4025-1dc0bfee828c, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1481.709848] env[62519]: INFO nova.compute.manager [-] [instance: 681ef7a9-3b24-450a-9034-6d30177995d7] Took 1.40 seconds to deallocate network for instance. [ 1481.927435] env[62519]: DEBUG oslo_concurrency.lockutils [None req-7fddec09-f5a6-4eb5-bfe2-235b0e99801c tempest-ServerShowV254Test-1623148250 tempest-ServerShowV254Test-1623148250-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.704s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1481.927974] env[62519]: DEBUG nova.compute.manager [None req-7fddec09-f5a6-4eb5-bfe2-235b0e99801c tempest-ServerShowV254Test-1623148250 tempest-ServerShowV254Test-1623148250-project-member] [instance: 1462d213-3f9a-4c60-8056-0b68f20a4939] Start building networks asynchronously for instance. {{(pid=62519) _build_resources /opt/stack/nova/nova/compute/manager.py:2838}} [ 1481.930549] env[62519]: DEBUG oslo_concurrency.lockutils [None req-583dd43c-6768-4d96-8b92-a6ff282828ba tempest-DeleteServersAdminTestJSON-456585889 tempest-DeleteServersAdminTestJSON-456585889-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 30.921s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1481.932045] env[62519]: INFO nova.compute.claims [None req-583dd43c-6768-4d96-8b92-a6ff282828ba tempest-DeleteServersAdminTestJSON-456585889 tempest-DeleteServersAdminTestJSON-456585889-project-member] [instance: 40c7a9b8-d541-464a-ba87-76cfc183ae31] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1482.037459] env[62519]: DEBUG nova.network.neutron [req-5876e9f9-cbd9-47cf-b202-5ad19eb3b627 req-b0cb15c0-6656-41cc-b18c-c3c388683385 service nova] [instance: 4a29bff8-050a-4ad5-9d06-3a59c40b97ee] Updated VIF entry in instance network info cache for port 53db4677-3e45-4941-aa9d-ab7dee4a3da6. 
{{(pid=62519) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1482.037459] env[62519]: DEBUG nova.network.neutron [req-5876e9f9-cbd9-47cf-b202-5ad19eb3b627 req-b0cb15c0-6656-41cc-b18c-c3c388683385 service nova] [instance: 4a29bff8-050a-4ad5-9d06-3a59c40b97ee] Updating instance_info_cache with network_info: [{"id": "53db4677-3e45-4941-aa9d-ab7dee4a3da6", "address": "fa:16:3e:85:dd:21", "network": {"id": "9dc148fb-a4d7-47bd-b004-56a2552812eb", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-1607343584-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d044d408a8674580b0f5cd52ca6e756d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1f762954-6ca5-4da5-bf0a-5d31c51ec570", "external-id": "nsx-vlan-transportzone-930", "segmentation_id": 930, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap53db4677-3e", "ovs_interfaceid": "53db4677-3e45-4941-aa9d-ab7dee4a3da6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1482.150212] env[62519]: DEBUG oslo_vmware.api [None req-702f15b2-0754-4c5b-b9ec-b97a88471fff tempest-SecurityGroupsTestJSON-38868017 tempest-SecurityGroupsTestJSON-38868017-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52475e9c-7fd3-3d0b-4025-1dc0bfee828c, 'name': SearchDatastore_Task, 'duration_secs': 0.157519} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1482.150699] env[62519]: DEBUG oslo_concurrency.lockutils [None req-702f15b2-0754-4c5b-b9ec-b97a88471fff tempest-SecurityGroupsTestJSON-38868017 tempest-SecurityGroupsTestJSON-38868017-project-member] Releasing lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1482.150968] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-702f15b2-0754-4c5b-b9ec-b97a88471fff tempest-SecurityGroupsTestJSON-38868017 tempest-SecurityGroupsTestJSON-38868017-project-member] [instance: 4a29bff8-050a-4ad5-9d06-3a59c40b97ee] Processing image 15793716-f1d9-4a86-9030-717adf498693 {{(pid=62519) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1482.151262] env[62519]: DEBUG oslo_concurrency.lockutils [None req-702f15b2-0754-4c5b-b9ec-b97a88471fff tempest-SecurityGroupsTestJSON-38868017 tempest-SecurityGroupsTestJSON-38868017-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1482.151443] env[62519]: DEBUG oslo_concurrency.lockutils [None req-702f15b2-0754-4c5b-b9ec-b97a88471fff tempest-SecurityGroupsTestJSON-38868017 tempest-SecurityGroupsTestJSON-38868017-project-member] Acquired lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1482.151655] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-702f15b2-0754-4c5b-b9ec-b97a88471fff tempest-SecurityGroupsTestJSON-38868017 tempest-SecurityGroupsTestJSON-38868017-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62519) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1482.151959] env[62519]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ac787933-1bd1-4bff-a755-fc510d41d832 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1482.162622] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-702f15b2-0754-4c5b-b9ec-b97a88471fff tempest-SecurityGroupsTestJSON-38868017 tempest-SecurityGroupsTestJSON-38868017-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62519) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1482.162814] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-702f15b2-0754-4c5b-b9ec-b97a88471fff tempest-SecurityGroupsTestJSON-38868017 tempest-SecurityGroupsTestJSON-38868017-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62519) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1482.164075] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-16d564a5-a075-43a1-9fd1-f821f7d03cc7 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1482.170323] env[62519]: DEBUG oslo_vmware.api [None req-702f15b2-0754-4c5b-b9ec-b97a88471fff tempest-SecurityGroupsTestJSON-38868017 tempest-SecurityGroupsTestJSON-38868017-project-member] Waiting for the task: (returnval){ [ 1482.170323] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]52d600a9-60d2-e325-c108-1c3d43f015b2" [ 1482.170323] env[62519]: _type = "Task" [ 1482.170323] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1482.179199] env[62519]: DEBUG oslo_vmware.api [None req-702f15b2-0754-4c5b-b9ec-b97a88471fff tempest-SecurityGroupsTestJSON-38868017 tempest-SecurityGroupsTestJSON-38868017-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52d600a9-60d2-e325-c108-1c3d43f015b2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1482.217644] env[62519]: DEBUG oslo_concurrency.lockutils [None req-ca8ca56a-35a2-4cb3-a5f6-65171c7c779d tempest-ServersAdminNegativeTestJSON-2096466474 tempest-ServersAdminNegativeTestJSON-2096466474-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1482.360704] env[62519]: DEBUG nova.network.neutron [None req-7abad8d6-8e42-4549-a192-8cdae3b16ee9 tempest-ServerExternalEventsTest-627262165 tempest-ServerExternalEventsTest-627262165-project-member] [instance: f7d5c77d-6c78-4969-b511-2b03ab624c84] Successfully updated port: 11826675-9830-43c6-a3ed-cc8329005aa2 {{(pid=62519) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1482.438540] env[62519]: DEBUG nova.compute.utils [None req-7fddec09-f5a6-4eb5-bfe2-235b0e99801c tempest-ServerShowV254Test-1623148250 tempest-ServerShowV254Test-1623148250-project-member] Using /dev/sd instead of None {{(pid=62519) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1482.440172] env[62519]: DEBUG nova.compute.manager [None req-7fddec09-f5a6-4eb5-bfe2-235b0e99801c tempest-ServerShowV254Test-1623148250 tempest-ServerShowV254Test-1623148250-project-member] [instance: 1462d213-3f9a-4c60-8056-0b68f20a4939] Not allocating networking since 'none' was specified. {{(pid=62519) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1985}} [ 1482.539404] env[62519]: DEBUG oslo_concurrency.lockutils [req-5876e9f9-cbd9-47cf-b202-5ad19eb3b627 req-b0cb15c0-6656-41cc-b18c-c3c388683385 service nova] Releasing lock "refresh_cache-4a29bff8-050a-4ad5-9d06-3a59c40b97ee" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1482.681048] env[62519]: DEBUG oslo_vmware.api [None req-702f15b2-0754-4c5b-b9ec-b97a88471fff tempest-SecurityGroupsTestJSON-38868017 tempest-SecurityGroupsTestJSON-38868017-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52d600a9-60d2-e325-c108-1c3d43f015b2, 'name': SearchDatastore_Task, 'duration_secs': 0.008416} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1482.681811] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5f855714-8194-47d5-ac90-dbae0f4841e9 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1482.686775] env[62519]: DEBUG oslo_vmware.api [None req-702f15b2-0754-4c5b-b9ec-b97a88471fff tempest-SecurityGroupsTestJSON-38868017 tempest-SecurityGroupsTestJSON-38868017-project-member] Waiting for the task: (returnval){ [ 1482.686775] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]52cb1429-c99c-66f1-8be5-cc58ca681867" [ 1482.686775] env[62519]: _type = "Task" [ 1482.686775] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1482.694113] env[62519]: DEBUG oslo_vmware.api [None req-702f15b2-0754-4c5b-b9ec-b97a88471fff tempest-SecurityGroupsTestJSON-38868017 tempest-SecurityGroupsTestJSON-38868017-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52cb1429-c99c-66f1-8be5-cc58ca681867, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1482.863716] env[62519]: DEBUG oslo_concurrency.lockutils [None req-7abad8d6-8e42-4549-a192-8cdae3b16ee9 tempest-ServerExternalEventsTest-627262165 tempest-ServerExternalEventsTest-627262165-project-member] Acquiring lock "refresh_cache-f7d5c77d-6c78-4969-b511-2b03ab624c84" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1482.863716] env[62519]: DEBUG oslo_concurrency.lockutils [None req-7abad8d6-8e42-4549-a192-8cdae3b16ee9 tempest-ServerExternalEventsTest-627262165 tempest-ServerExternalEventsTest-627262165-project-member] Acquired lock "refresh_cache-f7d5c77d-6c78-4969-b511-2b03ab624c84" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1482.863904] env[62519]: DEBUG nova.network.neutron [None req-7abad8d6-8e42-4549-a192-8cdae3b16ee9 tempest-ServerExternalEventsTest-627262165 tempest-ServerExternalEventsTest-627262165-project-member] [instance: f7d5c77d-6c78-4969-b511-2b03ab624c84] Building network info cache for instance {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1482.943351] env[62519]: DEBUG nova.compute.manager [None req-7fddec09-f5a6-4eb5-bfe2-235b0e99801c tempest-ServerShowV254Test-1623148250 tempest-ServerShowV254Test-1623148250-project-member] [instance: 1462d213-3f9a-4c60-8056-0b68f20a4939] Start building block device mappings for instance. {{(pid=62519) _build_resources /opt/stack/nova/nova/compute/manager.py:2873}} [ 1483.199728] env[62519]: DEBUG oslo_vmware.api [None req-702f15b2-0754-4c5b-b9ec-b97a88471fff tempest-SecurityGroupsTestJSON-38868017 tempest-SecurityGroupsTestJSON-38868017-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52cb1429-c99c-66f1-8be5-cc58ca681867, 'name': SearchDatastore_Task, 'duration_secs': 0.008933} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1483.199728] env[62519]: DEBUG oslo_concurrency.lockutils [None req-702f15b2-0754-4c5b-b9ec-b97a88471fff tempest-SecurityGroupsTestJSON-38868017 tempest-SecurityGroupsTestJSON-38868017-project-member] Releasing lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1483.199965] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-702f15b2-0754-4c5b-b9ec-b97a88471fff tempest-SecurityGroupsTestJSON-38868017 tempest-SecurityGroupsTestJSON-38868017-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk to [datastore1] 4a29bff8-050a-4ad5-9d06-3a59c40b97ee/4a29bff8-050a-4ad5-9d06-3a59c40b97ee.vmdk {{(pid=62519) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1483.200176] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-5a6df57a-43d8-4c5c-83d3-99d32c4cb0c9 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1483.207561] env[62519]: DEBUG oslo_vmware.api [None req-702f15b2-0754-4c5b-b9ec-b97a88471fff tempest-SecurityGroupsTestJSON-38868017 tempest-SecurityGroupsTestJSON-38868017-project-member] Waiting for the task: (returnval){ [ 1483.207561] env[62519]: value = "task-1802151" [ 1483.207561] env[62519]: _type = "Task" [ 1483.207561] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1483.219590] env[62519]: DEBUG oslo_vmware.api [None req-702f15b2-0754-4c5b-b9ec-b97a88471fff tempest-SecurityGroupsTestJSON-38868017 tempest-SecurityGroupsTestJSON-38868017-project-member] Task: {'id': task-1802151, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1483.352731] env[62519]: DEBUG nova.compute.manager [req-4eb8be0d-172b-4f56-a1a8-1e967230c2f9 req-37f7cd4c-76b1-4ca8-9bd2-99b5557ee8de service nova] [instance: f7d5c77d-6c78-4969-b511-2b03ab624c84] Received event network-vif-plugged-11826675-9830-43c6-a3ed-cc8329005aa2 {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1483.352731] env[62519]: DEBUG oslo_concurrency.lockutils [req-4eb8be0d-172b-4f56-a1a8-1e967230c2f9 req-37f7cd4c-76b1-4ca8-9bd2-99b5557ee8de service nova] Acquiring lock "f7d5c77d-6c78-4969-b511-2b03ab624c84-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1483.352731] env[62519]: DEBUG oslo_concurrency.lockutils [req-4eb8be0d-172b-4f56-a1a8-1e967230c2f9 req-37f7cd4c-76b1-4ca8-9bd2-99b5557ee8de service nova] Lock "f7d5c77d-6c78-4969-b511-2b03ab624c84-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1483.352958] env[62519]: DEBUG oslo_concurrency.lockutils [req-4eb8be0d-172b-4f56-a1a8-1e967230c2f9 req-37f7cd4c-76b1-4ca8-9bd2-99b5557ee8de service nova] Lock "f7d5c77d-6c78-4969-b511-2b03ab624c84-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1483.353049] env[62519]: DEBUG nova.compute.manager [req-4eb8be0d-172b-4f56-a1a8-1e967230c2f9 req-37f7cd4c-76b1-4ca8-9bd2-99b5557ee8de service nova] [instance: f7d5c77d-6c78-4969-b511-2b03ab624c84] No waiting events found dispatching network-vif-plugged-11826675-9830-43c6-a3ed-cc8329005aa2 {{(pid=62519) pop_instance_event /opt/stack/nova/nova/compute/manager.py:323}} [ 1483.353206] env[62519]: WARNING nova.compute.manager [req-4eb8be0d-172b-4f56-a1a8-1e967230c2f9 req-37f7cd4c-76b1-4ca8-9bd2-99b5557ee8de service nova] [instance: f7d5c77d-6c78-4969-b511-2b03ab624c84] Received unexpected event network-vif-plugged-11826675-9830-43c6-a3ed-cc8329005aa2 for instance with vm_state building and task_state spawning. [ 1483.353356] env[62519]: DEBUG nova.compute.manager [req-4eb8be0d-172b-4f56-a1a8-1e967230c2f9 req-37f7cd4c-76b1-4ca8-9bd2-99b5557ee8de service nova] [instance: f7d5c77d-6c78-4969-b511-2b03ab624c84] Received event network-changed-11826675-9830-43c6-a3ed-cc8329005aa2 {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1483.353517] env[62519]: DEBUG nova.compute.manager [req-4eb8be0d-172b-4f56-a1a8-1e967230c2f9 req-37f7cd4c-76b1-4ca8-9bd2-99b5557ee8de service nova] [instance: f7d5c77d-6c78-4969-b511-2b03ab624c84] Refreshing instance network info cache due to event network-changed-11826675-9830-43c6-a3ed-cc8329005aa2. 
{{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11628}} [ 1483.354563] env[62519]: DEBUG oslo_concurrency.lockutils [req-4eb8be0d-172b-4f56-a1a8-1e967230c2f9 req-37f7cd4c-76b1-4ca8-9bd2-99b5557ee8de service nova] Acquiring lock "refresh_cache-f7d5c77d-6c78-4969-b511-2b03ab624c84" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1483.409997] env[62519]: DEBUG nova.network.neutron [None req-7abad8d6-8e42-4549-a192-8cdae3b16ee9 tempest-ServerExternalEventsTest-627262165 tempest-ServerExternalEventsTest-627262165-project-member] [instance: f7d5c77d-6c78-4969-b511-2b03ab624c84] Instance cache missing network info. {{(pid=62519) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1483.443216] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76dd8e1b-b1cc-4e63-ae87-823a3ca40a00 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1483.457774] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50ab3601-8199-47db-b282-d3aa6bc412ff {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1483.502427] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4801120c-622e-49ca-ad76-4db21b977eb2 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1483.512648] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c21a8714-f7ce-412c-b212-81981d2a4ddd {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1483.528769] env[62519]: DEBUG nova.compute.provider_tree [None req-583dd43c-6768-4d96-8b92-a6ff282828ba tempest-DeleteServersAdminTestJSON-456585889 tempest-DeleteServersAdminTestJSON-456585889-project-member] Inventory has not changed in ProviderTree for provider: f8ca0d98-9158-4b85-ae0e-b106f966dd44 {{(pid=62519) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1483.661614] env[62519]: DEBUG nova.network.neutron [None req-7abad8d6-8e42-4549-a192-8cdae3b16ee9 tempest-ServerExternalEventsTest-627262165 tempest-ServerExternalEventsTest-627262165-project-member] [instance: f7d5c77d-6c78-4969-b511-2b03ab624c84] Updating instance_info_cache with network_info: [{"id": "11826675-9830-43c6-a3ed-cc8329005aa2", "address": "fa:16:3e:50:03:3c", "network": {"id": "b4c20e4e-b4f0-4757-9ab0-b74eef10b678", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.206", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "4069337684d348e0a1ab4eb6a1a2b14d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fe99da4f-5630-4afd-918b-b327193d8489", "external-id": "nsx-vlan-transportzone-688", "segmentation_id": 688, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap11826675-98", "ovs_interfaceid": "11826675-9830-43c6-a3ed-cc8329005aa2", 
"qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1483.718063] env[62519]: DEBUG oslo_vmware.api [None req-702f15b2-0754-4c5b-b9ec-b97a88471fff tempest-SecurityGroupsTestJSON-38868017 tempest-SecurityGroupsTestJSON-38868017-project-member] Task: {'id': task-1802151, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.500313} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1483.718357] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-702f15b2-0754-4c5b-b9ec-b97a88471fff tempest-SecurityGroupsTestJSON-38868017 tempest-SecurityGroupsTestJSON-38868017-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk to [datastore1] 4a29bff8-050a-4ad5-9d06-3a59c40b97ee/4a29bff8-050a-4ad5-9d06-3a59c40b97ee.vmdk {{(pid=62519) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1483.718571] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-702f15b2-0754-4c5b-b9ec-b97a88471fff tempest-SecurityGroupsTestJSON-38868017 tempest-SecurityGroupsTestJSON-38868017-project-member] [instance: 4a29bff8-050a-4ad5-9d06-3a59c40b97ee] Extending root virtual disk to 1048576 {{(pid=62519) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1483.718822] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-013bb4de-6834-4a7a-ae50-de7b3627dee0 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1483.727257] env[62519]: DEBUG oslo_vmware.api [None req-702f15b2-0754-4c5b-b9ec-b97a88471fff tempest-SecurityGroupsTestJSON-38868017 tempest-SecurityGroupsTestJSON-38868017-project-member] Waiting for the task: (returnval){ [ 1483.727257] env[62519]: value = "task-1802152" [ 1483.727257] env[62519]: _type = "Task" [ 1483.727257] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1483.736084] env[62519]: DEBUG oslo_vmware.api [None req-702f15b2-0754-4c5b-b9ec-b97a88471fff tempest-SecurityGroupsTestJSON-38868017 tempest-SecurityGroupsTestJSON-38868017-project-member] Task: {'id': task-1802152, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1483.958534] env[62519]: DEBUG nova.compute.manager [None req-7fddec09-f5a6-4eb5-bfe2-235b0e99801c tempest-ServerShowV254Test-1623148250 tempest-ServerShowV254Test-1623148250-project-member] [instance: 1462d213-3f9a-4c60-8056-0b68f20a4939] Start spawning the instance on the hypervisor. 
{{(pid=62519) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2647}} [ 1483.985036] env[62519]: DEBUG nova.virt.hardware [None req-7fddec09-f5a6-4eb5-bfe2-235b0e99801c tempest-ServerShowV254Test-1623148250 tempest-ServerShowV254Test-1623148250-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T07:56:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T07:55:55Z,direct_url=,disk_format='vmdk',id=15793716-f1d9-4a86-9030-717adf498693,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4069337684d348e0a1ab4eb6a1a2b14d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T07:55:56Z,virtual_size=,visibility=), allow threads: False {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1483.985541] env[62519]: DEBUG nova.virt.hardware [None req-7fddec09-f5a6-4eb5-bfe2-235b0e99801c tempest-ServerShowV254Test-1623148250 tempest-ServerShowV254Test-1623148250-project-member] Flavor limits 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1483.986462] env[62519]: DEBUG nova.virt.hardware [None req-7fddec09-f5a6-4eb5-bfe2-235b0e99801c tempest-ServerShowV254Test-1623148250 tempest-ServerShowV254Test-1623148250-project-member] Image limits 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1483.986462] env[62519]: DEBUG nova.virt.hardware [None req-7fddec09-f5a6-4eb5-bfe2-235b0e99801c tempest-ServerShowV254Test-1623148250 tempest-ServerShowV254Test-1623148250-project-member] Flavor pref 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1483.986462] env[62519]: DEBUG nova.virt.hardware [None req-7fddec09-f5a6-4eb5-bfe2-235b0e99801c tempest-ServerShowV254Test-1623148250 tempest-ServerShowV254Test-1623148250-project-member] Image pref 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1483.986725] env[62519]: DEBUG nova.virt.hardware [None req-7fddec09-f5a6-4eb5-bfe2-235b0e99801c tempest-ServerShowV254Test-1623148250 tempest-ServerShowV254Test-1623148250-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1483.986725] env[62519]: DEBUG nova.virt.hardware [None req-7fddec09-f5a6-4eb5-bfe2-235b0e99801c tempest-ServerShowV254Test-1623148250 tempest-ServerShowV254Test-1623148250-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1483.987711] env[62519]: DEBUG nova.virt.hardware [None req-7fddec09-f5a6-4eb5-bfe2-235b0e99801c tempest-ServerShowV254Test-1623148250 tempest-ServerShowV254Test-1623148250-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62519) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1483.987711] env[62519]: DEBUG nova.virt.hardware [None req-7fddec09-f5a6-4eb5-bfe2-235b0e99801c 
tempest-ServerShowV254Test-1623148250 tempest-ServerShowV254Test-1623148250-project-member] Got 1 possible topologies {{(pid=62519) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1483.987711] env[62519]: DEBUG nova.virt.hardware [None req-7fddec09-f5a6-4eb5-bfe2-235b0e99801c tempest-ServerShowV254Test-1623148250 tempest-ServerShowV254Test-1623148250-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1483.987711] env[62519]: DEBUG nova.virt.hardware [None req-7fddec09-f5a6-4eb5-bfe2-235b0e99801c tempest-ServerShowV254Test-1623148250 tempest-ServerShowV254Test-1623148250-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1483.988282] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-592e402e-fc58-4108-bcc6-352435b03bc1 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1483.997228] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30415313-4615-4763-8478-23500f06d83a {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1484.011025] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-7fddec09-f5a6-4eb5-bfe2-235b0e99801c tempest-ServerShowV254Test-1623148250 tempest-ServerShowV254Test-1623148250-project-member] [instance: 1462d213-3f9a-4c60-8056-0b68f20a4939] Instance VIF info [] {{(pid=62519) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1484.016265] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-7fddec09-f5a6-4eb5-bfe2-235b0e99801c tempest-ServerShowV254Test-1623148250 tempest-ServerShowV254Test-1623148250-project-member] Creating folder: Project (730699e3af3748d1a44c4662ad0579e8). Parent ref: group-v373567. {{(pid=62519) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1484.016533] env[62519]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b1147f3d-ac7b-49f9-8dfc-73b10654b1b2 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1484.032261] env[62519]: DEBUG nova.scheduler.client.report [None req-583dd43c-6768-4d96-8b92-a6ff282828ba tempest-DeleteServersAdminTestJSON-456585889 tempest-DeleteServersAdminTestJSON-456585889-project-member] Inventory has not changed for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1484.037484] env[62519]: INFO nova.virt.vmwareapi.vm_util [None req-7fddec09-f5a6-4eb5-bfe2-235b0e99801c tempest-ServerShowV254Test-1623148250 tempest-ServerShowV254Test-1623148250-project-member] Created folder: Project (730699e3af3748d1a44c4662ad0579e8) in parent group-v373567. 
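The surrounding entries show the standard vmwareapi spawn pattern: the driver invokes a vSphere method through the oslo.vmware session (Folder.CreateFolder, Folder.CreateVM_Task), vCenter immediately hands back a Task managed object, and oslo_vmware.api's wait_for_task polls it until completion (the repeated "progress is 0%" lines). A minimal sketch of that call-and-wait pattern, assuming an already-established oslo_vmware.api.VMwareAPISession named session; vm_folder_ref, config_spec and res_pool_ref are illustrative placeholders, not values taken from this log:

# Sketch only: start a vSphere task and block on it the way the
# Folder.CreateVM_Task / wait_for_task entries in this log do.
# `session` is assumed to be an existing oslo_vmware.api.VMwareAPISession.
def create_vm_and_wait(session, vm_folder_ref, config_spec, res_pool_ref):
    # invoke_api issues the SOAP call; vCenter answers immediately with a
    # Task managed object reference rather than the finished VM.
    task = session.invoke_api(session.vim, 'CreateVM_Task', vm_folder_ref,
                              config=config_spec, pool=res_pool_ref)
    # wait_for_task polls the Task (the repeated "progress is 0%" entries)
    # until vCenter reports success and then returns its TaskInfo; an error
    # state is raised as an oslo_vmware exception instead.
    task_info = session.wait_for_task(task)
    return task_info.result  # managed object reference of the new VM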
[ 1484.037616] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-7fddec09-f5a6-4eb5-bfe2-235b0e99801c tempest-ServerShowV254Test-1623148250 tempest-ServerShowV254Test-1623148250-project-member] Creating folder: Instances. Parent ref: group-v373642. {{(pid=62519) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1484.037843] env[62519]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ba19b8a8-0dad-4f41-8ac9-5ca78ac577c2 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1484.047242] env[62519]: INFO nova.virt.vmwareapi.vm_util [None req-7fddec09-f5a6-4eb5-bfe2-235b0e99801c tempest-ServerShowV254Test-1623148250 tempest-ServerShowV254Test-1623148250-project-member] Created folder: Instances in parent group-v373642. [ 1484.047478] env[62519]: DEBUG oslo.service.loopingcall [None req-7fddec09-f5a6-4eb5-bfe2-235b0e99801c tempest-ServerShowV254Test-1623148250 tempest-ServerShowV254Test-1623148250-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62519) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1484.047661] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1462d213-3f9a-4c60-8056-0b68f20a4939] Creating VM on the ESX host {{(pid=62519) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1484.047856] env[62519]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-737dea28-2cab-4496-a99c-ac4a272a9af3 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1484.064463] env[62519]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1484.064463] env[62519]: value = "task-1802155" [ 1484.064463] env[62519]: _type = "Task" [ 1484.064463] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1484.073227] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1802155, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1484.164673] env[62519]: DEBUG oslo_concurrency.lockutils [None req-7abad8d6-8e42-4549-a192-8cdae3b16ee9 tempest-ServerExternalEventsTest-627262165 tempest-ServerExternalEventsTest-627262165-project-member] Releasing lock "refresh_cache-f7d5c77d-6c78-4969-b511-2b03ab624c84" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1484.165106] env[62519]: DEBUG nova.compute.manager [None req-7abad8d6-8e42-4549-a192-8cdae3b16ee9 tempest-ServerExternalEventsTest-627262165 tempest-ServerExternalEventsTest-627262165-project-member] [instance: f7d5c77d-6c78-4969-b511-2b03ab624c84] Instance network_info: |[{"id": "11826675-9830-43c6-a3ed-cc8329005aa2", "address": "fa:16:3e:50:03:3c", "network": {"id": "b4c20e4e-b4f0-4757-9ab0-b74eef10b678", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.206", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "4069337684d348e0a1ab4eb6a1a2b14d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fe99da4f-5630-4afd-918b-b327193d8489", "external-id": "nsx-vlan-transportzone-688", "segmentation_id": 688, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap11826675-98", "ovs_interfaceid": "11826675-9830-43c6-a3ed-cc8329005aa2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62519) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2004}} [ 1484.165506] env[62519]: DEBUG oslo_concurrency.lockutils [req-4eb8be0d-172b-4f56-a1a8-1e967230c2f9 req-37f7cd4c-76b1-4ca8-9bd2-99b5557ee8de service nova] Acquired lock "refresh_cache-f7d5c77d-6c78-4969-b511-2b03ab624c84" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1484.165762] env[62519]: DEBUG nova.network.neutron [req-4eb8be0d-172b-4f56-a1a8-1e967230c2f9 req-37f7cd4c-76b1-4ca8-9bd2-99b5557ee8de service nova] [instance: f7d5c77d-6c78-4969-b511-2b03ab624c84] Refreshing network info cache for port 11826675-9830-43c6-a3ed-cc8329005aa2 {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1484.167318] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-7abad8d6-8e42-4549-a192-8cdae3b16ee9 tempest-ServerExternalEventsTest-627262165 tempest-ServerExternalEventsTest-627262165-project-member] [instance: f7d5c77d-6c78-4969-b511-2b03ab624c84] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:50:03:3c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'fe99da4f-5630-4afd-918b-b327193d8489', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '11826675-9830-43c6-a3ed-cc8329005aa2', 'vif_model': 'vmxnet3'}] {{(pid=62519) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1484.175851] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-7abad8d6-8e42-4549-a192-8cdae3b16ee9 tempest-ServerExternalEventsTest-627262165 tempest-ServerExternalEventsTest-627262165-project-member] Creating 
folder: Project (a80d45ef751d4b00a4f96dc88918a088). Parent ref: group-v373567. {{(pid=62519) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1484.178973] env[62519]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f7723a49-c4e2-4239-ba66-1e99b929b36e {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1484.192365] env[62519]: INFO nova.virt.vmwareapi.vm_util [None req-7abad8d6-8e42-4549-a192-8cdae3b16ee9 tempest-ServerExternalEventsTest-627262165 tempest-ServerExternalEventsTest-627262165-project-member] Created folder: Project (a80d45ef751d4b00a4f96dc88918a088) in parent group-v373567. [ 1484.192638] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-7abad8d6-8e42-4549-a192-8cdae3b16ee9 tempest-ServerExternalEventsTest-627262165 tempest-ServerExternalEventsTest-627262165-project-member] Creating folder: Instances. Parent ref: group-v373645. {{(pid=62519) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1484.192885] env[62519]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-56e343c9-0dc7-4714-9eb6-cae2d16c62c5 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1484.203333] env[62519]: INFO nova.virt.vmwareapi.vm_util [None req-7abad8d6-8e42-4549-a192-8cdae3b16ee9 tempest-ServerExternalEventsTest-627262165 tempest-ServerExternalEventsTest-627262165-project-member] Created folder: Instances in parent group-v373645. [ 1484.203627] env[62519]: DEBUG oslo.service.loopingcall [None req-7abad8d6-8e42-4549-a192-8cdae3b16ee9 tempest-ServerExternalEventsTest-627262165 tempest-ServerExternalEventsTest-627262165-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62519) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1484.203861] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f7d5c77d-6c78-4969-b511-2b03ab624c84] Creating VM on the ESX host {{(pid=62519) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1484.204129] env[62519]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-603ff1d8-3873-460a-a1c4-f438c588ced2 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1484.225493] env[62519]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1484.225493] env[62519]: value = "task-1802158" [ 1484.225493] env[62519]: _type = "Task" [ 1484.225493] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1484.236853] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1802158, 'name': CreateVM_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1484.239780] env[62519]: DEBUG oslo_vmware.api [None req-702f15b2-0754-4c5b-b9ec-b97a88471fff tempest-SecurityGroupsTestJSON-38868017 tempest-SecurityGroupsTestJSON-38868017-project-member] Task: {'id': task-1802152, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.068764} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1484.240052] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-702f15b2-0754-4c5b-b9ec-b97a88471fff tempest-SecurityGroupsTestJSON-38868017 tempest-SecurityGroupsTestJSON-38868017-project-member] [instance: 4a29bff8-050a-4ad5-9d06-3a59c40b97ee] Extended root virtual disk {{(pid=62519) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1484.240832] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-981981d6-f158-422b-803c-614f91f168e6 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1484.266567] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-702f15b2-0754-4c5b-b9ec-b97a88471fff tempest-SecurityGroupsTestJSON-38868017 tempest-SecurityGroupsTestJSON-38868017-project-member] [instance: 4a29bff8-050a-4ad5-9d06-3a59c40b97ee] Reconfiguring VM instance instance-00000019 to attach disk [datastore1] 4a29bff8-050a-4ad5-9d06-3a59c40b97ee/4a29bff8-050a-4ad5-9d06-3a59c40b97ee.vmdk or device None with type sparse {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1484.266567] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-530a57e2-8b55-4da2-85ed-2ec78e6ab4ed {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1484.288833] env[62519]: DEBUG oslo_vmware.api [None req-702f15b2-0754-4c5b-b9ec-b97a88471fff tempest-SecurityGroupsTestJSON-38868017 tempest-SecurityGroupsTestJSON-38868017-project-member] Waiting for the task: (returnval){ [ 1484.288833] env[62519]: value = "task-1802159" [ 1484.288833] env[62519]: _type = "Task" [ 1484.288833] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1484.297396] env[62519]: DEBUG oslo_vmware.api [None req-702f15b2-0754-4c5b-b9ec-b97a88471fff tempest-SecurityGroupsTestJSON-38868017 tempest-SecurityGroupsTestJSON-38868017-project-member] Task: {'id': task-1802159, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1484.537900] env[62519]: DEBUG oslo_concurrency.lockutils [None req-583dd43c-6768-4d96-8b92-a6ff282828ba tempest-DeleteServersAdminTestJSON-456585889 tempest-DeleteServersAdminTestJSON-456585889-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.607s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1484.538530] env[62519]: DEBUG nova.compute.manager [None req-583dd43c-6768-4d96-8b92-a6ff282828ba tempest-DeleteServersAdminTestJSON-456585889 tempest-DeleteServersAdminTestJSON-456585889-project-member] [instance: 40c7a9b8-d541-464a-ba87-76cfc183ae31] Start building networks asynchronously for instance. 
{{(pid=62519) _build_resources /opt/stack/nova/nova/compute/manager.py:2838}} [ 1484.542338] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2b9c37cf-110c-4ed1-b062-9198747e913f tempest-InstanceActionsV221TestJSON-1566764076 tempest-InstanceActionsV221TestJSON-1566764076-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 32.949s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1484.542669] env[62519]: DEBUG nova.objects.instance [None req-2b9c37cf-110c-4ed1-b062-9198747e913f tempest-InstanceActionsV221TestJSON-1566764076 tempest-InstanceActionsV221TestJSON-1566764076-project-member] Lazy-loading 'resources' on Instance uuid c612a1be-fb39-416d-a9d2-d206582e5aeb {{(pid=62519) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1484.577977] env[62519]: DEBUG nova.network.neutron [req-4eb8be0d-172b-4f56-a1a8-1e967230c2f9 req-37f7cd4c-76b1-4ca8-9bd2-99b5557ee8de service nova] [instance: f7d5c77d-6c78-4969-b511-2b03ab624c84] Updated VIF entry in instance network info cache for port 11826675-9830-43c6-a3ed-cc8329005aa2. {{(pid=62519) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1484.578402] env[62519]: DEBUG nova.network.neutron [req-4eb8be0d-172b-4f56-a1a8-1e967230c2f9 req-37f7cd4c-76b1-4ca8-9bd2-99b5557ee8de service nova] [instance: f7d5c77d-6c78-4969-b511-2b03ab624c84] Updating instance_info_cache with network_info: [{"id": "11826675-9830-43c6-a3ed-cc8329005aa2", "address": "fa:16:3e:50:03:3c", "network": {"id": "b4c20e4e-b4f0-4757-9ab0-b74eef10b678", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.206", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "4069337684d348e0a1ab4eb6a1a2b14d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fe99da4f-5630-4afd-918b-b327193d8489", "external-id": "nsx-vlan-transportzone-688", "segmentation_id": 688, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap11826675-98", "ovs_interfaceid": "11826675-9830-43c6-a3ed-cc8329005aa2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1484.586054] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1802155, 'name': CreateVM_Task, 'duration_secs': 0.333345} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1484.586921] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1462d213-3f9a-4c60-8056-0b68f20a4939] Created VM on the ESX host {{(pid=62519) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1484.587184] env[62519]: DEBUG oslo_concurrency.lockutils [None req-7fddec09-f5a6-4eb5-bfe2-235b0e99801c tempest-ServerShowV254Test-1623148250 tempest-ServerShowV254Test-1623148250-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1484.587639] env[62519]: DEBUG oslo_concurrency.lockutils [None req-7fddec09-f5a6-4eb5-bfe2-235b0e99801c tempest-ServerShowV254Test-1623148250 tempest-ServerShowV254Test-1623148250-project-member] Acquired lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1484.588056] env[62519]: DEBUG oslo_concurrency.lockutils [None req-7fddec09-f5a6-4eb5-bfe2-235b0e99801c tempest-ServerShowV254Test-1623148250 tempest-ServerShowV254Test-1623148250-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1484.588388] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d31b758e-dda8-4a93-ab81-69e58176d4d2 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1484.596420] env[62519]: DEBUG oslo_vmware.api [None req-7fddec09-f5a6-4eb5-bfe2-235b0e99801c tempest-ServerShowV254Test-1623148250 tempest-ServerShowV254Test-1623148250-project-member] Waiting for the task: (returnval){ [ 1484.596420] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]521c6eae-0ef5-1850-0893-4a474e6f7941" [ 1484.596420] env[62519]: _type = "Task" [ 1484.596420] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1484.606819] env[62519]: DEBUG oslo_vmware.api [None req-7fddec09-f5a6-4eb5-bfe2-235b0e99801c tempest-ServerShowV254Test-1623148250 tempest-ServerShowV254Test-1623148250-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]521c6eae-0ef5-1850-0893-4a474e6f7941, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1484.736033] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1802158, 'name': CreateVM_Task, 'duration_secs': 0.4134} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1484.736160] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f7d5c77d-6c78-4969-b511-2b03ab624c84] Created VM on the ESX host {{(pid=62519) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1484.736818] env[62519]: DEBUG oslo_concurrency.lockutils [None req-7abad8d6-8e42-4549-a192-8cdae3b16ee9 tempest-ServerExternalEventsTest-627262165 tempest-ServerExternalEventsTest-627262165-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1484.799078] env[62519]: DEBUG oslo_vmware.api [None req-702f15b2-0754-4c5b-b9ec-b97a88471fff tempest-SecurityGroupsTestJSON-38868017 tempest-SecurityGroupsTestJSON-38868017-project-member] Task: {'id': task-1802159, 'name': ReconfigVM_Task, 'duration_secs': 0.334781} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1484.799384] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-702f15b2-0754-4c5b-b9ec-b97a88471fff tempest-SecurityGroupsTestJSON-38868017 tempest-SecurityGroupsTestJSON-38868017-project-member] [instance: 4a29bff8-050a-4ad5-9d06-3a59c40b97ee] Reconfigured VM instance instance-00000019 to attach disk [datastore1] 4a29bff8-050a-4ad5-9d06-3a59c40b97ee/4a29bff8-050a-4ad5-9d06-3a59c40b97ee.vmdk or device None with type sparse {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1484.800020] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-31114f4d-6fbc-42db-8218-7e8216b7d813 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1484.807931] env[62519]: DEBUG oslo_vmware.api [None req-702f15b2-0754-4c5b-b9ec-b97a88471fff tempest-SecurityGroupsTestJSON-38868017 tempest-SecurityGroupsTestJSON-38868017-project-member] Waiting for the task: (returnval){ [ 1484.807931] env[62519]: value = "task-1802160" [ 1484.807931] env[62519]: _type = "Task" [ 1484.807931] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1484.816526] env[62519]: DEBUG oslo_vmware.api [None req-702f15b2-0754-4c5b-b9ec-b97a88471fff tempest-SecurityGroupsTestJSON-38868017 tempest-SecurityGroupsTestJSON-38868017-project-member] Task: {'id': task-1802160, 'name': Rename_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1485.046053] env[62519]: DEBUG nova.compute.utils [None req-583dd43c-6768-4d96-8b92-a6ff282828ba tempest-DeleteServersAdminTestJSON-456585889 tempest-DeleteServersAdminTestJSON-456585889-project-member] Using /dev/sd instead of None {{(pid=62519) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1485.047466] env[62519]: DEBUG nova.compute.manager [None req-583dd43c-6768-4d96-8b92-a6ff282828ba tempest-DeleteServersAdminTestJSON-456585889 tempest-DeleteServersAdminTestJSON-456585889-project-member] [instance: 40c7a9b8-d541-464a-ba87-76cfc183ae31] Allocating IP information in the background. 
{{(pid=62519) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1989}} [ 1485.047638] env[62519]: DEBUG nova.network.neutron [None req-583dd43c-6768-4d96-8b92-a6ff282828ba tempest-DeleteServersAdminTestJSON-456585889 tempest-DeleteServersAdminTestJSON-456585889-project-member] [instance: 40c7a9b8-d541-464a-ba87-76cfc183ae31] allocate_for_instance() {{(pid=62519) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1485.088112] env[62519]: DEBUG oslo_concurrency.lockutils [req-4eb8be0d-172b-4f56-a1a8-1e967230c2f9 req-37f7cd4c-76b1-4ca8-9bd2-99b5557ee8de service nova] Releasing lock "refresh_cache-f7d5c77d-6c78-4969-b511-2b03ab624c84" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1485.109470] env[62519]: DEBUG oslo_vmware.api [None req-7fddec09-f5a6-4eb5-bfe2-235b0e99801c tempest-ServerShowV254Test-1623148250 tempest-ServerShowV254Test-1623148250-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]521c6eae-0ef5-1850-0893-4a474e6f7941, 'name': SearchDatastore_Task, 'duration_secs': 0.01282} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1485.110868] env[62519]: DEBUG nova.policy [None req-583dd43c-6768-4d96-8b92-a6ff282828ba tempest-DeleteServersAdminTestJSON-456585889 tempest-DeleteServersAdminTestJSON-456585889-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '5206cbd7eee2460cb041e0df1142c8ea', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'fb743623be114e74bb9f73bbca086979', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62519) authorize /opt/stack/nova/nova/policy.py:192}} [ 1485.112763] env[62519]: DEBUG oslo_concurrency.lockutils [None req-7fddec09-f5a6-4eb5-bfe2-235b0e99801c tempest-ServerShowV254Test-1623148250 tempest-ServerShowV254Test-1623148250-project-member] Releasing lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1485.113072] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-7fddec09-f5a6-4eb5-bfe2-235b0e99801c tempest-ServerShowV254Test-1623148250 tempest-ServerShowV254Test-1623148250-project-member] [instance: 1462d213-3f9a-4c60-8056-0b68f20a4939] Processing image 15793716-f1d9-4a86-9030-717adf498693 {{(pid=62519) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1485.113272] env[62519]: DEBUG oslo_concurrency.lockutils [None req-7fddec09-f5a6-4eb5-bfe2-235b0e99801c tempest-ServerShowV254Test-1623148250 tempest-ServerShowV254Test-1623148250-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1485.113416] env[62519]: DEBUG oslo_concurrency.lockutils [None req-7fddec09-f5a6-4eb5-bfe2-235b0e99801c tempest-ServerShowV254Test-1623148250 tempest-ServerShowV254Test-1623148250-project-member] Acquired lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" 
{{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1485.113647] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-7fddec09-f5a6-4eb5-bfe2-235b0e99801c tempest-ServerShowV254Test-1623148250 tempest-ServerShowV254Test-1623148250-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62519) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1485.116252] env[62519]: DEBUG oslo_concurrency.lockutils [None req-7abad8d6-8e42-4549-a192-8cdae3b16ee9 tempest-ServerExternalEventsTest-627262165 tempest-ServerExternalEventsTest-627262165-project-member] Acquired lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1485.116564] env[62519]: DEBUG oslo_concurrency.lockutils [None req-7abad8d6-8e42-4549-a192-8cdae3b16ee9 tempest-ServerExternalEventsTest-627262165 tempest-ServerExternalEventsTest-627262165-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1485.116833] env[62519]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-5c93ee73-b56a-408a-aa46-883c3c9a145a {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1485.119437] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c4f0a2af-ad26-4eef-b575-2920385be310 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1485.125264] env[62519]: DEBUG oslo_vmware.api [None req-7abad8d6-8e42-4549-a192-8cdae3b16ee9 tempest-ServerExternalEventsTest-627262165 tempest-ServerExternalEventsTest-627262165-project-member] Waiting for the task: (returnval){ [ 1485.125264] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]52a227eb-c32f-c0ff-6606-9d00d717fa32" [ 1485.125264] env[62519]: _type = "Task" [ 1485.125264] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1485.131781] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-7fddec09-f5a6-4eb5-bfe2-235b0e99801c tempest-ServerShowV254Test-1623148250 tempest-ServerShowV254Test-1623148250-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62519) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1485.131962] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-7fddec09-f5a6-4eb5-bfe2-235b0e99801c tempest-ServerShowV254Test-1623148250 tempest-ServerShowV254Test-1623148250-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62519) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1485.133136] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ae408af1-8571-4968-a02d-48b1e046a2a6 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1485.138813] env[62519]: DEBUG oslo_vmware.api [None req-7abad8d6-8e42-4549-a192-8cdae3b16ee9 tempest-ServerExternalEventsTest-627262165 tempest-ServerExternalEventsTest-627262165-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52a227eb-c32f-c0ff-6606-9d00d717fa32, 'name': SearchDatastore_Task, 'duration_secs': 0.010311} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1485.139416] env[62519]: DEBUG oslo_concurrency.lockutils [None req-7abad8d6-8e42-4549-a192-8cdae3b16ee9 tempest-ServerExternalEventsTest-627262165 tempest-ServerExternalEventsTest-627262165-project-member] Releasing lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1485.139672] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-7abad8d6-8e42-4549-a192-8cdae3b16ee9 tempest-ServerExternalEventsTest-627262165 tempest-ServerExternalEventsTest-627262165-project-member] [instance: f7d5c77d-6c78-4969-b511-2b03ab624c84] Processing image 15793716-f1d9-4a86-9030-717adf498693 {{(pid=62519) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1485.139866] env[62519]: DEBUG oslo_concurrency.lockutils [None req-7abad8d6-8e42-4549-a192-8cdae3b16ee9 tempest-ServerExternalEventsTest-627262165 tempest-ServerExternalEventsTest-627262165-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1485.145982] env[62519]: DEBUG oslo_vmware.api [None req-7fddec09-f5a6-4eb5-bfe2-235b0e99801c tempest-ServerShowV254Test-1623148250 tempest-ServerShowV254Test-1623148250-project-member] Waiting for the task: (returnval){ [ 1485.145982] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]52d08242-b824-396b-8218-3876cf94ba63" [ 1485.145982] env[62519]: _type = "Task" [ 1485.145982] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1485.155081] env[62519]: DEBUG oslo_vmware.api [None req-7fddec09-f5a6-4eb5-bfe2-235b0e99801c tempest-ServerShowV254Test-1623148250 tempest-ServerShowV254Test-1623148250-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52d08242-b824-396b-8218-3876cf94ba63, 'name': SearchDatastore_Task, 'duration_secs': 0.010231} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1485.158245] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f90d3110-26d0-419d-bd21-d8d95a64678d {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1485.163562] env[62519]: DEBUG oslo_vmware.api [None req-7fddec09-f5a6-4eb5-bfe2-235b0e99801c tempest-ServerShowV254Test-1623148250 tempest-ServerShowV254Test-1623148250-project-member] Waiting for the task: (returnval){ [ 1485.163562] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]52a20f45-5cc7-9487-c078-67dd30d6dac0" [ 1485.163562] env[62519]: _type = "Task" [ 1485.163562] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1485.172636] env[62519]: DEBUG oslo_vmware.api [None req-7fddec09-f5a6-4eb5-bfe2-235b0e99801c tempest-ServerShowV254Test-1623148250 tempest-ServerShowV254Test-1623148250-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52a20f45-5cc7-9487-c078-67dd30d6dac0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1485.321691] env[62519]: DEBUG oslo_vmware.api [None req-702f15b2-0754-4c5b-b9ec-b97a88471fff tempest-SecurityGroupsTestJSON-38868017 tempest-SecurityGroupsTestJSON-38868017-project-member] Task: {'id': task-1802160, 'name': Rename_Task, 'duration_secs': 0.143178} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1485.321960] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-702f15b2-0754-4c5b-b9ec-b97a88471fff tempest-SecurityGroupsTestJSON-38868017 tempest-SecurityGroupsTestJSON-38868017-project-member] [instance: 4a29bff8-050a-4ad5-9d06-3a59c40b97ee] Powering on the VM {{(pid=62519) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1485.322282] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-39f798a0-f6e2-4d00-8e63-585b0bdb31c4 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1485.330126] env[62519]: DEBUG oslo_vmware.api [None req-702f15b2-0754-4c5b-b9ec-b97a88471fff tempest-SecurityGroupsTestJSON-38868017 tempest-SecurityGroupsTestJSON-38868017-project-member] Waiting for the task: (returnval){ [ 1485.330126] env[62519]: value = "task-1802161" [ 1485.330126] env[62519]: _type = "Task" [ 1485.330126] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1485.343058] env[62519]: DEBUG oslo_vmware.api [None req-702f15b2-0754-4c5b-b9ec-b97a88471fff tempest-SecurityGroupsTestJSON-38868017 tempest-SecurityGroupsTestJSON-38868017-project-member] Task: {'id': task-1802161, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1485.544028] env[62519]: DEBUG nova.network.neutron [None req-583dd43c-6768-4d96-8b92-a6ff282828ba tempest-DeleteServersAdminTestJSON-456585889 tempest-DeleteServersAdminTestJSON-456585889-project-member] [instance: 40c7a9b8-d541-464a-ba87-76cfc183ae31] Successfully created port: e1c64107-2501-4168-900e-4b4153b778b9 {{(pid=62519) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1485.552884] env[62519]: DEBUG nova.compute.manager [None req-583dd43c-6768-4d96-8b92-a6ff282828ba tempest-DeleteServersAdminTestJSON-456585889 tempest-DeleteServersAdminTestJSON-456585889-project-member] [instance: 40c7a9b8-d541-464a-ba87-76cfc183ae31] Start building block device mappings for instance. {{(pid=62519) _build_resources /opt/stack/nova/nova/compute/manager.py:2873}} [ 1485.613742] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0ab0e8c-17ea-4bde-afeb-4f79591d8455 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1485.622571] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5473c8b-c86c-4200-8aac-ac5aba8ff736 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1485.655314] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d2cda24-a0e8-4285-8806-9bcca497eb54 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1485.671189] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-85412fd1-f0e7-4307-b156-bc706384b512 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1485.682144] env[62519]: DEBUG oslo_vmware.api [None req-7fddec09-f5a6-4eb5-bfe2-235b0e99801c tempest-ServerShowV254Test-1623148250 tempest-ServerShowV254Test-1623148250-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52a20f45-5cc7-9487-c078-67dd30d6dac0, 'name': SearchDatastore_Task, 'duration_secs': 0.0094} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1485.689032] env[62519]: DEBUG oslo_concurrency.lockutils [None req-7fddec09-f5a6-4eb5-bfe2-235b0e99801c tempest-ServerShowV254Test-1623148250 tempest-ServerShowV254Test-1623148250-project-member] Releasing lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1485.689220] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-7fddec09-f5a6-4eb5-bfe2-235b0e99801c tempest-ServerShowV254Test-1623148250 tempest-ServerShowV254Test-1623148250-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk to [datastore1] 1462d213-3f9a-4c60-8056-0b68f20a4939/1462d213-3f9a-4c60-8056-0b68f20a4939.vmdk {{(pid=62519) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1485.689690] env[62519]: DEBUG nova.compute.provider_tree [None req-2b9c37cf-110c-4ed1-b062-9198747e913f tempest-InstanceActionsV221TestJSON-1566764076 tempest-InstanceActionsV221TestJSON-1566764076-project-member] Inventory has not changed in ProviderTree for provider: f8ca0d98-9158-4b85-ae0e-b106f966dd44 {{(pid=62519) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1485.691044] env[62519]: DEBUG oslo_concurrency.lockutils [None req-7abad8d6-8e42-4549-a192-8cdae3b16ee9 tempest-ServerExternalEventsTest-627262165 tempest-ServerExternalEventsTest-627262165-project-member] Acquired lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1485.691214] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-7abad8d6-8e42-4549-a192-8cdae3b16ee9 tempest-ServerExternalEventsTest-627262165 tempest-ServerExternalEventsTest-627262165-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62519) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1485.691429] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-97abbcc5-17e9-438c-a388-5b12023d8e19 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1485.693810] env[62519]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-1f391650-88af-4f61-8488-2c34001fd2f9 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1485.703214] env[62519]: DEBUG oslo_vmware.api [None req-7fddec09-f5a6-4eb5-bfe2-235b0e99801c tempest-ServerShowV254Test-1623148250 tempest-ServerShowV254Test-1623148250-project-member] Waiting for the task: (returnval){ [ 1485.703214] env[62519]: value = "task-1802162" [ 1485.703214] env[62519]: _type = "Task" [ 1485.703214] env[62519]: } to complete. 
{{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1485.704334] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-7abad8d6-8e42-4549-a192-8cdae3b16ee9 tempest-ServerExternalEventsTest-627262165 tempest-ServerExternalEventsTest-627262165-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62519) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1485.704624] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-7abad8d6-8e42-4549-a192-8cdae3b16ee9 tempest-ServerExternalEventsTest-627262165 tempest-ServerExternalEventsTest-627262165-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62519) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1485.707989] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a307099e-af02-4539-b277-e3514ab91454 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1485.714703] env[62519]: DEBUG oslo_vmware.api [None req-7abad8d6-8e42-4549-a192-8cdae3b16ee9 tempest-ServerExternalEventsTest-627262165 tempest-ServerExternalEventsTest-627262165-project-member] Waiting for the task: (returnval){ [ 1485.714703] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]520bac40-2092-3053-9737-cbf1a3fce625" [ 1485.714703] env[62519]: _type = "Task" [ 1485.714703] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1485.718764] env[62519]: DEBUG oslo_vmware.api [None req-7fddec09-f5a6-4eb5-bfe2-235b0e99801c tempest-ServerShowV254Test-1623148250 tempest-ServerShowV254Test-1623148250-project-member] Task: {'id': task-1802162, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1485.730492] env[62519]: DEBUG oslo_vmware.api [None req-7abad8d6-8e42-4549-a192-8cdae3b16ee9 tempest-ServerExternalEventsTest-627262165 tempest-ServerExternalEventsTest-627262165-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]520bac40-2092-3053-9737-cbf1a3fce625, 'name': SearchDatastore_Task, 'duration_secs': 0.009268} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1485.731237] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5c989a19-ac34-492d-8004-d28c09ac0010 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1485.738243] env[62519]: DEBUG oslo_vmware.api [None req-7abad8d6-8e42-4549-a192-8cdae3b16ee9 tempest-ServerExternalEventsTest-627262165 tempest-ServerExternalEventsTest-627262165-project-member] Waiting for the task: (returnval){ [ 1485.738243] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]52ac0ded-8d6d-67c5-ad26-d9ac3cda2d23" [ 1485.738243] env[62519]: _type = "Task" [ 1485.738243] env[62519]: } to complete. 
{{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1485.747141] env[62519]: DEBUG oslo_vmware.api [None req-7abad8d6-8e42-4549-a192-8cdae3b16ee9 tempest-ServerExternalEventsTest-627262165 tempest-ServerExternalEventsTest-627262165-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52ac0ded-8d6d-67c5-ad26-d9ac3cda2d23, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1485.841145] env[62519]: DEBUG oslo_vmware.api [None req-702f15b2-0754-4c5b-b9ec-b97a88471fff tempest-SecurityGroupsTestJSON-38868017 tempest-SecurityGroupsTestJSON-38868017-project-member] Task: {'id': task-1802161, 'name': PowerOnVM_Task, 'duration_secs': 0.466061} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1485.841439] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-702f15b2-0754-4c5b-b9ec-b97a88471fff tempest-SecurityGroupsTestJSON-38868017 tempest-SecurityGroupsTestJSON-38868017-project-member] [instance: 4a29bff8-050a-4ad5-9d06-3a59c40b97ee] Powered on the VM {{(pid=62519) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1485.841668] env[62519]: INFO nova.compute.manager [None req-702f15b2-0754-4c5b-b9ec-b97a88471fff tempest-SecurityGroupsTestJSON-38868017 tempest-SecurityGroupsTestJSON-38868017-project-member] [instance: 4a29bff8-050a-4ad5-9d06-3a59c40b97ee] Took 7.30 seconds to spawn the instance on the hypervisor. [ 1485.841889] env[62519]: DEBUG nova.compute.manager [None req-702f15b2-0754-4c5b-b9ec-b97a88471fff tempest-SecurityGroupsTestJSON-38868017 tempest-SecurityGroupsTestJSON-38868017-project-member] [instance: 4a29bff8-050a-4ad5-9d06-3a59c40b97ee] Checking state {{(pid=62519) _get_power_state /opt/stack/nova/nova/compute/manager.py:1799}} [ 1485.842688] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89ea3dde-ea5a-4b38-a82e-85090711bff5 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1486.195790] env[62519]: DEBUG nova.scheduler.client.report [None req-2b9c37cf-110c-4ed1-b062-9198747e913f tempest-InstanceActionsV221TestJSON-1566764076 tempest-InstanceActionsV221TestJSON-1566764076-project-member] Inventory has not changed for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1486.215933] env[62519]: DEBUG oslo_vmware.api [None req-7fddec09-f5a6-4eb5-bfe2-235b0e99801c tempest-ServerShowV254Test-1623148250 tempest-ServerShowV254Test-1623148250-project-member] Task: {'id': task-1802162, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.472323} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1486.217132] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-7fddec09-f5a6-4eb5-bfe2-235b0e99801c tempest-ServerShowV254Test-1623148250 tempest-ServerShowV254Test-1623148250-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk to [datastore1] 1462d213-3f9a-4c60-8056-0b68f20a4939/1462d213-3f9a-4c60-8056-0b68f20a4939.vmdk {{(pid=62519) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1486.217455] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-7fddec09-f5a6-4eb5-bfe2-235b0e99801c tempest-ServerShowV254Test-1623148250 tempest-ServerShowV254Test-1623148250-project-member] [instance: 1462d213-3f9a-4c60-8056-0b68f20a4939] Extending root virtual disk to 1048576 {{(pid=62519) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1486.217803] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-595e6351-7cf1-4cd5-b095-9a342d7dee21 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1486.226553] env[62519]: DEBUG oslo_vmware.api [None req-7fddec09-f5a6-4eb5-bfe2-235b0e99801c tempest-ServerShowV254Test-1623148250 tempest-ServerShowV254Test-1623148250-project-member] Waiting for the task: (returnval){ [ 1486.226553] env[62519]: value = "task-1802163" [ 1486.226553] env[62519]: _type = "Task" [ 1486.226553] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1486.237103] env[62519]: DEBUG oslo_vmware.api [None req-7fddec09-f5a6-4eb5-bfe2-235b0e99801c tempest-ServerShowV254Test-1623148250 tempest-ServerShowV254Test-1623148250-project-member] Task: {'id': task-1802163, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1486.251763] env[62519]: DEBUG oslo_vmware.api [None req-7abad8d6-8e42-4549-a192-8cdae3b16ee9 tempest-ServerExternalEventsTest-627262165 tempest-ServerExternalEventsTest-627262165-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52ac0ded-8d6d-67c5-ad26-d9ac3cda2d23, 'name': SearchDatastore_Task, 'duration_secs': 0.009591} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1486.252036] env[62519]: DEBUG oslo_concurrency.lockutils [None req-7abad8d6-8e42-4549-a192-8cdae3b16ee9 tempest-ServerExternalEventsTest-627262165 tempest-ServerExternalEventsTest-627262165-project-member] Releasing lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1486.252292] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-7abad8d6-8e42-4549-a192-8cdae3b16ee9 tempest-ServerExternalEventsTest-627262165 tempest-ServerExternalEventsTest-627262165-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk to [datastore1] f7d5c77d-6c78-4969-b511-2b03ab624c84/f7d5c77d-6c78-4969-b511-2b03ab624c84.vmdk {{(pid=62519) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1486.252543] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-653ff118-200b-4292-a841-544d601f42b7 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1486.259401] env[62519]: DEBUG oslo_vmware.api [None req-7abad8d6-8e42-4549-a192-8cdae3b16ee9 tempest-ServerExternalEventsTest-627262165 tempest-ServerExternalEventsTest-627262165-project-member] Waiting for the task: (returnval){ [ 1486.259401] env[62519]: value = "task-1802164" [ 1486.259401] env[62519]: _type = "Task" [ 1486.259401] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1486.268371] env[62519]: DEBUG oslo_vmware.api [None req-7abad8d6-8e42-4549-a192-8cdae3b16ee9 tempest-ServerExternalEventsTest-627262165 tempest-ServerExternalEventsTest-627262165-project-member] Task: {'id': task-1802164, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1486.361025] env[62519]: INFO nova.compute.manager [None req-702f15b2-0754-4c5b-b9ec-b97a88471fff tempest-SecurityGroupsTestJSON-38868017 tempest-SecurityGroupsTestJSON-38868017-project-member] [instance: 4a29bff8-050a-4ad5-9d06-3a59c40b97ee] Took 42.40 seconds to build instance. [ 1486.559654] env[62519]: DEBUG nova.compute.manager [None req-583dd43c-6768-4d96-8b92-a6ff282828ba tempest-DeleteServersAdminTestJSON-456585889 tempest-DeleteServersAdminTestJSON-456585889-project-member] [instance: 40c7a9b8-d541-464a-ba87-76cfc183ae31] Start spawning the instance on the hypervisor. 
{{(pid=62519) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2647}} [ 1486.586216] env[62519]: DEBUG nova.virt.hardware [None req-583dd43c-6768-4d96-8b92-a6ff282828ba tempest-DeleteServersAdminTestJSON-456585889 tempest-DeleteServersAdminTestJSON-456585889-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T07:56:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T07:55:55Z,direct_url=,disk_format='vmdk',id=15793716-f1d9-4a86-9030-717adf498693,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4069337684d348e0a1ab4eb6a1a2b14d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T07:55:56Z,virtual_size=,visibility=), allow threads: False {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1486.586427] env[62519]: DEBUG nova.virt.hardware [None req-583dd43c-6768-4d96-8b92-a6ff282828ba tempest-DeleteServersAdminTestJSON-456585889 tempest-DeleteServersAdminTestJSON-456585889-project-member] Flavor limits 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1486.586589] env[62519]: DEBUG nova.virt.hardware [None req-583dd43c-6768-4d96-8b92-a6ff282828ba tempest-DeleteServersAdminTestJSON-456585889 tempest-DeleteServersAdminTestJSON-456585889-project-member] Image limits 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1486.586738] env[62519]: DEBUG nova.virt.hardware [None req-583dd43c-6768-4d96-8b92-a6ff282828ba tempest-DeleteServersAdminTestJSON-456585889 tempest-DeleteServersAdminTestJSON-456585889-project-member] Flavor pref 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1486.586885] env[62519]: DEBUG nova.virt.hardware [None req-583dd43c-6768-4d96-8b92-a6ff282828ba tempest-DeleteServersAdminTestJSON-456585889 tempest-DeleteServersAdminTestJSON-456585889-project-member] Image pref 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1486.587040] env[62519]: DEBUG nova.virt.hardware [None req-583dd43c-6768-4d96-8b92-a6ff282828ba tempest-DeleteServersAdminTestJSON-456585889 tempest-DeleteServersAdminTestJSON-456585889-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1486.587250] env[62519]: DEBUG nova.virt.hardware [None req-583dd43c-6768-4d96-8b92-a6ff282828ba tempest-DeleteServersAdminTestJSON-456585889 tempest-DeleteServersAdminTestJSON-456585889-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1486.587407] env[62519]: DEBUG nova.virt.hardware [None req-583dd43c-6768-4d96-8b92-a6ff282828ba tempest-DeleteServersAdminTestJSON-456585889 tempest-DeleteServersAdminTestJSON-456585889-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62519) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} 
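The "Build topologies for 1 vcpu(s) 1:1:1" entry above and the "Got 1 possible topologies" entry that follows come from nova.virt.hardware resolving a CPU topology from the flavor and image limits (the 0:0:0 limits above mean "unset", so the 65536 defaults apply). The sketch below is a simplified illustration of that enumeration, not the actual nova/virt/hardware.py code (which also weighs preferred topologies and thread policies): every (sockets, cores, threads) combination within the limits whose product equals the vCPU count is a candidate.

```python
# Simplified illustration only -- not nova.virt.hardware itself.
from collections import namedtuple

Topology = namedtuple("Topology", "sockets cores threads")

def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
    """Enumerate (sockets, cores, threads) combos whose product is exactly vcpus."""
    candidates = []
    # Each dimension never needs to exceed the vCPU count, since the product
    # of the three dimensions must equal it.
    for s in range(1, min(max_sockets, vcpus) + 1):
        for c in range(1, min(max_cores, vcpus) + 1):
            for t in range(1, min(max_threads, vcpus) + 1):
                if s * c * t == vcpus:
                    candidates.append(Topology(s, c, t))
    return candidates

# For the 1-vCPU m1.nano flavor in this log the result is a single candidate,
# matching the "[VirtCPUTopology(cores=1,sockets=1,threads=1)]" entry below:
# [Topology(sockets=1, cores=1, threads=1)]
```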
[ 1486.587569] env[62519]: DEBUG nova.virt.hardware [None req-583dd43c-6768-4d96-8b92-a6ff282828ba tempest-DeleteServersAdminTestJSON-456585889 tempest-DeleteServersAdminTestJSON-456585889-project-member] Got 1 possible topologies {{(pid=62519) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1486.587729] env[62519]: DEBUG nova.virt.hardware [None req-583dd43c-6768-4d96-8b92-a6ff282828ba tempest-DeleteServersAdminTestJSON-456585889 tempest-DeleteServersAdminTestJSON-456585889-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1486.587900] env[62519]: DEBUG nova.virt.hardware [None req-583dd43c-6768-4d96-8b92-a6ff282828ba tempest-DeleteServersAdminTestJSON-456585889 tempest-DeleteServersAdminTestJSON-456585889-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1486.589062] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ffe2393-e290-4c27-91f8-0edb7ecb06df {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1486.597415] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59026274-e4fd-4f2c-9a4b-2bc3280f45cb {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1486.701397] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2b9c37cf-110c-4ed1-b062-9198747e913f tempest-InstanceActionsV221TestJSON-1566764076 tempest-InstanceActionsV221TestJSON-1566764076-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.159s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1486.704410] env[62519]: DEBUG oslo_concurrency.lockutils [None req-6d590639-68af-4fac-aa10-a83b048f703c tempest-ServersTestMultiNic-797441404 tempest-ServersTestMultiNic-797441404-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 34.181s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1486.705153] env[62519]: INFO nova.compute.claims [None req-6d590639-68af-4fac-aa10-a83b048f703c tempest-ServersTestMultiNic-797441404 tempest-ServersTestMultiNic-797441404-project-member] [instance: cfefa7c8-4986-4ad0-ac20-8784ee44a737] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1486.725441] env[62519]: INFO nova.scheduler.client.report [None req-2b9c37cf-110c-4ed1-b062-9198747e913f tempest-InstanceActionsV221TestJSON-1566764076 tempest-InstanceActionsV221TestJSON-1566764076-project-member] Deleted allocations for instance c612a1be-fb39-416d-a9d2-d206582e5aeb [ 1486.742893] env[62519]: DEBUG oslo_vmware.api [None req-7fddec09-f5a6-4eb5-bfe2-235b0e99801c tempest-ServerShowV254Test-1623148250 tempest-ServerShowV254Test-1623148250-project-member] Task: {'id': task-1802163, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.061461} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1486.743145] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-7fddec09-f5a6-4eb5-bfe2-235b0e99801c tempest-ServerShowV254Test-1623148250 tempest-ServerShowV254Test-1623148250-project-member] [instance: 1462d213-3f9a-4c60-8056-0b68f20a4939] Extended root virtual disk {{(pid=62519) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1486.743947] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c287a852-e8f2-4d2e-b575-d9f212c88dfd {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1486.766383] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-7fddec09-f5a6-4eb5-bfe2-235b0e99801c tempest-ServerShowV254Test-1623148250 tempest-ServerShowV254Test-1623148250-project-member] [instance: 1462d213-3f9a-4c60-8056-0b68f20a4939] Reconfiguring VM instance instance-0000001b to attach disk [datastore1] 1462d213-3f9a-4c60-8056-0b68f20a4939/1462d213-3f9a-4c60-8056-0b68f20a4939.vmdk or device None with type sparse {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1486.767744] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1b8cea78-aea1-4b65-b5a7-e22a6e3af0b0 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1486.791747] env[62519]: DEBUG oslo_vmware.api [None req-7abad8d6-8e42-4549-a192-8cdae3b16ee9 tempest-ServerExternalEventsTest-627262165 tempest-ServerExternalEventsTest-627262165-project-member] Task: {'id': task-1802164, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1486.793431] env[62519]: DEBUG oslo_vmware.api [None req-7fddec09-f5a6-4eb5-bfe2-235b0e99801c tempest-ServerShowV254Test-1623148250 tempest-ServerShowV254Test-1623148250-project-member] Waiting for the task: (returnval){ [ 1486.793431] env[62519]: value = "task-1802165" [ 1486.793431] env[62519]: _type = "Task" [ 1486.793431] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1486.802013] env[62519]: DEBUG oslo_vmware.api [None req-7fddec09-f5a6-4eb5-bfe2-235b0e99801c tempest-ServerShowV254Test-1623148250 tempest-ServerShowV254Test-1623148250-project-member] Task: {'id': task-1802165, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1487.142441] env[62519]: DEBUG nova.compute.manager [req-0a027089-6302-42cd-9380-26fd55085eb2 req-62a036bd-a5c9-4efc-affb-504b9491febd service nova] [instance: 4a29bff8-050a-4ad5-9d06-3a59c40b97ee] Received event network-changed-53db4677-3e45-4941-aa9d-ab7dee4a3da6 {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1487.142667] env[62519]: DEBUG nova.compute.manager [req-0a027089-6302-42cd-9380-26fd55085eb2 req-62a036bd-a5c9-4efc-affb-504b9491febd service nova] [instance: 4a29bff8-050a-4ad5-9d06-3a59c40b97ee] Refreshing instance network info cache due to event network-changed-53db4677-3e45-4941-aa9d-ab7dee4a3da6. 
{{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11628}} [ 1487.142843] env[62519]: DEBUG oslo_concurrency.lockutils [req-0a027089-6302-42cd-9380-26fd55085eb2 req-62a036bd-a5c9-4efc-affb-504b9491febd service nova] Acquiring lock "refresh_cache-4a29bff8-050a-4ad5-9d06-3a59c40b97ee" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1487.143024] env[62519]: DEBUG oslo_concurrency.lockutils [req-0a027089-6302-42cd-9380-26fd55085eb2 req-62a036bd-a5c9-4efc-affb-504b9491febd service nova] Acquired lock "refresh_cache-4a29bff8-050a-4ad5-9d06-3a59c40b97ee" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1487.143281] env[62519]: DEBUG nova.network.neutron [req-0a027089-6302-42cd-9380-26fd55085eb2 req-62a036bd-a5c9-4efc-affb-504b9491febd service nova] [instance: 4a29bff8-050a-4ad5-9d06-3a59c40b97ee] Refreshing network info cache for port 53db4677-3e45-4941-aa9d-ab7dee4a3da6 {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1487.239855] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2b9c37cf-110c-4ed1-b062-9198747e913f tempest-InstanceActionsV221TestJSON-1566764076 tempest-InstanceActionsV221TestJSON-1566764076-project-member] Lock "c612a1be-fb39-416d-a9d2-d206582e5aeb" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 39.329s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1487.277097] env[62519]: DEBUG oslo_vmware.api [None req-7abad8d6-8e42-4549-a192-8cdae3b16ee9 tempest-ServerExternalEventsTest-627262165 tempest-ServerExternalEventsTest-627262165-project-member] Task: {'id': task-1802164, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1487.303076] env[62519]: DEBUG oslo_vmware.api [None req-7fddec09-f5a6-4eb5-bfe2-235b0e99801c tempest-ServerShowV254Test-1623148250 tempest-ServerShowV254Test-1623148250-project-member] Task: {'id': task-1802165, 'name': ReconfigVM_Task, 'duration_secs': 0.266059} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1487.303367] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-7fddec09-f5a6-4eb5-bfe2-235b0e99801c tempest-ServerShowV254Test-1623148250 tempest-ServerShowV254Test-1623148250-project-member] [instance: 1462d213-3f9a-4c60-8056-0b68f20a4939] Reconfigured VM instance instance-0000001b to attach disk [datastore1] 1462d213-3f9a-4c60-8056-0b68f20a4939/1462d213-3f9a-4c60-8056-0b68f20a4939.vmdk or device None with type sparse {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1487.304043] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-1ac8f293-b4ef-40be-ba60-17524e82c263 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1487.311963] env[62519]: DEBUG oslo_vmware.api [None req-7fddec09-f5a6-4eb5-bfe2-235b0e99801c tempest-ServerShowV254Test-1623148250 tempest-ServerShowV254Test-1623148250-project-member] Waiting for the task: (returnval){ [ 1487.311963] env[62519]: value = "task-1802166" [ 1487.311963] env[62519]: _type = "Task" [ 1487.311963] env[62519]: } to complete. 
{{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1487.320602] env[62519]: DEBUG oslo_vmware.api [None req-7fddec09-f5a6-4eb5-bfe2-235b0e99801c tempest-ServerShowV254Test-1623148250 tempest-ServerShowV254Test-1623148250-project-member] Task: {'id': task-1802166, 'name': Rename_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1487.667764] env[62519]: DEBUG nova.network.neutron [None req-583dd43c-6768-4d96-8b92-a6ff282828ba tempest-DeleteServersAdminTestJSON-456585889 tempest-DeleteServersAdminTestJSON-456585889-project-member] [instance: 40c7a9b8-d541-464a-ba87-76cfc183ae31] Successfully updated port: e1c64107-2501-4168-900e-4b4153b778b9 {{(pid=62519) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1487.777634] env[62519]: DEBUG oslo_vmware.api [None req-7abad8d6-8e42-4549-a192-8cdae3b16ee9 tempest-ServerExternalEventsTest-627262165 tempest-ServerExternalEventsTest-627262165-project-member] Task: {'id': task-1802164, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1487.825810] env[62519]: DEBUG oslo_vmware.api [None req-7fddec09-f5a6-4eb5-bfe2-235b0e99801c tempest-ServerShowV254Test-1623148250 tempest-ServerShowV254Test-1623148250-project-member] Task: {'id': task-1802166, 'name': Rename_Task, 'duration_secs': 0.155136} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1487.828700] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-7fddec09-f5a6-4eb5-bfe2-235b0e99801c tempest-ServerShowV254Test-1623148250 tempest-ServerShowV254Test-1623148250-project-member] [instance: 1462d213-3f9a-4c60-8056-0b68f20a4939] Powering on the VM {{(pid=62519) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1487.829268] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-dee3aa8c-8534-45ad-904a-2a737ed7a94a {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1487.838082] env[62519]: DEBUG oslo_vmware.api [None req-7fddec09-f5a6-4eb5-bfe2-235b0e99801c tempest-ServerShowV254Test-1623148250 tempest-ServerShowV254Test-1623148250-project-member] Waiting for the task: (returnval){ [ 1487.838082] env[62519]: value = "task-1802167" [ 1487.838082] env[62519]: _type = "Task" [ 1487.838082] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1487.853667] env[62519]: DEBUG oslo_vmware.api [None req-7fddec09-f5a6-4eb5-bfe2-235b0e99801c tempest-ServerShowV254Test-1623148250 tempest-ServerShowV254Test-1623148250-project-member] Task: {'id': task-1802167, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1487.867229] env[62519]: DEBUG oslo_concurrency.lockutils [None req-702f15b2-0754-4c5b-b9ec-b97a88471fff tempest-SecurityGroupsTestJSON-38868017 tempest-SecurityGroupsTestJSON-38868017-project-member] Lock "4a29bff8-050a-4ad5-9d06-3a59c40b97ee" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 77.611s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1487.882215] env[62519]: DEBUG nova.network.neutron [req-0a027089-6302-42cd-9380-26fd55085eb2 req-62a036bd-a5c9-4efc-affb-504b9491febd service nova] [instance: 4a29bff8-050a-4ad5-9d06-3a59c40b97ee] Updated VIF entry in instance network info cache for port 53db4677-3e45-4941-aa9d-ab7dee4a3da6. {{(pid=62519) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1487.882468] env[62519]: DEBUG nova.network.neutron [req-0a027089-6302-42cd-9380-26fd55085eb2 req-62a036bd-a5c9-4efc-affb-504b9491febd service nova] [instance: 4a29bff8-050a-4ad5-9d06-3a59c40b97ee] Updating instance_info_cache with network_info: [{"id": "53db4677-3e45-4941-aa9d-ab7dee4a3da6", "address": "fa:16:3e:85:dd:21", "network": {"id": "9dc148fb-a4d7-47bd-b004-56a2552812eb", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-1607343584-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d044d408a8674580b0f5cd52ca6e756d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1f762954-6ca5-4da5-bf0a-5d31c51ec570", "external-id": "nsx-vlan-transportzone-930", "segmentation_id": 930, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap53db4677-3e", "ovs_interfaceid": "53db4677-3e45-4941-aa9d-ab7dee4a3da6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1488.168185] env[62519]: DEBUG oslo_concurrency.lockutils [None req-583dd43c-6768-4d96-8b92-a6ff282828ba tempest-DeleteServersAdminTestJSON-456585889 tempest-DeleteServersAdminTestJSON-456585889-project-member] Acquiring lock "refresh_cache-40c7a9b8-d541-464a-ba87-76cfc183ae31" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1488.168357] env[62519]: DEBUG oslo_concurrency.lockutils [None req-583dd43c-6768-4d96-8b92-a6ff282828ba tempest-DeleteServersAdminTestJSON-456585889 tempest-DeleteServersAdminTestJSON-456585889-project-member] Acquired lock "refresh_cache-40c7a9b8-d541-464a-ba87-76cfc183ae31" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1488.168507] env[62519]: DEBUG nova.network.neutron [None req-583dd43c-6768-4d96-8b92-a6ff282828ba tempest-DeleteServersAdminTestJSON-456585889 tempest-DeleteServersAdminTestJSON-456585889-project-member] [instance: 40c7a9b8-d541-464a-ba87-76cfc183ae31] Building network info cache for instance 
{{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1488.278847] env[62519]: DEBUG oslo_vmware.api [None req-7abad8d6-8e42-4549-a192-8cdae3b16ee9 tempest-ServerExternalEventsTest-627262165 tempest-ServerExternalEventsTest-627262165-project-member] Task: {'id': task-1802164, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.52728} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1488.279134] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-7abad8d6-8e42-4549-a192-8cdae3b16ee9 tempest-ServerExternalEventsTest-627262165 tempest-ServerExternalEventsTest-627262165-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk to [datastore1] f7d5c77d-6c78-4969-b511-2b03ab624c84/f7d5c77d-6c78-4969-b511-2b03ab624c84.vmdk {{(pid=62519) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1488.279349] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-7abad8d6-8e42-4549-a192-8cdae3b16ee9 tempest-ServerExternalEventsTest-627262165 tempest-ServerExternalEventsTest-627262165-project-member] [instance: f7d5c77d-6c78-4969-b511-2b03ab624c84] Extending root virtual disk to 1048576 {{(pid=62519) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1488.279600] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-0a86032d-43a1-44eb-b964-902e481586f7 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1488.287314] env[62519]: DEBUG oslo_vmware.api [None req-7abad8d6-8e42-4549-a192-8cdae3b16ee9 tempest-ServerExternalEventsTest-627262165 tempest-ServerExternalEventsTest-627262165-project-member] Waiting for the task: (returnval){ [ 1488.287314] env[62519]: value = "task-1802168" [ 1488.287314] env[62519]: _type = "Task" [ 1488.287314] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1488.301166] env[62519]: DEBUG oslo_vmware.api [None req-7abad8d6-8e42-4549-a192-8cdae3b16ee9 tempest-ServerExternalEventsTest-627262165 tempest-ServerExternalEventsTest-627262165-project-member] Task: {'id': task-1802168, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1488.330803] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8eee4cb3-4953-449a-a196-06bc411dd8b3 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1488.342761] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1255c3ab-d715-45e3-a9bc-038f48a40c25 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1488.352240] env[62519]: DEBUG oslo_vmware.api [None req-7fddec09-f5a6-4eb5-bfe2-235b0e99801c tempest-ServerShowV254Test-1623148250 tempest-ServerShowV254Test-1623148250-project-member] Task: {'id': task-1802167, 'name': PowerOnVM_Task} progress is 71%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1488.376510] env[62519]: DEBUG nova.compute.manager [None req-384607cc-9c11-4f58-8b9f-4201a167011b tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] [instance: 3b506d10-a427-47b8-ab5f-c35e450b7eb1] Starting instance... {{(pid=62519) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2441}} [ 1488.380511] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6647f1d1-0288-4cb2-b938-62ecab3a47db {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1488.386441] env[62519]: DEBUG oslo_concurrency.lockutils [req-0a027089-6302-42cd-9380-26fd55085eb2 req-62a036bd-a5c9-4efc-affb-504b9491febd service nova] Releasing lock "refresh_cache-4a29bff8-050a-4ad5-9d06-3a59c40b97ee" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1488.390378] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4101284a-8269-4048-beec-58a34ffc2784 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1488.408757] env[62519]: DEBUG nova.compute.provider_tree [None req-6d590639-68af-4fac-aa10-a83b048f703c tempest-ServersTestMultiNic-797441404 tempest-ServersTestMultiNic-797441404-project-member] Inventory has not changed in ProviderTree for provider: f8ca0d98-9158-4b85-ae0e-b106f966dd44 {{(pid=62519) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1488.715098] env[62519]: DEBUG nova.network.neutron [None req-583dd43c-6768-4d96-8b92-a6ff282828ba tempest-DeleteServersAdminTestJSON-456585889 tempest-DeleteServersAdminTestJSON-456585889-project-member] [instance: 40c7a9b8-d541-464a-ba87-76cfc183ae31] Instance cache missing network info. {{(pid=62519) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1488.800607] env[62519]: DEBUG oslo_vmware.api [None req-7abad8d6-8e42-4549-a192-8cdae3b16ee9 tempest-ServerExternalEventsTest-627262165 tempest-ServerExternalEventsTest-627262165-project-member] Task: {'id': task-1802168, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.11111} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1488.800987] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-7abad8d6-8e42-4549-a192-8cdae3b16ee9 tempest-ServerExternalEventsTest-627262165 tempest-ServerExternalEventsTest-627262165-project-member] [instance: f7d5c77d-6c78-4969-b511-2b03ab624c84] Extended root virtual disk {{(pid=62519) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1488.801786] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0265939-ad71-4917-8987-ccff546a507f {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1488.827427] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-7abad8d6-8e42-4549-a192-8cdae3b16ee9 tempest-ServerExternalEventsTest-627262165 tempest-ServerExternalEventsTest-627262165-project-member] [instance: f7d5c77d-6c78-4969-b511-2b03ab624c84] Reconfiguring VM instance instance-0000001a to attach disk [datastore1] f7d5c77d-6c78-4969-b511-2b03ab624c84/f7d5c77d-6c78-4969-b511-2b03ab624c84.vmdk or device None with type sparse {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1488.830324] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-21bcbd57-3df4-4b72-9888-62e96279f115 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1488.857023] env[62519]: DEBUG oslo_vmware.api [None req-7fddec09-f5a6-4eb5-bfe2-235b0e99801c tempest-ServerShowV254Test-1623148250 tempest-ServerShowV254Test-1623148250-project-member] Task: {'id': task-1802167, 'name': PowerOnVM_Task, 'duration_secs': 0.773617} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1488.857392] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-7fddec09-f5a6-4eb5-bfe2-235b0e99801c tempest-ServerShowV254Test-1623148250 tempest-ServerShowV254Test-1623148250-project-member] [instance: 1462d213-3f9a-4c60-8056-0b68f20a4939] Powered on the VM {{(pid=62519) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1488.858019] env[62519]: INFO nova.compute.manager [None req-7fddec09-f5a6-4eb5-bfe2-235b0e99801c tempest-ServerShowV254Test-1623148250 tempest-ServerShowV254Test-1623148250-project-member] [instance: 1462d213-3f9a-4c60-8056-0b68f20a4939] Took 4.90 seconds to spawn the instance on the hypervisor. [ 1488.858019] env[62519]: DEBUG nova.compute.manager [None req-7fddec09-f5a6-4eb5-bfe2-235b0e99801c tempest-ServerShowV254Test-1623148250 tempest-ServerShowV254Test-1623148250-project-member] [instance: 1462d213-3f9a-4c60-8056-0b68f20a4939] Checking state {{(pid=62519) _get_power_state /opt/stack/nova/nova/compute/manager.py:1799}} [ 1488.858178] env[62519]: DEBUG oslo_vmware.api [None req-7abad8d6-8e42-4549-a192-8cdae3b16ee9 tempest-ServerExternalEventsTest-627262165 tempest-ServerExternalEventsTest-627262165-project-member] Waiting for the task: (returnval){ [ 1488.858178] env[62519]: value = "task-1802169" [ 1488.858178] env[62519]: _type = "Task" [ 1488.858178] env[62519]: } to complete. 
{{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1488.859494] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df0282e0-dda5-4810-8542-fc57daa2cde4 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1488.870868] env[62519]: DEBUG oslo_vmware.api [None req-7abad8d6-8e42-4549-a192-8cdae3b16ee9 tempest-ServerExternalEventsTest-627262165 tempest-ServerExternalEventsTest-627262165-project-member] Task: {'id': task-1802169, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1488.908628] env[62519]: DEBUG oslo_concurrency.lockutils [None req-384607cc-9c11-4f58-8b9f-4201a167011b tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1488.916374] env[62519]: DEBUG nova.scheduler.client.report [None req-6d590639-68af-4fac-aa10-a83b048f703c tempest-ServersTestMultiNic-797441404 tempest-ServersTestMultiNic-797441404-project-member] Inventory has not changed for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1489.149847] env[62519]: DEBUG nova.network.neutron [None req-583dd43c-6768-4d96-8b92-a6ff282828ba tempest-DeleteServersAdminTestJSON-456585889 tempest-DeleteServersAdminTestJSON-456585889-project-member] [instance: 40c7a9b8-d541-464a-ba87-76cfc183ae31] Updating instance_info_cache with network_info: [{"id": "e1c64107-2501-4168-900e-4b4153b778b9", "address": "fa:16:3e:f1:6a:08", "network": {"id": "b4c20e4e-b4f0-4757-9ab0-b74eef10b678", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.94", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "4069337684d348e0a1ab4eb6a1a2b14d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fe99da4f-5630-4afd-918b-b327193d8489", "external-id": "nsx-vlan-transportzone-688", "segmentation_id": 688, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape1c64107-25", "ovs_interfaceid": "e1c64107-2501-4168-900e-4b4153b778b9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1489.355211] env[62519]: DEBUG nova.compute.manager [req-bbf284d7-8efc-4d3e-ba4e-129a07a4d870 
req-7066fd20-d6a7-48ee-8fe9-b9d878662972 service nova] [instance: 40c7a9b8-d541-464a-ba87-76cfc183ae31] Received event network-vif-plugged-e1c64107-2501-4168-900e-4b4153b778b9 {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1489.355598] env[62519]: DEBUG oslo_concurrency.lockutils [req-bbf284d7-8efc-4d3e-ba4e-129a07a4d870 req-7066fd20-d6a7-48ee-8fe9-b9d878662972 service nova] Acquiring lock "40c7a9b8-d541-464a-ba87-76cfc183ae31-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1489.356179] env[62519]: DEBUG oslo_concurrency.lockutils [req-bbf284d7-8efc-4d3e-ba4e-129a07a4d870 req-7066fd20-d6a7-48ee-8fe9-b9d878662972 service nova] Lock "40c7a9b8-d541-464a-ba87-76cfc183ae31-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1489.356499] env[62519]: DEBUG oslo_concurrency.lockutils [req-bbf284d7-8efc-4d3e-ba4e-129a07a4d870 req-7066fd20-d6a7-48ee-8fe9-b9d878662972 service nova] Lock "40c7a9b8-d541-464a-ba87-76cfc183ae31-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1489.357093] env[62519]: DEBUG nova.compute.manager [req-bbf284d7-8efc-4d3e-ba4e-129a07a4d870 req-7066fd20-d6a7-48ee-8fe9-b9d878662972 service nova] [instance: 40c7a9b8-d541-464a-ba87-76cfc183ae31] No waiting events found dispatching network-vif-plugged-e1c64107-2501-4168-900e-4b4153b778b9 {{(pid=62519) pop_instance_event /opt/stack/nova/nova/compute/manager.py:323}} [ 1489.357397] env[62519]: WARNING nova.compute.manager [req-bbf284d7-8efc-4d3e-ba4e-129a07a4d870 req-7066fd20-d6a7-48ee-8fe9-b9d878662972 service nova] [instance: 40c7a9b8-d541-464a-ba87-76cfc183ae31] Received unexpected event network-vif-plugged-e1c64107-2501-4168-900e-4b4153b778b9 for instance with vm_state building and task_state spawning. [ 1489.357874] env[62519]: DEBUG nova.compute.manager [req-bbf284d7-8efc-4d3e-ba4e-129a07a4d870 req-7066fd20-d6a7-48ee-8fe9-b9d878662972 service nova] [instance: 40c7a9b8-d541-464a-ba87-76cfc183ae31] Received event network-changed-e1c64107-2501-4168-900e-4b4153b778b9 {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1489.359508] env[62519]: DEBUG nova.compute.manager [req-bbf284d7-8efc-4d3e-ba4e-129a07a4d870 req-7066fd20-d6a7-48ee-8fe9-b9d878662972 service nova] [instance: 40c7a9b8-d541-464a-ba87-76cfc183ae31] Refreshing instance network info cache due to event network-changed-e1c64107-2501-4168-900e-4b4153b778b9. {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11628}} [ 1489.359508] env[62519]: DEBUG oslo_concurrency.lockutils [req-bbf284d7-8efc-4d3e-ba4e-129a07a4d870 req-7066fd20-d6a7-48ee-8fe9-b9d878662972 service nova] Acquiring lock "refresh_cache-40c7a9b8-d541-464a-ba87-76cfc183ae31" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1489.373770] env[62519]: DEBUG oslo_vmware.api [None req-7abad8d6-8e42-4549-a192-8cdae3b16ee9 tempest-ServerExternalEventsTest-627262165 tempest-ServerExternalEventsTest-627262165-project-member] Task: {'id': task-1802169, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1489.382205] env[62519]: INFO nova.compute.manager [None req-7fddec09-f5a6-4eb5-bfe2-235b0e99801c tempest-ServerShowV254Test-1623148250 tempest-ServerShowV254Test-1623148250-project-member] [instance: 1462d213-3f9a-4c60-8056-0b68f20a4939] Took 40.46 seconds to build instance. [ 1489.424202] env[62519]: DEBUG oslo_concurrency.lockutils [None req-6d590639-68af-4fac-aa10-a83b048f703c tempest-ServersTestMultiNic-797441404 tempest-ServersTestMultiNic-797441404-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.718s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1489.424202] env[62519]: DEBUG nova.compute.manager [None req-6d590639-68af-4fac-aa10-a83b048f703c tempest-ServersTestMultiNic-797441404 tempest-ServersTestMultiNic-797441404-project-member] [instance: cfefa7c8-4986-4ad0-ac20-8784ee44a737] Start building networks asynchronously for instance. {{(pid=62519) _build_resources /opt/stack/nova/nova/compute/manager.py:2838}} [ 1489.425578] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 33.161s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1489.425802] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1489.425980] env[62519]: DEBUG nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62519) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1489.426302] env[62519]: DEBUG oslo_concurrency.lockutils [None req-a1c40e59-33e7-4dc4-932b-b210029267f9 tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 32.609s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1489.428391] env[62519]: INFO nova.compute.claims [None req-a1c40e59-33e7-4dc4-932b-b210029267f9 tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] [instance: f59a31e4-7fb9-4de7-b35f-da811a305f85] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1489.433718] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7dcbc49-dee1-45bc-be57-434e9f9c86f5 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1489.444932] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-012dcdab-9382-44f7-b562-b389df4d4efc {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1489.466364] env[62519]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66c6040f-35c6-478f-80ad-9d9eb680d7d4 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1489.479038] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5318504d-7be8-46a4-87b7-c3e61753dfe4 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1489.515463] env[62519]: DEBUG nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=178919MB free_disk=158GB free_vcpus=48 pci_devices=None {{(pid=62519) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1489.515681] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1489.656611] env[62519]: DEBUG oslo_concurrency.lockutils [None req-583dd43c-6768-4d96-8b92-a6ff282828ba tempest-DeleteServersAdminTestJSON-456585889 tempest-DeleteServersAdminTestJSON-456585889-project-member] Releasing lock "refresh_cache-40c7a9b8-d541-464a-ba87-76cfc183ae31" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1489.657332] env[62519]: DEBUG nova.compute.manager [None req-583dd43c-6768-4d96-8b92-a6ff282828ba tempest-DeleteServersAdminTestJSON-456585889 tempest-DeleteServersAdminTestJSON-456585889-project-member] [instance: 40c7a9b8-d541-464a-ba87-76cfc183ae31] Instance network_info: |[{"id": "e1c64107-2501-4168-900e-4b4153b778b9", "address": "fa:16:3e:f1:6a:08", "network": {"id": "b4c20e4e-b4f0-4757-9ab0-b74eef10b678", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.94", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "4069337684d348e0a1ab4eb6a1a2b14d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fe99da4f-5630-4afd-918b-b327193d8489", "external-id": "nsx-vlan-transportzone-688", "segmentation_id": 688, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape1c64107-25", "ovs_interfaceid": "e1c64107-2501-4168-900e-4b4153b778b9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62519) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2004}} [ 1489.657961] env[62519]: DEBUG oslo_concurrency.lockutils [req-bbf284d7-8efc-4d3e-ba4e-129a07a4d870 req-7066fd20-d6a7-48ee-8fe9-b9d878662972 service nova] Acquired lock "refresh_cache-40c7a9b8-d541-464a-ba87-76cfc183ae31" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1489.658214] env[62519]: DEBUG nova.network.neutron [req-bbf284d7-8efc-4d3e-ba4e-129a07a4d870 req-7066fd20-d6a7-48ee-8fe9-b9d878662972 service 
nova] [instance: 40c7a9b8-d541-464a-ba87-76cfc183ae31] Refreshing network info cache for port e1c64107-2501-4168-900e-4b4153b778b9 {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1489.659474] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-583dd43c-6768-4d96-8b92-a6ff282828ba tempest-DeleteServersAdminTestJSON-456585889 tempest-DeleteServersAdminTestJSON-456585889-project-member] [instance: 40c7a9b8-d541-464a-ba87-76cfc183ae31] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:f1:6a:08', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'fe99da4f-5630-4afd-918b-b327193d8489', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'e1c64107-2501-4168-900e-4b4153b778b9', 'vif_model': 'vmxnet3'}] {{(pid=62519) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1489.668218] env[62519]: DEBUG oslo.service.loopingcall [None req-583dd43c-6768-4d96-8b92-a6ff282828ba tempest-DeleteServersAdminTestJSON-456585889 tempest-DeleteServersAdminTestJSON-456585889-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62519) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1489.669135] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 40c7a9b8-d541-464a-ba87-76cfc183ae31] Creating VM on the ESX host {{(pid=62519) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1489.669358] env[62519]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-825c2021-a43f-4c0c-a361-5807204f8b28 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1489.692318] env[62519]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1489.692318] env[62519]: value = "task-1802170" [ 1489.692318] env[62519]: _type = "Task" [ 1489.692318] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1489.700988] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1802170, 'name': CreateVM_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1489.877114] env[62519]: DEBUG oslo_vmware.api [None req-7abad8d6-8e42-4549-a192-8cdae3b16ee9 tempest-ServerExternalEventsTest-627262165 tempest-ServerExternalEventsTest-627262165-project-member] Task: {'id': task-1802169, 'name': ReconfigVM_Task, 'duration_secs': 0.73163} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1489.878183] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-7abad8d6-8e42-4549-a192-8cdae3b16ee9 tempest-ServerExternalEventsTest-627262165 tempest-ServerExternalEventsTest-627262165-project-member] [instance: f7d5c77d-6c78-4969-b511-2b03ab624c84] Reconfigured VM instance instance-0000001a to attach disk [datastore1] f7d5c77d-6c78-4969-b511-2b03ab624c84/f7d5c77d-6c78-4969-b511-2b03ab624c84.vmdk or device None with type sparse {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1489.878325] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-9537bdda-91bb-4235-a129-0fe653caea3d {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1489.887460] env[62519]: DEBUG oslo_vmware.api [None req-7abad8d6-8e42-4549-a192-8cdae3b16ee9 tempest-ServerExternalEventsTest-627262165 tempest-ServerExternalEventsTest-627262165-project-member] Waiting for the task: (returnval){ [ 1489.887460] env[62519]: value = "task-1802171" [ 1489.887460] env[62519]: _type = "Task" [ 1489.887460] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1489.896493] env[62519]: DEBUG oslo_vmware.api [None req-7abad8d6-8e42-4549-a192-8cdae3b16ee9 tempest-ServerExternalEventsTest-627262165 tempest-ServerExternalEventsTest-627262165-project-member] Task: {'id': task-1802171, 'name': Rename_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1489.935852] env[62519]: DEBUG nova.compute.utils [None req-6d590639-68af-4fac-aa10-a83b048f703c tempest-ServersTestMultiNic-797441404 tempest-ServersTestMultiNic-797441404-project-member] Using /dev/sd instead of None {{(pid=62519) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1489.939880] env[62519]: DEBUG nova.compute.manager [None req-6d590639-68af-4fac-aa10-a83b048f703c tempest-ServersTestMultiNic-797441404 tempest-ServersTestMultiNic-797441404-project-member] [instance: cfefa7c8-4986-4ad0-ac20-8784ee44a737] Allocating IP information in the background. 
{{(pid=62519) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1989}} [ 1489.940202] env[62519]: DEBUG nova.network.neutron [None req-6d590639-68af-4fac-aa10-a83b048f703c tempest-ServersTestMultiNic-797441404 tempest-ServersTestMultiNic-797441404-project-member] [instance: cfefa7c8-4986-4ad0-ac20-8784ee44a737] allocate_for_instance() {{(pid=62519) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1490.043493] env[62519]: DEBUG nova.policy [None req-6d590639-68af-4fac-aa10-a83b048f703c tempest-ServersTestMultiNic-797441404 tempest-ServersTestMultiNic-797441404-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ef1d07fc4c0a400dab4ec451e7c1fcfc', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '08813c8c5d0b45dbab5a05ed08ef9531', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62519) authorize /opt/stack/nova/nova/policy.py:192}} [ 1490.113477] env[62519]: INFO nova.compute.manager [None req-261745f1-111f-411a-bbb8-413ef8a82a9e tempest-ServerShowV254Test-1623148250 tempest-ServerShowV254Test-1623148250-project-member] [instance: 1462d213-3f9a-4c60-8056-0b68f20a4939] Rebuilding instance [ 1490.185051] env[62519]: DEBUG nova.compute.manager [None req-261745f1-111f-411a-bbb8-413ef8a82a9e tempest-ServerShowV254Test-1623148250 tempest-ServerShowV254Test-1623148250-project-member] [instance: 1462d213-3f9a-4c60-8056-0b68f20a4939] Checking state {{(pid=62519) _get_power_state /opt/stack/nova/nova/compute/manager.py:1799}} [ 1490.185992] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78f6507d-dc00-45ad-b985-cdae2ab5e533 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1490.209779] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1802170, 'name': CreateVM_Task, 'duration_secs': 0.477783} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1490.209971] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 40c7a9b8-d541-464a-ba87-76cfc183ae31] Created VM on the ESX host {{(pid=62519) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1490.210697] env[62519]: DEBUG oslo_concurrency.lockutils [None req-583dd43c-6768-4d96-8b92-a6ff282828ba tempest-DeleteServersAdminTestJSON-456585889 tempest-DeleteServersAdminTestJSON-456585889-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1490.210880] env[62519]: DEBUG oslo_concurrency.lockutils [None req-583dd43c-6768-4d96-8b92-a6ff282828ba tempest-DeleteServersAdminTestJSON-456585889 tempest-DeleteServersAdminTestJSON-456585889-project-member] Acquired lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1490.212059] env[62519]: DEBUG oslo_concurrency.lockutils [None req-583dd43c-6768-4d96-8b92-a6ff282828ba tempest-DeleteServersAdminTestJSON-456585889 tempest-DeleteServersAdminTestJSON-456585889-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1490.212139] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-64b2f4d1-9dd0-41e6-a86c-456ce4727e86 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1490.217803] env[62519]: DEBUG oslo_vmware.api [None req-583dd43c-6768-4d96-8b92-a6ff282828ba tempest-DeleteServersAdminTestJSON-456585889 tempest-DeleteServersAdminTestJSON-456585889-project-member] Waiting for the task: (returnval){ [ 1490.217803] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]5259bc7b-fd34-c4c5-88fb-55183f4426fb" [ 1490.217803] env[62519]: _type = "Task" [ 1490.217803] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1490.228050] env[62519]: DEBUG oslo_vmware.api [None req-583dd43c-6768-4d96-8b92-a6ff282828ba tempest-DeleteServersAdminTestJSON-456585889 tempest-DeleteServersAdminTestJSON-456585889-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]5259bc7b-fd34-c4c5-88fb-55183f4426fb, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1490.387037] env[62519]: DEBUG nova.network.neutron [None req-6d590639-68af-4fac-aa10-a83b048f703c tempest-ServersTestMultiNic-797441404 tempest-ServersTestMultiNic-797441404-project-member] [instance: cfefa7c8-4986-4ad0-ac20-8784ee44a737] Successfully created port: dd1ebcef-cdc1-4641-8622-443a2365b497 {{(pid=62519) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1490.407073] env[62519]: DEBUG oslo_vmware.api [None req-7abad8d6-8e42-4549-a192-8cdae3b16ee9 tempest-ServerExternalEventsTest-627262165 tempest-ServerExternalEventsTest-627262165-project-member] Task: {'id': task-1802171, 'name': Rename_Task, 'duration_secs': 0.214616} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1490.407371] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-7abad8d6-8e42-4549-a192-8cdae3b16ee9 tempest-ServerExternalEventsTest-627262165 tempest-ServerExternalEventsTest-627262165-project-member] [instance: f7d5c77d-6c78-4969-b511-2b03ab624c84] Powering on the VM {{(pid=62519) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1490.407638] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a72a8a58-c5ed-40cb-80d7-b094bc62282f {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1490.416894] env[62519]: DEBUG oslo_vmware.api [None req-7abad8d6-8e42-4549-a192-8cdae3b16ee9 tempest-ServerExternalEventsTest-627262165 tempest-ServerExternalEventsTest-627262165-project-member] Waiting for the task: (returnval){ [ 1490.416894] env[62519]: value = "task-1802172" [ 1490.416894] env[62519]: _type = "Task" [ 1490.416894] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1490.426156] env[62519]: DEBUG oslo_vmware.api [None req-7abad8d6-8e42-4549-a192-8cdae3b16ee9 tempest-ServerExternalEventsTest-627262165 tempest-ServerExternalEventsTest-627262165-project-member] Task: {'id': task-1802172, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1490.441034] env[62519]: DEBUG nova.compute.manager [None req-6d590639-68af-4fac-aa10-a83b048f703c tempest-ServersTestMultiNic-797441404 tempest-ServersTestMultiNic-797441404-project-member] [instance: cfefa7c8-4986-4ad0-ac20-8784ee44a737] Start building block device mappings for instance. {{(pid=62519) _build_resources /opt/stack/nova/nova/compute/manager.py:2873}} [ 1490.618440] env[62519]: DEBUG nova.network.neutron [req-bbf284d7-8efc-4d3e-ba4e-129a07a4d870 req-7066fd20-d6a7-48ee-8fe9-b9d878662972 service nova] [instance: 40c7a9b8-d541-464a-ba87-76cfc183ae31] Updated VIF entry in instance network info cache for port e1c64107-2501-4168-900e-4b4153b778b9. 
{{(pid=62519) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1490.618664] env[62519]: DEBUG nova.network.neutron [req-bbf284d7-8efc-4d3e-ba4e-129a07a4d870 req-7066fd20-d6a7-48ee-8fe9-b9d878662972 service nova] [instance: 40c7a9b8-d541-464a-ba87-76cfc183ae31] Updating instance_info_cache with network_info: [{"id": "e1c64107-2501-4168-900e-4b4153b778b9", "address": "fa:16:3e:f1:6a:08", "network": {"id": "b4c20e4e-b4f0-4757-9ab0-b74eef10b678", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.94", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "4069337684d348e0a1ab4eb6a1a2b14d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fe99da4f-5630-4afd-918b-b327193d8489", "external-id": "nsx-vlan-transportzone-688", "segmentation_id": 688, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape1c64107-25", "ovs_interfaceid": "e1c64107-2501-4168-900e-4b4153b778b9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1490.681691] env[62519]: DEBUG nova.network.neutron [None req-6d590639-68af-4fac-aa10-a83b048f703c tempest-ServersTestMultiNic-797441404 tempest-ServersTestMultiNic-797441404-project-member] [instance: cfefa7c8-4986-4ad0-ac20-8784ee44a737] Successfully created port: 6a3b6bdf-835f-429c-982d-00028308b4b2 {{(pid=62519) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1490.734416] env[62519]: DEBUG oslo_vmware.api [None req-583dd43c-6768-4d96-8b92-a6ff282828ba tempest-DeleteServersAdminTestJSON-456585889 tempest-DeleteServersAdminTestJSON-456585889-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]5259bc7b-fd34-c4c5-88fb-55183f4426fb, 'name': SearchDatastore_Task, 'duration_secs': 0.01154} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1490.734733] env[62519]: DEBUG oslo_concurrency.lockutils [None req-583dd43c-6768-4d96-8b92-a6ff282828ba tempest-DeleteServersAdminTestJSON-456585889 tempest-DeleteServersAdminTestJSON-456585889-project-member] Releasing lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1490.734960] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-583dd43c-6768-4d96-8b92-a6ff282828ba tempest-DeleteServersAdminTestJSON-456585889 tempest-DeleteServersAdminTestJSON-456585889-project-member] [instance: 40c7a9b8-d541-464a-ba87-76cfc183ae31] Processing image 15793716-f1d9-4a86-9030-717adf498693 {{(pid=62519) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1490.735222] env[62519]: DEBUG oslo_concurrency.lockutils [None req-583dd43c-6768-4d96-8b92-a6ff282828ba tempest-DeleteServersAdminTestJSON-456585889 tempest-DeleteServersAdminTestJSON-456585889-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1490.735364] env[62519]: DEBUG oslo_concurrency.lockutils [None req-583dd43c-6768-4d96-8b92-a6ff282828ba tempest-DeleteServersAdminTestJSON-456585889 tempest-DeleteServersAdminTestJSON-456585889-project-member] Acquired lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1490.735534] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-583dd43c-6768-4d96-8b92-a6ff282828ba tempest-DeleteServersAdminTestJSON-456585889 tempest-DeleteServersAdminTestJSON-456585889-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62519) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1490.735818] env[62519]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-61c75964-4182-4947-b672-be35db602170 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1490.746905] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-583dd43c-6768-4d96-8b92-a6ff282828ba tempest-DeleteServersAdminTestJSON-456585889 tempest-DeleteServersAdminTestJSON-456585889-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62519) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1490.747042] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-583dd43c-6768-4d96-8b92-a6ff282828ba tempest-DeleteServersAdminTestJSON-456585889 tempest-DeleteServersAdminTestJSON-456585889-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62519) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1490.747741] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ac18d93a-a395-4b56-8e6c-d9cfb6b00c56 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1490.756982] env[62519]: DEBUG oslo_vmware.api [None req-583dd43c-6768-4d96-8b92-a6ff282828ba tempest-DeleteServersAdminTestJSON-456585889 tempest-DeleteServersAdminTestJSON-456585889-project-member] Waiting for the task: (returnval){ [ 1490.756982] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]52b056cb-575d-9959-9499-0024220b1619" [ 1490.756982] env[62519]: _type = "Task" [ 1490.756982] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1490.768012] env[62519]: DEBUG oslo_vmware.api [None req-583dd43c-6768-4d96-8b92-a6ff282828ba tempest-DeleteServersAdminTestJSON-456585889 tempest-DeleteServersAdminTestJSON-456585889-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52b056cb-575d-9959-9499-0024220b1619, 'name': SearchDatastore_Task, 'duration_secs': 0.011255} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1490.769184] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c2769e6a-9730-41e2-b79f-12f6e703483d {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1490.779098] env[62519]: DEBUG oslo_vmware.api [None req-583dd43c-6768-4d96-8b92-a6ff282828ba tempest-DeleteServersAdminTestJSON-456585889 tempest-DeleteServersAdminTestJSON-456585889-project-member] Waiting for the task: (returnval){ [ 1490.779098] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]523bcf0a-b17e-8d22-b2c8-ee1061673c8e" [ 1490.779098] env[62519]: _type = "Task" [ 1490.779098] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1490.790882] env[62519]: DEBUG oslo_vmware.api [None req-583dd43c-6768-4d96-8b92-a6ff282828ba tempest-DeleteServersAdminTestJSON-456585889 tempest-DeleteServersAdminTestJSON-456585889-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]523bcf0a-b17e-8d22-b2c8-ee1061673c8e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1490.898788] env[62519]: DEBUG oslo_concurrency.lockutils [None req-7fddec09-f5a6-4eb5-bfe2-235b0e99801c tempest-ServerShowV254Test-1623148250 tempest-ServerShowV254Test-1623148250-project-member] Lock "1462d213-3f9a-4c60-8056-0b68f20a4939" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 75.334s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1490.933671] env[62519]: DEBUG oslo_vmware.api [None req-7abad8d6-8e42-4549-a192-8cdae3b16ee9 tempest-ServerExternalEventsTest-627262165 tempest-ServerExternalEventsTest-627262165-project-member] Task: {'id': task-1802172, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1490.960760] env[62519]: DEBUG nova.network.neutron [None req-6d590639-68af-4fac-aa10-a83b048f703c tempest-ServersTestMultiNic-797441404 tempest-ServersTestMultiNic-797441404-project-member] [instance: cfefa7c8-4986-4ad0-ac20-8784ee44a737] Successfully created port: 7d9ab028-27a4-47e2-8a46-08e1672bd6fb {{(pid=62519) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1491.123190] env[62519]: DEBUG oslo_concurrency.lockutils [req-bbf284d7-8efc-4d3e-ba4e-129a07a4d870 req-7066fd20-d6a7-48ee-8fe9-b9d878662972 service nova] Releasing lock "refresh_cache-40c7a9b8-d541-464a-ba87-76cfc183ae31" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1491.123190] env[62519]: DEBUG nova.compute.manager [req-bbf284d7-8efc-4d3e-ba4e-129a07a4d870 req-7066fd20-d6a7-48ee-8fe9-b9d878662972 service nova] [instance: 4a29bff8-050a-4ad5-9d06-3a59c40b97ee] Received event network-changed-53db4677-3e45-4941-aa9d-ab7dee4a3da6 {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1491.123190] env[62519]: DEBUG nova.compute.manager [req-bbf284d7-8efc-4d3e-ba4e-129a07a4d870 req-7066fd20-d6a7-48ee-8fe9-b9d878662972 service nova] [instance: 4a29bff8-050a-4ad5-9d06-3a59c40b97ee] Refreshing instance network info cache due to event network-changed-53db4677-3e45-4941-aa9d-ab7dee4a3da6. {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11628}} [ 1491.123190] env[62519]: DEBUG oslo_concurrency.lockutils [req-bbf284d7-8efc-4d3e-ba4e-129a07a4d870 req-7066fd20-d6a7-48ee-8fe9-b9d878662972 service nova] Acquiring lock "refresh_cache-4a29bff8-050a-4ad5-9d06-3a59c40b97ee" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1491.123190] env[62519]: DEBUG oslo_concurrency.lockutils [req-bbf284d7-8efc-4d3e-ba4e-129a07a4d870 req-7066fd20-d6a7-48ee-8fe9-b9d878662972 service nova] Acquired lock "refresh_cache-4a29bff8-050a-4ad5-9d06-3a59c40b97ee" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1491.123391] env[62519]: DEBUG nova.network.neutron [req-bbf284d7-8efc-4d3e-ba4e-129a07a4d870 req-7066fd20-d6a7-48ee-8fe9-b9d878662972 service nova] [instance: 4a29bff8-050a-4ad5-9d06-3a59c40b97ee] Refreshing network info cache for port 53db4677-3e45-4941-aa9d-ab7dee4a3da6 {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1491.126729] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a27f3a3-efa6-4227-b9f6-b0c800a4e7f8 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1491.136535] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b3e3515-b08a-4103-b2f9-a767565db3eb {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1491.180343] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec0ca441-35ed-4d2e-b82f-ab15b3f2b579 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1491.192440] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-0932c3a8-62c0-4aae-8f8c-a0aaba95f125 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1491.208462] env[62519]: DEBUG nova.compute.provider_tree [None req-a1c40e59-33e7-4dc4-932b-b210029267f9 tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Inventory has not changed in ProviderTree for provider: f8ca0d98-9158-4b85-ae0e-b106f966dd44 {{(pid=62519) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1491.214027] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-261745f1-111f-411a-bbb8-413ef8a82a9e tempest-ServerShowV254Test-1623148250 tempest-ServerShowV254Test-1623148250-project-member] [instance: 1462d213-3f9a-4c60-8056-0b68f20a4939] Powering off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1491.214329] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-bfaefb3e-42a6-4200-8df1-5948d5985a63 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1491.223971] env[62519]: DEBUG oslo_vmware.api [None req-261745f1-111f-411a-bbb8-413ef8a82a9e tempest-ServerShowV254Test-1623148250 tempest-ServerShowV254Test-1623148250-project-member] Waiting for the task: (returnval){ [ 1491.223971] env[62519]: value = "task-1802173" [ 1491.223971] env[62519]: _type = "Task" [ 1491.223971] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1491.233558] env[62519]: DEBUG oslo_vmware.api [None req-261745f1-111f-411a-bbb8-413ef8a82a9e tempest-ServerShowV254Test-1623148250 tempest-ServerShowV254Test-1623148250-project-member] Task: {'id': task-1802173, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1491.291994] env[62519]: DEBUG oslo_vmware.api [None req-583dd43c-6768-4d96-8b92-a6ff282828ba tempest-DeleteServersAdminTestJSON-456585889 tempest-DeleteServersAdminTestJSON-456585889-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]523bcf0a-b17e-8d22-b2c8-ee1061673c8e, 'name': SearchDatastore_Task, 'duration_secs': 0.012449} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1491.292449] env[62519]: DEBUG oslo_concurrency.lockutils [None req-583dd43c-6768-4d96-8b92-a6ff282828ba tempest-DeleteServersAdminTestJSON-456585889 tempest-DeleteServersAdminTestJSON-456585889-project-member] Releasing lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1491.292696] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-583dd43c-6768-4d96-8b92-a6ff282828ba tempest-DeleteServersAdminTestJSON-456585889 tempest-DeleteServersAdminTestJSON-456585889-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk to [datastore1] 40c7a9b8-d541-464a-ba87-76cfc183ae31/40c7a9b8-d541-464a-ba87-76cfc183ae31.vmdk {{(pid=62519) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1491.293066] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-363d4899-5b52-4a88-a65b-0c3297f3fcec {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1491.302587] env[62519]: DEBUG oslo_vmware.api [None req-583dd43c-6768-4d96-8b92-a6ff282828ba tempest-DeleteServersAdminTestJSON-456585889 tempest-DeleteServersAdminTestJSON-456585889-project-member] Waiting for the task: (returnval){ [ 1491.302587] env[62519]: value = "task-1802174" [ 1491.302587] env[62519]: _type = "Task" [ 1491.302587] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1491.312205] env[62519]: DEBUG oslo_vmware.api [None req-583dd43c-6768-4d96-8b92-a6ff282828ba tempest-DeleteServersAdminTestJSON-456585889 tempest-DeleteServersAdminTestJSON-456585889-project-member] Task: {'id': task-1802174, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1491.402291] env[62519]: DEBUG nova.compute.manager [None req-6acc3e5a-9e65-48ca-804d-bd19df3814de tempest-FloatingIPsAssociationTestJSON-131832216 tempest-FloatingIPsAssociationTestJSON-131832216-project-member] [instance: 09eefc1a-011b-4d2c-ab75-a1fcee740907] Starting instance... {{(pid=62519) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2441}} [ 1491.429913] env[62519]: DEBUG oslo_vmware.api [None req-7abad8d6-8e42-4549-a192-8cdae3b16ee9 tempest-ServerExternalEventsTest-627262165 tempest-ServerExternalEventsTest-627262165-project-member] Task: {'id': task-1802172, 'name': PowerOnVM_Task, 'duration_secs': 0.916613} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1491.430432] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-7abad8d6-8e42-4549-a192-8cdae3b16ee9 tempest-ServerExternalEventsTest-627262165 tempest-ServerExternalEventsTest-627262165-project-member] [instance: f7d5c77d-6c78-4969-b511-2b03ab624c84] Powered on the VM {{(pid=62519) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1491.432021] env[62519]: INFO nova.compute.manager [None req-7abad8d6-8e42-4549-a192-8cdae3b16ee9 tempest-ServerExternalEventsTest-627262165 tempest-ServerExternalEventsTest-627262165-project-member] [instance: f7d5c77d-6c78-4969-b511-2b03ab624c84] Took 10.18 seconds to spawn the instance on the hypervisor. [ 1491.432021] env[62519]: DEBUG nova.compute.manager [None req-7abad8d6-8e42-4549-a192-8cdae3b16ee9 tempest-ServerExternalEventsTest-627262165 tempest-ServerExternalEventsTest-627262165-project-member] [instance: f7d5c77d-6c78-4969-b511-2b03ab624c84] Checking state {{(pid=62519) _get_power_state /opt/stack/nova/nova/compute/manager.py:1799}} [ 1491.432021] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7a73298-a4ac-4db8-8dfb-101b3023c91d {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1491.459225] env[62519]: DEBUG nova.compute.manager [None req-6d590639-68af-4fac-aa10-a83b048f703c tempest-ServersTestMultiNic-797441404 tempest-ServersTestMultiNic-797441404-project-member] [instance: cfefa7c8-4986-4ad0-ac20-8784ee44a737] Start spawning the instance on the hypervisor. {{(pid=62519) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2647}} [ 1491.483848] env[62519]: DEBUG nova.virt.hardware [None req-6d590639-68af-4fac-aa10-a83b048f703c tempest-ServersTestMultiNic-797441404 tempest-ServersTestMultiNic-797441404-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T07:56:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T07:55:55Z,direct_url=,disk_format='vmdk',id=15793716-f1d9-4a86-9030-717adf498693,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4069337684d348e0a1ab4eb6a1a2b14d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T07:55:56Z,virtual_size=,visibility=), allow threads: False {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1491.483848] env[62519]: DEBUG nova.virt.hardware [None req-6d590639-68af-4fac-aa10-a83b048f703c tempest-ServersTestMultiNic-797441404 tempest-ServersTestMultiNic-797441404-project-member] Flavor limits 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1491.483848] env[62519]: DEBUG nova.virt.hardware [None req-6d590639-68af-4fac-aa10-a83b048f703c tempest-ServersTestMultiNic-797441404 tempest-ServersTestMultiNic-797441404-project-member] Image limits 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1491.483848] env[62519]: DEBUG nova.virt.hardware [None req-6d590639-68af-4fac-aa10-a83b048f703c 
tempest-ServersTestMultiNic-797441404 tempest-ServersTestMultiNic-797441404-project-member] Flavor pref 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1491.484196] env[62519]: DEBUG nova.virt.hardware [None req-6d590639-68af-4fac-aa10-a83b048f703c tempest-ServersTestMultiNic-797441404 tempest-ServersTestMultiNic-797441404-project-member] Image pref 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1491.484414] env[62519]: DEBUG nova.virt.hardware [None req-6d590639-68af-4fac-aa10-a83b048f703c tempest-ServersTestMultiNic-797441404 tempest-ServersTestMultiNic-797441404-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1491.484639] env[62519]: DEBUG nova.virt.hardware [None req-6d590639-68af-4fac-aa10-a83b048f703c tempest-ServersTestMultiNic-797441404 tempest-ServersTestMultiNic-797441404-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1491.485278] env[62519]: DEBUG nova.virt.hardware [None req-6d590639-68af-4fac-aa10-a83b048f703c tempest-ServersTestMultiNic-797441404 tempest-ServersTestMultiNic-797441404-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62519) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1491.485278] env[62519]: DEBUG nova.virt.hardware [None req-6d590639-68af-4fac-aa10-a83b048f703c tempest-ServersTestMultiNic-797441404 tempest-ServersTestMultiNic-797441404-project-member] Got 1 possible topologies {{(pid=62519) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1491.485278] env[62519]: DEBUG nova.virt.hardware [None req-6d590639-68af-4fac-aa10-a83b048f703c tempest-ServersTestMultiNic-797441404 tempest-ServersTestMultiNic-797441404-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1491.485419] env[62519]: DEBUG nova.virt.hardware [None req-6d590639-68af-4fac-aa10-a83b048f703c tempest-ServersTestMultiNic-797441404 tempest-ServersTestMultiNic-797441404-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1491.486666] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0340b0b-ee49-4c64-87bd-1ea6db983c96 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1491.496151] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce26ebc9-b504-4d2f-8dab-0395b9d4436d {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1491.714028] env[62519]: DEBUG nova.scheduler.client.report [None req-a1c40e59-33e7-4dc4-932b-b210029267f9 tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Inventory has not changed for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 
'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1491.740779] env[62519]: DEBUG oslo_vmware.api [None req-261745f1-111f-411a-bbb8-413ef8a82a9e tempest-ServerShowV254Test-1623148250 tempest-ServerShowV254Test-1623148250-project-member] Task: {'id': task-1802173, 'name': PowerOffVM_Task, 'duration_secs': 0.199515} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1491.744403] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-261745f1-111f-411a-bbb8-413ef8a82a9e tempest-ServerShowV254Test-1623148250 tempest-ServerShowV254Test-1623148250-project-member] [instance: 1462d213-3f9a-4c60-8056-0b68f20a4939] Powered off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1491.744997] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-261745f1-111f-411a-bbb8-413ef8a82a9e tempest-ServerShowV254Test-1623148250 tempest-ServerShowV254Test-1623148250-project-member] [instance: 1462d213-3f9a-4c60-8056-0b68f20a4939] Destroying instance {{(pid=62519) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1491.746339] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-089508a0-0490-463e-bb2a-d843f4bb2ba2 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1491.756346] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-261745f1-111f-411a-bbb8-413ef8a82a9e tempest-ServerShowV254Test-1623148250 tempest-ServerShowV254Test-1623148250-project-member] [instance: 1462d213-3f9a-4c60-8056-0b68f20a4939] Unregistering the VM {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1491.756847] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f5bf125e-83b8-4ea0-874e-ed182dd9237c {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1491.787866] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-261745f1-111f-411a-bbb8-413ef8a82a9e tempest-ServerShowV254Test-1623148250 tempest-ServerShowV254Test-1623148250-project-member] [instance: 1462d213-3f9a-4c60-8056-0b68f20a4939] Unregistered the VM {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1491.790022] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-261745f1-111f-411a-bbb8-413ef8a82a9e tempest-ServerShowV254Test-1623148250 tempest-ServerShowV254Test-1623148250-project-member] [instance: 1462d213-3f9a-4c60-8056-0b68f20a4939] Deleting contents of the VM from datastore datastore1 {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1491.790022] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-261745f1-111f-411a-bbb8-413ef8a82a9e tempest-ServerShowV254Test-1623148250 tempest-ServerShowV254Test-1623148250-project-member] Deleting the datastore file [datastore1] 1462d213-3f9a-4c60-8056-0b68f20a4939 {{(pid=62519) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1491.790022] env[62519]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with 
opID=oslo.vmware-1517b2e8-0cfd-46e1-85db-5f6e99fddd8d {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1491.802021] env[62519]: DEBUG oslo_vmware.api [None req-261745f1-111f-411a-bbb8-413ef8a82a9e tempest-ServerShowV254Test-1623148250 tempest-ServerShowV254Test-1623148250-project-member] Waiting for the task: (returnval){ [ 1491.802021] env[62519]: value = "task-1802176" [ 1491.802021] env[62519]: _type = "Task" [ 1491.802021] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1491.817084] env[62519]: DEBUG oslo_vmware.api [None req-261745f1-111f-411a-bbb8-413ef8a82a9e tempest-ServerShowV254Test-1623148250 tempest-ServerShowV254Test-1623148250-project-member] Task: {'id': task-1802176, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1491.821429] env[62519]: DEBUG oslo_vmware.api [None req-583dd43c-6768-4d96-8b92-a6ff282828ba tempest-DeleteServersAdminTestJSON-456585889 tempest-DeleteServersAdminTestJSON-456585889-project-member] Task: {'id': task-1802174, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1491.923674] env[62519]: DEBUG oslo_concurrency.lockutils [None req-6acc3e5a-9e65-48ca-804d-bd19df3814de tempest-FloatingIPsAssociationTestJSON-131832216 tempest-FloatingIPsAssociationTestJSON-131832216-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1491.953240] env[62519]: INFO nova.compute.manager [None req-7abad8d6-8e42-4549-a192-8cdae3b16ee9 tempest-ServerExternalEventsTest-627262165 tempest-ServerExternalEventsTest-627262165-project-member] [instance: f7d5c77d-6c78-4969-b511-2b03ab624c84] Took 45.58 seconds to build instance. [ 1492.004047] env[62519]: DEBUG nova.network.neutron [req-bbf284d7-8efc-4d3e-ba4e-129a07a4d870 req-7066fd20-d6a7-48ee-8fe9-b9d878662972 service nova] [instance: 4a29bff8-050a-4ad5-9d06-3a59c40b97ee] Updated VIF entry in instance network info cache for port 53db4677-3e45-4941-aa9d-ab7dee4a3da6. 
{{(pid=62519) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1492.006813] env[62519]: DEBUG nova.network.neutron [req-bbf284d7-8efc-4d3e-ba4e-129a07a4d870 req-7066fd20-d6a7-48ee-8fe9-b9d878662972 service nova] [instance: 4a29bff8-050a-4ad5-9d06-3a59c40b97ee] Updating instance_info_cache with network_info: [{"id": "53db4677-3e45-4941-aa9d-ab7dee4a3da6", "address": "fa:16:3e:85:dd:21", "network": {"id": "9dc148fb-a4d7-47bd-b004-56a2552812eb", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-1607343584-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d044d408a8674580b0f5cd52ca6e756d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1f762954-6ca5-4da5-bf0a-5d31c51ec570", "external-id": "nsx-vlan-transportzone-930", "segmentation_id": 930, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap53db4677-3e", "ovs_interfaceid": "53db4677-3e45-4941-aa9d-ab7dee4a3da6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1492.219964] env[62519]: DEBUG oslo_concurrency.lockutils [None req-a1c40e59-33e7-4dc4-932b-b210029267f9 tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.794s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1492.220530] env[62519]: DEBUG nova.compute.manager [None req-a1c40e59-33e7-4dc4-932b-b210029267f9 tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] [instance: f59a31e4-7fb9-4de7-b35f-da811a305f85] Start building networks asynchronously for instance. 
{{(pid=62519) _build_resources /opt/stack/nova/nova/compute/manager.py:2838}} [ 1492.223179] env[62519]: DEBUG oslo_concurrency.lockutils [None req-d99a69b3-9d56-41a5-a9b3-088dd2dc5654 tempest-ServerShowV247Test-1912217433 tempest-ServerShowV247Test-1912217433-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 31.756s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1492.223370] env[62519]: DEBUG nova.objects.instance [None req-d99a69b3-9d56-41a5-a9b3-088dd2dc5654 tempest-ServerShowV247Test-1912217433 tempest-ServerShowV247Test-1912217433-project-member] [instance: 49221ea3-d457-4cf5-97a9-9ae74c4e86fb] Trying to apply a migration context that does not seem to be set for this instance {{(pid=62519) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1492.237237] env[62519]: DEBUG nova.compute.manager [None req-f4937512-5d3a-4267-8f7e-6f25926b25ef tempest-ServerExternalEventsTest-1258659452 tempest-ServerExternalEventsTest-1258659452-project] [instance: f7d5c77d-6c78-4969-b511-2b03ab624c84] Received event network-changed {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1492.237946] env[62519]: DEBUG nova.compute.manager [None req-f4937512-5d3a-4267-8f7e-6f25926b25ef tempest-ServerExternalEventsTest-1258659452 tempest-ServerExternalEventsTest-1258659452-project] [instance: f7d5c77d-6c78-4969-b511-2b03ab624c84] Refreshing instance network info cache due to event network-changed. {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11628}} [ 1492.237946] env[62519]: DEBUG oslo_concurrency.lockutils [None req-f4937512-5d3a-4267-8f7e-6f25926b25ef tempest-ServerExternalEventsTest-1258659452 tempest-ServerExternalEventsTest-1258659452-project] Acquiring lock "refresh_cache-f7d5c77d-6c78-4969-b511-2b03ab624c84" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1492.237946] env[62519]: DEBUG oslo_concurrency.lockutils [None req-f4937512-5d3a-4267-8f7e-6f25926b25ef tempest-ServerExternalEventsTest-1258659452 tempest-ServerExternalEventsTest-1258659452-project] Acquired lock "refresh_cache-f7d5c77d-6c78-4969-b511-2b03ab624c84" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1492.237946] env[62519]: DEBUG nova.network.neutron [None req-f4937512-5d3a-4267-8f7e-6f25926b25ef tempest-ServerExternalEventsTest-1258659452 tempest-ServerExternalEventsTest-1258659452-project] [instance: f7d5c77d-6c78-4969-b511-2b03ab624c84] Building network info cache for instance {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1492.314259] env[62519]: DEBUG oslo_vmware.api [None req-261745f1-111f-411a-bbb8-413ef8a82a9e tempest-ServerShowV254Test-1623148250 tempest-ServerShowV254Test-1623148250-project-member] Task: {'id': task-1802176, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.107809} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1492.314922] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-261745f1-111f-411a-bbb8-413ef8a82a9e tempest-ServerShowV254Test-1623148250 tempest-ServerShowV254Test-1623148250-project-member] Deleted the datastore file {{(pid=62519) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1492.315026] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-261745f1-111f-411a-bbb8-413ef8a82a9e tempest-ServerShowV254Test-1623148250 tempest-ServerShowV254Test-1623148250-project-member] [instance: 1462d213-3f9a-4c60-8056-0b68f20a4939] Deleted contents of the VM from datastore datastore1 {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1492.315224] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-261745f1-111f-411a-bbb8-413ef8a82a9e tempest-ServerShowV254Test-1623148250 tempest-ServerShowV254Test-1623148250-project-member] [instance: 1462d213-3f9a-4c60-8056-0b68f20a4939] Instance destroyed {{(pid=62519) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1492.320983] env[62519]: DEBUG oslo_vmware.api [None req-583dd43c-6768-4d96-8b92-a6ff282828ba tempest-DeleteServersAdminTestJSON-456585889 tempest-DeleteServersAdminTestJSON-456585889-project-member] Task: {'id': task-1802174, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.534458} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1492.321450] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-583dd43c-6768-4d96-8b92-a6ff282828ba tempest-DeleteServersAdminTestJSON-456585889 tempest-DeleteServersAdminTestJSON-456585889-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk to [datastore1] 40c7a9b8-d541-464a-ba87-76cfc183ae31/40c7a9b8-d541-464a-ba87-76cfc183ae31.vmdk {{(pid=62519) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1492.321662] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-583dd43c-6768-4d96-8b92-a6ff282828ba tempest-DeleteServersAdminTestJSON-456585889 tempest-DeleteServersAdminTestJSON-456585889-project-member] [instance: 40c7a9b8-d541-464a-ba87-76cfc183ae31] Extending root virtual disk to 1048576 {{(pid=62519) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1492.321902] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-71e9fc9c-86d7-4439-b904-7b4d8466c067 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1492.330559] env[62519]: DEBUG oslo_vmware.api [None req-583dd43c-6768-4d96-8b92-a6ff282828ba tempest-DeleteServersAdminTestJSON-456585889 tempest-DeleteServersAdminTestJSON-456585889-project-member] Waiting for the task: (returnval){ [ 1492.330559] env[62519]: value = "task-1802177" [ 1492.330559] env[62519]: _type = "Task" [ 1492.330559] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1492.341643] env[62519]: DEBUG oslo_vmware.api [None req-583dd43c-6768-4d96-8b92-a6ff282828ba tempest-DeleteServersAdminTestJSON-456585889 tempest-DeleteServersAdminTestJSON-456585889-project-member] Task: {'id': task-1802177, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1492.412535] env[62519]: DEBUG oslo_concurrency.lockutils [None req-b80a0845-960b-4b9e-b305-46f7cb345916 tempest-SecurityGroupsTestJSON-38868017 tempest-SecurityGroupsTestJSON-38868017-project-member] Acquiring lock "ad374dd9-a92d-4b76-9609-7562346e05a8" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1492.412769] env[62519]: DEBUG oslo_concurrency.lockutils [None req-b80a0845-960b-4b9e-b305-46f7cb345916 tempest-SecurityGroupsTestJSON-38868017 tempest-SecurityGroupsTestJSON-38868017-project-member] Lock "ad374dd9-a92d-4b76-9609-7562346e05a8" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1492.500767] env[62519]: DEBUG nova.compute.manager [req-bd7c5aa8-e201-496d-ab74-d33bee1c0b68 req-ec05a847-af03-4375-8618-bd3f7fabcd5d service nova] [instance: cfefa7c8-4986-4ad0-ac20-8784ee44a737] Received event network-vif-plugged-dd1ebcef-cdc1-4641-8622-443a2365b497 {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1492.500767] env[62519]: DEBUG oslo_concurrency.lockutils [req-bd7c5aa8-e201-496d-ab74-d33bee1c0b68 req-ec05a847-af03-4375-8618-bd3f7fabcd5d service nova] Acquiring lock "cfefa7c8-4986-4ad0-ac20-8784ee44a737-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1492.500767] env[62519]: DEBUG oslo_concurrency.lockutils [req-bd7c5aa8-e201-496d-ab74-d33bee1c0b68 req-ec05a847-af03-4375-8618-bd3f7fabcd5d service nova] Lock "cfefa7c8-4986-4ad0-ac20-8784ee44a737-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1492.500767] env[62519]: DEBUG oslo_concurrency.lockutils [req-bd7c5aa8-e201-496d-ab74-d33bee1c0b68 req-ec05a847-af03-4375-8618-bd3f7fabcd5d service nova] Lock "cfefa7c8-4986-4ad0-ac20-8784ee44a737-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1492.501064] env[62519]: DEBUG nova.compute.manager [req-bd7c5aa8-e201-496d-ab74-d33bee1c0b68 req-ec05a847-af03-4375-8618-bd3f7fabcd5d service nova] [instance: cfefa7c8-4986-4ad0-ac20-8784ee44a737] No waiting events found dispatching network-vif-plugged-dd1ebcef-cdc1-4641-8622-443a2365b497 {{(pid=62519) pop_instance_event /opt/stack/nova/nova/compute/manager.py:323}} [ 1492.501199] env[62519]: WARNING nova.compute.manager [req-bd7c5aa8-e201-496d-ab74-d33bee1c0b68 req-ec05a847-af03-4375-8618-bd3f7fabcd5d service nova] [instance: cfefa7c8-4986-4ad0-ac20-8784ee44a737] Received unexpected event network-vif-plugged-dd1ebcef-cdc1-4641-8622-443a2365b497 for instance with vm_state building and task_state spawning. 
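The "Waiting for the task: (returnval){ ... }" blocks and the "progress is N%" entries above come from oslo.vmware's task polling (wait_for_task/_poll_task in oslo_vmware/api.py), while the "Acquiring lock" / "acquired by" / "released" entries come from the oslo_concurrency.lockutils wrappers. A minimal sketch of the task-wait pattern follows; the vCenter endpoint, credentials and VMDK path are placeholders, and the datacenter reference that a real call would typically also supply to ExtendVirtualDisk_Task is omitted for brevity.

# Sketch only: issue a vCenter task through an oslo.vmware session and block
# until it completes; wait_for_task() is what emits the "Waiting for the
# task ..." and "progress is N%" DEBUG lines seen in this log.
from oslo_vmware import api as vmware_api

session = vmware_api.VMwareAPISession(
    'vc.example.org', 'user', 'secret',  # placeholder endpoint and credentials
    10, 0.5)                             # API retry count, task poll interval

disk_mgr = session.vim.service_content.virtualDiskManager
task = session.invoke_api(
    session.vim, 'ExtendVirtualDisk_Task', disk_mgr,
    name='[datastore1] example/example.vmdk',  # placeholder VMDK path
    newCapacityKb=1048576,                     # 1 GiB (1048576 KB), matching the extend logged above
    eagerZero=False)

# Polls the task until it reaches a terminal state and raises if it errored.
task_info = session.wait_for_task(task)
print(task_info.state)

The lock bookkeeping around compute_resources, instance_claim and the refresh_cache-* entries is the standard lockutils pattern; an illustrative (not Nova's exact) form:

from oslo_concurrency import lockutils

@lockutils.synchronized('compute_resources')
def instance_claim_sketch():
    # The decorator logs the "Acquiring lock", "acquired ... waited" and
    # "released ... held" messages around this body.
    pass

instance_claim_sketch()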
[ 1492.509648] env[62519]: DEBUG oslo_concurrency.lockutils [req-bbf284d7-8efc-4d3e-ba4e-129a07a4d870 req-7066fd20-d6a7-48ee-8fe9-b9d878662972 service nova] Releasing lock "refresh_cache-4a29bff8-050a-4ad5-9d06-3a59c40b97ee" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1492.540568] env[62519]: DEBUG nova.network.neutron [None req-6d590639-68af-4fac-aa10-a83b048f703c tempest-ServersTestMultiNic-797441404 tempest-ServersTestMultiNic-797441404-project-member] [instance: cfefa7c8-4986-4ad0-ac20-8784ee44a737] Successfully updated port: dd1ebcef-cdc1-4641-8622-443a2365b497 {{(pid=62519) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1492.729645] env[62519]: DEBUG nova.compute.utils [None req-a1c40e59-33e7-4dc4-932b-b210029267f9 tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Using /dev/sd instead of None {{(pid=62519) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1492.732302] env[62519]: DEBUG nova.compute.manager [None req-a1c40e59-33e7-4dc4-932b-b210029267f9 tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] [instance: f59a31e4-7fb9-4de7-b35f-da811a305f85] Allocating IP information in the background. {{(pid=62519) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1989}} [ 1492.732479] env[62519]: DEBUG nova.network.neutron [None req-a1c40e59-33e7-4dc4-932b-b210029267f9 tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] [instance: f59a31e4-7fb9-4de7-b35f-da811a305f85] allocate_for_instance() {{(pid=62519) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1492.772044] env[62519]: DEBUG nova.policy [None req-a1c40e59-33e7-4dc4-932b-b210029267f9 tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c8355a912c5b4aef8ad7f30c2e64fdc4', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '555fa612766f4b5fa173664ca3fa496c', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62519) authorize /opt/stack/nova/nova/policy.py:192}} [ 1492.806627] env[62519]: DEBUG oslo_concurrency.lockutils [None req-f492c075-bf4f-4007-8aab-a79cc6b2c698 tempest-ServerExternalEventsTest-627262165 tempest-ServerExternalEventsTest-627262165-project-member] Acquiring lock "f7d5c77d-6c78-4969-b511-2b03ab624c84" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1492.841368] env[62519]: DEBUG oslo_vmware.api [None req-583dd43c-6768-4d96-8b92-a6ff282828ba tempest-DeleteServersAdminTestJSON-456585889 tempest-DeleteServersAdminTestJSON-456585889-project-member] Task: {'id': task-1802177, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.254804} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1492.841624] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-583dd43c-6768-4d96-8b92-a6ff282828ba tempest-DeleteServersAdminTestJSON-456585889 tempest-DeleteServersAdminTestJSON-456585889-project-member] [instance: 40c7a9b8-d541-464a-ba87-76cfc183ae31] Extended root virtual disk {{(pid=62519) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1492.842430] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51069878-93b7-43a6-b1b4-d4a0ef613493 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1492.873016] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-583dd43c-6768-4d96-8b92-a6ff282828ba tempest-DeleteServersAdminTestJSON-456585889 tempest-DeleteServersAdminTestJSON-456585889-project-member] [instance: 40c7a9b8-d541-464a-ba87-76cfc183ae31] Reconfiguring VM instance instance-0000001c to attach disk [datastore1] 40c7a9b8-d541-464a-ba87-76cfc183ae31/40c7a9b8-d541-464a-ba87-76cfc183ae31.vmdk or device None with type sparse {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1492.875895] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-385a0422-ef37-4030-b6f8-8871eece58ee {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1492.901010] env[62519]: DEBUG oslo_vmware.api [None req-583dd43c-6768-4d96-8b92-a6ff282828ba tempest-DeleteServersAdminTestJSON-456585889 tempest-DeleteServersAdminTestJSON-456585889-project-member] Waiting for the task: (returnval){ [ 1492.901010] env[62519]: value = "task-1802178" [ 1492.901010] env[62519]: _type = "Task" [ 1492.901010] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1492.912959] env[62519]: DEBUG oslo_vmware.api [None req-583dd43c-6768-4d96-8b92-a6ff282828ba tempest-DeleteServersAdminTestJSON-456585889 tempest-DeleteServersAdminTestJSON-456585889-project-member] Task: {'id': task-1802178, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1493.064307] env[62519]: DEBUG nova.network.neutron [None req-f4937512-5d3a-4267-8f7e-6f25926b25ef tempest-ServerExternalEventsTest-1258659452 tempest-ServerExternalEventsTest-1258659452-project] [instance: f7d5c77d-6c78-4969-b511-2b03ab624c84] Updating instance_info_cache with network_info: [{"id": "11826675-9830-43c6-a3ed-cc8329005aa2", "address": "fa:16:3e:50:03:3c", "network": {"id": "b4c20e4e-b4f0-4757-9ab0-b74eef10b678", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.206", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "4069337684d348e0a1ab4eb6a1a2b14d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fe99da4f-5630-4afd-918b-b327193d8489", "external-id": "nsx-vlan-transportzone-688", "segmentation_id": 688, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap11826675-98", "ovs_interfaceid": "11826675-9830-43c6-a3ed-cc8329005aa2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1493.117199] env[62519]: DEBUG nova.network.neutron [None req-a1c40e59-33e7-4dc4-932b-b210029267f9 tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] [instance: f59a31e4-7fb9-4de7-b35f-da811a305f85] Successfully created port: d1be096c-c8b0-4dd6-98d9-8289626a2f66 {{(pid=62519) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1493.235298] env[62519]: DEBUG nova.compute.manager [None req-a1c40e59-33e7-4dc4-932b-b210029267f9 tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] [instance: f59a31e4-7fb9-4de7-b35f-da811a305f85] Start building block device mappings for instance. 
{{(pid=62519) _build_resources /opt/stack/nova/nova/compute/manager.py:2873}} [ 1493.239809] env[62519]: DEBUG oslo_concurrency.lockutils [None req-d99a69b3-9d56-41a5-a9b3-088dd2dc5654 tempest-ServerShowV247Test-1912217433 tempest-ServerShowV247Test-1912217433-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.017s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1493.244021] env[62519]: DEBUG oslo_concurrency.lockutils [None req-d532afdb-73c2-4922-91b8-52d65f7c0d83 tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 27.664s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1493.244021] env[62519]: INFO nova.compute.claims [None req-d532afdb-73c2-4922-91b8-52d65f7c0d83 tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] [instance: 1d4b14d3-8832-457e-aaed-462236555f57] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1493.361198] env[62519]: DEBUG nova.virt.hardware [None req-261745f1-111f-411a-bbb8-413ef8a82a9e tempest-ServerShowV254Test-1623148250 tempest-ServerShowV254Test-1623148250-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T07:56:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T07:55:55Z,direct_url=,disk_format='vmdk',id=15793716-f1d9-4a86-9030-717adf498693,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4069337684d348e0a1ab4eb6a1a2b14d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T07:55:56Z,virtual_size=,visibility=), allow threads: False {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1493.361456] env[62519]: DEBUG nova.virt.hardware [None req-261745f1-111f-411a-bbb8-413ef8a82a9e tempest-ServerShowV254Test-1623148250 tempest-ServerShowV254Test-1623148250-project-member] Flavor limits 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1493.361607] env[62519]: DEBUG nova.virt.hardware [None req-261745f1-111f-411a-bbb8-413ef8a82a9e tempest-ServerShowV254Test-1623148250 tempest-ServerShowV254Test-1623148250-project-member] Image limits 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1493.361800] env[62519]: DEBUG nova.virt.hardware [None req-261745f1-111f-411a-bbb8-413ef8a82a9e tempest-ServerShowV254Test-1623148250 tempest-ServerShowV254Test-1623148250-project-member] Flavor pref 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1493.361949] env[62519]: DEBUG nova.virt.hardware [None req-261745f1-111f-411a-bbb8-413ef8a82a9e tempest-ServerShowV254Test-1623148250 tempest-ServerShowV254Test-1623148250-project-member] Image pref 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1493.362198] env[62519]: 
DEBUG nova.virt.hardware [None req-261745f1-111f-411a-bbb8-413ef8a82a9e tempest-ServerShowV254Test-1623148250 tempest-ServerShowV254Test-1623148250-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1493.362423] env[62519]: DEBUG nova.virt.hardware [None req-261745f1-111f-411a-bbb8-413ef8a82a9e tempest-ServerShowV254Test-1623148250 tempest-ServerShowV254Test-1623148250-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1493.362579] env[62519]: DEBUG nova.virt.hardware [None req-261745f1-111f-411a-bbb8-413ef8a82a9e tempest-ServerShowV254Test-1623148250 tempest-ServerShowV254Test-1623148250-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62519) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1493.362739] env[62519]: DEBUG nova.virt.hardware [None req-261745f1-111f-411a-bbb8-413ef8a82a9e tempest-ServerShowV254Test-1623148250 tempest-ServerShowV254Test-1623148250-project-member] Got 1 possible topologies {{(pid=62519) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1493.362896] env[62519]: DEBUG nova.virt.hardware [None req-261745f1-111f-411a-bbb8-413ef8a82a9e tempest-ServerShowV254Test-1623148250 tempest-ServerShowV254Test-1623148250-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1493.363122] env[62519]: DEBUG nova.virt.hardware [None req-261745f1-111f-411a-bbb8-413ef8a82a9e tempest-ServerShowV254Test-1623148250 tempest-ServerShowV254Test-1623148250-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1493.364040] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1e0ad28-f674-4802-9952-e5659ba18af2 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1493.374227] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df01b2bd-4440-4453-96ba-1f274c886428 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1493.397719] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-261745f1-111f-411a-bbb8-413ef8a82a9e tempest-ServerShowV254Test-1623148250 tempest-ServerShowV254Test-1623148250-project-member] [instance: 1462d213-3f9a-4c60-8056-0b68f20a4939] Instance VIF info [] {{(pid=62519) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1493.405635] env[62519]: DEBUG oslo.service.loopingcall [None req-261745f1-111f-411a-bbb8-413ef8a82a9e tempest-ServerShowV254Test-1623148250 tempest-ServerShowV254Test-1623148250-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62519) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1493.406024] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1462d213-3f9a-4c60-8056-0b68f20a4939] Creating VM on the ESX host {{(pid=62519) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1493.412099] env[62519]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-7659d846-c71a-4e33-af07-cd2c9c3342f2 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1493.444081] env[62519]: DEBUG oslo_vmware.api [None req-583dd43c-6768-4d96-8b92-a6ff282828ba tempest-DeleteServersAdminTestJSON-456585889 tempest-DeleteServersAdminTestJSON-456585889-project-member] Task: {'id': task-1802178, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1493.445820] env[62519]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1493.445820] env[62519]: value = "task-1802179" [ 1493.445820] env[62519]: _type = "Task" [ 1493.445820] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1493.456208] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1802179, 'name': CreateVM_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1493.460100] env[62519]: DEBUG oslo_concurrency.lockutils [None req-7abad8d6-8e42-4549-a192-8cdae3b16ee9 tempest-ServerExternalEventsTest-627262165 tempest-ServerExternalEventsTest-627262165-project-member] Lock "f7d5c77d-6c78-4969-b511-2b03ab624c84" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 79.032s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1493.461454] env[62519]: DEBUG oslo_concurrency.lockutils [None req-f492c075-bf4f-4007-8aab-a79cc6b2c698 tempest-ServerExternalEventsTest-627262165 tempest-ServerExternalEventsTest-627262165-project-member] Lock "f7d5c77d-6c78-4969-b511-2b03ab624c84" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.655s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1493.461674] env[62519]: DEBUG oslo_concurrency.lockutils [None req-f492c075-bf4f-4007-8aab-a79cc6b2c698 tempest-ServerExternalEventsTest-627262165 tempest-ServerExternalEventsTest-627262165-project-member] Acquiring lock "f7d5c77d-6c78-4969-b511-2b03ab624c84-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1493.461875] env[62519]: DEBUG oslo_concurrency.lockutils [None req-f492c075-bf4f-4007-8aab-a79cc6b2c698 tempest-ServerExternalEventsTest-627262165 tempest-ServerExternalEventsTest-627262165-project-member] Lock "f7d5c77d-6c78-4969-b511-2b03ab624c84-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1493.462052] env[62519]: DEBUG oslo_concurrency.lockutils [None req-f492c075-bf4f-4007-8aab-a79cc6b2c698 tempest-ServerExternalEventsTest-627262165 
tempest-ServerExternalEventsTest-627262165-project-member] Lock "f7d5c77d-6c78-4969-b511-2b03ab624c84-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1493.464080] env[62519]: INFO nova.compute.manager [None req-f492c075-bf4f-4007-8aab-a79cc6b2c698 tempest-ServerExternalEventsTest-627262165 tempest-ServerExternalEventsTest-627262165-project-member] [instance: f7d5c77d-6c78-4969-b511-2b03ab624c84] Terminating instance [ 1493.569138] env[62519]: DEBUG oslo_concurrency.lockutils [None req-f4937512-5d3a-4267-8f7e-6f25926b25ef tempest-ServerExternalEventsTest-1258659452 tempest-ServerExternalEventsTest-1258659452-project] Releasing lock "refresh_cache-f7d5c77d-6c78-4969-b511-2b03ab624c84" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1493.917209] env[62519]: DEBUG oslo_vmware.api [None req-583dd43c-6768-4d96-8b92-a6ff282828ba tempest-DeleteServersAdminTestJSON-456585889 tempest-DeleteServersAdminTestJSON-456585889-project-member] Task: {'id': task-1802178, 'name': ReconfigVM_Task, 'duration_secs': 0.671719} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1493.917509] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-583dd43c-6768-4d96-8b92-a6ff282828ba tempest-DeleteServersAdminTestJSON-456585889 tempest-DeleteServersAdminTestJSON-456585889-project-member] [instance: 40c7a9b8-d541-464a-ba87-76cfc183ae31] Reconfigured VM instance instance-0000001c to attach disk [datastore1] 40c7a9b8-d541-464a-ba87-76cfc183ae31/40c7a9b8-d541-464a-ba87-76cfc183ae31.vmdk or device None with type sparse {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1493.918593] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-d61f20df-74b2-47c9-a4a2-463c543651b3 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1493.926583] env[62519]: DEBUG oslo_vmware.api [None req-583dd43c-6768-4d96-8b92-a6ff282828ba tempest-DeleteServersAdminTestJSON-456585889 tempest-DeleteServersAdminTestJSON-456585889-project-member] Waiting for the task: (returnval){ [ 1493.926583] env[62519]: value = "task-1802180" [ 1493.926583] env[62519]: _type = "Task" [ 1493.926583] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1493.936032] env[62519]: DEBUG oslo_vmware.api [None req-583dd43c-6768-4d96-8b92-a6ff282828ba tempest-DeleteServersAdminTestJSON-456585889 tempest-DeleteServersAdminTestJSON-456585889-project-member] Task: {'id': task-1802180, 'name': Rename_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1493.956063] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1802179, 'name': CreateVM_Task, 'duration_secs': 0.321147} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1493.956257] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1462d213-3f9a-4c60-8056-0b68f20a4939] Created VM on the ESX host {{(pid=62519) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1493.956742] env[62519]: DEBUG oslo_concurrency.lockutils [None req-261745f1-111f-411a-bbb8-413ef8a82a9e tempest-ServerShowV254Test-1623148250 tempest-ServerShowV254Test-1623148250-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1493.956937] env[62519]: DEBUG oslo_concurrency.lockutils [None req-261745f1-111f-411a-bbb8-413ef8a82a9e tempest-ServerShowV254Test-1623148250 tempest-ServerShowV254Test-1623148250-project-member] Acquired lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1493.957309] env[62519]: DEBUG oslo_concurrency.lockutils [None req-261745f1-111f-411a-bbb8-413ef8a82a9e tempest-ServerShowV254Test-1623148250 tempest-ServerShowV254Test-1623148250-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1493.957586] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5018294f-c58f-44dd-adb7-1d0ae512cfbc {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1493.963096] env[62519]: DEBUG oslo_vmware.api [None req-261745f1-111f-411a-bbb8-413ef8a82a9e tempest-ServerShowV254Test-1623148250 tempest-ServerShowV254Test-1623148250-project-member] Waiting for the task: (returnval){ [ 1493.963096] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]52f49318-7408-9ca0-20fb-488a29f0a99d" [ 1493.963096] env[62519]: _type = "Task" [ 1493.963096] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1493.967157] env[62519]: DEBUG nova.compute.manager [None req-8bccbc50-fc90-49af-b28a-2ab70ae51360 tempest-ServerAddressesNegativeTestJSON-801003583 tempest-ServerAddressesNegativeTestJSON-801003583-project-member] [instance: 22380aef-c725-43a0-a957-06ced9518c21] Starting instance... {{(pid=62519) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2441}} [ 1493.970371] env[62519]: DEBUG nova.compute.manager [None req-f492c075-bf4f-4007-8aab-a79cc6b2c698 tempest-ServerExternalEventsTest-627262165 tempest-ServerExternalEventsTest-627262165-project-member] [instance: f7d5c77d-6c78-4969-b511-2b03ab624c84] Start destroying the instance on the hypervisor. 
{{(pid=62519) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3166}} [ 1493.970371] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-f492c075-bf4f-4007-8aab-a79cc6b2c698 tempest-ServerExternalEventsTest-627262165 tempest-ServerExternalEventsTest-627262165-project-member] [instance: f7d5c77d-6c78-4969-b511-2b03ab624c84] Destroying instance {{(pid=62519) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1493.971982] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-048e3e5c-f36b-4ae2-9863-7aa043d650e3 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1493.979577] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-f492c075-bf4f-4007-8aab-a79cc6b2c698 tempest-ServerExternalEventsTest-627262165 tempest-ServerExternalEventsTest-627262165-project-member] [instance: f7d5c77d-6c78-4969-b511-2b03ab624c84] Powering off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1493.983544] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c3fd3d49-7cea-4213-85a3-8fd48fbe4b6f {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1493.985679] env[62519]: DEBUG oslo_vmware.api [None req-261745f1-111f-411a-bbb8-413ef8a82a9e tempest-ServerShowV254Test-1623148250 tempest-ServerShowV254Test-1623148250-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52f49318-7408-9ca0-20fb-488a29f0a99d, 'name': SearchDatastore_Task, 'duration_secs': 0.010621} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1493.985679] env[62519]: DEBUG oslo_concurrency.lockutils [None req-261745f1-111f-411a-bbb8-413ef8a82a9e tempest-ServerShowV254Test-1623148250 tempest-ServerShowV254Test-1623148250-project-member] Releasing lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1493.985829] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-261745f1-111f-411a-bbb8-413ef8a82a9e tempest-ServerShowV254Test-1623148250 tempest-ServerShowV254Test-1623148250-project-member] [instance: 1462d213-3f9a-4c60-8056-0b68f20a4939] Processing image 15793716-f1d9-4a86-9030-717adf498693 {{(pid=62519) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1493.986107] env[62519]: DEBUG oslo_concurrency.lockutils [None req-261745f1-111f-411a-bbb8-413ef8a82a9e tempest-ServerShowV254Test-1623148250 tempest-ServerShowV254Test-1623148250-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1493.986268] env[62519]: DEBUG oslo_concurrency.lockutils [None req-261745f1-111f-411a-bbb8-413ef8a82a9e tempest-ServerShowV254Test-1623148250 tempest-ServerShowV254Test-1623148250-project-member] Acquired lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1493.986449] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None 
req-261745f1-111f-411a-bbb8-413ef8a82a9e tempest-ServerShowV254Test-1623148250 tempest-ServerShowV254Test-1623148250-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62519) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1493.987117] env[62519]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c7a11b8b-3dd7-48a2-941c-021702dac6a9 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1493.993673] env[62519]: DEBUG oslo_vmware.api [None req-f492c075-bf4f-4007-8aab-a79cc6b2c698 tempest-ServerExternalEventsTest-627262165 tempest-ServerExternalEventsTest-627262165-project-member] Waiting for the task: (returnval){ [ 1493.993673] env[62519]: value = "task-1802181" [ 1493.993673] env[62519]: _type = "Task" [ 1493.993673] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1493.997985] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-261745f1-111f-411a-bbb8-413ef8a82a9e tempest-ServerShowV254Test-1623148250 tempest-ServerShowV254Test-1623148250-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62519) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1493.998213] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-261745f1-111f-411a-bbb8-413ef8a82a9e tempest-ServerShowV254Test-1623148250 tempest-ServerShowV254Test-1623148250-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62519) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1493.999336] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-550da9f8-b8d7-4e8e-bb3c-ba67219dc21e {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1494.007290] env[62519]: DEBUG oslo_vmware.api [None req-f492c075-bf4f-4007-8aab-a79cc6b2c698 tempest-ServerExternalEventsTest-627262165 tempest-ServerExternalEventsTest-627262165-project-member] Task: {'id': task-1802181, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1494.011202] env[62519]: DEBUG oslo_vmware.api [None req-261745f1-111f-411a-bbb8-413ef8a82a9e tempest-ServerShowV254Test-1623148250 tempest-ServerShowV254Test-1623148250-project-member] Waiting for the task: (returnval){ [ 1494.011202] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]529f2ead-a521-40b3-bafc-86bc46a4e1ca" [ 1494.011202] env[62519]: _type = "Task" [ 1494.011202] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1494.021226] env[62519]: DEBUG oslo_vmware.api [None req-261745f1-111f-411a-bbb8-413ef8a82a9e tempest-ServerShowV254Test-1623148250 tempest-ServerShowV254Test-1623148250-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]529f2ead-a521-40b3-bafc-86bc46a4e1ca, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1494.248071] env[62519]: DEBUG nova.compute.manager [None req-a1c40e59-33e7-4dc4-932b-b210029267f9 tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] [instance: f59a31e4-7fb9-4de7-b35f-da811a305f85] Start spawning the instance on the hypervisor. {{(pid=62519) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2647}} [ 1494.269565] env[62519]: DEBUG nova.virt.hardware [None req-a1c40e59-33e7-4dc4-932b-b210029267f9 tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T07:56:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T07:55:55Z,direct_url=,disk_format='vmdk',id=15793716-f1d9-4a86-9030-717adf498693,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4069337684d348e0a1ab4eb6a1a2b14d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T07:55:56Z,virtual_size=,visibility=), allow threads: False {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1494.269857] env[62519]: DEBUG nova.virt.hardware [None req-a1c40e59-33e7-4dc4-932b-b210029267f9 tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Flavor limits 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1494.270034] env[62519]: DEBUG nova.virt.hardware [None req-a1c40e59-33e7-4dc4-932b-b210029267f9 tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Image limits 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1494.270217] env[62519]: DEBUG nova.virt.hardware [None req-a1c40e59-33e7-4dc4-932b-b210029267f9 tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Flavor pref 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1494.270362] env[62519]: DEBUG nova.virt.hardware [None req-a1c40e59-33e7-4dc4-932b-b210029267f9 tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Image pref 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1494.270505] env[62519]: DEBUG nova.virt.hardware [None req-a1c40e59-33e7-4dc4-932b-b210029267f9 tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1494.270712] env[62519]: DEBUG nova.virt.hardware [None req-a1c40e59-33e7-4dc4-932b-b210029267f9 tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1494.271146] 
env[62519]: DEBUG nova.virt.hardware [None req-a1c40e59-33e7-4dc4-932b-b210029267f9 tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62519) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1494.271146] env[62519]: DEBUG nova.virt.hardware [None req-a1c40e59-33e7-4dc4-932b-b210029267f9 tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Got 1 possible topologies {{(pid=62519) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1494.271296] env[62519]: DEBUG nova.virt.hardware [None req-a1c40e59-33e7-4dc4-932b-b210029267f9 tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1494.271464] env[62519]: DEBUG nova.virt.hardware [None req-a1c40e59-33e7-4dc4-932b-b210029267f9 tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1494.273085] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6e390a7-ef57-4db8-b4fb-0ce64d2fc204 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1494.282811] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58602b16-a823-4ccc-8600-b4b87c4469f6 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1494.440466] env[62519]: DEBUG oslo_vmware.api [None req-583dd43c-6768-4d96-8b92-a6ff282828ba tempest-DeleteServersAdminTestJSON-456585889 tempest-DeleteServersAdminTestJSON-456585889-project-member] Task: {'id': task-1802180, 'name': Rename_Task, 'duration_secs': 0.176053} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1494.440749] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-583dd43c-6768-4d96-8b92-a6ff282828ba tempest-DeleteServersAdminTestJSON-456585889 tempest-DeleteServersAdminTestJSON-456585889-project-member] [instance: 40c7a9b8-d541-464a-ba87-76cfc183ae31] Powering on the VM {{(pid=62519) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1494.440992] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-26cd7cba-23c0-40e1-ac45-876251dc4995 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1494.449029] env[62519]: DEBUG oslo_vmware.api [None req-583dd43c-6768-4d96-8b92-a6ff282828ba tempest-DeleteServersAdminTestJSON-456585889 tempest-DeleteServersAdminTestJSON-456585889-project-member] Waiting for the task: (returnval){ [ 1494.449029] env[62519]: value = "task-1802182" [ 1494.449029] env[62519]: _type = "Task" [ 1494.449029] env[62519]: } to complete. 
{{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1494.459291] env[62519]: DEBUG oslo_vmware.api [None req-583dd43c-6768-4d96-8b92-a6ff282828ba tempest-DeleteServersAdminTestJSON-456585889 tempest-DeleteServersAdminTestJSON-456585889-project-member] Task: {'id': task-1802182, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1494.488150] env[62519]: DEBUG oslo_concurrency.lockutils [None req-8bccbc50-fc90-49af-b28a-2ab70ae51360 tempest-ServerAddressesNegativeTestJSON-801003583 tempest-ServerAddressesNegativeTestJSON-801003583-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1494.507195] env[62519]: DEBUG oslo_vmware.api [None req-f492c075-bf4f-4007-8aab-a79cc6b2c698 tempest-ServerExternalEventsTest-627262165 tempest-ServerExternalEventsTest-627262165-project-member] Task: {'id': task-1802181, 'name': PowerOffVM_Task, 'duration_secs': 0.206147} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1494.507657] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-f492c075-bf4f-4007-8aab-a79cc6b2c698 tempest-ServerExternalEventsTest-627262165 tempest-ServerExternalEventsTest-627262165-project-member] [instance: f7d5c77d-6c78-4969-b511-2b03ab624c84] Powered off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1494.507981] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-f492c075-bf4f-4007-8aab-a79cc6b2c698 tempest-ServerExternalEventsTest-627262165 tempest-ServerExternalEventsTest-627262165-project-member] [instance: f7d5c77d-6c78-4969-b511-2b03ab624c84] Unregistering the VM {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1494.509209] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-1a0622dc-8dfe-4244-837e-5c76171d947b {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1494.523635] env[62519]: DEBUG oslo_vmware.api [None req-261745f1-111f-411a-bbb8-413ef8a82a9e tempest-ServerShowV254Test-1623148250 tempest-ServerShowV254Test-1623148250-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]529f2ead-a521-40b3-bafc-86bc46a4e1ca, 'name': SearchDatastore_Task, 'duration_secs': 0.011163} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1494.526774] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-110e67af-fdd7-4fdb-806e-2c99bb53bc08 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1494.531887] env[62519]: DEBUG nova.compute.manager [req-14090579-d8e5-4ee7-a054-2538fe764236 req-b8ca08c6-6b26-4e94-9610-f0e2b5f62294 service nova] [instance: cfefa7c8-4986-4ad0-ac20-8784ee44a737] Received event network-changed-dd1ebcef-cdc1-4641-8622-443a2365b497 {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1494.532094] env[62519]: DEBUG nova.compute.manager [req-14090579-d8e5-4ee7-a054-2538fe764236 req-b8ca08c6-6b26-4e94-9610-f0e2b5f62294 service nova] [instance: cfefa7c8-4986-4ad0-ac20-8784ee44a737] Refreshing instance network info cache due to event network-changed-dd1ebcef-cdc1-4641-8622-443a2365b497. {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11628}} [ 1494.532309] env[62519]: DEBUG oslo_concurrency.lockutils [req-14090579-d8e5-4ee7-a054-2538fe764236 req-b8ca08c6-6b26-4e94-9610-f0e2b5f62294 service nova] Acquiring lock "refresh_cache-cfefa7c8-4986-4ad0-ac20-8784ee44a737" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1494.532452] env[62519]: DEBUG oslo_concurrency.lockutils [req-14090579-d8e5-4ee7-a054-2538fe764236 req-b8ca08c6-6b26-4e94-9610-f0e2b5f62294 service nova] Acquired lock "refresh_cache-cfefa7c8-4986-4ad0-ac20-8784ee44a737" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1494.532612] env[62519]: DEBUG nova.network.neutron [req-14090579-d8e5-4ee7-a054-2538fe764236 req-b8ca08c6-6b26-4e94-9610-f0e2b5f62294 service nova] [instance: cfefa7c8-4986-4ad0-ac20-8784ee44a737] Refreshing network info cache for port dd1ebcef-cdc1-4641-8622-443a2365b497 {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1494.538325] env[62519]: DEBUG oslo_vmware.api [None req-261745f1-111f-411a-bbb8-413ef8a82a9e tempest-ServerShowV254Test-1623148250 tempest-ServerShowV254Test-1623148250-project-member] Waiting for the task: (returnval){ [ 1494.538325] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]522adf93-090b-d52b-22e0-4409e4bb87fa" [ 1494.538325] env[62519]: _type = "Task" [ 1494.538325] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1494.551174] env[62519]: DEBUG oslo_vmware.api [None req-261745f1-111f-411a-bbb8-413ef8a82a9e tempest-ServerShowV254Test-1623148250 tempest-ServerShowV254Test-1623148250-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]522adf93-090b-d52b-22e0-4409e4bb87fa, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1494.612360] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-f492c075-bf4f-4007-8aab-a79cc6b2c698 tempest-ServerExternalEventsTest-627262165 tempest-ServerExternalEventsTest-627262165-project-member] [instance: f7d5c77d-6c78-4969-b511-2b03ab624c84] Unregistered the VM {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1494.612360] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-f492c075-bf4f-4007-8aab-a79cc6b2c698 tempest-ServerExternalEventsTest-627262165 tempest-ServerExternalEventsTest-627262165-project-member] [instance: f7d5c77d-6c78-4969-b511-2b03ab624c84] Deleting contents of the VM from datastore datastore1 {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1494.612360] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-f492c075-bf4f-4007-8aab-a79cc6b2c698 tempest-ServerExternalEventsTest-627262165 tempest-ServerExternalEventsTest-627262165-project-member] Deleting the datastore file [datastore1] f7d5c77d-6c78-4969-b511-2b03ab624c84 {{(pid=62519) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1494.612685] env[62519]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f8ed4b04-e687-483e-b850-b55fe9012bbb {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1494.624316] env[62519]: DEBUG oslo_vmware.api [None req-f492c075-bf4f-4007-8aab-a79cc6b2c698 tempest-ServerExternalEventsTest-627262165 tempest-ServerExternalEventsTest-627262165-project-member] Waiting for the task: (returnval){ [ 1494.624316] env[62519]: value = "task-1802184" [ 1494.624316] env[62519]: _type = "Task" [ 1494.624316] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1494.635076] env[62519]: DEBUG oslo_vmware.api [None req-f492c075-bf4f-4007-8aab-a79cc6b2c698 tempest-ServerExternalEventsTest-627262165 tempest-ServerExternalEventsTest-627262165-project-member] Task: {'id': task-1802184, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1494.867585] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-adec8240-df14-43a1-89b9-1c947d5f93e3 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1494.877369] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6fb6db7a-f59b-4eb3-94c2-6fe579b476bf {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1494.914860] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de455084-cd2e-4f76-b759-522f6adc093b {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1494.926518] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78f36dbd-3ac2-4291-b85d-600ced643c96 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1494.945285] env[62519]: DEBUG nova.compute.provider_tree [None req-d532afdb-73c2-4922-91b8-52d65f7c0d83 tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] Inventory has not changed in ProviderTree for provider: f8ca0d98-9158-4b85-ae0e-b106f966dd44 {{(pid=62519) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1494.960793] env[62519]: DEBUG oslo_vmware.api [None req-583dd43c-6768-4d96-8b92-a6ff282828ba tempest-DeleteServersAdminTestJSON-456585889 tempest-DeleteServersAdminTestJSON-456585889-project-member] Task: {'id': task-1802182, 'name': PowerOnVM_Task, 'duration_secs': 0.492872} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1494.961276] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-583dd43c-6768-4d96-8b92-a6ff282828ba tempest-DeleteServersAdminTestJSON-456585889 tempest-DeleteServersAdminTestJSON-456585889-project-member] [instance: 40c7a9b8-d541-464a-ba87-76cfc183ae31] Powered on the VM {{(pid=62519) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1494.961517] env[62519]: INFO nova.compute.manager [None req-583dd43c-6768-4d96-8b92-a6ff282828ba tempest-DeleteServersAdminTestJSON-456585889 tempest-DeleteServersAdminTestJSON-456585889-project-member] [instance: 40c7a9b8-d541-464a-ba87-76cfc183ae31] Took 8.40 seconds to spawn the instance on the hypervisor. 
[ 1494.961731] env[62519]: DEBUG nova.compute.manager [None req-583dd43c-6768-4d96-8b92-a6ff282828ba tempest-DeleteServersAdminTestJSON-456585889 tempest-DeleteServersAdminTestJSON-456585889-project-member] [instance: 40c7a9b8-d541-464a-ba87-76cfc183ae31] Checking state {{(pid=62519) _get_power_state /opt/stack/nova/nova/compute/manager.py:1799}} [ 1494.962588] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d79c6c1-a804-44d0-92a9-d1fe0e12155c {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1494.987464] env[62519]: DEBUG nova.network.neutron [None req-6d590639-68af-4fac-aa10-a83b048f703c tempest-ServersTestMultiNic-797441404 tempest-ServersTestMultiNic-797441404-project-member] [instance: cfefa7c8-4986-4ad0-ac20-8784ee44a737] Successfully updated port: 6a3b6bdf-835f-429c-982d-00028308b4b2 {{(pid=62519) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1494.991546] env[62519]: DEBUG nova.network.neutron [None req-a1c40e59-33e7-4dc4-932b-b210029267f9 tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] [instance: f59a31e4-7fb9-4de7-b35f-da811a305f85] Successfully updated port: d1be096c-c8b0-4dd6-98d9-8289626a2f66 {{(pid=62519) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1495.050336] env[62519]: DEBUG oslo_vmware.api [None req-261745f1-111f-411a-bbb8-413ef8a82a9e tempest-ServerShowV254Test-1623148250 tempest-ServerShowV254Test-1623148250-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]522adf93-090b-d52b-22e0-4409e4bb87fa, 'name': SearchDatastore_Task, 'duration_secs': 0.012661} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1495.050617] env[62519]: DEBUG oslo_concurrency.lockutils [None req-261745f1-111f-411a-bbb8-413ef8a82a9e tempest-ServerShowV254Test-1623148250 tempest-ServerShowV254Test-1623148250-project-member] Releasing lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1495.050928] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-261745f1-111f-411a-bbb8-413ef8a82a9e tempest-ServerShowV254Test-1623148250 tempest-ServerShowV254Test-1623148250-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk to [datastore1] 1462d213-3f9a-4c60-8056-0b68f20a4939/1462d213-3f9a-4c60-8056-0b68f20a4939.vmdk {{(pid=62519) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1495.051210] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a78704d0-a195-4335-b4dc-d113edab1a97 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1495.060824] env[62519]: DEBUG oslo_vmware.api [None req-261745f1-111f-411a-bbb8-413ef8a82a9e tempest-ServerShowV254Test-1623148250 tempest-ServerShowV254Test-1623148250-project-member] Waiting for the task: (returnval){ [ 1495.060824] env[62519]: value = "task-1802185" [ 1495.060824] env[62519]: _type = "Task" [ 1495.060824] env[62519]: } to complete. 
{{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1495.070757] env[62519]: DEBUG oslo_vmware.api [None req-261745f1-111f-411a-bbb8-413ef8a82a9e tempest-ServerShowV254Test-1623148250 tempest-ServerShowV254Test-1623148250-project-member] Task: {'id': task-1802185, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1495.071719] env[62519]: DEBUG nova.network.neutron [req-14090579-d8e5-4ee7-a054-2538fe764236 req-b8ca08c6-6b26-4e94-9610-f0e2b5f62294 service nova] [instance: cfefa7c8-4986-4ad0-ac20-8784ee44a737] Instance cache missing network info. {{(pid=62519) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1495.138042] env[62519]: DEBUG oslo_vmware.api [None req-f492c075-bf4f-4007-8aab-a79cc6b2c698 tempest-ServerExternalEventsTest-627262165 tempest-ServerExternalEventsTest-627262165-project-member] Task: {'id': task-1802184, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.146319} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1495.138442] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-f492c075-bf4f-4007-8aab-a79cc6b2c698 tempest-ServerExternalEventsTest-627262165 tempest-ServerExternalEventsTest-627262165-project-member] Deleted the datastore file {{(pid=62519) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1495.138740] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-f492c075-bf4f-4007-8aab-a79cc6b2c698 tempest-ServerExternalEventsTest-627262165 tempest-ServerExternalEventsTest-627262165-project-member] [instance: f7d5c77d-6c78-4969-b511-2b03ab624c84] Deleted contents of the VM from datastore datastore1 {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1495.139108] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-f492c075-bf4f-4007-8aab-a79cc6b2c698 tempest-ServerExternalEventsTest-627262165 tempest-ServerExternalEventsTest-627262165-project-member] [instance: f7d5c77d-6c78-4969-b511-2b03ab624c84] Instance destroyed {{(pid=62519) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1495.139356] env[62519]: INFO nova.compute.manager [None req-f492c075-bf4f-4007-8aab-a79cc6b2c698 tempest-ServerExternalEventsTest-627262165 tempest-ServerExternalEventsTest-627262165-project-member] [instance: f7d5c77d-6c78-4969-b511-2b03ab624c84] Took 1.17 seconds to destroy the instance on the hypervisor. [ 1495.139723] env[62519]: DEBUG oslo.service.loopingcall [None req-f492c075-bf4f-4007-8aab-a79cc6b2c698 tempest-ServerExternalEventsTest-627262165 tempest-ServerExternalEventsTest-627262165-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62519) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1495.140017] env[62519]: DEBUG nova.compute.manager [-] [instance: f7d5c77d-6c78-4969-b511-2b03ab624c84] Deallocating network for instance {{(pid=62519) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2297}} [ 1495.140232] env[62519]: DEBUG nova.network.neutron [-] [instance: f7d5c77d-6c78-4969-b511-2b03ab624c84] deallocate_for_instance() {{(pid=62519) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1495.179966] env[62519]: DEBUG nova.network.neutron [req-14090579-d8e5-4ee7-a054-2538fe764236 req-b8ca08c6-6b26-4e94-9610-f0e2b5f62294 service nova] [instance: cfefa7c8-4986-4ad0-ac20-8784ee44a737] Updating instance_info_cache with network_info: [] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1495.452021] env[62519]: DEBUG nova.scheduler.client.report [None req-d532afdb-73c2-4922-91b8-52d65f7c0d83 tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] Inventory has not changed for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1495.483020] env[62519]: INFO nova.compute.manager [None req-583dd43c-6768-4d96-8b92-a6ff282828ba tempest-DeleteServersAdminTestJSON-456585889 tempest-DeleteServersAdminTestJSON-456585889-project-member] [instance: 40c7a9b8-d541-464a-ba87-76cfc183ae31] Took 44.50 seconds to build instance. [ 1495.493643] env[62519]: DEBUG oslo_concurrency.lockutils [None req-a1c40e59-33e7-4dc4-932b-b210029267f9 tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Acquiring lock "refresh_cache-f59a31e4-7fb9-4de7-b35f-da811a305f85" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1495.495094] env[62519]: DEBUG oslo_concurrency.lockutils [None req-a1c40e59-33e7-4dc4-932b-b210029267f9 tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Acquired lock "refresh_cache-f59a31e4-7fb9-4de7-b35f-da811a305f85" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1495.495094] env[62519]: DEBUG nova.network.neutron [None req-a1c40e59-33e7-4dc4-932b-b210029267f9 tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] [instance: f59a31e4-7fb9-4de7-b35f-da811a305f85] Building network info cache for instance {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1495.572508] env[62519]: DEBUG oslo_vmware.api [None req-261745f1-111f-411a-bbb8-413ef8a82a9e tempest-ServerShowV254Test-1623148250 tempest-ServerShowV254Test-1623148250-project-member] Task: {'id': task-1802185, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.503955} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1495.574190] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-261745f1-111f-411a-bbb8-413ef8a82a9e tempest-ServerShowV254Test-1623148250 tempest-ServerShowV254Test-1623148250-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk to [datastore1] 1462d213-3f9a-4c60-8056-0b68f20a4939/1462d213-3f9a-4c60-8056-0b68f20a4939.vmdk {{(pid=62519) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1495.574190] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-261745f1-111f-411a-bbb8-413ef8a82a9e tempest-ServerShowV254Test-1623148250 tempest-ServerShowV254Test-1623148250-project-member] [instance: 1462d213-3f9a-4c60-8056-0b68f20a4939] Extending root virtual disk to 1048576 {{(pid=62519) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1495.575353] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-cd1c35a2-2387-4e63-9220-68d61baab0d7 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1495.586706] env[62519]: DEBUG oslo_vmware.api [None req-261745f1-111f-411a-bbb8-413ef8a82a9e tempest-ServerShowV254Test-1623148250 tempest-ServerShowV254Test-1623148250-project-member] Waiting for the task: (returnval){ [ 1495.586706] env[62519]: value = "task-1802186" [ 1495.586706] env[62519]: _type = "Task" [ 1495.586706] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1495.595981] env[62519]: DEBUG oslo_vmware.api [None req-261745f1-111f-411a-bbb8-413ef8a82a9e tempest-ServerShowV254Test-1623148250 tempest-ServerShowV254Test-1623148250-project-member] Task: {'id': task-1802186, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1495.683753] env[62519]: DEBUG oslo_concurrency.lockutils [req-14090579-d8e5-4ee7-a054-2538fe764236 req-b8ca08c6-6b26-4e94-9610-f0e2b5f62294 service nova] Releasing lock "refresh_cache-cfefa7c8-4986-4ad0-ac20-8784ee44a737" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1495.933246] env[62519]: DEBUG nova.network.neutron [-] [instance: f7d5c77d-6c78-4969-b511-2b03ab624c84] Updating instance_info_cache with network_info: [] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1495.955833] env[62519]: DEBUG oslo_concurrency.lockutils [None req-d532afdb-73c2-4922-91b8-52d65f7c0d83 tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.714s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1495.956894] env[62519]: DEBUG nova.compute.manager [None req-d532afdb-73c2-4922-91b8-52d65f7c0d83 tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] [instance: 1d4b14d3-8832-457e-aaed-462236555f57] Start building networks asynchronously for instance. 
{{(pid=62519) _build_resources /opt/stack/nova/nova/compute/manager.py:2838}} [ 1495.959413] env[62519]: DEBUG oslo_concurrency.lockutils [None req-c79e11bf-bb5b-47ac-8984-42477fd61aad tempest-ServerShowV247Test-1912217433 tempest-ServerShowV247Test-1912217433-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 30.237s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1495.959719] env[62519]: DEBUG nova.objects.instance [None req-c79e11bf-bb5b-47ac-8984-42477fd61aad tempest-ServerShowV247Test-1912217433 tempest-ServerShowV247Test-1912217433-project-member] Lazy-loading 'resources' on Instance uuid 49221ea3-d457-4cf5-97a9-9ae74c4e86fb {{(pid=62519) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1496.033441] env[62519]: DEBUG nova.network.neutron [None req-a1c40e59-33e7-4dc4-932b-b210029267f9 tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] [instance: f59a31e4-7fb9-4de7-b35f-da811a305f85] Instance cache missing network info. {{(pid=62519) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1496.095963] env[62519]: DEBUG oslo_vmware.api [None req-261745f1-111f-411a-bbb8-413ef8a82a9e tempest-ServerShowV254Test-1623148250 tempest-ServerShowV254Test-1623148250-project-member] Task: {'id': task-1802186, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.066767} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1496.096309] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-261745f1-111f-411a-bbb8-413ef8a82a9e tempest-ServerShowV254Test-1623148250 tempest-ServerShowV254Test-1623148250-project-member] [instance: 1462d213-3f9a-4c60-8056-0b68f20a4939] Extended root virtual disk {{(pid=62519) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1496.097087] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8d11ee7-f34c-433e-aede-606a5eab985c {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1496.118158] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-261745f1-111f-411a-bbb8-413ef8a82a9e tempest-ServerShowV254Test-1623148250 tempest-ServerShowV254Test-1623148250-project-member] [instance: 1462d213-3f9a-4c60-8056-0b68f20a4939] Reconfiguring VM instance instance-0000001b to attach disk [datastore1] 1462d213-3f9a-4c60-8056-0b68f20a4939/1462d213-3f9a-4c60-8056-0b68f20a4939.vmdk or device None with type sparse {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1496.121222] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-32e1234b-b4e4-4165-b65c-09ae505dc2be {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1496.143726] env[62519]: DEBUG oslo_vmware.api [None req-261745f1-111f-411a-bbb8-413ef8a82a9e tempest-ServerShowV254Test-1623148250 tempest-ServerShowV254Test-1623148250-project-member] Waiting for the task: (returnval){ [ 1496.143726] env[62519]: value = "task-1802187" [ 1496.143726] env[62519]: _type = "Task" [ 1496.143726] env[62519]: } to complete. 
{{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1496.156779] env[62519]: DEBUG oslo_vmware.api [None req-261745f1-111f-411a-bbb8-413ef8a82a9e tempest-ServerShowV254Test-1623148250 tempest-ServerShowV254Test-1623148250-project-member] Task: {'id': task-1802187, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1496.219143] env[62519]: DEBUG nova.network.neutron [None req-a1c40e59-33e7-4dc4-932b-b210029267f9 tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] [instance: f59a31e4-7fb9-4de7-b35f-da811a305f85] Updating instance_info_cache with network_info: [{"id": "d1be096c-c8b0-4dd6-98d9-8289626a2f66", "address": "fa:16:3e:6b:57:e2", "network": {"id": "f59f09e8-6201-4c07-9809-ef4c213de15f", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-906781315-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "555fa612766f4b5fa173664ca3fa496c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8bc3fa06-9d5b-4ab1-8113-6ed8942d23b6", "external-id": "nsx-vlan-transportzone-72", "segmentation_id": 72, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd1be096c-c8", "ovs_interfaceid": "d1be096c-c8b0-4dd6-98d9-8289626a2f66", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1496.435886] env[62519]: INFO nova.compute.manager [-] [instance: f7d5c77d-6c78-4969-b511-2b03ab624c84] Took 1.30 seconds to deallocate network for instance. [ 1496.463509] env[62519]: DEBUG nova.compute.utils [None req-d532afdb-73c2-4922-91b8-52d65f7c0d83 tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] Using /dev/sd instead of None {{(pid=62519) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1496.468052] env[62519]: DEBUG nova.compute.manager [None req-d532afdb-73c2-4922-91b8-52d65f7c0d83 tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] [instance: 1d4b14d3-8832-457e-aaed-462236555f57] Allocating IP information in the background. 
{{(pid=62519) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1989}} [ 1496.468052] env[62519]: DEBUG nova.network.neutron [None req-d532afdb-73c2-4922-91b8-52d65f7c0d83 tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] [instance: 1d4b14d3-8832-457e-aaed-462236555f57] allocate_for_instance() {{(pid=62519) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1496.522903] env[62519]: DEBUG nova.policy [None req-d532afdb-73c2-4922-91b8-52d65f7c0d83 tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '936d1af31c7440ef8d487ba090c603f3', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '48df3b387a6f43459978ee37df8d8a8f', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62519) authorize /opt/stack/nova/nova/policy.py:192}} [ 1496.561596] env[62519]: DEBUG nova.compute.manager [req-f38d4905-f743-4528-8a49-921debc75aa1 req-4b8fb216-9d47-4873-b90a-5eff3d5e4d8c service nova] [instance: cfefa7c8-4986-4ad0-ac20-8784ee44a737] Received event network-vif-plugged-6a3b6bdf-835f-429c-982d-00028308b4b2 {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1496.561863] env[62519]: DEBUG oslo_concurrency.lockutils [req-f38d4905-f743-4528-8a49-921debc75aa1 req-4b8fb216-9d47-4873-b90a-5eff3d5e4d8c service nova] Acquiring lock "cfefa7c8-4986-4ad0-ac20-8784ee44a737-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1496.562122] env[62519]: DEBUG oslo_concurrency.lockutils [req-f38d4905-f743-4528-8a49-921debc75aa1 req-4b8fb216-9d47-4873-b90a-5eff3d5e4d8c service nova] Lock "cfefa7c8-4986-4ad0-ac20-8784ee44a737-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1496.562269] env[62519]: DEBUG oslo_concurrency.lockutils [req-f38d4905-f743-4528-8a49-921debc75aa1 req-4b8fb216-9d47-4873-b90a-5eff3d5e4d8c service nova] Lock "cfefa7c8-4986-4ad0-ac20-8784ee44a737-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1496.562431] env[62519]: DEBUG nova.compute.manager [req-f38d4905-f743-4528-8a49-921debc75aa1 req-4b8fb216-9d47-4873-b90a-5eff3d5e4d8c service nova] [instance: cfefa7c8-4986-4ad0-ac20-8784ee44a737] No waiting events found dispatching network-vif-plugged-6a3b6bdf-835f-429c-982d-00028308b4b2 {{(pid=62519) pop_instance_event /opt/stack/nova/nova/compute/manager.py:323}} [ 1496.562583] env[62519]: WARNING nova.compute.manager [req-f38d4905-f743-4528-8a49-921debc75aa1 req-4b8fb216-9d47-4873-b90a-5eff3d5e4d8c service nova] [instance: cfefa7c8-4986-4ad0-ac20-8784ee44a737] Received unexpected event network-vif-plugged-6a3b6bdf-835f-429c-982d-00028308b4b2 for instance with vm_state building and task_state spawning. 
[ 1496.562827] env[62519]: DEBUG nova.compute.manager [req-f38d4905-f743-4528-8a49-921debc75aa1 req-4b8fb216-9d47-4873-b90a-5eff3d5e4d8c service nova] [instance: f59a31e4-7fb9-4de7-b35f-da811a305f85] Received event network-vif-plugged-d1be096c-c8b0-4dd6-98d9-8289626a2f66 {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1496.562880] env[62519]: DEBUG oslo_concurrency.lockutils [req-f38d4905-f743-4528-8a49-921debc75aa1 req-4b8fb216-9d47-4873-b90a-5eff3d5e4d8c service nova] Acquiring lock "f59a31e4-7fb9-4de7-b35f-da811a305f85-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1496.563064] env[62519]: DEBUG oslo_concurrency.lockutils [req-f38d4905-f743-4528-8a49-921debc75aa1 req-4b8fb216-9d47-4873-b90a-5eff3d5e4d8c service nova] Lock "f59a31e4-7fb9-4de7-b35f-da811a305f85-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1496.563241] env[62519]: DEBUG oslo_concurrency.lockutils [req-f38d4905-f743-4528-8a49-921debc75aa1 req-4b8fb216-9d47-4873-b90a-5eff3d5e4d8c service nova] Lock "f59a31e4-7fb9-4de7-b35f-da811a305f85-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1496.563393] env[62519]: DEBUG nova.compute.manager [req-f38d4905-f743-4528-8a49-921debc75aa1 req-4b8fb216-9d47-4873-b90a-5eff3d5e4d8c service nova] [instance: f59a31e4-7fb9-4de7-b35f-da811a305f85] No waiting events found dispatching network-vif-plugged-d1be096c-c8b0-4dd6-98d9-8289626a2f66 {{(pid=62519) pop_instance_event /opt/stack/nova/nova/compute/manager.py:323}} [ 1496.563540] env[62519]: WARNING nova.compute.manager [req-f38d4905-f743-4528-8a49-921debc75aa1 req-4b8fb216-9d47-4873-b90a-5eff3d5e4d8c service nova] [instance: f59a31e4-7fb9-4de7-b35f-da811a305f85] Received unexpected event network-vif-plugged-d1be096c-c8b0-4dd6-98d9-8289626a2f66 for instance with vm_state building and task_state spawning. [ 1496.563689] env[62519]: DEBUG nova.compute.manager [req-f38d4905-f743-4528-8a49-921debc75aa1 req-4b8fb216-9d47-4873-b90a-5eff3d5e4d8c service nova] [instance: cfefa7c8-4986-4ad0-ac20-8784ee44a737] Received event network-changed-6a3b6bdf-835f-429c-982d-00028308b4b2 {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1496.563850] env[62519]: DEBUG nova.compute.manager [req-f38d4905-f743-4528-8a49-921debc75aa1 req-4b8fb216-9d47-4873-b90a-5eff3d5e4d8c service nova] [instance: cfefa7c8-4986-4ad0-ac20-8784ee44a737] Refreshing instance network info cache due to event network-changed-6a3b6bdf-835f-429c-982d-00028308b4b2. 
{{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11628}} [ 1496.564034] env[62519]: DEBUG oslo_concurrency.lockutils [req-f38d4905-f743-4528-8a49-921debc75aa1 req-4b8fb216-9d47-4873-b90a-5eff3d5e4d8c service nova] Acquiring lock "refresh_cache-cfefa7c8-4986-4ad0-ac20-8784ee44a737" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1496.564173] env[62519]: DEBUG oslo_concurrency.lockutils [req-f38d4905-f743-4528-8a49-921debc75aa1 req-4b8fb216-9d47-4873-b90a-5eff3d5e4d8c service nova] Acquired lock "refresh_cache-cfefa7c8-4986-4ad0-ac20-8784ee44a737" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1496.564327] env[62519]: DEBUG nova.network.neutron [req-f38d4905-f743-4528-8a49-921debc75aa1 req-4b8fb216-9d47-4873-b90a-5eff3d5e4d8c service nova] [instance: cfefa7c8-4986-4ad0-ac20-8784ee44a737] Refreshing network info cache for port 6a3b6bdf-835f-429c-982d-00028308b4b2 {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1496.660169] env[62519]: DEBUG oslo_vmware.api [None req-261745f1-111f-411a-bbb8-413ef8a82a9e tempest-ServerShowV254Test-1623148250 tempest-ServerShowV254Test-1623148250-project-member] Task: {'id': task-1802187, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1496.688403] env[62519]: DEBUG oslo_concurrency.lockutils [None req-ed5a0cc8-9947-43e0-a22d-d24f2fbe8a1c tempest-DeleteServersAdminTestJSON-456585889 tempest-DeleteServersAdminTestJSON-456585889-project-member] Acquiring lock "40c7a9b8-d541-464a-ba87-76cfc183ae31" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1496.723638] env[62519]: DEBUG oslo_concurrency.lockutils [None req-a1c40e59-33e7-4dc4-932b-b210029267f9 tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Releasing lock "refresh_cache-f59a31e4-7fb9-4de7-b35f-da811a305f85" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1496.724018] env[62519]: DEBUG nova.compute.manager [None req-a1c40e59-33e7-4dc4-932b-b210029267f9 tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] [instance: f59a31e4-7fb9-4de7-b35f-da811a305f85] Instance network_info: |[{"id": "d1be096c-c8b0-4dd6-98d9-8289626a2f66", "address": "fa:16:3e:6b:57:e2", "network": {"id": "f59f09e8-6201-4c07-9809-ef4c213de15f", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-906781315-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "555fa612766f4b5fa173664ca3fa496c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8bc3fa06-9d5b-4ab1-8113-6ed8942d23b6", "external-id": "nsx-vlan-transportzone-72", "segmentation_id": 72, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd1be096c-c8", "ovs_interfaceid": 
"d1be096c-c8b0-4dd6-98d9-8289626a2f66", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62519) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2004}} [ 1496.724790] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-a1c40e59-33e7-4dc4-932b-b210029267f9 tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] [instance: f59a31e4-7fb9-4de7-b35f-da811a305f85] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:6b:57:e2', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '8bc3fa06-9d5b-4ab1-8113-6ed8942d23b6', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'd1be096c-c8b0-4dd6-98d9-8289626a2f66', 'vif_model': 'vmxnet3'}] {{(pid=62519) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1496.734300] env[62519]: DEBUG oslo.service.loopingcall [None req-a1c40e59-33e7-4dc4-932b-b210029267f9 tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62519) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1496.736278] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f59a31e4-7fb9-4de7-b35f-da811a305f85] Creating VM on the ESX host {{(pid=62519) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1496.736571] env[62519]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b2840da2-859b-4d69-9000-e18a15f0f2d0 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1496.768129] env[62519]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1496.768129] env[62519]: value = "task-1802188" [ 1496.768129] env[62519]: _type = "Task" [ 1496.768129] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1496.776898] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1802188, 'name': CreateVM_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1496.946155] env[62519]: DEBUG oslo_concurrency.lockutils [None req-f492c075-bf4f-4007-8aab-a79cc6b2c698 tempest-ServerExternalEventsTest-627262165 tempest-ServerExternalEventsTest-627262165-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1496.970714] env[62519]: DEBUG nova.compute.manager [None req-d532afdb-73c2-4922-91b8-52d65f7c0d83 tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] [instance: 1d4b14d3-8832-457e-aaed-462236555f57] Start building block device mappings for instance. 
{{(pid=62519) _build_resources /opt/stack/nova/nova/compute/manager.py:2873}} [ 1496.998653] env[62519]: DEBUG oslo_concurrency.lockutils [None req-583dd43c-6768-4d96-8b92-a6ff282828ba tempest-DeleteServersAdminTestJSON-456585889 tempest-DeleteServersAdminTestJSON-456585889-project-member] Lock "40c7a9b8-d541-464a-ba87-76cfc183ae31" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 78.160s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1497.000207] env[62519]: DEBUG oslo_concurrency.lockutils [None req-ed5a0cc8-9947-43e0-a22d-d24f2fbe8a1c tempest-DeleteServersAdminTestJSON-456585889 tempest-DeleteServersAdminTestJSON-456585889-project-member] Lock "40c7a9b8-d541-464a-ba87-76cfc183ae31" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.312s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1497.000375] env[62519]: DEBUG oslo_concurrency.lockutils [None req-ed5a0cc8-9947-43e0-a22d-d24f2fbe8a1c tempest-DeleteServersAdminTestJSON-456585889 tempest-DeleteServersAdminTestJSON-456585889-project-member] Acquiring lock "40c7a9b8-d541-464a-ba87-76cfc183ae31-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1497.000581] env[62519]: DEBUG oslo_concurrency.lockutils [None req-ed5a0cc8-9947-43e0-a22d-d24f2fbe8a1c tempest-DeleteServersAdminTestJSON-456585889 tempest-DeleteServersAdminTestJSON-456585889-project-member] Lock "40c7a9b8-d541-464a-ba87-76cfc183ae31-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1497.000746] env[62519]: DEBUG oslo_concurrency.lockutils [None req-ed5a0cc8-9947-43e0-a22d-d24f2fbe8a1c tempest-DeleteServersAdminTestJSON-456585889 tempest-DeleteServersAdminTestJSON-456585889-project-member] Lock "40c7a9b8-d541-464a-ba87-76cfc183ae31-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1497.006135] env[62519]: INFO nova.compute.manager [None req-ed5a0cc8-9947-43e0-a22d-d24f2fbe8a1c tempest-DeleteServersAdminTestJSON-456585889 tempest-DeleteServersAdminTestJSON-456585889-project-member] [instance: 40c7a9b8-d541-464a-ba87-76cfc183ae31] Terminating instance [ 1497.108761] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-789ec068-b7f2-4a53-9d75-cd6fbe149cd6 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1497.123446] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-522a3797-a3c6-4501-9cce-3433f7a28c3e {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1497.132258] env[62519]: DEBUG nova.network.neutron [req-f38d4905-f743-4528-8a49-921debc75aa1 req-4b8fb216-9d47-4873-b90a-5eff3d5e4d8c service nova] [instance: cfefa7c8-4986-4ad0-ac20-8784ee44a737] Instance cache missing network info. 
{{(pid=62519) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1497.165929] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49b19ad6-29d1-433f-ae71-cd6869c50cc5 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1497.179895] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b931fee7-5299-4308-92f7-1fcfcf375ddc {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1497.183728] env[62519]: DEBUG oslo_vmware.api [None req-261745f1-111f-411a-bbb8-413ef8a82a9e tempest-ServerShowV254Test-1623148250 tempest-ServerShowV254Test-1623148250-project-member] Task: {'id': task-1802187, 'name': ReconfigVM_Task, 'duration_secs': 0.814914} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1497.184572] env[62519]: DEBUG nova.network.neutron [None req-d532afdb-73c2-4922-91b8-52d65f7c0d83 tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] [instance: 1d4b14d3-8832-457e-aaed-462236555f57] Successfully created port: 54d71145-cb5f-4e36-b3dd-2905575f9742 {{(pid=62519) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1497.186475] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-261745f1-111f-411a-bbb8-413ef8a82a9e tempest-ServerShowV254Test-1623148250 tempest-ServerShowV254Test-1623148250-project-member] [instance: 1462d213-3f9a-4c60-8056-0b68f20a4939] Reconfigured VM instance instance-0000001b to attach disk [datastore1] 1462d213-3f9a-4c60-8056-0b68f20a4939/1462d213-3f9a-4c60-8056-0b68f20a4939.vmdk or device None with type sparse {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1497.187414] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-b37924e0-4e24-4470-8ad5-d01fb50e61ed {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1497.197975] env[62519]: DEBUG nova.compute.provider_tree [None req-c79e11bf-bb5b-47ac-8984-42477fd61aad tempest-ServerShowV247Test-1912217433 tempest-ServerShowV247Test-1912217433-project-member] Updating inventory in ProviderTree for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 157, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1497.200457] env[62519]: DEBUG oslo_vmware.api [None req-261745f1-111f-411a-bbb8-413ef8a82a9e tempest-ServerShowV254Test-1623148250 tempest-ServerShowV254Test-1623148250-project-member] Waiting for the task: (returnval){ [ 1497.200457] env[62519]: value = "task-1802189" [ 1497.200457] env[62519]: _type = "Task" [ 1497.200457] env[62519]: } to complete. 
{{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1497.210398] env[62519]: DEBUG oslo_vmware.api [None req-261745f1-111f-411a-bbb8-413ef8a82a9e tempest-ServerShowV254Test-1623148250 tempest-ServerShowV254Test-1623148250-project-member] Task: {'id': task-1802189, 'name': Rename_Task} progress is 6%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1497.278684] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1802188, 'name': CreateVM_Task, 'duration_secs': 0.408427} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1497.278901] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f59a31e4-7fb9-4de7-b35f-da811a305f85] Created VM on the ESX host {{(pid=62519) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1497.279601] env[62519]: DEBUG oslo_concurrency.lockutils [None req-a1c40e59-33e7-4dc4-932b-b210029267f9 tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1497.279760] env[62519]: DEBUG oslo_concurrency.lockutils [None req-a1c40e59-33e7-4dc4-932b-b210029267f9 tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Acquired lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1497.280089] env[62519]: DEBUG oslo_concurrency.lockutils [None req-a1c40e59-33e7-4dc4-932b-b210029267f9 tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1497.280439] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fe4b9d10-281b-482a-8b3d-63ad5778a96e {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1497.285593] env[62519]: DEBUG oslo_vmware.api [None req-a1c40e59-33e7-4dc4-932b-b210029267f9 tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Waiting for the task: (returnval){ [ 1497.285593] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]52663538-d0f1-8928-9b1e-f9e68cbf3399" [ 1497.285593] env[62519]: _type = "Task" [ 1497.285593] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1497.294042] env[62519]: DEBUG oslo_vmware.api [None req-a1c40e59-33e7-4dc4-932b-b210029267f9 tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52663538-d0f1-8928-9b1e-f9e68cbf3399, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1497.331590] env[62519]: DEBUG nova.network.neutron [req-f38d4905-f743-4528-8a49-921debc75aa1 req-4b8fb216-9d47-4873-b90a-5eff3d5e4d8c service nova] [instance: cfefa7c8-4986-4ad0-ac20-8784ee44a737] Updating instance_info_cache with network_info: [] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1497.400190] env[62519]: DEBUG nova.network.neutron [None req-6d590639-68af-4fac-aa10-a83b048f703c tempest-ServersTestMultiNic-797441404 tempest-ServersTestMultiNic-797441404-project-member] [instance: cfefa7c8-4986-4ad0-ac20-8784ee44a737] Successfully updated port: 7d9ab028-27a4-47e2-8a46-08e1672bd6fb {{(pid=62519) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1497.503606] env[62519]: DEBUG nova.compute.manager [None req-b9c19239-83b3-40a1-b398-a911c392dcc2 tempest-InstanceActionsTestJSON-366970633 tempest-InstanceActionsTestJSON-366970633-project-member] [instance: f19c860f-736a-4783-8ef5-8262040e53a3] Starting instance... {{(pid=62519) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2441}} [ 1497.509271] env[62519]: DEBUG nova.compute.manager [None req-ed5a0cc8-9947-43e0-a22d-d24f2fbe8a1c tempest-DeleteServersAdminTestJSON-456585889 tempest-DeleteServersAdminTestJSON-456585889-project-member] [instance: 40c7a9b8-d541-464a-ba87-76cfc183ae31] Start destroying the instance on the hypervisor. {{(pid=62519) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3166}} [ 1497.509478] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-ed5a0cc8-9947-43e0-a22d-d24f2fbe8a1c tempest-DeleteServersAdminTestJSON-456585889 tempest-DeleteServersAdminTestJSON-456585889-project-member] [instance: 40c7a9b8-d541-464a-ba87-76cfc183ae31] Destroying instance {{(pid=62519) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1497.510454] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-374b111a-7d83-4c4a-80b6-d0824b6009f2 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1497.520939] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-ed5a0cc8-9947-43e0-a22d-d24f2fbe8a1c tempest-DeleteServersAdminTestJSON-456585889 tempest-DeleteServersAdminTestJSON-456585889-project-member] [instance: 40c7a9b8-d541-464a-ba87-76cfc183ae31] Powering off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1497.521863] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-999bb658-34bf-4230-8d62-c5b4c2a58fb3 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1497.530080] env[62519]: DEBUG oslo_vmware.api [None req-ed5a0cc8-9947-43e0-a22d-d24f2fbe8a1c tempest-DeleteServersAdminTestJSON-456585889 tempest-DeleteServersAdminTestJSON-456585889-project-member] Waiting for the task: (returnval){ [ 1497.530080] env[62519]: value = "task-1802190" [ 1497.530080] env[62519]: _type = "Task" [ 1497.530080] env[62519]: } to complete. 
{{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1497.539800] env[62519]: DEBUG oslo_vmware.api [None req-ed5a0cc8-9947-43e0-a22d-d24f2fbe8a1c tempest-DeleteServersAdminTestJSON-456585889 tempest-DeleteServersAdminTestJSON-456585889-project-member] Task: {'id': task-1802190, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1497.714169] env[62519]: DEBUG oslo_vmware.api [None req-261745f1-111f-411a-bbb8-413ef8a82a9e tempest-ServerShowV254Test-1623148250 tempest-ServerShowV254Test-1623148250-project-member] Task: {'id': task-1802189, 'name': Rename_Task, 'duration_secs': 0.174578} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1497.714363] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-261745f1-111f-411a-bbb8-413ef8a82a9e tempest-ServerShowV254Test-1623148250 tempest-ServerShowV254Test-1623148250-project-member] [instance: 1462d213-3f9a-4c60-8056-0b68f20a4939] Powering on the VM {{(pid=62519) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1497.714614] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-3e1013e3-28b8-4981-baa4-2d84e4b26723 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1497.722473] env[62519]: DEBUG oslo_vmware.api [None req-261745f1-111f-411a-bbb8-413ef8a82a9e tempest-ServerShowV254Test-1623148250 tempest-ServerShowV254Test-1623148250-project-member] Waiting for the task: (returnval){ [ 1497.722473] env[62519]: value = "task-1802191" [ 1497.722473] env[62519]: _type = "Task" [ 1497.722473] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1497.723389] env[62519]: ERROR nova.scheduler.client.report [None req-c79e11bf-bb5b-47ac-8984-42477fd61aad tempest-ServerShowV247Test-1912217433 tempest-ServerShowV247Test-1912217433-project-member] [req-62b7873b-81ac-4769-9c24-37799193b51e] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 157, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID f8ca0d98-9158-4b85-ae0e-b106f966dd44. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-62b7873b-81ac-4769-9c24-37799193b51e"}]} [ 1497.734154] env[62519]: DEBUG oslo_vmware.api [None req-261745f1-111f-411a-bbb8-413ef8a82a9e tempest-ServerShowV254Test-1623148250 tempest-ServerShowV254Test-1623148250-project-member] Task: {'id': task-1802191, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1497.739370] env[62519]: DEBUG nova.scheduler.client.report [None req-c79e11bf-bb5b-47ac-8984-42477fd61aad tempest-ServerShowV247Test-1912217433 tempest-ServerShowV247Test-1912217433-project-member] Refreshing inventories for resource provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 {{(pid=62519) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 1497.754816] env[62519]: DEBUG nova.scheduler.client.report [None req-c79e11bf-bb5b-47ac-8984-42477fd61aad tempest-ServerShowV247Test-1912217433 tempest-ServerShowV247Test-1912217433-project-member] Updating ProviderTree inventory for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 1497.755141] env[62519]: DEBUG nova.compute.provider_tree [None req-c79e11bf-bb5b-47ac-8984-42477fd61aad tempest-ServerShowV247Test-1912217433 tempest-ServerShowV247Test-1912217433-project-member] Updating inventory in ProviderTree for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1497.769192] env[62519]: DEBUG nova.scheduler.client.report [None req-c79e11bf-bb5b-47ac-8984-42477fd61aad tempest-ServerShowV247Test-1912217433 tempest-ServerShowV247Test-1912217433-project-member] Refreshing aggregate associations for resource provider f8ca0d98-9158-4b85-ae0e-b106f966dd44, aggregates: None {{(pid=62519) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 1497.791087] env[62519]: DEBUG nova.scheduler.client.report [None req-c79e11bf-bb5b-47ac-8984-42477fd61aad tempest-ServerShowV247Test-1912217433 tempest-ServerShowV247Test-1912217433-project-member] Refreshing trait associations for resource provider f8ca0d98-9158-4b85-ae0e-b106f966dd44, traits: COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NODE,COMPUTE_SAME_HOST_COLD_MIGRATE,HW_ARCH_X86_64,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_VMDK {{(pid=62519) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 1497.799983] env[62519]: DEBUG oslo_vmware.api [None req-a1c40e59-33e7-4dc4-932b-b210029267f9 tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52663538-d0f1-8928-9b1e-f9e68cbf3399, 'name': SearchDatastore_Task, 'duration_secs': 0.015627} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1497.800368] env[62519]: DEBUG oslo_concurrency.lockutils [None req-a1c40e59-33e7-4dc4-932b-b210029267f9 tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Releasing lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1497.801299] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-a1c40e59-33e7-4dc4-932b-b210029267f9 tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] [instance: f59a31e4-7fb9-4de7-b35f-da811a305f85] Processing image 15793716-f1d9-4a86-9030-717adf498693 {{(pid=62519) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1497.801299] env[62519]: DEBUG oslo_concurrency.lockutils [None req-a1c40e59-33e7-4dc4-932b-b210029267f9 tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1497.801299] env[62519]: DEBUG oslo_concurrency.lockutils [None req-a1c40e59-33e7-4dc4-932b-b210029267f9 tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Acquired lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1497.801299] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-a1c40e59-33e7-4dc4-932b-b210029267f9 tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62519) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1497.801299] env[62519]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-1ed7624a-d991-4afe-97fd-18315963c1e5 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1497.811410] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-a1c40e59-33e7-4dc4-932b-b210029267f9 tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62519) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1497.811558] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-a1c40e59-33e7-4dc4-932b-b210029267f9 tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62519) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1497.812426] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9982d001-ff4c-4986-8748-73c7bb63927b {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1497.822031] env[62519]: DEBUG oslo_vmware.api [None req-a1c40e59-33e7-4dc4-932b-b210029267f9 tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Waiting for the task: (returnval){ [ 1497.822031] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]52ab01d5-0cbc-d5fe-1954-c079c38fd9f2" [ 1497.822031] env[62519]: _type = "Task" [ 1497.822031] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1497.832709] env[62519]: DEBUG oslo_vmware.api [None req-a1c40e59-33e7-4dc4-932b-b210029267f9 tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52ab01d5-0cbc-d5fe-1954-c079c38fd9f2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1497.836020] env[62519]: DEBUG oslo_concurrency.lockutils [req-f38d4905-f743-4528-8a49-921debc75aa1 req-4b8fb216-9d47-4873-b90a-5eff3d5e4d8c service nova] Releasing lock "refresh_cache-cfefa7c8-4986-4ad0-ac20-8784ee44a737" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1497.836020] env[62519]: DEBUG nova.compute.manager [req-f38d4905-f743-4528-8a49-921debc75aa1 req-4b8fb216-9d47-4873-b90a-5eff3d5e4d8c service nova] [instance: f59a31e4-7fb9-4de7-b35f-da811a305f85] Received event network-changed-d1be096c-c8b0-4dd6-98d9-8289626a2f66 {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1497.836178] env[62519]: DEBUG nova.compute.manager [req-f38d4905-f743-4528-8a49-921debc75aa1 req-4b8fb216-9d47-4873-b90a-5eff3d5e4d8c service nova] [instance: f59a31e4-7fb9-4de7-b35f-da811a305f85] Refreshing instance network info cache due to event network-changed-d1be096c-c8b0-4dd6-98d9-8289626a2f66. 
{{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11628}} [ 1497.836370] env[62519]: DEBUG oslo_concurrency.lockutils [req-f38d4905-f743-4528-8a49-921debc75aa1 req-4b8fb216-9d47-4873-b90a-5eff3d5e4d8c service nova] Acquiring lock "refresh_cache-f59a31e4-7fb9-4de7-b35f-da811a305f85" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1497.836502] env[62519]: DEBUG oslo_concurrency.lockutils [req-f38d4905-f743-4528-8a49-921debc75aa1 req-4b8fb216-9d47-4873-b90a-5eff3d5e4d8c service nova] Acquired lock "refresh_cache-f59a31e4-7fb9-4de7-b35f-da811a305f85" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1497.836655] env[62519]: DEBUG nova.network.neutron [req-f38d4905-f743-4528-8a49-921debc75aa1 req-4b8fb216-9d47-4873-b90a-5eff3d5e4d8c service nova] [instance: f59a31e4-7fb9-4de7-b35f-da811a305f85] Refreshing network info cache for port d1be096c-c8b0-4dd6-98d9-8289626a2f66 {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1497.906682] env[62519]: DEBUG oslo_concurrency.lockutils [None req-6d590639-68af-4fac-aa10-a83b048f703c tempest-ServersTestMultiNic-797441404 tempest-ServersTestMultiNic-797441404-project-member] Acquiring lock "refresh_cache-cfefa7c8-4986-4ad0-ac20-8784ee44a737" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1497.906844] env[62519]: DEBUG oslo_concurrency.lockutils [None req-6d590639-68af-4fac-aa10-a83b048f703c tempest-ServersTestMultiNic-797441404 tempest-ServersTestMultiNic-797441404-project-member] Acquired lock "refresh_cache-cfefa7c8-4986-4ad0-ac20-8784ee44a737" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1497.906986] env[62519]: DEBUG nova.network.neutron [None req-6d590639-68af-4fac-aa10-a83b048f703c tempest-ServersTestMultiNic-797441404 tempest-ServersTestMultiNic-797441404-project-member] [instance: cfefa7c8-4986-4ad0-ac20-8784ee44a737] Building network info cache for instance {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1497.981437] env[62519]: DEBUG nova.compute.manager [None req-d532afdb-73c2-4922-91b8-52d65f7c0d83 tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] [instance: 1d4b14d3-8832-457e-aaed-462236555f57] Start spawning the instance on the hypervisor. 
{{(pid=62519) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2647}} [ 1498.008566] env[62519]: DEBUG nova.virt.hardware [None req-d532afdb-73c2-4922-91b8-52d65f7c0d83 tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T07:56:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T07:55:55Z,direct_url=,disk_format='vmdk',id=15793716-f1d9-4a86-9030-717adf498693,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4069337684d348e0a1ab4eb6a1a2b14d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T07:55:56Z,virtual_size=,visibility=), allow threads: False {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1498.008812] env[62519]: DEBUG nova.virt.hardware [None req-d532afdb-73c2-4922-91b8-52d65f7c0d83 tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] Flavor limits 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1498.008964] env[62519]: DEBUG nova.virt.hardware [None req-d532afdb-73c2-4922-91b8-52d65f7c0d83 tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] Image limits 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1498.009158] env[62519]: DEBUG nova.virt.hardware [None req-d532afdb-73c2-4922-91b8-52d65f7c0d83 tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] Flavor pref 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1498.009298] env[62519]: DEBUG nova.virt.hardware [None req-d532afdb-73c2-4922-91b8-52d65f7c0d83 tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] Image pref 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1498.009440] env[62519]: DEBUG nova.virt.hardware [None req-d532afdb-73c2-4922-91b8-52d65f7c0d83 tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1498.009879] env[62519]: DEBUG nova.virt.hardware [None req-d532afdb-73c2-4922-91b8-52d65f7c0d83 tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1498.010043] env[62519]: DEBUG nova.virt.hardware [None req-d532afdb-73c2-4922-91b8-52d65f7c0d83 tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62519) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1498.010212] 
env[62519]: DEBUG nova.virt.hardware [None req-d532afdb-73c2-4922-91b8-52d65f7c0d83 tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] Got 1 possible topologies {{(pid=62519) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1498.010370] env[62519]: DEBUG nova.virt.hardware [None req-d532afdb-73c2-4922-91b8-52d65f7c0d83 tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1498.010539] env[62519]: DEBUG nova.virt.hardware [None req-d532afdb-73c2-4922-91b8-52d65f7c0d83 tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1498.015364] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d86193c9-dd4b-4e1c-8418-9e52e411a5aa {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1498.026367] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c08322c-3710-4bf6-a036-1e065e601220 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1498.032050] env[62519]: DEBUG oslo_concurrency.lockutils [None req-b9c19239-83b3-40a1-b398-a911c392dcc2 tempest-InstanceActionsTestJSON-366970633 tempest-InstanceActionsTestJSON-366970633-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1498.050826] env[62519]: DEBUG oslo_vmware.api [None req-ed5a0cc8-9947-43e0-a22d-d24f2fbe8a1c tempest-DeleteServersAdminTestJSON-456585889 tempest-DeleteServersAdminTestJSON-456585889-project-member] Task: {'id': task-1802190, 'name': PowerOffVM_Task, 'duration_secs': 0.399375} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1498.053530] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-ed5a0cc8-9947-43e0-a22d-d24f2fbe8a1c tempest-DeleteServersAdminTestJSON-456585889 tempest-DeleteServersAdminTestJSON-456585889-project-member] [instance: 40c7a9b8-d541-464a-ba87-76cfc183ae31] Powered off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1498.053530] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-ed5a0cc8-9947-43e0-a22d-d24f2fbe8a1c tempest-DeleteServersAdminTestJSON-456585889 tempest-DeleteServersAdminTestJSON-456585889-project-member] [instance: 40c7a9b8-d541-464a-ba87-76cfc183ae31] Unregistering the VM {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1498.054522] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ce791bbf-94a5-40d0-a64f-45f632a8e35d {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1498.151083] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-ed5a0cc8-9947-43e0-a22d-d24f2fbe8a1c tempest-DeleteServersAdminTestJSON-456585889 tempest-DeleteServersAdminTestJSON-456585889-project-member] [instance: 40c7a9b8-d541-464a-ba87-76cfc183ae31] Unregistered the VM {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1498.151468] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-ed5a0cc8-9947-43e0-a22d-d24f2fbe8a1c tempest-DeleteServersAdminTestJSON-456585889 tempest-DeleteServersAdminTestJSON-456585889-project-member] [instance: 40c7a9b8-d541-464a-ba87-76cfc183ae31] Deleting contents of the VM from datastore datastore1 {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1498.151724] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-ed5a0cc8-9947-43e0-a22d-d24f2fbe8a1c tempest-DeleteServersAdminTestJSON-456585889 tempest-DeleteServersAdminTestJSON-456585889-project-member] Deleting the datastore file [datastore1] 40c7a9b8-d541-464a-ba87-76cfc183ae31 {{(pid=62519) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1498.155047] env[62519]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-1d9d7bee-2736-4664-a9e2-c278adca804a {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1498.162698] env[62519]: DEBUG oslo_vmware.api [None req-ed5a0cc8-9947-43e0-a22d-d24f2fbe8a1c tempest-DeleteServersAdminTestJSON-456585889 tempest-DeleteServersAdminTestJSON-456585889-project-member] Waiting for the task: (returnval){ [ 1498.162698] env[62519]: value = "task-1802193" [ 1498.162698] env[62519]: _type = "Task" [ 1498.162698] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1498.172567] env[62519]: DEBUG oslo_vmware.api [None req-ed5a0cc8-9947-43e0-a22d-d24f2fbe8a1c tempest-DeleteServersAdminTestJSON-456585889 tempest-DeleteServersAdminTestJSON-456585889-project-member] Task: {'id': task-1802193, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1498.235153] env[62519]: DEBUG oslo_vmware.api [None req-261745f1-111f-411a-bbb8-413ef8a82a9e tempest-ServerShowV254Test-1623148250 tempest-ServerShowV254Test-1623148250-project-member] Task: {'id': task-1802191, 'name': PowerOnVM_Task} progress is 100%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1498.328351] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50a3726d-b51e-4d0a-b8ba-c32e8e0ad935 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1498.339417] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0695a1c7-300e-4310-ae7a-9c43a2dd2d14 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1498.344177] env[62519]: DEBUG oslo_vmware.api [None req-a1c40e59-33e7-4dc4-932b-b210029267f9 tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52ab01d5-0cbc-d5fe-1954-c079c38fd9f2, 'name': SearchDatastore_Task, 'duration_secs': 0.010597} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1498.346058] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-dad7b41e-b413-45c2-a6e6-87d6a5a755ce {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1498.373670] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5133bdc1-7546-48a9-98cf-2adf79caa11a {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1498.379697] env[62519]: DEBUG oslo_vmware.api [None req-a1c40e59-33e7-4dc4-932b-b210029267f9 tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Waiting for the task: (returnval){ [ 1498.379697] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]52362729-e403-ceac-c23d-8c9603790edb" [ 1498.379697] env[62519]: _type = "Task" [ 1498.379697] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1498.387061] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df2eb265-0a9a-4cb0-84d5-3cd700092872 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1498.394393] env[62519]: DEBUG oslo_vmware.api [None req-a1c40e59-33e7-4dc4-932b-b210029267f9 tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52362729-e403-ceac-c23d-8c9603790edb, 'name': SearchDatastore_Task, 'duration_secs': 0.011731} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1498.395073] env[62519]: DEBUG oslo_concurrency.lockutils [None req-a1c40e59-33e7-4dc4-932b-b210029267f9 tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Releasing lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1498.395416] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-a1c40e59-33e7-4dc4-932b-b210029267f9 tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk to [datastore1] f59a31e4-7fb9-4de7-b35f-da811a305f85/f59a31e4-7fb9-4de7-b35f-da811a305f85.vmdk {{(pid=62519) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1498.395710] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-dfe2105d-5f82-41fe-97cd-62aa44e37b5b {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1498.405904] env[62519]: DEBUG nova.compute.provider_tree [None req-c79e11bf-bb5b-47ac-8984-42477fd61aad tempest-ServerShowV247Test-1912217433 tempest-ServerShowV247Test-1912217433-project-member] Updating inventory in ProviderTree for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 157, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1498.414189] env[62519]: DEBUG oslo_vmware.api [None req-a1c40e59-33e7-4dc4-932b-b210029267f9 tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Waiting for the task: (returnval){ [ 1498.414189] env[62519]: value = "task-1802194" [ 1498.414189] env[62519]: _type = "Task" [ 1498.414189] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1498.425439] env[62519]: DEBUG oslo_vmware.api [None req-a1c40e59-33e7-4dc4-932b-b210029267f9 tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Task: {'id': task-1802194, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1498.469044] env[62519]: DEBUG nova.network.neutron [None req-6d590639-68af-4fac-aa10-a83b048f703c tempest-ServersTestMultiNic-797441404 tempest-ServersTestMultiNic-797441404-project-member] [instance: cfefa7c8-4986-4ad0-ac20-8784ee44a737] Instance cache missing network info. 
{{(pid=62519) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1498.606406] env[62519]: DEBUG nova.network.neutron [req-f38d4905-f743-4528-8a49-921debc75aa1 req-4b8fb216-9d47-4873-b90a-5eff3d5e4d8c service nova] [instance: f59a31e4-7fb9-4de7-b35f-da811a305f85] Updated VIF entry in instance network info cache for port d1be096c-c8b0-4dd6-98d9-8289626a2f66. {{(pid=62519) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1498.606406] env[62519]: DEBUG nova.network.neutron [req-f38d4905-f743-4528-8a49-921debc75aa1 req-4b8fb216-9d47-4873-b90a-5eff3d5e4d8c service nova] [instance: f59a31e4-7fb9-4de7-b35f-da811a305f85] Updating instance_info_cache with network_info: [{"id": "d1be096c-c8b0-4dd6-98d9-8289626a2f66", "address": "fa:16:3e:6b:57:e2", "network": {"id": "f59f09e8-6201-4c07-9809-ef4c213de15f", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-906781315-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "555fa612766f4b5fa173664ca3fa496c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8bc3fa06-9d5b-4ab1-8113-6ed8942d23b6", "external-id": "nsx-vlan-transportzone-72", "segmentation_id": 72, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd1be096c-c8", "ovs_interfaceid": "d1be096c-c8b0-4dd6-98d9-8289626a2f66", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1498.673514] env[62519]: DEBUG oslo_vmware.api [None req-ed5a0cc8-9947-43e0-a22d-d24f2fbe8a1c tempest-DeleteServersAdminTestJSON-456585889 tempest-DeleteServersAdminTestJSON-456585889-project-member] Task: {'id': task-1802193, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.332459} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1498.673807] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-ed5a0cc8-9947-43e0-a22d-d24f2fbe8a1c tempest-DeleteServersAdminTestJSON-456585889 tempest-DeleteServersAdminTestJSON-456585889-project-member] Deleted the datastore file {{(pid=62519) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1498.673896] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-ed5a0cc8-9947-43e0-a22d-d24f2fbe8a1c tempest-DeleteServersAdminTestJSON-456585889 tempest-DeleteServersAdminTestJSON-456585889-project-member] [instance: 40c7a9b8-d541-464a-ba87-76cfc183ae31] Deleted contents of the VM from datastore datastore1 {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1498.675044] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-ed5a0cc8-9947-43e0-a22d-d24f2fbe8a1c tempest-DeleteServersAdminTestJSON-456585889 tempest-DeleteServersAdminTestJSON-456585889-project-member] [instance: 40c7a9b8-d541-464a-ba87-76cfc183ae31] Instance destroyed {{(pid=62519) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1498.675044] env[62519]: INFO nova.compute.manager [None req-ed5a0cc8-9947-43e0-a22d-d24f2fbe8a1c tempest-DeleteServersAdminTestJSON-456585889 tempest-DeleteServersAdminTestJSON-456585889-project-member] [instance: 40c7a9b8-d541-464a-ba87-76cfc183ae31] Took 1.16 seconds to destroy the instance on the hypervisor. [ 1498.675044] env[62519]: DEBUG oslo.service.loopingcall [None req-ed5a0cc8-9947-43e0-a22d-d24f2fbe8a1c tempest-DeleteServersAdminTestJSON-456585889 tempest-DeleteServersAdminTestJSON-456585889-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62519) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1498.675044] env[62519]: DEBUG nova.compute.manager [-] [instance: 40c7a9b8-d541-464a-ba87-76cfc183ae31] Deallocating network for instance {{(pid=62519) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2297}} [ 1498.675294] env[62519]: DEBUG nova.network.neutron [-] [instance: 40c7a9b8-d541-464a-ba87-76cfc183ae31] deallocate_for_instance() {{(pid=62519) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1498.706959] env[62519]: DEBUG nova.compute.manager [req-bd07cb83-4a47-4dcf-8e03-616fe9849f01 req-07a8ac2b-6322-4820-8410-75113f466ab7 service nova] [instance: cfefa7c8-4986-4ad0-ac20-8784ee44a737] Received event network-vif-plugged-7d9ab028-27a4-47e2-8a46-08e1672bd6fb {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1498.707345] env[62519]: DEBUG oslo_concurrency.lockutils [req-bd07cb83-4a47-4dcf-8e03-616fe9849f01 req-07a8ac2b-6322-4820-8410-75113f466ab7 service nova] Acquiring lock "cfefa7c8-4986-4ad0-ac20-8784ee44a737-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1498.708552] env[62519]: DEBUG oslo_concurrency.lockutils [req-bd07cb83-4a47-4dcf-8e03-616fe9849f01 req-07a8ac2b-6322-4820-8410-75113f466ab7 service nova] Lock "cfefa7c8-4986-4ad0-ac20-8784ee44a737-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1498.709644] env[62519]: DEBUG oslo_concurrency.lockutils [req-bd07cb83-4a47-4dcf-8e03-616fe9849f01 req-07a8ac2b-6322-4820-8410-75113f466ab7 service nova] Lock "cfefa7c8-4986-4ad0-ac20-8784ee44a737-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.001s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1498.709867] env[62519]: DEBUG nova.compute.manager [req-bd07cb83-4a47-4dcf-8e03-616fe9849f01 req-07a8ac2b-6322-4820-8410-75113f466ab7 service nova] [instance: cfefa7c8-4986-4ad0-ac20-8784ee44a737] No waiting events found dispatching network-vif-plugged-7d9ab028-27a4-47e2-8a46-08e1672bd6fb {{(pid=62519) pop_instance_event /opt/stack/nova/nova/compute/manager.py:323}} [ 1498.710059] env[62519]: WARNING nova.compute.manager [req-bd07cb83-4a47-4dcf-8e03-616fe9849f01 req-07a8ac2b-6322-4820-8410-75113f466ab7 service nova] [instance: cfefa7c8-4986-4ad0-ac20-8784ee44a737] Received unexpected event network-vif-plugged-7d9ab028-27a4-47e2-8a46-08e1672bd6fb for instance with vm_state building and task_state spawning. [ 1498.710233] env[62519]: DEBUG nova.compute.manager [req-bd07cb83-4a47-4dcf-8e03-616fe9849f01 req-07a8ac2b-6322-4820-8410-75113f466ab7 service nova] [instance: cfefa7c8-4986-4ad0-ac20-8784ee44a737] Received event network-changed-7d9ab028-27a4-47e2-8a46-08e1672bd6fb {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1498.710385] env[62519]: DEBUG nova.compute.manager [req-bd07cb83-4a47-4dcf-8e03-616fe9849f01 req-07a8ac2b-6322-4820-8410-75113f466ab7 service nova] [instance: cfefa7c8-4986-4ad0-ac20-8784ee44a737] Refreshing instance network info cache due to event network-changed-7d9ab028-27a4-47e2-8a46-08e1672bd6fb. 
{{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11628}} [ 1498.710564] env[62519]: DEBUG oslo_concurrency.lockutils [req-bd07cb83-4a47-4dcf-8e03-616fe9849f01 req-07a8ac2b-6322-4820-8410-75113f466ab7 service nova] Acquiring lock "refresh_cache-cfefa7c8-4986-4ad0-ac20-8784ee44a737" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1498.740602] env[62519]: DEBUG oslo_vmware.api [None req-261745f1-111f-411a-bbb8-413ef8a82a9e tempest-ServerShowV254Test-1623148250 tempest-ServerShowV254Test-1623148250-project-member] Task: {'id': task-1802191, 'name': PowerOnVM_Task, 'duration_secs': 0.523113} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1498.741076] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-261745f1-111f-411a-bbb8-413ef8a82a9e tempest-ServerShowV254Test-1623148250 tempest-ServerShowV254Test-1623148250-project-member] [instance: 1462d213-3f9a-4c60-8056-0b68f20a4939] Powered on the VM {{(pid=62519) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1498.741402] env[62519]: DEBUG nova.compute.manager [None req-261745f1-111f-411a-bbb8-413ef8a82a9e tempest-ServerShowV254Test-1623148250 tempest-ServerShowV254Test-1623148250-project-member] [instance: 1462d213-3f9a-4c60-8056-0b68f20a4939] Checking state {{(pid=62519) _get_power_state /opt/stack/nova/nova/compute/manager.py:1799}} [ 1498.743587] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-457df472-ddc5-4aa2-ad2f-d21659ceb589 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1498.926903] env[62519]: DEBUG oslo_vmware.api [None req-a1c40e59-33e7-4dc4-932b-b210029267f9 tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Task: {'id': task-1802194, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1498.966516] env[62519]: DEBUG nova.scheduler.client.report [None req-c79e11bf-bb5b-47ac-8984-42477fd61aad tempest-ServerShowV247Test-1912217433 tempest-ServerShowV247Test-1912217433-project-member] Updated inventory for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 with generation 61 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 157, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 1498.966863] env[62519]: DEBUG nova.compute.provider_tree [None req-c79e11bf-bb5b-47ac-8984-42477fd61aad tempest-ServerShowV247Test-1912217433 tempest-ServerShowV247Test-1912217433-project-member] Updating resource provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 generation from 61 to 62 during operation: update_inventory {{(pid=62519) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1498.967819] env[62519]: DEBUG nova.compute.provider_tree [None req-c79e11bf-bb5b-47ac-8984-42477fd61aad tempest-ServerShowV247Test-1912217433 tempest-ServerShowV247Test-1912217433-project-member] Updating inventory in ProviderTree for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 157, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1499.015347] env[62519]: DEBUG nova.network.neutron [None req-d532afdb-73c2-4922-91b8-52d65f7c0d83 tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] [instance: 1d4b14d3-8832-457e-aaed-462236555f57] Successfully updated port: 54d71145-cb5f-4e36-b3dd-2905575f9742 {{(pid=62519) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1499.111950] env[62519]: DEBUG oslo_concurrency.lockutils [req-f38d4905-f743-4528-8a49-921debc75aa1 req-4b8fb216-9d47-4873-b90a-5eff3d5e4d8c service nova] Releasing lock "refresh_cache-f59a31e4-7fb9-4de7-b35f-da811a305f85" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1499.112300] env[62519]: DEBUG nova.compute.manager [req-f38d4905-f743-4528-8a49-921debc75aa1 req-4b8fb216-9d47-4873-b90a-5eff3d5e4d8c service nova] [instance: f7d5c77d-6c78-4969-b511-2b03ab624c84] Received event network-vif-deleted-11826675-9830-43c6-a3ed-cc8329005aa2 {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1499.212754] env[62519]: DEBUG nova.network.neutron [None req-6d590639-68af-4fac-aa10-a83b048f703c tempest-ServersTestMultiNic-797441404 tempest-ServersTestMultiNic-797441404-project-member] [instance: cfefa7c8-4986-4ad0-ac20-8784ee44a737] Updating instance_info_cache with network_info: [{"id": "dd1ebcef-cdc1-4641-8622-443a2365b497", "address": "fa:16:3e:02:07:45", "network": {"id": "b272ddf8-d08f-4e19-a9fc-13fe663dac46", "bridge": 
"br-int", "label": "tempest-ServersTestMultiNic-1317143348", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.245", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "08813c8c5d0b45dbab5a05ed08ef9531", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "33ddef78-922c-4cd3-99b0-971ac7802856", "external-id": "nsx-vlan-transportzone-311", "segmentation_id": 311, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdd1ebcef-cd", "ovs_interfaceid": "dd1ebcef-cdc1-4641-8622-443a2365b497", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "6a3b6bdf-835f-429c-982d-00028308b4b2", "address": "fa:16:3e:e2:f0:27", "network": {"id": "73b13a51-2f2f-4737-aacc-1d0fdc8db0c8", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-602986409", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.178", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "08813c8c5d0b45dbab5a05ed08ef9531", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6b151a0c-aa46-4d21-9ef5-c09cf350b19c", "external-id": "nsx-vlan-transportzone-343", "segmentation_id": 343, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6a3b6bdf-83", "ovs_interfaceid": "6a3b6bdf-835f-429c-982d-00028308b4b2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "7d9ab028-27a4-47e2-8a46-08e1672bd6fb", "address": "fa:16:3e:ea:33:7c", "network": {"id": "b272ddf8-d08f-4e19-a9fc-13fe663dac46", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1317143348", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.142", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "08813c8c5d0b45dbab5a05ed08ef9531", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "33ddef78-922c-4cd3-99b0-971ac7802856", "external-id": "nsx-vlan-transportzone-311", "segmentation_id": 311, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7d9ab028-27", "ovs_interfaceid": "7d9ab028-27a4-47e2-8a46-08e1672bd6fb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1499.268133] env[62519]: DEBUG oslo_concurrency.lockutils [None 
req-261745f1-111f-411a-bbb8-413ef8a82a9e tempest-ServerShowV254Test-1623148250 tempest-ServerShowV254Test-1623148250-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1499.426171] env[62519]: DEBUG oslo_vmware.api [None req-a1c40e59-33e7-4dc4-932b-b210029267f9 tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Task: {'id': task-1802194, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.656385} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1499.426455] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-a1c40e59-33e7-4dc4-932b-b210029267f9 tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk to [datastore1] f59a31e4-7fb9-4de7-b35f-da811a305f85/f59a31e4-7fb9-4de7-b35f-da811a305f85.vmdk {{(pid=62519) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1499.426669] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-a1c40e59-33e7-4dc4-932b-b210029267f9 tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] [instance: f59a31e4-7fb9-4de7-b35f-da811a305f85] Extending root virtual disk to 1048576 {{(pid=62519) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1499.426968] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-0e2ace99-5880-4be2-9d27-58fd80c13393 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1499.436037] env[62519]: DEBUG oslo_vmware.api [None req-a1c40e59-33e7-4dc4-932b-b210029267f9 tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Waiting for the task: (returnval){ [ 1499.436037] env[62519]: value = "task-1802195" [ 1499.436037] env[62519]: _type = "Task" [ 1499.436037] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1499.443533] env[62519]: DEBUG oslo_vmware.api [None req-a1c40e59-33e7-4dc4-932b-b210029267f9 tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Task: {'id': task-1802195, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1499.478543] env[62519]: DEBUG oslo_concurrency.lockutils [None req-c79e11bf-bb5b-47ac-8984-42477fd61aad tempest-ServerShowV247Test-1912217433 tempest-ServerShowV247Test-1912217433-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 3.519s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1499.480897] env[62519]: DEBUG oslo_concurrency.lockutils [None req-1d8c6fef-464e-42e4-b682-66f718a2e8f1 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 31.533s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1499.482459] env[62519]: INFO nova.compute.claims [None req-1d8c6fef-464e-42e4-b682-66f718a2e8f1 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] [instance: 51eaac08-75fd-49f9-9b1a-cc2a2d799634] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1499.505393] env[62519]: INFO nova.scheduler.client.report [None req-c79e11bf-bb5b-47ac-8984-42477fd61aad tempest-ServerShowV247Test-1912217433 tempest-ServerShowV247Test-1912217433-project-member] Deleted allocations for instance 49221ea3-d457-4cf5-97a9-9ae74c4e86fb [ 1499.520845] env[62519]: DEBUG oslo_concurrency.lockutils [None req-d532afdb-73c2-4922-91b8-52d65f7c0d83 tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] Acquiring lock "refresh_cache-1d4b14d3-8832-457e-aaed-462236555f57" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1499.520989] env[62519]: DEBUG oslo_concurrency.lockutils [None req-d532afdb-73c2-4922-91b8-52d65f7c0d83 tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] Acquired lock "refresh_cache-1d4b14d3-8832-457e-aaed-462236555f57" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1499.521151] env[62519]: DEBUG nova.network.neutron [None req-d532afdb-73c2-4922-91b8-52d65f7c0d83 tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] [instance: 1d4b14d3-8832-457e-aaed-462236555f57] Building network info cache for instance {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1499.577642] env[62519]: DEBUG nova.network.neutron [-] [instance: 40c7a9b8-d541-464a-ba87-76cfc183ae31] Updating instance_info_cache with network_info: [] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1499.715511] env[62519]: DEBUG oslo_concurrency.lockutils [None req-6d590639-68af-4fac-aa10-a83b048f703c tempest-ServersTestMultiNic-797441404 tempest-ServersTestMultiNic-797441404-project-member] Releasing lock "refresh_cache-cfefa7c8-4986-4ad0-ac20-8784ee44a737" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1499.715961] env[62519]: DEBUG nova.compute.manager [None req-6d590639-68af-4fac-aa10-a83b048f703c tempest-ServersTestMultiNic-797441404 tempest-ServersTestMultiNic-797441404-project-member] [instance: 
cfefa7c8-4986-4ad0-ac20-8784ee44a737] Instance network_info: |[{"id": "dd1ebcef-cdc1-4641-8622-443a2365b497", "address": "fa:16:3e:02:07:45", "network": {"id": "b272ddf8-d08f-4e19-a9fc-13fe663dac46", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1317143348", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.245", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "08813c8c5d0b45dbab5a05ed08ef9531", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "33ddef78-922c-4cd3-99b0-971ac7802856", "external-id": "nsx-vlan-transportzone-311", "segmentation_id": 311, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdd1ebcef-cd", "ovs_interfaceid": "dd1ebcef-cdc1-4641-8622-443a2365b497", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "6a3b6bdf-835f-429c-982d-00028308b4b2", "address": "fa:16:3e:e2:f0:27", "network": {"id": "73b13a51-2f2f-4737-aacc-1d0fdc8db0c8", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-602986409", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.178", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "08813c8c5d0b45dbab5a05ed08ef9531", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6b151a0c-aa46-4d21-9ef5-c09cf350b19c", "external-id": "nsx-vlan-transportzone-343", "segmentation_id": 343, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6a3b6bdf-83", "ovs_interfaceid": "6a3b6bdf-835f-429c-982d-00028308b4b2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "7d9ab028-27a4-47e2-8a46-08e1672bd6fb", "address": "fa:16:3e:ea:33:7c", "network": {"id": "b272ddf8-d08f-4e19-a9fc-13fe663dac46", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1317143348", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.142", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "08813c8c5d0b45dbab5a05ed08ef9531", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "33ddef78-922c-4cd3-99b0-971ac7802856", "external-id": "nsx-vlan-transportzone-311", "segmentation_id": 311, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7d9ab028-27", "ovs_interfaceid": "7d9ab028-27a4-47e2-8a46-08e1672bd6fb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, 
"delegate_create": true, "meta": {}}]| {{(pid=62519) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2004}} [ 1499.716304] env[62519]: DEBUG oslo_concurrency.lockutils [req-bd07cb83-4a47-4dcf-8e03-616fe9849f01 req-07a8ac2b-6322-4820-8410-75113f466ab7 service nova] Acquired lock "refresh_cache-cfefa7c8-4986-4ad0-ac20-8784ee44a737" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1499.716481] env[62519]: DEBUG nova.network.neutron [req-bd07cb83-4a47-4dcf-8e03-616fe9849f01 req-07a8ac2b-6322-4820-8410-75113f466ab7 service nova] [instance: cfefa7c8-4986-4ad0-ac20-8784ee44a737] Refreshing network info cache for port 7d9ab028-27a4-47e2-8a46-08e1672bd6fb {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1499.717717] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-6d590639-68af-4fac-aa10-a83b048f703c tempest-ServersTestMultiNic-797441404 tempest-ServersTestMultiNic-797441404-project-member] [instance: cfefa7c8-4986-4ad0-ac20-8784ee44a737] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:02:07:45', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '33ddef78-922c-4cd3-99b0-971ac7802856', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'dd1ebcef-cdc1-4641-8622-443a2365b497', 'vif_model': 'vmxnet3'}, {'network_name': 'br-int', 'mac_address': 'fa:16:3e:e2:f0:27', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '6b151a0c-aa46-4d21-9ef5-c09cf350b19c', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '6a3b6bdf-835f-429c-982d-00028308b4b2', 'vif_model': 'vmxnet3'}, {'network_name': 'br-int', 'mac_address': 'fa:16:3e:ea:33:7c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '33ddef78-922c-4cd3-99b0-971ac7802856', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '7d9ab028-27a4-47e2-8a46-08e1672bd6fb', 'vif_model': 'vmxnet3'}] {{(pid=62519) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1499.731659] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-6d590639-68af-4fac-aa10-a83b048f703c tempest-ServersTestMultiNic-797441404 tempest-ServersTestMultiNic-797441404-project-member] Creating folder: Project (08813c8c5d0b45dbab5a05ed08ef9531). Parent ref: group-v373567. {{(pid=62519) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1499.734816] env[62519]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-48429a08-52e4-4de3-8476-c3bf8cff7f14 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1499.747899] env[62519]: INFO nova.virt.vmwareapi.vm_util [None req-6d590639-68af-4fac-aa10-a83b048f703c tempest-ServersTestMultiNic-797441404 tempest-ServersTestMultiNic-797441404-project-member] Created folder: Project (08813c8c5d0b45dbab5a05ed08ef9531) in parent group-v373567. [ 1499.748104] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-6d590639-68af-4fac-aa10-a83b048f703c tempest-ServersTestMultiNic-797441404 tempest-ServersTestMultiNic-797441404-project-member] Creating folder: Instances. Parent ref: group-v373651. 
{{(pid=62519) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1499.748340] env[62519]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-1721d722-fdac-4c87-8c76-77b174258a4e {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1499.758815] env[62519]: INFO nova.virt.vmwareapi.vm_util [None req-6d590639-68af-4fac-aa10-a83b048f703c tempest-ServersTestMultiNic-797441404 tempest-ServersTestMultiNic-797441404-project-member] Created folder: Instances in parent group-v373651. [ 1499.759156] env[62519]: DEBUG oslo.service.loopingcall [None req-6d590639-68af-4fac-aa10-a83b048f703c tempest-ServersTestMultiNic-797441404 tempest-ServersTestMultiNic-797441404-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62519) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1499.759366] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: cfefa7c8-4986-4ad0-ac20-8784ee44a737] Creating VM on the ESX host {{(pid=62519) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1499.759604] env[62519]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-dd2b3ec5-2443-48bd-bdf6-5416265e385a {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1499.785980] env[62519]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1499.785980] env[62519]: value = "task-1802198" [ 1499.785980] env[62519]: _type = "Task" [ 1499.785980] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1499.794281] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1802198, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1499.823629] env[62519]: DEBUG oslo_concurrency.lockutils [None req-ac03e3f5-b1cf-4968-b811-42fd40903176 tempest-ServerShowV254Test-1623148250 tempest-ServerShowV254Test-1623148250-project-member] Acquiring lock "1462d213-3f9a-4c60-8056-0b68f20a4939" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1499.823960] env[62519]: DEBUG oslo_concurrency.lockutils [None req-ac03e3f5-b1cf-4968-b811-42fd40903176 tempest-ServerShowV254Test-1623148250 tempest-ServerShowV254Test-1623148250-project-member] Lock "1462d213-3f9a-4c60-8056-0b68f20a4939" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1499.824261] env[62519]: DEBUG oslo_concurrency.lockutils [None req-ac03e3f5-b1cf-4968-b811-42fd40903176 tempest-ServerShowV254Test-1623148250 tempest-ServerShowV254Test-1623148250-project-member] Acquiring lock "1462d213-3f9a-4c60-8056-0b68f20a4939-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1499.824471] env[62519]: DEBUG oslo_concurrency.lockutils [None req-ac03e3f5-b1cf-4968-b811-42fd40903176 tempest-ServerShowV254Test-1623148250 tempest-ServerShowV254Test-1623148250-project-member] Lock "1462d213-3f9a-4c60-8056-0b68f20a4939-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1499.824678] env[62519]: DEBUG oslo_concurrency.lockutils [None req-ac03e3f5-b1cf-4968-b811-42fd40903176 tempest-ServerShowV254Test-1623148250 tempest-ServerShowV254Test-1623148250-project-member] Lock "1462d213-3f9a-4c60-8056-0b68f20a4939-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1499.827019] env[62519]: INFO nova.compute.manager [None req-ac03e3f5-b1cf-4968-b811-42fd40903176 tempest-ServerShowV254Test-1623148250 tempest-ServerShowV254Test-1623148250-project-member] [instance: 1462d213-3f9a-4c60-8056-0b68f20a4939] Terminating instance [ 1499.946506] env[62519]: DEBUG oslo_vmware.api [None req-a1c40e59-33e7-4dc4-932b-b210029267f9 tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Task: {'id': task-1802195, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.074177} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1499.946812] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-a1c40e59-33e7-4dc4-932b-b210029267f9 tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] [instance: f59a31e4-7fb9-4de7-b35f-da811a305f85] Extended root virtual disk {{(pid=62519) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1499.947882] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48cee91a-6402-4db4-9fe8-3cd56227a2cb {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1499.972699] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-a1c40e59-33e7-4dc4-932b-b210029267f9 tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] [instance: f59a31e4-7fb9-4de7-b35f-da811a305f85] Reconfiguring VM instance instance-0000001e to attach disk [datastore1] f59a31e4-7fb9-4de7-b35f-da811a305f85/f59a31e4-7fb9-4de7-b35f-da811a305f85.vmdk or device None with type sparse {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1499.976739] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-eeff29c5-80c1-471b-9b4a-4e690f743943 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1500.002854] env[62519]: DEBUG oslo_vmware.api [None req-a1c40e59-33e7-4dc4-932b-b210029267f9 tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Waiting for the task: (returnval){ [ 1500.002854] env[62519]: value = "task-1802199" [ 1500.002854] env[62519]: _type = "Task" [ 1500.002854] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1500.014395] env[62519]: DEBUG oslo_vmware.api [None req-a1c40e59-33e7-4dc4-932b-b210029267f9 tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Task: {'id': task-1802199, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1500.014837] env[62519]: DEBUG oslo_concurrency.lockutils [None req-c79e11bf-bb5b-47ac-8984-42477fd61aad tempest-ServerShowV247Test-1912217433 tempest-ServerShowV247Test-1912217433-project-member] Lock "49221ea3-d457-4cf5-97a9-9ae74c4e86fb" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 38.571s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1500.061720] env[62519]: DEBUG nova.network.neutron [None req-d532afdb-73c2-4922-91b8-52d65f7c0d83 tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] [instance: 1d4b14d3-8832-457e-aaed-462236555f57] Instance cache missing network info. {{(pid=62519) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1500.081417] env[62519]: INFO nova.compute.manager [-] [instance: 40c7a9b8-d541-464a-ba87-76cfc183ae31] Took 1.41 seconds to deallocate network for instance. 
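The records above follow the recurring oslo.vmware call pattern in this log: the driver invokes a vSphere method through the session (Folder.CreateFolder, Folder.CreateVM_Task, ExtendVirtualDisk_Task, ReconfigVM_Task), and task-returning calls are then handed to wait_for_task, whose polling produces the "progress is N%" and "completed successfully" records. A minimal sketch of that pattern against oslo.vmware's public session API follows; the host, credentials, managed-object id, and the exact argument layout are illustrative assumptions, not values or code taken from this log or from Nova itself.

    # Sketch of the invoke-then-wait pattern visible in the records above.
    # Host, credentials, and managed-object ids below are placeholders.
    from oslo_vmware import api as vmware_api
    from oslo_vmware import vim_util

    session = vmware_api.VMwareAPISession(
        'vcenter.example.org',            # vCenter host (placeholder)
        'administrator@vsphere.local',    # username (placeholder)
        'secret',                         # password (placeholder)
        10,                               # api_retry_count
        0.5)                              # task_poll_interval, seconds

    # Resolve a parent folder from its managed-object id, e.g. "group-v373567".
    parent = vim_util.get_moref('group-v373567', 'Folder')

    # Folder.CreateFolder is synchronous and returns the new folder moref.
    child = session.invoke_api(session.vim, 'CreateFolder', parent,
                               name='Instances')

    # Task-returning calls (CreateVM_Task, ReconfigVM_Task, ...) are instead
    # passed to wait_for_task, which polls TaskInfo until success or error --
    # the source of the "progress is N%" records in this log. Shown commented
    # because config_spec/res_pool_ref would need a full spec-building step:
    # task = session.invoke_api(session.vim, 'CreateVM_Task', child,
    #                           config=config_spec, pool=res_pool_ref)
    # task_info = session.wait_for_task(task)

    session.logout()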
[ 1500.150799] env[62519]: DEBUG nova.network.neutron [req-bd07cb83-4a47-4dcf-8e03-616fe9849f01 req-07a8ac2b-6322-4820-8410-75113f466ab7 service nova] [instance: cfefa7c8-4986-4ad0-ac20-8784ee44a737] Updated VIF entry in instance network info cache for port 7d9ab028-27a4-47e2-8a46-08e1672bd6fb. {{(pid=62519) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1500.151425] env[62519]: DEBUG nova.network.neutron [req-bd07cb83-4a47-4dcf-8e03-616fe9849f01 req-07a8ac2b-6322-4820-8410-75113f466ab7 service nova] [instance: cfefa7c8-4986-4ad0-ac20-8784ee44a737] Updating instance_info_cache with network_info: [{"id": "dd1ebcef-cdc1-4641-8622-443a2365b497", "address": "fa:16:3e:02:07:45", "network": {"id": "b272ddf8-d08f-4e19-a9fc-13fe663dac46", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1317143348", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.245", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "08813c8c5d0b45dbab5a05ed08ef9531", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "33ddef78-922c-4cd3-99b0-971ac7802856", "external-id": "nsx-vlan-transportzone-311", "segmentation_id": 311, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdd1ebcef-cd", "ovs_interfaceid": "dd1ebcef-cdc1-4641-8622-443a2365b497", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "6a3b6bdf-835f-429c-982d-00028308b4b2", "address": "fa:16:3e:e2:f0:27", "network": {"id": "73b13a51-2f2f-4737-aacc-1d0fdc8db0c8", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-602986409", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.178", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "08813c8c5d0b45dbab5a05ed08ef9531", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6b151a0c-aa46-4d21-9ef5-c09cf350b19c", "external-id": "nsx-vlan-transportzone-343", "segmentation_id": 343, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6a3b6bdf-83", "ovs_interfaceid": "6a3b6bdf-835f-429c-982d-00028308b4b2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "7d9ab028-27a4-47e2-8a46-08e1672bd6fb", "address": "fa:16:3e:ea:33:7c", "network": {"id": "b272ddf8-d08f-4e19-a9fc-13fe663dac46", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1317143348", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.142", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, 
"tenant_id": "08813c8c5d0b45dbab5a05ed08ef9531", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "33ddef78-922c-4cd3-99b0-971ac7802856", "external-id": "nsx-vlan-transportzone-311", "segmentation_id": 311, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7d9ab028-27", "ovs_interfaceid": "7d9ab028-27a4-47e2-8a46-08e1672bd6fb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1500.256182] env[62519]: DEBUG nova.network.neutron [None req-d532afdb-73c2-4922-91b8-52d65f7c0d83 tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] [instance: 1d4b14d3-8832-457e-aaed-462236555f57] Updating instance_info_cache with network_info: [{"id": "54d71145-cb5f-4e36-b3dd-2905575f9742", "address": "fa:16:3e:72:78:48", "network": {"id": "b4c20e4e-b4f0-4757-9ab0-b74eef10b678", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.22", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "4069337684d348e0a1ab4eb6a1a2b14d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fe99da4f-5630-4afd-918b-b327193d8489", "external-id": "nsx-vlan-transportzone-688", "segmentation_id": 688, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap54d71145-cb", "ovs_interfaceid": "54d71145-cb5f-4e36-b3dd-2905575f9742", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1500.297965] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1802198, 'name': CreateVM_Task} progress is 25%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1500.331313] env[62519]: DEBUG oslo_concurrency.lockutils [None req-ac03e3f5-b1cf-4968-b811-42fd40903176 tempest-ServerShowV254Test-1623148250 tempest-ServerShowV254Test-1623148250-project-member] Acquiring lock "refresh_cache-1462d213-3f9a-4c60-8056-0b68f20a4939" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1500.331529] env[62519]: DEBUG oslo_concurrency.lockutils [None req-ac03e3f5-b1cf-4968-b811-42fd40903176 tempest-ServerShowV254Test-1623148250 tempest-ServerShowV254Test-1623148250-project-member] Acquired lock "refresh_cache-1462d213-3f9a-4c60-8056-0b68f20a4939" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1500.331712] env[62519]: DEBUG nova.network.neutron [None req-ac03e3f5-b1cf-4968-b811-42fd40903176 tempest-ServerShowV254Test-1623148250 tempest-ServerShowV254Test-1623148250-project-member] [instance: 1462d213-3f9a-4c60-8056-0b68f20a4939] Building network info cache for instance {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1500.497264] env[62519]: DEBUG oslo_concurrency.lockutils [None req-804e8c96-430f-4309-af0c-933dbd81ccef tempest-ServerShowV247Test-1912217433 tempest-ServerShowV247Test-1912217433-project-member] Acquiring lock "42497ab5-cce9-4614-a6d1-dffbf6764d7b" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1500.497587] env[62519]: DEBUG oslo_concurrency.lockutils [None req-804e8c96-430f-4309-af0c-933dbd81ccef tempest-ServerShowV247Test-1912217433 tempest-ServerShowV247Test-1912217433-project-member] Lock "42497ab5-cce9-4614-a6d1-dffbf6764d7b" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1500.497836] env[62519]: DEBUG oslo_concurrency.lockutils [None req-804e8c96-430f-4309-af0c-933dbd81ccef tempest-ServerShowV247Test-1912217433 tempest-ServerShowV247Test-1912217433-project-member] Acquiring lock "42497ab5-cce9-4614-a6d1-dffbf6764d7b-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1500.498216] env[62519]: DEBUG oslo_concurrency.lockutils [None req-804e8c96-430f-4309-af0c-933dbd81ccef tempest-ServerShowV247Test-1912217433 tempest-ServerShowV247Test-1912217433-project-member] Lock "42497ab5-cce9-4614-a6d1-dffbf6764d7b-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1500.498288] env[62519]: DEBUG oslo_concurrency.lockutils [None req-804e8c96-430f-4309-af0c-933dbd81ccef tempest-ServerShowV247Test-1912217433 tempest-ServerShowV247Test-1912217433-project-member] Lock "42497ab5-cce9-4614-a6d1-dffbf6764d7b-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1500.503500] env[62519]: INFO nova.compute.manager [None 
req-804e8c96-430f-4309-af0c-933dbd81ccef tempest-ServerShowV247Test-1912217433 tempest-ServerShowV247Test-1912217433-project-member] [instance: 42497ab5-cce9-4614-a6d1-dffbf6764d7b] Terminating instance [ 1500.518844] env[62519]: DEBUG oslo_vmware.api [None req-a1c40e59-33e7-4dc4-932b-b210029267f9 tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Task: {'id': task-1802199, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1500.589785] env[62519]: DEBUG oslo_concurrency.lockutils [None req-ed5a0cc8-9947-43e0-a22d-d24f2fbe8a1c tempest-DeleteServersAdminTestJSON-456585889 tempest-DeleteServersAdminTestJSON-456585889-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1500.656375] env[62519]: DEBUG oslo_concurrency.lockutils [req-bd07cb83-4a47-4dcf-8e03-616fe9849f01 req-07a8ac2b-6322-4820-8410-75113f466ab7 service nova] Releasing lock "refresh_cache-cfefa7c8-4986-4ad0-ac20-8784ee44a737" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1500.759428] env[62519]: DEBUG nova.compute.manager [req-584738df-2295-4c7d-a5dc-c954e7b91769 req-95eec738-97c1-4dd7-a6f0-464a57899d60 service nova] [instance: 1d4b14d3-8832-457e-aaed-462236555f57] Received event network-vif-plugged-54d71145-cb5f-4e36-b3dd-2905575f9742 {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1500.759428] env[62519]: DEBUG oslo_concurrency.lockutils [req-584738df-2295-4c7d-a5dc-c954e7b91769 req-95eec738-97c1-4dd7-a6f0-464a57899d60 service nova] Acquiring lock "1d4b14d3-8832-457e-aaed-462236555f57-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1500.759817] env[62519]: DEBUG oslo_concurrency.lockutils [req-584738df-2295-4c7d-a5dc-c954e7b91769 req-95eec738-97c1-4dd7-a6f0-464a57899d60 service nova] Lock "1d4b14d3-8832-457e-aaed-462236555f57-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1500.759817] env[62519]: DEBUG oslo_concurrency.lockutils [req-584738df-2295-4c7d-a5dc-c954e7b91769 req-95eec738-97c1-4dd7-a6f0-464a57899d60 service nova] Lock "1d4b14d3-8832-457e-aaed-462236555f57-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1500.760087] env[62519]: DEBUG nova.compute.manager [req-584738df-2295-4c7d-a5dc-c954e7b91769 req-95eec738-97c1-4dd7-a6f0-464a57899d60 service nova] [instance: 1d4b14d3-8832-457e-aaed-462236555f57] No waiting events found dispatching network-vif-plugged-54d71145-cb5f-4e36-b3dd-2905575f9742 {{(pid=62519) pop_instance_event /opt/stack/nova/nova/compute/manager.py:323}} [ 1500.760087] env[62519]: WARNING nova.compute.manager [req-584738df-2295-4c7d-a5dc-c954e7b91769 req-95eec738-97c1-4dd7-a6f0-464a57899d60 service nova] [instance: 1d4b14d3-8832-457e-aaed-462236555f57] Received unexpected event network-vif-plugged-54d71145-cb5f-4e36-b3dd-2905575f9742 for 
instance with vm_state building and task_state spawning. [ 1500.760203] env[62519]: DEBUG nova.compute.manager [req-584738df-2295-4c7d-a5dc-c954e7b91769 req-95eec738-97c1-4dd7-a6f0-464a57899d60 service nova] [instance: 1d4b14d3-8832-457e-aaed-462236555f57] Received event network-changed-54d71145-cb5f-4e36-b3dd-2905575f9742 {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1500.760357] env[62519]: DEBUG nova.compute.manager [req-584738df-2295-4c7d-a5dc-c954e7b91769 req-95eec738-97c1-4dd7-a6f0-464a57899d60 service nova] [instance: 1d4b14d3-8832-457e-aaed-462236555f57] Refreshing instance network info cache due to event network-changed-54d71145-cb5f-4e36-b3dd-2905575f9742. {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11628}} [ 1500.760525] env[62519]: DEBUG oslo_concurrency.lockutils [req-584738df-2295-4c7d-a5dc-c954e7b91769 req-95eec738-97c1-4dd7-a6f0-464a57899d60 service nova] Acquiring lock "refresh_cache-1d4b14d3-8832-457e-aaed-462236555f57" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1500.763410] env[62519]: DEBUG oslo_concurrency.lockutils [None req-d532afdb-73c2-4922-91b8-52d65f7c0d83 tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] Releasing lock "refresh_cache-1d4b14d3-8832-457e-aaed-462236555f57" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1500.763696] env[62519]: DEBUG nova.compute.manager [None req-d532afdb-73c2-4922-91b8-52d65f7c0d83 tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] [instance: 1d4b14d3-8832-457e-aaed-462236555f57] Instance network_info: |[{"id": "54d71145-cb5f-4e36-b3dd-2905575f9742", "address": "fa:16:3e:72:78:48", "network": {"id": "b4c20e4e-b4f0-4757-9ab0-b74eef10b678", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.22", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "4069337684d348e0a1ab4eb6a1a2b14d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fe99da4f-5630-4afd-918b-b327193d8489", "external-id": "nsx-vlan-transportzone-688", "segmentation_id": 688, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap54d71145-cb", "ovs_interfaceid": "54d71145-cb5f-4e36-b3dd-2905575f9742", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62519) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2004}} [ 1500.765248] env[62519]: DEBUG oslo_concurrency.lockutils [req-584738df-2295-4c7d-a5dc-c954e7b91769 req-95eec738-97c1-4dd7-a6f0-464a57899d60 service nova] Acquired lock "refresh_cache-1d4b14d3-8832-457e-aaed-462236555f57" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1500.765248] env[62519]: DEBUG nova.network.neutron [req-584738df-2295-4c7d-a5dc-c954e7b91769 req-95eec738-97c1-4dd7-a6f0-464a57899d60 service nova] [instance: 1d4b14d3-8832-457e-aaed-462236555f57] 
Refreshing network info cache for port 54d71145-cb5f-4e36-b3dd-2905575f9742 {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1500.765806] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-d532afdb-73c2-4922-91b8-52d65f7c0d83 tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] [instance: 1d4b14d3-8832-457e-aaed-462236555f57] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:72:78:48', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'fe99da4f-5630-4afd-918b-b327193d8489', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '54d71145-cb5f-4e36-b3dd-2905575f9742', 'vif_model': 'vmxnet3'}] {{(pid=62519) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1500.773759] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-d532afdb-73c2-4922-91b8-52d65f7c0d83 tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] Creating folder: Project (48df3b387a6f43459978ee37df8d8a8f). Parent ref: group-v373567. {{(pid=62519) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1500.775356] env[62519]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-1bbf4eb9-27ba-4a7a-91df-6d84c530a2c3 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1500.794531] env[62519]: INFO nova.virt.vmwareapi.vm_util [None req-d532afdb-73c2-4922-91b8-52d65f7c0d83 tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] Created folder: Project (48df3b387a6f43459978ee37df8d8a8f) in parent group-v373567. [ 1500.794531] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-d532afdb-73c2-4922-91b8-52d65f7c0d83 tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] Creating folder: Instances. Parent ref: group-v373654. {{(pid=62519) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1500.797116] env[62519]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-9ef42ff5-a7ca-43c3-a5da-642e64d63a62 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1500.799045] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1802198, 'name': CreateVM_Task} progress is 25%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1500.812613] env[62519]: INFO nova.virt.vmwareapi.vm_util [None req-d532afdb-73c2-4922-91b8-52d65f7c0d83 tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] Created folder: Instances in parent group-v373654. [ 1500.812884] env[62519]: DEBUG oslo.service.loopingcall [None req-d532afdb-73c2-4922-91b8-52d65f7c0d83 tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62519) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1500.816077] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1d4b14d3-8832-457e-aaed-462236555f57] Creating VM on the ESX host {{(pid=62519) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1500.816352] env[62519]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f2bad0a9-6d95-4982-b7a1-fc9a951b4033 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1500.839075] env[62519]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1500.839075] env[62519]: value = "task-1802202" [ 1500.839075] env[62519]: _type = "Task" [ 1500.839075] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1500.853555] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1802202, 'name': CreateVM_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1500.854396] env[62519]: DEBUG nova.network.neutron [None req-ac03e3f5-b1cf-4968-b811-42fd40903176 tempest-ServerShowV254Test-1623148250 tempest-ServerShowV254Test-1623148250-project-member] [instance: 1462d213-3f9a-4c60-8056-0b68f20a4939] Instance cache missing network info. {{(pid=62519) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1500.909699] env[62519]: DEBUG nova.network.neutron [None req-ac03e3f5-b1cf-4968-b811-42fd40903176 tempest-ServerShowV254Test-1623148250 tempest-ServerShowV254Test-1623148250-project-member] [instance: 1462d213-3f9a-4c60-8056-0b68f20a4939] Updating instance_info_cache with network_info: [] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1501.011588] env[62519]: DEBUG oslo_concurrency.lockutils [None req-804e8c96-430f-4309-af0c-933dbd81ccef tempest-ServerShowV247Test-1912217433 tempest-ServerShowV247Test-1912217433-project-member] Acquiring lock "refresh_cache-42497ab5-cce9-4614-a6d1-dffbf6764d7b" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1501.011871] env[62519]: DEBUG oslo_concurrency.lockutils [None req-804e8c96-430f-4309-af0c-933dbd81ccef tempest-ServerShowV247Test-1912217433 tempest-ServerShowV247Test-1912217433-project-member] Acquired lock "refresh_cache-42497ab5-cce9-4614-a6d1-dffbf6764d7b" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1501.011974] env[62519]: DEBUG nova.network.neutron [None req-804e8c96-430f-4309-af0c-933dbd81ccef tempest-ServerShowV247Test-1912217433 tempest-ServerShowV247Test-1912217433-project-member] [instance: 42497ab5-cce9-4614-a6d1-dffbf6764d7b] Building network info cache for instance {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1501.016807] env[62519]: DEBUG oslo_vmware.api [None req-a1c40e59-33e7-4dc4-932b-b210029267f9 tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Task: {'id': task-1802199, 'name': ReconfigVM_Task, 'duration_secs': 0.850027} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1501.019591] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-a1c40e59-33e7-4dc4-932b-b210029267f9 tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] [instance: f59a31e4-7fb9-4de7-b35f-da811a305f85] Reconfigured VM instance instance-0000001e to attach disk [datastore1] f59a31e4-7fb9-4de7-b35f-da811a305f85/f59a31e4-7fb9-4de7-b35f-da811a305f85.vmdk or device None with type sparse {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1501.021674] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-10d3c9c9-eab1-443d-8b0b-7e2091e483e1 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1501.032110] env[62519]: DEBUG oslo_vmware.api [None req-a1c40e59-33e7-4dc4-932b-b210029267f9 tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Waiting for the task: (returnval){ [ 1501.032110] env[62519]: value = "task-1802203" [ 1501.032110] env[62519]: _type = "Task" [ 1501.032110] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1501.033456] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b00553c0-9531-4a7f-b1a4-21cf6b044666 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1501.048458] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c5950c0-9f80-482e-bdfb-1c76706194c9 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1501.051692] env[62519]: DEBUG oslo_vmware.api [None req-a1c40e59-33e7-4dc4-932b-b210029267f9 tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Task: {'id': task-1802203, 'name': Rename_Task} progress is 5%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1501.087152] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d7d60a6-9dca-42f2-aa02-531c406ebfab {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1501.096365] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-deebcd3c-27a8-48e6-b413-860878c30a95 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1501.111660] env[62519]: DEBUG nova.compute.provider_tree [None req-1d8c6fef-464e-42e4-b682-66f718a2e8f1 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Updating inventory in ProviderTree for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1501.299194] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1802198, 'name': CreateVM_Task, 'duration_secs': 1.202299} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1501.299376] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: cfefa7c8-4986-4ad0-ac20-8784ee44a737] Created VM on the ESX host {{(pid=62519) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1501.300398] env[62519]: DEBUG oslo_concurrency.lockutils [None req-6d590639-68af-4fac-aa10-a83b048f703c tempest-ServersTestMultiNic-797441404 tempest-ServersTestMultiNic-797441404-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1501.300537] env[62519]: DEBUG oslo_concurrency.lockutils [None req-6d590639-68af-4fac-aa10-a83b048f703c tempest-ServersTestMultiNic-797441404 tempest-ServersTestMultiNic-797441404-project-member] Acquired lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1501.300845] env[62519]: DEBUG oslo_concurrency.lockutils [None req-6d590639-68af-4fac-aa10-a83b048f703c tempest-ServersTestMultiNic-797441404 tempest-ServersTestMultiNic-797441404-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1501.301529] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1a2bdf41-3b8f-44e8-bd0a-5102b1ab2c59 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1501.308520] env[62519]: DEBUG oslo_vmware.api [None req-6d590639-68af-4fac-aa10-a83b048f703c tempest-ServersTestMultiNic-797441404 tempest-ServersTestMultiNic-797441404-project-member] Waiting for the task: 
(returnval){ [ 1501.308520] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]5277ce75-2322-6e35-4b81-e3cd4ac82739" [ 1501.308520] env[62519]: _type = "Task" [ 1501.308520] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1501.316982] env[62519]: DEBUG oslo_vmware.api [None req-6d590639-68af-4fac-aa10-a83b048f703c tempest-ServersTestMultiNic-797441404 tempest-ServersTestMultiNic-797441404-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]5277ce75-2322-6e35-4b81-e3cd4ac82739, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1501.350333] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1802202, 'name': CreateVM_Task} progress is 99%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1501.418141] env[62519]: DEBUG oslo_concurrency.lockutils [None req-ac03e3f5-b1cf-4968-b811-42fd40903176 tempest-ServerShowV254Test-1623148250 tempest-ServerShowV254Test-1623148250-project-member] Releasing lock "refresh_cache-1462d213-3f9a-4c60-8056-0b68f20a4939" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1501.418622] env[62519]: DEBUG nova.compute.manager [None req-ac03e3f5-b1cf-4968-b811-42fd40903176 tempest-ServerShowV254Test-1623148250 tempest-ServerShowV254Test-1623148250-project-member] [instance: 1462d213-3f9a-4c60-8056-0b68f20a4939] Start destroying the instance on the hypervisor. {{(pid=62519) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3166}} [ 1501.418868] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-ac03e3f5-b1cf-4968-b811-42fd40903176 tempest-ServerShowV254Test-1623148250 tempest-ServerShowV254Test-1623148250-project-member] [instance: 1462d213-3f9a-4c60-8056-0b68f20a4939] Destroying instance {{(pid=62519) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1501.419858] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-affeed76-f2b8-4d0b-ad8e-e205134355a0 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1501.429508] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-ac03e3f5-b1cf-4968-b811-42fd40903176 tempest-ServerShowV254Test-1623148250 tempest-ServerShowV254Test-1623148250-project-member] [instance: 1462d213-3f9a-4c60-8056-0b68f20a4939] Powering off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1501.430241] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d2bbf44f-cf4a-439c-bc67-7327ac8d3bba {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1501.437228] env[62519]: DEBUG oslo_vmware.api [None req-ac03e3f5-b1cf-4968-b811-42fd40903176 tempest-ServerShowV254Test-1623148250 tempest-ServerShowV254Test-1623148250-project-member] Waiting for the task: (returnval){ [ 1501.437228] env[62519]: value = "task-1802204" [ 1501.437228] env[62519]: _type = "Task" [ 1501.437228] env[62519]: } to complete. 
{{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1501.446203] env[62519]: DEBUG oslo_vmware.api [None req-ac03e3f5-b1cf-4968-b811-42fd40903176 tempest-ServerShowV254Test-1623148250 tempest-ServerShowV254Test-1623148250-project-member] Task: {'id': task-1802204, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1501.507610] env[62519]: DEBUG nova.network.neutron [req-584738df-2295-4c7d-a5dc-c954e7b91769 req-95eec738-97c1-4dd7-a6f0-464a57899d60 service nova] [instance: 1d4b14d3-8832-457e-aaed-462236555f57] Updated VIF entry in instance network info cache for port 54d71145-cb5f-4e36-b3dd-2905575f9742. {{(pid=62519) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1501.507610] env[62519]: DEBUG nova.network.neutron [req-584738df-2295-4c7d-a5dc-c954e7b91769 req-95eec738-97c1-4dd7-a6f0-464a57899d60 service nova] [instance: 1d4b14d3-8832-457e-aaed-462236555f57] Updating instance_info_cache with network_info: [{"id": "54d71145-cb5f-4e36-b3dd-2905575f9742", "address": "fa:16:3e:72:78:48", "network": {"id": "b4c20e4e-b4f0-4757-9ab0-b74eef10b678", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.22", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "4069337684d348e0a1ab4eb6a1a2b14d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fe99da4f-5630-4afd-918b-b327193d8489", "external-id": "nsx-vlan-transportzone-688", "segmentation_id": 688, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap54d71145-cb", "ovs_interfaceid": "54d71145-cb5f-4e36-b3dd-2905575f9742", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1501.538215] env[62519]: DEBUG nova.network.neutron [None req-804e8c96-430f-4309-af0c-933dbd81ccef tempest-ServerShowV247Test-1912217433 tempest-ServerShowV247Test-1912217433-project-member] [instance: 42497ab5-cce9-4614-a6d1-dffbf6764d7b] Instance cache missing network info. {{(pid=62519) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1501.551349] env[62519]: DEBUG oslo_vmware.api [None req-a1c40e59-33e7-4dc4-932b-b210029267f9 tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Task: {'id': task-1802203, 'name': Rename_Task, 'duration_secs': 0.198943} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1501.553782] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-a1c40e59-33e7-4dc4-932b-b210029267f9 tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] [instance: f59a31e4-7fb9-4de7-b35f-da811a305f85] Powering on the VM {{(pid=62519) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1501.554141] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f370ec71-e2fe-4e7f-97d5-e44b9e6d92aa {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1501.562254] env[62519]: DEBUG oslo_vmware.api [None req-a1c40e59-33e7-4dc4-932b-b210029267f9 tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Waiting for the task: (returnval){ [ 1501.562254] env[62519]: value = "task-1802205" [ 1501.562254] env[62519]: _type = "Task" [ 1501.562254] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1501.571555] env[62519]: DEBUG oslo_vmware.api [None req-a1c40e59-33e7-4dc4-932b-b210029267f9 tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Task: {'id': task-1802205, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1501.592226] env[62519]: DEBUG nova.network.neutron [None req-804e8c96-430f-4309-af0c-933dbd81ccef tempest-ServerShowV247Test-1912217433 tempest-ServerShowV247Test-1912217433-project-member] [instance: 42497ab5-cce9-4614-a6d1-dffbf6764d7b] Updating instance_info_cache with network_info: [] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1501.643667] env[62519]: DEBUG nova.scheduler.client.report [None req-1d8c6fef-464e-42e4-b682-66f718a2e8f1 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Updated inventory for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 with generation 62 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 1501.643959] env[62519]: DEBUG nova.compute.provider_tree [None req-1d8c6fef-464e-42e4-b682-66f718a2e8f1 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Updating resource provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 generation from 62 to 63 during operation: update_inventory {{(pid=62519) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1501.644178] env[62519]: DEBUG nova.compute.provider_tree [None req-1d8c6fef-464e-42e4-b682-66f718a2e8f1 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Updating inventory in ProviderTree for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 
'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1501.819980] env[62519]: DEBUG oslo_vmware.api [None req-6d590639-68af-4fac-aa10-a83b048f703c tempest-ServersTestMultiNic-797441404 tempest-ServersTestMultiNic-797441404-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]5277ce75-2322-6e35-4b81-e3cd4ac82739, 'name': SearchDatastore_Task, 'duration_secs': 0.027907} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1501.820380] env[62519]: DEBUG oslo_concurrency.lockutils [None req-6d590639-68af-4fac-aa10-a83b048f703c tempest-ServersTestMultiNic-797441404 tempest-ServersTestMultiNic-797441404-project-member] Releasing lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1501.820541] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-6d590639-68af-4fac-aa10-a83b048f703c tempest-ServersTestMultiNic-797441404 tempest-ServersTestMultiNic-797441404-project-member] [instance: cfefa7c8-4986-4ad0-ac20-8784ee44a737] Processing image 15793716-f1d9-4a86-9030-717adf498693 {{(pid=62519) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1501.820768] env[62519]: DEBUG oslo_concurrency.lockutils [None req-6d590639-68af-4fac-aa10-a83b048f703c tempest-ServersTestMultiNic-797441404 tempest-ServersTestMultiNic-797441404-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1501.820914] env[62519]: DEBUG oslo_concurrency.lockutils [None req-6d590639-68af-4fac-aa10-a83b048f703c tempest-ServersTestMultiNic-797441404 tempest-ServersTestMultiNic-797441404-project-member] Acquired lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1501.821107] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-6d590639-68af-4fac-aa10-a83b048f703c tempest-ServersTestMultiNic-797441404 tempest-ServersTestMultiNic-797441404-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62519) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1501.821371] env[62519]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-3dc354f5-1518-4cb1-95fa-4ca42e398c30 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1501.831712] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-6d590639-68af-4fac-aa10-a83b048f703c tempest-ServersTestMultiNic-797441404 tempest-ServersTestMultiNic-797441404-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62519) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1501.831910] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-6d590639-68af-4fac-aa10-a83b048f703c 
tempest-ServersTestMultiNic-797441404 tempest-ServersTestMultiNic-797441404-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62519) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1501.832678] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-59af9681-6890-4a85-b4f0-ddb883e0222d {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1501.838879] env[62519]: DEBUG oslo_vmware.api [None req-6d590639-68af-4fac-aa10-a83b048f703c tempest-ServersTestMultiNic-797441404 tempest-ServersTestMultiNic-797441404-project-member] Waiting for the task: (returnval){ [ 1501.838879] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]5203f008-f731-28b9-84d0-6cf2cbed67bb" [ 1501.838879] env[62519]: _type = "Task" [ 1501.838879] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1501.851063] env[62519]: DEBUG oslo_vmware.api [None req-6d590639-68af-4fac-aa10-a83b048f703c tempest-ServersTestMultiNic-797441404 tempest-ServersTestMultiNic-797441404-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]5203f008-f731-28b9-84d0-6cf2cbed67bb, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1501.853463] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1802202, 'name': CreateVM_Task, 'duration_secs': 0.664451} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1501.853599] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1d4b14d3-8832-457e-aaed-462236555f57] Created VM on the ESX host {{(pid=62519) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1501.854273] env[62519]: DEBUG oslo_concurrency.lockutils [None req-d532afdb-73c2-4922-91b8-52d65f7c0d83 tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1501.854440] env[62519]: DEBUG oslo_concurrency.lockutils [None req-d532afdb-73c2-4922-91b8-52d65f7c0d83 tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] Acquired lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1501.854740] env[62519]: DEBUG oslo_concurrency.lockutils [None req-d532afdb-73c2-4922-91b8-52d65f7c0d83 tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1501.854976] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8cc6b289-9ed3-4eeb-9519-9921a42b225e {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1501.859883] env[62519]: DEBUG oslo_vmware.api [None 
req-d532afdb-73c2-4922-91b8-52d65f7c0d83 tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] Waiting for the task: (returnval){ [ 1501.859883] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]52d8f5b2-27d7-e144-3ccf-98c8a71ccadb" [ 1501.859883] env[62519]: _type = "Task" [ 1501.859883] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1501.869122] env[62519]: DEBUG oslo_vmware.api [None req-d532afdb-73c2-4922-91b8-52d65f7c0d83 tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52d8f5b2-27d7-e144-3ccf-98c8a71ccadb, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1501.947750] env[62519]: DEBUG oslo_vmware.api [None req-ac03e3f5-b1cf-4968-b811-42fd40903176 tempest-ServerShowV254Test-1623148250 tempest-ServerShowV254Test-1623148250-project-member] Task: {'id': task-1802204, 'name': PowerOffVM_Task, 'duration_secs': 0.143644} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1501.948021] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-ac03e3f5-b1cf-4968-b811-42fd40903176 tempest-ServerShowV254Test-1623148250 tempest-ServerShowV254Test-1623148250-project-member] [instance: 1462d213-3f9a-4c60-8056-0b68f20a4939] Powered off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1501.948192] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-ac03e3f5-b1cf-4968-b811-42fd40903176 tempest-ServerShowV254Test-1623148250 tempest-ServerShowV254Test-1623148250-project-member] [instance: 1462d213-3f9a-4c60-8056-0b68f20a4939] Unregistering the VM {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1501.948451] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-2d375815-a0b7-46ab-b284-6930cfd6c9cd {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1501.975955] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-ac03e3f5-b1cf-4968-b811-42fd40903176 tempest-ServerShowV254Test-1623148250 tempest-ServerShowV254Test-1623148250-project-member] [instance: 1462d213-3f9a-4c60-8056-0b68f20a4939] Unregistered the VM {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1501.976242] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-ac03e3f5-b1cf-4968-b811-42fd40903176 tempest-ServerShowV254Test-1623148250 tempest-ServerShowV254Test-1623148250-project-member] [instance: 1462d213-3f9a-4c60-8056-0b68f20a4939] Deleting contents of the VM from datastore datastore1 {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1501.976429] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-ac03e3f5-b1cf-4968-b811-42fd40903176 tempest-ServerShowV254Test-1623148250 tempest-ServerShowV254Test-1623148250-project-member] Deleting the datastore file [datastore1] 1462d213-3f9a-4c60-8056-0b68f20a4939 {{(pid=62519) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1501.976768] env[62519]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-0ecb16e4-7da9-4f3b-b0c5-e2cb2c1bae1e 
{{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1501.984598] env[62519]: DEBUG oslo_vmware.api [None req-ac03e3f5-b1cf-4968-b811-42fd40903176 tempest-ServerShowV254Test-1623148250 tempest-ServerShowV254Test-1623148250-project-member] Waiting for the task: (returnval){ [ 1501.984598] env[62519]: value = "task-1802207" [ 1501.984598] env[62519]: _type = "Task" [ 1501.984598] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1501.993807] env[62519]: DEBUG oslo_vmware.api [None req-ac03e3f5-b1cf-4968-b811-42fd40903176 tempest-ServerShowV254Test-1623148250 tempest-ServerShowV254Test-1623148250-project-member] Task: {'id': task-1802207, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1502.009501] env[62519]: DEBUG oslo_concurrency.lockutils [req-584738df-2295-4c7d-a5dc-c954e7b91769 req-95eec738-97c1-4dd7-a6f0-464a57899d60 service nova] Releasing lock "refresh_cache-1d4b14d3-8832-457e-aaed-462236555f57" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1502.009778] env[62519]: DEBUG nova.compute.manager [req-584738df-2295-4c7d-a5dc-c954e7b91769 req-95eec738-97c1-4dd7-a6f0-464a57899d60 service nova] [instance: 40c7a9b8-d541-464a-ba87-76cfc183ae31] Received event network-vif-deleted-e1c64107-2501-4168-900e-4b4153b778b9 {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1502.073817] env[62519]: DEBUG oslo_vmware.api [None req-a1c40e59-33e7-4dc4-932b-b210029267f9 tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Task: {'id': task-1802205, 'name': PowerOnVM_Task} progress is 100%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1502.096045] env[62519]: DEBUG oslo_concurrency.lockutils [None req-804e8c96-430f-4309-af0c-933dbd81ccef tempest-ServerShowV247Test-1912217433 tempest-ServerShowV247Test-1912217433-project-member] Releasing lock "refresh_cache-42497ab5-cce9-4614-a6d1-dffbf6764d7b" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1502.096045] env[62519]: DEBUG nova.compute.manager [None req-804e8c96-430f-4309-af0c-933dbd81ccef tempest-ServerShowV247Test-1912217433 tempest-ServerShowV247Test-1912217433-project-member] [instance: 42497ab5-cce9-4614-a6d1-dffbf6764d7b] Start destroying the instance on the hypervisor. 
{{(pid=62519) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3166}} [ 1502.096045] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-804e8c96-430f-4309-af0c-933dbd81ccef tempest-ServerShowV247Test-1912217433 tempest-ServerShowV247Test-1912217433-project-member] [instance: 42497ab5-cce9-4614-a6d1-dffbf6764d7b] Destroying instance {{(pid=62519) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1502.097045] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bdc78c96-bd38-46a3-8b34-0043f34fd257 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1502.106803] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-804e8c96-430f-4309-af0c-933dbd81ccef tempest-ServerShowV247Test-1912217433 tempest-ServerShowV247Test-1912217433-project-member] [instance: 42497ab5-cce9-4614-a6d1-dffbf6764d7b] Powering off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1502.107071] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-5aaa4f72-0479-4dec-b5d4-c4f27085cd63 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1502.114102] env[62519]: DEBUG oslo_vmware.api [None req-804e8c96-430f-4309-af0c-933dbd81ccef tempest-ServerShowV247Test-1912217433 tempest-ServerShowV247Test-1912217433-project-member] Waiting for the task: (returnval){ [ 1502.114102] env[62519]: value = "task-1802208" [ 1502.114102] env[62519]: _type = "Task" [ 1502.114102] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1502.123638] env[62519]: DEBUG oslo_vmware.api [None req-804e8c96-430f-4309-af0c-933dbd81ccef tempest-ServerShowV247Test-1912217433 tempest-ServerShowV247Test-1912217433-project-member] Task: {'id': task-1802208, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1502.149931] env[62519]: DEBUG oslo_concurrency.lockutils [None req-1d8c6fef-464e-42e4-b682-66f718a2e8f1 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.669s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1502.150452] env[62519]: DEBUG nova.compute.manager [None req-1d8c6fef-464e-42e4-b682-66f718a2e8f1 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] [instance: 51eaac08-75fd-49f9-9b1a-cc2a2d799634] Start building networks asynchronously for instance. 
{{(pid=62519) _build_resources /opt/stack/nova/nova/compute/manager.py:2838}} [ 1502.153230] env[62519]: DEBUG oslo_concurrency.lockutils [None req-97f971da-2cc7-4dd4-b5fa-02fbf4a6a87f tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 31.364s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1502.154799] env[62519]: DEBUG nova.objects.instance [None req-97f971da-2cc7-4dd4-b5fa-02fbf4a6a87f tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] Lazy-loading 'resources' on Instance uuid 021c3287-5887-457e-9b3a-233308fb9b23 {{(pid=62519) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1502.350017] env[62519]: DEBUG oslo_vmware.api [None req-6d590639-68af-4fac-aa10-a83b048f703c tempest-ServersTestMultiNic-797441404 tempest-ServersTestMultiNic-797441404-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]5203f008-f731-28b9-84d0-6cf2cbed67bb, 'name': SearchDatastore_Task, 'duration_secs': 0.01396} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1502.350825] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c535eb62-ea98-4b8b-926e-faf60d762c5c {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1502.357042] env[62519]: DEBUG oslo_vmware.api [None req-6d590639-68af-4fac-aa10-a83b048f703c tempest-ServersTestMultiNic-797441404 tempest-ServersTestMultiNic-797441404-project-member] Waiting for the task: (returnval){ [ 1502.357042] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]5241ed49-ac45-e79b-4c83-1f8b093bb527" [ 1502.357042] env[62519]: _type = "Task" [ 1502.357042] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1502.369155] env[62519]: DEBUG oslo_vmware.api [None req-d532afdb-73c2-4922-91b8-52d65f7c0d83 tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52d8f5b2-27d7-e144-3ccf-98c8a71ccadb, 'name': SearchDatastore_Task, 'duration_secs': 0.009571} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1502.372148] env[62519]: DEBUG oslo_concurrency.lockutils [None req-d532afdb-73c2-4922-91b8-52d65f7c0d83 tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] Releasing lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1502.372386] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-d532afdb-73c2-4922-91b8-52d65f7c0d83 tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] [instance: 1d4b14d3-8832-457e-aaed-462236555f57] Processing image 15793716-f1d9-4a86-9030-717adf498693 {{(pid=62519) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1502.372585] env[62519]: DEBUG oslo_concurrency.lockutils [None req-d532afdb-73c2-4922-91b8-52d65f7c0d83 tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1502.372789] env[62519]: DEBUG oslo_vmware.api [None req-6d590639-68af-4fac-aa10-a83b048f703c tempest-ServersTestMultiNic-797441404 tempest-ServersTestMultiNic-797441404-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]5241ed49-ac45-e79b-4c83-1f8b093bb527, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1502.495034] env[62519]: DEBUG oslo_vmware.api [None req-ac03e3f5-b1cf-4968-b811-42fd40903176 tempest-ServerShowV254Test-1623148250 tempest-ServerShowV254Test-1623148250-project-member] Task: {'id': task-1802207, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.188181} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1502.495156] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-ac03e3f5-b1cf-4968-b811-42fd40903176 tempest-ServerShowV254Test-1623148250 tempest-ServerShowV254Test-1623148250-project-member] Deleted the datastore file {{(pid=62519) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1502.495234] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-ac03e3f5-b1cf-4968-b811-42fd40903176 tempest-ServerShowV254Test-1623148250 tempest-ServerShowV254Test-1623148250-project-member] [instance: 1462d213-3f9a-4c60-8056-0b68f20a4939] Deleted contents of the VM from datastore datastore1 {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1502.495448] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-ac03e3f5-b1cf-4968-b811-42fd40903176 tempest-ServerShowV254Test-1623148250 tempest-ServerShowV254Test-1623148250-project-member] [instance: 1462d213-3f9a-4c60-8056-0b68f20a4939] Instance destroyed {{(pid=62519) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1502.495578] env[62519]: INFO nova.compute.manager [None req-ac03e3f5-b1cf-4968-b811-42fd40903176 tempest-ServerShowV254Test-1623148250 tempest-ServerShowV254Test-1623148250-project-member] [instance: 1462d213-3f9a-4c60-8056-0b68f20a4939] Took 1.08 seconds to destroy the instance on the hypervisor. 
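The entries above trace the guest-destroy path end to end: PowerOffVM_Task, UnregisterVM, then DeleteDatastoreFile_Task, each vCenter task awaited through oslo.vmware's wait_for_task polling. A minimal sketch of that invoke-and-wait pattern (not Nova's own vmops/ds_util code; vm_ref, ds_path and dc_ref are placeholders assumed to have been looked up already):

from oslo_vmware import api


def destroy_vm(session: api.VMwareAPISession, vm_ref, ds_path, dc_ref):
    # Power off first; PowerOffVM_Task returns a task reference that must be
    # polled, which is what the "Waiting for the task ... to complete" and
    # "progress is N%" entries correspond to.
    task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)
    session.wait_for_task(task)

    # UnregisterVM is synchronous and returns no task object.
    session.invoke_api(session.vim, 'UnregisterVM', vm_ref)

    # Delete the VM's files from the datastore, again awaiting the task.
    file_manager = session.vim.service_content.fileManager
    task = session.invoke_api(session.vim, 'DeleteDatastoreFile_Task',
                              file_manager, name=ds_path, datacenter=dc_ref)
    session.wait_for_task(task)
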
[ 1502.495822] env[62519]: DEBUG oslo.service.loopingcall [None req-ac03e3f5-b1cf-4968-b811-42fd40903176 tempest-ServerShowV254Test-1623148250 tempest-ServerShowV254Test-1623148250-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=62519) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1502.496068] env[62519]: DEBUG nova.compute.manager [-] [instance: 1462d213-3f9a-4c60-8056-0b68f20a4939] Deallocating network for instance {{(pid=62519) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2297}} [ 1502.496178] env[62519]: DEBUG nova.network.neutron [-] [instance: 1462d213-3f9a-4c60-8056-0b68f20a4939] deallocate_for_instance() {{(pid=62519) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1502.514302] env[62519]: DEBUG nova.network.neutron [-] [instance: 1462d213-3f9a-4c60-8056-0b68f20a4939] Instance cache missing network info. {{(pid=62519) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1502.574250] env[62519]: DEBUG oslo_vmware.api [None req-a1c40e59-33e7-4dc4-932b-b210029267f9 tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Task: {'id': task-1802205, 'name': PowerOnVM_Task, 'duration_secs': 0.562721} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1502.574526] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-a1c40e59-33e7-4dc4-932b-b210029267f9 tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] [instance: f59a31e4-7fb9-4de7-b35f-da811a305f85] Powered on the VM {{(pid=62519) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1502.574722] env[62519]: INFO nova.compute.manager [None req-a1c40e59-33e7-4dc4-932b-b210029267f9 tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] [instance: f59a31e4-7fb9-4de7-b35f-da811a305f85] Took 8.33 seconds to spawn the instance on the hypervisor. [ 1502.574898] env[62519]: DEBUG nova.compute.manager [None req-a1c40e59-33e7-4dc4-932b-b210029267f9 tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] [instance: f59a31e4-7fb9-4de7-b35f-da811a305f85] Checking state {{(pid=62519) _get_power_state /opt/stack/nova/nova/compute/manager.py:1799}} [ 1502.575667] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ffa165c3-3bb7-4706-bbaa-ee33cedeeafc {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1502.624702] env[62519]: DEBUG oslo_vmware.api [None req-804e8c96-430f-4309-af0c-933dbd81ccef tempest-ServerShowV247Test-1912217433 tempest-ServerShowV247Test-1912217433-project-member] Task: {'id': task-1802208, 'name': PowerOffVM_Task, 'duration_secs': 0.170573} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1502.624936] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-804e8c96-430f-4309-af0c-933dbd81ccef tempest-ServerShowV247Test-1912217433 tempest-ServerShowV247Test-1912217433-project-member] [instance: 42497ab5-cce9-4614-a6d1-dffbf6764d7b] Powered off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1502.625131] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-804e8c96-430f-4309-af0c-933dbd81ccef tempest-ServerShowV247Test-1912217433 tempest-ServerShowV247Test-1912217433-project-member] [instance: 42497ab5-cce9-4614-a6d1-dffbf6764d7b] Unregistering the VM {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1502.625601] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-0e5fecb5-07f9-4ff3-b775-47bfb8ac1e3d {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1502.654422] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-804e8c96-430f-4309-af0c-933dbd81ccef tempest-ServerShowV247Test-1912217433 tempest-ServerShowV247Test-1912217433-project-member] [instance: 42497ab5-cce9-4614-a6d1-dffbf6764d7b] Unregistered the VM {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1502.654765] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-804e8c96-430f-4309-af0c-933dbd81ccef tempest-ServerShowV247Test-1912217433 tempest-ServerShowV247Test-1912217433-project-member] [instance: 42497ab5-cce9-4614-a6d1-dffbf6764d7b] Deleting contents of the VM from datastore datastore1 {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1502.655062] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-804e8c96-430f-4309-af0c-933dbd81ccef tempest-ServerShowV247Test-1912217433 tempest-ServerShowV247Test-1912217433-project-member] Deleting the datastore file [datastore1] 42497ab5-cce9-4614-a6d1-dffbf6764d7b {{(pid=62519) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1502.656620] env[62519]: DEBUG nova.compute.utils [None req-1d8c6fef-464e-42e4-b682-66f718a2e8f1 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Using /dev/sd instead of None {{(pid=62519) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1502.657915] env[62519]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-6cb65cac-5d49-43b4-b673-9115cee856ae {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1502.662650] env[62519]: DEBUG nova.compute.manager [None req-1d8c6fef-464e-42e4-b682-66f718a2e8f1 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] [instance: 51eaac08-75fd-49f9-9b1a-cc2a2d799634] Allocating IP information in the background. 
{{(pid=62519) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1989}} [ 1502.662818] env[62519]: DEBUG nova.network.neutron [None req-1d8c6fef-464e-42e4-b682-66f718a2e8f1 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] [instance: 51eaac08-75fd-49f9-9b1a-cc2a2d799634] allocate_for_instance() {{(pid=62519) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1502.670092] env[62519]: DEBUG oslo_vmware.api [None req-804e8c96-430f-4309-af0c-933dbd81ccef tempest-ServerShowV247Test-1912217433 tempest-ServerShowV247Test-1912217433-project-member] Waiting for the task: (returnval){ [ 1502.670092] env[62519]: value = "task-1802210" [ 1502.670092] env[62519]: _type = "Task" [ 1502.670092] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1502.679796] env[62519]: DEBUG oslo_vmware.api [None req-804e8c96-430f-4309-af0c-933dbd81ccef tempest-ServerShowV247Test-1912217433 tempest-ServerShowV247Test-1912217433-project-member] Task: {'id': task-1802210, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1502.712127] env[62519]: DEBUG nova.policy [None req-1d8c6fef-464e-42e4-b682-66f718a2e8f1 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '12d88579b24941a0be744afe44126360', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'df6727c290724a8ebef5188c77e91399', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62519) authorize /opt/stack/nova/nova/policy.py:192}} [ 1502.870366] env[62519]: DEBUG oslo_vmware.api [None req-6d590639-68af-4fac-aa10-a83b048f703c tempest-ServersTestMultiNic-797441404 tempest-ServersTestMultiNic-797441404-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]5241ed49-ac45-e79b-4c83-1f8b093bb527, 'name': SearchDatastore_Task, 'duration_secs': 0.044403} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1502.870366] env[62519]: DEBUG oslo_concurrency.lockutils [None req-6d590639-68af-4fac-aa10-a83b048f703c tempest-ServersTestMultiNic-797441404 tempest-ServersTestMultiNic-797441404-project-member] Releasing lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1502.870366] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-6d590639-68af-4fac-aa10-a83b048f703c tempest-ServersTestMultiNic-797441404 tempest-ServersTestMultiNic-797441404-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk to [datastore1] cfefa7c8-4986-4ad0-ac20-8784ee44a737/cfefa7c8-4986-4ad0-ac20-8784ee44a737.vmdk {{(pid=62519) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1502.871016] env[62519]: DEBUG oslo_concurrency.lockutils [None req-d532afdb-73c2-4922-91b8-52d65f7c0d83 tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] Acquired lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1502.873664] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-d532afdb-73c2-4922-91b8-52d65f7c0d83 tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62519) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1502.873664] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-5ab76303-8a8e-4150-a5bd-752ba412f6a2 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1502.873664] env[62519]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d81acf9a-ea4c-48b5-9403-0e21fe6b26b6 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1502.884502] env[62519]: DEBUG oslo_vmware.api [None req-6d590639-68af-4fac-aa10-a83b048f703c tempest-ServersTestMultiNic-797441404 tempest-ServersTestMultiNic-797441404-project-member] Waiting for the task: (returnval){ [ 1502.884502] env[62519]: value = "task-1802211" [ 1502.884502] env[62519]: _type = "Task" [ 1502.884502] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1502.885700] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-d532afdb-73c2-4922-91b8-52d65f7c0d83 tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62519) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1502.885870] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-d532afdb-73c2-4922-91b8-52d65f7c0d83 tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62519) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1502.889754] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-532f48dc-f682-4308-a113-49fc0c773de7 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1502.899188] env[62519]: DEBUG oslo_vmware.api [None req-d532afdb-73c2-4922-91b8-52d65f7c0d83 tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] Waiting for the task: (returnval){ [ 1502.899188] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]52d4ebc7-d389-04c2-cf64-902d6267b42d" [ 1502.899188] env[62519]: _type = "Task" [ 1502.899188] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1502.902598] env[62519]: DEBUG oslo_vmware.api [None req-6d590639-68af-4fac-aa10-a83b048f703c tempest-ServersTestMultiNic-797441404 tempest-ServersTestMultiNic-797441404-project-member] Task: {'id': task-1802211, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1502.912655] env[62519]: DEBUG oslo_vmware.api [None req-d532afdb-73c2-4922-91b8-52d65f7c0d83 tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52d4ebc7-d389-04c2-cf64-902d6267b42d, 'name': SearchDatastore_Task, 'duration_secs': 0.010928} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1502.916506] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-14204846-3d62-4d97-8be7-fe1cb146647d {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1502.923022] env[62519]: DEBUG oslo_vmware.api [None req-d532afdb-73c2-4922-91b8-52d65f7c0d83 tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] Waiting for the task: (returnval){ [ 1502.923022] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]520977f6-a6c6-7165-2a79-c59769f3932f" [ 1502.923022] env[62519]: _type = "Task" [ 1502.923022] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1502.934775] env[62519]: DEBUG oslo_vmware.api [None req-d532afdb-73c2-4922-91b8-52d65f7c0d83 tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]520977f6-a6c6-7165-2a79-c59769f3932f, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1502.978923] env[62519]: DEBUG nova.network.neutron [None req-1d8c6fef-464e-42e4-b682-66f718a2e8f1 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] [instance: 51eaac08-75fd-49f9-9b1a-cc2a2d799634] Successfully created port: 8473d03f-ba3c-4b3a-a79f-2eedc6ab90a4 {{(pid=62519) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1503.017043] env[62519]: DEBUG nova.network.neutron [-] [instance: 1462d213-3f9a-4c60-8056-0b68f20a4939] Updating instance_info_cache with network_info: [] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1503.092095] env[62519]: INFO nova.compute.manager [None req-a1c40e59-33e7-4dc4-932b-b210029267f9 tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] [instance: f59a31e4-7fb9-4de7-b35f-da811a305f85] Took 46.30 seconds to build instance. [ 1503.162724] env[62519]: DEBUG nova.compute.manager [None req-1d8c6fef-464e-42e4-b682-66f718a2e8f1 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] [instance: 51eaac08-75fd-49f9-9b1a-cc2a2d799634] Start building block device mappings for instance. {{(pid=62519) _build_resources /opt/stack/nova/nova/compute/manager.py:2873}} [ 1503.183313] env[62519]: DEBUG oslo_vmware.api [None req-804e8c96-430f-4309-af0c-933dbd81ccef tempest-ServerShowV247Test-1912217433 tempest-ServerShowV247Test-1912217433-project-member] Task: {'id': task-1802210, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.139941} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1503.183313] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-804e8c96-430f-4309-af0c-933dbd81ccef tempest-ServerShowV247Test-1912217433 tempest-ServerShowV247Test-1912217433-project-member] Deleted the datastore file {{(pid=62519) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1503.183313] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-804e8c96-430f-4309-af0c-933dbd81ccef tempest-ServerShowV247Test-1912217433 tempest-ServerShowV247Test-1912217433-project-member] [instance: 42497ab5-cce9-4614-a6d1-dffbf6764d7b] Deleted contents of the VM from datastore datastore1 {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1503.183447] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-804e8c96-430f-4309-af0c-933dbd81ccef tempest-ServerShowV247Test-1912217433 tempest-ServerShowV247Test-1912217433-project-member] [instance: 42497ab5-cce9-4614-a6d1-dffbf6764d7b] Instance destroyed {{(pid=62519) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1503.183537] env[62519]: INFO nova.compute.manager [None req-804e8c96-430f-4309-af0c-933dbd81ccef tempest-ServerShowV247Test-1912217433 tempest-ServerShowV247Test-1912217433-project-member] [instance: 42497ab5-cce9-4614-a6d1-dffbf6764d7b] Took 1.09 seconds to destroy the instance on the hypervisor. [ 1503.183772] env[62519]: DEBUG oslo.service.loopingcall [None req-804e8c96-430f-4309-af0c-933dbd81ccef tempest-ServerShowV247Test-1912217433 tempest-ServerShowV247Test-1912217433-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. 
{{(pid=62519) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1503.183950] env[62519]: DEBUG nova.compute.manager [-] [instance: 42497ab5-cce9-4614-a6d1-dffbf6764d7b] Deallocating network for instance {{(pid=62519) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2297}} [ 1503.184048] env[62519]: DEBUG nova.network.neutron [-] [instance: 42497ab5-cce9-4614-a6d1-dffbf6764d7b] deallocate_for_instance() {{(pid=62519) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1503.211770] env[62519]: DEBUG nova.network.neutron [-] [instance: 42497ab5-cce9-4614-a6d1-dffbf6764d7b] Instance cache missing network info. {{(pid=62519) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1503.246508] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2fcb76a4-2ed5-49d1-a167-41d99aefb54e {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1503.257291] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cac38939-253f-4ef3-8e39-8e326fc8d33a {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1503.288431] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9bc4cf9-3043-4423-9530-522533e5f20d {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1503.297627] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65a3b2a6-3147-4034-8742-7932661d4ff1 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1503.314837] env[62519]: DEBUG nova.compute.provider_tree [None req-97f971da-2cc7-4dd4-b5fa-02fbf4a6a87f tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] Inventory has not changed in ProviderTree for provider: f8ca0d98-9158-4b85-ae0e-b106f966dd44 {{(pid=62519) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1503.397476] env[62519]: DEBUG oslo_vmware.api [None req-6d590639-68af-4fac-aa10-a83b048f703c tempest-ServersTestMultiNic-797441404 tempest-ServersTestMultiNic-797441404-project-member] Task: {'id': task-1802211, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1503.434218] env[62519]: DEBUG oslo_vmware.api [None req-d532afdb-73c2-4922-91b8-52d65f7c0d83 tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]520977f6-a6c6-7165-2a79-c59769f3932f, 'name': SearchDatastore_Task, 'duration_secs': 0.010023} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1503.434965] env[62519]: DEBUG oslo_concurrency.lockutils [None req-d532afdb-73c2-4922-91b8-52d65f7c0d83 tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] Releasing lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1503.435355] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-d532afdb-73c2-4922-91b8-52d65f7c0d83 tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk to [datastore1] 1d4b14d3-8832-457e-aaed-462236555f57/1d4b14d3-8832-457e-aaed-462236555f57.vmdk {{(pid=62519) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1503.435676] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-6c536ece-d74e-4bf2-acfd-50cccc2dcc5c {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1503.446138] env[62519]: DEBUG oslo_vmware.api [None req-d532afdb-73c2-4922-91b8-52d65f7c0d83 tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] Waiting for the task: (returnval){ [ 1503.446138] env[62519]: value = "task-1802212" [ 1503.446138] env[62519]: _type = "Task" [ 1503.446138] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1503.456122] env[62519]: DEBUG oslo_vmware.api [None req-d532afdb-73c2-4922-91b8-52d65f7c0d83 tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] Task: {'id': task-1802212, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1503.519726] env[62519]: INFO nova.compute.manager [-] [instance: 1462d213-3f9a-4c60-8056-0b68f20a4939] Took 1.02 seconds to deallocate network for instance. 
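The interleaved oslo_concurrency.lockutils lines ('Acquiring lock ...', 'acquired by ... :: waited', '"released" by ... :: held') come from two locking idioms in oslo.concurrency. A minimal sketch of both, assuming nothing beyond the library itself (the function bodies are placeholders, not Nova code):

from oslo_concurrency import lockutils


# The '"acquired by" ... :: waited' / '"released" by ... :: held' lines are
# emitted by the synchronized decorator around a named critical section.
@lockutils.synchronized('compute_resources')
def update_usage():
    pass  # placeholder: resource-tracker bookkeeping would run under the lock


# The bare 'Acquiring/Acquired/Releasing lock' lines come from the lock()
# context manager, e.g. around a per-instance cache refresh.
def refresh_cache(instance_uuid):
    with lockutils.lock('refresh_cache-%s' % instance_uuid):
        pass  # placeholder: rebuild the instance's network info cache here
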
[ 1503.714942] env[62519]: DEBUG nova.network.neutron [-] [instance: 42497ab5-cce9-4614-a6d1-dffbf6764d7b] Updating instance_info_cache with network_info: [] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1503.818631] env[62519]: DEBUG nova.scheduler.client.report [None req-97f971da-2cc7-4dd4-b5fa-02fbf4a6a87f tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] Inventory has not changed for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1503.897873] env[62519]: DEBUG oslo_vmware.api [None req-6d590639-68af-4fac-aa10-a83b048f703c tempest-ServersTestMultiNic-797441404 tempest-ServersTestMultiNic-797441404-project-member] Task: {'id': task-1802211, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.615637} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1503.898308] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-6d590639-68af-4fac-aa10-a83b048f703c tempest-ServersTestMultiNic-797441404 tempest-ServersTestMultiNic-797441404-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk to [datastore1] cfefa7c8-4986-4ad0-ac20-8784ee44a737/cfefa7c8-4986-4ad0-ac20-8784ee44a737.vmdk {{(pid=62519) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1503.898535] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-6d590639-68af-4fac-aa10-a83b048f703c tempest-ServersTestMultiNic-797441404 tempest-ServersTestMultiNic-797441404-project-member] [instance: cfefa7c8-4986-4ad0-ac20-8784ee44a737] Extending root virtual disk to 1048576 {{(pid=62519) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1503.898783] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-080351fa-10d7-43f0-8081-8fe750a62af1 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1503.906971] env[62519]: DEBUG oslo_vmware.api [None req-6d590639-68af-4fac-aa10-a83b048f703c tempest-ServersTestMultiNic-797441404 tempest-ServersTestMultiNic-797441404-project-member] Waiting for the task: (returnval){ [ 1503.906971] env[62519]: value = "task-1802213" [ 1503.906971] env[62519]: _type = "Task" [ 1503.906971] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1503.919784] env[62519]: DEBUG oslo_vmware.api [None req-6d590639-68af-4fac-aa10-a83b048f703c tempest-ServersTestMultiNic-797441404 tempest-ServersTestMultiNic-797441404-project-member] Task: {'id': task-1802213, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1503.957431] env[62519]: DEBUG oslo_vmware.api [None req-d532afdb-73c2-4922-91b8-52d65f7c0d83 tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] Task: {'id': task-1802212, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1504.026803] env[62519]: DEBUG oslo_concurrency.lockutils [None req-ac03e3f5-b1cf-4968-b811-42fd40903176 tempest-ServerShowV254Test-1623148250 tempest-ServerShowV254Test-1623148250-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1504.174516] env[62519]: DEBUG nova.compute.manager [None req-1d8c6fef-464e-42e4-b682-66f718a2e8f1 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] [instance: 51eaac08-75fd-49f9-9b1a-cc2a2d799634] Start spawning the instance on the hypervisor. {{(pid=62519) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2647}} [ 1504.205226] env[62519]: DEBUG nova.virt.hardware [None req-1d8c6fef-464e-42e4-b682-66f718a2e8f1 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T07:56:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T07:55:55Z,direct_url=,disk_format='vmdk',id=15793716-f1d9-4a86-9030-717adf498693,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4069337684d348e0a1ab4eb6a1a2b14d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T07:55:56Z,virtual_size=,visibility=), allow threads: False {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1504.205461] env[62519]: DEBUG nova.virt.hardware [None req-1d8c6fef-464e-42e4-b682-66f718a2e8f1 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Flavor limits 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1504.205664] env[62519]: DEBUG nova.virt.hardware [None req-1d8c6fef-464e-42e4-b682-66f718a2e8f1 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Image limits 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1504.205999] env[62519]: DEBUG nova.virt.hardware [None req-1d8c6fef-464e-42e4-b682-66f718a2e8f1 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Flavor pref 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1504.206206] env[62519]: DEBUG nova.virt.hardware [None req-1d8c6fef-464e-42e4-b682-66f718a2e8f1 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Image pref 0:0:0 {{(pid=62519) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:396}} [ 1504.206361] env[62519]: DEBUG nova.virt.hardware [None req-1d8c6fef-464e-42e4-b682-66f718a2e8f1 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1504.206585] env[62519]: DEBUG nova.virt.hardware [None req-1d8c6fef-464e-42e4-b682-66f718a2e8f1 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1504.206745] env[62519]: DEBUG nova.virt.hardware [None req-1d8c6fef-464e-42e4-b682-66f718a2e8f1 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62519) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1504.206932] env[62519]: DEBUG nova.virt.hardware [None req-1d8c6fef-464e-42e4-b682-66f718a2e8f1 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Got 1 possible topologies {{(pid=62519) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1504.207138] env[62519]: DEBUG nova.virt.hardware [None req-1d8c6fef-464e-42e4-b682-66f718a2e8f1 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1504.207397] env[62519]: DEBUG nova.virt.hardware [None req-1d8c6fef-464e-42e4-b682-66f718a2e8f1 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1504.209038] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-527aa7d4-29b8-4589-9457-3a73f680f138 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1504.217995] env[62519]: INFO nova.compute.manager [-] [instance: 42497ab5-cce9-4614-a6d1-dffbf6764d7b] Took 1.03 seconds to deallocate network for instance. 
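The nova.virt.hardware trace above walks the CPU-topology selection for the 1-vCPU m1.nano flavor: with no flavor or image limits the per-dimension maxima default to 65536, every (sockets, cores, threads) combination whose product equals the vCPU count is a candidate, and for one vCPU that leaves only (1, 1, 1). A simplified stand-alone illustration of that search (not Nova's actual hardware.py implementation):

import itertools


def possible_topologies(vcpus, max_sockets=65536, max_cores=65536,
                        max_threads=65536):
    """Yield every (sockets, cores, threads) whose product equals vcpus."""
    for s, c, t in itertools.product(
            range(1, min(vcpus, max_sockets) + 1),
            range(1, min(vcpus, max_cores) + 1),
            range(1, min(vcpus, max_threads) + 1)):
        if s * c * t == vcpus:
            yield s, c, t


print(list(possible_topologies(1)))  # [(1, 1, 1)], matching the trace above
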
[ 1504.227835] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95225565-aa97-4035-893d-4c711c716f1b {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1504.328546] env[62519]: DEBUG oslo_concurrency.lockutils [None req-97f971da-2cc7-4dd4-b5fa-02fbf4a6a87f tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.171s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1504.330720] env[62519]: DEBUG oslo_concurrency.lockutils [None req-0c4994c0-96a4-4874-90b8-74f17da23551 tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 33.504s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1504.333473] env[62519]: INFO nova.compute.claims [None req-0c4994c0-96a4-4874-90b8-74f17da23551 tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] [instance: 2b68e95a-1d93-43ee-b0a6-996c9042f5c7] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1504.357128] env[62519]: INFO nova.scheduler.client.report [None req-97f971da-2cc7-4dd4-b5fa-02fbf4a6a87f tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] Deleted allocations for instance 021c3287-5887-457e-9b3a-233308fb9b23 [ 1504.421206] env[62519]: DEBUG oslo_vmware.api [None req-6d590639-68af-4fac-aa10-a83b048f703c tempest-ServersTestMultiNic-797441404 tempest-ServersTestMultiNic-797441404-project-member] Task: {'id': task-1802213, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.083463} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1504.421206] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-6d590639-68af-4fac-aa10-a83b048f703c tempest-ServersTestMultiNic-797441404 tempest-ServersTestMultiNic-797441404-project-member] [instance: cfefa7c8-4986-4ad0-ac20-8784ee44a737] Extended root virtual disk {{(pid=62519) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1504.421792] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ae64b00-cc8a-470d-9989-e57bfd92abe6 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1504.452460] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-6d590639-68af-4fac-aa10-a83b048f703c tempest-ServersTestMultiNic-797441404 tempest-ServersTestMultiNic-797441404-project-member] [instance: cfefa7c8-4986-4ad0-ac20-8784ee44a737] Reconfiguring VM instance instance-0000001d to attach disk [datastore1] cfefa7c8-4986-4ad0-ac20-8784ee44a737/cfefa7c8-4986-4ad0-ac20-8784ee44a737.vmdk or device None with type sparse {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1504.453147] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-32927ff3-0387-4d31-8cf9-2051ad73bed4 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1504.480053] env[62519]: DEBUG oslo_vmware.api [None req-d532afdb-73c2-4922-91b8-52d65f7c0d83 tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] Task: {'id': task-1802212, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.851564} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1504.481575] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-d532afdb-73c2-4922-91b8-52d65f7c0d83 tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk to [datastore1] 1d4b14d3-8832-457e-aaed-462236555f57/1d4b14d3-8832-457e-aaed-462236555f57.vmdk {{(pid=62519) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1504.481700] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-d532afdb-73c2-4922-91b8-52d65f7c0d83 tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] [instance: 1d4b14d3-8832-457e-aaed-462236555f57] Extending root virtual disk to 1048576 {{(pid=62519) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1504.482017] env[62519]: DEBUG oslo_vmware.api [None req-6d590639-68af-4fac-aa10-a83b048f703c tempest-ServersTestMultiNic-797441404 tempest-ServersTestMultiNic-797441404-project-member] Waiting for the task: (returnval){ [ 1504.482017] env[62519]: value = "task-1802214" [ 1504.482017] env[62519]: _type = "Task" [ 1504.482017] env[62519]: } to complete. 
{{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1504.482307] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-33390c02-9e0c-4c08-9b59-bafaaf97fa6c {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1504.489159] env[62519]: DEBUG nova.compute.manager [req-7aa5eac7-5df2-40ae-8a1d-04c5a5ea3b09 req-ed9d2913-bac9-4f1b-a0cc-0434faf660a5 service nova] [instance: 51eaac08-75fd-49f9-9b1a-cc2a2d799634] Received event network-vif-plugged-8473d03f-ba3c-4b3a-a79f-2eedc6ab90a4 {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1504.489369] env[62519]: DEBUG oslo_concurrency.lockutils [req-7aa5eac7-5df2-40ae-8a1d-04c5a5ea3b09 req-ed9d2913-bac9-4f1b-a0cc-0434faf660a5 service nova] Acquiring lock "51eaac08-75fd-49f9-9b1a-cc2a2d799634-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1504.489569] env[62519]: DEBUG oslo_concurrency.lockutils [req-7aa5eac7-5df2-40ae-8a1d-04c5a5ea3b09 req-ed9d2913-bac9-4f1b-a0cc-0434faf660a5 service nova] Lock "51eaac08-75fd-49f9-9b1a-cc2a2d799634-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1504.489731] env[62519]: DEBUG oslo_concurrency.lockutils [req-7aa5eac7-5df2-40ae-8a1d-04c5a5ea3b09 req-ed9d2913-bac9-4f1b-a0cc-0434faf660a5 service nova] Lock "51eaac08-75fd-49f9-9b1a-cc2a2d799634-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1504.489895] env[62519]: DEBUG nova.compute.manager [req-7aa5eac7-5df2-40ae-8a1d-04c5a5ea3b09 req-ed9d2913-bac9-4f1b-a0cc-0434faf660a5 service nova] [instance: 51eaac08-75fd-49f9-9b1a-cc2a2d799634] No waiting events found dispatching network-vif-plugged-8473d03f-ba3c-4b3a-a79f-2eedc6ab90a4 {{(pid=62519) pop_instance_event /opt/stack/nova/nova/compute/manager.py:323}} [ 1504.490160] env[62519]: WARNING nova.compute.manager [req-7aa5eac7-5df2-40ae-8a1d-04c5a5ea3b09 req-ed9d2913-bac9-4f1b-a0cc-0434faf660a5 service nova] [instance: 51eaac08-75fd-49f9-9b1a-cc2a2d799634] Received unexpected event network-vif-plugged-8473d03f-ba3c-4b3a-a79f-2eedc6ab90a4 for instance with vm_state building and task_state spawning. [ 1504.497161] env[62519]: DEBUG oslo_vmware.api [None req-6d590639-68af-4fac-aa10-a83b048f703c tempest-ServersTestMultiNic-797441404 tempest-ServersTestMultiNic-797441404-project-member] Task: {'id': task-1802214, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1504.498916] env[62519]: DEBUG oslo_vmware.api [None req-d532afdb-73c2-4922-91b8-52d65f7c0d83 tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] Waiting for the task: (returnval){ [ 1504.498916] env[62519]: value = "task-1802215" [ 1504.498916] env[62519]: _type = "Task" [ 1504.498916] env[62519]: } to complete. 
{{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1504.509662] env[62519]: DEBUG oslo_vmware.api [None req-d532afdb-73c2-4922-91b8-52d65f7c0d83 tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] Task: {'id': task-1802215, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1504.590472] env[62519]: DEBUG nova.network.neutron [None req-1d8c6fef-464e-42e4-b682-66f718a2e8f1 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] [instance: 51eaac08-75fd-49f9-9b1a-cc2a2d799634] Successfully updated port: 8473d03f-ba3c-4b3a-a79f-2eedc6ab90a4 {{(pid=62519) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1504.600805] env[62519]: DEBUG oslo_concurrency.lockutils [None req-a1c40e59-33e7-4dc4-932b-b210029267f9 tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Lock "f59a31e4-7fb9-4de7-b35f-da811a305f85" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 84.438s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1504.728064] env[62519]: DEBUG oslo_concurrency.lockutils [None req-804e8c96-430f-4309-af0c-933dbd81ccef tempest-ServerShowV247Test-1912217433 tempest-ServerShowV247Test-1912217433-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1504.769069] env[62519]: DEBUG oslo_concurrency.lockutils [None req-07784f7b-8852-4eb7-b75c-baa3510afa4f tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Acquiring lock "dac173ff-1807-405f-a59c-bb2efef62a0c" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1504.769069] env[62519]: DEBUG oslo_concurrency.lockutils [None req-07784f7b-8852-4eb7-b75c-baa3510afa4f tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Lock "dac173ff-1807-405f-a59c-bb2efef62a0c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1504.868932] env[62519]: DEBUG oslo_concurrency.lockutils [None req-97f971da-2cc7-4dd4-b5fa-02fbf4a6a87f tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] Lock "021c3287-5887-457e-9b3a-233308fb9b23" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 38.249s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1504.998125] env[62519]: DEBUG oslo_vmware.api [None req-6d590639-68af-4fac-aa10-a83b048f703c tempest-ServersTestMultiNic-797441404 tempest-ServersTestMultiNic-797441404-project-member] Task: {'id': task-1802214, 'name': ReconfigVM_Task, 'duration_secs': 0.414931} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1504.999125] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-6d590639-68af-4fac-aa10-a83b048f703c tempest-ServersTestMultiNic-797441404 tempest-ServersTestMultiNic-797441404-project-member] [instance: cfefa7c8-4986-4ad0-ac20-8784ee44a737] Reconfigured VM instance instance-0000001d to attach disk [datastore1] cfefa7c8-4986-4ad0-ac20-8784ee44a737/cfefa7c8-4986-4ad0-ac20-8784ee44a737.vmdk or device None with type sparse {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1505.000020] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-61eba885-9f73-4673-bb0a-be5afaeb394c {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1505.014132] env[62519]: DEBUG oslo_vmware.api [None req-d532afdb-73c2-4922-91b8-52d65f7c0d83 tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] Task: {'id': task-1802215, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.073246} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1505.015491] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-d532afdb-73c2-4922-91b8-52d65f7c0d83 tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] [instance: 1d4b14d3-8832-457e-aaed-462236555f57] Extended root virtual disk {{(pid=62519) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1505.015847] env[62519]: DEBUG oslo_vmware.api [None req-6d590639-68af-4fac-aa10-a83b048f703c tempest-ServersTestMultiNic-797441404 tempest-ServersTestMultiNic-797441404-project-member] Waiting for the task: (returnval){ [ 1505.015847] env[62519]: value = "task-1802216" [ 1505.015847] env[62519]: _type = "Task" [ 1505.015847] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1505.016570] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0fe73d20-2121-45aa-b78a-5f333a7b5b23 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1505.047827] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-d532afdb-73c2-4922-91b8-52d65f7c0d83 tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] [instance: 1d4b14d3-8832-457e-aaed-462236555f57] Reconfiguring VM instance instance-0000001f to attach disk [datastore1] 1d4b14d3-8832-457e-aaed-462236555f57/1d4b14d3-8832-457e-aaed-462236555f57.vmdk or device None with type sparse {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1505.048196] env[62519]: DEBUG oslo_vmware.api [None req-6d590639-68af-4fac-aa10-a83b048f703c tempest-ServersTestMultiNic-797441404 tempest-ServersTestMultiNic-797441404-project-member] Task: {'id': task-1802216, 'name': Rename_Task} progress is 5%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1505.048469] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c20d4693-5d61-46a9-b1af-08ec729a198e {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1505.074391] env[62519]: DEBUG oslo_vmware.api [None req-d532afdb-73c2-4922-91b8-52d65f7c0d83 tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] Waiting for the task: (returnval){ [ 1505.074391] env[62519]: value = "task-1802217" [ 1505.074391] env[62519]: _type = "Task" [ 1505.074391] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1505.083343] env[62519]: DEBUG oslo_vmware.api [None req-d532afdb-73c2-4922-91b8-52d65f7c0d83 tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] Task: {'id': task-1802217, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1505.093181] env[62519]: DEBUG oslo_concurrency.lockutils [None req-1d8c6fef-464e-42e4-b682-66f718a2e8f1 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Acquiring lock "refresh_cache-51eaac08-75fd-49f9-9b1a-cc2a2d799634" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1505.093334] env[62519]: DEBUG oslo_concurrency.lockutils [None req-1d8c6fef-464e-42e4-b682-66f718a2e8f1 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Acquired lock "refresh_cache-51eaac08-75fd-49f9-9b1a-cc2a2d799634" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1505.093493] env[62519]: DEBUG nova.network.neutron [None req-1d8c6fef-464e-42e4-b682-66f718a2e8f1 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] [instance: 51eaac08-75fd-49f9-9b1a-cc2a2d799634] Building network info cache for instance {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1505.103592] env[62519]: DEBUG nova.compute.manager [None req-51a5c574-31b5-4e20-8d04-338349c55e60 tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] [instance: 8070aa59-3547-460a-b914-0e84620023d0] Starting instance... {{(pid=62519) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2441}} [ 1505.531981] env[62519]: DEBUG oslo_vmware.api [None req-6d590639-68af-4fac-aa10-a83b048f703c tempest-ServersTestMultiNic-797441404 tempest-ServersTestMultiNic-797441404-project-member] Task: {'id': task-1802216, 'name': Rename_Task, 'duration_secs': 0.331228} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1505.532282] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-6d590639-68af-4fac-aa10-a83b048f703c tempest-ServersTestMultiNic-797441404 tempest-ServersTestMultiNic-797441404-project-member] [instance: cfefa7c8-4986-4ad0-ac20-8784ee44a737] Powering on the VM {{(pid=62519) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1505.532525] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-37e2c865-79ba-46e9-9d96-2f182ae4849c {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1505.539114] env[62519]: DEBUG oslo_vmware.api [None req-6d590639-68af-4fac-aa10-a83b048f703c tempest-ServersTestMultiNic-797441404 tempest-ServersTestMultiNic-797441404-project-member] Waiting for the task: (returnval){ [ 1505.539114] env[62519]: value = "task-1802218" [ 1505.539114] env[62519]: _type = "Task" [ 1505.539114] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1505.547428] env[62519]: DEBUG oslo_vmware.api [None req-6d590639-68af-4fac-aa10-a83b048f703c tempest-ServersTestMultiNic-797441404 tempest-ServersTestMultiNic-797441404-project-member] Task: {'id': task-1802218, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1505.583274] env[62519]: DEBUG oslo_vmware.api [None req-d532afdb-73c2-4922-91b8-52d65f7c0d83 tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] Task: {'id': task-1802217, 'name': ReconfigVM_Task, 'duration_secs': 0.390052} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1505.583801] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-d532afdb-73c2-4922-91b8-52d65f7c0d83 tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] [instance: 1d4b14d3-8832-457e-aaed-462236555f57] Reconfigured VM instance instance-0000001f to attach disk [datastore1] 1d4b14d3-8832-457e-aaed-462236555f57/1d4b14d3-8832-457e-aaed-462236555f57.vmdk or device None with type sparse {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1505.584348] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-e3e5dd71-69f7-4f1f-bdfd-d2524122c8c0 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1505.592374] env[62519]: DEBUG oslo_vmware.api [None req-d532afdb-73c2-4922-91b8-52d65f7c0d83 tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] Waiting for the task: (returnval){ [ 1505.592374] env[62519]: value = "task-1802219" [ 1505.592374] env[62519]: _type = "Task" [ 1505.592374] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1505.603953] env[62519]: DEBUG oslo_vmware.api [None req-d532afdb-73c2-4922-91b8-52d65f7c0d83 tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] Task: {'id': task-1802219, 'name': Rename_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1505.629518] env[62519]: DEBUG oslo_concurrency.lockutils [None req-51a5c574-31b5-4e20-8d04-338349c55e60 tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1505.647128] env[62519]: DEBUG nova.network.neutron [None req-1d8c6fef-464e-42e4-b682-66f718a2e8f1 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] [instance: 51eaac08-75fd-49f9-9b1a-cc2a2d799634] Instance cache missing network info. {{(pid=62519) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1505.799715] env[62519]: DEBUG nova.network.neutron [None req-1d8c6fef-464e-42e4-b682-66f718a2e8f1 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] [instance: 51eaac08-75fd-49f9-9b1a-cc2a2d799634] Updating instance_info_cache with network_info: [{"id": "8473d03f-ba3c-4b3a-a79f-2eedc6ab90a4", "address": "fa:16:3e:87:ce:7c", "network": {"id": "14e63db8-c42b-42e5-bd96-cab60c163758", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1328688204-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "df6727c290724a8ebef5188c77e91399", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2be3fdb5-359e-43bd-8c20-2ff00e81db55", "external-id": "nsx-vlan-transportzone-986", "segmentation_id": 986, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8473d03f-ba", "ovs_interfaceid": "8473d03f-ba3c-4b3a-a79f-2eedc6ab90a4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1505.925245] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94f50855-8cc7-4e35-82da-fcc869078105 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1505.934172] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c11c35b8-5edf-4467-bdb5-ed72000c92bb {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1505.968035] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f37ece7d-2c9d-428f-b5a7-23ccddd7b98a {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1505.977326] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-124faac3-dba1-4e97-9215-315ef5051706 {{(pid=62519) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1505.991573] env[62519]: DEBUG nova.compute.provider_tree [None req-0c4994c0-96a4-4874-90b8-74f17da23551 tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] Inventory has not changed in ProviderTree for provider: f8ca0d98-9158-4b85-ae0e-b106f966dd44 {{(pid=62519) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1506.050016] env[62519]: DEBUG oslo_vmware.api [None req-6d590639-68af-4fac-aa10-a83b048f703c tempest-ServersTestMultiNic-797441404 tempest-ServersTestMultiNic-797441404-project-member] Task: {'id': task-1802218, 'name': PowerOnVM_Task} progress is 71%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1506.103657] env[62519]: DEBUG oslo_vmware.api [None req-d532afdb-73c2-4922-91b8-52d65f7c0d83 tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] Task: {'id': task-1802219, 'name': Rename_Task, 'duration_secs': 0.165095} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1506.103943] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-d532afdb-73c2-4922-91b8-52d65f7c0d83 tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] [instance: 1d4b14d3-8832-457e-aaed-462236555f57] Powering on the VM {{(pid=62519) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1506.104266] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-7591dfd4-90ac-48b7-8559-a9f65247abda {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1506.111442] env[62519]: DEBUG oslo_vmware.api [None req-d532afdb-73c2-4922-91b8-52d65f7c0d83 tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] Waiting for the task: (returnval){ [ 1506.111442] env[62519]: value = "task-1802220" [ 1506.111442] env[62519]: _type = "Task" [ 1506.111442] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1506.121049] env[62519]: DEBUG oslo_vmware.api [None req-d532afdb-73c2-4922-91b8-52d65f7c0d83 tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] Task: {'id': task-1802220, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1506.302366] env[62519]: DEBUG oslo_concurrency.lockutils [None req-1d8c6fef-464e-42e4-b682-66f718a2e8f1 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Releasing lock "refresh_cache-51eaac08-75fd-49f9-9b1a-cc2a2d799634" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1506.303043] env[62519]: DEBUG nova.compute.manager [None req-1d8c6fef-464e-42e4-b682-66f718a2e8f1 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] [instance: 51eaac08-75fd-49f9-9b1a-cc2a2d799634] Instance network_info: |[{"id": "8473d03f-ba3c-4b3a-a79f-2eedc6ab90a4", "address": "fa:16:3e:87:ce:7c", "network": {"id": "14e63db8-c42b-42e5-bd96-cab60c163758", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1328688204-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "df6727c290724a8ebef5188c77e91399", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2be3fdb5-359e-43bd-8c20-2ff00e81db55", "external-id": "nsx-vlan-transportzone-986", "segmentation_id": 986, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8473d03f-ba", "ovs_interfaceid": "8473d03f-ba3c-4b3a-a79f-2eedc6ab90a4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62519) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2004}} [ 1506.304299] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-1d8c6fef-464e-42e4-b682-66f718a2e8f1 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] [instance: 51eaac08-75fd-49f9-9b1a-cc2a2d799634] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:87:ce:7c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '2be3fdb5-359e-43bd-8c20-2ff00e81db55', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '8473d03f-ba3c-4b3a-a79f-2eedc6ab90a4', 'vif_model': 'vmxnet3'}] {{(pid=62519) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1506.317475] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-1d8c6fef-464e-42e4-b682-66f718a2e8f1 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Creating folder: Project (df6727c290724a8ebef5188c77e91399). Parent ref: group-v373567. 
{{(pid=62519) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1506.318650] env[62519]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d5c35dc2-a32f-4674-81c8-680ef133464e {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1506.331965] env[62519]: INFO nova.virt.vmwareapi.vm_util [None req-1d8c6fef-464e-42e4-b682-66f718a2e8f1 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Created folder: Project (df6727c290724a8ebef5188c77e91399) in parent group-v373567. [ 1506.332279] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-1d8c6fef-464e-42e4-b682-66f718a2e8f1 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Creating folder: Instances. Parent ref: group-v373657. {{(pid=62519) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1506.332597] env[62519]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-85e8f913-e6e1-492c-bbf3-a966af177e39 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1506.345768] env[62519]: INFO nova.virt.vmwareapi.vm_util [None req-1d8c6fef-464e-42e4-b682-66f718a2e8f1 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Created folder: Instances in parent group-v373657. [ 1506.346105] env[62519]: DEBUG oslo.service.loopingcall [None req-1d8c6fef-464e-42e4-b682-66f718a2e8f1 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62519) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1506.346319] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 51eaac08-75fd-49f9-9b1a-cc2a2d799634] Creating VM on the ESX host {{(pid=62519) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1506.346527] env[62519]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-7be3b8c9-bffc-424c-a1e7-1b15d56af260 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1506.367392] env[62519]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1506.367392] env[62519]: value = "task-1802223" [ 1506.367392] env[62519]: _type = "Task" [ 1506.367392] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1506.378187] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1802223, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1506.495250] env[62519]: DEBUG nova.scheduler.client.report [None req-0c4994c0-96a4-4874-90b8-74f17da23551 tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] Inventory has not changed for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1506.516881] env[62519]: DEBUG nova.compute.manager [req-09e80ab4-75a9-4a81-aa36-a8d2f71b3ac8 req-bab0f253-7186-4df1-ac57-e4b45d643fcc service nova] [instance: 51eaac08-75fd-49f9-9b1a-cc2a2d799634] Received event network-changed-8473d03f-ba3c-4b3a-a79f-2eedc6ab90a4 {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1506.517344] env[62519]: DEBUG nova.compute.manager [req-09e80ab4-75a9-4a81-aa36-a8d2f71b3ac8 req-bab0f253-7186-4df1-ac57-e4b45d643fcc service nova] [instance: 51eaac08-75fd-49f9-9b1a-cc2a2d799634] Refreshing instance network info cache due to event network-changed-8473d03f-ba3c-4b3a-a79f-2eedc6ab90a4. {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11628}} [ 1506.517603] env[62519]: DEBUG oslo_concurrency.lockutils [req-09e80ab4-75a9-4a81-aa36-a8d2f71b3ac8 req-bab0f253-7186-4df1-ac57-e4b45d643fcc service nova] Acquiring lock "refresh_cache-51eaac08-75fd-49f9-9b1a-cc2a2d799634" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1506.517747] env[62519]: DEBUG oslo_concurrency.lockutils [req-09e80ab4-75a9-4a81-aa36-a8d2f71b3ac8 req-bab0f253-7186-4df1-ac57-e4b45d643fcc service nova] Acquired lock "refresh_cache-51eaac08-75fd-49f9-9b1a-cc2a2d799634" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1506.517907] env[62519]: DEBUG nova.network.neutron [req-09e80ab4-75a9-4a81-aa36-a8d2f71b3ac8 req-bab0f253-7186-4df1-ac57-e4b45d643fcc service nova] [instance: 51eaac08-75fd-49f9-9b1a-cc2a2d799634] Refreshing network info cache for port 8473d03f-ba3c-4b3a-a79f-2eedc6ab90a4 {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1506.554095] env[62519]: DEBUG oslo_vmware.api [None req-6d590639-68af-4fac-aa10-a83b048f703c tempest-ServersTestMultiNic-797441404 tempest-ServersTestMultiNic-797441404-project-member] Task: {'id': task-1802218, 'name': PowerOnVM_Task, 'duration_secs': 0.923083} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1506.554437] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-6d590639-68af-4fac-aa10-a83b048f703c tempest-ServersTestMultiNic-797441404 tempest-ServersTestMultiNic-797441404-project-member] [instance: cfefa7c8-4986-4ad0-ac20-8784ee44a737] Powered on the VM {{(pid=62519) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1506.554672] env[62519]: INFO nova.compute.manager [None req-6d590639-68af-4fac-aa10-a83b048f703c tempest-ServersTestMultiNic-797441404 tempest-ServersTestMultiNic-797441404-project-member] [instance: cfefa7c8-4986-4ad0-ac20-8784ee44a737] Took 15.10 seconds to spawn the instance on the hypervisor. [ 1506.554895] env[62519]: DEBUG nova.compute.manager [None req-6d590639-68af-4fac-aa10-a83b048f703c tempest-ServersTestMultiNic-797441404 tempest-ServersTestMultiNic-797441404-project-member] [instance: cfefa7c8-4986-4ad0-ac20-8784ee44a737] Checking state {{(pid=62519) _get_power_state /opt/stack/nova/nova/compute/manager.py:1799}} [ 1506.555780] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df46eb86-3c6d-40cf-a5ef-74a5136fd2b7 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1506.585411] env[62519]: DEBUG oslo_concurrency.lockutils [None req-b10b0bf2-5335-4999-b79b-72a39bf6242d tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] Acquiring lock "56790738-4759-468a-9f43-f9c2bc2de23a" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1506.585411] env[62519]: DEBUG oslo_concurrency.lockutils [None req-b10b0bf2-5335-4999-b79b-72a39bf6242d tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] Lock "56790738-4759-468a-9f43-f9c2bc2de23a" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1506.585411] env[62519]: DEBUG oslo_concurrency.lockutils [None req-b10b0bf2-5335-4999-b79b-72a39bf6242d tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] Acquiring lock "56790738-4759-468a-9f43-f9c2bc2de23a-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1506.585411] env[62519]: DEBUG oslo_concurrency.lockutils [None req-b10b0bf2-5335-4999-b79b-72a39bf6242d tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] Lock "56790738-4759-468a-9f43-f9c2bc2de23a-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1506.585411] env[62519]: DEBUG oslo_concurrency.lockutils [None req-b10b0bf2-5335-4999-b79b-72a39bf6242d tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] Lock "56790738-4759-468a-9f43-f9c2bc2de23a-events" "released" by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1506.587169] env[62519]: INFO nova.compute.manager [None req-b10b0bf2-5335-4999-b79b-72a39bf6242d tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] [instance: 56790738-4759-468a-9f43-f9c2bc2de23a] Terminating instance [ 1506.622493] env[62519]: DEBUG oslo_vmware.api [None req-d532afdb-73c2-4922-91b8-52d65f7c0d83 tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] Task: {'id': task-1802220, 'name': PowerOnVM_Task, 'duration_secs': 0.482197} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1506.622818] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-d532afdb-73c2-4922-91b8-52d65f7c0d83 tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] [instance: 1d4b14d3-8832-457e-aaed-462236555f57] Powered on the VM {{(pid=62519) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1506.623906] env[62519]: INFO nova.compute.manager [None req-d532afdb-73c2-4922-91b8-52d65f7c0d83 tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] [instance: 1d4b14d3-8832-457e-aaed-462236555f57] Took 8.64 seconds to spawn the instance on the hypervisor. [ 1506.624176] env[62519]: DEBUG nova.compute.manager [None req-d532afdb-73c2-4922-91b8-52d65f7c0d83 tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] [instance: 1d4b14d3-8832-457e-aaed-462236555f57] Checking state {{(pid=62519) _get_power_state /opt/stack/nova/nova/compute/manager.py:1799}} [ 1506.625774] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-161eed5e-afcd-4572-bf16-0a890436801d {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1506.679040] env[62519]: DEBUG oslo_concurrency.lockutils [None req-9eaae7f7-bdee-49ed-a285-41553171929e tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] Acquiring lock "3aab3adb-b28b-45dd-880f-b1cfbaeeed0c" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1506.679333] env[62519]: DEBUG oslo_concurrency.lockutils [None req-9eaae7f7-bdee-49ed-a285-41553171929e tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] Lock "3aab3adb-b28b-45dd-880f-b1cfbaeeed0c" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1506.679545] env[62519]: DEBUG oslo_concurrency.lockutils [None req-9eaae7f7-bdee-49ed-a285-41553171929e tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] Acquiring lock "3aab3adb-b28b-45dd-880f-b1cfbaeeed0c-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62519) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1506.679743] env[62519]: DEBUG oslo_concurrency.lockutils [None req-9eaae7f7-bdee-49ed-a285-41553171929e tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] Lock "3aab3adb-b28b-45dd-880f-b1cfbaeeed0c-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1506.679890] env[62519]: DEBUG oslo_concurrency.lockutils [None req-9eaae7f7-bdee-49ed-a285-41553171929e tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] Lock "3aab3adb-b28b-45dd-880f-b1cfbaeeed0c-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1506.681944] env[62519]: INFO nova.compute.manager [None req-9eaae7f7-bdee-49ed-a285-41553171929e tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] [instance: 3aab3adb-b28b-45dd-880f-b1cfbaeeed0c] Terminating instance [ 1506.878703] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1802223, 'name': CreateVM_Task, 'duration_secs': 0.414187} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1506.878894] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 51eaac08-75fd-49f9-9b1a-cc2a2d799634] Created VM on the ESX host {{(pid=62519) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1506.879616] env[62519]: DEBUG oslo_concurrency.lockutils [None req-1d8c6fef-464e-42e4-b682-66f718a2e8f1 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1506.879773] env[62519]: DEBUG oslo_concurrency.lockutils [None req-1d8c6fef-464e-42e4-b682-66f718a2e8f1 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Acquired lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1506.880101] env[62519]: DEBUG oslo_concurrency.lockutils [None req-1d8c6fef-464e-42e4-b682-66f718a2e8f1 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1506.880669] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b1c9082b-419f-4f4e-abf1-1b03f24fd72c {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1506.886014] env[62519]: DEBUG oslo_vmware.api [None req-1d8c6fef-464e-42e4-b682-66f718a2e8f1 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Waiting for the task: (returnval){ [ 
1506.886014] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]5259a2e2-91a5-001e-8c13-c2ed5c884bc2" [ 1506.886014] env[62519]: _type = "Task" [ 1506.886014] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1506.894805] env[62519]: DEBUG oslo_vmware.api [None req-1d8c6fef-464e-42e4-b682-66f718a2e8f1 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]5259a2e2-91a5-001e-8c13-c2ed5c884bc2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1507.001036] env[62519]: DEBUG oslo_concurrency.lockutils [None req-0c4994c0-96a4-4874-90b8-74f17da23551 tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.670s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1507.001497] env[62519]: DEBUG nova.compute.manager [None req-0c4994c0-96a4-4874-90b8-74f17da23551 tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] [instance: 2b68e95a-1d93-43ee-b0a6-996c9042f5c7] Start building networks asynchronously for instance. {{(pid=62519) _build_resources /opt/stack/nova/nova/compute/manager.py:2838}} [ 1507.005205] env[62519]: DEBUG oslo_concurrency.lockutils [None req-d2114a68-6388-4c74-8f9c-6fba6cdea6e5 tempest-ServersTestFqdnHostnames-1137700598 tempest-ServersTestFqdnHostnames-1137700598-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 32.482s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1507.007329] env[62519]: INFO nova.compute.claims [None req-d2114a68-6388-4c74-8f9c-6fba6cdea6e5 tempest-ServersTestFqdnHostnames-1137700598 tempest-ServersTestFqdnHostnames-1137700598-project-member] [instance: a59be5e6-2316-4766-933a-4d01dfe4fec1] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1507.077484] env[62519]: INFO nova.compute.manager [None req-6d590639-68af-4fac-aa10-a83b048f703c tempest-ServersTestMultiNic-797441404 tempest-ServersTestMultiNic-797441404-project-member] [instance: cfefa7c8-4986-4ad0-ac20-8784ee44a737] Took 54.58 seconds to build instance. [ 1507.091147] env[62519]: DEBUG nova.compute.manager [None req-b10b0bf2-5335-4999-b79b-72a39bf6242d tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] [instance: 56790738-4759-468a-9f43-f9c2bc2de23a] Start destroying the instance on the hypervisor. 
{{(pid=62519) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3166}} [ 1507.091383] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-b10b0bf2-5335-4999-b79b-72a39bf6242d tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] [instance: 56790738-4759-468a-9f43-f9c2bc2de23a] Destroying instance {{(pid=62519) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1507.095514] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32085e15-ad12-4e95-a4cc-d132a5e40ddc {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1507.105650] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-b10b0bf2-5335-4999-b79b-72a39bf6242d tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] [instance: 56790738-4759-468a-9f43-f9c2bc2de23a] Powering off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1507.105881] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-6d415028-eafc-437d-8415-b002b9ffbdf6 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1507.112894] env[62519]: DEBUG oslo_vmware.api [None req-b10b0bf2-5335-4999-b79b-72a39bf6242d tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] Waiting for the task: (returnval){ [ 1507.112894] env[62519]: value = "task-1802224" [ 1507.112894] env[62519]: _type = "Task" [ 1507.112894] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1507.125319] env[62519]: DEBUG oslo_vmware.api [None req-b10b0bf2-5335-4999-b79b-72a39bf6242d tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] Task: {'id': task-1802224, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1507.146126] env[62519]: INFO nova.compute.manager [None req-d532afdb-73c2-4922-91b8-52d65f7c0d83 tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] [instance: 1d4b14d3-8832-457e-aaed-462236555f57] Took 41.60 seconds to build instance. [ 1507.185978] env[62519]: DEBUG nova.compute.manager [None req-9eaae7f7-bdee-49ed-a285-41553171929e tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] [instance: 3aab3adb-b28b-45dd-880f-b1cfbaeeed0c] Start destroying the instance on the hypervisor. 
{{(pid=62519) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3166}} [ 1507.186230] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-9eaae7f7-bdee-49ed-a285-41553171929e tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] [instance: 3aab3adb-b28b-45dd-880f-b1cfbaeeed0c] Destroying instance {{(pid=62519) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1507.188737] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8158be6-e001-4cde-bc3e-6f340ea3b31a {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1507.197796] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-9eaae7f7-bdee-49ed-a285-41553171929e tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] [instance: 3aab3adb-b28b-45dd-880f-b1cfbaeeed0c] Powering off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1507.198175] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-22db03bf-880b-4952-86bb-3c2afed954ba {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1507.205966] env[62519]: DEBUG oslo_vmware.api [None req-9eaae7f7-bdee-49ed-a285-41553171929e tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] Waiting for the task: (returnval){ [ 1507.205966] env[62519]: value = "task-1802225" [ 1507.205966] env[62519]: _type = "Task" [ 1507.205966] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1507.218374] env[62519]: DEBUG oslo_vmware.api [None req-9eaae7f7-bdee-49ed-a285-41553171929e tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] Task: {'id': task-1802225, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1507.266333] env[62519]: DEBUG nova.network.neutron [req-09e80ab4-75a9-4a81-aa36-a8d2f71b3ac8 req-bab0f253-7186-4df1-ac57-e4b45d643fcc service nova] [instance: 51eaac08-75fd-49f9-9b1a-cc2a2d799634] Updated VIF entry in instance network info cache for port 8473d03f-ba3c-4b3a-a79f-2eedc6ab90a4. 
{{(pid=62519) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1507.266498] env[62519]: DEBUG nova.network.neutron [req-09e80ab4-75a9-4a81-aa36-a8d2f71b3ac8 req-bab0f253-7186-4df1-ac57-e4b45d643fcc service nova] [instance: 51eaac08-75fd-49f9-9b1a-cc2a2d799634] Updating instance_info_cache with network_info: [{"id": "8473d03f-ba3c-4b3a-a79f-2eedc6ab90a4", "address": "fa:16:3e:87:ce:7c", "network": {"id": "14e63db8-c42b-42e5-bd96-cab60c163758", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1328688204-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "df6727c290724a8ebef5188c77e91399", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2be3fdb5-359e-43bd-8c20-2ff00e81db55", "external-id": "nsx-vlan-transportzone-986", "segmentation_id": 986, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8473d03f-ba", "ovs_interfaceid": "8473d03f-ba3c-4b3a-a79f-2eedc6ab90a4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1507.397192] env[62519]: DEBUG oslo_vmware.api [None req-1d8c6fef-464e-42e4-b682-66f718a2e8f1 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]5259a2e2-91a5-001e-8c13-c2ed5c884bc2, 'name': SearchDatastore_Task, 'duration_secs': 0.022383} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1507.397520] env[62519]: DEBUG oslo_concurrency.lockutils [None req-1d8c6fef-464e-42e4-b682-66f718a2e8f1 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Releasing lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1507.397749] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-1d8c6fef-464e-42e4-b682-66f718a2e8f1 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] [instance: 51eaac08-75fd-49f9-9b1a-cc2a2d799634] Processing image 15793716-f1d9-4a86-9030-717adf498693 {{(pid=62519) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1507.397979] env[62519]: DEBUG oslo_concurrency.lockutils [None req-1d8c6fef-464e-42e4-b682-66f718a2e8f1 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1507.398140] env[62519]: DEBUG oslo_concurrency.lockutils [None req-1d8c6fef-464e-42e4-b682-66f718a2e8f1 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Acquired lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1507.398318] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-1d8c6fef-464e-42e4-b682-66f718a2e8f1 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62519) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1507.398573] env[62519]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ae509651-f91d-4650-80ff-7f8c4958c3f3 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1507.407582] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-1d8c6fef-464e-42e4-b682-66f718a2e8f1 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62519) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1507.407841] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-1d8c6fef-464e-42e4-b682-66f718a2e8f1 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62519) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1507.408931] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a71db5c5-eb20-4821-9d98-8c50dbdbaa33 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1507.415699] env[62519]: DEBUG oslo_vmware.api [None req-1d8c6fef-464e-42e4-b682-66f718a2e8f1 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Waiting for the task: (returnval){ [ 1507.415699] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]52ea5aa7-c14d-4124-495d-bad8df8ce26e" [ 1507.415699] env[62519]: _type = "Task" [ 1507.415699] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1507.425660] env[62519]: DEBUG oslo_vmware.api [None req-1d8c6fef-464e-42e4-b682-66f718a2e8f1 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52ea5aa7-c14d-4124-495d-bad8df8ce26e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1507.516769] env[62519]: DEBUG nova.compute.utils [None req-0c4994c0-96a4-4874-90b8-74f17da23551 tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] Using /dev/sd instead of None {{(pid=62519) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1507.517554] env[62519]: DEBUG nova.compute.manager [None req-0c4994c0-96a4-4874-90b8-74f17da23551 tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] [instance: 2b68e95a-1d93-43ee-b0a6-996c9042f5c7] Allocating IP information in the background. {{(pid=62519) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1989}} [ 1507.517554] env[62519]: DEBUG nova.network.neutron [None req-0c4994c0-96a4-4874-90b8-74f17da23551 tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] [instance: 2b68e95a-1d93-43ee-b0a6-996c9042f5c7] allocate_for_instance() {{(pid=62519) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1507.572645] env[62519]: DEBUG nova.policy [None req-0c4994c0-96a4-4874-90b8-74f17da23551 tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '936d1af31c7440ef8d487ba090c603f3', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '48df3b387a6f43459978ee37df8d8a8f', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62519) authorize /opt/stack/nova/nova/policy.py:192}} [ 1507.623927] env[62519]: DEBUG oslo_vmware.api [None req-b10b0bf2-5335-4999-b79b-72a39bf6242d tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] Task: {'id': task-1802224, 'name': PowerOffVM_Task, 'duration_secs': 0.314272} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1507.624318] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-b10b0bf2-5335-4999-b79b-72a39bf6242d tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] [instance: 56790738-4759-468a-9f43-f9c2bc2de23a] Powered off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1507.624483] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-b10b0bf2-5335-4999-b79b-72a39bf6242d tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] [instance: 56790738-4759-468a-9f43-f9c2bc2de23a] Unregistering the VM {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1507.624728] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-6245adb0-ef90-49d4-8127-1a0fe594be01 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1507.652383] env[62519]: DEBUG oslo_concurrency.lockutils [None req-24e41ff3-6996-4210-8329-73785737b570 tempest-ServersTestMultiNic-797441404 tempest-ServersTestMultiNic-797441404-project-member] Acquiring lock "cfefa7c8-4986-4ad0-ac20-8784ee44a737" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1507.719729] env[62519]: DEBUG oslo_vmware.api [None req-9eaae7f7-bdee-49ed-a285-41553171929e tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] Task: {'id': task-1802225, 'name': PowerOffVM_Task, 'duration_secs': 0.229786} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1507.719959] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-9eaae7f7-bdee-49ed-a285-41553171929e tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] [instance: 3aab3adb-b28b-45dd-880f-b1cfbaeeed0c] Powered off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1507.720162] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-9eaae7f7-bdee-49ed-a285-41553171929e tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] [instance: 3aab3adb-b28b-45dd-880f-b1cfbaeeed0c] Unregistering the VM {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1507.720447] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-5bb8f5c8-c216-4743-87ab-9218e6c61c1b {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1507.769361] env[62519]: DEBUG oslo_concurrency.lockutils [req-09e80ab4-75a9-4a81-aa36-a8d2f71b3ac8 req-bab0f253-7186-4df1-ac57-e4b45d643fcc service nova] Releasing lock "refresh_cache-51eaac08-75fd-49f9-9b1a-cc2a2d799634" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1507.825513] env[62519]: DEBUG nova.network.neutron [None req-0c4994c0-96a4-4874-90b8-74f17da23551 tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] [instance: 2b68e95a-1d93-43ee-b0a6-996c9042f5c7] Successfully created port: 0a18744e-471e-4942-8a22-59aa90a680af {{(pid=62519) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1507.859388] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-9eaae7f7-bdee-49ed-a285-41553171929e tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] [instance: 3aab3adb-b28b-45dd-880f-b1cfbaeeed0c] Unregistered the VM {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1507.859508] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-9eaae7f7-bdee-49ed-a285-41553171929e tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] [instance: 3aab3adb-b28b-45dd-880f-b1cfbaeeed0c] Deleting contents of the VM from datastore datastore1 {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1507.859709] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-9eaae7f7-bdee-49ed-a285-41553171929e tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] Deleting the datastore file [datastore1] 3aab3adb-b28b-45dd-880f-b1cfbaeeed0c {{(pid=62519) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1507.859971] env[62519]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a98c45a4-c7ac-416b-8a85-0473dda0c0fd {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1507.865430] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-b10b0bf2-5335-4999-b79b-72a39bf6242d tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] [instance: 56790738-4759-468a-9f43-f9c2bc2de23a] Unregistered the VM {{(pid=62519) _destroy_instance 
/opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1507.865636] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-b10b0bf2-5335-4999-b79b-72a39bf6242d tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] [instance: 56790738-4759-468a-9f43-f9c2bc2de23a] Deleting contents of the VM from datastore datastore1 {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1507.865809] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-b10b0bf2-5335-4999-b79b-72a39bf6242d tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] Deleting the datastore file [datastore1] 56790738-4759-468a-9f43-f9c2bc2de23a {{(pid=62519) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1507.866141] env[62519]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-cc2eec63-43ac-4099-81aa-96f1032fa2cb {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1507.869822] env[62519]: DEBUG oslo_vmware.api [None req-9eaae7f7-bdee-49ed-a285-41553171929e tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] Waiting for the task: (returnval){ [ 1507.869822] env[62519]: value = "task-1802228" [ 1507.869822] env[62519]: _type = "Task" [ 1507.869822] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1507.874487] env[62519]: DEBUG oslo_vmware.api [None req-b10b0bf2-5335-4999-b79b-72a39bf6242d tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] Waiting for the task: (returnval){ [ 1507.874487] env[62519]: value = "task-1802229" [ 1507.874487] env[62519]: _type = "Task" [ 1507.874487] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1507.880996] env[62519]: DEBUG oslo_vmware.api [None req-9eaae7f7-bdee-49ed-a285-41553171929e tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] Task: {'id': task-1802228, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1507.886844] env[62519]: DEBUG oslo_vmware.api [None req-b10b0bf2-5335-4999-b79b-72a39bf6242d tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] Task: {'id': task-1802229, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1507.929263] env[62519]: DEBUG oslo_vmware.api [None req-1d8c6fef-464e-42e4-b682-66f718a2e8f1 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52ea5aa7-c14d-4124-495d-bad8df8ce26e, 'name': SearchDatastore_Task, 'duration_secs': 0.013279} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1507.929868] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e8d026c1-a8f5-423f-8018-cc7574f00732 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1507.936838] env[62519]: DEBUG oslo_vmware.api [None req-1d8c6fef-464e-42e4-b682-66f718a2e8f1 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Waiting for the task: (returnval){ [ 1507.936838] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]52658c42-4f41-cbf0-7d04-52ace9bf39a6" [ 1507.936838] env[62519]: _type = "Task" [ 1507.936838] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1507.949046] env[62519]: DEBUG oslo_vmware.api [None req-1d8c6fef-464e-42e4-b682-66f718a2e8f1 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52658c42-4f41-cbf0-7d04-52ace9bf39a6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1508.021672] env[62519]: DEBUG nova.compute.manager [None req-0c4994c0-96a4-4874-90b8-74f17da23551 tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] [instance: 2b68e95a-1d93-43ee-b0a6-996c9042f5c7] Start building block device mappings for instance. {{(pid=62519) _build_resources /opt/stack/nova/nova/compute/manager.py:2873}} [ 1508.384372] env[62519]: DEBUG oslo_vmware.api [None req-9eaae7f7-bdee-49ed-a285-41553171929e tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] Task: {'id': task-1802228, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.138083} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1508.385251] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-9eaae7f7-bdee-49ed-a285-41553171929e tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] Deleted the datastore file {{(pid=62519) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1508.385494] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-9eaae7f7-bdee-49ed-a285-41553171929e tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] [instance: 3aab3adb-b28b-45dd-880f-b1cfbaeeed0c] Deleted contents of the VM from datastore datastore1 {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1508.385698] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-9eaae7f7-bdee-49ed-a285-41553171929e tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] [instance: 3aab3adb-b28b-45dd-880f-b1cfbaeeed0c] Instance destroyed {{(pid=62519) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1508.385876] env[62519]: INFO nova.compute.manager [None req-9eaae7f7-bdee-49ed-a285-41553171929e tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] [instance: 3aab3adb-b28b-45dd-880f-b1cfbaeeed0c] Took 1.20 seconds to destroy the instance on the hypervisor. 
[ 1508.386177] env[62519]: DEBUG oslo.service.loopingcall [None req-9eaae7f7-bdee-49ed-a285-41553171929e tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62519) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1508.389769] env[62519]: DEBUG nova.compute.manager [-] [instance: 3aab3adb-b28b-45dd-880f-b1cfbaeeed0c] Deallocating network for instance {{(pid=62519) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2297}} [ 1508.389769] env[62519]: DEBUG nova.network.neutron [-] [instance: 3aab3adb-b28b-45dd-880f-b1cfbaeeed0c] deallocate_for_instance() {{(pid=62519) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1508.391531] env[62519]: DEBUG oslo_vmware.api [None req-b10b0bf2-5335-4999-b79b-72a39bf6242d tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] Task: {'id': task-1802229, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.133645} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1508.392076] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-b10b0bf2-5335-4999-b79b-72a39bf6242d tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] Deleted the datastore file {{(pid=62519) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1508.392364] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-b10b0bf2-5335-4999-b79b-72a39bf6242d tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] [instance: 56790738-4759-468a-9f43-f9c2bc2de23a] Deleted contents of the VM from datastore datastore1 {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1508.392561] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-b10b0bf2-5335-4999-b79b-72a39bf6242d tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] [instance: 56790738-4759-468a-9f43-f9c2bc2de23a] Instance destroyed {{(pid=62519) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1508.396018] env[62519]: INFO nova.compute.manager [None req-b10b0bf2-5335-4999-b79b-72a39bf6242d tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] [instance: 56790738-4759-468a-9f43-f9c2bc2de23a] Took 1.30 seconds to destroy the instance on the hypervisor. [ 1508.396018] env[62519]: DEBUG oslo.service.loopingcall [None req-b10b0bf2-5335-4999-b79b-72a39bf6242d tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62519) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1508.396018] env[62519]: DEBUG nova.compute.manager [-] [instance: 56790738-4759-468a-9f43-f9c2bc2de23a] Deallocating network for instance {{(pid=62519) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2297}} [ 1508.396018] env[62519]: DEBUG nova.network.neutron [-] [instance: 56790738-4759-468a-9f43-f9c2bc2de23a] deallocate_for_instance() {{(pid=62519) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1508.447502] env[62519]: DEBUG oslo_vmware.api [None req-1d8c6fef-464e-42e4-b682-66f718a2e8f1 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52658c42-4f41-cbf0-7d04-52ace9bf39a6, 'name': SearchDatastore_Task, 'duration_secs': 0.011057} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1508.447773] env[62519]: DEBUG oslo_concurrency.lockutils [None req-1d8c6fef-464e-42e4-b682-66f718a2e8f1 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Releasing lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1508.448048] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-1d8c6fef-464e-42e4-b682-66f718a2e8f1 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk to [datastore1] 51eaac08-75fd-49f9-9b1a-cc2a2d799634/51eaac08-75fd-49f9-9b1a-cc2a2d799634.vmdk {{(pid=62519) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1508.448797] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-626ca893-af62-4360-b1d1-9a0716235f7f {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1508.460051] env[62519]: DEBUG oslo_vmware.api [None req-1d8c6fef-464e-42e4-b682-66f718a2e8f1 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Waiting for the task: (returnval){ [ 1508.460051] env[62519]: value = "task-1802230" [ 1508.460051] env[62519]: _type = "Task" [ 1508.460051] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1508.474354] env[62519]: DEBUG oslo_vmware.api [None req-1d8c6fef-464e-42e4-b682-66f718a2e8f1 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Task: {'id': task-1802230, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1508.587468] env[62519]: DEBUG oslo_concurrency.lockutils [None req-6d590639-68af-4fac-aa10-a83b048f703c tempest-ServersTestMultiNic-797441404 tempest-ServersTestMultiNic-797441404-project-member] Lock "cfefa7c8-4986-4ad0-ac20-8784ee44a737" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 89.477s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1508.589111] env[62519]: DEBUG oslo_concurrency.lockutils [None req-24e41ff3-6996-4210-8329-73785737b570 tempest-ServersTestMultiNic-797441404 tempest-ServersTestMultiNic-797441404-project-member] Lock "cfefa7c8-4986-4ad0-ac20-8784ee44a737" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.937s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1508.589337] env[62519]: DEBUG oslo_concurrency.lockutils [None req-24e41ff3-6996-4210-8329-73785737b570 tempest-ServersTestMultiNic-797441404 tempest-ServersTestMultiNic-797441404-project-member] Acquiring lock "cfefa7c8-4986-4ad0-ac20-8784ee44a737-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1508.589542] env[62519]: DEBUG oslo_concurrency.lockutils [None req-24e41ff3-6996-4210-8329-73785737b570 tempest-ServersTestMultiNic-797441404 tempest-ServersTestMultiNic-797441404-project-member] Lock "cfefa7c8-4986-4ad0-ac20-8784ee44a737-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1508.589703] env[62519]: DEBUG oslo_concurrency.lockutils [None req-24e41ff3-6996-4210-8329-73785737b570 tempest-ServersTestMultiNic-797441404 tempest-ServersTestMultiNic-797441404-project-member] Lock "cfefa7c8-4986-4ad0-ac20-8784ee44a737-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1508.594971] env[62519]: INFO nova.compute.manager [None req-24e41ff3-6996-4210-8329-73785737b570 tempest-ServersTestMultiNic-797441404 tempest-ServersTestMultiNic-797441404-project-member] [instance: cfefa7c8-4986-4ad0-ac20-8784ee44a737] Terminating instance [ 1508.631118] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e1c56dc-db08-4303-b636-af01697faea1 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1508.640932] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e54f22d-9da0-47a5-8e4f-0545b89584df {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1508.681253] env[62519]: DEBUG oslo_concurrency.lockutils [None req-d532afdb-73c2-4922-91b8-52d65f7c0d83 tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] Lock "1d4b14d3-8832-457e-aaed-462236555f57" "released" by 
"nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 88.362s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1508.684494] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e0eb25f-f4a9-4ad1-9433-c646e483f963 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1508.697346] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d3e9b136-eae1-4352-bd70-b1364197e6b1 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1508.716469] env[62519]: DEBUG nova.compute.provider_tree [None req-d2114a68-6388-4c74-8f9c-6fba6cdea6e5 tempest-ServersTestFqdnHostnames-1137700598 tempest-ServersTestFqdnHostnames-1137700598-project-member] Inventory has not changed in ProviderTree for provider: f8ca0d98-9158-4b85-ae0e-b106f966dd44 {{(pid=62519) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1508.742164] env[62519]: DEBUG nova.compute.manager [req-eb2c1599-8b89-47ac-95ce-4cebc893bdce req-f9498ab5-5ad3-4040-afcf-ff0a1b06d0a2 service nova] [instance: 56790738-4759-468a-9f43-f9c2bc2de23a] Received event network-vif-deleted-a8a61a1e-f910-4564-b6af-8507d8141e45 {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1508.742295] env[62519]: INFO nova.compute.manager [req-eb2c1599-8b89-47ac-95ce-4cebc893bdce req-f9498ab5-5ad3-4040-afcf-ff0a1b06d0a2 service nova] [instance: 56790738-4759-468a-9f43-f9c2bc2de23a] Neutron deleted interface a8a61a1e-f910-4564-b6af-8507d8141e45; detaching it from the instance and deleting it from the info cache [ 1508.742502] env[62519]: DEBUG nova.network.neutron [req-eb2c1599-8b89-47ac-95ce-4cebc893bdce req-f9498ab5-5ad3-4040-afcf-ff0a1b06d0a2 service nova] [instance: 56790738-4759-468a-9f43-f9c2bc2de23a] Updating instance_info_cache with network_info: [] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1508.888706] env[62519]: DEBUG nova.compute.manager [req-766a7291-2008-4d76-bd05-4fe9c1b231a6 req-246819cc-3195-4b4b-aec7-2b29065835c6 service nova] [instance: 3aab3adb-b28b-45dd-880f-b1cfbaeeed0c] Received event network-vif-deleted-45a2d062-302a-4749-81e8-8a16c4726fe2 {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1508.889397] env[62519]: INFO nova.compute.manager [req-766a7291-2008-4d76-bd05-4fe9c1b231a6 req-246819cc-3195-4b4b-aec7-2b29065835c6 service nova] [instance: 3aab3adb-b28b-45dd-880f-b1cfbaeeed0c] Neutron deleted interface 45a2d062-302a-4749-81e8-8a16c4726fe2; detaching it from the instance and deleting it from the info cache [ 1508.889489] env[62519]: DEBUG nova.network.neutron [req-766a7291-2008-4d76-bd05-4fe9c1b231a6 req-246819cc-3195-4b4b-aec7-2b29065835c6 service nova] [instance: 3aab3adb-b28b-45dd-880f-b1cfbaeeed0c] Updating instance_info_cache with network_info: [] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1508.912292] env[62519]: DEBUG oslo_concurrency.lockutils [None req-e1dc4985-f528-4858-87c1-875c614a127e tempest-VolumesAssistedSnapshotsTest-1756313024 tempest-VolumesAssistedSnapshotsTest-1756313024-project-admin] Acquiring lock "91902e7f-8c15-447b-a3a8-04433434b1b6" by 
"nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1508.912533] env[62519]: DEBUG oslo_concurrency.lockutils [None req-e1dc4985-f528-4858-87c1-875c614a127e tempest-VolumesAssistedSnapshotsTest-1756313024 tempest-VolumesAssistedSnapshotsTest-1756313024-project-admin] Lock "91902e7f-8c15-447b-a3a8-04433434b1b6" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1508.970615] env[62519]: DEBUG oslo_vmware.api [None req-1d8c6fef-464e-42e4-b682-66f718a2e8f1 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Task: {'id': task-1802230, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1509.039747] env[62519]: DEBUG nova.compute.manager [None req-0c4994c0-96a4-4874-90b8-74f17da23551 tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] [instance: 2b68e95a-1d93-43ee-b0a6-996c9042f5c7] Start spawning the instance on the hypervisor. {{(pid=62519) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2647}} [ 1509.070480] env[62519]: DEBUG nova.virt.hardware [None req-0c4994c0-96a4-4874-90b8-74f17da23551 tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T07:56:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T07:55:55Z,direct_url=,disk_format='vmdk',id=15793716-f1d9-4a86-9030-717adf498693,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4069337684d348e0a1ab4eb6a1a2b14d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T07:55:56Z,virtual_size=,visibility=), allow threads: False {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1509.070774] env[62519]: DEBUG nova.virt.hardware [None req-0c4994c0-96a4-4874-90b8-74f17da23551 tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] Flavor limits 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1509.070874] env[62519]: DEBUG nova.virt.hardware [None req-0c4994c0-96a4-4874-90b8-74f17da23551 tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] Image limits 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1509.071066] env[62519]: DEBUG nova.virt.hardware [None req-0c4994c0-96a4-4874-90b8-74f17da23551 tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] Flavor pref 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1509.071215] env[62519]: DEBUG nova.virt.hardware [None req-0c4994c0-96a4-4874-90b8-74f17da23551 
tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] Image pref 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1509.071350] env[62519]: DEBUG nova.virt.hardware [None req-0c4994c0-96a4-4874-90b8-74f17da23551 tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1509.071548] env[62519]: DEBUG nova.virt.hardware [None req-0c4994c0-96a4-4874-90b8-74f17da23551 tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1509.071700] env[62519]: DEBUG nova.virt.hardware [None req-0c4994c0-96a4-4874-90b8-74f17da23551 tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62519) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1509.071865] env[62519]: DEBUG nova.virt.hardware [None req-0c4994c0-96a4-4874-90b8-74f17da23551 tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] Got 1 possible topologies {{(pid=62519) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1509.072034] env[62519]: DEBUG nova.virt.hardware [None req-0c4994c0-96a4-4874-90b8-74f17da23551 tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1509.072212] env[62519]: DEBUG nova.virt.hardware [None req-0c4994c0-96a4-4874-90b8-74f17da23551 tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1509.073116] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7896566-386b-4e7f-ae2f-3f7df5f77f77 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1509.082194] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce419b7d-45db-4f4d-a1d0-005e655e8eab {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1509.096023] env[62519]: DEBUG nova.compute.manager [None req-d5d00165-6954-4f25-bfe4-c298fa7922a8 tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] [instance: 27f9e890-4733-43aa-9bf1-351d42d75418] Starting instance... 
{{(pid=62519) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2441}} [ 1509.098933] env[62519]: DEBUG nova.compute.manager [None req-24e41ff3-6996-4210-8329-73785737b570 tempest-ServersTestMultiNic-797441404 tempest-ServersTestMultiNic-797441404-project-member] [instance: cfefa7c8-4986-4ad0-ac20-8784ee44a737] Start destroying the instance on the hypervisor. {{(pid=62519) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3166}} [ 1509.099138] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-24e41ff3-6996-4210-8329-73785737b570 tempest-ServersTestMultiNic-797441404 tempest-ServersTestMultiNic-797441404-project-member] [instance: cfefa7c8-4986-4ad0-ac20-8784ee44a737] Destroying instance {{(pid=62519) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1509.100281] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b35ad5dc-bd1e-4980-be65-316c56724bc4 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1509.108064] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-24e41ff3-6996-4210-8329-73785737b570 tempest-ServersTestMultiNic-797441404 tempest-ServersTestMultiNic-797441404-project-member] [instance: cfefa7c8-4986-4ad0-ac20-8784ee44a737] Powering off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1509.108332] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-27700bbb-9a33-4bdc-a9df-988e368aa543 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1509.117333] env[62519]: DEBUG oslo_vmware.api [None req-24e41ff3-6996-4210-8329-73785737b570 tempest-ServersTestMultiNic-797441404 tempest-ServersTestMultiNic-797441404-project-member] Waiting for the task: (returnval){ [ 1509.117333] env[62519]: value = "task-1802231" [ 1509.117333] env[62519]: _type = "Task" [ 1509.117333] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1509.128820] env[62519]: DEBUG oslo_vmware.api [None req-24e41ff3-6996-4210-8329-73785737b570 tempest-ServersTestMultiNic-797441404 tempest-ServersTestMultiNic-797441404-project-member] Task: {'id': task-1802231, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1509.189095] env[62519]: DEBUG nova.compute.manager [None req-069fe864-a843-4aad-b4f0-161d16fbc86a tempest-ImagesOneServerTestJSON-1959164190 tempest-ImagesOneServerTestJSON-1959164190-project-member] [instance: 4e3dee19-b99a-4257-88da-1b0531e2c0f9] Starting instance... 
{{(pid=62519) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2441}} [ 1509.192691] env[62519]: DEBUG nova.network.neutron [-] [instance: 56790738-4759-468a-9f43-f9c2bc2de23a] Updating instance_info_cache with network_info: [] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1509.220821] env[62519]: DEBUG nova.scheduler.client.report [None req-d2114a68-6388-4c74-8f9c-6fba6cdea6e5 tempest-ServersTestFqdnHostnames-1137700598 tempest-ServersTestFqdnHostnames-1137700598-project-member] Inventory has not changed for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1509.246456] env[62519]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-d9b7c4b8-6c61-4c01-bc1f-50f00426ef85 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1509.259939] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-126e256b-48ac-45b2-aa33-f7fe954b54ec {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1509.300731] env[62519]: DEBUG nova.network.neutron [-] [instance: 3aab3adb-b28b-45dd-880f-b1cfbaeeed0c] Updating instance_info_cache with network_info: [] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1509.302020] env[62519]: DEBUG nova.compute.manager [req-eb2c1599-8b89-47ac-95ce-4cebc893bdce req-f9498ab5-5ad3-4040-afcf-ff0a1b06d0a2 service nova] [instance: 56790738-4759-468a-9f43-f9c2bc2de23a] Detach interface failed, port_id=a8a61a1e-f910-4564-b6af-8507d8141e45, reason: Instance 56790738-4759-468a-9f43-f9c2bc2de23a could not be found. {{(pid=62519) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11455}} [ 1509.392462] env[62519]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-495f7d1e-6e47-4b67-a5c2-65ed60e95ee5 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1509.403969] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19147a60-66ed-4a10-85b1-ece47605e81a {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1509.416067] env[62519]: DEBUG nova.compute.utils [None req-e1dc4985-f528-4858-87c1-875c614a127e tempest-VolumesAssistedSnapshotsTest-1756313024 tempest-VolumesAssistedSnapshotsTest-1756313024-project-admin] Using /dev/sd instead of None {{(pid=62519) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1509.444346] env[62519]: DEBUG nova.compute.manager [req-766a7291-2008-4d76-bd05-4fe9c1b231a6 req-246819cc-3195-4b4b-aec7-2b29065835c6 service nova] [instance: 3aab3adb-b28b-45dd-880f-b1cfbaeeed0c] Detach interface failed, port_id=45a2d062-302a-4749-81e8-8a16c4726fe2, reason: Instance 3aab3adb-b28b-45dd-880f-b1cfbaeeed0c could not be found. 
{{(pid=62519) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11455}} [ 1509.453334] env[62519]: DEBUG nova.network.neutron [None req-0c4994c0-96a4-4874-90b8-74f17da23551 tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] [instance: 2b68e95a-1d93-43ee-b0a6-996c9042f5c7] Successfully updated port: 0a18744e-471e-4942-8a22-59aa90a680af {{(pid=62519) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1509.473613] env[62519]: DEBUG oslo_vmware.api [None req-1d8c6fef-464e-42e4-b682-66f718a2e8f1 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Task: {'id': task-1802230, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.583028} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1509.473613] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-1d8c6fef-464e-42e4-b682-66f718a2e8f1 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk to [datastore1] 51eaac08-75fd-49f9-9b1a-cc2a2d799634/51eaac08-75fd-49f9-9b1a-cc2a2d799634.vmdk {{(pid=62519) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1509.473850] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-1d8c6fef-464e-42e4-b682-66f718a2e8f1 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] [instance: 51eaac08-75fd-49f9-9b1a-cc2a2d799634] Extending root virtual disk to 1048576 {{(pid=62519) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1509.473987] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-da1b9da7-32ec-4a19-80c2-f33b679143c2 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1509.482662] env[62519]: DEBUG oslo_vmware.api [None req-1d8c6fef-464e-42e4-b682-66f718a2e8f1 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Waiting for the task: (returnval){ [ 1509.482662] env[62519]: value = "task-1802232" [ 1509.482662] env[62519]: _type = "Task" [ 1509.482662] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1509.494087] env[62519]: DEBUG oslo_vmware.api [None req-1d8c6fef-464e-42e4-b682-66f718a2e8f1 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Task: {'id': task-1802232, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1509.622519] env[62519]: DEBUG oslo_concurrency.lockutils [None req-d5d00165-6954-4f25-bfe4-c298fa7922a8 tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1509.629011] env[62519]: DEBUG oslo_vmware.api [None req-24e41ff3-6996-4210-8329-73785737b570 tempest-ServersTestMultiNic-797441404 tempest-ServersTestMultiNic-797441404-project-member] Task: {'id': task-1802231, 'name': PowerOffVM_Task, 'duration_secs': 0.286822} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1509.629297] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-24e41ff3-6996-4210-8329-73785737b570 tempest-ServersTestMultiNic-797441404 tempest-ServersTestMultiNic-797441404-project-member] [instance: cfefa7c8-4986-4ad0-ac20-8784ee44a737] Powered off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1509.629464] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-24e41ff3-6996-4210-8329-73785737b570 tempest-ServersTestMultiNic-797441404 tempest-ServersTestMultiNic-797441404-project-member] [instance: cfefa7c8-4986-4ad0-ac20-8784ee44a737] Unregistering the VM {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1509.629720] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-1e1a820b-d175-4664-bf80-814d2643be83 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1509.697719] env[62519]: INFO nova.compute.manager [-] [instance: 56790738-4759-468a-9f43-f9c2bc2de23a] Took 1.30 seconds to deallocate network for instance. [ 1509.711443] env[62519]: DEBUG oslo_concurrency.lockutils [None req-069fe864-a843-4aad-b4f0-161d16fbc86a tempest-ImagesOneServerTestJSON-1959164190 tempest-ImagesOneServerTestJSON-1959164190-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1509.728553] env[62519]: DEBUG oslo_concurrency.lockutils [None req-d2114a68-6388-4c74-8f9c-6fba6cdea6e5 tempest-ServersTestFqdnHostnames-1137700598 tempest-ServersTestFqdnHostnames-1137700598-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.723s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1509.729109] env[62519]: DEBUG nova.compute.manager [None req-d2114a68-6388-4c74-8f9c-6fba6cdea6e5 tempest-ServersTestFqdnHostnames-1137700598 tempest-ServersTestFqdnHostnames-1137700598-project-member] [instance: a59be5e6-2316-4766-933a-4d01dfe4fec1] Start building networks asynchronously for instance. 
{{(pid=62519) _build_resources /opt/stack/nova/nova/compute/manager.py:2838}} [ 1509.732452] env[62519]: DEBUG oslo_concurrency.lockutils [None req-ca8ca56a-35a2-4cb3-a5f6-65171c7c779d tempest-ServersAdminNegativeTestJSON-2096466474 tempest-ServersAdminNegativeTestJSON-2096466474-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 27.516s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1509.732655] env[62519]: DEBUG nova.objects.instance [None req-ca8ca56a-35a2-4cb3-a5f6-65171c7c779d tempest-ServersAdminNegativeTestJSON-2096466474 tempest-ServersAdminNegativeTestJSON-2096466474-project-member] Lazy-loading 'resources' on Instance uuid 681ef7a9-3b24-450a-9034-6d30177995d7 {{(pid=62519) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1509.804225] env[62519]: INFO nova.compute.manager [-] [instance: 3aab3adb-b28b-45dd-880f-b1cfbaeeed0c] Took 1.42 seconds to deallocate network for instance. [ 1509.920749] env[62519]: DEBUG oslo_concurrency.lockutils [None req-e1dc4985-f528-4858-87c1-875c614a127e tempest-VolumesAssistedSnapshotsTest-1756313024 tempest-VolumesAssistedSnapshotsTest-1756313024-project-admin] Lock "91902e7f-8c15-447b-a3a8-04433434b1b6" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.006s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1509.956291] env[62519]: DEBUG oslo_concurrency.lockutils [None req-0c4994c0-96a4-4874-90b8-74f17da23551 tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] Acquiring lock "refresh_cache-2b68e95a-1d93-43ee-b0a6-996c9042f5c7" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1509.956422] env[62519]: DEBUG oslo_concurrency.lockutils [None req-0c4994c0-96a4-4874-90b8-74f17da23551 tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] Acquired lock "refresh_cache-2b68e95a-1d93-43ee-b0a6-996c9042f5c7" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1509.956559] env[62519]: DEBUG nova.network.neutron [None req-0c4994c0-96a4-4874-90b8-74f17da23551 tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] [instance: 2b68e95a-1d93-43ee-b0a6-996c9042f5c7] Building network info cache for instance {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1509.993620] env[62519]: DEBUG oslo_vmware.api [None req-1d8c6fef-464e-42e4-b682-66f718a2e8f1 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Task: {'id': task-1802232, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.069559} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1509.993960] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-1d8c6fef-464e-42e4-b682-66f718a2e8f1 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] [instance: 51eaac08-75fd-49f9-9b1a-cc2a2d799634] Extended root virtual disk {{(pid=62519) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1509.994847] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ccad5b36-84c5-4880-a077-a860c95d898f {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1510.018444] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-1d8c6fef-464e-42e4-b682-66f718a2e8f1 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] [instance: 51eaac08-75fd-49f9-9b1a-cc2a2d799634] Reconfiguring VM instance instance-00000020 to attach disk [datastore1] 51eaac08-75fd-49f9-9b1a-cc2a2d799634/51eaac08-75fd-49f9-9b1a-cc2a2d799634.vmdk or device None with type sparse {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1510.018770] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2954472d-f95d-4de6-9074-830be2892b57 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1510.034996] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-24e41ff3-6996-4210-8329-73785737b570 tempest-ServersTestMultiNic-797441404 tempest-ServersTestMultiNic-797441404-project-member] [instance: cfefa7c8-4986-4ad0-ac20-8784ee44a737] Unregistered the VM {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1510.035242] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-24e41ff3-6996-4210-8329-73785737b570 tempest-ServersTestMultiNic-797441404 tempest-ServersTestMultiNic-797441404-project-member] [instance: cfefa7c8-4986-4ad0-ac20-8784ee44a737] Deleting contents of the VM from datastore datastore1 {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1510.035417] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-24e41ff3-6996-4210-8329-73785737b570 tempest-ServersTestMultiNic-797441404 tempest-ServersTestMultiNic-797441404-project-member] Deleting the datastore file [datastore1] cfefa7c8-4986-4ad0-ac20-8784ee44a737 {{(pid=62519) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1510.036077] env[62519]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-477c95b2-46f2-4b4d-b208-03ca3e254198 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1510.041237] env[62519]: DEBUG oslo_vmware.api [None req-1d8c6fef-464e-42e4-b682-66f718a2e8f1 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Waiting for the task: (returnval){ [ 1510.041237] env[62519]: value = "task-1802234" [ 1510.041237] env[62519]: _type = "Task" [ 1510.041237] env[62519]: } to complete. 
{{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1510.042555] env[62519]: DEBUG oslo_vmware.api [None req-24e41ff3-6996-4210-8329-73785737b570 tempest-ServersTestMultiNic-797441404 tempest-ServersTestMultiNic-797441404-project-member] Waiting for the task: (returnval){ [ 1510.042555] env[62519]: value = "task-1802235" [ 1510.042555] env[62519]: _type = "Task" [ 1510.042555] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1510.054950] env[62519]: DEBUG oslo_vmware.api [None req-24e41ff3-6996-4210-8329-73785737b570 tempest-ServersTestMultiNic-797441404 tempest-ServersTestMultiNic-797441404-project-member] Task: {'id': task-1802235, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1510.058391] env[62519]: DEBUG oslo_vmware.api [None req-1d8c6fef-464e-42e4-b682-66f718a2e8f1 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Task: {'id': task-1802234, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1510.206181] env[62519]: DEBUG oslo_concurrency.lockutils [None req-b10b0bf2-5335-4999-b79b-72a39bf6242d tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1510.235398] env[62519]: DEBUG nova.compute.utils [None req-d2114a68-6388-4c74-8f9c-6fba6cdea6e5 tempest-ServersTestFqdnHostnames-1137700598 tempest-ServersTestFqdnHostnames-1137700598-project-member] Using /dev/sd instead of None {{(pid=62519) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1510.236891] env[62519]: DEBUG nova.compute.manager [None req-d2114a68-6388-4c74-8f9c-6fba6cdea6e5 tempest-ServersTestFqdnHostnames-1137700598 tempest-ServersTestFqdnHostnames-1137700598-project-member] [instance: a59be5e6-2316-4766-933a-4d01dfe4fec1] Allocating IP information in the background. 
{{(pid=62519) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1989}} [ 1510.237121] env[62519]: DEBUG nova.network.neutron [None req-d2114a68-6388-4c74-8f9c-6fba6cdea6e5 tempest-ServersTestFqdnHostnames-1137700598 tempest-ServersTestFqdnHostnames-1137700598-project-member] [instance: a59be5e6-2316-4766-933a-4d01dfe4fec1] allocate_for_instance() {{(pid=62519) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1510.293069] env[62519]: DEBUG nova.policy [None req-d2114a68-6388-4c74-8f9c-6fba6cdea6e5 tempest-ServersTestFqdnHostnames-1137700598 tempest-ServersTestFqdnHostnames-1137700598-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '5eddb512b98d43e1b1b38b5f2b0a32e4', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '9498e9017e174096886dccaf5574a0ab', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62519) authorize /opt/stack/nova/nova/policy.py:192}} [ 1510.310138] env[62519]: DEBUG oslo_concurrency.lockutils [None req-9eaae7f7-bdee-49ed-a285-41553171929e tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1510.504462] env[62519]: DEBUG nova.network.neutron [None req-0c4994c0-96a4-4874-90b8-74f17da23551 tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] [instance: 2b68e95a-1d93-43ee-b0a6-996c9042f5c7] Instance cache missing network info. {{(pid=62519) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1510.559371] env[62519]: DEBUG oslo_vmware.api [None req-1d8c6fef-464e-42e4-b682-66f718a2e8f1 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Task: {'id': task-1802234, 'name': ReconfigVM_Task, 'duration_secs': 0.28694} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1510.559633] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-1d8c6fef-464e-42e4-b682-66f718a2e8f1 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] [instance: 51eaac08-75fd-49f9-9b1a-cc2a2d799634] Reconfigured VM instance instance-00000020 to attach disk [datastore1] 51eaac08-75fd-49f9-9b1a-cc2a2d799634/51eaac08-75fd-49f9-9b1a-cc2a2d799634.vmdk or device None with type sparse {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1510.562671] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-92dff645-b53a-45d4-9d2a-bf56c69c17f7 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1510.569023] env[62519]: DEBUG oslo_vmware.api [None req-24e41ff3-6996-4210-8329-73785737b570 tempest-ServersTestMultiNic-797441404 tempest-ServersTestMultiNic-797441404-project-member] Task: {'id': task-1802235, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1510.576146] env[62519]: DEBUG oslo_vmware.api [None req-1d8c6fef-464e-42e4-b682-66f718a2e8f1 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Waiting for the task: (returnval){ [ 1510.576146] env[62519]: value = "task-1802236" [ 1510.576146] env[62519]: _type = "Task" [ 1510.576146] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1510.582276] env[62519]: DEBUG nova.network.neutron [None req-d2114a68-6388-4c74-8f9c-6fba6cdea6e5 tempest-ServersTestFqdnHostnames-1137700598 tempest-ServersTestFqdnHostnames-1137700598-project-member] [instance: a59be5e6-2316-4766-933a-4d01dfe4fec1] Successfully created port: 883e1605-a678-4428-b758-c717d5d49ce2 {{(pid=62519) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1510.587903] env[62519]: DEBUG oslo_vmware.api [None req-1d8c6fef-464e-42e4-b682-66f718a2e8f1 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Task: {'id': task-1802236, 'name': Rename_Task} progress is 5%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1510.742681] env[62519]: DEBUG nova.compute.manager [None req-d2114a68-6388-4c74-8f9c-6fba6cdea6e5 tempest-ServersTestFqdnHostnames-1137700598 tempest-ServersTestFqdnHostnames-1137700598-project-member] [instance: a59be5e6-2316-4766-933a-4d01dfe4fec1] Start building block device mappings for instance. {{(pid=62519) _build_resources /opt/stack/nova/nova/compute/manager.py:2873}} [ 1510.775331] env[62519]: DEBUG nova.network.neutron [None req-0c4994c0-96a4-4874-90b8-74f17da23551 tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] [instance: 2b68e95a-1d93-43ee-b0a6-996c9042f5c7] Updating instance_info_cache with network_info: [{"id": "0a18744e-471e-4942-8a22-59aa90a680af", "address": "fa:16:3e:e8:c6:01", "network": {"id": "b4c20e4e-b4f0-4757-9ab0-b74eef10b678", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.239", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "4069337684d348e0a1ab4eb6a1a2b14d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fe99da4f-5630-4afd-918b-b327193d8489", "external-id": "nsx-vlan-transportzone-688", "segmentation_id": 688, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0a18744e-47", "ovs_interfaceid": "0a18744e-471e-4942-8a22-59aa90a680af", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1510.817905] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c013c64f-a136-4332-b094-3f41bc683ff8 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1510.827166] 
env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a00e45b9-1961-4385-a254-5c7984c6d9df {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1510.862865] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e5096f5-23eb-4171-b0b6-0e706892e4fa {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1510.871234] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f63f8536-9c39-43ae-a862-c5b401a9b88f {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1510.886183] env[62519]: DEBUG nova.compute.provider_tree [None req-ca8ca56a-35a2-4cb3-a5f6-65171c7c779d tempest-ServersAdminNegativeTestJSON-2096466474 tempest-ServersAdminNegativeTestJSON-2096466474-project-member] Inventory has not changed in ProviderTree for provider: f8ca0d98-9158-4b85-ae0e-b106f966dd44 {{(pid=62519) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1510.971411] env[62519]: DEBUG nova.compute.manager [req-ce5546a1-82ae-4039-948a-ae1065292752 req-fdd8645d-f45f-4cf8-8ac1-aa8f42c778b1 service nova] [instance: 2b68e95a-1d93-43ee-b0a6-996c9042f5c7] Received event network-vif-plugged-0a18744e-471e-4942-8a22-59aa90a680af {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1510.971573] env[62519]: DEBUG oslo_concurrency.lockutils [req-ce5546a1-82ae-4039-948a-ae1065292752 req-fdd8645d-f45f-4cf8-8ac1-aa8f42c778b1 service nova] Acquiring lock "2b68e95a-1d93-43ee-b0a6-996c9042f5c7-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1510.971720] env[62519]: DEBUG oslo_concurrency.lockutils [req-ce5546a1-82ae-4039-948a-ae1065292752 req-fdd8645d-f45f-4cf8-8ac1-aa8f42c778b1 service nova] Lock "2b68e95a-1d93-43ee-b0a6-996c9042f5c7-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1510.971938] env[62519]: DEBUG oslo_concurrency.lockutils [req-ce5546a1-82ae-4039-948a-ae1065292752 req-fdd8645d-f45f-4cf8-8ac1-aa8f42c778b1 service nova] Lock "2b68e95a-1d93-43ee-b0a6-996c9042f5c7-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1510.972299] env[62519]: DEBUG nova.compute.manager [req-ce5546a1-82ae-4039-948a-ae1065292752 req-fdd8645d-f45f-4cf8-8ac1-aa8f42c778b1 service nova] [instance: 2b68e95a-1d93-43ee-b0a6-996c9042f5c7] No waiting events found dispatching network-vif-plugged-0a18744e-471e-4942-8a22-59aa90a680af {{(pid=62519) pop_instance_event /opt/stack/nova/nova/compute/manager.py:323}} [ 1510.972547] env[62519]: WARNING nova.compute.manager [req-ce5546a1-82ae-4039-948a-ae1065292752 req-fdd8645d-f45f-4cf8-8ac1-aa8f42c778b1 service nova] [instance: 2b68e95a-1d93-43ee-b0a6-996c9042f5c7] Received unexpected event network-vif-plugged-0a18744e-471e-4942-8a22-59aa90a680af for instance with vm_state building and task_state spawning. 
[ 1510.972745] env[62519]: DEBUG nova.compute.manager [req-ce5546a1-82ae-4039-948a-ae1065292752 req-fdd8645d-f45f-4cf8-8ac1-aa8f42c778b1 service nova] [instance: 2b68e95a-1d93-43ee-b0a6-996c9042f5c7] Received event network-changed-0a18744e-471e-4942-8a22-59aa90a680af {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1510.972913] env[62519]: DEBUG nova.compute.manager [req-ce5546a1-82ae-4039-948a-ae1065292752 req-fdd8645d-f45f-4cf8-8ac1-aa8f42c778b1 service nova] [instance: 2b68e95a-1d93-43ee-b0a6-996c9042f5c7] Refreshing instance network info cache due to event network-changed-0a18744e-471e-4942-8a22-59aa90a680af. {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11628}} [ 1510.973102] env[62519]: DEBUG oslo_concurrency.lockutils [req-ce5546a1-82ae-4039-948a-ae1065292752 req-fdd8645d-f45f-4cf8-8ac1-aa8f42c778b1 service nova] Acquiring lock "refresh_cache-2b68e95a-1d93-43ee-b0a6-996c9042f5c7" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1511.031671] env[62519]: DEBUG oslo_concurrency.lockutils [None req-e1dc4985-f528-4858-87c1-875c614a127e tempest-VolumesAssistedSnapshotsTest-1756313024 tempest-VolumesAssistedSnapshotsTest-1756313024-project-admin] Acquiring lock "91902e7f-8c15-447b-a3a8-04433434b1b6" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1511.032070] env[62519]: DEBUG oslo_concurrency.lockutils [None req-e1dc4985-f528-4858-87c1-875c614a127e tempest-VolumesAssistedSnapshotsTest-1756313024 tempest-VolumesAssistedSnapshotsTest-1756313024-project-admin] Lock "91902e7f-8c15-447b-a3a8-04433434b1b6" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.001s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1511.032403] env[62519]: INFO nova.compute.manager [None req-e1dc4985-f528-4858-87c1-875c614a127e tempest-VolumesAssistedSnapshotsTest-1756313024 tempest-VolumesAssistedSnapshotsTest-1756313024-project-admin] [instance: 91902e7f-8c15-447b-a3a8-04433434b1b6] Attaching volume b3ec9fe2-f38d-46f0-977a-134fc940c266 to /dev/sdb [ 1511.067850] env[62519]: DEBUG oslo_vmware.api [None req-24e41ff3-6996-4210-8329-73785737b570 tempest-ServersTestMultiNic-797441404 tempest-ServersTestMultiNic-797441404-project-member] Task: {'id': task-1802235, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.608132} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1511.068207] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-24e41ff3-6996-4210-8329-73785737b570 tempest-ServersTestMultiNic-797441404 tempest-ServersTestMultiNic-797441404-project-member] Deleted the datastore file {{(pid=62519) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1511.068461] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-24e41ff3-6996-4210-8329-73785737b570 tempest-ServersTestMultiNic-797441404 tempest-ServersTestMultiNic-797441404-project-member] [instance: cfefa7c8-4986-4ad0-ac20-8784ee44a737] Deleted contents of the VM from datastore datastore1 {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1511.068712] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-24e41ff3-6996-4210-8329-73785737b570 tempest-ServersTestMultiNic-797441404 tempest-ServersTestMultiNic-797441404-project-member] [instance: cfefa7c8-4986-4ad0-ac20-8784ee44a737] Instance destroyed {{(pid=62519) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1511.068953] env[62519]: INFO nova.compute.manager [None req-24e41ff3-6996-4210-8329-73785737b570 tempest-ServersTestMultiNic-797441404 tempest-ServersTestMultiNic-797441404-project-member] [instance: cfefa7c8-4986-4ad0-ac20-8784ee44a737] Took 1.97 seconds to destroy the instance on the hypervisor. [ 1511.069304] env[62519]: DEBUG oslo.service.loopingcall [None req-24e41ff3-6996-4210-8329-73785737b570 tempest-ServersTestMultiNic-797441404 tempest-ServersTestMultiNic-797441404-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62519) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1511.069560] env[62519]: DEBUG nova.compute.manager [-] [instance: cfefa7c8-4986-4ad0-ac20-8784ee44a737] Deallocating network for instance {{(pid=62519) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2297}} [ 1511.069691] env[62519]: DEBUG nova.network.neutron [-] [instance: cfefa7c8-4986-4ad0-ac20-8784ee44a737] deallocate_for_instance() {{(pid=62519) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1511.083709] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad52c13a-470d-47fd-a30d-d184cba81bce {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1511.095480] env[62519]: DEBUG oslo_vmware.api [None req-1d8c6fef-464e-42e4-b682-66f718a2e8f1 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Task: {'id': task-1802236, 'name': Rename_Task, 'duration_secs': 0.153875} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1511.098035] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-1d8c6fef-464e-42e4-b682-66f718a2e8f1 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] [instance: 51eaac08-75fd-49f9-9b1a-cc2a2d799634] Powering on the VM {{(pid=62519) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1511.098464] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-cd92dcae-7fd6-4dca-9d19-f1d8354a08b5 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1511.101286] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f15487d-c41c-4112-808e-2bb63c104dd0 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1511.112240] env[62519]: DEBUG oslo_vmware.api [None req-1d8c6fef-464e-42e4-b682-66f718a2e8f1 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Waiting for the task: (returnval){ [ 1511.112240] env[62519]: value = "task-1802237" [ 1511.112240] env[62519]: _type = "Task" [ 1511.112240] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1511.121938] env[62519]: DEBUG nova.virt.block_device [None req-e1dc4985-f528-4858-87c1-875c614a127e tempest-VolumesAssistedSnapshotsTest-1756313024 tempest-VolumesAssistedSnapshotsTest-1756313024-project-admin] [instance: 91902e7f-8c15-447b-a3a8-04433434b1b6] Updating existing volume attachment record: 26a510fd-bbef-483c-86d5-a6103f0e295e {{(pid=62519) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1511.130952] env[62519]: DEBUG oslo_vmware.api [None req-1d8c6fef-464e-42e4-b682-66f718a2e8f1 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Task: {'id': task-1802237, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1511.278731] env[62519]: DEBUG oslo_concurrency.lockutils [None req-0c4994c0-96a4-4874-90b8-74f17da23551 tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] Releasing lock "refresh_cache-2b68e95a-1d93-43ee-b0a6-996c9042f5c7" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1511.279124] env[62519]: DEBUG nova.compute.manager [None req-0c4994c0-96a4-4874-90b8-74f17da23551 tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] [instance: 2b68e95a-1d93-43ee-b0a6-996c9042f5c7] Instance network_info: |[{"id": "0a18744e-471e-4942-8a22-59aa90a680af", "address": "fa:16:3e:e8:c6:01", "network": {"id": "b4c20e4e-b4f0-4757-9ab0-b74eef10b678", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.239", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "4069337684d348e0a1ab4eb6a1a2b14d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fe99da4f-5630-4afd-918b-b327193d8489", "external-id": "nsx-vlan-transportzone-688", "segmentation_id": 688, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0a18744e-47", "ovs_interfaceid": "0a18744e-471e-4942-8a22-59aa90a680af", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62519) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2004}} [ 1511.279449] env[62519]: DEBUG oslo_concurrency.lockutils [req-ce5546a1-82ae-4039-948a-ae1065292752 req-fdd8645d-f45f-4cf8-8ac1-aa8f42c778b1 service nova] Acquired lock "refresh_cache-2b68e95a-1d93-43ee-b0a6-996c9042f5c7" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1511.283125] env[62519]: DEBUG nova.network.neutron [req-ce5546a1-82ae-4039-948a-ae1065292752 req-fdd8645d-f45f-4cf8-8ac1-aa8f42c778b1 service nova] [instance: 2b68e95a-1d93-43ee-b0a6-996c9042f5c7] Refreshing network info cache for port 0a18744e-471e-4942-8a22-59aa90a680af {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1511.283125] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-0c4994c0-96a4-4874-90b8-74f17da23551 tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] [instance: 2b68e95a-1d93-43ee-b0a6-996c9042f5c7] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:e8:c6:01', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'fe99da4f-5630-4afd-918b-b327193d8489', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '0a18744e-471e-4942-8a22-59aa90a680af', 'vif_model': 'vmxnet3'}] {{(pid=62519) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1511.291235] env[62519]: DEBUG oslo.service.loopingcall [None req-0c4994c0-96a4-4874-90b8-74f17da23551 tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] 
Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62519) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1511.291235] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2b68e95a-1d93-43ee-b0a6-996c9042f5c7] Creating VM on the ESX host {{(pid=62519) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1511.291235] env[62519]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-11eb17f6-14c2-4ad8-a236-1777a4739b9f {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1511.324415] env[62519]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1511.324415] env[62519]: value = "task-1802238" [ 1511.324415] env[62519]: _type = "Task" [ 1511.324415] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1511.335528] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1802238, 'name': CreateVM_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1511.389541] env[62519]: DEBUG nova.scheduler.client.report [None req-ca8ca56a-35a2-4cb3-a5f6-65171c7c779d tempest-ServersAdminNegativeTestJSON-2096466474 tempest-ServersAdminNegativeTestJSON-2096466474-project-member] Inventory has not changed for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1511.624809] env[62519]: DEBUG oslo_vmware.api [None req-1d8c6fef-464e-42e4-b682-66f718a2e8f1 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Task: {'id': task-1802237, 'name': PowerOnVM_Task, 'duration_secs': 0.51314} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1511.625167] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-1d8c6fef-464e-42e4-b682-66f718a2e8f1 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] [instance: 51eaac08-75fd-49f9-9b1a-cc2a2d799634] Powered on the VM {{(pid=62519) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1511.625410] env[62519]: INFO nova.compute.manager [None req-1d8c6fef-464e-42e4-b682-66f718a2e8f1 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] [instance: 51eaac08-75fd-49f9-9b1a-cc2a2d799634] Took 7.45 seconds to spawn the instance on the hypervisor. 
[ 1511.625617] env[62519]: DEBUG nova.compute.manager [None req-1d8c6fef-464e-42e4-b682-66f718a2e8f1 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] [instance: 51eaac08-75fd-49f9-9b1a-cc2a2d799634] Checking state {{(pid=62519) _get_power_state /opt/stack/nova/nova/compute/manager.py:1799}} [ 1511.626548] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f38b21ae-a840-46cd-84d9-2ffbca8bfd35 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1511.752748] env[62519]: DEBUG nova.compute.manager [None req-d2114a68-6388-4c74-8f9c-6fba6cdea6e5 tempest-ServersTestFqdnHostnames-1137700598 tempest-ServersTestFqdnHostnames-1137700598-project-member] [instance: a59be5e6-2316-4766-933a-4d01dfe4fec1] Start spawning the instance on the hypervisor. {{(pid=62519) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2647}} [ 1511.785520] env[62519]: DEBUG nova.virt.hardware [None req-d2114a68-6388-4c74-8f9c-6fba6cdea6e5 tempest-ServersTestFqdnHostnames-1137700598 tempest-ServersTestFqdnHostnames-1137700598-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T07:56:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T07:55:55Z,direct_url=,disk_format='vmdk',id=15793716-f1d9-4a86-9030-717adf498693,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4069337684d348e0a1ab4eb6a1a2b14d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T07:55:56Z,virtual_size=,visibility=), allow threads: False {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1511.785759] env[62519]: DEBUG nova.virt.hardware [None req-d2114a68-6388-4c74-8f9c-6fba6cdea6e5 tempest-ServersTestFqdnHostnames-1137700598 tempest-ServersTestFqdnHostnames-1137700598-project-member] Flavor limits 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1511.785911] env[62519]: DEBUG nova.virt.hardware [None req-d2114a68-6388-4c74-8f9c-6fba6cdea6e5 tempest-ServersTestFqdnHostnames-1137700598 tempest-ServersTestFqdnHostnames-1137700598-project-member] Image limits 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1511.786166] env[62519]: DEBUG nova.virt.hardware [None req-d2114a68-6388-4c74-8f9c-6fba6cdea6e5 tempest-ServersTestFqdnHostnames-1137700598 tempest-ServersTestFqdnHostnames-1137700598-project-member] Flavor pref 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1511.786324] env[62519]: DEBUG nova.virt.hardware [None req-d2114a68-6388-4c74-8f9c-6fba6cdea6e5 tempest-ServersTestFqdnHostnames-1137700598 tempest-ServersTestFqdnHostnames-1137700598-project-member] Image pref 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1511.786464] env[62519]: DEBUG nova.virt.hardware [None req-d2114a68-6388-4c74-8f9c-6fba6cdea6e5 tempest-ServersTestFqdnHostnames-1137700598 tempest-ServersTestFqdnHostnames-1137700598-project-member] Chose sockets=0, 
cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1511.788192] env[62519]: DEBUG nova.virt.hardware [None req-d2114a68-6388-4c74-8f9c-6fba6cdea6e5 tempest-ServersTestFqdnHostnames-1137700598 tempest-ServersTestFqdnHostnames-1137700598-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1511.788192] env[62519]: DEBUG nova.virt.hardware [None req-d2114a68-6388-4c74-8f9c-6fba6cdea6e5 tempest-ServersTestFqdnHostnames-1137700598 tempest-ServersTestFqdnHostnames-1137700598-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62519) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1511.788296] env[62519]: DEBUG nova.virt.hardware [None req-d2114a68-6388-4c74-8f9c-6fba6cdea6e5 tempest-ServersTestFqdnHostnames-1137700598 tempest-ServersTestFqdnHostnames-1137700598-project-member] Got 1 possible topologies {{(pid=62519) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1511.788514] env[62519]: DEBUG nova.virt.hardware [None req-d2114a68-6388-4c74-8f9c-6fba6cdea6e5 tempest-ServersTestFqdnHostnames-1137700598 tempest-ServersTestFqdnHostnames-1137700598-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1511.788742] env[62519]: DEBUG nova.virt.hardware [None req-d2114a68-6388-4c74-8f9c-6fba6cdea6e5 tempest-ServersTestFqdnHostnames-1137700598 tempest-ServersTestFqdnHostnames-1137700598-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1511.791096] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0690f1bd-ab05-4ff1-b983-e3020b806608 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1511.797393] env[62519]: DEBUG nova.compute.manager [req-8cd87f0e-09f1-48c2-9f62-167edc3c4938 req-9ab5e78b-4456-4feb-ad25-8a4c28463c47 service nova] [instance: cfefa7c8-4986-4ad0-ac20-8784ee44a737] Received event network-vif-deleted-7d9ab028-27a4-47e2-8a46-08e1672bd6fb {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1511.797762] env[62519]: INFO nova.compute.manager [req-8cd87f0e-09f1-48c2-9f62-167edc3c4938 req-9ab5e78b-4456-4feb-ad25-8a4c28463c47 service nova] [instance: cfefa7c8-4986-4ad0-ac20-8784ee44a737] Neutron deleted interface 7d9ab028-27a4-47e2-8a46-08e1672bd6fb; detaching it from the instance and deleting it from the info cache [ 1511.798128] env[62519]: DEBUG nova.network.neutron [req-8cd87f0e-09f1-48c2-9f62-167edc3c4938 req-9ab5e78b-4456-4feb-ad25-8a4c28463c47 service nova] [instance: cfefa7c8-4986-4ad0-ac20-8784ee44a737] Updating instance_info_cache with network_info: [{"id": "dd1ebcef-cdc1-4641-8622-443a2365b497", "address": "fa:16:3e:02:07:45", "network": {"id": "b272ddf8-d08f-4e19-a9fc-13fe663dac46", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1317143348", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.245", "type": 
"fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "08813c8c5d0b45dbab5a05ed08ef9531", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "33ddef78-922c-4cd3-99b0-971ac7802856", "external-id": "nsx-vlan-transportzone-311", "segmentation_id": 311, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdd1ebcef-cd", "ovs_interfaceid": "dd1ebcef-cdc1-4641-8622-443a2365b497", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "6a3b6bdf-835f-429c-982d-00028308b4b2", "address": "fa:16:3e:e2:f0:27", "network": {"id": "73b13a51-2f2f-4737-aacc-1d0fdc8db0c8", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-602986409", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.178", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "08813c8c5d0b45dbab5a05ed08ef9531", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6b151a0c-aa46-4d21-9ef5-c09cf350b19c", "external-id": "nsx-vlan-transportzone-343", "segmentation_id": 343, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6a3b6bdf-83", "ovs_interfaceid": "6a3b6bdf-835f-429c-982d-00028308b4b2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1511.808960] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-106fc733-0154-4fb9-b981-ef4710166c11 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1511.843698] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1802238, 'name': CreateVM_Task, 'duration_secs': 0.399465} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1511.843948] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2b68e95a-1d93-43ee-b0a6-996c9042f5c7] Created VM on the ESX host {{(pid=62519) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1511.844693] env[62519]: DEBUG oslo_concurrency.lockutils [None req-0c4994c0-96a4-4874-90b8-74f17da23551 tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1511.844919] env[62519]: DEBUG oslo_concurrency.lockutils [None req-0c4994c0-96a4-4874-90b8-74f17da23551 tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] Acquired lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1511.845322] env[62519]: DEBUG oslo_concurrency.lockutils [None req-0c4994c0-96a4-4874-90b8-74f17da23551 tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1511.845866] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-91a8827f-45c8-4f30-8934-ef776891a025 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1511.853613] env[62519]: DEBUG oslo_vmware.api [None req-0c4994c0-96a4-4874-90b8-74f17da23551 tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] Waiting for the task: (returnval){ [ 1511.853613] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]52f58d0e-aba1-ac36-df78-38063b819436" [ 1511.853613] env[62519]: _type = "Task" [ 1511.853613] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1511.865093] env[62519]: DEBUG oslo_vmware.api [None req-0c4994c0-96a4-4874-90b8-74f17da23551 tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52f58d0e-aba1-ac36-df78-38063b819436, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1511.894884] env[62519]: DEBUG oslo_concurrency.lockutils [None req-ca8ca56a-35a2-4cb3-a5f6-65171c7c779d tempest-ServersAdminNegativeTestJSON-2096466474 tempest-ServersAdminNegativeTestJSON-2096466474-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.162s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1511.901016] env[62519]: DEBUG oslo_concurrency.lockutils [None req-384607cc-9c11-4f58-8b9f-4201a167011b tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 22.989s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1511.901016] env[62519]: INFO nova.compute.claims [None req-384607cc-9c11-4f58-8b9f-4201a167011b tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] [instance: 3b506d10-a427-47b8-ab5f-c35e450b7eb1] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1511.928449] env[62519]: INFO nova.scheduler.client.report [None req-ca8ca56a-35a2-4cb3-a5f6-65171c7c779d tempest-ServersAdminNegativeTestJSON-2096466474 tempest-ServersAdminNegativeTestJSON-2096466474-project-member] Deleted allocations for instance 681ef7a9-3b24-450a-9034-6d30177995d7 [ 1512.154675] env[62519]: INFO nova.compute.manager [None req-1d8c6fef-464e-42e4-b682-66f718a2e8f1 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] [instance: 51eaac08-75fd-49f9-9b1a-cc2a2d799634] Took 44.22 seconds to build instance. [ 1512.220700] env[62519]: DEBUG nova.network.neutron [req-ce5546a1-82ae-4039-948a-ae1065292752 req-fdd8645d-f45f-4cf8-8ac1-aa8f42c778b1 service nova] [instance: 2b68e95a-1d93-43ee-b0a6-996c9042f5c7] Updated VIF entry in instance network info cache for port 0a18744e-471e-4942-8a22-59aa90a680af. 
{{(pid=62519) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1512.220700] env[62519]: DEBUG nova.network.neutron [req-ce5546a1-82ae-4039-948a-ae1065292752 req-fdd8645d-f45f-4cf8-8ac1-aa8f42c778b1 service nova] [instance: 2b68e95a-1d93-43ee-b0a6-996c9042f5c7] Updating instance_info_cache with network_info: [{"id": "0a18744e-471e-4942-8a22-59aa90a680af", "address": "fa:16:3e:e8:c6:01", "network": {"id": "b4c20e4e-b4f0-4757-9ab0-b74eef10b678", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.239", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "4069337684d348e0a1ab4eb6a1a2b14d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fe99da4f-5630-4afd-918b-b327193d8489", "external-id": "nsx-vlan-transportzone-688", "segmentation_id": 688, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0a18744e-47", "ovs_interfaceid": "0a18744e-471e-4942-8a22-59aa90a680af", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1512.300562] env[62519]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-348f78cf-ec25-47b2-a1c8-ac4a328ad444 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1512.312615] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b70be9be-9d97-4c89-ab5e-91926c477da9 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1512.327197] env[62519]: DEBUG nova.network.neutron [None req-d2114a68-6388-4c74-8f9c-6fba6cdea6e5 tempest-ServersTestFqdnHostnames-1137700598 tempest-ServersTestFqdnHostnames-1137700598-project-member] [instance: a59be5e6-2316-4766-933a-4d01dfe4fec1] Successfully updated port: 883e1605-a678-4428-b758-c717d5d49ce2 {{(pid=62519) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1512.357899] env[62519]: DEBUG nova.compute.manager [req-8cd87f0e-09f1-48c2-9f62-167edc3c4938 req-9ab5e78b-4456-4feb-ad25-8a4c28463c47 service nova] [instance: cfefa7c8-4986-4ad0-ac20-8784ee44a737] Detach interface failed, port_id=7d9ab028-27a4-47e2-8a46-08e1672bd6fb, reason: Instance cfefa7c8-4986-4ad0-ac20-8784ee44a737 could not be found. {{(pid=62519) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11455}} [ 1512.369741] env[62519]: DEBUG oslo_vmware.api [None req-0c4994c0-96a4-4874-90b8-74f17da23551 tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52f58d0e-aba1-ac36-df78-38063b819436, 'name': SearchDatastore_Task, 'duration_secs': 0.014623} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1512.370330] env[62519]: DEBUG oslo_concurrency.lockutils [None req-0c4994c0-96a4-4874-90b8-74f17da23551 tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] Releasing lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1512.370565] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-0c4994c0-96a4-4874-90b8-74f17da23551 tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] [instance: 2b68e95a-1d93-43ee-b0a6-996c9042f5c7] Processing image 15793716-f1d9-4a86-9030-717adf498693 {{(pid=62519) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1512.370791] env[62519]: DEBUG oslo_concurrency.lockutils [None req-0c4994c0-96a4-4874-90b8-74f17da23551 tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1512.370933] env[62519]: DEBUG oslo_concurrency.lockutils [None req-0c4994c0-96a4-4874-90b8-74f17da23551 tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] Acquired lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1512.371119] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-0c4994c0-96a4-4874-90b8-74f17da23551 tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62519) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1512.371438] env[62519]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-7c82e4f8-2f47-4a06-b1ba-74556ef6969b {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1512.383776] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-0c4994c0-96a4-4874-90b8-74f17da23551 tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62519) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1512.384090] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-0c4994c0-96a4-4874-90b8-74f17da23551 tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62519) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1512.384841] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c105c8aa-9f7c-44a6-bc13-2f27b117be7b {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1512.392215] env[62519]: DEBUG oslo_vmware.api [None req-0c4994c0-96a4-4874-90b8-74f17da23551 tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] Waiting for the task: (returnval){ [ 1512.392215] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]5276fa2b-b0cb-716f-fc18-27dd8c80b0e4" [ 1512.392215] env[62519]: _type = "Task" [ 1512.392215] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1512.402070] env[62519]: DEBUG oslo_vmware.api [None req-0c4994c0-96a4-4874-90b8-74f17da23551 tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]5276fa2b-b0cb-716f-fc18-27dd8c80b0e4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1512.439130] env[62519]: DEBUG oslo_concurrency.lockutils [None req-ca8ca56a-35a2-4cb3-a5f6-65171c7c779d tempest-ServersAdminNegativeTestJSON-2096466474 tempest-ServersAdminNegativeTestJSON-2096466474-project-member] Lock "681ef7a9-3b24-450a-9034-6d30177995d7" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 36.224s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1512.723028] env[62519]: DEBUG oslo_concurrency.lockutils [req-ce5546a1-82ae-4039-948a-ae1065292752 req-fdd8645d-f45f-4cf8-8ac1-aa8f42c778b1 service nova] Releasing lock "refresh_cache-2b68e95a-1d93-43ee-b0a6-996c9042f5c7" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1512.833099] env[62519]: DEBUG oslo_concurrency.lockutils [None req-d2114a68-6388-4c74-8f9c-6fba6cdea6e5 tempest-ServersTestFqdnHostnames-1137700598 tempest-ServersTestFqdnHostnames-1137700598-project-member] Acquiring lock "refresh_cache-a59be5e6-2316-4766-933a-4d01dfe4fec1" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1512.833281] env[62519]: DEBUG oslo_concurrency.lockutils [None req-d2114a68-6388-4c74-8f9c-6fba6cdea6e5 tempest-ServersTestFqdnHostnames-1137700598 tempest-ServersTestFqdnHostnames-1137700598-project-member] Acquired lock "refresh_cache-a59be5e6-2316-4766-933a-4d01dfe4fec1" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1512.833438] env[62519]: DEBUG nova.network.neutron [None req-d2114a68-6388-4c74-8f9c-6fba6cdea6e5 tempest-ServersTestFqdnHostnames-1137700598 tempest-ServersTestFqdnHostnames-1137700598-project-member] [instance: a59be5e6-2316-4766-933a-4d01dfe4fec1] Building network info cache for instance {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1512.903320] env[62519]: DEBUG oslo_vmware.api [None req-0c4994c0-96a4-4874-90b8-74f17da23551 tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] Task: {'id': 
session[520dffeb-48b6-6e75-74d3-efedab77eb43]5276fa2b-b0cb-716f-fc18-27dd8c80b0e4, 'name': SearchDatastore_Task, 'duration_secs': 0.010206} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1512.904142] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e67ddeb2-7a70-4f62-a26b-8b07206b0b81 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1512.914479] env[62519]: DEBUG oslo_vmware.api [None req-0c4994c0-96a4-4874-90b8-74f17da23551 tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] Waiting for the task: (returnval){ [ 1512.914479] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]52476436-b4f1-ceae-f5ff-dc7623a05db9" [ 1512.914479] env[62519]: _type = "Task" [ 1512.914479] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1512.923841] env[62519]: DEBUG oslo_vmware.api [None req-0c4994c0-96a4-4874-90b8-74f17da23551 tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52476436-b4f1-ceae-f5ff-dc7623a05db9, 'name': SearchDatastore_Task, 'duration_secs': 0.010578} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1512.924171] env[62519]: DEBUG oslo_concurrency.lockutils [None req-0c4994c0-96a4-4874-90b8-74f17da23551 tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] Releasing lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1512.924349] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-0c4994c0-96a4-4874-90b8-74f17da23551 tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk to [datastore1] 2b68e95a-1d93-43ee-b0a6-996c9042f5c7/2b68e95a-1d93-43ee-b0a6-996c9042f5c7.vmdk {{(pid=62519) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1512.924635] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-7913e6c4-fed5-4297-bc55-32e3a62b8c91 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1512.932539] env[62519]: DEBUG oslo_vmware.api [None req-0c4994c0-96a4-4874-90b8-74f17da23551 tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] Waiting for the task: (returnval){ [ 1512.932539] env[62519]: value = "task-1802242" [ 1512.932539] env[62519]: _type = "Task" [ 1512.932539] env[62519]: } to complete. 
{{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1512.952455] env[62519]: DEBUG oslo_vmware.api [None req-0c4994c0-96a4-4874-90b8-74f17da23551 tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] Task: {'id': task-1802242, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1513.013091] env[62519]: DEBUG nova.network.neutron [-] [instance: cfefa7c8-4986-4ad0-ac20-8784ee44a737] Updating instance_info_cache with network_info: [] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1513.128077] env[62519]: DEBUG oslo_concurrency.lockutils [None req-040512d1-e180-4bd4-99a9-22e550a80706 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Acquiring lock "80ef3fd4-b9ef-4fd2-a991-feec78a0c81d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1513.128077] env[62519]: DEBUG oslo_concurrency.lockutils [None req-040512d1-e180-4bd4-99a9-22e550a80706 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Lock "80ef3fd4-b9ef-4fd2-a991-feec78a0c81d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1513.227334] env[62519]: DEBUG nova.compute.manager [req-64a250ed-185e-4aa9-b5a5-8cd6c5ea053a req-1570309a-09cc-4faf-91ca-0185cc3cc18a service nova] [instance: a59be5e6-2316-4766-933a-4d01dfe4fec1] Received event network-vif-plugged-883e1605-a678-4428-b758-c717d5d49ce2 {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1513.227596] env[62519]: DEBUG oslo_concurrency.lockutils [req-64a250ed-185e-4aa9-b5a5-8cd6c5ea053a req-1570309a-09cc-4faf-91ca-0185cc3cc18a service nova] Acquiring lock "a59be5e6-2316-4766-933a-4d01dfe4fec1-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1513.227874] env[62519]: DEBUG oslo_concurrency.lockutils [req-64a250ed-185e-4aa9-b5a5-8cd6c5ea053a req-1570309a-09cc-4faf-91ca-0185cc3cc18a service nova] Lock "a59be5e6-2316-4766-933a-4d01dfe4fec1-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1513.228218] env[62519]: DEBUG oslo_concurrency.lockutils [req-64a250ed-185e-4aa9-b5a5-8cd6c5ea053a req-1570309a-09cc-4faf-91ca-0185cc3cc18a service nova] Lock "a59be5e6-2316-4766-933a-4d01dfe4fec1-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1513.228470] env[62519]: DEBUG nova.compute.manager [req-64a250ed-185e-4aa9-b5a5-8cd6c5ea053a req-1570309a-09cc-4faf-91ca-0185cc3cc18a service nova] [instance: a59be5e6-2316-4766-933a-4d01dfe4fec1] No waiting events found dispatching network-vif-plugged-883e1605-a678-4428-b758-c717d5d49ce2 
{{(pid=62519) pop_instance_event /opt/stack/nova/nova/compute/manager.py:323}} [ 1513.229257] env[62519]: WARNING nova.compute.manager [req-64a250ed-185e-4aa9-b5a5-8cd6c5ea053a req-1570309a-09cc-4faf-91ca-0185cc3cc18a service nova] [instance: a59be5e6-2316-4766-933a-4d01dfe4fec1] Received unexpected event network-vif-plugged-883e1605-a678-4428-b758-c717d5d49ce2 for instance with vm_state building and task_state spawning. [ 1513.229526] env[62519]: DEBUG nova.compute.manager [req-64a250ed-185e-4aa9-b5a5-8cd6c5ea053a req-1570309a-09cc-4faf-91ca-0185cc3cc18a service nova] [instance: a59be5e6-2316-4766-933a-4d01dfe4fec1] Received event network-changed-883e1605-a678-4428-b758-c717d5d49ce2 {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1513.229714] env[62519]: DEBUG nova.compute.manager [req-64a250ed-185e-4aa9-b5a5-8cd6c5ea053a req-1570309a-09cc-4faf-91ca-0185cc3cc18a service nova] [instance: a59be5e6-2316-4766-933a-4d01dfe4fec1] Refreshing instance network info cache due to event network-changed-883e1605-a678-4428-b758-c717d5d49ce2. {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11628}} [ 1513.229890] env[62519]: DEBUG oslo_concurrency.lockutils [req-64a250ed-185e-4aa9-b5a5-8cd6c5ea053a req-1570309a-09cc-4faf-91ca-0185cc3cc18a service nova] Acquiring lock "refresh_cache-a59be5e6-2316-4766-933a-4d01dfe4fec1" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1513.392430] env[62519]: DEBUG nova.network.neutron [None req-d2114a68-6388-4c74-8f9c-6fba6cdea6e5 tempest-ServersTestFqdnHostnames-1137700598 tempest-ServersTestFqdnHostnames-1137700598-project-member] [instance: a59be5e6-2316-4766-933a-4d01dfe4fec1] Instance cache missing network info. {{(pid=62519) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1513.443562] env[62519]: DEBUG oslo_vmware.api [None req-0c4994c0-96a4-4874-90b8-74f17da23551 tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] Task: {'id': task-1802242, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1513.447858] env[62519]: INFO nova.compute.manager [None req-84de915f-3960-4f49-9d37-c92acb68e404 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] [instance: 51eaac08-75fd-49f9-9b1a-cc2a2d799634] Rebuilding instance [ 1513.513718] env[62519]: DEBUG nova.compute.manager [None req-84de915f-3960-4f49-9d37-c92acb68e404 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] [instance: 51eaac08-75fd-49f9-9b1a-cc2a2d799634] Checking state {{(pid=62519) _get_power_state /opt/stack/nova/nova/compute/manager.py:1799}} [ 1513.514112] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-777997b9-8c56-4be1-9bd5-1ad47e69a1e9 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1513.519387] env[62519]: INFO nova.compute.manager [-] [instance: cfefa7c8-4986-4ad0-ac20-8784ee44a737] Took 2.45 seconds to deallocate network for instance. 
[ 1513.551603] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5cc61d1d-172f-4047-9459-10e97b832101 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1513.560773] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6bfb3703-38db-469c-9004-83cc62d1e2aa {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1513.600110] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30c80e9b-98d5-42f1-a569-21f5c033ae10 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1513.608672] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fee90567-2e05-4b8a-9319-99eec5fb00de {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1513.623848] env[62519]: DEBUG nova.compute.provider_tree [None req-384607cc-9c11-4f58-8b9f-4201a167011b tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Updating inventory in ProviderTree for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 157, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1513.626547] env[62519]: DEBUG nova.network.neutron [None req-d2114a68-6388-4c74-8f9c-6fba6cdea6e5 tempest-ServersTestFqdnHostnames-1137700598 tempest-ServersTestFqdnHostnames-1137700598-project-member] [instance: a59be5e6-2316-4766-933a-4d01dfe4fec1] Updating instance_info_cache with network_info: [{"id": "883e1605-a678-4428-b758-c717d5d49ce2", "address": "fa:16:3e:5d:b5:47", "network": {"id": "45dcc816-e95d-44c3-8cf1-0e178b840ae4", "bridge": "br-int", "label": "tempest-ServersTestFqdnHostnames-504304918-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9498e9017e174096886dccaf5574a0ab", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "33fdc099-7497-41c1-b40c-1558937132d4", "external-id": "nsx-vlan-transportzone-764", "segmentation_id": 764, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap883e1605-a6", "ovs_interfaceid": "883e1605-a678-4428-b758-c717d5d49ce2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1513.630107] env[62519]: DEBUG oslo_concurrency.lockutils [None req-799af596-e4ed-43b4-bbf9-d5b7ae219998 tempest-ServersAdminNegativeTestJSON-2096466474 
tempest-ServersAdminNegativeTestJSON-2096466474-project-member] Acquiring lock "c8b7568b-ba07-4f65-818b-f84910209361" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1513.630339] env[62519]: DEBUG oslo_concurrency.lockutils [None req-799af596-e4ed-43b4-bbf9-d5b7ae219998 tempest-ServersAdminNegativeTestJSON-2096466474 tempest-ServersAdminNegativeTestJSON-2096466474-project-member] Lock "c8b7568b-ba07-4f65-818b-f84910209361" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1513.630531] env[62519]: DEBUG oslo_concurrency.lockutils [None req-799af596-e4ed-43b4-bbf9-d5b7ae219998 tempest-ServersAdminNegativeTestJSON-2096466474 tempest-ServersAdminNegativeTestJSON-2096466474-project-member] Acquiring lock "c8b7568b-ba07-4f65-818b-f84910209361-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1513.630705] env[62519]: DEBUG oslo_concurrency.lockutils [None req-799af596-e4ed-43b4-bbf9-d5b7ae219998 tempest-ServersAdminNegativeTestJSON-2096466474 tempest-ServersAdminNegativeTestJSON-2096466474-project-member] Lock "c8b7568b-ba07-4f65-818b-f84910209361-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1513.630864] env[62519]: DEBUG oslo_concurrency.lockutils [None req-799af596-e4ed-43b4-bbf9-d5b7ae219998 tempest-ServersAdminNegativeTestJSON-2096466474 tempest-ServersAdminNegativeTestJSON-2096466474-project-member] Lock "c8b7568b-ba07-4f65-818b-f84910209361-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1513.632988] env[62519]: INFO nova.compute.manager [None req-799af596-e4ed-43b4-bbf9-d5b7ae219998 tempest-ServersAdminNegativeTestJSON-2096466474 tempest-ServersAdminNegativeTestJSON-2096466474-project-member] [instance: c8b7568b-ba07-4f65-818b-f84910209361] Terminating instance [ 1513.664637] env[62519]: DEBUG oslo_concurrency.lockutils [None req-1d8c6fef-464e-42e4-b682-66f718a2e8f1 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Lock "51eaac08-75fd-49f9-9b1a-cc2a2d799634" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 92.425s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1513.888739] env[62519]: DEBUG nova.compute.manager [req-557947bc-f150-4454-982e-fa64cd6ec3d2 req-5b2b7053-22bd-4ddf-9222-ab364d0629e3 service nova] [instance: cfefa7c8-4986-4ad0-ac20-8784ee44a737] Received event network-vif-deleted-dd1ebcef-cdc1-4641-8622-443a2365b497 {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1513.889162] env[62519]: DEBUG nova.compute.manager [req-557947bc-f150-4454-982e-fa64cd6ec3d2 req-5b2b7053-22bd-4ddf-9222-ab364d0629e3 service nova] [instance: cfefa7c8-4986-4ad0-ac20-8784ee44a737] 
Received event network-vif-deleted-6a3b6bdf-835f-429c-982d-00028308b4b2 {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1513.944704] env[62519]: DEBUG oslo_vmware.api [None req-0c4994c0-96a4-4874-90b8-74f17da23551 tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] Task: {'id': task-1802242, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.563878} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1513.944971] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-0c4994c0-96a4-4874-90b8-74f17da23551 tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk to [datastore1] 2b68e95a-1d93-43ee-b0a6-996c9042f5c7/2b68e95a-1d93-43ee-b0a6-996c9042f5c7.vmdk {{(pid=62519) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1513.945208] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-0c4994c0-96a4-4874-90b8-74f17da23551 tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] [instance: 2b68e95a-1d93-43ee-b0a6-996c9042f5c7] Extending root virtual disk to 1048576 {{(pid=62519) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1513.945455] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-7ccbfdce-1b88-4dbc-b18d-e24bf3a86e63 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1513.955039] env[62519]: DEBUG oslo_vmware.api [None req-0c4994c0-96a4-4874-90b8-74f17da23551 tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] Waiting for the task: (returnval){ [ 1513.955039] env[62519]: value = "task-1802244" [ 1513.955039] env[62519]: _type = "Task" [ 1513.955039] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1513.964302] env[62519]: DEBUG oslo_vmware.api [None req-0c4994c0-96a4-4874-90b8-74f17da23551 tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] Task: {'id': task-1802244, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1514.033170] env[62519]: DEBUG oslo_concurrency.lockutils [None req-24e41ff3-6996-4210-8329-73785737b570 tempest-ServersTestMultiNic-797441404 tempest-ServersTestMultiNic-797441404-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1514.131997] env[62519]: DEBUG oslo_concurrency.lockutils [None req-d2114a68-6388-4c74-8f9c-6fba6cdea6e5 tempest-ServersTestFqdnHostnames-1137700598 tempest-ServersTestFqdnHostnames-1137700598-project-member] Releasing lock "refresh_cache-a59be5e6-2316-4766-933a-4d01dfe4fec1" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1514.132329] env[62519]: DEBUG nova.compute.manager [None req-d2114a68-6388-4c74-8f9c-6fba6cdea6e5 tempest-ServersTestFqdnHostnames-1137700598 tempest-ServersTestFqdnHostnames-1137700598-project-member] [instance: a59be5e6-2316-4766-933a-4d01dfe4fec1] Instance network_info: |[{"id": "883e1605-a678-4428-b758-c717d5d49ce2", "address": "fa:16:3e:5d:b5:47", "network": {"id": "45dcc816-e95d-44c3-8cf1-0e178b840ae4", "bridge": "br-int", "label": "tempest-ServersTestFqdnHostnames-504304918-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9498e9017e174096886dccaf5574a0ab", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "33fdc099-7497-41c1-b40c-1558937132d4", "external-id": "nsx-vlan-transportzone-764", "segmentation_id": 764, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap883e1605-a6", "ovs_interfaceid": "883e1605-a678-4428-b758-c717d5d49ce2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62519) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2004}} [ 1514.132914] env[62519]: DEBUG oslo_concurrency.lockutils [req-64a250ed-185e-4aa9-b5a5-8cd6c5ea053a req-1570309a-09cc-4faf-91ca-0185cc3cc18a service nova] Acquired lock "refresh_cache-a59be5e6-2316-4766-933a-4d01dfe4fec1" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1514.133129] env[62519]: DEBUG nova.network.neutron [req-64a250ed-185e-4aa9-b5a5-8cd6c5ea053a req-1570309a-09cc-4faf-91ca-0185cc3cc18a service nova] [instance: a59be5e6-2316-4766-933a-4d01dfe4fec1] Refreshing network info cache for port 883e1605-a678-4428-b758-c717d5d49ce2 {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1514.134380] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-d2114a68-6388-4c74-8f9c-6fba6cdea6e5 tempest-ServersTestFqdnHostnames-1137700598 tempest-ServersTestFqdnHostnames-1137700598-project-member] [instance: a59be5e6-2316-4766-933a-4d01dfe4fec1] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:5d:b5:47', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 
'33fdc099-7497-41c1-b40c-1558937132d4', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '883e1605-a678-4428-b758-c717d5d49ce2', 'vif_model': 'vmxnet3'}] {{(pid=62519) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1514.142733] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-d2114a68-6388-4c74-8f9c-6fba6cdea6e5 tempest-ServersTestFqdnHostnames-1137700598 tempest-ServersTestFqdnHostnames-1137700598-project-member] Creating folder: Project (9498e9017e174096886dccaf5574a0ab). Parent ref: group-v373567. {{(pid=62519) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1514.144565] env[62519]: DEBUG nova.compute.manager [None req-799af596-e4ed-43b4-bbf9-d5b7ae219998 tempest-ServersAdminNegativeTestJSON-2096466474 tempest-ServersAdminNegativeTestJSON-2096466474-project-member] [instance: c8b7568b-ba07-4f65-818b-f84910209361] Start destroying the instance on the hypervisor. {{(pid=62519) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3166}} [ 1514.144565] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-799af596-e4ed-43b4-bbf9-d5b7ae219998 tempest-ServersAdminNegativeTestJSON-2096466474 tempest-ServersAdminNegativeTestJSON-2096466474-project-member] [instance: c8b7568b-ba07-4f65-818b-f84910209361] Destroying instance {{(pid=62519) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1514.144847] env[62519]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f13f8367-746b-42a5-89bf-924e625f4f22 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1514.147988] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b82b664-01e8-4aa4-82ac-d3223a6245ea {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1514.157195] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-799af596-e4ed-43b4-bbf9-d5b7ae219998 tempest-ServersAdminNegativeTestJSON-2096466474 tempest-ServersAdminNegativeTestJSON-2096466474-project-member] [instance: c8b7568b-ba07-4f65-818b-f84910209361] Powering off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1514.157491] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-38d53450-816e-4164-a788-2c2976e87324 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1514.165037] env[62519]: DEBUG oslo_vmware.api [None req-799af596-e4ed-43b4-bbf9-d5b7ae219998 tempest-ServersAdminNegativeTestJSON-2096466474 tempest-ServersAdminNegativeTestJSON-2096466474-project-member] Waiting for the task: (returnval){ [ 1514.165037] env[62519]: value = "task-1802246" [ 1514.165037] env[62519]: _type = "Task" [ 1514.165037] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1514.169253] env[62519]: DEBUG nova.compute.manager [None req-d66de93a-aada-4f3a-931f-5f8ddbb5f1e6 tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] [instance: 24cb49c8-b2ef-4ede-aea6-6e34081beca1] Starting instance... 
{{(pid=62519) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2441}} [ 1514.172375] env[62519]: INFO nova.virt.vmwareapi.vm_util [None req-d2114a68-6388-4c74-8f9c-6fba6cdea6e5 tempest-ServersTestFqdnHostnames-1137700598 tempest-ServersTestFqdnHostnames-1137700598-project-member] Created folder: Project (9498e9017e174096886dccaf5574a0ab) in parent group-v373567. [ 1514.172572] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-d2114a68-6388-4c74-8f9c-6fba6cdea6e5 tempest-ServersTestFqdnHostnames-1137700598 tempest-ServersTestFqdnHostnames-1137700598-project-member] Creating folder: Instances. Parent ref: group-v373664. {{(pid=62519) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1514.173459] env[62519]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-644a294f-d626-499c-89eb-1f92a8fecbb8 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1514.181911] env[62519]: DEBUG oslo_vmware.api [None req-799af596-e4ed-43b4-bbf9-d5b7ae219998 tempest-ServersAdminNegativeTestJSON-2096466474 tempest-ServersAdminNegativeTestJSON-2096466474-project-member] Task: {'id': task-1802246, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1514.190094] env[62519]: INFO nova.virt.vmwareapi.vm_util [None req-d2114a68-6388-4c74-8f9c-6fba6cdea6e5 tempest-ServersTestFqdnHostnames-1137700598 tempest-ServersTestFqdnHostnames-1137700598-project-member] Created folder: Instances in parent group-v373664. [ 1514.190387] env[62519]: DEBUG oslo.service.loopingcall [None req-d2114a68-6388-4c74-8f9c-6fba6cdea6e5 tempest-ServersTestFqdnHostnames-1137700598 tempest-ServersTestFqdnHostnames-1137700598-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62519) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1514.190639] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a59be5e6-2316-4766-933a-4d01dfe4fec1] Creating VM on the ESX host {{(pid=62519) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1514.190862] env[62519]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-678fba76-077d-48bf-8139-69eb88564013 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1514.218100] env[62519]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1514.218100] env[62519]: value = "task-1802248" [ 1514.218100] env[62519]: _type = "Task" [ 1514.218100] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1514.232933] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1802248, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1514.272408] env[62519]: ERROR nova.scheduler.client.report [None req-384607cc-9c11-4f58-8b9f-4201a167011b tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] [req-10fd39cf-d889-4fc2-8142-d636cd9c0435] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 157, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID f8ca0d98-9158-4b85-ae0e-b106f966dd44. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-10fd39cf-d889-4fc2-8142-d636cd9c0435"}]} [ 1514.291684] env[62519]: DEBUG nova.scheduler.client.report [None req-384607cc-9c11-4f58-8b9f-4201a167011b tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Refreshing inventories for resource provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 {{(pid=62519) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 1514.308782] env[62519]: DEBUG nova.scheduler.client.report [None req-384607cc-9c11-4f58-8b9f-4201a167011b tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Updating ProviderTree inventory for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 1514.308924] env[62519]: DEBUG nova.compute.provider_tree [None req-384607cc-9c11-4f58-8b9f-4201a167011b tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Updating inventory in ProviderTree for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1514.322027] env[62519]: DEBUG nova.scheduler.client.report [None req-384607cc-9c11-4f58-8b9f-4201a167011b tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Refreshing aggregate associations for resource provider f8ca0d98-9158-4b85-ae0e-b106f966dd44, aggregates: None {{(pid=62519) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 1514.344028] env[62519]: DEBUG nova.scheduler.client.report [None req-384607cc-9c11-4f58-8b9f-4201a167011b tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Refreshing trait associations for resource provider 
f8ca0d98-9158-4b85-ae0e-b106f966dd44, traits: COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NODE,COMPUTE_SAME_HOST_COLD_MIGRATE,HW_ARCH_X86_64,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_VMDK {{(pid=62519) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 1514.467822] env[62519]: DEBUG oslo_vmware.api [None req-0c4994c0-96a4-4874-90b8-74f17da23551 tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] Task: {'id': task-1802244, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.070039} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1514.468185] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-0c4994c0-96a4-4874-90b8-74f17da23551 tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] [instance: 2b68e95a-1d93-43ee-b0a6-996c9042f5c7] Extended root virtual disk {{(pid=62519) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1514.469042] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec88d116-14ed-42ca-a2bf-9f618e835f51 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1514.494925] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-0c4994c0-96a4-4874-90b8-74f17da23551 tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] [instance: 2b68e95a-1d93-43ee-b0a6-996c9042f5c7] Reconfiguring VM instance instance-00000021 to attach disk [datastore1] 2b68e95a-1d93-43ee-b0a6-996c9042f5c7/2b68e95a-1d93-43ee-b0a6-996c9042f5c7.vmdk or device None with type sparse {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1514.497867] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-707a50d9-7284-4891-85cd-ab8f19f355b2 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1514.520188] env[62519]: DEBUG oslo_vmware.api [None req-0c4994c0-96a4-4874-90b8-74f17da23551 tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] Waiting for the task: (returnval){ [ 1514.520188] env[62519]: value = "task-1802249" [ 1514.520188] env[62519]: _type = "Task" [ 1514.520188] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1514.534408] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-84de915f-3960-4f49-9d37-c92acb68e404 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] [instance: 51eaac08-75fd-49f9-9b1a-cc2a2d799634] Powering off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1514.534704] env[62519]: DEBUG oslo_vmware.api [None req-0c4994c0-96a4-4874-90b8-74f17da23551 tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] Task: {'id': task-1802249, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1514.534927] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-4d55d983-adad-46f4-813d-3ea743ba1985 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1514.543166] env[62519]: DEBUG oslo_vmware.api [None req-84de915f-3960-4f49-9d37-c92acb68e404 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Waiting for the task: (returnval){ [ 1514.543166] env[62519]: value = "task-1802250" [ 1514.543166] env[62519]: _type = "Task" [ 1514.543166] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1514.553738] env[62519]: DEBUG oslo_vmware.api [None req-84de915f-3960-4f49-9d37-c92acb68e404 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Task: {'id': task-1802250, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1514.677323] env[62519]: DEBUG oslo_vmware.api [None req-799af596-e4ed-43b4-bbf9-d5b7ae219998 tempest-ServersAdminNegativeTestJSON-2096466474 tempest-ServersAdminNegativeTestJSON-2096466474-project-member] Task: {'id': task-1802246, 'name': PowerOffVM_Task, 'duration_secs': 0.459854} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1514.681762] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-799af596-e4ed-43b4-bbf9-d5b7ae219998 tempest-ServersAdminNegativeTestJSON-2096466474 tempest-ServersAdminNegativeTestJSON-2096466474-project-member] [instance: c8b7568b-ba07-4f65-818b-f84910209361] Powered off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1514.681947] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-799af596-e4ed-43b4-bbf9-d5b7ae219998 tempest-ServersAdminNegativeTestJSON-2096466474 tempest-ServersAdminNegativeTestJSON-2096466474-project-member] [instance: c8b7568b-ba07-4f65-818b-f84910209361] Unregistering the VM {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1514.682531] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ef5dd1df-a183-4216-8f68-7a13c27a2c2b {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1514.693759] env[62519]: DEBUG oslo_concurrency.lockutils [None req-d66de93a-aada-4f3a-931f-5f8ddbb5f1e6 tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1514.735972] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1802248, 'name': CreateVM_Task, 'duration_secs': 0.493605} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1514.736210] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a59be5e6-2316-4766-933a-4d01dfe4fec1] Created VM on the ESX host {{(pid=62519) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1514.737218] env[62519]: DEBUG oslo_concurrency.lockutils [None req-d2114a68-6388-4c74-8f9c-6fba6cdea6e5 tempest-ServersTestFqdnHostnames-1137700598 tempest-ServersTestFqdnHostnames-1137700598-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1514.737394] env[62519]: DEBUG oslo_concurrency.lockutils [None req-d2114a68-6388-4c74-8f9c-6fba6cdea6e5 tempest-ServersTestFqdnHostnames-1137700598 tempest-ServersTestFqdnHostnames-1137700598-project-member] Acquired lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1514.738027] env[62519]: DEBUG oslo_concurrency.lockutils [None req-d2114a68-6388-4c74-8f9c-6fba6cdea6e5 tempest-ServersTestFqdnHostnames-1137700598 tempest-ServersTestFqdnHostnames-1137700598-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1514.738600] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-48aedd36-ebee-4d3c-bc17-5c859456ae07 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1514.746597] env[62519]: DEBUG oslo_vmware.api [None req-d2114a68-6388-4c74-8f9c-6fba6cdea6e5 tempest-ServersTestFqdnHostnames-1137700598 tempest-ServersTestFqdnHostnames-1137700598-project-member] Waiting for the task: (returnval){ [ 1514.746597] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]5268955e-2995-63e9-3839-5f0ec89bb3e2" [ 1514.746597] env[62519]: _type = "Task" [ 1514.746597] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1514.759209] env[62519]: DEBUG oslo_vmware.api [None req-d2114a68-6388-4c74-8f9c-6fba6cdea6e5 tempest-ServersTestFqdnHostnames-1137700598 tempest-ServersTestFqdnHostnames-1137700598-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]5268955e-2995-63e9-3839-5f0ec89bb3e2, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1514.770049] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-799af596-e4ed-43b4-bbf9-d5b7ae219998 tempest-ServersAdminNegativeTestJSON-2096466474 tempest-ServersAdminNegativeTestJSON-2096466474-project-member] [instance: c8b7568b-ba07-4f65-818b-f84910209361] Unregistered the VM {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1514.770049] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-799af596-e4ed-43b4-bbf9-d5b7ae219998 tempest-ServersAdminNegativeTestJSON-2096466474 tempest-ServersAdminNegativeTestJSON-2096466474-project-member] [instance: c8b7568b-ba07-4f65-818b-f84910209361] Deleting contents of the VM from datastore datastore1 {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1514.770049] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-799af596-e4ed-43b4-bbf9-d5b7ae219998 tempest-ServersAdminNegativeTestJSON-2096466474 tempest-ServersAdminNegativeTestJSON-2096466474-project-member] Deleting the datastore file [datastore1] c8b7568b-ba07-4f65-818b-f84910209361 {{(pid=62519) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1514.770290] env[62519]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d2ea7313-7044-469a-ac37-0089f16624b4 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1514.779985] env[62519]: DEBUG oslo_vmware.api [None req-799af596-e4ed-43b4-bbf9-d5b7ae219998 tempest-ServersAdminNegativeTestJSON-2096466474 tempest-ServersAdminNegativeTestJSON-2096466474-project-member] Waiting for the task: (returnval){ [ 1514.779985] env[62519]: value = "task-1802252" [ 1514.779985] env[62519]: _type = "Task" [ 1514.779985] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1514.789437] env[62519]: DEBUG oslo_vmware.api [None req-799af596-e4ed-43b4-bbf9-d5b7ae219998 tempest-ServersAdminNegativeTestJSON-2096466474 tempest-ServersAdminNegativeTestJSON-2096466474-project-member] Task: {'id': task-1802252, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1514.948467] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07ec328d-a520-4740-a9d3-c01a2aa98e10 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1514.956924] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ad98a43-b6a8-4c97-80b7-0b0c131fdfd1 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1514.992176] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a85f73ef-73e3-4858-8b99-7f1a07622f66 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1515.001987] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37875117-5f98-4b9a-a7ed-219752292633 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1515.020802] env[62519]: DEBUG nova.compute.provider_tree [None req-384607cc-9c11-4f58-8b9f-4201a167011b tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Updating inventory in ProviderTree for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 157, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1515.029607] env[62519]: DEBUG oslo_vmware.api [None req-0c4994c0-96a4-4874-90b8-74f17da23551 tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] Task: {'id': task-1802249, 'name': ReconfigVM_Task, 'duration_secs': 0.404264} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1515.031752] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-0c4994c0-96a4-4874-90b8-74f17da23551 tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] [instance: 2b68e95a-1d93-43ee-b0a6-996c9042f5c7] Reconfigured VM instance instance-00000021 to attach disk [datastore1] 2b68e95a-1d93-43ee-b0a6-996c9042f5c7/2b68e95a-1d93-43ee-b0a6-996c9042f5c7.vmdk or device None with type sparse {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1515.031752] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-0e93b68a-bf63-40a1-b0ad-7fdacdf8496f {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1515.039214] env[62519]: DEBUG oslo_vmware.api [None req-0c4994c0-96a4-4874-90b8-74f17da23551 tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] Waiting for the task: (returnval){ [ 1515.039214] env[62519]: value = "task-1802253" [ 1515.039214] env[62519]: _type = "Task" [ 1515.039214] env[62519]: } to complete. 
{{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1515.051186] env[62519]: DEBUG oslo_vmware.api [None req-0c4994c0-96a4-4874-90b8-74f17da23551 tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] Task: {'id': task-1802253, 'name': Rename_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1515.055119] env[62519]: DEBUG oslo_vmware.api [None req-84de915f-3960-4f49-9d37-c92acb68e404 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Task: {'id': task-1802250, 'name': PowerOffVM_Task, 'duration_secs': 0.220459} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1515.056370] env[62519]: DEBUG nova.network.neutron [req-64a250ed-185e-4aa9-b5a5-8cd6c5ea053a req-1570309a-09cc-4faf-91ca-0185cc3cc18a service nova] [instance: a59be5e6-2316-4766-933a-4d01dfe4fec1] Updated VIF entry in instance network info cache for port 883e1605-a678-4428-b758-c717d5d49ce2. {{(pid=62519) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1515.056370] env[62519]: DEBUG nova.network.neutron [req-64a250ed-185e-4aa9-b5a5-8cd6c5ea053a req-1570309a-09cc-4faf-91ca-0185cc3cc18a service nova] [instance: a59be5e6-2316-4766-933a-4d01dfe4fec1] Updating instance_info_cache with network_info: [{"id": "883e1605-a678-4428-b758-c717d5d49ce2", "address": "fa:16:3e:5d:b5:47", "network": {"id": "45dcc816-e95d-44c3-8cf1-0e178b840ae4", "bridge": "br-int", "label": "tempest-ServersTestFqdnHostnames-504304918-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9498e9017e174096886dccaf5574a0ab", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "33fdc099-7497-41c1-b40c-1558937132d4", "external-id": "nsx-vlan-transportzone-764", "segmentation_id": 764, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap883e1605-a6", "ovs_interfaceid": "883e1605-a678-4428-b758-c717d5d49ce2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1515.057641] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-84de915f-3960-4f49-9d37-c92acb68e404 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] [instance: 51eaac08-75fd-49f9-9b1a-cc2a2d799634] Powered off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1515.057852] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-84de915f-3960-4f49-9d37-c92acb68e404 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] [instance: 51eaac08-75fd-49f9-9b1a-cc2a2d799634] Destroying instance {{(pid=62519) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1515.058806] env[62519]: DEBUG oslo_vmware.service 
[-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fde600cc-a7b6-4d52-ac1e-3007662eb736 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1515.066465] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-84de915f-3960-4f49-9d37-c92acb68e404 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] [instance: 51eaac08-75fd-49f9-9b1a-cc2a2d799634] Unregistering the VM {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1515.067041] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-4fd310b3-aaa4-4173-820c-3f43b6549829 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1515.147600] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-84de915f-3960-4f49-9d37-c92acb68e404 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] [instance: 51eaac08-75fd-49f9-9b1a-cc2a2d799634] Unregistered the VM {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1515.147861] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-84de915f-3960-4f49-9d37-c92acb68e404 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] [instance: 51eaac08-75fd-49f9-9b1a-cc2a2d799634] Deleting contents of the VM from datastore datastore1 {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1515.148081] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-84de915f-3960-4f49-9d37-c92acb68e404 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Deleting the datastore file [datastore1] 51eaac08-75fd-49f9-9b1a-cc2a2d799634 {{(pid=62519) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1515.148344] env[62519]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-3fffbf32-cd52-4e99-a756-71c331bd32bc {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1515.157377] env[62519]: DEBUG oslo_vmware.api [None req-84de915f-3960-4f49-9d37-c92acb68e404 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Waiting for the task: (returnval){ [ 1515.157377] env[62519]: value = "task-1802255" [ 1515.157377] env[62519]: _type = "Task" [ 1515.157377] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1515.166643] env[62519]: DEBUG oslo_vmware.api [None req-84de915f-3960-4f49-9d37-c92acb68e404 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Task: {'id': task-1802255, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1515.258339] env[62519]: DEBUG oslo_vmware.api [None req-d2114a68-6388-4c74-8f9c-6fba6cdea6e5 tempest-ServersTestFqdnHostnames-1137700598 tempest-ServersTestFqdnHostnames-1137700598-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]5268955e-2995-63e9-3839-5f0ec89bb3e2, 'name': SearchDatastore_Task, 'duration_secs': 0.014155} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1515.258661] env[62519]: DEBUG oslo_concurrency.lockutils [None req-d2114a68-6388-4c74-8f9c-6fba6cdea6e5 tempest-ServersTestFqdnHostnames-1137700598 tempest-ServersTestFqdnHostnames-1137700598-project-member] Releasing lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1515.258891] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-d2114a68-6388-4c74-8f9c-6fba6cdea6e5 tempest-ServersTestFqdnHostnames-1137700598 tempest-ServersTestFqdnHostnames-1137700598-project-member] [instance: a59be5e6-2316-4766-933a-4d01dfe4fec1] Processing image 15793716-f1d9-4a86-9030-717adf498693 {{(pid=62519) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1515.259324] env[62519]: DEBUG oslo_concurrency.lockutils [None req-d2114a68-6388-4c74-8f9c-6fba6cdea6e5 tempest-ServersTestFqdnHostnames-1137700598 tempest-ServersTestFqdnHostnames-1137700598-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1515.259324] env[62519]: DEBUG oslo_concurrency.lockutils [None req-d2114a68-6388-4c74-8f9c-6fba6cdea6e5 tempest-ServersTestFqdnHostnames-1137700598 tempest-ServersTestFqdnHostnames-1137700598-project-member] Acquired lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1515.259440] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-d2114a68-6388-4c74-8f9c-6fba6cdea6e5 tempest-ServersTestFqdnHostnames-1137700598 tempest-ServersTestFqdnHostnames-1137700598-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62519) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1515.259685] env[62519]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d4bda179-165e-45cb-bcd4-3ad38ad3d3fc {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1515.269704] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-d2114a68-6388-4c74-8f9c-6fba6cdea6e5 tempest-ServersTestFqdnHostnames-1137700598 tempest-ServersTestFqdnHostnames-1137700598-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62519) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1515.269892] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-d2114a68-6388-4c74-8f9c-6fba6cdea6e5 tempest-ServersTestFqdnHostnames-1137700598 tempest-ServersTestFqdnHostnames-1137700598-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62519) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1515.270733] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-11af3c14-6d77-4e54-949b-8b43f3baf992 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1515.276562] env[62519]: DEBUG oslo_vmware.api [None req-d2114a68-6388-4c74-8f9c-6fba6cdea6e5 tempest-ServersTestFqdnHostnames-1137700598 tempest-ServersTestFqdnHostnames-1137700598-project-member] Waiting for the task: (returnval){ [ 1515.276562] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]52d421a1-2e0a-ea48-f817-102e89157c6f" [ 1515.276562] env[62519]: _type = "Task" [ 1515.276562] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1515.286936] env[62519]: DEBUG oslo_vmware.api [None req-d2114a68-6388-4c74-8f9c-6fba6cdea6e5 tempest-ServersTestFqdnHostnames-1137700598 tempest-ServersTestFqdnHostnames-1137700598-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52d421a1-2e0a-ea48-f817-102e89157c6f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1515.292037] env[62519]: DEBUG oslo_vmware.api [None req-799af596-e4ed-43b4-bbf9-d5b7ae219998 tempest-ServersAdminNegativeTestJSON-2096466474 tempest-ServersAdminNegativeTestJSON-2096466474-project-member] Task: {'id': task-1802252, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.154009} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1515.292279] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-799af596-e4ed-43b4-bbf9-d5b7ae219998 tempest-ServersAdminNegativeTestJSON-2096466474 tempest-ServersAdminNegativeTestJSON-2096466474-project-member] Deleted the datastore file {{(pid=62519) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1515.292456] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-799af596-e4ed-43b4-bbf9-d5b7ae219998 tempest-ServersAdminNegativeTestJSON-2096466474 tempest-ServersAdminNegativeTestJSON-2096466474-project-member] [instance: c8b7568b-ba07-4f65-818b-f84910209361] Deleted contents of the VM from datastore datastore1 {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1515.292624] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-799af596-e4ed-43b4-bbf9-d5b7ae219998 tempest-ServersAdminNegativeTestJSON-2096466474 tempest-ServersAdminNegativeTestJSON-2096466474-project-member] [instance: c8b7568b-ba07-4f65-818b-f84910209361] Instance destroyed {{(pid=62519) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1515.292792] env[62519]: INFO nova.compute.manager [None req-799af596-e4ed-43b4-bbf9-d5b7ae219998 tempest-ServersAdminNegativeTestJSON-2096466474 tempest-ServersAdminNegativeTestJSON-2096466474-project-member] [instance: c8b7568b-ba07-4f65-818b-f84910209361] Took 1.15 seconds to destroy the instance on the hypervisor. [ 1515.293025] env[62519]: DEBUG oslo.service.loopingcall [None req-799af596-e4ed-43b4-bbf9-d5b7ae219998 tempest-ServersAdminNegativeTestJSON-2096466474 tempest-ServersAdminNegativeTestJSON-2096466474-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62519) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1515.293214] env[62519]: DEBUG nova.compute.manager [-] [instance: c8b7568b-ba07-4f65-818b-f84910209361] Deallocating network for instance {{(pid=62519) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2297}} [ 1515.293295] env[62519]: DEBUG nova.network.neutron [-] [instance: c8b7568b-ba07-4f65-818b-f84910209361] deallocate_for_instance() {{(pid=62519) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1515.550309] env[62519]: DEBUG oslo_vmware.api [None req-0c4994c0-96a4-4874-90b8-74f17da23551 tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] Task: {'id': task-1802253, 'name': Rename_Task, 'duration_secs': 0.168523} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1515.550590] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-0c4994c0-96a4-4874-90b8-74f17da23551 tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] [instance: 2b68e95a-1d93-43ee-b0a6-996c9042f5c7] Powering on the VM {{(pid=62519) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1515.550839] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-576464a1-6214-4262-9fc4-60e49d0e4da6 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1515.559019] env[62519]: DEBUG nova.scheduler.client.report [None req-384607cc-9c11-4f58-8b9f-4201a167011b tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Updated inventory for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 with generation 65 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 157, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 1515.559019] env[62519]: DEBUG nova.compute.provider_tree [None req-384607cc-9c11-4f58-8b9f-4201a167011b tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Updating resource provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 generation from 65 to 66 during operation: update_inventory {{(pid=62519) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1515.559019] env[62519]: DEBUG nova.compute.provider_tree [None req-384607cc-9c11-4f58-8b9f-4201a167011b tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Updating inventory in ProviderTree for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 157, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1515.564604] env[62519]: DEBUG oslo_concurrency.lockutils 
[req-64a250ed-185e-4aa9-b5a5-8cd6c5ea053a req-1570309a-09cc-4faf-91ca-0185cc3cc18a service nova] Releasing lock "refresh_cache-a59be5e6-2316-4766-933a-4d01dfe4fec1" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1515.567514] env[62519]: DEBUG oslo_vmware.api [None req-0c4994c0-96a4-4874-90b8-74f17da23551 tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] Waiting for the task: (returnval){ [ 1515.567514] env[62519]: value = "task-1802256" [ 1515.567514] env[62519]: _type = "Task" [ 1515.567514] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1515.579132] env[62519]: DEBUG oslo_vmware.api [None req-0c4994c0-96a4-4874-90b8-74f17da23551 tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] Task: {'id': task-1802256, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1515.667606] env[62519]: DEBUG oslo_vmware.api [None req-84de915f-3960-4f49-9d37-c92acb68e404 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Task: {'id': task-1802255, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1515.702226] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-e1dc4985-f528-4858-87c1-875c614a127e tempest-VolumesAssistedSnapshotsTest-1756313024 tempest-VolumesAssistedSnapshotsTest-1756313024-project-admin] [instance: 91902e7f-8c15-447b-a3a8-04433434b1b6] Volume attach. Driver type: vmdk {{(pid=62519) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1515.702777] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-e1dc4985-f528-4858-87c1-875c614a127e tempest-VolumesAssistedSnapshotsTest-1756313024 tempest-VolumesAssistedSnapshotsTest-1756313024-project-admin] [instance: 91902e7f-8c15-447b-a3a8-04433434b1b6] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-373663', 'volume_id': 'b3ec9fe2-f38d-46f0-977a-134fc940c266', 'name': 'volume-b3ec9fe2-f38d-46f0-977a-134fc940c266', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '91902e7f-8c15-447b-a3a8-04433434b1b6', 'attached_at': '', 'detached_at': '', 'volume_id': 'b3ec9fe2-f38d-46f0-977a-134fc940c266', 'serial': 'b3ec9fe2-f38d-46f0-977a-134fc940c266'} {{(pid=62519) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1515.703409] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ec5bf0e-a28d-4600-9b05-1e5f87cbbb91 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1515.720635] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1fa868f4-44e4-46d4-80bd-016cb59c3ebb {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1515.749713] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-e1dc4985-f528-4858-87c1-875c614a127e tempest-VolumesAssistedSnapshotsTest-1756313024 tempest-VolumesAssistedSnapshotsTest-1756313024-project-admin] [instance: 91902e7f-8c15-447b-a3a8-04433434b1b6] 
Reconfiguring VM instance instance-0000000f to attach disk [datastore1] volume-b3ec9fe2-f38d-46f0-977a-134fc940c266/volume-b3ec9fe2-f38d-46f0-977a-134fc940c266.vmdk or device None with type thin {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1515.749896] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-33b0097e-1b73-483b-a1a7-91bee912c960 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1515.769885] env[62519]: DEBUG oslo_vmware.api [None req-e1dc4985-f528-4858-87c1-875c614a127e tempest-VolumesAssistedSnapshotsTest-1756313024 tempest-VolumesAssistedSnapshotsTest-1756313024-project-admin] Waiting for the task: (returnval){ [ 1515.769885] env[62519]: value = "task-1802257" [ 1515.769885] env[62519]: _type = "Task" [ 1515.769885] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1515.779100] env[62519]: DEBUG oslo_vmware.api [None req-e1dc4985-f528-4858-87c1-875c614a127e tempest-VolumesAssistedSnapshotsTest-1756313024 tempest-VolumesAssistedSnapshotsTest-1756313024-project-admin] Task: {'id': task-1802257, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1515.788984] env[62519]: DEBUG oslo_vmware.api [None req-d2114a68-6388-4c74-8f9c-6fba6cdea6e5 tempest-ServersTestFqdnHostnames-1137700598 tempest-ServersTestFqdnHostnames-1137700598-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52d421a1-2e0a-ea48-f817-102e89157c6f, 'name': SearchDatastore_Task, 'duration_secs': 0.034464} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1515.789881] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fa2860dc-5c6c-4683-9270-a6267c59eaec {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1515.796357] env[62519]: DEBUG oslo_vmware.api [None req-d2114a68-6388-4c74-8f9c-6fba6cdea6e5 tempest-ServersTestFqdnHostnames-1137700598 tempest-ServersTestFqdnHostnames-1137700598-project-member] Waiting for the task: (returnval){ [ 1515.796357] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]526dacfa-7e84-1c9d-7ab7-dedc36f95f57" [ 1515.796357] env[62519]: _type = "Task" [ 1515.796357] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1515.805905] env[62519]: DEBUG oslo_vmware.api [None req-d2114a68-6388-4c74-8f9c-6fba6cdea6e5 tempest-ServersTestFqdnHostnames-1137700598 tempest-ServersTestFqdnHostnames-1137700598-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]526dacfa-7e84-1c9d-7ab7-dedc36f95f57, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1515.918781] env[62519]: DEBUG nova.compute.manager [req-645560ae-38a8-4612-be67-5ab1f796a7cf req-627ed2e9-bab7-4aa7-95c8-a50dbcf3120b service nova] [instance: c8b7568b-ba07-4f65-818b-f84910209361] Received event network-vif-deleted-e03efe42-da32-408a-a635-c94ee5a55303 {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1515.919335] env[62519]: INFO nova.compute.manager [req-645560ae-38a8-4612-be67-5ab1f796a7cf req-627ed2e9-bab7-4aa7-95c8-a50dbcf3120b service nova] [instance: c8b7568b-ba07-4f65-818b-f84910209361] Neutron deleted interface e03efe42-da32-408a-a635-c94ee5a55303; detaching it from the instance and deleting it from the info cache [ 1515.919335] env[62519]: DEBUG nova.network.neutron [req-645560ae-38a8-4612-be67-5ab1f796a7cf req-627ed2e9-bab7-4aa7-95c8-a50dbcf3120b service nova] [instance: c8b7568b-ba07-4f65-818b-f84910209361] Updating instance_info_cache with network_info: [] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1516.055725] env[62519]: DEBUG nova.network.neutron [-] [instance: c8b7568b-ba07-4f65-818b-f84910209361] Updating instance_info_cache with network_info: [] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1516.069060] env[62519]: DEBUG oslo_concurrency.lockutils [None req-384607cc-9c11-4f58-8b9f-4201a167011b tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 4.171s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1516.069632] env[62519]: DEBUG nova.compute.manager [None req-384607cc-9c11-4f58-8b9f-4201a167011b tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] [instance: 3b506d10-a427-47b8-ab5f-c35e450b7eb1] Start building networks asynchronously for instance. {{(pid=62519) _build_resources /opt/stack/nova/nova/compute/manager.py:2838}} [ 1516.072508] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 26.557s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1516.085613] env[62519]: DEBUG oslo_vmware.api [None req-0c4994c0-96a4-4874-90b8-74f17da23551 tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] Task: {'id': task-1802256, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1516.170393] env[62519]: DEBUG oslo_vmware.api [None req-84de915f-3960-4f49-9d37-c92acb68e404 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Task: {'id': task-1802255, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.679322} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1516.170766] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-84de915f-3960-4f49-9d37-c92acb68e404 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Deleted the datastore file {{(pid=62519) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1516.170994] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-84de915f-3960-4f49-9d37-c92acb68e404 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] [instance: 51eaac08-75fd-49f9-9b1a-cc2a2d799634] Deleted contents of the VM from datastore datastore1 {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1516.171231] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-84de915f-3960-4f49-9d37-c92acb68e404 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] [instance: 51eaac08-75fd-49f9-9b1a-cc2a2d799634] Instance destroyed {{(pid=62519) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1516.282897] env[62519]: DEBUG oslo_vmware.api [None req-e1dc4985-f528-4858-87c1-875c614a127e tempest-VolumesAssistedSnapshotsTest-1756313024 tempest-VolumesAssistedSnapshotsTest-1756313024-project-admin] Task: {'id': task-1802257, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1516.310026] env[62519]: DEBUG oslo_vmware.api [None req-d2114a68-6388-4c74-8f9c-6fba6cdea6e5 tempest-ServersTestFqdnHostnames-1137700598 tempest-ServersTestFqdnHostnames-1137700598-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]526dacfa-7e84-1c9d-7ab7-dedc36f95f57, 'name': SearchDatastore_Task, 'duration_secs': 0.013046} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1516.310026] env[62519]: DEBUG oslo_concurrency.lockutils [None req-d2114a68-6388-4c74-8f9c-6fba6cdea6e5 tempest-ServersTestFqdnHostnames-1137700598 tempest-ServersTestFqdnHostnames-1137700598-project-member] Releasing lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1516.310026] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-d2114a68-6388-4c74-8f9c-6fba6cdea6e5 tempest-ServersTestFqdnHostnames-1137700598 tempest-ServersTestFqdnHostnames-1137700598-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk to [datastore1] a59be5e6-2316-4766-933a-4d01dfe4fec1/a59be5e6-2316-4766-933a-4d01dfe4fec1.vmdk {{(pid=62519) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1516.310026] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d5429b46-ea9c-4bda-badd-9651133856df {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1516.324552] env[62519]: DEBUG oslo_vmware.api [None req-d2114a68-6388-4c74-8f9c-6fba6cdea6e5 tempest-ServersTestFqdnHostnames-1137700598 tempest-ServersTestFqdnHostnames-1137700598-project-member] Waiting for the task: (returnval){ [ 1516.324552] env[62519]: value = "task-1802258" [ 1516.324552] env[62519]: _type = "Task" [ 1516.324552] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1516.333427] env[62519]: DEBUG oslo_vmware.api [None req-d2114a68-6388-4c74-8f9c-6fba6cdea6e5 tempest-ServersTestFqdnHostnames-1137700598 tempest-ServersTestFqdnHostnames-1137700598-project-member] Task: {'id': task-1802258, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1516.425113] env[62519]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-35b36ffe-1f5a-41ce-89bd-291817299c46 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1516.436246] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b378d763-5a78-4dd1-b1b0-8312089a6291 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1516.486521] env[62519]: DEBUG nova.compute.manager [req-645560ae-38a8-4612-be67-5ab1f796a7cf req-627ed2e9-bab7-4aa7-95c8-a50dbcf3120b service nova] [instance: c8b7568b-ba07-4f65-818b-f84910209361] Detach interface failed, port_id=e03efe42-da32-408a-a635-c94ee5a55303, reason: Instance c8b7568b-ba07-4f65-818b-f84910209361 could not be found. {{(pid=62519) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11455}} [ 1516.558618] env[62519]: INFO nova.compute.manager [-] [instance: c8b7568b-ba07-4f65-818b-f84910209361] Took 1.27 seconds to deallocate network for instance. 
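The "Acquiring lock \"compute_resources\" ...", "Lock \"compute_resources\" acquired ... :: waited 26.557s" and "released ... :: held 4.171s" entries around here come from oslo.concurrency's lockutils serializing the resource tracker on a single named lock. The following is a minimal sketch of that pattern only, assuming nothing about Nova's real ResourceTracker; ToyResourceTracker, used_vcpus and instance_claim's argument are illustrative placeholders, and the DEBUG waited/held messages are emitted by lockutils itself, not by this code.

from oslo_concurrency import lockutils


class ToyResourceTracker:
    """Serializes claims on one named lock, as the log's ResourceTracker does."""

    def __init__(self):
        self.used_vcpus = 0

    @lockutils.synchronized('compute_resources')
    def instance_claim(self, vcpus):
        # Only one caller at a time executes this body; lockutils logs the
        # DEBUG "acquired ... waited Ns" / "released ... held Ns" lines
        # seen in the surrounding entries.
        self.used_vcpus += vcpus
        return self.used_vcpus


tracker = ToyResourceTracker()
print(tracker.instance_claim(2))  # -> 2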
[ 1516.586961] env[62519]: DEBUG nova.compute.utils [None req-384607cc-9c11-4f58-8b9f-4201a167011b tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Using /dev/sd instead of None {{(pid=62519) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1516.603279] env[62519]: DEBUG oslo_vmware.api [None req-0c4994c0-96a4-4874-90b8-74f17da23551 tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] Task: {'id': task-1802256, 'name': PowerOnVM_Task, 'duration_secs': 0.796645} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1516.603934] env[62519]: DEBUG nova.compute.manager [None req-384607cc-9c11-4f58-8b9f-4201a167011b tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] [instance: 3b506d10-a427-47b8-ab5f-c35e450b7eb1] Allocating IP information in the background. {{(pid=62519) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1989}} [ 1516.604958] env[62519]: DEBUG nova.network.neutron [None req-384607cc-9c11-4f58-8b9f-4201a167011b tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] [instance: 3b506d10-a427-47b8-ab5f-c35e450b7eb1] allocate_for_instance() {{(pid=62519) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1516.607365] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-0c4994c0-96a4-4874-90b8-74f17da23551 tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] [instance: 2b68e95a-1d93-43ee-b0a6-996c9042f5c7] Powered on the VM {{(pid=62519) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1516.607580] env[62519]: INFO nova.compute.manager [None req-0c4994c0-96a4-4874-90b8-74f17da23551 tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] [instance: 2b68e95a-1d93-43ee-b0a6-996c9042f5c7] Took 7.57 seconds to spawn the instance on the hypervisor. 
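The PowerOnVM_Task entries above follow the same invoke-and-wait shape, this time against the VirtualMachine object itself, and are immediately followed by a "Checking state" read of the power state. A hedged sketch follows, assuming a session like the one above and a vm_ref managed-object reference obtained elsewhere (for example via a UUID search); the exact property-read helper name is recalled from oslo.vmware and should be treated as an assumption, not a quotation of Nova's code.

# Hedged sketch of the PowerOnVM_Task / "Checking state" sequence above,
# assuming `session` (VMwareAPISession) and `vm_ref` (a VirtualMachine moref
# located elsewhere) already exist.
from oslo_vmware import vim_util

def power_on_and_check(session, vm_ref):
    # PowerOnVM_Task is invoked on the VM itself and returns a task moref;
    # wait_for_task() blocks until vCenter reports success or error.
    task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
    session.wait_for_task(task)

    # The subsequent "Checking state" entries read runtime.powerState back;
    # get_object_property is the oslo.vmware helper for a single property
    # (assumed name/signature).
    return session.invoke_api(vim_util, 'get_object_property',
                              session.vim, vm_ref, 'runtime.powerState')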
[ 1516.607580] env[62519]: DEBUG nova.compute.manager [None req-0c4994c0-96a4-4874-90b8-74f17da23551 tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] [instance: 2b68e95a-1d93-43ee-b0a6-996c9042f5c7] Checking state {{(pid=62519) _get_power_state /opt/stack/nova/nova/compute/manager.py:1799}} [ 1516.609746] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ba8976f-4465-4ea3-a372-36fa9e6895a6 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1516.701966] env[62519]: DEBUG nova.policy [None req-384607cc-9c11-4f58-8b9f-4201a167011b tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c9b4ac19e10d4abdb7d7e54e5d093a8a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'a0755b34e22d4478817ec4e2d57aac2a', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62519) authorize /opt/stack/nova/nova/policy.py:192}} [ 1516.783426] env[62519]: DEBUG oslo_vmware.api [None req-e1dc4985-f528-4858-87c1-875c614a127e tempest-VolumesAssistedSnapshotsTest-1756313024 tempest-VolumesAssistedSnapshotsTest-1756313024-project-admin] Task: {'id': task-1802257, 'name': ReconfigVM_Task, 'duration_secs': 0.622414} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1516.783810] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-e1dc4985-f528-4858-87c1-875c614a127e tempest-VolumesAssistedSnapshotsTest-1756313024 tempest-VolumesAssistedSnapshotsTest-1756313024-project-admin] [instance: 91902e7f-8c15-447b-a3a8-04433434b1b6] Reconfigured VM instance instance-0000000f to attach disk [datastore1] volume-b3ec9fe2-f38d-46f0-977a-134fc940c266/volume-b3ec9fe2-f38d-46f0-977a-134fc940c266.vmdk or device None with type thin {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1516.789712] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1e4ea313-ac5b-432c-9ca4-27371e0b68d4 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1516.809043] env[62519]: DEBUG oslo_vmware.api [None req-e1dc4985-f528-4858-87c1-875c614a127e tempest-VolumesAssistedSnapshotsTest-1756313024 tempest-VolumesAssistedSnapshotsTest-1756313024-project-admin] Waiting for the task: (returnval){ [ 1516.809043] env[62519]: value = "task-1802259" [ 1516.809043] env[62519]: _type = "Task" [ 1516.809043] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1516.818904] env[62519]: DEBUG oslo_vmware.api [None req-e1dc4985-f528-4858-87c1-875c614a127e tempest-VolumesAssistedSnapshotsTest-1756313024 tempest-VolumesAssistedSnapshotsTest-1756313024-project-admin] Task: {'id': task-1802259, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1516.835244] env[62519]: DEBUG oslo_vmware.api [None req-d2114a68-6388-4c74-8f9c-6fba6cdea6e5 tempest-ServersTestFqdnHostnames-1137700598 tempest-ServersTestFqdnHostnames-1137700598-project-member] Task: {'id': task-1802258, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.4894} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1516.835677] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-d2114a68-6388-4c74-8f9c-6fba6cdea6e5 tempest-ServersTestFqdnHostnames-1137700598 tempest-ServersTestFqdnHostnames-1137700598-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk to [datastore1] a59be5e6-2316-4766-933a-4d01dfe4fec1/a59be5e6-2316-4766-933a-4d01dfe4fec1.vmdk {{(pid=62519) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1516.835769] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-d2114a68-6388-4c74-8f9c-6fba6cdea6e5 tempest-ServersTestFqdnHostnames-1137700598 tempest-ServersTestFqdnHostnames-1137700598-project-member] [instance: a59be5e6-2316-4766-933a-4d01dfe4fec1] Extending root virtual disk to 1048576 {{(pid=62519) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1516.835978] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-c48e1c57-eb74-4921-9b26-aac069326c5d {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1516.843564] env[62519]: DEBUG oslo_vmware.api [None req-d2114a68-6388-4c74-8f9c-6fba6cdea6e5 tempest-ServersTestFqdnHostnames-1137700598 tempest-ServersTestFqdnHostnames-1137700598-project-member] Waiting for the task: (returnval){ [ 1516.843564] env[62519]: value = "task-1802260" [ 1516.843564] env[62519]: _type = "Task" [ 1516.843564] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1516.852085] env[62519]: DEBUG oslo_vmware.api [None req-d2114a68-6388-4c74-8f9c-6fba6cdea6e5 tempest-ServersTestFqdnHostnames-1137700598 tempest-ServersTestFqdnHostnames-1137700598-project-member] Task: {'id': task-1802260, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1517.033402] env[62519]: DEBUG nova.network.neutron [None req-384607cc-9c11-4f58-8b9f-4201a167011b tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] [instance: 3b506d10-a427-47b8-ab5f-c35e450b7eb1] Successfully created port: cd903c9d-5093-4f0a-9439-683a3b25eef0 {{(pid=62519) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1517.065633] env[62519]: DEBUG oslo_concurrency.lockutils [None req-799af596-e4ed-43b4-bbf9-d5b7ae219998 tempest-ServersAdminNegativeTestJSON-2096466474 tempest-ServersAdminNegativeTestJSON-2096466474-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1517.104576] env[62519]: DEBUG nova.compute.manager [None req-384607cc-9c11-4f58-8b9f-4201a167011b tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] [instance: 3b506d10-a427-47b8-ab5f-c35e450b7eb1] Start building block device mappings for instance. {{(pid=62519) _build_resources /opt/stack/nova/nova/compute/manager.py:2873}} [ 1517.135359] env[62519]: INFO nova.compute.manager [None req-0c4994c0-96a4-4874-90b8-74f17da23551 tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] [instance: 2b68e95a-1d93-43ee-b0a6-996c9042f5c7] Took 46.33 seconds to build instance. [ 1517.141346] env[62519]: WARNING nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Instance cfefa7c8-4986-4ad0-ac20-8784ee44a737 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. 
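Nearly every step in this stretch is bracketed by oslo.concurrency lock bookkeeping: "Acquiring lock ... by ...", "acquired ... waited 0.000s", "released ... held N.NNNs", for names such as "compute_resources" and the image-cache datastore paths. The two usual lockutils patterns behind those lines are sketched below with placeholder lock bodies; this is generic library usage, not Nova's code.

# Minimal sketch of the oslo.concurrency locking patterns that produce the
# "Acquiring lock / acquired ... waited / released ... held" lines above.
# Lock names match the log; the guarded work is a placeholder.
from oslo_concurrency import lockutils

# 1) Context-manager form, used around short critical sections.
def update_usage_example():
    with lockutils.lock('compute_resources'):
        pass  # mutate tracker state while holding the in-process lock

# 2) Decorator form; external=True would additionally take a file lock so
#    separate processes on the same host serialize as well.
@lockutils.synchronized('compute_resources')
def instance_claim_example():
    pass  # runs with the named lock held

if __name__ == '__main__':
    update_usage_example()
    instance_claim_example()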
[ 1517.221136] env[62519]: DEBUG nova.virt.hardware [None req-84de915f-3960-4f49-9d37-c92acb68e404 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T07:56:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T07:55:55Z,direct_url=,disk_format='vmdk',id=15793716-f1d9-4a86-9030-717adf498693,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4069337684d348e0a1ab4eb6a1a2b14d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T07:55:56Z,virtual_size=,visibility=), allow threads: False {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1517.221390] env[62519]: DEBUG nova.virt.hardware [None req-84de915f-3960-4f49-9d37-c92acb68e404 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Flavor limits 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1517.221543] env[62519]: DEBUG nova.virt.hardware [None req-84de915f-3960-4f49-9d37-c92acb68e404 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Image limits 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1517.221720] env[62519]: DEBUG nova.virt.hardware [None req-84de915f-3960-4f49-9d37-c92acb68e404 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Flavor pref 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1517.221863] env[62519]: DEBUG nova.virt.hardware [None req-84de915f-3960-4f49-9d37-c92acb68e404 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Image pref 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1517.222011] env[62519]: DEBUG nova.virt.hardware [None req-84de915f-3960-4f49-9d37-c92acb68e404 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1517.222220] env[62519]: DEBUG nova.virt.hardware [None req-84de915f-3960-4f49-9d37-c92acb68e404 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1517.222377] env[62519]: DEBUG nova.virt.hardware [None req-84de915f-3960-4f49-9d37-c92acb68e404 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62519) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1517.222538] env[62519]: DEBUG nova.virt.hardware [None req-84de915f-3960-4f49-9d37-c92acb68e404 
tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Got 1 possible topologies {{(pid=62519) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1517.222698] env[62519]: DEBUG nova.virt.hardware [None req-84de915f-3960-4f49-9d37-c92acb68e404 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1517.222864] env[62519]: DEBUG nova.virt.hardware [None req-84de915f-3960-4f49-9d37-c92acb68e404 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1517.223770] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-edf9df09-5ca4-4a21-b859-3d51e41b85ea {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1517.238579] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-459b4b22-0ce9-492c-8140-a58ca1121a30 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1517.254201] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-84de915f-3960-4f49-9d37-c92acb68e404 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] [instance: 51eaac08-75fd-49f9-9b1a-cc2a2d799634] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:87:ce:7c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '2be3fdb5-359e-43bd-8c20-2ff00e81db55', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '8473d03f-ba3c-4b3a-a79f-2eedc6ab90a4', 'vif_model': 'vmxnet3'}] {{(pid=62519) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1517.261861] env[62519]: DEBUG oslo.service.loopingcall [None req-84de915f-3960-4f49-9d37-c92acb68e404 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62519) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1517.262123] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 51eaac08-75fd-49f9-9b1a-cc2a2d799634] Creating VM on the ESX host {{(pid=62519) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1517.262323] env[62519]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-afdcf98a-17a9-496c-b4a2-34dfaa6c0a11 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1517.285590] env[62519]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1517.285590] env[62519]: value = "task-1802261" [ 1517.285590] env[62519]: _type = "Task" [ 1517.285590] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1517.297414] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1802261, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1517.324376] env[62519]: DEBUG oslo_vmware.api [None req-e1dc4985-f528-4858-87c1-875c614a127e tempest-VolumesAssistedSnapshotsTest-1756313024 tempest-VolumesAssistedSnapshotsTest-1756313024-project-admin] Task: {'id': task-1802259, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1517.355445] env[62519]: DEBUG oslo_vmware.api [None req-d2114a68-6388-4c74-8f9c-6fba6cdea6e5 tempest-ServersTestFqdnHostnames-1137700598 tempest-ServersTestFqdnHostnames-1137700598-project-member] Task: {'id': task-1802260, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.106462} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1517.355445] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-d2114a68-6388-4c74-8f9c-6fba6cdea6e5 tempest-ServersTestFqdnHostnames-1137700598 tempest-ServersTestFqdnHostnames-1137700598-project-member] [instance: a59be5e6-2316-4766-933a-4d01dfe4fec1] Extended root virtual disk {{(pid=62519) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1517.356074] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bde5d927-0176-47f5-a7ee-4fabdee8d5a5 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1517.378911] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-d2114a68-6388-4c74-8f9c-6fba6cdea6e5 tempest-ServersTestFqdnHostnames-1137700598 tempest-ServersTestFqdnHostnames-1137700598-project-member] [instance: a59be5e6-2316-4766-933a-4d01dfe4fec1] Reconfiguring VM instance instance-00000022 to attach disk [datastore1] a59be5e6-2316-4766-933a-4d01dfe4fec1/a59be5e6-2316-4766-933a-4d01dfe4fec1.vmdk or device None with type sparse {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1517.379764] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-dab5fc44-e75a-4e57-9fc0-e9b77d72efe3 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1517.401049] env[62519]: DEBUG oslo_vmware.api [None req-d2114a68-6388-4c74-8f9c-6fba6cdea6e5 tempest-ServersTestFqdnHostnames-1137700598 tempest-ServersTestFqdnHostnames-1137700598-project-member] Waiting for the task: (returnval){ [ 1517.401049] env[62519]: value = "task-1802262" [ 1517.401049] env[62519]: _type = "Task" [ 1517.401049] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1517.410291] env[62519]: DEBUG oslo_vmware.api [None req-d2114a68-6388-4c74-8f9c-6fba6cdea6e5 tempest-ServersTestFqdnHostnames-1137700598 tempest-ServersTestFqdnHostnames-1137700598-project-member] Task: {'id': task-1802262, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1517.645503] env[62519]: DEBUG nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Instance 09eefc1a-011b-4d2c-ab75-a1fcee740907 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. 
Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62519) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1517.645503] env[62519]: WARNING nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Instance 42497ab5-cce9-4614-a6d1-dffbf6764d7b is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1517.796555] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1802261, 'name': CreateVM_Task} progress is 99%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1517.820314] env[62519]: DEBUG oslo_vmware.api [None req-e1dc4985-f528-4858-87c1-875c614a127e tempest-VolumesAssistedSnapshotsTest-1756313024 tempest-VolumesAssistedSnapshotsTest-1756313024-project-admin] Task: {'id': task-1802259, 'name': ReconfigVM_Task, 'duration_secs': 0.707031} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1517.821068] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-e1dc4985-f528-4858-87c1-875c614a127e tempest-VolumesAssistedSnapshotsTest-1756313024 tempest-VolumesAssistedSnapshotsTest-1756313024-project-admin] [instance: 91902e7f-8c15-447b-a3a8-04433434b1b6] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-373663', 'volume_id': 'b3ec9fe2-f38d-46f0-977a-134fc940c266', 'name': 'volume-b3ec9fe2-f38d-46f0-977a-134fc940c266', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '91902e7f-8c15-447b-a3a8-04433434b1b6', 'attached_at': '', 'detached_at': '', 'volume_id': 'b3ec9fe2-f38d-46f0-977a-134fc940c266', 'serial': 'b3ec9fe2-f38d-46f0-977a-134fc940c266'} {{(pid=62519) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1517.912028] env[62519]: DEBUG oslo_vmware.api [None req-d2114a68-6388-4c74-8f9c-6fba6cdea6e5 tempest-ServersTestFqdnHostnames-1137700598 tempest-ServersTestFqdnHostnames-1137700598-project-member] Task: {'id': task-1802262, 'name': ReconfigVM_Task, 'duration_secs': 0.50676} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1517.912264] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-d2114a68-6388-4c74-8f9c-6fba6cdea6e5 tempest-ServersTestFqdnHostnames-1137700598 tempest-ServersTestFqdnHostnames-1137700598-project-member] [instance: a59be5e6-2316-4766-933a-4d01dfe4fec1] Reconfigured VM instance instance-00000022 to attach disk [datastore1] a59be5e6-2316-4766-933a-4d01dfe4fec1/a59be5e6-2316-4766-933a-4d01dfe4fec1.vmdk or device None with type sparse {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1517.912891] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-28c01afd-60c4-462f-8866-2103db26234a {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1517.920030] env[62519]: DEBUG oslo_vmware.api [None req-d2114a68-6388-4c74-8f9c-6fba6cdea6e5 tempest-ServersTestFqdnHostnames-1137700598 tempest-ServersTestFqdnHostnames-1137700598-project-member] Waiting for the task: (returnval){ [ 1517.920030] env[62519]: value = "task-1802263" [ 1517.920030] env[62519]: _type = "Task" [ 1517.920030] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1517.930782] env[62519]: DEBUG oslo_vmware.api [None req-d2114a68-6388-4c74-8f9c-6fba6cdea6e5 tempest-ServersTestFqdnHostnames-1137700598 tempest-ServersTestFqdnHostnames-1137700598-project-member] Task: {'id': task-1802263, 'name': Rename_Task} progress is 5%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1518.114754] env[62519]: DEBUG nova.compute.manager [None req-384607cc-9c11-4f58-8b9f-4201a167011b tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] [instance: 3b506d10-a427-47b8-ab5f-c35e450b7eb1] Start spawning the instance on the hypervisor. 
{{(pid=62519) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2647}} [ 1518.139720] env[62519]: DEBUG nova.virt.hardware [None req-384607cc-9c11-4f58-8b9f-4201a167011b tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T07:56:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T07:55:55Z,direct_url=,disk_format='vmdk',id=15793716-f1d9-4a86-9030-717adf498693,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4069337684d348e0a1ab4eb6a1a2b14d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T07:55:56Z,virtual_size=,visibility=), allow threads: False {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1518.139865] env[62519]: DEBUG nova.virt.hardware [None req-384607cc-9c11-4f58-8b9f-4201a167011b tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Flavor limits 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1518.140084] env[62519]: DEBUG nova.virt.hardware [None req-384607cc-9c11-4f58-8b9f-4201a167011b tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Image limits 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1518.140273] env[62519]: DEBUG nova.virt.hardware [None req-384607cc-9c11-4f58-8b9f-4201a167011b tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Flavor pref 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1518.140488] env[62519]: DEBUG nova.virt.hardware [None req-384607cc-9c11-4f58-8b9f-4201a167011b tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Image pref 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1518.140703] env[62519]: DEBUG nova.virt.hardware [None req-384607cc-9c11-4f58-8b9f-4201a167011b tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1518.140906] env[62519]: DEBUG nova.virt.hardware [None req-384607cc-9c11-4f58-8b9f-4201a167011b tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1518.141086] env[62519]: DEBUG nova.virt.hardware [None req-384607cc-9c11-4f58-8b9f-4201a167011b tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62519) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1518.141314] env[62519]: DEBUG nova.virt.hardware [None req-384607cc-9c11-4f58-8b9f-4201a167011b tempest-ImagesTestJSON-1541855264 
tempest-ImagesTestJSON-1541855264-project-member] Got 1 possible topologies {{(pid=62519) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1518.141524] env[62519]: DEBUG nova.virt.hardware [None req-384607cc-9c11-4f58-8b9f-4201a167011b tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1518.141744] env[62519]: DEBUG nova.virt.hardware [None req-384607cc-9c11-4f58-8b9f-4201a167011b tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1518.143891] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dad5a340-f108-453c-81fc-206449bfbbe0 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1518.148559] env[62519]: DEBUG nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Instance 4e3dee19-b99a-4257-88da-1b0531e2c0f9 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62519) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1518.148559] env[62519]: DEBUG nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Instance 1d4b14d3-8832-457e-aaed-462236555f57 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62519) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1518.148559] env[62519]: DEBUG nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Instance 2b68e95a-1d93-43ee-b0a6-996c9042f5c7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62519) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1518.148805] env[62519]: DEBUG nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Instance 11d4a010-959f-4f53-94dc-7499007612ad actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62519) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1518.157749] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81727743-524c-4478-b39d-dd511a47a060 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1518.296574] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1802261, 'name': CreateVM_Task, 'duration_secs': 0.538791} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1518.296824] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 51eaac08-75fd-49f9-9b1a-cc2a2d799634] Created VM on the ESX host {{(pid=62519) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1518.297669] env[62519]: DEBUG oslo_concurrency.lockutils [None req-84de915f-3960-4f49-9d37-c92acb68e404 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1518.297845] env[62519]: DEBUG oslo_concurrency.lockutils [None req-84de915f-3960-4f49-9d37-c92acb68e404 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Acquired lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1518.298235] env[62519]: DEBUG oslo_concurrency.lockutils [None req-84de915f-3960-4f49-9d37-c92acb68e404 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1518.298524] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0b7d6b26-683e-427f-95d9-49924666ef7e {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1518.303424] env[62519]: DEBUG oslo_vmware.api [None req-84de915f-3960-4f49-9d37-c92acb68e404 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Waiting for the task: (returnval){ [ 1518.303424] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]52081846-ff7a-1ebd-ecfc-3ca2c438426a" [ 1518.303424] env[62519]: _type = "Task" [ 1518.303424] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1518.311634] env[62519]: DEBUG oslo_vmware.api [None req-84de915f-3960-4f49-9d37-c92acb68e404 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52081846-ff7a-1ebd-ecfc-3ca2c438426a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1518.431335] env[62519]: DEBUG oslo_vmware.api [None req-d2114a68-6388-4c74-8f9c-6fba6cdea6e5 tempest-ServersTestFqdnHostnames-1137700598 tempest-ServersTestFqdnHostnames-1137700598-project-member] Task: {'id': task-1802263, 'name': Rename_Task, 'duration_secs': 0.274141} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1518.431617] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-d2114a68-6388-4c74-8f9c-6fba6cdea6e5 tempest-ServersTestFqdnHostnames-1137700598 tempest-ServersTestFqdnHostnames-1137700598-project-member] [instance: a59be5e6-2316-4766-933a-4d01dfe4fec1] Powering on the VM {{(pid=62519) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1518.431860] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-51e4dd02-4841-477f-9626-e6a0a4a2bcd3 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1518.439857] env[62519]: DEBUG oslo_vmware.api [None req-d2114a68-6388-4c74-8f9c-6fba6cdea6e5 tempest-ServersTestFqdnHostnames-1137700598 tempest-ServersTestFqdnHostnames-1137700598-project-member] Waiting for the task: (returnval){ [ 1518.439857] env[62519]: value = "task-1802264" [ 1518.439857] env[62519]: _type = "Task" [ 1518.439857] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1518.448099] env[62519]: DEBUG oslo_vmware.api [None req-d2114a68-6388-4c74-8f9c-6fba6cdea6e5 tempest-ServersTestFqdnHostnames-1137700598 tempest-ServersTestFqdnHostnames-1137700598-project-member] Task: {'id': task-1802264, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1518.649132] env[62519]: DEBUG oslo_concurrency.lockutils [None req-0c4994c0-96a4-4874-90b8-74f17da23551 tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] Lock "2b68e95a-1d93-43ee-b0a6-996c9042f5c7" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 96.901s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1518.653120] env[62519]: DEBUG nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Instance 80ef3fd4-b9ef-4fd2-a991-feec78a0c81d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62519) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1518.654160] env[62519]: DEBUG nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Instance 099112ae-569b-4853-bc47-b0b8b97d2525 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62519) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1518.654413] env[62519]: DEBUG nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Instance 34d2991e-b6df-473d-8994-e45ff57ef131 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62519) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1518.764101] env[62519]: DEBUG nova.compute.manager [req-31bb7e4e-b57d-4207-ad45-db84f810a67e req-f0a4a4d5-6763-4dce-a5a1-93a8e952dc06 service nova] [instance: 3b506d10-a427-47b8-ab5f-c35e450b7eb1] Received event network-vif-plugged-cd903c9d-5093-4f0a-9439-683a3b25eef0 {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1518.764388] env[62519]: DEBUG oslo_concurrency.lockutils [req-31bb7e4e-b57d-4207-ad45-db84f810a67e req-f0a4a4d5-6763-4dce-a5a1-93a8e952dc06 service nova] Acquiring lock "3b506d10-a427-47b8-ab5f-c35e450b7eb1-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1518.764630] env[62519]: DEBUG oslo_concurrency.lockutils [req-31bb7e4e-b57d-4207-ad45-db84f810a67e req-f0a4a4d5-6763-4dce-a5a1-93a8e952dc06 service nova] Lock "3b506d10-a427-47b8-ab5f-c35e450b7eb1-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1518.764861] env[62519]: DEBUG oslo_concurrency.lockutils [req-31bb7e4e-b57d-4207-ad45-db84f810a67e req-f0a4a4d5-6763-4dce-a5a1-93a8e952dc06 service nova] Lock "3b506d10-a427-47b8-ab5f-c35e450b7eb1-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1518.765079] env[62519]: DEBUG nova.compute.manager [req-31bb7e4e-b57d-4207-ad45-db84f810a67e req-f0a4a4d5-6763-4dce-a5a1-93a8e952dc06 service nova] [instance: 3b506d10-a427-47b8-ab5f-c35e450b7eb1] No waiting events found dispatching network-vif-plugged-cd903c9d-5093-4f0a-9439-683a3b25eef0 {{(pid=62519) pop_instance_event /opt/stack/nova/nova/compute/manager.py:323}} [ 1518.765269] env[62519]: WARNING nova.compute.manager [req-31bb7e4e-b57d-4207-ad45-db84f810a67e req-f0a4a4d5-6763-4dce-a5a1-93a8e952dc06 service nova] [instance: 3b506d10-a427-47b8-ab5f-c35e450b7eb1] Received unexpected event network-vif-plugged-cd903c9d-5093-4f0a-9439-683a3b25eef0 for instance with vm_state building and task_state spawning. [ 1518.816686] env[62519]: DEBUG oslo_vmware.api [None req-84de915f-3960-4f49-9d37-c92acb68e404 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52081846-ff7a-1ebd-ecfc-3ca2c438426a, 'name': SearchDatastore_Task, 'duration_secs': 0.009434} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1518.817387] env[62519]: DEBUG oslo_concurrency.lockutils [None req-84de915f-3960-4f49-9d37-c92acb68e404 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Releasing lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1518.817477] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-84de915f-3960-4f49-9d37-c92acb68e404 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] [instance: 51eaac08-75fd-49f9-9b1a-cc2a2d799634] Processing image 15793716-f1d9-4a86-9030-717adf498693 {{(pid=62519) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1518.817649] env[62519]: DEBUG oslo_concurrency.lockutils [None req-84de915f-3960-4f49-9d37-c92acb68e404 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1518.817813] env[62519]: DEBUG oslo_concurrency.lockutils [None req-84de915f-3960-4f49-9d37-c92acb68e404 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Acquired lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1518.818016] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-84de915f-3960-4f49-9d37-c92acb68e404 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62519) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1518.818319] env[62519]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ba502bad-b0a2-4538-97d4-119fecdfa4b1 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1518.828562] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-84de915f-3960-4f49-9d37-c92acb68e404 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62519) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1518.828769] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-84de915f-3960-4f49-9d37-c92acb68e404 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62519) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1518.829500] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4e2d572e-208c-4cff-a1e3-d8f84983212e {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1518.835724] env[62519]: DEBUG oslo_vmware.api [None req-84de915f-3960-4f49-9d37-c92acb68e404 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Waiting for the task: (returnval){ [ 1518.835724] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]5271af04-aa0c-423a-1ce8-9ae75e0f2c9b" [ 1518.835724] env[62519]: _type = "Task" [ 1518.835724] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1518.845946] env[62519]: DEBUG oslo_vmware.api [None req-84de915f-3960-4f49-9d37-c92acb68e404 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]5271af04-aa0c-423a-1ce8-9ae75e0f2c9b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1518.846808] env[62519]: DEBUG nova.network.neutron [None req-384607cc-9c11-4f58-8b9f-4201a167011b tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] [instance: 3b506d10-a427-47b8-ab5f-c35e450b7eb1] Successfully updated port: cd903c9d-5093-4f0a-9439-683a3b25eef0 {{(pid=62519) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1518.860148] env[62519]: DEBUG nova.objects.instance [None req-e1dc4985-f528-4858-87c1-875c614a127e tempest-VolumesAssistedSnapshotsTest-1756313024 tempest-VolumesAssistedSnapshotsTest-1756313024-project-admin] Lazy-loading 'flavor' on Instance uuid 91902e7f-8c15-447b-a3a8-04433434b1b6 {{(pid=62519) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1518.951660] env[62519]: DEBUG oslo_vmware.api [None req-d2114a68-6388-4c74-8f9c-6fba6cdea6e5 tempest-ServersTestFqdnHostnames-1137700598 tempest-ServersTestFqdnHostnames-1137700598-project-member] Task: {'id': task-1802264, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1519.073901] env[62519]: DEBUG nova.compute.manager [None req-6f916e62-743a-4a1e-b53d-f7984ee998e1 tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] [instance: 1d4b14d3-8832-457e-aaed-462236555f57] Checking state {{(pid=62519) _get_power_state /opt/stack/nova/nova/compute/manager.py:1799}} [ 1519.074822] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-333a2fe2-526a-4e2b-9b12-63a1558d0003 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1519.152185] env[62519]: DEBUG nova.compute.manager [None req-7525886b-c35e-4300-bd7c-cb7c69519285 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] [instance: d8780c40-0099-4ccc-84ae-72fbb14fa1ee] Starting instance... 
{{(pid=62519) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2441}} [ 1519.157650] env[62519]: DEBUG nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Instance dac173ff-1807-405f-a59c-bb2efef62a0c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62519) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1519.158164] env[62519]: DEBUG nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Instance f59a31e4-7fb9-4de7-b35f-da811a305f85 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62519) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1519.158394] env[62519]: DEBUG nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Instance 4c336ad1-8ce6-4f89-843e-0baae0d0dbda actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62519) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1519.346754] env[62519]: DEBUG oslo_vmware.api [None req-84de915f-3960-4f49-9d37-c92acb68e404 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]5271af04-aa0c-423a-1ce8-9ae75e0f2c9b, 'name': SearchDatastore_Task, 'duration_secs': 0.010196} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1519.347749] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b7a4ac19-f213-4715-bbf9-4559ea716856 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1519.350756] env[62519]: DEBUG oslo_concurrency.lockutils [None req-384607cc-9c11-4f58-8b9f-4201a167011b tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Acquiring lock "refresh_cache-3b506d10-a427-47b8-ab5f-c35e450b7eb1" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1519.350756] env[62519]: DEBUG oslo_concurrency.lockutils [None req-384607cc-9c11-4f58-8b9f-4201a167011b tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Acquired lock "refresh_cache-3b506d10-a427-47b8-ab5f-c35e450b7eb1" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1519.350756] env[62519]: DEBUG nova.network.neutron [None req-384607cc-9c11-4f58-8b9f-4201a167011b tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] [instance: 3b506d10-a427-47b8-ab5f-c35e450b7eb1] Building network info cache for instance {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1519.355200] env[62519]: DEBUG oslo_vmware.api [None req-84de915f-3960-4f49-9d37-c92acb68e404 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Waiting for the task: (returnval){ [ 1519.355200] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]52ad267f-4a86-d010-91eb-1900b367802b" [ 1519.355200] env[62519]: _type = "Task" [ 1519.355200] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1519.364812] env[62519]: DEBUG oslo_vmware.api [None req-84de915f-3960-4f49-9d37-c92acb68e404 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52ad267f-4a86-d010-91eb-1900b367802b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1519.364966] env[62519]: DEBUG oslo_concurrency.lockutils [None req-e1dc4985-f528-4858-87c1-875c614a127e tempest-VolumesAssistedSnapshotsTest-1756313024 tempest-VolumesAssistedSnapshotsTest-1756313024-project-admin] Lock "91902e7f-8c15-447b-a3a8-04433434b1b6" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 8.333s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1519.450791] env[62519]: DEBUG oslo_vmware.api [None req-d2114a68-6388-4c74-8f9c-6fba6cdea6e5 tempest-ServersTestFqdnHostnames-1137700598 tempest-ServersTestFqdnHostnames-1137700598-project-member] Task: {'id': task-1802264, 'name': PowerOnVM_Task, 'duration_secs': 0.58105} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1519.451064] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-d2114a68-6388-4c74-8f9c-6fba6cdea6e5 tempest-ServersTestFqdnHostnames-1137700598 tempest-ServersTestFqdnHostnames-1137700598-project-member] [instance: a59be5e6-2316-4766-933a-4d01dfe4fec1] Powered on the VM {{(pid=62519) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1519.451269] env[62519]: INFO nova.compute.manager [None req-d2114a68-6388-4c74-8f9c-6fba6cdea6e5 tempest-ServersTestFqdnHostnames-1137700598 tempest-ServersTestFqdnHostnames-1137700598-project-member] [instance: a59be5e6-2316-4766-933a-4d01dfe4fec1] Took 7.70 seconds to spawn the instance on the hypervisor. [ 1519.451439] env[62519]: DEBUG nova.compute.manager [None req-d2114a68-6388-4c74-8f9c-6fba6cdea6e5 tempest-ServersTestFqdnHostnames-1137700598 tempest-ServersTestFqdnHostnames-1137700598-project-member] [instance: a59be5e6-2316-4766-933a-4d01dfe4fec1] Checking state {{(pid=62519) _get_power_state /opt/stack/nova/nova/compute/manager.py:1799}} [ 1519.452190] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-898f85ea-fc83-4fb0-84b3-a1f332259bbd {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1519.586213] env[62519]: INFO nova.compute.manager [None req-6f916e62-743a-4a1e-b53d-f7984ee998e1 tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] [instance: 1d4b14d3-8832-457e-aaed-462236555f57] instance snapshotting [ 1519.589022] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1cc30ca8-d374-42aa-8bfe-f63e0a96c39b {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1519.607764] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4272c104-e162-4d72-807b-5d6a4998c885 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1519.663231] env[62519]: DEBUG nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Instance 22380aef-c725-43a0-a957-06ced9518c21 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62519) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1519.663384] env[62519]: DEBUG nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Instance 91902e7f-8c15-447b-a3a8-04433434b1b6 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62519) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1519.683768] env[62519]: DEBUG oslo_concurrency.lockutils [None req-7525886b-c35e-4300-bd7c-cb7c69519285 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1519.762938] env[62519]: DEBUG oslo_concurrency.lockutils [None req-ed8c8cfc-95ee-41ae-9eb3-51c1813c2a2d tempest-VolumesAssistedSnapshotsTest-1756313024 tempest-VolumesAssistedSnapshotsTest-1756313024-project-admin] Acquiring lock "91902e7f-8c15-447b-a3a8-04433434b1b6" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1519.763499] env[62519]: DEBUG oslo_concurrency.lockutils [None req-ed8c8cfc-95ee-41ae-9eb3-51c1813c2a2d tempest-VolumesAssistedSnapshotsTest-1756313024 tempest-VolumesAssistedSnapshotsTest-1756313024-project-admin] Lock "91902e7f-8c15-447b-a3a8-04433434b1b6" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.001s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1519.871018] env[62519]: DEBUG oslo_vmware.api [None req-84de915f-3960-4f49-9d37-c92acb68e404 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52ad267f-4a86-d010-91eb-1900b367802b, 'name': SearchDatastore_Task, 'duration_secs': 0.008715} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1519.871018] env[62519]: DEBUG oslo_concurrency.lockutils [None req-84de915f-3960-4f49-9d37-c92acb68e404 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Releasing lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1519.871018] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-84de915f-3960-4f49-9d37-c92acb68e404 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk to [datastore1] 51eaac08-75fd-49f9-9b1a-cc2a2d799634/51eaac08-75fd-49f9-9b1a-cc2a2d799634.vmdk {{(pid=62519) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1519.871018] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-5041f36b-77d5-42f7-8362-2e3ee5994c2c {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1519.877484] env[62519]: DEBUG oslo_vmware.api [None req-84de915f-3960-4f49-9d37-c92acb68e404 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Waiting for the task: (returnval){ [ 1519.877484] env[62519]: value = "task-1802265" [ 1519.877484] env[62519]: _type = "Task" [ 1519.877484] env[62519]: } to complete. 
{{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1519.885614] env[62519]: DEBUG oslo_vmware.api [None req-84de915f-3960-4f49-9d37-c92acb68e404 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Task: {'id': task-1802265, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1519.969565] env[62519]: INFO nova.compute.manager [None req-d2114a68-6388-4c74-8f9c-6fba6cdea6e5 tempest-ServersTestFqdnHostnames-1137700598 tempest-ServersTestFqdnHostnames-1137700598-project-member] [instance: a59be5e6-2316-4766-933a-4d01dfe4fec1] Took 45.46 seconds to build instance. [ 1520.113812] env[62519]: DEBUG nova.network.neutron [None req-384607cc-9c11-4f58-8b9f-4201a167011b tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] [instance: 3b506d10-a427-47b8-ab5f-c35e450b7eb1] Instance cache missing network info. {{(pid=62519) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1520.120114] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-6f916e62-743a-4a1e-b53d-f7984ee998e1 tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] [instance: 1d4b14d3-8832-457e-aaed-462236555f57] Creating Snapshot of the VM instance {{(pid=62519) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1520.120482] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-e87b2b13-5f4b-4e69-a4c9-15c45c0bc4ae {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1520.128130] env[62519]: DEBUG oslo_vmware.api [None req-6f916e62-743a-4a1e-b53d-f7984ee998e1 tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] Waiting for the task: (returnval){ [ 1520.128130] env[62519]: value = "task-1802266" [ 1520.128130] env[62519]: _type = "Task" [ 1520.128130] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1520.138050] env[62519]: DEBUG oslo_vmware.api [None req-6f916e62-743a-4a1e-b53d-f7984ee998e1 tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] Task: {'id': task-1802266, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1520.168290] env[62519]: DEBUG nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Instance d8780c40-0099-4ccc-84ae-72fbb14fa1ee has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62519) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1520.168790] env[62519]: WARNING nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Instance 1462d213-3f9a-4c60-8056-0b68f20a4939 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. 
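
[Editorial aside, not part of the captured log] The oslo_concurrency.lockutils DEBUG lines in this trace ("Acquiring lock ... by ...", "Lock ... acquired ... waited", "released ... held") come from oslo.concurrency's named-lock helpers wrapping Nova code paths such as ResourceTracker.instance_claim and the per-instance/refresh_cache locks. A minimal sketch of how such lines are typically produced is below; the lock names are copied from the log, but the function and its body are illustrative assumptions only, not Nova's actual call sites.

    from oslo_concurrency import lockutils

    # Illustrative only: a toy function guarded by the same named lock that
    # appears in the log. lockutils logs acquire/wait/release timings at
    # DEBUG, which is where lines like the ones above originate.
    @lockutils.synchronized('compute_resources')
    def claim_resources():
        # Runs only while the "compute_resources" lock is held.
        pass

    # Context-manager form, as used for the per-instance cache locks seen
    # in this trace (lock name taken from the log; usage is a sketch).
    with lockutils.lock('refresh_cache-3b506d10-a427-47b8-ab5f-c35e450b7eb1'):
        pass
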
[ 1520.168790] env[62519]: DEBUG nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Instance 029ee07c-705d-452b-9b14-385d69f2fbbb actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62519) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1520.168790] env[62519]: DEBUG nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Instance c61c893f-826b-4874-b253-de6fbffa9e5a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62519) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1520.169039] env[62519]: DEBUG nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Instance a59be5e6-2316-4766-933a-4d01dfe4fec1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62519) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1520.169039] env[62519]: DEBUG nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Instance 2b391628-18a2-4606-8c59-58ba642cee50 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62519) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1520.169166] env[62519]: DEBUG nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Instance 3b506d10-a427-47b8-ab5f-c35e450b7eb1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62519) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1520.169235] env[62519]: WARNING nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Instance f7d5c77d-6c78-4969-b511-2b03ab624c84 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1520.266386] env[62519]: INFO nova.compute.manager [None req-ed8c8cfc-95ee-41ae-9eb3-51c1813c2a2d tempest-VolumesAssistedSnapshotsTest-1756313024 tempest-VolumesAssistedSnapshotsTest-1756313024-project-admin] [instance: 91902e7f-8c15-447b-a3a8-04433434b1b6] Detaching volume b3ec9fe2-f38d-46f0-977a-134fc940c266 [ 1520.307556] env[62519]: INFO nova.virt.block_device [None req-ed8c8cfc-95ee-41ae-9eb3-51c1813c2a2d tempest-VolumesAssistedSnapshotsTest-1756313024 tempest-VolumesAssistedSnapshotsTest-1756313024-project-admin] [instance: 91902e7f-8c15-447b-a3a8-04433434b1b6] Attempting to driver detach volume b3ec9fe2-f38d-46f0-977a-134fc940c266 from mountpoint /dev/sdb [ 1520.308026] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-ed8c8cfc-95ee-41ae-9eb3-51c1813c2a2d tempest-VolumesAssistedSnapshotsTest-1756313024 tempest-VolumesAssistedSnapshotsTest-1756313024-project-admin] [instance: 91902e7f-8c15-447b-a3a8-04433434b1b6] Volume detach. 
Driver type: vmdk {{(pid=62519) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1520.308130] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-ed8c8cfc-95ee-41ae-9eb3-51c1813c2a2d tempest-VolumesAssistedSnapshotsTest-1756313024 tempest-VolumesAssistedSnapshotsTest-1756313024-project-admin] [instance: 91902e7f-8c15-447b-a3a8-04433434b1b6] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-373663', 'volume_id': 'b3ec9fe2-f38d-46f0-977a-134fc940c266', 'name': 'volume-b3ec9fe2-f38d-46f0-977a-134fc940c266', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '91902e7f-8c15-447b-a3a8-04433434b1b6', 'attached_at': '', 'detached_at': '', 'volume_id': 'b3ec9fe2-f38d-46f0-977a-134fc940c266', 'serial': 'b3ec9fe2-f38d-46f0-977a-134fc940c266'} {{(pid=62519) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1520.310351] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-717ea955-e0be-4e03-a1d6-c31b5545ebaf {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1520.348849] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d3c182ed-1df8-4a83-9aee-e5df2f2789dd {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1520.357455] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6c4e921-6279-4877-82e7-1bff9cbe39ae {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1520.386783] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9b107b5-1116-4b86-90ce-6b8c87ed085c {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1520.394572] env[62519]: DEBUG oslo_vmware.api [None req-84de915f-3960-4f49-9d37-c92acb68e404 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Task: {'id': task-1802265, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.451796} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1520.406843] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-84de915f-3960-4f49-9d37-c92acb68e404 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk to [datastore1] 51eaac08-75fd-49f9-9b1a-cc2a2d799634/51eaac08-75fd-49f9-9b1a-cc2a2d799634.vmdk {{(pid=62519) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1520.407107] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-84de915f-3960-4f49-9d37-c92acb68e404 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] [instance: 51eaac08-75fd-49f9-9b1a-cc2a2d799634] Extending root virtual disk to 1048576 {{(pid=62519) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1520.407413] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-ed8c8cfc-95ee-41ae-9eb3-51c1813c2a2d tempest-VolumesAssistedSnapshotsTest-1756313024 tempest-VolumesAssistedSnapshotsTest-1756313024-project-admin] The volume has not been displaced from its original location: [datastore1] volume-b3ec9fe2-f38d-46f0-977a-134fc940c266/volume-b3ec9fe2-f38d-46f0-977a-134fc940c266.vmdk. No consolidation needed. {{(pid=62519) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1520.412738] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-ed8c8cfc-95ee-41ae-9eb3-51c1813c2a2d tempest-VolumesAssistedSnapshotsTest-1756313024 tempest-VolumesAssistedSnapshotsTest-1756313024-project-admin] [instance: 91902e7f-8c15-447b-a3a8-04433434b1b6] Reconfiguring VM instance instance-0000000f to detach disk 2001 {{(pid=62519) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1520.413703] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-6500997b-b87d-4b74-b085-4996974ebce3 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1520.415723] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3d8659b0-a584-43f1-88e2-04110a6ab415 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1520.433504] env[62519]: DEBUG oslo_vmware.api [None req-84de915f-3960-4f49-9d37-c92acb68e404 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Waiting for the task: (returnval){ [ 1520.433504] env[62519]: value = "task-1802267" [ 1520.433504] env[62519]: _type = "Task" [ 1520.433504] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1520.434888] env[62519]: DEBUG oslo_vmware.api [None req-ed8c8cfc-95ee-41ae-9eb3-51c1813c2a2d tempest-VolumesAssistedSnapshotsTest-1756313024 tempest-VolumesAssistedSnapshotsTest-1756313024-project-admin] Waiting for the task: (returnval){ [ 1520.434888] env[62519]: value = "task-1802268" [ 1520.434888] env[62519]: _type = "Task" [ 1520.434888] env[62519]: } to complete. 
{{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1520.448065] env[62519]: DEBUG oslo_vmware.api [None req-84de915f-3960-4f49-9d37-c92acb68e404 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Task: {'id': task-1802267, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1520.451777] env[62519]: DEBUG oslo_vmware.api [None req-ed8c8cfc-95ee-41ae-9eb3-51c1813c2a2d tempest-VolumesAssistedSnapshotsTest-1756313024 tempest-VolumesAssistedSnapshotsTest-1756313024-project-admin] Task: {'id': task-1802268, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1520.622750] env[62519]: DEBUG nova.network.neutron [None req-384607cc-9c11-4f58-8b9f-4201a167011b tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] [instance: 3b506d10-a427-47b8-ab5f-c35e450b7eb1] Updating instance_info_cache with network_info: [{"id": "cd903c9d-5093-4f0a-9439-683a3b25eef0", "address": "fa:16:3e:00:46:e9", "network": {"id": "0ccefefe-8ea4-4ea3-91a2-32d7697da357", "bridge": "br-int", "label": "tempest-ImagesTestJSON-41230602-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a0755b34e22d4478817ec4e2d57aac2a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "594b35bb-a20b-4f0e-bd35-9acf9cc6bf11", "external-id": "nsx-vlan-transportzone-299", "segmentation_id": 299, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcd903c9d-50", "ovs_interfaceid": "cd903c9d-5093-4f0a-9439-683a3b25eef0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1520.637432] env[62519]: DEBUG oslo_vmware.api [None req-6f916e62-743a-4a1e-b53d-f7984ee998e1 tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] Task: {'id': task-1802266, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1520.674571] env[62519]: DEBUG nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Instance a1551278-a306-4534-8d8d-3b3a003dde04 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62519) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1520.674571] env[62519]: DEBUG nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Instance fe350d30-6fbd-4813-9634-ed05984fecfd actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=62519) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1520.674571] env[62519]: WARNING nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Instance 3aab3adb-b28b-45dd-880f-b1cfbaeeed0c is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1520.674571] env[62519]: WARNING nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Instance 56790738-4759-468a-9f43-f9c2bc2de23a is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1520.892624] env[62519]: DEBUG nova.compute.manager [req-244bbb6f-bf84-4390-aec9-5ad0f78020c1 req-e9e6dacc-789e-4f0a-8860-5730e3242994 service nova] [instance: 3b506d10-a427-47b8-ab5f-c35e450b7eb1] Received event network-changed-cd903c9d-5093-4f0a-9439-683a3b25eef0 {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1520.892875] env[62519]: DEBUG nova.compute.manager [req-244bbb6f-bf84-4390-aec9-5ad0f78020c1 req-e9e6dacc-789e-4f0a-8860-5730e3242994 service nova] [instance: 3b506d10-a427-47b8-ab5f-c35e450b7eb1] Refreshing instance network info cache due to event network-changed-cd903c9d-5093-4f0a-9439-683a3b25eef0. {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11628}} [ 1520.892875] env[62519]: DEBUG oslo_concurrency.lockutils [req-244bbb6f-bf84-4390-aec9-5ad0f78020c1 req-e9e6dacc-789e-4f0a-8860-5730e3242994 service nova] Acquiring lock "refresh_cache-3b506d10-a427-47b8-ab5f-c35e450b7eb1" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1520.948537] env[62519]: DEBUG oslo_vmware.api [None req-84de915f-3960-4f49-9d37-c92acb68e404 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Task: {'id': task-1802267, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.086054} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1520.951727] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-84de915f-3960-4f49-9d37-c92acb68e404 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] [instance: 51eaac08-75fd-49f9-9b1a-cc2a2d799634] Extended root virtual disk {{(pid=62519) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1520.952052] env[62519]: DEBUG oslo_vmware.api [None req-ed8c8cfc-95ee-41ae-9eb3-51c1813c2a2d tempest-VolumesAssistedSnapshotsTest-1756313024 tempest-VolumesAssistedSnapshotsTest-1756313024-project-admin] Task: {'id': task-1802268, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1520.952818] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e87699a-bef2-4e5f-9a6a-0358d9092da4 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1520.977300] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-84de915f-3960-4f49-9d37-c92acb68e404 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] [instance: 51eaac08-75fd-49f9-9b1a-cc2a2d799634] Reconfiguring VM instance instance-00000020 to attach disk [datastore1] 51eaac08-75fd-49f9-9b1a-cc2a2d799634/51eaac08-75fd-49f9-9b1a-cc2a2d799634.vmdk or device None with type sparse {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1520.978755] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-edf0c24d-225b-4ff2-a1d2-66266536bf79 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1520.996784] env[62519]: DEBUG oslo_concurrency.lockutils [None req-d2114a68-6388-4c74-8f9c-6fba6cdea6e5 tempest-ServersTestFqdnHostnames-1137700598 tempest-ServersTestFqdnHostnames-1137700598-project-member] Lock "a59be5e6-2316-4766-933a-4d01dfe4fec1" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 95.996s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1521.004975] env[62519]: DEBUG oslo_vmware.api [None req-84de915f-3960-4f49-9d37-c92acb68e404 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Waiting for the task: (returnval){ [ 1521.004975] env[62519]: value = "task-1802269" [ 1521.004975] env[62519]: _type = "Task" [ 1521.004975] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1521.014634] env[62519]: DEBUG oslo_vmware.api [None req-84de915f-3960-4f49-9d37-c92acb68e404 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Task: {'id': task-1802269, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1521.125462] env[62519]: DEBUG oslo_concurrency.lockutils [None req-384607cc-9c11-4f58-8b9f-4201a167011b tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Releasing lock "refresh_cache-3b506d10-a427-47b8-ab5f-c35e450b7eb1" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1521.125816] env[62519]: DEBUG nova.compute.manager [None req-384607cc-9c11-4f58-8b9f-4201a167011b tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] [instance: 3b506d10-a427-47b8-ab5f-c35e450b7eb1] Instance network_info: |[{"id": "cd903c9d-5093-4f0a-9439-683a3b25eef0", "address": "fa:16:3e:00:46:e9", "network": {"id": "0ccefefe-8ea4-4ea3-91a2-32d7697da357", "bridge": "br-int", "label": "tempest-ImagesTestJSON-41230602-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a0755b34e22d4478817ec4e2d57aac2a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "594b35bb-a20b-4f0e-bd35-9acf9cc6bf11", "external-id": "nsx-vlan-transportzone-299", "segmentation_id": 299, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcd903c9d-50", "ovs_interfaceid": "cd903c9d-5093-4f0a-9439-683a3b25eef0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62519) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2004}} [ 1521.126169] env[62519]: DEBUG oslo_concurrency.lockutils [req-244bbb6f-bf84-4390-aec9-5ad0f78020c1 req-e9e6dacc-789e-4f0a-8860-5730e3242994 service nova] Acquired lock "refresh_cache-3b506d10-a427-47b8-ab5f-c35e450b7eb1" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1521.126365] env[62519]: DEBUG nova.network.neutron [req-244bbb6f-bf84-4390-aec9-5ad0f78020c1 req-e9e6dacc-789e-4f0a-8860-5730e3242994 service nova] [instance: 3b506d10-a427-47b8-ab5f-c35e450b7eb1] Refreshing network info cache for port cd903c9d-5093-4f0a-9439-683a3b25eef0 {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1521.127667] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-384607cc-9c11-4f58-8b9f-4201a167011b tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] [instance: 3b506d10-a427-47b8-ab5f-c35e450b7eb1] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:00:46:e9', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '594b35bb-a20b-4f0e-bd35-9acf9cc6bf11', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'cd903c9d-5093-4f0a-9439-683a3b25eef0', 'vif_model': 'vmxnet3'}] {{(pid=62519) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1521.136814] env[62519]: DEBUG oslo.service.loopingcall [None req-384607cc-9c11-4f58-8b9f-4201a167011b tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Waiting for function 
nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62519) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1521.138049] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3b506d10-a427-47b8-ab5f-c35e450b7eb1] Creating VM on the ESX host {{(pid=62519) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1521.141518] env[62519]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-452feb9c-2139-4de1-b815-f722cd0f51e7 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1521.168824] env[62519]: DEBUG oslo_vmware.api [None req-6f916e62-743a-4a1e-b53d-f7984ee998e1 tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] Task: {'id': task-1802266, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1521.169099] env[62519]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1521.169099] env[62519]: value = "task-1802270" [ 1521.169099] env[62519]: _type = "Task" [ 1521.169099] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1521.177934] env[62519]: DEBUG nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Instance f19c860f-736a-4783-8ef5-8262040e53a3 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62519) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1521.178162] env[62519]: DEBUG nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Instance 4a29bff8-050a-4ad5-9d06-3a59c40b97ee actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62519) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1521.179261] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1802270, 'name': CreateVM_Task} progress is 5%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1521.449481] env[62519]: DEBUG oslo_vmware.api [None req-ed8c8cfc-95ee-41ae-9eb3-51c1813c2a2d tempest-VolumesAssistedSnapshotsTest-1756313024 tempest-VolumesAssistedSnapshotsTest-1756313024-project-admin] Task: {'id': task-1802268, 'name': ReconfigVM_Task, 'duration_secs': 0.640642} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1521.449821] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-ed8c8cfc-95ee-41ae-9eb3-51c1813c2a2d tempest-VolumesAssistedSnapshotsTest-1756313024 tempest-VolumesAssistedSnapshotsTest-1756313024-project-admin] [instance: 91902e7f-8c15-447b-a3a8-04433434b1b6] Reconfigured VM instance instance-0000000f to detach disk 2001 {{(pid=62519) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1521.454910] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ffe3ab6f-64a9-4409-818f-c98de4dbca67 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1521.470284] env[62519]: DEBUG oslo_vmware.api [None req-ed8c8cfc-95ee-41ae-9eb3-51c1813c2a2d tempest-VolumesAssistedSnapshotsTest-1756313024 tempest-VolumesAssistedSnapshotsTest-1756313024-project-admin] Waiting for the task: (returnval){ [ 1521.470284] env[62519]: value = "task-1802271" [ 1521.470284] env[62519]: _type = "Task" [ 1521.470284] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1521.479711] env[62519]: DEBUG oslo_vmware.api [None req-ed8c8cfc-95ee-41ae-9eb3-51c1813c2a2d tempest-VolumesAssistedSnapshotsTest-1756313024 tempest-VolumesAssistedSnapshotsTest-1756313024-project-admin] Task: {'id': task-1802271, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1521.499475] env[62519]: DEBUG nova.compute.manager [None req-6a3f141b-7806-43a2-8afd-6877e3266010 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] [instance: a1551278-a306-4534-8d8d-3b3a003dde04] Starting instance... {{(pid=62519) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2441}} [ 1521.516515] env[62519]: DEBUG oslo_vmware.api [None req-84de915f-3960-4f49-9d37-c92acb68e404 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Task: {'id': task-1802269, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1521.649541] env[62519]: DEBUG oslo_vmware.api [None req-6f916e62-743a-4a1e-b53d-f7984ee998e1 tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] Task: {'id': task-1802266, 'name': CreateSnapshot_Task, 'duration_secs': 1.413764} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1521.649541] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-6f916e62-743a-4a1e-b53d-f7984ee998e1 tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] [instance: 1d4b14d3-8832-457e-aaed-462236555f57] Created Snapshot of the VM instance {{(pid=62519) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1521.650564] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-85533eb3-b3d1-4a88-bb2f-c8b6f6dac73e {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1521.681593] env[62519]: DEBUG nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Instance ad374dd9-a92d-4b76-9609-7562346e05a8 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62519) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1521.682820] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1802270, 'name': CreateVM_Task, 'duration_secs': 0.395482} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1521.686244] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3b506d10-a427-47b8-ab5f-c35e450b7eb1] Created VM on the ESX host {{(pid=62519) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1521.687368] env[62519]: DEBUG oslo_concurrency.lockutils [None req-384607cc-9c11-4f58-8b9f-4201a167011b tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1521.687505] env[62519]: DEBUG oslo_concurrency.lockutils [None req-384607cc-9c11-4f58-8b9f-4201a167011b tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Acquired lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1521.687913] env[62519]: DEBUG oslo_concurrency.lockutils [None req-384607cc-9c11-4f58-8b9f-4201a167011b tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1521.688495] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a32102be-c975-43ef-ad67-a57767ab66ed {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1521.694058] env[62519]: DEBUG oslo_vmware.api [None req-384607cc-9c11-4f58-8b9f-4201a167011b tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Waiting for the task: (returnval){ [ 1521.694058] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]52fcb7b9-3df6-9556-33d6-3cbf74114f70" [ 1521.694058] env[62519]: _type = 
"Task" [ 1521.694058] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1521.703314] env[62519]: DEBUG oslo_vmware.api [None req-384607cc-9c11-4f58-8b9f-4201a167011b tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52fcb7b9-3df6-9556-33d6-3cbf74114f70, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1521.972657] env[62519]: DEBUG nova.network.neutron [req-244bbb6f-bf84-4390-aec9-5ad0f78020c1 req-e9e6dacc-789e-4f0a-8860-5730e3242994 service nova] [instance: 3b506d10-a427-47b8-ab5f-c35e450b7eb1] Updated VIF entry in instance network info cache for port cd903c9d-5093-4f0a-9439-683a3b25eef0. {{(pid=62519) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1521.973049] env[62519]: DEBUG nova.network.neutron [req-244bbb6f-bf84-4390-aec9-5ad0f78020c1 req-e9e6dacc-789e-4f0a-8860-5730e3242994 service nova] [instance: 3b506d10-a427-47b8-ab5f-c35e450b7eb1] Updating instance_info_cache with network_info: [{"id": "cd903c9d-5093-4f0a-9439-683a3b25eef0", "address": "fa:16:3e:00:46:e9", "network": {"id": "0ccefefe-8ea4-4ea3-91a2-32d7697da357", "bridge": "br-int", "label": "tempest-ImagesTestJSON-41230602-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a0755b34e22d4478817ec4e2d57aac2a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "594b35bb-a20b-4f0e-bd35-9acf9cc6bf11", "external-id": "nsx-vlan-transportzone-299", "segmentation_id": 299, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcd903c9d-50", "ovs_interfaceid": "cd903c9d-5093-4f0a-9439-683a3b25eef0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1521.984334] env[62519]: DEBUG oslo_vmware.api [None req-ed8c8cfc-95ee-41ae-9eb3-51c1813c2a2d tempest-VolumesAssistedSnapshotsTest-1756313024 tempest-VolumesAssistedSnapshotsTest-1756313024-project-admin] Task: {'id': task-1802271, 'name': ReconfigVM_Task, 'duration_secs': 0.158756} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1521.985340] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-ed8c8cfc-95ee-41ae-9eb3-51c1813c2a2d tempest-VolumesAssistedSnapshotsTest-1756313024 tempest-VolumesAssistedSnapshotsTest-1756313024-project-admin] [instance: 91902e7f-8c15-447b-a3a8-04433434b1b6] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-373663', 'volume_id': 'b3ec9fe2-f38d-46f0-977a-134fc940c266', 'name': 'volume-b3ec9fe2-f38d-46f0-977a-134fc940c266', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '91902e7f-8c15-447b-a3a8-04433434b1b6', 'attached_at': '', 'detached_at': '', 'volume_id': 'b3ec9fe2-f38d-46f0-977a-134fc940c266', 'serial': 'b3ec9fe2-f38d-46f0-977a-134fc940c266'} {{(pid=62519) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1522.022635] env[62519]: DEBUG oslo_vmware.api [None req-84de915f-3960-4f49-9d37-c92acb68e404 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Task: {'id': task-1802269, 'name': ReconfigVM_Task, 'duration_secs': 0.608431} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1522.023627] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-84de915f-3960-4f49-9d37-c92acb68e404 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] [instance: 51eaac08-75fd-49f9-9b1a-cc2a2d799634] Reconfigured VM instance instance-00000020 to attach disk [datastore1] 51eaac08-75fd-49f9-9b1a-cc2a2d799634/51eaac08-75fd-49f9-9b1a-cc2a2d799634.vmdk or device None with type sparse {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1522.024020] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-af1af4c7-52e2-4400-9480-3147090b597e {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1522.026571] env[62519]: DEBUG oslo_concurrency.lockutils [None req-6a3f141b-7806-43a2-8afd-6877e3266010 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1522.031335] env[62519]: DEBUG oslo_vmware.api [None req-84de915f-3960-4f49-9d37-c92acb68e404 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Waiting for the task: (returnval){ [ 1522.031335] env[62519]: value = "task-1802272" [ 1522.031335] env[62519]: _type = "Task" [ 1522.031335] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1522.039491] env[62519]: DEBUG oslo_vmware.api [None req-84de915f-3960-4f49-9d37-c92acb68e404 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Task: {'id': task-1802272, 'name': Rename_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1522.169828] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-6f916e62-743a-4a1e-b53d-f7984ee998e1 tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] [instance: 1d4b14d3-8832-457e-aaed-462236555f57] Creating linked-clone VM from snapshot {{(pid=62519) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1522.170681] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-d4e88b60-99b9-46da-b105-7a7fe6d71522 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1522.187967] env[62519]: DEBUG nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Instance 1b4efc0f-474f-4a32-b199-c14f27b183e2 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62519) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1522.188362] env[62519]: DEBUG nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Instance 51eaac08-75fd-49f9-9b1a-cc2a2d799634 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62519) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1522.188362] env[62519]: DEBUG nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Instance c8b7568b-ba07-4f65-818b-f84910209361 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62519) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1522.189774] env[62519]: DEBUG oslo_vmware.api [None req-6f916e62-743a-4a1e-b53d-f7984ee998e1 tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] Waiting for the task: (returnval){ [ 1522.189774] env[62519]: value = "task-1802273" [ 1522.189774] env[62519]: _type = "Task" [ 1522.189774] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1522.201227] env[62519]: DEBUG oslo_vmware.api [None req-6f916e62-743a-4a1e-b53d-f7984ee998e1 tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] Task: {'id': task-1802273, 'name': CloneVM_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1522.208239] env[62519]: DEBUG oslo_vmware.api [None req-384607cc-9c11-4f58-8b9f-4201a167011b tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52fcb7b9-3df6-9556-33d6-3cbf74114f70, 'name': SearchDatastore_Task, 'duration_secs': 0.00913} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1522.208541] env[62519]: DEBUG oslo_concurrency.lockutils [None req-384607cc-9c11-4f58-8b9f-4201a167011b tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Releasing lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1522.208771] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-384607cc-9c11-4f58-8b9f-4201a167011b tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] [instance: 3b506d10-a427-47b8-ab5f-c35e450b7eb1] Processing image 15793716-f1d9-4a86-9030-717adf498693 {{(pid=62519) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1522.209038] env[62519]: DEBUG oslo_concurrency.lockutils [None req-384607cc-9c11-4f58-8b9f-4201a167011b tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1522.209205] env[62519]: DEBUG oslo_concurrency.lockutils [None req-384607cc-9c11-4f58-8b9f-4201a167011b tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Acquired lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1522.209463] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-384607cc-9c11-4f58-8b9f-4201a167011b tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62519) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1522.209782] env[62519]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-13f7684e-45cb-44e5-9898-9025f6119d8f {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1522.218195] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-384607cc-9c11-4f58-8b9f-4201a167011b tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62519) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1522.218451] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-384607cc-9c11-4f58-8b9f-4201a167011b tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62519) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1522.220409] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5174ba14-8231-4950-a02c-5a7defdee32c {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1522.228500] env[62519]: DEBUG oslo_vmware.api [None req-384607cc-9c11-4f58-8b9f-4201a167011b tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Waiting for the task: (returnval){ [ 1522.228500] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]527576fb-4a25-0951-0a11-796a8f99b60d" [ 1522.228500] env[62519]: _type = "Task" [ 1522.228500] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1522.236566] env[62519]: DEBUG oslo_vmware.api [None req-384607cc-9c11-4f58-8b9f-4201a167011b tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]527576fb-4a25-0951-0a11-796a8f99b60d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1522.476171] env[62519]: DEBUG oslo_concurrency.lockutils [req-244bbb6f-bf84-4390-aec9-5ad0f78020c1 req-e9e6dacc-789e-4f0a-8860-5730e3242994 service nova] Releasing lock "refresh_cache-3b506d10-a427-47b8-ab5f-c35e450b7eb1" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1522.541228] env[62519]: DEBUG oslo_vmware.api [None req-84de915f-3960-4f49-9d37-c92acb68e404 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Task: {'id': task-1802272, 'name': Rename_Task, 'duration_secs': 0.250962} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1522.542446] env[62519]: DEBUG nova.objects.instance [None req-ed8c8cfc-95ee-41ae-9eb3-51c1813c2a2d tempest-VolumesAssistedSnapshotsTest-1756313024 tempest-VolumesAssistedSnapshotsTest-1756313024-project-admin] Lazy-loading 'flavor' on Instance uuid 91902e7f-8c15-447b-a3a8-04433434b1b6 {{(pid=62519) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1522.543733] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-84de915f-3960-4f49-9d37-c92acb68e404 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] [instance: 51eaac08-75fd-49f9-9b1a-cc2a2d799634] Powering on the VM {{(pid=62519) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1522.544368] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-4259ec4c-9e7e-4b95-9fb2-ec4626f9f757 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1522.551136] env[62519]: DEBUG oslo_vmware.api [None req-84de915f-3960-4f49-9d37-c92acb68e404 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Waiting for the task: (returnval){ [ 1522.551136] env[62519]: value = "task-1802274" [ 1522.551136] env[62519]: _type = "Task" [ 1522.551136] env[62519]: } to complete. 
{{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1522.559654] env[62519]: DEBUG oslo_vmware.api [None req-84de915f-3960-4f49-9d37-c92acb68e404 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Task: {'id': task-1802274, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1522.691198] env[62519]: DEBUG nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Instance 24cb49c8-b2ef-4ede-aea6-6e34081beca1 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=62519) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1522.701930] env[62519]: DEBUG oslo_vmware.api [None req-6f916e62-743a-4a1e-b53d-f7984ee998e1 tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] Task: {'id': task-1802273, 'name': CloneVM_Task} progress is 94%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1522.739523] env[62519]: DEBUG oslo_vmware.api [None req-384607cc-9c11-4f58-8b9f-4201a167011b tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]527576fb-4a25-0951-0a11-796a8f99b60d, 'name': SearchDatastore_Task, 'duration_secs': 0.012734} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1522.740345] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-30cee23f-544f-45a7-8680-ca516a672891 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1522.745874] env[62519]: DEBUG oslo_vmware.api [None req-384607cc-9c11-4f58-8b9f-4201a167011b tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Waiting for the task: (returnval){ [ 1522.745874] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]52c0d4c3-4ae0-c827-29cc-e35fc2324205" [ 1522.745874] env[62519]: _type = "Task" [ 1522.745874] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1522.754203] env[62519]: DEBUG oslo_vmware.api [None req-384607cc-9c11-4f58-8b9f-4201a167011b tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52c0d4c3-4ae0-c827-29cc-e35fc2324205, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1522.918415] env[62519]: DEBUG nova.compute.manager [req-ddc8fb3c-0f4c-461c-ba3d-6acb9b0b18ac req-3cd9f58a-f318-4419-993c-ae438c4e786b service nova] [instance: a59be5e6-2316-4766-933a-4d01dfe4fec1] Received event network-changed-883e1605-a678-4428-b758-c717d5d49ce2 {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1522.918606] env[62519]: DEBUG nova.compute.manager [req-ddc8fb3c-0f4c-461c-ba3d-6acb9b0b18ac req-3cd9f58a-f318-4419-993c-ae438c4e786b service nova] [instance: a59be5e6-2316-4766-933a-4d01dfe4fec1] Refreshing instance network info cache due to event network-changed-883e1605-a678-4428-b758-c717d5d49ce2. {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11628}} [ 1522.918814] env[62519]: DEBUG oslo_concurrency.lockutils [req-ddc8fb3c-0f4c-461c-ba3d-6acb9b0b18ac req-3cd9f58a-f318-4419-993c-ae438c4e786b service nova] Acquiring lock "refresh_cache-a59be5e6-2316-4766-933a-4d01dfe4fec1" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1522.918951] env[62519]: DEBUG oslo_concurrency.lockutils [req-ddc8fb3c-0f4c-461c-ba3d-6acb9b0b18ac req-3cd9f58a-f318-4419-993c-ae438c4e786b service nova] Acquired lock "refresh_cache-a59be5e6-2316-4766-933a-4d01dfe4fec1" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1522.919346] env[62519]: DEBUG nova.network.neutron [req-ddc8fb3c-0f4c-461c-ba3d-6acb9b0b18ac req-3cd9f58a-f318-4419-993c-ae438c4e786b service nova] [instance: a59be5e6-2316-4766-933a-4d01dfe4fec1] Refreshing network info cache for port 883e1605-a678-4428-b758-c717d5d49ce2 {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1523.062763] env[62519]: DEBUG oslo_vmware.api [None req-84de915f-3960-4f49-9d37-c92acb68e404 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Task: {'id': task-1802274, 'name': PowerOnVM_Task} progress is 88%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1523.197616] env[62519]: DEBUG nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Instance 27f9e890-4733-43aa-9bf1-351d42d75418 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62519) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1523.205395] env[62519]: DEBUG oslo_vmware.api [None req-6f916e62-743a-4a1e-b53d-f7984ee998e1 tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] Task: {'id': task-1802273, 'name': CloneVM_Task} progress is 94%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1523.257228] env[62519]: DEBUG oslo_vmware.api [None req-384607cc-9c11-4f58-8b9f-4201a167011b tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52c0d4c3-4ae0-c827-29cc-e35fc2324205, 'name': SearchDatastore_Task, 'duration_secs': 0.009591} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1523.257584] env[62519]: DEBUG oslo_concurrency.lockutils [None req-384607cc-9c11-4f58-8b9f-4201a167011b tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Releasing lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1523.257891] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-384607cc-9c11-4f58-8b9f-4201a167011b tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk to [datastore1] 3b506d10-a427-47b8-ab5f-c35e450b7eb1/3b506d10-a427-47b8-ab5f-c35e450b7eb1.vmdk {{(pid=62519) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1523.258219] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-afc762a2-5b24-44f4-99d2-b7cab3dfd8e0 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1523.265375] env[62519]: DEBUG oslo_vmware.api [None req-384607cc-9c11-4f58-8b9f-4201a167011b tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Waiting for the task: (returnval){ [ 1523.265375] env[62519]: value = "task-1802275" [ 1523.265375] env[62519]: _type = "Task" [ 1523.265375] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1523.275325] env[62519]: DEBUG oslo_vmware.api [None req-384607cc-9c11-4f58-8b9f-4201a167011b tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Task: {'id': task-1802275, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1523.549854] env[62519]: DEBUG oslo_concurrency.lockutils [None req-ed8c8cfc-95ee-41ae-9eb3-51c1813c2a2d tempest-VolumesAssistedSnapshotsTest-1756313024 tempest-VolumesAssistedSnapshotsTest-1756313024-project-admin] Lock "91902e7f-8c15-447b-a3a8-04433434b1b6" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.786s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1523.570386] env[62519]: DEBUG oslo_vmware.api [None req-84de915f-3960-4f49-9d37-c92acb68e404 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Task: {'id': task-1802274, 'name': PowerOnVM_Task, 'duration_secs': 0.756455} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1523.572625] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-84de915f-3960-4f49-9d37-c92acb68e404 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] [instance: 51eaac08-75fd-49f9-9b1a-cc2a2d799634] Powered on the VM {{(pid=62519) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1523.573765] env[62519]: DEBUG nova.compute.manager [None req-84de915f-3960-4f49-9d37-c92acb68e404 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] [instance: 51eaac08-75fd-49f9-9b1a-cc2a2d799634] Checking state {{(pid=62519) _get_power_state /opt/stack/nova/nova/compute/manager.py:1799}} [ 1523.575855] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b4cf17f-3a21-41ba-b636-30775f97f437 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1523.706579] env[62519]: DEBUG nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Instance 8070aa59-3547-460a-b914-0e84620023d0 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62519) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1523.706795] env[62519]: WARNING nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Instance 40c7a9b8-d541-464a-ba87-76cfc183ae31 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1523.708506] env[62519]: DEBUG oslo_vmware.api [None req-6f916e62-743a-4a1e-b53d-f7984ee998e1 tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] Task: {'id': task-1802273, 'name': CloneVM_Task, 'duration_secs': 1.494436} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1523.709918] env[62519]: INFO nova.virt.vmwareapi.vmops [None req-6f916e62-743a-4a1e-b53d-f7984ee998e1 tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] [instance: 1d4b14d3-8832-457e-aaed-462236555f57] Created linked-clone VM from snapshot [ 1523.710151] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b892fc2d-2c15-4573-b631-ae07e374f5b2 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1523.721071] env[62519]: DEBUG nova.virt.vmwareapi.images [None req-6f916e62-743a-4a1e-b53d-f7984ee998e1 tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] [instance: 1d4b14d3-8832-457e-aaed-462236555f57] Uploading image 1e6faf8f-6642-477c-a1fa-a8b2e31e7a75 {{(pid=62519) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1523.747144] env[62519]: DEBUG oslo_vmware.rw_handles [None req-6f916e62-743a-4a1e-b53d-f7984ee998e1 tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1523.747144] env[62519]: value = "vm-373670" [ 1523.747144] env[62519]: _type = "VirtualMachine" [ 1523.747144] env[62519]: }. {{(pid=62519) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1523.747838] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-31feee08-58a4-42b1-815d-0e3e836c277b {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1523.757452] env[62519]: DEBUG oslo_vmware.rw_handles [None req-6f916e62-743a-4a1e-b53d-f7984ee998e1 tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] Lease: (returnval){ [ 1523.757452] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]522dea2b-4e80-d72a-9039-049ab14f31e1" [ 1523.757452] env[62519]: _type = "HttpNfcLease" [ 1523.757452] env[62519]: } obtained for exporting VM: (result){ [ 1523.757452] env[62519]: value = "vm-373670" [ 1523.757452] env[62519]: _type = "VirtualMachine" [ 1523.757452] env[62519]: }. {{(pid=62519) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1523.757452] env[62519]: DEBUG oslo_vmware.api [None req-6f916e62-743a-4a1e-b53d-f7984ee998e1 tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] Waiting for the lease: (returnval){ [ 1523.757452] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]522dea2b-4e80-d72a-9039-049ab14f31e1" [ 1523.757452] env[62519]: _type = "HttpNfcLease" [ 1523.757452] env[62519]: } to be ready. {{(pid=62519) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1523.764741] env[62519]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1523.764741] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]522dea2b-4e80-d72a-9039-049ab14f31e1" [ 1523.764741] env[62519]: _type = "HttpNfcLease" [ 1523.764741] env[62519]: } is initializing. 
{{(pid=62519) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1523.776811] env[62519]: DEBUG oslo_vmware.api [None req-384607cc-9c11-4f58-8b9f-4201a167011b tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Task: {'id': task-1802275, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.479485} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1523.776904] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-384607cc-9c11-4f58-8b9f-4201a167011b tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk to [datastore1] 3b506d10-a427-47b8-ab5f-c35e450b7eb1/3b506d10-a427-47b8-ab5f-c35e450b7eb1.vmdk {{(pid=62519) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1523.777220] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-384607cc-9c11-4f58-8b9f-4201a167011b tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] [instance: 3b506d10-a427-47b8-ab5f-c35e450b7eb1] Extending root virtual disk to 1048576 {{(pid=62519) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1523.777541] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-fad4664b-12ed-44ea-aed3-6b6f86b53e75 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1523.786216] env[62519]: DEBUG oslo_vmware.api [None req-384607cc-9c11-4f58-8b9f-4201a167011b tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Waiting for the task: (returnval){ [ 1523.786216] env[62519]: value = "task-1802277" [ 1523.786216] env[62519]: _type = "Task" [ 1523.786216] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1523.787239] env[62519]: DEBUG nova.network.neutron [req-ddc8fb3c-0f4c-461c-ba3d-6acb9b0b18ac req-3cd9f58a-f318-4419-993c-ae438c4e786b service nova] [instance: a59be5e6-2316-4766-933a-4d01dfe4fec1] Updated VIF entry in instance network info cache for port 883e1605-a678-4428-b758-c717d5d49ce2. 
{{(pid=62519) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1523.787657] env[62519]: DEBUG nova.network.neutron [req-ddc8fb3c-0f4c-461c-ba3d-6acb9b0b18ac req-3cd9f58a-f318-4419-993c-ae438c4e786b service nova] [instance: a59be5e6-2316-4766-933a-4d01dfe4fec1] Updating instance_info_cache with network_info: [{"id": "883e1605-a678-4428-b758-c717d5d49ce2", "address": "fa:16:3e:5d:b5:47", "network": {"id": "45dcc816-e95d-44c3-8cf1-0e178b840ae4", "bridge": "br-int", "label": "tempest-ServersTestFqdnHostnames-504304918-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.150", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9498e9017e174096886dccaf5574a0ab", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "33fdc099-7497-41c1-b40c-1558937132d4", "external-id": "nsx-vlan-transportzone-764", "segmentation_id": 764, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap883e1605-a6", "ovs_interfaceid": "883e1605-a678-4428-b758-c717d5d49ce2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1523.800514] env[62519]: DEBUG oslo_vmware.api [None req-384607cc-9c11-4f58-8b9f-4201a167011b tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Task: {'id': task-1802277, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1524.099121] env[62519]: DEBUG oslo_concurrency.lockutils [None req-84de915f-3960-4f49-9d37-c92acb68e404 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1524.211217] env[62519]: DEBUG nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Instance 765cf18e-53a0-4cc6-ad0e-337a6f68915c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62519) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1524.211217] env[62519]: DEBUG nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Total usable vcpus: 48, total allocated vcpus: 17 {{(pid=62519) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1524.211515] env[62519]: DEBUG nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=3840MB phys_disk=200GB used_disk=17GB total_vcpus=48 used_vcpus=17 pci_stats=[] {{(pid=62519) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1524.265576] env[62519]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1524.265576] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]522dea2b-4e80-d72a-9039-049ab14f31e1" [ 1524.265576] env[62519]: _type = "HttpNfcLease" [ 1524.265576] env[62519]: } is ready. {{(pid=62519) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1524.265576] env[62519]: DEBUG oslo_vmware.rw_handles [None req-6f916e62-743a-4a1e-b53d-f7984ee998e1 tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1524.265576] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]522dea2b-4e80-d72a-9039-049ab14f31e1" [ 1524.265576] env[62519]: _type = "HttpNfcLease" [ 1524.265576] env[62519]: }. {{(pid=62519) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1524.266019] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b76ba07-d6fb-4510-8f51-0f5f12942ae4 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1524.275689] env[62519]: DEBUG oslo_vmware.rw_handles [None req-6f916e62-743a-4a1e-b53d-f7984ee998e1 tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5219ffdb-35e5-de54-86f0-a52b72fdb517/disk-0.vmdk from lease info. {{(pid=62519) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1524.275776] env[62519]: DEBUG oslo_vmware.rw_handles [None req-6f916e62-743a-4a1e-b53d-f7984ee998e1 tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] Opening URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5219ffdb-35e5-de54-86f0-a52b72fdb517/disk-0.vmdk for reading. {{(pid=62519) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1524.334517] env[62519]: DEBUG oslo_concurrency.lockutils [req-ddc8fb3c-0f4c-461c-ba3d-6acb9b0b18ac req-3cd9f58a-f318-4419-993c-ae438c4e786b service nova] Releasing lock "refresh_cache-a59be5e6-2316-4766-933a-4d01dfe4fec1" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1524.352408] env[62519]: DEBUG oslo_vmware.api [None req-384607cc-9c11-4f58-8b9f-4201a167011b tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Task: {'id': task-1802277, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.067676} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1524.353837] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-384607cc-9c11-4f58-8b9f-4201a167011b tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] [instance: 3b506d10-a427-47b8-ab5f-c35e450b7eb1] Extended root virtual disk {{(pid=62519) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1524.353837] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21bf41d1-787b-44e6-9b4f-fa974f95b2b1 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1524.380714] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-384607cc-9c11-4f58-8b9f-4201a167011b tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] [instance: 3b506d10-a427-47b8-ab5f-c35e450b7eb1] Reconfiguring VM instance instance-00000023 to attach disk [datastore1] 3b506d10-a427-47b8-ab5f-c35e450b7eb1/3b506d10-a427-47b8-ab5f-c35e450b7eb1.vmdk or device None with type sparse {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1524.383696] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d2453413-8117-491f-ae66-764905dcb74d {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1524.405297] env[62519]: DEBUG oslo_vmware.api [None req-384607cc-9c11-4f58-8b9f-4201a167011b tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Waiting for the task: (returnval){ [ 1524.405297] env[62519]: value = "task-1802279" [ 1524.405297] env[62519]: _type = "Task" [ 1524.405297] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1524.419264] env[62519]: DEBUG oslo_vmware.api [None req-384607cc-9c11-4f58-8b9f-4201a167011b tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Task: {'id': task-1802279, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1524.440780] env[62519]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-bae347a3-73f3-4206-b2ce-79cb4357273c {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1524.846620] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c8f6fe2-9dc4-4baa-8da3-c7ed57d33234 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1524.850122] env[62519]: DEBUG oslo_concurrency.lockutils [None req-d69cb494-e284-4152-aa11-3bbc61cb4189 tempest-ServersWithSpecificFlavorTestJSON-2102225660 tempest-ServersWithSpecificFlavorTestJSON-2102225660-project-member] Acquiring lock "029ee07c-705d-452b-9b14-385d69f2fbbb" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1524.850482] env[62519]: DEBUG oslo_concurrency.lockutils [None req-d69cb494-e284-4152-aa11-3bbc61cb4189 tempest-ServersWithSpecificFlavorTestJSON-2102225660 tempest-ServersWithSpecificFlavorTestJSON-2102225660-project-member] Lock "029ee07c-705d-452b-9b14-385d69f2fbbb" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1524.850831] env[62519]: DEBUG oslo_concurrency.lockutils [None req-d69cb494-e284-4152-aa11-3bbc61cb4189 tempest-ServersWithSpecificFlavorTestJSON-2102225660 tempest-ServersWithSpecificFlavorTestJSON-2102225660-project-member] Acquiring lock "029ee07c-705d-452b-9b14-385d69f2fbbb-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1524.851895] env[62519]: DEBUG oslo_concurrency.lockutils [None req-d69cb494-e284-4152-aa11-3bbc61cb4189 tempest-ServersWithSpecificFlavorTestJSON-2102225660 tempest-ServersWithSpecificFlavorTestJSON-2102225660-project-member] Lock "029ee07c-705d-452b-9b14-385d69f2fbbb-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.001s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1524.852212] env[62519]: DEBUG oslo_concurrency.lockutils [None req-d69cb494-e284-4152-aa11-3bbc61cb4189 tempest-ServersWithSpecificFlavorTestJSON-2102225660 tempest-ServersWithSpecificFlavorTestJSON-2102225660-project-member] Lock "029ee07c-705d-452b-9b14-385d69f2fbbb-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1524.855952] env[62519]: INFO nova.compute.manager [None req-d69cb494-e284-4152-aa11-3bbc61cb4189 tempest-ServersWithSpecificFlavorTestJSON-2102225660 tempest-ServersWithSpecificFlavorTestJSON-2102225660-project-member] [instance: 029ee07c-705d-452b-9b14-385d69f2fbbb] Terminating instance [ 1524.861984] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d25501be-8d17-48ed-b3fd-f2de3f16165d {{(pid=62519) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1524.898320] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4e9b040-1f11-4bdf-9c10-b5c26c14dcad {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1524.907039] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96dd3c8c-e513-4177-835c-2c5abb632b03 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1524.911568] env[62519]: DEBUG oslo_concurrency.lockutils [None req-3b2ada86-b023-43f6-9978-a85f83bd40ba tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Acquiring lock "51eaac08-75fd-49f9-9b1a-cc2a2d799634" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1524.911892] env[62519]: DEBUG oslo_concurrency.lockutils [None req-3b2ada86-b023-43f6-9978-a85f83bd40ba tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Lock "51eaac08-75fd-49f9-9b1a-cc2a2d799634" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1524.912164] env[62519]: DEBUG oslo_concurrency.lockutils [None req-3b2ada86-b023-43f6-9978-a85f83bd40ba tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Acquiring lock "51eaac08-75fd-49f9-9b1a-cc2a2d799634-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1524.912402] env[62519]: DEBUG oslo_concurrency.lockutils [None req-3b2ada86-b023-43f6-9978-a85f83bd40ba tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Lock "51eaac08-75fd-49f9-9b1a-cc2a2d799634-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1524.912667] env[62519]: DEBUG oslo_concurrency.lockutils [None req-3b2ada86-b023-43f6-9978-a85f83bd40ba tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Lock "51eaac08-75fd-49f9-9b1a-cc2a2d799634-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1524.919237] env[62519]: INFO nova.compute.manager [None req-3b2ada86-b023-43f6-9978-a85f83bd40ba tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] [instance: 51eaac08-75fd-49f9-9b1a-cc2a2d799634] Terminating instance [ 1524.929666] env[62519]: DEBUG nova.compute.provider_tree [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Inventory has not changed in ProviderTree for provider: f8ca0d98-9158-4b85-ae0e-b106f966dd44 {{(pid=62519) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1524.936826] 
env[62519]: DEBUG oslo_vmware.api [None req-384607cc-9c11-4f58-8b9f-4201a167011b tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Task: {'id': task-1802279, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1525.030484] env[62519]: DEBUG oslo_concurrency.lockutils [None req-46f36def-b286-445c-9c6e-71911fda02f6 tempest-ServersV294TestFqdnHostnames-312089260 tempest-ServersV294TestFqdnHostnames-312089260-project-member] Acquiring lock "2b391628-18a2-4606-8c59-58ba642cee50" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1525.030716] env[62519]: DEBUG oslo_concurrency.lockutils [None req-46f36def-b286-445c-9c6e-71911fda02f6 tempest-ServersV294TestFqdnHostnames-312089260 tempest-ServersV294TestFqdnHostnames-312089260-project-member] Lock "2b391628-18a2-4606-8c59-58ba642cee50" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1525.030926] env[62519]: DEBUG oslo_concurrency.lockutils [None req-46f36def-b286-445c-9c6e-71911fda02f6 tempest-ServersV294TestFqdnHostnames-312089260 tempest-ServersV294TestFqdnHostnames-312089260-project-member] Acquiring lock "2b391628-18a2-4606-8c59-58ba642cee50-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1525.031187] env[62519]: DEBUG oslo_concurrency.lockutils [None req-46f36def-b286-445c-9c6e-71911fda02f6 tempest-ServersV294TestFqdnHostnames-312089260 tempest-ServersV294TestFqdnHostnames-312089260-project-member] Lock "2b391628-18a2-4606-8c59-58ba642cee50-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1525.031600] env[62519]: DEBUG oslo_concurrency.lockutils [None req-46f36def-b286-445c-9c6e-71911fda02f6 tempest-ServersV294TestFqdnHostnames-312089260 tempest-ServersV294TestFqdnHostnames-312089260-project-member] Lock "2b391628-18a2-4606-8c59-58ba642cee50-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1525.033744] env[62519]: INFO nova.compute.manager [None req-46f36def-b286-445c-9c6e-71911fda02f6 tempest-ServersV294TestFqdnHostnames-312089260 tempest-ServersV294TestFqdnHostnames-312089260-project-member] [instance: 2b391628-18a2-4606-8c59-58ba642cee50] Terminating instance [ 1525.368501] env[62519]: DEBUG nova.compute.manager [None req-d69cb494-e284-4152-aa11-3bbc61cb4189 tempest-ServersWithSpecificFlavorTestJSON-2102225660 tempest-ServersWithSpecificFlavorTestJSON-2102225660-project-member] [instance: 029ee07c-705d-452b-9b14-385d69f2fbbb] Start destroying the instance on the hypervisor. 
{{(pid=62519) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3166}} [ 1525.368501] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-d69cb494-e284-4152-aa11-3bbc61cb4189 tempest-ServersWithSpecificFlavorTestJSON-2102225660 tempest-ServersWithSpecificFlavorTestJSON-2102225660-project-member] [instance: 029ee07c-705d-452b-9b14-385d69f2fbbb] Destroying instance {{(pid=62519) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1525.370566] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d74e46b-cd97-4221-8312-9e6e72c7ed78 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1525.378276] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-d69cb494-e284-4152-aa11-3bbc61cb4189 tempest-ServersWithSpecificFlavorTestJSON-2102225660 tempest-ServersWithSpecificFlavorTestJSON-2102225660-project-member] [instance: 029ee07c-705d-452b-9b14-385d69f2fbbb] Powering off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1525.379121] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-693e2502-7b95-4133-ae3c-ec8be503e4a6 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1525.389678] env[62519]: DEBUG oslo_vmware.api [None req-d69cb494-e284-4152-aa11-3bbc61cb4189 tempest-ServersWithSpecificFlavorTestJSON-2102225660 tempest-ServersWithSpecificFlavorTestJSON-2102225660-project-member] Waiting for the task: (returnval){ [ 1525.389678] env[62519]: value = "task-1802280" [ 1525.389678] env[62519]: _type = "Task" [ 1525.389678] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1525.402110] env[62519]: DEBUG oslo_vmware.api [None req-d69cb494-e284-4152-aa11-3bbc61cb4189 tempest-ServersWithSpecificFlavorTestJSON-2102225660 tempest-ServersWithSpecificFlavorTestJSON-2102225660-project-member] Task: {'id': task-1802280, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1525.423892] env[62519]: DEBUG oslo_vmware.api [None req-384607cc-9c11-4f58-8b9f-4201a167011b tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Task: {'id': task-1802279, 'name': ReconfigVM_Task, 'duration_secs': 0.721204} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1525.424151] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-384607cc-9c11-4f58-8b9f-4201a167011b tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] [instance: 3b506d10-a427-47b8-ab5f-c35e450b7eb1] Reconfigured VM instance instance-00000023 to attach disk [datastore1] 3b506d10-a427-47b8-ab5f-c35e450b7eb1/3b506d10-a427-47b8-ab5f-c35e450b7eb1.vmdk or device None with type sparse {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1525.425352] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-e3ea8f9a-2aa4-4614-92b8-a3a70071ce8a {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1525.433043] env[62519]: DEBUG oslo_vmware.api [None req-384607cc-9c11-4f58-8b9f-4201a167011b tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Waiting for the task: (returnval){ [ 1525.433043] env[62519]: value = "task-1802281" [ 1525.433043] env[62519]: _type = "Task" [ 1525.433043] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1525.439771] env[62519]: DEBUG nova.compute.manager [None req-3b2ada86-b023-43f6-9978-a85f83bd40ba tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] [instance: 51eaac08-75fd-49f9-9b1a-cc2a2d799634] Start destroying the instance on the hypervisor. {{(pid=62519) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3166}} [ 1525.440048] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-3b2ada86-b023-43f6-9978-a85f83bd40ba tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] [instance: 51eaac08-75fd-49f9-9b1a-cc2a2d799634] Destroying instance {{(pid=62519) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1525.440856] env[62519]: DEBUG nova.scheduler.client.report [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Inventory has not changed for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 157, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1525.444210] env[62519]: DEBUG oslo_vmware.api [None req-384607cc-9c11-4f58-8b9f-4201a167011b tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Task: {'id': task-1802281, 'name': Rename_Task} progress is 5%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1525.444966] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2067c0ec-3ab1-42d9-8044-c2f2023ec871 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1525.452632] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-3b2ada86-b023-43f6-9978-a85f83bd40ba tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] [instance: 51eaac08-75fd-49f9-9b1a-cc2a2d799634] Powering off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1525.452972] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-6f0dc1d2-e424-42fc-8731-3051f3093817 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1525.459155] env[62519]: DEBUG oslo_vmware.api [None req-3b2ada86-b023-43f6-9978-a85f83bd40ba tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Waiting for the task: (returnval){ [ 1525.459155] env[62519]: value = "task-1802282" [ 1525.459155] env[62519]: _type = "Task" [ 1525.459155] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1525.469440] env[62519]: DEBUG oslo_vmware.api [None req-3b2ada86-b023-43f6-9978-a85f83bd40ba tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Task: {'id': task-1802282, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1525.542098] env[62519]: DEBUG nova.compute.manager [None req-46f36def-b286-445c-9c6e-71911fda02f6 tempest-ServersV294TestFqdnHostnames-312089260 tempest-ServersV294TestFqdnHostnames-312089260-project-member] [instance: 2b391628-18a2-4606-8c59-58ba642cee50] Start destroying the instance on the hypervisor. 
{{(pid=62519) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3166}} [ 1525.542800] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-46f36def-b286-445c-9c6e-71911fda02f6 tempest-ServersV294TestFqdnHostnames-312089260 tempest-ServersV294TestFqdnHostnames-312089260-project-member] [instance: 2b391628-18a2-4606-8c59-58ba642cee50] Destroying instance {{(pid=62519) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1525.543636] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a56d5ccb-b40c-43db-9006-27dda643c746 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1525.555698] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-46f36def-b286-445c-9c6e-71911fda02f6 tempest-ServersV294TestFqdnHostnames-312089260 tempest-ServersV294TestFqdnHostnames-312089260-project-member] [instance: 2b391628-18a2-4606-8c59-58ba642cee50] Powering off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1525.556172] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a12a21da-c2c0-4735-82c9-4fcd7f3f8b6c {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1525.563516] env[62519]: DEBUG oslo_vmware.api [None req-46f36def-b286-445c-9c6e-71911fda02f6 tempest-ServersV294TestFqdnHostnames-312089260 tempest-ServersV294TestFqdnHostnames-312089260-project-member] Waiting for the task: (returnval){ [ 1525.563516] env[62519]: value = "task-1802283" [ 1525.563516] env[62519]: _type = "Task" [ 1525.563516] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1525.578573] env[62519]: DEBUG oslo_vmware.api [None req-46f36def-b286-445c-9c6e-71911fda02f6 tempest-ServersV294TestFqdnHostnames-312089260 tempest-ServersV294TestFqdnHostnames-312089260-project-member] Task: {'id': task-1802283, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1525.587903] env[62519]: DEBUG oslo_concurrency.lockutils [None req-7f6d51ef-3fb2-4405-8ddb-ee0d7b872e58 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Acquiring lock "302edcd3-bd6e-41da-b731-4d4c1bb5c3c1" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1525.588311] env[62519]: DEBUG oslo_concurrency.lockutils [None req-7f6d51ef-3fb2-4405-8ddb-ee0d7b872e58 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Lock "302edcd3-bd6e-41da-b731-4d4c1bb5c3c1" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1525.594886] env[62519]: DEBUG oslo_concurrency.lockutils [None req-a05ef864-7b2b-4646-a83f-596c8b3440a9 tempest-ServersWithSpecificFlavorTestJSON-2102225660 tempest-ServersWithSpecificFlavorTestJSON-2102225660-project-member] Acquiring lock "f0925a44-c15b-4415-99bc-1b2366292fe4" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1525.595225] env[62519]: DEBUG oslo_concurrency.lockutils [None req-a05ef864-7b2b-4646-a83f-596c8b3440a9 tempest-ServersWithSpecificFlavorTestJSON-2102225660 tempest-ServersWithSpecificFlavorTestJSON-2102225660-project-member] Lock "f0925a44-c15b-4415-99bc-1b2366292fe4" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1525.900739] env[62519]: DEBUG oslo_vmware.api [None req-d69cb494-e284-4152-aa11-3bbc61cb4189 tempest-ServersWithSpecificFlavorTestJSON-2102225660 tempest-ServersWithSpecificFlavorTestJSON-2102225660-project-member] Task: {'id': task-1802280, 'name': PowerOffVM_Task, 'duration_secs': 0.274453} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1525.901288] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-d69cb494-e284-4152-aa11-3bbc61cb4189 tempest-ServersWithSpecificFlavorTestJSON-2102225660 tempest-ServersWithSpecificFlavorTestJSON-2102225660-project-member] [instance: 029ee07c-705d-452b-9b14-385d69f2fbbb] Powered off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1525.902622] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-d69cb494-e284-4152-aa11-3bbc61cb4189 tempest-ServersWithSpecificFlavorTestJSON-2102225660 tempest-ServersWithSpecificFlavorTestJSON-2102225660-project-member] [instance: 029ee07c-705d-452b-9b14-385d69f2fbbb] Unregistering the VM {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1525.902622] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-009be15f-2108-401a-b2c0-5bd0d1b919ed {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1525.940514] env[62519]: DEBUG oslo_vmware.api [None req-384607cc-9c11-4f58-8b9f-4201a167011b tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Task: {'id': task-1802281, 'name': Rename_Task, 'duration_secs': 0.146305} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1525.940884] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-384607cc-9c11-4f58-8b9f-4201a167011b tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] [instance: 3b506d10-a427-47b8-ab5f-c35e450b7eb1] Powering on the VM {{(pid=62519) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1525.941206] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f6aaf8a6-f720-4b81-a9a3-6c483aaed780 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1525.947433] env[62519]: DEBUG oslo_vmware.api [None req-384607cc-9c11-4f58-8b9f-4201a167011b tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Waiting for the task: (returnval){ [ 1525.947433] env[62519]: value = "task-1802285" [ 1525.947433] env[62519]: _type = "Task" [ 1525.947433] env[62519]: } to complete. 
{{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1525.951024] env[62519]: DEBUG nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62519) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1525.953032] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 9.879s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1525.953032] env[62519]: DEBUG oslo_concurrency.lockutils [None req-6acc3e5a-9e65-48ca-804d-bd19df3814de tempest-FloatingIPsAssociationTestJSON-131832216 tempest-FloatingIPsAssociationTestJSON-131832216-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 34.029s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1525.953436] env[62519]: INFO nova.compute.claims [None req-6acc3e5a-9e65-48ca-804d-bd19df3814de tempest-FloatingIPsAssociationTestJSON-131832216 tempest-FloatingIPsAssociationTestJSON-131832216-project-member] [instance: 09eefc1a-011b-4d2c-ab75-a1fcee740907] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1525.956008] env[62519]: DEBUG oslo_service.periodic_task [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=62519) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1525.956197] env[62519]: DEBUG nova.compute.manager [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Cleaning up deleted instances {{(pid=62519) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11720}} [ 1525.962519] env[62519]: DEBUG oslo_vmware.api [None req-384607cc-9c11-4f58-8b9f-4201a167011b tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Task: {'id': task-1802285, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1525.970570] env[62519]: DEBUG oslo_vmware.api [None req-3b2ada86-b023-43f6-9978-a85f83bd40ba tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Task: {'id': task-1802282, 'name': PowerOffVM_Task, 'duration_secs': 0.213145} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1525.970832] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-3b2ada86-b023-43f6-9978-a85f83bd40ba tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] [instance: 51eaac08-75fd-49f9-9b1a-cc2a2d799634] Powered off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1525.971102] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-3b2ada86-b023-43f6-9978-a85f83bd40ba tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] [instance: 51eaac08-75fd-49f9-9b1a-cc2a2d799634] Unregistering the VM {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1525.971384] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-13b550e8-5d26-41f0-8cd1-4ac81257e427 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1526.007797] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-d69cb494-e284-4152-aa11-3bbc61cb4189 tempest-ServersWithSpecificFlavorTestJSON-2102225660 tempest-ServersWithSpecificFlavorTestJSON-2102225660-project-member] [instance: 029ee07c-705d-452b-9b14-385d69f2fbbb] Unregistered the VM {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1526.008011] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-d69cb494-e284-4152-aa11-3bbc61cb4189 tempest-ServersWithSpecificFlavorTestJSON-2102225660 tempest-ServersWithSpecificFlavorTestJSON-2102225660-project-member] [instance: 029ee07c-705d-452b-9b14-385d69f2fbbb] Deleting contents of the VM from datastore datastore1 {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1526.008250] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-d69cb494-e284-4152-aa11-3bbc61cb4189 tempest-ServersWithSpecificFlavorTestJSON-2102225660 tempest-ServersWithSpecificFlavorTestJSON-2102225660-project-member] Deleting the datastore file [datastore1] 029ee07c-705d-452b-9b14-385d69f2fbbb {{(pid=62519) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1526.008878] env[62519]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d1d8b241-5ca0-448f-9735-b60d926d705b {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1526.016484] env[62519]: DEBUG oslo_vmware.api [None req-d69cb494-e284-4152-aa11-3bbc61cb4189 tempest-ServersWithSpecificFlavorTestJSON-2102225660 tempest-ServersWithSpecificFlavorTestJSON-2102225660-project-member] Waiting for the task: (returnval){ [ 1526.016484] env[62519]: value = "task-1802287" [ 1526.016484] env[62519]: _type = "Task" [ 1526.016484] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1526.025548] env[62519]: DEBUG oslo_vmware.api [None req-d69cb494-e284-4152-aa11-3bbc61cb4189 tempest-ServersWithSpecificFlavorTestJSON-2102225660 tempest-ServersWithSpecificFlavorTestJSON-2102225660-project-member] Task: {'id': task-1802287, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1526.048181] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-3b2ada86-b023-43f6-9978-a85f83bd40ba tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] [instance: 51eaac08-75fd-49f9-9b1a-cc2a2d799634] Unregistered the VM {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1526.048789] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-3b2ada86-b023-43f6-9978-a85f83bd40ba tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] [instance: 51eaac08-75fd-49f9-9b1a-cc2a2d799634] Deleting contents of the VM from datastore datastore1 {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1526.049009] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-3b2ada86-b023-43f6-9978-a85f83bd40ba tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Deleting the datastore file [datastore1] 51eaac08-75fd-49f9-9b1a-cc2a2d799634 {{(pid=62519) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1526.049397] env[62519]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f2f6ebe7-343f-4c2a-aa0e-e86e30dab978 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1526.056263] env[62519]: DEBUG oslo_vmware.api [None req-3b2ada86-b023-43f6-9978-a85f83bd40ba tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Waiting for the task: (returnval){ [ 1526.056263] env[62519]: value = "task-1802288" [ 1526.056263] env[62519]: _type = "Task" [ 1526.056263] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1526.064374] env[62519]: DEBUG oslo_vmware.api [None req-3b2ada86-b023-43f6-9978-a85f83bd40ba tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Task: {'id': task-1802288, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1526.071719] env[62519]: DEBUG oslo_vmware.api [None req-46f36def-b286-445c-9c6e-71911fda02f6 tempest-ServersV294TestFqdnHostnames-312089260 tempest-ServersV294TestFqdnHostnames-312089260-project-member] Task: {'id': task-1802283, 'name': PowerOffVM_Task, 'duration_secs': 0.228331} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1526.071961] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-46f36def-b286-445c-9c6e-71911fda02f6 tempest-ServersV294TestFqdnHostnames-312089260 tempest-ServersV294TestFqdnHostnames-312089260-project-member] [instance: 2b391628-18a2-4606-8c59-58ba642cee50] Powered off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1526.072145] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-46f36def-b286-445c-9c6e-71911fda02f6 tempest-ServersV294TestFqdnHostnames-312089260 tempest-ServersV294TestFqdnHostnames-312089260-project-member] [instance: 2b391628-18a2-4606-8c59-58ba642cee50] Unregistering the VM {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1526.072388] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-dbf1f805-4958-4608-bd2b-5e47ca193321 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1526.463530] env[62519]: DEBUG nova.compute.manager [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] There are 11 instances to clean {{(pid=62519) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11729}} [ 1526.463808] env[62519]: DEBUG nova.compute.manager [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] [instance: 681ef7a9-3b24-450a-9034-6d30177995d7] Instance has had 0 of 5 cleanup attempts {{(pid=62519) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11733}} [ 1526.465569] env[62519]: DEBUG oslo_vmware.api [None req-384607cc-9c11-4f58-8b9f-4201a167011b tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Task: {'id': task-1802285, 'name': PowerOnVM_Task} progress is 78%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1526.528660] env[62519]: DEBUG oslo_vmware.api [None req-d69cb494-e284-4152-aa11-3bbc61cb4189 tempest-ServersWithSpecificFlavorTestJSON-2102225660 tempest-ServersWithSpecificFlavorTestJSON-2102225660-project-member] Task: {'id': task-1802287, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.283478} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1526.528923] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-d69cb494-e284-4152-aa11-3bbc61cb4189 tempest-ServersWithSpecificFlavorTestJSON-2102225660 tempest-ServersWithSpecificFlavorTestJSON-2102225660-project-member] Deleted the datastore file {{(pid=62519) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1526.529124] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-d69cb494-e284-4152-aa11-3bbc61cb4189 tempest-ServersWithSpecificFlavorTestJSON-2102225660 tempest-ServersWithSpecificFlavorTestJSON-2102225660-project-member] [instance: 029ee07c-705d-452b-9b14-385d69f2fbbb] Deleted contents of the VM from datastore datastore1 {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1526.529301] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-d69cb494-e284-4152-aa11-3bbc61cb4189 tempest-ServersWithSpecificFlavorTestJSON-2102225660 tempest-ServersWithSpecificFlavorTestJSON-2102225660-project-member] [instance: 029ee07c-705d-452b-9b14-385d69f2fbbb] Instance destroyed {{(pid=62519) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1526.529474] env[62519]: INFO nova.compute.manager [None req-d69cb494-e284-4152-aa11-3bbc61cb4189 tempest-ServersWithSpecificFlavorTestJSON-2102225660 tempest-ServersWithSpecificFlavorTestJSON-2102225660-project-member] [instance: 029ee07c-705d-452b-9b14-385d69f2fbbb] Took 1.16 seconds to destroy the instance on the hypervisor. [ 1526.529747] env[62519]: DEBUG oslo.service.loopingcall [None req-d69cb494-e284-4152-aa11-3bbc61cb4189 tempest-ServersWithSpecificFlavorTestJSON-2102225660 tempest-ServersWithSpecificFlavorTestJSON-2102225660-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62519) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1526.529977] env[62519]: DEBUG nova.compute.manager [-] [instance: 029ee07c-705d-452b-9b14-385d69f2fbbb] Deallocating network for instance {{(pid=62519) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2297}} [ 1526.530089] env[62519]: DEBUG nova.network.neutron [-] [instance: 029ee07c-705d-452b-9b14-385d69f2fbbb] deallocate_for_instance() {{(pid=62519) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1526.566901] env[62519]: DEBUG oslo_vmware.api [None req-3b2ada86-b023-43f6-9978-a85f83bd40ba tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Task: {'id': task-1802288, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.344841} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1526.567274] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-3b2ada86-b023-43f6-9978-a85f83bd40ba tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Deleted the datastore file {{(pid=62519) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1526.567545] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-3b2ada86-b023-43f6-9978-a85f83bd40ba tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] [instance: 51eaac08-75fd-49f9-9b1a-cc2a2d799634] Deleted contents of the VM from datastore datastore1 {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1526.567778] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-3b2ada86-b023-43f6-9978-a85f83bd40ba tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] [instance: 51eaac08-75fd-49f9-9b1a-cc2a2d799634] Instance destroyed {{(pid=62519) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1526.567984] env[62519]: INFO nova.compute.manager [None req-3b2ada86-b023-43f6-9978-a85f83bd40ba tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] [instance: 51eaac08-75fd-49f9-9b1a-cc2a2d799634] Took 1.13 seconds to destroy the instance on the hypervisor. [ 1526.568257] env[62519]: DEBUG oslo.service.loopingcall [None req-3b2ada86-b023-43f6-9978-a85f83bd40ba tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62519) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1526.568510] env[62519]: DEBUG nova.compute.manager [-] [instance: 51eaac08-75fd-49f9-9b1a-cc2a2d799634] Deallocating network for instance {{(pid=62519) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2297}} [ 1526.568594] env[62519]: DEBUG nova.network.neutron [-] [instance: 51eaac08-75fd-49f9-9b1a-cc2a2d799634] deallocate_for_instance() {{(pid=62519) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1526.962898] env[62519]: DEBUG oslo_vmware.api [None req-384607cc-9c11-4f58-8b9f-4201a167011b tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Task: {'id': task-1802285, 'name': PowerOnVM_Task, 'duration_secs': 0.649408} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1526.964715] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-384607cc-9c11-4f58-8b9f-4201a167011b tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] [instance: 3b506d10-a427-47b8-ab5f-c35e450b7eb1] Powered on the VM {{(pid=62519) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1526.965041] env[62519]: INFO nova.compute.manager [None req-384607cc-9c11-4f58-8b9f-4201a167011b tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] [instance: 3b506d10-a427-47b8-ab5f-c35e450b7eb1] Took 8.85 seconds to spawn the instance on the hypervisor. 
[ 1526.965293] env[62519]: DEBUG nova.compute.manager [None req-384607cc-9c11-4f58-8b9f-4201a167011b tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] [instance: 3b506d10-a427-47b8-ab5f-c35e450b7eb1] Checking state {{(pid=62519) _get_power_state /opt/stack/nova/nova/compute/manager.py:1799}} [ 1526.965705] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-46f36def-b286-445c-9c6e-71911fda02f6 tempest-ServersV294TestFqdnHostnames-312089260 tempest-ServersV294TestFqdnHostnames-312089260-project-member] [instance: 2b391628-18a2-4606-8c59-58ba642cee50] Unregistered the VM {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1526.966090] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-46f36def-b286-445c-9c6e-71911fda02f6 tempest-ServersV294TestFqdnHostnames-312089260 tempest-ServersV294TestFqdnHostnames-312089260-project-member] [instance: 2b391628-18a2-4606-8c59-58ba642cee50] Deleting contents of the VM from datastore datastore1 {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1526.966425] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-46f36def-b286-445c-9c6e-71911fda02f6 tempest-ServersV294TestFqdnHostnames-312089260 tempest-ServersV294TestFqdnHostnames-312089260-project-member] Deleting the datastore file [datastore1] 2b391628-18a2-4606-8c59-58ba642cee50 {{(pid=62519) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1526.967432] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21c450f3-ff9b-4d53-941e-7abc05927dc7 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1526.970550] env[62519]: DEBUG nova.compute.manager [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] [instance: 021c3287-5887-457e-9b3a-233308fb9b23] Instance has had 0 of 5 cleanup attempts {{(pid=62519) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11733}} [ 1526.976329] env[62519]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ec6c1daa-7082-4546-8c74-fb9724d2ab33 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1526.989629] env[62519]: DEBUG oslo_vmware.api [None req-46f36def-b286-445c-9c6e-71911fda02f6 tempest-ServersV294TestFqdnHostnames-312089260 tempest-ServersV294TestFqdnHostnames-312089260-project-member] Waiting for the task: (returnval){ [ 1526.989629] env[62519]: value = "task-1802290" [ 1526.989629] env[62519]: _type = "Task" [ 1526.989629] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1526.999105] env[62519]: DEBUG oslo_vmware.api [None req-46f36def-b286-445c-9c6e-71911fda02f6 tempest-ServersV294TestFqdnHostnames-312089260 tempest-ServersV294TestFqdnHostnames-312089260-project-member] Task: {'id': task-1802290, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1527.215487] env[62519]: DEBUG nova.compute.manager [req-2fe22ef3-cd35-4c21-96f7-1df35b535171 req-25691a24-52e2-4804-bdb5-9bc79addb534 service nova] [instance: 51eaac08-75fd-49f9-9b1a-cc2a2d799634] Received event network-vif-deleted-8473d03f-ba3c-4b3a-a79f-2eedc6ab90a4 {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1527.215487] env[62519]: INFO nova.compute.manager [req-2fe22ef3-cd35-4c21-96f7-1df35b535171 req-25691a24-52e2-4804-bdb5-9bc79addb534 service nova] [instance: 51eaac08-75fd-49f9-9b1a-cc2a2d799634] Neutron deleted interface 8473d03f-ba3c-4b3a-a79f-2eedc6ab90a4; detaching it from the instance and deleting it from the info cache [ 1527.215487] env[62519]: DEBUG nova.network.neutron [req-2fe22ef3-cd35-4c21-96f7-1df35b535171 req-25691a24-52e2-4804-bdb5-9bc79addb534 service nova] [instance: 51eaac08-75fd-49f9-9b1a-cc2a2d799634] Updating instance_info_cache with network_info: [] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1527.373384] env[62519]: DEBUG nova.network.neutron [-] [instance: 51eaac08-75fd-49f9-9b1a-cc2a2d799634] Updating instance_info_cache with network_info: [] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1527.480615] env[62519]: DEBUG nova.compute.manager [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] [instance: 49221ea3-d457-4cf5-97a9-9ae74c4e86fb] Instance has had 0 of 5 cleanup attempts {{(pid=62519) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11733}} [ 1527.508354] env[62519]: INFO nova.compute.manager [None req-384607cc-9c11-4f58-8b9f-4201a167011b tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] [instance: 3b506d10-a427-47b8-ab5f-c35e450b7eb1] Took 38.62 seconds to build instance. [ 1527.513749] env[62519]: DEBUG oslo_vmware.api [None req-46f36def-b286-445c-9c6e-71911fda02f6 tempest-ServersV294TestFqdnHostnames-312089260 tempest-ServersV294TestFqdnHostnames-312089260-project-member] Task: {'id': task-1802290, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.226593} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1527.517800] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-46f36def-b286-445c-9c6e-71911fda02f6 tempest-ServersV294TestFqdnHostnames-312089260 tempest-ServersV294TestFqdnHostnames-312089260-project-member] Deleted the datastore file {{(pid=62519) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1527.518130] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-46f36def-b286-445c-9c6e-71911fda02f6 tempest-ServersV294TestFqdnHostnames-312089260 tempest-ServersV294TestFqdnHostnames-312089260-project-member] [instance: 2b391628-18a2-4606-8c59-58ba642cee50] Deleted contents of the VM from datastore datastore1 {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1527.518388] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-46f36def-b286-445c-9c6e-71911fda02f6 tempest-ServersV294TestFqdnHostnames-312089260 tempest-ServersV294TestFqdnHostnames-312089260-project-member] [instance: 2b391628-18a2-4606-8c59-58ba642cee50] Instance destroyed {{(pid=62519) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1527.518643] env[62519]: INFO nova.compute.manager [None req-46f36def-b286-445c-9c6e-71911fda02f6 tempest-ServersV294TestFqdnHostnames-312089260 tempest-ServersV294TestFqdnHostnames-312089260-project-member] [instance: 2b391628-18a2-4606-8c59-58ba642cee50] Took 1.98 seconds to destroy the instance on the hypervisor. [ 1527.518993] env[62519]: DEBUG oslo.service.loopingcall [None req-46f36def-b286-445c-9c6e-71911fda02f6 tempest-ServersV294TestFqdnHostnames-312089260 tempest-ServersV294TestFqdnHostnames-312089260-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62519) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1527.519563] env[62519]: DEBUG nova.compute.manager [-] [instance: 2b391628-18a2-4606-8c59-58ba642cee50] Deallocating network for instance {{(pid=62519) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2297}} [ 1527.519698] env[62519]: DEBUG nova.network.neutron [-] [instance: 2b391628-18a2-4606-8c59-58ba642cee50] deallocate_for_instance() {{(pid=62519) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1527.696590] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d249463-6642-49b5-b201-8ae70951726e {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1527.714180] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e06da65-a464-4f2e-81ff-cbd5aefcd589 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1527.722046] env[62519]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-0de1db19-d890-4527-bcb3-38aac93162f1 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1527.776707] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef888513-cf49-4ea0-b5ff-bd9c261bad6f {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1527.785787] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aea5b63d-a7bf-48b7-9591-04d573c0dfea {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1527.805181] env[62519]: DEBUG oslo_concurrency.lockutils [None req-b5a4e29f-dafc-4df8-a00f-bb9bcadbb03e tempest-VolumesAssistedSnapshotsTest-904167312 tempest-VolumesAssistedSnapshotsTest-904167312-project-member] Acquiring lock "91902e7f-8c15-447b-a3a8-04433434b1b6" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1527.805954] env[62519]: DEBUG oslo_concurrency.lockutils [None req-b5a4e29f-dafc-4df8-a00f-bb9bcadbb03e tempest-VolumesAssistedSnapshotsTest-904167312 tempest-VolumesAssistedSnapshotsTest-904167312-project-member] Lock "91902e7f-8c15-447b-a3a8-04433434b1b6" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1527.806295] env[62519]: DEBUG oslo_concurrency.lockutils [None req-b5a4e29f-dafc-4df8-a00f-bb9bcadbb03e tempest-VolumesAssistedSnapshotsTest-904167312 tempest-VolumesAssistedSnapshotsTest-904167312-project-member] Acquiring lock "91902e7f-8c15-447b-a3a8-04433434b1b6-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1527.806368] env[62519]: DEBUG oslo_concurrency.lockutils [None req-b5a4e29f-dafc-4df8-a00f-bb9bcadbb03e tempest-VolumesAssistedSnapshotsTest-904167312 tempest-VolumesAssistedSnapshotsTest-904167312-project-member] Lock 
"91902e7f-8c15-447b-a3a8-04433434b1b6-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1527.806725] env[62519]: DEBUG oslo_concurrency.lockutils [None req-b5a4e29f-dafc-4df8-a00f-bb9bcadbb03e tempest-VolumesAssistedSnapshotsTest-904167312 tempest-VolumesAssistedSnapshotsTest-904167312-project-member] Lock "91902e7f-8c15-447b-a3a8-04433434b1b6-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1527.809917] env[62519]: INFO nova.compute.manager [None req-b5a4e29f-dafc-4df8-a00f-bb9bcadbb03e tempest-VolumesAssistedSnapshotsTest-904167312 tempest-VolumesAssistedSnapshotsTest-904167312-project-member] [instance: 91902e7f-8c15-447b-a3a8-04433434b1b6] Terminating instance [ 1527.818925] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0b60a32-a3a5-4113-8f43-1988b15165cb {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1527.828327] env[62519]: DEBUG nova.compute.manager [None req-b5a4e29f-dafc-4df8-a00f-bb9bcadbb03e tempest-VolumesAssistedSnapshotsTest-904167312 tempest-VolumesAssistedSnapshotsTest-904167312-project-member] [instance: 91902e7f-8c15-447b-a3a8-04433434b1b6] Start destroying the instance on the hypervisor. {{(pid=62519) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3166}} [ 1527.828980] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-b5a4e29f-dafc-4df8-a00f-bb9bcadbb03e tempest-VolumesAssistedSnapshotsTest-904167312 tempest-VolumesAssistedSnapshotsTest-904167312-project-member] [instance: 91902e7f-8c15-447b-a3a8-04433434b1b6] Destroying instance {{(pid=62519) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1527.851838] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ed1d8fd-5539-4e46-b233-cd8bca51aca7 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1527.855195] env[62519]: DEBUG nova.compute.manager [req-2fe22ef3-cd35-4c21-96f7-1df35b535171 req-25691a24-52e2-4804-bdb5-9bc79addb534 service nova] [instance: 51eaac08-75fd-49f9-9b1a-cc2a2d799634] Detach interface failed, port_id=8473d03f-ba3c-4b3a-a79f-2eedc6ab90a4, reason: Instance 51eaac08-75fd-49f9-9b1a-cc2a2d799634 could not be found. 
{{(pid=62519) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11455}} [ 1527.866882] env[62519]: DEBUG nova.compute.provider_tree [None req-6acc3e5a-9e65-48ca-804d-bd19df3814de tempest-FloatingIPsAssociationTestJSON-131832216 tempest-FloatingIPsAssociationTestJSON-131832216-project-member] Inventory has not changed in ProviderTree for provider: f8ca0d98-9158-4b85-ae0e-b106f966dd44 {{(pid=62519) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1527.870536] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-b5a4e29f-dafc-4df8-a00f-bb9bcadbb03e tempest-VolumesAssistedSnapshotsTest-904167312 tempest-VolumesAssistedSnapshotsTest-904167312-project-member] [instance: 91902e7f-8c15-447b-a3a8-04433434b1b6] Powering off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1527.871061] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e5162a29-4b31-4197-8d68-d7e96783dbfe {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1527.876930] env[62519]: INFO nova.compute.manager [-] [instance: 51eaac08-75fd-49f9-9b1a-cc2a2d799634] Took 1.31 seconds to deallocate network for instance. [ 1527.880194] env[62519]: DEBUG oslo_vmware.api [None req-b5a4e29f-dafc-4df8-a00f-bb9bcadbb03e tempest-VolumesAssistedSnapshotsTest-904167312 tempest-VolumesAssistedSnapshotsTest-904167312-project-member] Waiting for the task: (returnval){ [ 1527.880194] env[62519]: value = "task-1802291" [ 1527.880194] env[62519]: _type = "Task" [ 1527.880194] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1527.895958] env[62519]: DEBUG oslo_vmware.api [None req-b5a4e29f-dafc-4df8-a00f-bb9bcadbb03e tempest-VolumesAssistedSnapshotsTest-904167312 tempest-VolumesAssistedSnapshotsTest-904167312-project-member] Task: {'id': task-1802291, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1527.990561] env[62519]: DEBUG nova.compute.manager [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] [instance: c612a1be-fb39-416d-a9d2-d206582e5aeb] Instance has had 0 of 5 cleanup attempts {{(pid=62519) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11733}} [ 1528.052017] env[62519]: DEBUG nova.compute.manager [req-362cfad9-b7f8-4294-8e2e-50ca33bf2b82 req-f47a26d6-2e86-4aff-9c48-9d40eb483ccd service nova] [instance: 029ee07c-705d-452b-9b14-385d69f2fbbb] Received event network-vif-deleted-d260fe42-dd87-4c6b-a292-ce14b9314974 {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1528.052017] env[62519]: INFO nova.compute.manager [req-362cfad9-b7f8-4294-8e2e-50ca33bf2b82 req-f47a26d6-2e86-4aff-9c48-9d40eb483ccd service nova] [instance: 029ee07c-705d-452b-9b14-385d69f2fbbb] Neutron deleted interface d260fe42-dd87-4c6b-a292-ce14b9314974; detaching it from the instance and deleting it from the info cache [ 1528.052017] env[62519]: DEBUG nova.network.neutron [req-362cfad9-b7f8-4294-8e2e-50ca33bf2b82 req-f47a26d6-2e86-4aff-9c48-9d40eb483ccd service nova] [instance: 029ee07c-705d-452b-9b14-385d69f2fbbb] Updating instance_info_cache with network_info: [] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1528.213657] env[62519]: DEBUG nova.network.neutron [-] [instance: 029ee07c-705d-452b-9b14-385d69f2fbbb] Updating instance_info_cache with network_info: [] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1528.372998] env[62519]: DEBUG nova.scheduler.client.report [None req-6acc3e5a-9e65-48ca-804d-bd19df3814de tempest-FloatingIPsAssociationTestJSON-131832216 tempest-FloatingIPsAssociationTestJSON-131832216-project-member] Inventory has not changed for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 157, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1528.385550] env[62519]: DEBUG oslo_concurrency.lockutils [None req-3b2ada86-b023-43f6-9978-a85f83bd40ba tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1528.394801] env[62519]: DEBUG oslo_vmware.api [None req-b5a4e29f-dafc-4df8-a00f-bb9bcadbb03e tempest-VolumesAssistedSnapshotsTest-904167312 tempest-VolumesAssistedSnapshotsTest-904167312-project-member] Task: {'id': task-1802291, 'name': PowerOffVM_Task, 'duration_secs': 0.389788} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1528.395464] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-b5a4e29f-dafc-4df8-a00f-bb9bcadbb03e tempest-VolumesAssistedSnapshotsTest-904167312 tempest-VolumesAssistedSnapshotsTest-904167312-project-member] [instance: 91902e7f-8c15-447b-a3a8-04433434b1b6] Powered off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1528.395464] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-b5a4e29f-dafc-4df8-a00f-bb9bcadbb03e tempest-VolumesAssistedSnapshotsTest-904167312 tempest-VolumesAssistedSnapshotsTest-904167312-project-member] [instance: 91902e7f-8c15-447b-a3a8-04433434b1b6] Unregistering the VM {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1528.395684] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-7de6507e-0d2a-493a-ad9e-9adedb7e6232 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1528.487043] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-b5a4e29f-dafc-4df8-a00f-bb9bcadbb03e tempest-VolumesAssistedSnapshotsTest-904167312 tempest-VolumesAssistedSnapshotsTest-904167312-project-member] [instance: 91902e7f-8c15-447b-a3a8-04433434b1b6] Unregistered the VM {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1528.487496] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-b5a4e29f-dafc-4df8-a00f-bb9bcadbb03e tempest-VolumesAssistedSnapshotsTest-904167312 tempest-VolumesAssistedSnapshotsTest-904167312-project-member] [instance: 91902e7f-8c15-447b-a3a8-04433434b1b6] Deleting contents of the VM from datastore datastore1 {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1528.487496] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-b5a4e29f-dafc-4df8-a00f-bb9bcadbb03e tempest-VolumesAssistedSnapshotsTest-904167312 tempest-VolumesAssistedSnapshotsTest-904167312-project-member] Deleting the datastore file [datastore1] 91902e7f-8c15-447b-a3a8-04433434b1b6 {{(pid=62519) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1528.487690] env[62519]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-766778c8-7622-406a-8499-b4788443ebe8 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1528.494653] env[62519]: DEBUG oslo_vmware.api [None req-b5a4e29f-dafc-4df8-a00f-bb9bcadbb03e tempest-VolumesAssistedSnapshotsTest-904167312 tempest-VolumesAssistedSnapshotsTest-904167312-project-member] Waiting for the task: (returnval){ [ 1528.494653] env[62519]: value = "task-1802293" [ 1528.494653] env[62519]: _type = "Task" [ 1528.494653] env[62519]: } to complete. 
{{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1528.498610] env[62519]: DEBUG nova.compute.manager [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] [instance: 31f55ece-82e6-40ad-ad7c-1af645f307bf] Instance has had 0 of 5 cleanup attempts {{(pid=62519) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11733}} [ 1528.508077] env[62519]: DEBUG oslo_vmware.api [None req-b5a4e29f-dafc-4df8-a00f-bb9bcadbb03e tempest-VolumesAssistedSnapshotsTest-904167312 tempest-VolumesAssistedSnapshotsTest-904167312-project-member] Task: {'id': task-1802293, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1528.554489] env[62519]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-5771e6cb-b82a-46d8-bb5e-9bfcb687941f {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1528.565584] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b26b3d7-9dc7-4e63-b704-cc95d619d861 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1528.597934] env[62519]: DEBUG nova.compute.manager [req-362cfad9-b7f8-4294-8e2e-50ca33bf2b82 req-f47a26d6-2e86-4aff-9c48-9d40eb483ccd service nova] [instance: 029ee07c-705d-452b-9b14-385d69f2fbbb] Detach interface failed, port_id=d260fe42-dd87-4c6b-a292-ce14b9314974, reason: Instance 029ee07c-705d-452b-9b14-385d69f2fbbb could not be found. {{(pid=62519) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11455}} [ 1528.716406] env[62519]: INFO nova.compute.manager [-] [instance: 029ee07c-705d-452b-9b14-385d69f2fbbb] Took 2.19 seconds to deallocate network for instance. [ 1528.789443] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a504ec09-6c63-4e26-8181-764f1dd4dc8f {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1528.796694] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-b22b6e1a-be00-4f32-a38b-7807bfc481d2 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] [instance: 3b506d10-a427-47b8-ab5f-c35e450b7eb1] Suspending the VM {{(pid=62519) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1162}} [ 1528.796694] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.SuspendVM_Task with opID=oslo.vmware-1e6819a4-9fec-4e5f-9c98-e6a84524e288 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1528.803245] env[62519]: DEBUG oslo_vmware.api [None req-b22b6e1a-be00-4f32-a38b-7807bfc481d2 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Waiting for the task: (returnval){ [ 1528.803245] env[62519]: value = "task-1802294" [ 1528.803245] env[62519]: _type = "Task" [ 1528.803245] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1528.815361] env[62519]: DEBUG oslo_vmware.api [None req-b22b6e1a-be00-4f32-a38b-7807bfc481d2 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Task: {'id': task-1802294, 'name': SuspendVM_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1528.827346] env[62519]: DEBUG nova.network.neutron [-] [instance: 2b391628-18a2-4606-8c59-58ba642cee50] Updating instance_info_cache with network_info: [] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1528.880275] env[62519]: DEBUG oslo_concurrency.lockutils [None req-6acc3e5a-9e65-48ca-804d-bd19df3814de tempest-FloatingIPsAssociationTestJSON-131832216 tempest-FloatingIPsAssociationTestJSON-131832216-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.928s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1528.881240] env[62519]: DEBUG nova.compute.manager [None req-6acc3e5a-9e65-48ca-804d-bd19df3814de tempest-FloatingIPsAssociationTestJSON-131832216 tempest-FloatingIPsAssociationTestJSON-131832216-project-member] [instance: 09eefc1a-011b-4d2c-ab75-a1fcee740907] Start building networks asynchronously for instance. {{(pid=62519) _build_resources /opt/stack/nova/nova/compute/manager.py:2838}} [ 1528.885895] env[62519]: DEBUG oslo_concurrency.lockutils [None req-8bccbc50-fc90-49af-b28a-2ab70ae51360 tempest-ServerAddressesNegativeTestJSON-801003583 tempest-ServerAddressesNegativeTestJSON-801003583-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 34.398s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1528.888954] env[62519]: INFO nova.compute.claims [None req-8bccbc50-fc90-49af-b28a-2ab70ae51360 tempest-ServerAddressesNegativeTestJSON-801003583 tempest-ServerAddressesNegativeTestJSON-801003583-project-member] [instance: 22380aef-c725-43a0-a957-06ced9518c21] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1529.006994] env[62519]: DEBUG oslo_vmware.api [None req-b5a4e29f-dafc-4df8-a00f-bb9bcadbb03e tempest-VolumesAssistedSnapshotsTest-904167312 tempest-VolumesAssistedSnapshotsTest-904167312-project-member] Task: {'id': task-1802293, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.452322} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1529.007352] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-b5a4e29f-dafc-4df8-a00f-bb9bcadbb03e tempest-VolumesAssistedSnapshotsTest-904167312 tempest-VolumesAssistedSnapshotsTest-904167312-project-member] Deleted the datastore file {{(pid=62519) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1529.007595] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-b5a4e29f-dafc-4df8-a00f-bb9bcadbb03e tempest-VolumesAssistedSnapshotsTest-904167312 tempest-VolumesAssistedSnapshotsTest-904167312-project-member] [instance: 91902e7f-8c15-447b-a3a8-04433434b1b6] Deleted contents of the VM from datastore datastore1 {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1529.007793] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-b5a4e29f-dafc-4df8-a00f-bb9bcadbb03e tempest-VolumesAssistedSnapshotsTest-904167312 tempest-VolumesAssistedSnapshotsTest-904167312-project-member] [instance: 91902e7f-8c15-447b-a3a8-04433434b1b6] Instance destroyed {{(pid=62519) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1529.007966] env[62519]: INFO nova.compute.manager [None req-b5a4e29f-dafc-4df8-a00f-bb9bcadbb03e tempest-VolumesAssistedSnapshotsTest-904167312 tempest-VolumesAssistedSnapshotsTest-904167312-project-member] [instance: 91902e7f-8c15-447b-a3a8-04433434b1b6] Took 1.18 seconds to destroy the instance on the hypervisor. [ 1529.008299] env[62519]: DEBUG oslo.service.loopingcall [None req-b5a4e29f-dafc-4df8-a00f-bb9bcadbb03e tempest-VolumesAssistedSnapshotsTest-904167312 tempest-VolumesAssistedSnapshotsTest-904167312-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62519) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1529.008700] env[62519]: DEBUG nova.compute.manager [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] [instance: c07e4d30-44bc-417b-8137-97f974aec932] Instance has had 0 of 5 cleanup attempts {{(pid=62519) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11733}} [ 1529.010732] env[62519]: DEBUG nova.compute.manager [-] [instance: 91902e7f-8c15-447b-a3a8-04433434b1b6] Deallocating network for instance {{(pid=62519) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2297}} [ 1529.010857] env[62519]: DEBUG nova.network.neutron [-] [instance: 91902e7f-8c15-447b-a3a8-04433434b1b6] deallocate_for_instance() {{(pid=62519) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1529.016990] env[62519]: DEBUG oslo_concurrency.lockutils [None req-384607cc-9c11-4f58-8b9f-4201a167011b tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Lock "3b506d10-a427-47b8-ab5f-c35e450b7eb1" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 103.975s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1529.222755] env[62519]: DEBUG oslo_concurrency.lockutils [None req-d69cb494-e284-4152-aa11-3bbc61cb4189 tempest-ServersWithSpecificFlavorTestJSON-2102225660 tempest-ServersWithSpecificFlavorTestJSON-2102225660-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1529.269624] env[62519]: DEBUG nova.compute.manager [req-2840608d-830d-4c78-a1f3-9b33d14caa6f req-d120aa05-7dce-4859-93ba-c21e335e005c service nova] [instance: 2b391628-18a2-4606-8c59-58ba642cee50] Received event network-vif-deleted-ac832bcb-7bb4-49de-be18-a8fd0d8ee16e {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1529.314131] env[62519]: DEBUG oslo_vmware.api [None req-b22b6e1a-be00-4f32-a38b-7807bfc481d2 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Task: {'id': task-1802294, 'name': SuspendVM_Task} progress is 58%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1529.329977] env[62519]: INFO nova.compute.manager [-] [instance: 2b391628-18a2-4606-8c59-58ba642cee50] Took 1.81 seconds to deallocate network for instance. [ 1529.399037] env[62519]: DEBUG nova.compute.utils [None req-6acc3e5a-9e65-48ca-804d-bd19df3814de tempest-FloatingIPsAssociationTestJSON-131832216 tempest-FloatingIPsAssociationTestJSON-131832216-project-member] Using /dev/sd instead of None {{(pid=62519) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1529.400779] env[62519]: DEBUG nova.compute.manager [None req-6acc3e5a-9e65-48ca-804d-bd19df3814de tempest-FloatingIPsAssociationTestJSON-131832216 tempest-FloatingIPsAssociationTestJSON-131832216-project-member] [instance: 09eefc1a-011b-4d2c-ab75-a1fcee740907] Allocating IP information in the background. 
{{(pid=62519) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1989}} [ 1529.401020] env[62519]: DEBUG nova.network.neutron [None req-6acc3e5a-9e65-48ca-804d-bd19df3814de tempest-FloatingIPsAssociationTestJSON-131832216 tempest-FloatingIPsAssociationTestJSON-131832216-project-member] [instance: 09eefc1a-011b-4d2c-ab75-a1fcee740907] allocate_for_instance() {{(pid=62519) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1529.453644] env[62519]: DEBUG nova.policy [None req-6acc3e5a-9e65-48ca-804d-bd19df3814de tempest-FloatingIPsAssociationTestJSON-131832216 tempest-FloatingIPsAssociationTestJSON-131832216-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '95893859ec5841cf9d739a73be88f3fd', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '08ccb6256cb446e1837e04580892a31a', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62519) authorize /opt/stack/nova/nova/policy.py:192}} [ 1529.516430] env[62519]: DEBUG nova.compute.manager [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] [instance: 1118c1e5-1aa8-4f52-9fb9-e86531bf83d1] Instance has had 0 of 5 cleanup attempts {{(pid=62519) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11733}} [ 1529.522987] env[62519]: DEBUG nova.compute.manager [None req-4073dc01-e5b2-4480-9b24-60e6c1491ad2 tempest-ImagesNegativeTestJSON-1194326905 tempest-ImagesNegativeTestJSON-1194326905-project-member] [instance: 1b4efc0f-474f-4a32-b199-c14f27b183e2] Starting instance... {{(pid=62519) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2441}} [ 1529.815611] env[62519]: DEBUG oslo_vmware.api [None req-b22b6e1a-be00-4f32-a38b-7807bfc481d2 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Task: {'id': task-1802294, 'name': SuspendVM_Task, 'duration_secs': 0.760237} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1529.815927] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-b22b6e1a-be00-4f32-a38b-7807bfc481d2 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] [instance: 3b506d10-a427-47b8-ab5f-c35e450b7eb1] Suspended the VM {{(pid=62519) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1166}} [ 1529.816143] env[62519]: DEBUG nova.compute.manager [None req-b22b6e1a-be00-4f32-a38b-7807bfc481d2 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] [instance: 3b506d10-a427-47b8-ab5f-c35e450b7eb1] Checking state {{(pid=62519) _get_power_state /opt/stack/nova/nova/compute/manager.py:1799}} [ 1529.816924] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dcad6099-d961-47da-aeb0-92bdcb9b138c {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1529.839031] env[62519]: DEBUG oslo_concurrency.lockutils [None req-46f36def-b286-445c-9c6e-71911fda02f6 tempest-ServersV294TestFqdnHostnames-312089260 tempest-ServersV294TestFqdnHostnames-312089260-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1529.888424] env[62519]: DEBUG nova.network.neutron [None req-6acc3e5a-9e65-48ca-804d-bd19df3814de tempest-FloatingIPsAssociationTestJSON-131832216 tempest-FloatingIPsAssociationTestJSON-131832216-project-member] [instance: 09eefc1a-011b-4d2c-ab75-a1fcee740907] Successfully created port: 7948e8a1-83dc-4329-aa44-813b4e25c1c7 {{(pid=62519) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1529.908397] env[62519]: DEBUG nova.compute.manager [None req-6acc3e5a-9e65-48ca-804d-bd19df3814de tempest-FloatingIPsAssociationTestJSON-131832216 tempest-FloatingIPsAssociationTestJSON-131832216-project-member] [instance: 09eefc1a-011b-4d2c-ab75-a1fcee740907] Start building block device mappings for instance. 
{{(pid=62519) _build_resources /opt/stack/nova/nova/compute/manager.py:2873}} [ 1530.019992] env[62519]: DEBUG nova.compute.manager [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] [instance: ceadcb5e-ee82-4441-b046-f79b973ec05e] Instance has had 0 of 5 cleanup attempts {{(pid=62519) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11733}} [ 1530.052097] env[62519]: DEBUG oslo_concurrency.lockutils [None req-4073dc01-e5b2-4480-9b24-60e6c1491ad2 tempest-ImagesNegativeTestJSON-1194326905 tempest-ImagesNegativeTestJSON-1194326905-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1530.074666] env[62519]: DEBUG nova.network.neutron [-] [instance: 91902e7f-8c15-447b-a3a8-04433434b1b6] Updating instance_info_cache with network_info: [] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1530.473043] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-730649dc-5514-4990-8d3e-9960929e1f6e {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1530.482092] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5749c4db-6a0c-48f2-9796-7ebc33316663 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1530.519288] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c96ff67-ccb2-4943-828f-04ebb74c46fe {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1530.526882] env[62519]: DEBUG nova.compute.manager [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] [instance: c616d8ec-f28a-4430-a336-1ea4790fd511] Instance has had 0 of 5 cleanup attempts {{(pid=62519) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11733}} [ 1530.531148] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab33c06d-e07b-481f-b728-f0cd58086710 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1530.545867] env[62519]: DEBUG nova.compute.provider_tree [None req-8bccbc50-fc90-49af-b28a-2ab70ae51360 tempest-ServerAddressesNegativeTestJSON-801003583 tempest-ServerAddressesNegativeTestJSON-801003583-project-member] Updating inventory in ProviderTree for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1530.577874] env[62519]: INFO nova.compute.manager [-] [instance: 91902e7f-8c15-447b-a3a8-04433434b1b6] Took 1.57 seconds to deallocate network for instance. 
[ 1530.922490] env[62519]: DEBUG nova.compute.manager [None req-6acc3e5a-9e65-48ca-804d-bd19df3814de tempest-FloatingIPsAssociationTestJSON-131832216 tempest-FloatingIPsAssociationTestJSON-131832216-project-member] [instance: 09eefc1a-011b-4d2c-ab75-a1fcee740907] Start spawning the instance on the hypervisor. {{(pid=62519) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2647}} [ 1530.953853] env[62519]: DEBUG nova.virt.hardware [None req-6acc3e5a-9e65-48ca-804d-bd19df3814de tempest-FloatingIPsAssociationTestJSON-131832216 tempest-FloatingIPsAssociationTestJSON-131832216-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T07:56:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T07:55:55Z,direct_url=,disk_format='vmdk',id=15793716-f1d9-4a86-9030-717adf498693,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4069337684d348e0a1ab4eb6a1a2b14d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T07:55:56Z,virtual_size=,visibility=), allow threads: False {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1530.955038] env[62519]: DEBUG nova.virt.hardware [None req-6acc3e5a-9e65-48ca-804d-bd19df3814de tempest-FloatingIPsAssociationTestJSON-131832216 tempest-FloatingIPsAssociationTestJSON-131832216-project-member] Flavor limits 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1530.955038] env[62519]: DEBUG nova.virt.hardware [None req-6acc3e5a-9e65-48ca-804d-bd19df3814de tempest-FloatingIPsAssociationTestJSON-131832216 tempest-FloatingIPsAssociationTestJSON-131832216-project-member] Image limits 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1530.955038] env[62519]: DEBUG nova.virt.hardware [None req-6acc3e5a-9e65-48ca-804d-bd19df3814de tempest-FloatingIPsAssociationTestJSON-131832216 tempest-FloatingIPsAssociationTestJSON-131832216-project-member] Flavor pref 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1530.955038] env[62519]: DEBUG nova.virt.hardware [None req-6acc3e5a-9e65-48ca-804d-bd19df3814de tempest-FloatingIPsAssociationTestJSON-131832216 tempest-FloatingIPsAssociationTestJSON-131832216-project-member] Image pref 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1530.955038] env[62519]: DEBUG nova.virt.hardware [None req-6acc3e5a-9e65-48ca-804d-bd19df3814de tempest-FloatingIPsAssociationTestJSON-131832216 tempest-FloatingIPsAssociationTestJSON-131832216-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1530.955038] env[62519]: DEBUG nova.virt.hardware [None req-6acc3e5a-9e65-48ca-804d-bd19df3814de tempest-FloatingIPsAssociationTestJSON-131832216 tempest-FloatingIPsAssociationTestJSON-131832216-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62519) _get_desirable_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:573}} [ 1530.955353] env[62519]: DEBUG nova.virt.hardware [None req-6acc3e5a-9e65-48ca-804d-bd19df3814de tempest-FloatingIPsAssociationTestJSON-131832216 tempest-FloatingIPsAssociationTestJSON-131832216-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62519) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1530.955353] env[62519]: DEBUG nova.virt.hardware [None req-6acc3e5a-9e65-48ca-804d-bd19df3814de tempest-FloatingIPsAssociationTestJSON-131832216 tempest-FloatingIPsAssociationTestJSON-131832216-project-member] Got 1 possible topologies {{(pid=62519) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1530.955443] env[62519]: DEBUG nova.virt.hardware [None req-6acc3e5a-9e65-48ca-804d-bd19df3814de tempest-FloatingIPsAssociationTestJSON-131832216 tempest-FloatingIPsAssociationTestJSON-131832216-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1530.955602] env[62519]: DEBUG nova.virt.hardware [None req-6acc3e5a-9e65-48ca-804d-bd19df3814de tempest-FloatingIPsAssociationTestJSON-131832216 tempest-FloatingIPsAssociationTestJSON-131832216-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1530.956571] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ceb5a84-22b1-4ff0-bde2-0cd08d92cc01 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1530.965144] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4d6dd24-eed8-4990-8b6e-b24f81656f80 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1531.037185] env[62519]: DEBUG nova.compute.manager [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] [instance: 8b178cc0-db79-4ec2-8962-f31b936f8eff] Instance has had 0 of 5 cleanup attempts {{(pid=62519) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11733}} [ 1531.082060] env[62519]: ERROR nova.scheduler.client.report [None req-8bccbc50-fc90-49af-b28a-2ab70ae51360 tempest-ServerAddressesNegativeTestJSON-801003583 tempest-ServerAddressesNegativeTestJSON-801003583-project-member] [req-bd0ee635-6307-4b8a-955b-1a8a7e8ce258] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID f8ca0d98-9158-4b85-ae0e-b106f966dd44. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-bd0ee635-6307-4b8a-955b-1a8a7e8ce258"}]} [ 1531.090840] env[62519]: DEBUG oslo_concurrency.lockutils [None req-b5a4e29f-dafc-4df8-a00f-bb9bcadbb03e tempest-VolumesAssistedSnapshotsTest-904167312 tempest-VolumesAssistedSnapshotsTest-904167312-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1531.113145] env[62519]: DEBUG nova.scheduler.client.report [None req-8bccbc50-fc90-49af-b28a-2ab70ae51360 tempest-ServerAddressesNegativeTestJSON-801003583 tempest-ServerAddressesNegativeTestJSON-801003583-project-member] Refreshing inventories for resource provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 {{(pid=62519) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 1531.134120] env[62519]: DEBUG nova.scheduler.client.report [None req-8bccbc50-fc90-49af-b28a-2ab70ae51360 tempest-ServerAddressesNegativeTestJSON-801003583 tempest-ServerAddressesNegativeTestJSON-801003583-project-member] Updating ProviderTree inventory for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 157, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 1531.134615] env[62519]: DEBUG nova.compute.provider_tree [None req-8bccbc50-fc90-49af-b28a-2ab70ae51360 tempest-ServerAddressesNegativeTestJSON-801003583 tempest-ServerAddressesNegativeTestJSON-801003583-project-member] Updating inventory in ProviderTree for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 157, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1531.148329] env[62519]: DEBUG nova.scheduler.client.report [None req-8bccbc50-fc90-49af-b28a-2ab70ae51360 tempest-ServerAddressesNegativeTestJSON-801003583 tempest-ServerAddressesNegativeTestJSON-801003583-project-member] Refreshing aggregate associations for resource provider f8ca0d98-9158-4b85-ae0e-b106f966dd44, aggregates: None {{(pid=62519) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 1531.169432] env[62519]: DEBUG nova.scheduler.client.report [None req-8bccbc50-fc90-49af-b28a-2ab70ae51360 tempest-ServerAddressesNegativeTestJSON-801003583 tempest-ServerAddressesNegativeTestJSON-801003583-project-member] Refreshing trait associations for resource provider f8ca0d98-9158-4b85-ae0e-b106f966dd44, traits: COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NODE,COMPUTE_SAME_HOST_COLD_MIGRATE,HW_ARCH_X86_64,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_VMDK {{(pid=62519) 
_refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 1531.498250] env[62519]: DEBUG nova.compute.manager [req-e23b9dbf-744b-4b4f-bfde-1c5cfee0c63b req-b17b1f89-bfe1-4f7e-a6e1-c1ab63a3d8c9 service nova] [instance: 91902e7f-8c15-447b-a3a8-04433434b1b6] Received event network-vif-deleted-57934d0f-3be6-4e2d-9e86-b5500fae4b3b {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1531.541297] env[62519]: DEBUG nova.compute.manager [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] [instance: f6e29557-05ad-4a11-bd01-0315926c0413] Instance has had 0 of 5 cleanup attempts {{(pid=62519) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11733}} [ 1531.726431] env[62519]: DEBUG nova.compute.manager [req-b27b10db-6ea7-4235-866a-08984261bed6 req-7c0182be-4e5c-4c39-842c-a85287b6db45 service nova] [instance: 09eefc1a-011b-4d2c-ab75-a1fcee740907] Received event network-vif-plugged-7948e8a1-83dc-4329-aa44-813b4e25c1c7 {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1531.726647] env[62519]: DEBUG oslo_concurrency.lockutils [req-b27b10db-6ea7-4235-866a-08984261bed6 req-7c0182be-4e5c-4c39-842c-a85287b6db45 service nova] Acquiring lock "09eefc1a-011b-4d2c-ab75-a1fcee740907-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1531.726845] env[62519]: DEBUG oslo_concurrency.lockutils [req-b27b10db-6ea7-4235-866a-08984261bed6 req-7c0182be-4e5c-4c39-842c-a85287b6db45 service nova] Lock "09eefc1a-011b-4d2c-ab75-a1fcee740907-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1531.727075] env[62519]: DEBUG oslo_concurrency.lockutils [req-b27b10db-6ea7-4235-866a-08984261bed6 req-7c0182be-4e5c-4c39-842c-a85287b6db45 service nova] Lock "09eefc1a-011b-4d2c-ab75-a1fcee740907-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1531.727186] env[62519]: DEBUG nova.compute.manager [req-b27b10db-6ea7-4235-866a-08984261bed6 req-7c0182be-4e5c-4c39-842c-a85287b6db45 service nova] [instance: 09eefc1a-011b-4d2c-ab75-a1fcee740907] No waiting events found dispatching network-vif-plugged-7948e8a1-83dc-4329-aa44-813b4e25c1c7 {{(pid=62519) pop_instance_event /opt/stack/nova/nova/compute/manager.py:323}} [ 1531.727387] env[62519]: WARNING nova.compute.manager [req-b27b10db-6ea7-4235-866a-08984261bed6 req-7c0182be-4e5c-4c39-842c-a85287b6db45 service nova] [instance: 09eefc1a-011b-4d2c-ab75-a1fcee740907] Received unexpected event network-vif-plugged-7948e8a1-83dc-4329-aa44-813b4e25c1c7 for instance with vm_state building and task_state spawning. 
[ 1531.740244] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ab948e7-c51a-4f98-a7e1-722277442590 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1531.748572] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e57a333f-81fd-4454-a123-20bf96cc00da {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1531.780740] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c418d73-33fd-4573-8c8a-11061bab60d0 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1531.788932] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a1a54b9-0348-4d7b-85f0-bc6c5ae5cafc {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1531.802924] env[62519]: DEBUG nova.compute.provider_tree [None req-8bccbc50-fc90-49af-b28a-2ab70ae51360 tempest-ServerAddressesNegativeTestJSON-801003583 tempest-ServerAddressesNegativeTestJSON-801003583-project-member] Updating inventory in ProviderTree for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1531.805992] env[62519]: DEBUG nova.network.neutron [None req-6acc3e5a-9e65-48ca-804d-bd19df3814de tempest-FloatingIPsAssociationTestJSON-131832216 tempest-FloatingIPsAssociationTestJSON-131832216-project-member] [instance: 09eefc1a-011b-4d2c-ab75-a1fcee740907] Successfully updated port: 7948e8a1-83dc-4329-aa44-813b4e25c1c7 {{(pid=62519) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1532.047239] env[62519]: DEBUG oslo_service.periodic_task [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=62519) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1532.047239] env[62519]: DEBUG nova.compute.manager [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Cleaning up deleted instances with incomplete migration {{(pid=62519) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11758}} [ 1532.311968] env[62519]: DEBUG oslo_concurrency.lockutils [None req-6acc3e5a-9e65-48ca-804d-bd19df3814de tempest-FloatingIPsAssociationTestJSON-131832216 tempest-FloatingIPsAssociationTestJSON-131832216-project-member] Acquiring lock "refresh_cache-09eefc1a-011b-4d2c-ab75-a1fcee740907" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1532.311968] env[62519]: DEBUG oslo_concurrency.lockutils [None req-6acc3e5a-9e65-48ca-804d-bd19df3814de tempest-FloatingIPsAssociationTestJSON-131832216 tempest-FloatingIPsAssociationTestJSON-131832216-project-member] Acquired lock "refresh_cache-09eefc1a-011b-4d2c-ab75-a1fcee740907" {{(pid=62519) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1532.311968] env[62519]: DEBUG nova.network.neutron [None req-6acc3e5a-9e65-48ca-804d-bd19df3814de tempest-FloatingIPsAssociationTestJSON-131832216 tempest-FloatingIPsAssociationTestJSON-131832216-project-member] [instance: 09eefc1a-011b-4d2c-ab75-a1fcee740907] Building network info cache for instance {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1532.344052] env[62519]: DEBUG nova.scheduler.client.report [None req-8bccbc50-fc90-49af-b28a-2ab70ae51360 tempest-ServerAddressesNegativeTestJSON-801003583 tempest-ServerAddressesNegativeTestJSON-801003583-project-member] Updated inventory for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 with generation 68 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 1532.344386] env[62519]: DEBUG nova.compute.provider_tree [None req-8bccbc50-fc90-49af-b28a-2ab70ae51360 tempest-ServerAddressesNegativeTestJSON-801003583 tempest-ServerAddressesNegativeTestJSON-801003583-project-member] Updating resource provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 generation from 68 to 69 during operation: update_inventory {{(pid=62519) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1532.344582] env[62519]: DEBUG nova.compute.provider_tree [None req-8bccbc50-fc90-49af-b28a-2ab70ae51360 tempest-ServerAddressesNegativeTestJSON-801003583 tempest-ServerAddressesNegativeTestJSON-801003583-project-member] Updating inventory in ProviderTree for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1532.393936] env[62519]: DEBUG nova.compute.manager [None req-abaf2164-2628-41b3-aa2a-dce555f7d236 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] [instance: 3b506d10-a427-47b8-ab5f-c35e450b7eb1] Checking state {{(pid=62519) _get_power_state /opt/stack/nova/nova/compute/manager.py:1799}} [ 1532.394913] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a42ea5e5-4407-4455-b390-4c25a58628b6 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1532.661371] env[62519]: DEBUG oslo_vmware.rw_handles [None req-6f916e62-743a-4a1e-b53d-f7984ee998e1 tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5219ffdb-35e5-de54-86f0-a52b72fdb517/disk-0.vmdk. 
{{(pid=62519) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1532.662386] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f651be58-2568-4b0e-9009-8702cc6c7cd8 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1532.668638] env[62519]: DEBUG oslo_vmware.rw_handles [None req-6f916e62-743a-4a1e-b53d-f7984ee998e1 tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5219ffdb-35e5-de54-86f0-a52b72fdb517/disk-0.vmdk is in state: ready. {{(pid=62519) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1532.668808] env[62519]: ERROR oslo_vmware.rw_handles [None req-6f916e62-743a-4a1e-b53d-f7984ee998e1 tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] Aborting lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5219ffdb-35e5-de54-86f0-a52b72fdb517/disk-0.vmdk due to incomplete transfer. [ 1532.669030] env[62519]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-b01257ec-181b-4939-97bb-e49aeb6a05dc {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1532.676110] env[62519]: DEBUG oslo_vmware.rw_handles [None req-6f916e62-743a-4a1e-b53d-f7984ee998e1 tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] Closed VMDK read handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5219ffdb-35e5-de54-86f0-a52b72fdb517/disk-0.vmdk. {{(pid=62519) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1532.676366] env[62519]: DEBUG nova.virt.vmwareapi.images [None req-6f916e62-743a-4a1e-b53d-f7984ee998e1 tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] [instance: 1d4b14d3-8832-457e-aaed-462236555f57] Uploaded image 1e6faf8f-6642-477c-a1fa-a8b2e31e7a75 to the Glance image server {{(pid=62519) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1532.678663] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-6f916e62-743a-4a1e-b53d-f7984ee998e1 tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] [instance: 1d4b14d3-8832-457e-aaed-462236555f57] Destroying the VM {{(pid=62519) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1532.678890] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-7fee023b-ce10-4304-86c4-cfdc3517264b {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1532.684352] env[62519]: DEBUG oslo_vmware.api [None req-6f916e62-743a-4a1e-b53d-f7984ee998e1 tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] Waiting for the task: (returnval){ [ 1532.684352] env[62519]: value = "task-1802295" [ 1532.684352] env[62519]: _type = "Task" [ 1532.684352] env[62519]: } to complete. 
{{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1532.691827] env[62519]: DEBUG oslo_vmware.api [None req-6f916e62-743a-4a1e-b53d-f7984ee998e1 tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] Task: {'id': task-1802295, 'name': Destroy_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1532.850974] env[62519]: DEBUG oslo_concurrency.lockutils [None req-8bccbc50-fc90-49af-b28a-2ab70ae51360 tempest-ServerAddressesNegativeTestJSON-801003583 tempest-ServerAddressesNegativeTestJSON-801003583-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.965s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1532.851584] env[62519]: DEBUG nova.compute.manager [None req-8bccbc50-fc90-49af-b28a-2ab70ae51360 tempest-ServerAddressesNegativeTestJSON-801003583 tempest-ServerAddressesNegativeTestJSON-801003583-project-member] [instance: 22380aef-c725-43a0-a957-06ced9518c21] Start building networks asynchronously for instance. {{(pid=62519) _build_resources /opt/stack/nova/nova/compute/manager.py:2838}} [ 1532.854311] env[62519]: DEBUG oslo_concurrency.lockutils [None req-f492c075-bf4f-4007-8aab-a79cc6b2c698 tempest-ServerExternalEventsTest-627262165 tempest-ServerExternalEventsTest-627262165-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 35.910s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1532.854532] env[62519]: DEBUG oslo_concurrency.lockutils [None req-f492c075-bf4f-4007-8aab-a79cc6b2c698 tempest-ServerExternalEventsTest-627262165 tempest-ServerExternalEventsTest-627262165-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1532.856703] env[62519]: DEBUG oslo_concurrency.lockutils [None req-b9c19239-83b3-40a1-b398-a911c392dcc2 tempest-InstanceActionsTestJSON-366970633 tempest-InstanceActionsTestJSON-366970633-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 34.825s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1532.858156] env[62519]: INFO nova.compute.claims [None req-b9c19239-83b3-40a1-b398-a911c392dcc2 tempest-InstanceActionsTestJSON-366970633 tempest-InstanceActionsTestJSON-366970633-project-member] [instance: f19c860f-736a-4783-8ef5-8262040e53a3] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1532.868667] env[62519]: DEBUG nova.network.neutron [None req-6acc3e5a-9e65-48ca-804d-bd19df3814de tempest-FloatingIPsAssociationTestJSON-131832216 tempest-FloatingIPsAssociationTestJSON-131832216-project-member] [instance: 09eefc1a-011b-4d2c-ab75-a1fcee740907] Instance cache missing network info. 
{{(pid=62519) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1532.884033] env[62519]: INFO nova.scheduler.client.report [None req-f492c075-bf4f-4007-8aab-a79cc6b2c698 tempest-ServerExternalEventsTest-627262165 tempest-ServerExternalEventsTest-627262165-project-member] Deleted allocations for instance f7d5c77d-6c78-4969-b511-2b03ab624c84 [ 1532.907923] env[62519]: INFO nova.compute.manager [None req-abaf2164-2628-41b3-aa2a-dce555f7d236 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] [instance: 3b506d10-a427-47b8-ab5f-c35e450b7eb1] instance snapshotting [ 1532.908110] env[62519]: WARNING nova.compute.manager [None req-abaf2164-2628-41b3-aa2a-dce555f7d236 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] [instance: 3b506d10-a427-47b8-ab5f-c35e450b7eb1] trying to snapshot a non-running instance: (state: 7 expected: 1) [ 1532.911069] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e41a9ff-4175-49d3-bdc3-ffd23b0cd8fd {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1532.932616] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a20862f8-57ac-4f2a-883d-e9ff05cabfbe {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1533.038427] env[62519]: DEBUG nova.network.neutron [None req-6acc3e5a-9e65-48ca-804d-bd19df3814de tempest-FloatingIPsAssociationTestJSON-131832216 tempest-FloatingIPsAssociationTestJSON-131832216-project-member] [instance: 09eefc1a-011b-4d2c-ab75-a1fcee740907] Updating instance_info_cache with network_info: [{"id": "7948e8a1-83dc-4329-aa44-813b4e25c1c7", "address": "fa:16:3e:dd:49:e8", "network": {"id": "ca655458-a420-4494-a897-206c445e8893", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-2085945802-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "08ccb6256cb446e1837e04580892a31a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "98d96b75-ac36-499a-adc2-130c8c1d55ca", "external-id": "nsx-vlan-transportzone-564", "segmentation_id": 564, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7948e8a1-83", "ovs_interfaceid": "7948e8a1-83dc-4329-aa44-813b4e25c1c7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1533.194828] env[62519]: DEBUG oslo_vmware.api [None req-6f916e62-743a-4a1e-b53d-f7984ee998e1 tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] Task: {'id': task-1802295, 'name': Destroy_Task, 'duration_secs': 0.381474} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1533.195042] env[62519]: INFO nova.virt.vmwareapi.vm_util [None req-6f916e62-743a-4a1e-b53d-f7984ee998e1 tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] [instance: 1d4b14d3-8832-457e-aaed-462236555f57] Destroyed the VM [ 1533.195223] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-6f916e62-743a-4a1e-b53d-f7984ee998e1 tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] [instance: 1d4b14d3-8832-457e-aaed-462236555f57] Deleting Snapshot of the VM instance {{(pid=62519) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1533.195504] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-110a2910-af4a-4b4c-bf49-9994506c164c {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1533.202212] env[62519]: DEBUG oslo_vmware.api [None req-6f916e62-743a-4a1e-b53d-f7984ee998e1 tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] Waiting for the task: (returnval){ [ 1533.202212] env[62519]: value = "task-1802296" [ 1533.202212] env[62519]: _type = "Task" [ 1533.202212] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1533.209937] env[62519]: DEBUG oslo_vmware.api [None req-6f916e62-743a-4a1e-b53d-f7984ee998e1 tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] Task: {'id': task-1802296, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1533.363436] env[62519]: DEBUG nova.compute.utils [None req-8bccbc50-fc90-49af-b28a-2ab70ae51360 tempest-ServerAddressesNegativeTestJSON-801003583 tempest-ServerAddressesNegativeTestJSON-801003583-project-member] Using /dev/sd instead of None {{(pid=62519) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1533.364857] env[62519]: DEBUG nova.compute.manager [None req-8bccbc50-fc90-49af-b28a-2ab70ae51360 tempest-ServerAddressesNegativeTestJSON-801003583 tempest-ServerAddressesNegativeTestJSON-801003583-project-member] [instance: 22380aef-c725-43a0-a957-06ced9518c21] Allocating IP information in the background. 
{{(pid=62519) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1989}} [ 1533.365349] env[62519]: DEBUG nova.network.neutron [None req-8bccbc50-fc90-49af-b28a-2ab70ae51360 tempest-ServerAddressesNegativeTestJSON-801003583 tempest-ServerAddressesNegativeTestJSON-801003583-project-member] [instance: 22380aef-c725-43a0-a957-06ced9518c21] allocate_for_instance() {{(pid=62519) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1533.395145] env[62519]: DEBUG oslo_concurrency.lockutils [None req-f492c075-bf4f-4007-8aab-a79cc6b2c698 tempest-ServerExternalEventsTest-627262165 tempest-ServerExternalEventsTest-627262165-project-member] Lock "f7d5c77d-6c78-4969-b511-2b03ab624c84" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 39.934s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1533.409847] env[62519]: DEBUG nova.policy [None req-8bccbc50-fc90-49af-b28a-2ab70ae51360 tempest-ServerAddressesNegativeTestJSON-801003583 tempest-ServerAddressesNegativeTestJSON-801003583-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'a05f0649d07c4b75861bf7ea9d86784e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '640853b7caa24c45afca3f1db04ec053', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62519) authorize /opt/stack/nova/nova/policy.py:192}} [ 1533.447051] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-abaf2164-2628-41b3-aa2a-dce555f7d236 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] [instance: 3b506d10-a427-47b8-ab5f-c35e450b7eb1] Creating Snapshot of the VM instance {{(pid=62519) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1533.447051] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-85329983-f9fd-4181-9ebe-b4e8e55857ff {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1533.453721] env[62519]: DEBUG oslo_vmware.api [None req-abaf2164-2628-41b3-aa2a-dce555f7d236 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Waiting for the task: (returnval){ [ 1533.453721] env[62519]: value = "task-1802297" [ 1533.453721] env[62519]: _type = "Task" [ 1533.453721] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1533.461851] env[62519]: DEBUG oslo_vmware.api [None req-abaf2164-2628-41b3-aa2a-dce555f7d236 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Task: {'id': task-1802297, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1533.541915] env[62519]: DEBUG oslo_concurrency.lockutils [None req-6acc3e5a-9e65-48ca-804d-bd19df3814de tempest-FloatingIPsAssociationTestJSON-131832216 tempest-FloatingIPsAssociationTestJSON-131832216-project-member] Releasing lock "refresh_cache-09eefc1a-011b-4d2c-ab75-a1fcee740907" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1533.541915] env[62519]: DEBUG nova.compute.manager [None req-6acc3e5a-9e65-48ca-804d-bd19df3814de tempest-FloatingIPsAssociationTestJSON-131832216 tempest-FloatingIPsAssociationTestJSON-131832216-project-member] [instance: 09eefc1a-011b-4d2c-ab75-a1fcee740907] Instance network_info: |[{"id": "7948e8a1-83dc-4329-aa44-813b4e25c1c7", "address": "fa:16:3e:dd:49:e8", "network": {"id": "ca655458-a420-4494-a897-206c445e8893", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-2085945802-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "08ccb6256cb446e1837e04580892a31a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "98d96b75-ac36-499a-adc2-130c8c1d55ca", "external-id": "nsx-vlan-transportzone-564", "segmentation_id": 564, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7948e8a1-83", "ovs_interfaceid": "7948e8a1-83dc-4329-aa44-813b4e25c1c7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62519) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2004}} [ 1533.542111] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-6acc3e5a-9e65-48ca-804d-bd19df3814de tempest-FloatingIPsAssociationTestJSON-131832216 tempest-FloatingIPsAssociationTestJSON-131832216-project-member] [instance: 09eefc1a-011b-4d2c-ab75-a1fcee740907] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:dd:49:e8', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '98d96b75-ac36-499a-adc2-130c8c1d55ca', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '7948e8a1-83dc-4329-aa44-813b4e25c1c7', 'vif_model': 'vmxnet3'}] {{(pid=62519) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1533.549667] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-6acc3e5a-9e65-48ca-804d-bd19df3814de tempest-FloatingIPsAssociationTestJSON-131832216 tempest-FloatingIPsAssociationTestJSON-131832216-project-member] Creating folder: Project (08ccb6256cb446e1837e04580892a31a). Parent ref: group-v373567. 
{{(pid=62519) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1533.550404] env[62519]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-46bad335-cfa5-4fa4-acc3-6f8ba3ee60b6 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1533.560663] env[62519]: INFO nova.virt.vmwareapi.vm_util [None req-6acc3e5a-9e65-48ca-804d-bd19df3814de tempest-FloatingIPsAssociationTestJSON-131832216 tempest-FloatingIPsAssociationTestJSON-131832216-project-member] Created folder: Project (08ccb6256cb446e1837e04580892a31a) in parent group-v373567. [ 1533.560848] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-6acc3e5a-9e65-48ca-804d-bd19df3814de tempest-FloatingIPsAssociationTestJSON-131832216 tempest-FloatingIPsAssociationTestJSON-131832216-project-member] Creating folder: Instances. Parent ref: group-v373671. {{(pid=62519) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1533.561201] env[62519]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-11f77f25-2b3e-47dd-9016-65ce818cf469 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1533.570112] env[62519]: INFO nova.virt.vmwareapi.vm_util [None req-6acc3e5a-9e65-48ca-804d-bd19df3814de tempest-FloatingIPsAssociationTestJSON-131832216 tempest-FloatingIPsAssociationTestJSON-131832216-project-member] Created folder: Instances in parent group-v373671. [ 1533.570461] env[62519]: DEBUG oslo.service.loopingcall [None req-6acc3e5a-9e65-48ca-804d-bd19df3814de tempest-FloatingIPsAssociationTestJSON-131832216 tempest-FloatingIPsAssociationTestJSON-131832216-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62519) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1533.570661] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 09eefc1a-011b-4d2c-ab75-a1fcee740907] Creating VM on the ESX host {{(pid=62519) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1533.570862] env[62519]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-8b4f5ee0-a63e-4fe1-b091-8e0124cb95fc {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1533.590340] env[62519]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1533.590340] env[62519]: value = "task-1802300" [ 1533.590340] env[62519]: _type = "Task" [ 1533.590340] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1533.598061] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1802300, 'name': CreateVM_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1533.711856] env[62519]: DEBUG oslo_vmware.api [None req-6f916e62-743a-4a1e-b53d-f7984ee998e1 tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] Task: {'id': task-1802296, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1533.816756] env[62519]: DEBUG nova.network.neutron [None req-8bccbc50-fc90-49af-b28a-2ab70ae51360 tempest-ServerAddressesNegativeTestJSON-801003583 tempest-ServerAddressesNegativeTestJSON-801003583-project-member] [instance: 22380aef-c725-43a0-a957-06ced9518c21] Successfully created port: ca6c27e1-d55f-43cc-8dee-29ddc604baad {{(pid=62519) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1533.870530] env[62519]: DEBUG nova.compute.manager [None req-8bccbc50-fc90-49af-b28a-2ab70ae51360 tempest-ServerAddressesNegativeTestJSON-801003583 tempest-ServerAddressesNegativeTestJSON-801003583-project-member] [instance: 22380aef-c725-43a0-a957-06ced9518c21] Start building block device mappings for instance. {{(pid=62519) _build_resources /opt/stack/nova/nova/compute/manager.py:2873}} [ 1533.902022] env[62519]: DEBUG nova.compute.manager [req-cf6e1794-e65d-4896-895a-fc6d484077a0 req-7c19496d-5326-451c-889c-5fb4a71a780e service nova] [instance: 09eefc1a-011b-4d2c-ab75-a1fcee740907] Received event network-changed-7948e8a1-83dc-4329-aa44-813b4e25c1c7 {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1533.902022] env[62519]: DEBUG nova.compute.manager [req-cf6e1794-e65d-4896-895a-fc6d484077a0 req-7c19496d-5326-451c-889c-5fb4a71a780e service nova] [instance: 09eefc1a-011b-4d2c-ab75-a1fcee740907] Refreshing instance network info cache due to event network-changed-7948e8a1-83dc-4329-aa44-813b4e25c1c7. {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11628}} [ 1533.902022] env[62519]: DEBUG oslo_concurrency.lockutils [req-cf6e1794-e65d-4896-895a-fc6d484077a0 req-7c19496d-5326-451c-889c-5fb4a71a780e service nova] Acquiring lock "refresh_cache-09eefc1a-011b-4d2c-ab75-a1fcee740907" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1533.902022] env[62519]: DEBUG oslo_concurrency.lockutils [req-cf6e1794-e65d-4896-895a-fc6d484077a0 req-7c19496d-5326-451c-889c-5fb4a71a780e service nova] Acquired lock "refresh_cache-09eefc1a-011b-4d2c-ab75-a1fcee740907" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1533.902022] env[62519]: DEBUG nova.network.neutron [req-cf6e1794-e65d-4896-895a-fc6d484077a0 req-7c19496d-5326-451c-889c-5fb4a71a780e service nova] [instance: 09eefc1a-011b-4d2c-ab75-a1fcee740907] Refreshing network info cache for port 7948e8a1-83dc-4329-aa44-813b4e25c1c7 {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1533.963198] env[62519]: DEBUG oslo_vmware.api [None req-abaf2164-2628-41b3-aa2a-dce555f7d236 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Task: {'id': task-1802297, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1534.103669] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1802300, 'name': CreateVM_Task, 'duration_secs': 0.353599} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1534.103904] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 09eefc1a-011b-4d2c-ab75-a1fcee740907] Created VM on the ESX host {{(pid=62519) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1534.108278] env[62519]: DEBUG oslo_concurrency.lockutils [None req-6acc3e5a-9e65-48ca-804d-bd19df3814de tempest-FloatingIPsAssociationTestJSON-131832216 tempest-FloatingIPsAssociationTestJSON-131832216-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1534.108278] env[62519]: DEBUG oslo_concurrency.lockutils [None req-6acc3e5a-9e65-48ca-804d-bd19df3814de tempest-FloatingIPsAssociationTestJSON-131832216 tempest-FloatingIPsAssociationTestJSON-131832216-project-member] Acquired lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1534.108278] env[62519]: DEBUG oslo_concurrency.lockutils [None req-6acc3e5a-9e65-48ca-804d-bd19df3814de tempest-FloatingIPsAssociationTestJSON-131832216 tempest-FloatingIPsAssociationTestJSON-131832216-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1534.108278] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1942106e-d722-463c-93b4-67dfa701e376 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1534.113328] env[62519]: DEBUG oslo_vmware.api [None req-6acc3e5a-9e65-48ca-804d-bd19df3814de tempest-FloatingIPsAssociationTestJSON-131832216 tempest-FloatingIPsAssociationTestJSON-131832216-project-member] Waiting for the task: (returnval){ [ 1534.113328] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]52ab5c9b-1888-4028-2e60-12ff4f049f66" [ 1534.113328] env[62519]: _type = "Task" [ 1534.113328] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1534.121617] env[62519]: DEBUG oslo_vmware.api [None req-6acc3e5a-9e65-48ca-804d-bd19df3814de tempest-FloatingIPsAssociationTestJSON-131832216 tempest-FloatingIPsAssociationTestJSON-131832216-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52ab5c9b-1888-4028-2e60-12ff4f049f66, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1534.213393] env[62519]: DEBUG oslo_vmware.api [None req-6f916e62-743a-4a1e-b53d-f7984ee998e1 tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] Task: {'id': task-1802296, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1534.426235] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3072a196-dd54-4328-aff7-15a2c1a1a68c {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1534.434557] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6cf65063-4bf1-4fec-a3e0-92787d7f1213 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1534.474823] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b446b7ce-2b58-4ed0-8241-e16b4ce8fa34 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1534.488644] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a502bee-30f9-441c-a06b-96f095d1b2b4 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1534.492773] env[62519]: DEBUG oslo_vmware.api [None req-abaf2164-2628-41b3-aa2a-dce555f7d236 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Task: {'id': task-1802297, 'name': CreateSnapshot_Task, 'duration_secs': 0.596668} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1534.493049] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-abaf2164-2628-41b3-aa2a-dce555f7d236 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] [instance: 3b506d10-a427-47b8-ab5f-c35e450b7eb1] Created Snapshot of the VM instance {{(pid=62519) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1534.494107] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8baa8723-54dc-4622-a114-d28214496d2b {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1534.506503] env[62519]: DEBUG nova.compute.provider_tree [None req-b9c19239-83b3-40a1-b398-a911c392dcc2 tempest-InstanceActionsTestJSON-366970633 tempest-InstanceActionsTestJSON-366970633-project-member] Inventory has not changed in ProviderTree for provider: f8ca0d98-9158-4b85-ae0e-b106f966dd44 {{(pid=62519) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1534.545020] env[62519]: DEBUG oslo_service.periodic_task [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62519) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1534.545020] env[62519]: DEBUG oslo_service.periodic_task [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=62519) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1534.624011] env[62519]: DEBUG oslo_vmware.api [None req-6acc3e5a-9e65-48ca-804d-bd19df3814de tempest-FloatingIPsAssociationTestJSON-131832216 tempest-FloatingIPsAssociationTestJSON-131832216-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52ab5c9b-1888-4028-2e60-12ff4f049f66, 'name': SearchDatastore_Task, 'duration_secs': 
0.009211} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1534.624185] env[62519]: DEBUG oslo_concurrency.lockutils [None req-6acc3e5a-9e65-48ca-804d-bd19df3814de tempest-FloatingIPsAssociationTestJSON-131832216 tempest-FloatingIPsAssociationTestJSON-131832216-project-member] Releasing lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1534.624318] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-6acc3e5a-9e65-48ca-804d-bd19df3814de tempest-FloatingIPsAssociationTestJSON-131832216 tempest-FloatingIPsAssociationTestJSON-131832216-project-member] [instance: 09eefc1a-011b-4d2c-ab75-a1fcee740907] Processing image 15793716-f1d9-4a86-9030-717adf498693 {{(pid=62519) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1534.624542] env[62519]: DEBUG oslo_concurrency.lockutils [None req-6acc3e5a-9e65-48ca-804d-bd19df3814de tempest-FloatingIPsAssociationTestJSON-131832216 tempest-FloatingIPsAssociationTestJSON-131832216-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1534.624684] env[62519]: DEBUG oslo_concurrency.lockutils [None req-6acc3e5a-9e65-48ca-804d-bd19df3814de tempest-FloatingIPsAssociationTestJSON-131832216 tempest-FloatingIPsAssociationTestJSON-131832216-project-member] Acquired lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1534.624852] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-6acc3e5a-9e65-48ca-804d-bd19df3814de tempest-FloatingIPsAssociationTestJSON-131832216 tempest-FloatingIPsAssociationTestJSON-131832216-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62519) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1534.625149] env[62519]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-82b0e792-1032-415c-b93d-e0e5dab3ed8f {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1534.637035] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-6acc3e5a-9e65-48ca-804d-bd19df3814de tempest-FloatingIPsAssociationTestJSON-131832216 tempest-FloatingIPsAssociationTestJSON-131832216-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62519) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1534.637560] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-6acc3e5a-9e65-48ca-804d-bd19df3814de tempest-FloatingIPsAssociationTestJSON-131832216 tempest-FloatingIPsAssociationTestJSON-131832216-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62519) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1534.638017] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-49fa41d3-0d78-4d26-b477-8541bfc2783a {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1534.644527] env[62519]: DEBUG oslo_vmware.api [None req-6acc3e5a-9e65-48ca-804d-bd19df3814de tempest-FloatingIPsAssociationTestJSON-131832216 tempest-FloatingIPsAssociationTestJSON-131832216-project-member] Waiting for the task: (returnval){ [ 1534.644527] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]52bc1c12-6ebe-30ea-d406-4a559480c8d0" [ 1534.644527] env[62519]: _type = "Task" [ 1534.644527] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1534.652051] env[62519]: DEBUG oslo_vmware.api [None req-6acc3e5a-9e65-48ca-804d-bd19df3814de tempest-FloatingIPsAssociationTestJSON-131832216 tempest-FloatingIPsAssociationTestJSON-131832216-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52bc1c12-6ebe-30ea-d406-4a559480c8d0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1534.691596] env[62519]: DEBUG nova.network.neutron [req-cf6e1794-e65d-4896-895a-fc6d484077a0 req-7c19496d-5326-451c-889c-5fb4a71a780e service nova] [instance: 09eefc1a-011b-4d2c-ab75-a1fcee740907] Updated VIF entry in instance network info cache for port 7948e8a1-83dc-4329-aa44-813b4e25c1c7. {{(pid=62519) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1534.691949] env[62519]: DEBUG nova.network.neutron [req-cf6e1794-e65d-4896-895a-fc6d484077a0 req-7c19496d-5326-451c-889c-5fb4a71a780e service nova] [instance: 09eefc1a-011b-4d2c-ab75-a1fcee740907] Updating instance_info_cache with network_info: [{"id": "7948e8a1-83dc-4329-aa44-813b4e25c1c7", "address": "fa:16:3e:dd:49:e8", "network": {"id": "ca655458-a420-4494-a897-206c445e8893", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-2085945802-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "08ccb6256cb446e1837e04580892a31a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "98d96b75-ac36-499a-adc2-130c8c1d55ca", "external-id": "nsx-vlan-transportzone-564", "segmentation_id": 564, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7948e8a1-83", "ovs_interfaceid": "7948e8a1-83dc-4329-aa44-813b4e25c1c7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1534.713718] env[62519]: DEBUG oslo_vmware.api [None req-6f916e62-743a-4a1e-b53d-f7984ee998e1 tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] Task: {'id': task-1802296, 'name': RemoveSnapshot_Task, 
'duration_secs': 1.393734} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1534.714011] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-6f916e62-743a-4a1e-b53d-f7984ee998e1 tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] [instance: 1d4b14d3-8832-457e-aaed-462236555f57] Deleted Snapshot of the VM instance {{(pid=62519) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1534.714264] env[62519]: INFO nova.compute.manager [None req-6f916e62-743a-4a1e-b53d-f7984ee998e1 tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] [instance: 1d4b14d3-8832-457e-aaed-462236555f57] Took 15.13 seconds to snapshot the instance on the hypervisor. [ 1534.883772] env[62519]: DEBUG nova.compute.manager [None req-8bccbc50-fc90-49af-b28a-2ab70ae51360 tempest-ServerAddressesNegativeTestJSON-801003583 tempest-ServerAddressesNegativeTestJSON-801003583-project-member] [instance: 22380aef-c725-43a0-a957-06ced9518c21] Start spawning the instance on the hypervisor. {{(pid=62519) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2647}} [ 1534.914535] env[62519]: DEBUG nova.virt.hardware [None req-8bccbc50-fc90-49af-b28a-2ab70ae51360 tempest-ServerAddressesNegativeTestJSON-801003583 tempest-ServerAddressesNegativeTestJSON-801003583-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T07:56:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T07:55:55Z,direct_url=,disk_format='vmdk',id=15793716-f1d9-4a86-9030-717adf498693,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4069337684d348e0a1ab4eb6a1a2b14d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T07:55:56Z,virtual_size=,visibility=), allow threads: False {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1534.914780] env[62519]: DEBUG nova.virt.hardware [None req-8bccbc50-fc90-49af-b28a-2ab70ae51360 tempest-ServerAddressesNegativeTestJSON-801003583 tempest-ServerAddressesNegativeTestJSON-801003583-project-member] Flavor limits 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1534.914935] env[62519]: DEBUG nova.virt.hardware [None req-8bccbc50-fc90-49af-b28a-2ab70ae51360 tempest-ServerAddressesNegativeTestJSON-801003583 tempest-ServerAddressesNegativeTestJSON-801003583-project-member] Image limits 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1534.915135] env[62519]: DEBUG nova.virt.hardware [None req-8bccbc50-fc90-49af-b28a-2ab70ae51360 tempest-ServerAddressesNegativeTestJSON-801003583 tempest-ServerAddressesNegativeTestJSON-801003583-project-member] Flavor pref 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1534.915379] env[62519]: DEBUG nova.virt.hardware [None req-8bccbc50-fc90-49af-b28a-2ab70ae51360 tempest-ServerAddressesNegativeTestJSON-801003583 tempest-ServerAddressesNegativeTestJSON-801003583-project-member] Image pref 0:0:0 
{{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1534.915542] env[62519]: DEBUG nova.virt.hardware [None req-8bccbc50-fc90-49af-b28a-2ab70ae51360 tempest-ServerAddressesNegativeTestJSON-801003583 tempest-ServerAddressesNegativeTestJSON-801003583-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1534.915745] env[62519]: DEBUG nova.virt.hardware [None req-8bccbc50-fc90-49af-b28a-2ab70ae51360 tempest-ServerAddressesNegativeTestJSON-801003583 tempest-ServerAddressesNegativeTestJSON-801003583-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1534.915949] env[62519]: DEBUG nova.virt.hardware [None req-8bccbc50-fc90-49af-b28a-2ab70ae51360 tempest-ServerAddressesNegativeTestJSON-801003583 tempest-ServerAddressesNegativeTestJSON-801003583-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62519) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1534.916169] env[62519]: DEBUG nova.virt.hardware [None req-8bccbc50-fc90-49af-b28a-2ab70ae51360 tempest-ServerAddressesNegativeTestJSON-801003583 tempest-ServerAddressesNegativeTestJSON-801003583-project-member] Got 1 possible topologies {{(pid=62519) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1534.916386] env[62519]: DEBUG nova.virt.hardware [None req-8bccbc50-fc90-49af-b28a-2ab70ae51360 tempest-ServerAddressesNegativeTestJSON-801003583 tempest-ServerAddressesNegativeTestJSON-801003583-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1534.916557] env[62519]: DEBUG nova.virt.hardware [None req-8bccbc50-fc90-49af-b28a-2ab70ae51360 tempest-ServerAddressesNegativeTestJSON-801003583 tempest-ServerAddressesNegativeTestJSON-801003583-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1534.920148] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b92ef400-b806-4092-aa13-1cac3f983550 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1534.925271] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76d1d523-f9b8-42a9-a750-dd2c5a9dadeb {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1535.011075] env[62519]: DEBUG nova.scheduler.client.report [None req-b9c19239-83b3-40a1-b398-a911c392dcc2 tempest-InstanceActionsTestJSON-366970633 tempest-InstanceActionsTestJSON-366970633-project-member] Inventory has not changed for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) 
set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1535.022768] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-abaf2164-2628-41b3-aa2a-dce555f7d236 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] [instance: 3b506d10-a427-47b8-ab5f-c35e450b7eb1] Creating linked-clone VM from snapshot {{(pid=62519) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1535.023293] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-20e9aa74-e011-452e-86e0-fd4b4e3dc27f {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1535.032567] env[62519]: DEBUG oslo_vmware.api [None req-abaf2164-2628-41b3-aa2a-dce555f7d236 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Waiting for the task: (returnval){ [ 1535.032567] env[62519]: value = "task-1802301" [ 1535.032567] env[62519]: _type = "Task" [ 1535.032567] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1535.041074] env[62519]: DEBUG oslo_vmware.api [None req-abaf2164-2628-41b3-aa2a-dce555f7d236 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Task: {'id': task-1802301, 'name': CloneVM_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1535.052734] env[62519]: DEBUG oslo_service.periodic_task [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62519) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1535.053396] env[62519]: DEBUG nova.compute.manager [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Starting heal instance info cache {{(pid=62519) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10418}} [ 1535.155878] env[62519]: DEBUG oslo_vmware.api [None req-6acc3e5a-9e65-48ca-804d-bd19df3814de tempest-FloatingIPsAssociationTestJSON-131832216 tempest-FloatingIPsAssociationTestJSON-131832216-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52bc1c12-6ebe-30ea-d406-4a559480c8d0, 'name': SearchDatastore_Task, 'duration_secs': 0.026701} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1535.156773] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-266adf14-07f7-45af-8a91-4ba8df62d300 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1535.162794] env[62519]: DEBUG oslo_vmware.api [None req-6acc3e5a-9e65-48ca-804d-bd19df3814de tempest-FloatingIPsAssociationTestJSON-131832216 tempest-FloatingIPsAssociationTestJSON-131832216-project-member] Waiting for the task: (returnval){ [ 1535.162794] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]52c0029e-9575-776d-cc86-b658e1d001c3" [ 1535.162794] env[62519]: _type = "Task" [ 1535.162794] env[62519]: } to complete. 
{{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1535.171294] env[62519]: DEBUG oslo_vmware.api [None req-6acc3e5a-9e65-48ca-804d-bd19df3814de tempest-FloatingIPsAssociationTestJSON-131832216 tempest-FloatingIPsAssociationTestJSON-131832216-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52c0029e-9575-776d-cc86-b658e1d001c3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1535.195398] env[62519]: DEBUG oslo_concurrency.lockutils [req-cf6e1794-e65d-4896-895a-fc6d484077a0 req-7c19496d-5326-451c-889c-5fb4a71a780e service nova] Releasing lock "refresh_cache-09eefc1a-011b-4d2c-ab75-a1fcee740907" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1535.516809] env[62519]: DEBUG oslo_concurrency.lockutils [None req-b9c19239-83b3-40a1-b398-a911c392dcc2 tempest-InstanceActionsTestJSON-366970633 tempest-InstanceActionsTestJSON-366970633-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.660s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1535.517129] env[62519]: DEBUG nova.compute.manager [None req-b9c19239-83b3-40a1-b398-a911c392dcc2 tempest-InstanceActionsTestJSON-366970633 tempest-InstanceActionsTestJSON-366970633-project-member] [instance: f19c860f-736a-4783-8ef5-8262040e53a3] Start building networks asynchronously for instance. {{(pid=62519) _build_resources /opt/stack/nova/nova/compute/manager.py:2838}} [ 1535.519790] env[62519]: DEBUG oslo_concurrency.lockutils [None req-261745f1-111f-411a-bbb8-413ef8a82a9e tempest-ServerShowV254Test-1623148250 tempest-ServerShowV254Test-1623148250-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 36.252s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1535.520317] env[62519]: DEBUG nova.objects.instance [None req-261745f1-111f-411a-bbb8-413ef8a82a9e tempest-ServerShowV254Test-1623148250 tempest-ServerShowV254Test-1623148250-project-member] [instance: 1462d213-3f9a-4c60-8056-0b68f20a4939] Trying to apply a migration context that does not seem to be set for this instance {{(pid=62519) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1535.542609] env[62519]: DEBUG oslo_vmware.api [None req-abaf2164-2628-41b3-aa2a-dce555f7d236 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Task: {'id': task-1802301, 'name': CloneVM_Task} progress is 94%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1535.673864] env[62519]: DEBUG oslo_vmware.api [None req-6acc3e5a-9e65-48ca-804d-bd19df3814de tempest-FloatingIPsAssociationTestJSON-131832216 tempest-FloatingIPsAssociationTestJSON-131832216-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52c0029e-9575-776d-cc86-b658e1d001c3, 'name': SearchDatastore_Task, 'duration_secs': 0.010163} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1535.674346] env[62519]: DEBUG oslo_concurrency.lockutils [None req-6acc3e5a-9e65-48ca-804d-bd19df3814de tempest-FloatingIPsAssociationTestJSON-131832216 tempest-FloatingIPsAssociationTestJSON-131832216-project-member] Releasing lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1535.677031] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-6acc3e5a-9e65-48ca-804d-bd19df3814de tempest-FloatingIPsAssociationTestJSON-131832216 tempest-FloatingIPsAssociationTestJSON-131832216-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk to [datastore1] 09eefc1a-011b-4d2c-ab75-a1fcee740907/09eefc1a-011b-4d2c-ab75-a1fcee740907.vmdk {{(pid=62519) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1535.677031] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c7b67023-c682-4362-94a6-8db21e8d00e0 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1535.681230] env[62519]: DEBUG oslo_vmware.api [None req-6acc3e5a-9e65-48ca-804d-bd19df3814de tempest-FloatingIPsAssociationTestJSON-131832216 tempest-FloatingIPsAssociationTestJSON-131832216-project-member] Waiting for the task: (returnval){ [ 1535.681230] env[62519]: value = "task-1802302" [ 1535.681230] env[62519]: _type = "Task" [ 1535.681230] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1535.693019] env[62519]: DEBUG oslo_vmware.api [None req-6acc3e5a-9e65-48ca-804d-bd19df3814de tempest-FloatingIPsAssociationTestJSON-131832216 tempest-FloatingIPsAssociationTestJSON-131832216-project-member] Task: {'id': task-1802302, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1535.701338] env[62519]: DEBUG oslo_concurrency.lockutils [None req-a75dcbb4-805f-4659-9cde-d52941f8a0cc tempest-ServersTestJSON-192126600 tempest-ServersTestJSON-192126600-project-member] Acquiring lock "4c336ad1-8ce6-4f89-843e-0baae0d0dbda" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1535.701338] env[62519]: DEBUG oslo_concurrency.lockutils [None req-a75dcbb4-805f-4659-9cde-d52941f8a0cc tempest-ServersTestJSON-192126600 tempest-ServersTestJSON-192126600-project-member] Lock "4c336ad1-8ce6-4f89-843e-0baae0d0dbda" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1535.701338] env[62519]: DEBUG oslo_concurrency.lockutils [None req-a75dcbb4-805f-4659-9cde-d52941f8a0cc tempest-ServersTestJSON-192126600 tempest-ServersTestJSON-192126600-project-member] Acquiring lock "4c336ad1-8ce6-4f89-843e-0baae0d0dbda-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1535.701338] env[62519]: DEBUG oslo_concurrency.lockutils [None req-a75dcbb4-805f-4659-9cde-d52941f8a0cc tempest-ServersTestJSON-192126600 tempest-ServersTestJSON-192126600-project-member] Lock "4c336ad1-8ce6-4f89-843e-0baae0d0dbda-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1535.701338] env[62519]: DEBUG oslo_concurrency.lockutils [None req-a75dcbb4-805f-4659-9cde-d52941f8a0cc tempest-ServersTestJSON-192126600 tempest-ServersTestJSON-192126600-project-member] Lock "4c336ad1-8ce6-4f89-843e-0baae0d0dbda-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1535.701917] env[62519]: INFO nova.compute.manager [None req-a75dcbb4-805f-4659-9cde-d52941f8a0cc tempest-ServersTestJSON-192126600 tempest-ServersTestJSON-192126600-project-member] [instance: 4c336ad1-8ce6-4f89-843e-0baae0d0dbda] Terminating instance [ 1536.008858] env[62519]: DEBUG nova.network.neutron [None req-8bccbc50-fc90-49af-b28a-2ab70ae51360 tempest-ServerAddressesNegativeTestJSON-801003583 tempest-ServerAddressesNegativeTestJSON-801003583-project-member] [instance: 22380aef-c725-43a0-a957-06ced9518c21] Successfully updated port: ca6c27e1-d55f-43cc-8dee-29ddc604baad {{(pid=62519) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1536.024867] env[62519]: DEBUG nova.compute.utils [None req-b9c19239-83b3-40a1-b398-a911c392dcc2 tempest-InstanceActionsTestJSON-366970633 tempest-InstanceActionsTestJSON-366970633-project-member] Using /dev/sd instead of None {{(pid=62519) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1536.031353] env[62519]: DEBUG nova.compute.manager [None req-b9c19239-83b3-40a1-b398-a911c392dcc2 tempest-InstanceActionsTestJSON-366970633 tempest-InstanceActionsTestJSON-366970633-project-member] [instance: f19c860f-736a-4783-8ef5-8262040e53a3] 
Allocating IP information in the background. {{(pid=62519) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1989}} [ 1536.031646] env[62519]: DEBUG nova.network.neutron [None req-b9c19239-83b3-40a1-b398-a911c392dcc2 tempest-InstanceActionsTestJSON-366970633 tempest-InstanceActionsTestJSON-366970633-project-member] [instance: f19c860f-736a-4783-8ef5-8262040e53a3] allocate_for_instance() {{(pid=62519) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1536.052206] env[62519]: DEBUG oslo_vmware.api [None req-abaf2164-2628-41b3-aa2a-dce555f7d236 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Task: {'id': task-1802301, 'name': CloneVM_Task} progress is 94%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1536.072444] env[62519]: DEBUG nova.compute.manager [req-b51b932b-3f33-4f57-b30c-bc0f98bef3d0 req-2c2f9410-a281-4571-920e-9e20c7d12867 service nova] [instance: 22380aef-c725-43a0-a957-06ced9518c21] Received event network-vif-plugged-ca6c27e1-d55f-43cc-8dee-29ddc604baad {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1536.072444] env[62519]: DEBUG oslo_concurrency.lockutils [req-b51b932b-3f33-4f57-b30c-bc0f98bef3d0 req-2c2f9410-a281-4571-920e-9e20c7d12867 service nova] Acquiring lock "22380aef-c725-43a0-a957-06ced9518c21-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1536.072444] env[62519]: DEBUG oslo_concurrency.lockutils [req-b51b932b-3f33-4f57-b30c-bc0f98bef3d0 req-2c2f9410-a281-4571-920e-9e20c7d12867 service nova] Lock "22380aef-c725-43a0-a957-06ced9518c21-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1536.072444] env[62519]: DEBUG oslo_concurrency.lockutils [req-b51b932b-3f33-4f57-b30c-bc0f98bef3d0 req-2c2f9410-a281-4571-920e-9e20c7d12867 service nova] Lock "22380aef-c725-43a0-a957-06ced9518c21-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1536.072444] env[62519]: DEBUG nova.compute.manager [req-b51b932b-3f33-4f57-b30c-bc0f98bef3d0 req-2c2f9410-a281-4571-920e-9e20c7d12867 service nova] [instance: 22380aef-c725-43a0-a957-06ced9518c21] No waiting events found dispatching network-vif-plugged-ca6c27e1-d55f-43cc-8dee-29ddc604baad {{(pid=62519) pop_instance_event /opt/stack/nova/nova/compute/manager.py:323}} [ 1536.072444] env[62519]: WARNING nova.compute.manager [req-b51b932b-3f33-4f57-b30c-bc0f98bef3d0 req-2c2f9410-a281-4571-920e-9e20c7d12867 service nova] [instance: 22380aef-c725-43a0-a957-06ced9518c21] Received unexpected event network-vif-plugged-ca6c27e1-d55f-43cc-8dee-29ddc604baad for instance with vm_state building and task_state spawning. 
[ 1536.097448] env[62519]: DEBUG nova.policy [None req-b9c19239-83b3-40a1-b398-a911c392dcc2 tempest-InstanceActionsTestJSON-366970633 tempest-InstanceActionsTestJSON-366970633-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'bb2a4995d71549b89f020dfdd3a6d470', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'd004a7d2ec074aa39666ae15861b9440', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62519) authorize /opt/stack/nova/nova/policy.py:192}} [ 1536.193936] env[62519]: DEBUG oslo_vmware.api [None req-6acc3e5a-9e65-48ca-804d-bd19df3814de tempest-FloatingIPsAssociationTestJSON-131832216 tempest-FloatingIPsAssociationTestJSON-131832216-project-member] Task: {'id': task-1802302, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.483871} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1536.194429] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-6acc3e5a-9e65-48ca-804d-bd19df3814de tempest-FloatingIPsAssociationTestJSON-131832216 tempest-FloatingIPsAssociationTestJSON-131832216-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk to [datastore1] 09eefc1a-011b-4d2c-ab75-a1fcee740907/09eefc1a-011b-4d2c-ab75-a1fcee740907.vmdk {{(pid=62519) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1536.194713] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-6acc3e5a-9e65-48ca-804d-bd19df3814de tempest-FloatingIPsAssociationTestJSON-131832216 tempest-FloatingIPsAssociationTestJSON-131832216-project-member] [instance: 09eefc1a-011b-4d2c-ab75-a1fcee740907] Extending root virtual disk to 1048576 {{(pid=62519) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1536.194999] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-06b8d4a0-aaae-42ea-a520-9caf0f1c5d17 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1536.202203] env[62519]: DEBUG oslo_vmware.api [None req-6acc3e5a-9e65-48ca-804d-bd19df3814de tempest-FloatingIPsAssociationTestJSON-131832216 tempest-FloatingIPsAssociationTestJSON-131832216-project-member] Waiting for the task: (returnval){ [ 1536.202203] env[62519]: value = "task-1802303" [ 1536.202203] env[62519]: _type = "Task" [ 1536.202203] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1536.211830] env[62519]: DEBUG nova.compute.manager [None req-a75dcbb4-805f-4659-9cde-d52941f8a0cc tempest-ServersTestJSON-192126600 tempest-ServersTestJSON-192126600-project-member] [instance: 4c336ad1-8ce6-4f89-843e-0baae0d0dbda] Start destroying the instance on the hypervisor. 
{{(pid=62519) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3166}} [ 1536.212081] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-a75dcbb4-805f-4659-9cde-d52941f8a0cc tempest-ServersTestJSON-192126600 tempest-ServersTestJSON-192126600-project-member] [instance: 4c336ad1-8ce6-4f89-843e-0baae0d0dbda] Destroying instance {{(pid=62519) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1536.212419] env[62519]: DEBUG oslo_vmware.api [None req-6acc3e5a-9e65-48ca-804d-bd19df3814de tempest-FloatingIPsAssociationTestJSON-131832216 tempest-FloatingIPsAssociationTestJSON-131832216-project-member] Task: {'id': task-1802303, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1536.213241] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af8fc993-bb6b-4edb-9e04-bcc7d3d5e990 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1536.220808] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-a75dcbb4-805f-4659-9cde-d52941f8a0cc tempest-ServersTestJSON-192126600 tempest-ServersTestJSON-192126600-project-member] [instance: 4c336ad1-8ce6-4f89-843e-0baae0d0dbda] Powering off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1536.221050] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e87974f6-0eff-45ac-be19-407554856814 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1536.228359] env[62519]: DEBUG oslo_vmware.api [None req-a75dcbb4-805f-4659-9cde-d52941f8a0cc tempest-ServersTestJSON-192126600 tempest-ServersTestJSON-192126600-project-member] Waiting for the task: (returnval){ [ 1536.228359] env[62519]: value = "task-1802304" [ 1536.228359] env[62519]: _type = "Task" [ 1536.228359] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1536.247242] env[62519]: DEBUG oslo_vmware.api [None req-a75dcbb4-805f-4659-9cde-d52941f8a0cc tempest-ServersTestJSON-192126600 tempest-ServersTestJSON-192126600-project-member] Task: {'id': task-1802304, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1536.498339] env[62519]: DEBUG nova.network.neutron [None req-b9c19239-83b3-40a1-b398-a911c392dcc2 tempest-InstanceActionsTestJSON-366970633 tempest-InstanceActionsTestJSON-366970633-project-member] [instance: f19c860f-736a-4783-8ef5-8262040e53a3] Successfully created port: e362cda5-6e64-4311-96a1-880e1f66ab32 {{(pid=62519) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1536.511072] env[62519]: DEBUG oslo_concurrency.lockutils [None req-8bccbc50-fc90-49af-b28a-2ab70ae51360 tempest-ServerAddressesNegativeTestJSON-801003583 tempest-ServerAddressesNegativeTestJSON-801003583-project-member] Acquiring lock "refresh_cache-22380aef-c725-43a0-a957-06ced9518c21" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1536.511245] env[62519]: DEBUG oslo_concurrency.lockutils [None req-8bccbc50-fc90-49af-b28a-2ab70ae51360 tempest-ServerAddressesNegativeTestJSON-801003583 tempest-ServerAddressesNegativeTestJSON-801003583-project-member] Acquired lock "refresh_cache-22380aef-c725-43a0-a957-06ced9518c21" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1536.511401] env[62519]: DEBUG nova.network.neutron [None req-8bccbc50-fc90-49af-b28a-2ab70ae51360 tempest-ServerAddressesNegativeTestJSON-801003583 tempest-ServerAddressesNegativeTestJSON-801003583-project-member] [instance: 22380aef-c725-43a0-a957-06ced9518c21] Building network info cache for instance {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1536.531507] env[62519]: DEBUG nova.compute.manager [None req-b9c19239-83b3-40a1-b398-a911c392dcc2 tempest-InstanceActionsTestJSON-366970633 tempest-InstanceActionsTestJSON-366970633-project-member] [instance: f19c860f-736a-4783-8ef5-8262040e53a3] Start building block device mappings for instance. 
{{(pid=62519) _build_resources /opt/stack/nova/nova/compute/manager.py:2873}} [ 1536.541977] env[62519]: DEBUG oslo_concurrency.lockutils [None req-261745f1-111f-411a-bbb8-413ef8a82a9e tempest-ServerShowV254Test-1623148250 tempest-ServerShowV254Test-1623148250-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.022s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1536.543459] env[62519]: DEBUG oslo_concurrency.lockutils [None req-ed5a0cc8-9947-43e0-a22d-d24f2fbe8a1c tempest-DeleteServersAdminTestJSON-456585889 tempest-DeleteServersAdminTestJSON-456585889-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 35.953s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1536.543459] env[62519]: DEBUG oslo_concurrency.lockutils [None req-ed5a0cc8-9947-43e0-a22d-d24f2fbe8a1c tempest-DeleteServersAdminTestJSON-456585889 tempest-DeleteServersAdminTestJSON-456585889-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1536.546104] env[62519]: DEBUG oslo_concurrency.lockutils [None req-ac03e3f5-b1cf-4968-b811-42fd40903176 tempest-ServerShowV254Test-1623148250 tempest-ServerShowV254Test-1623148250-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 32.519s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1536.546104] env[62519]: DEBUG oslo_concurrency.lockutils [None req-ac03e3f5-b1cf-4968-b811-42fd40903176 tempest-ServerShowV254Test-1623148250 tempest-ServerShowV254Test-1623148250-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1536.548860] env[62519]: DEBUG oslo_concurrency.lockutils [None req-804e8c96-430f-4309-af0c-933dbd81ccef tempest-ServerShowV247Test-1912217433 tempest-ServerShowV247Test-1912217433-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 31.820s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1536.548860] env[62519]: DEBUG oslo_concurrency.lockutils [None req-804e8c96-430f-4309-af0c-933dbd81ccef tempest-ServerShowV247Test-1912217433 tempest-ServerShowV247Test-1912217433-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1536.549934] env[62519]: DEBUG oslo_concurrency.lockutils [None req-51a5c574-31b5-4e20-8d04-338349c55e60 tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 30.921s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1536.551496] env[62519]: INFO nova.compute.claims [None 
req-51a5c574-31b5-4e20-8d04-338349c55e60 tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] [instance: 8070aa59-3547-460a-b914-0e84620023d0] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1536.568062] env[62519]: DEBUG oslo_vmware.api [None req-abaf2164-2628-41b3-aa2a-dce555f7d236 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Task: {'id': task-1802301, 'name': CloneVM_Task} progress is 95%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1536.598282] env[62519]: INFO nova.scheduler.client.report [None req-ac03e3f5-b1cf-4968-b811-42fd40903176 tempest-ServerShowV254Test-1623148250 tempest-ServerShowV254Test-1623148250-project-member] Deleted allocations for instance 1462d213-3f9a-4c60-8056-0b68f20a4939 [ 1536.607284] env[62519]: INFO nova.scheduler.client.report [None req-ed5a0cc8-9947-43e0-a22d-d24f2fbe8a1c tempest-DeleteServersAdminTestJSON-456585889 tempest-DeleteServersAdminTestJSON-456585889-project-member] Deleted allocations for instance 40c7a9b8-d541-464a-ba87-76cfc183ae31 [ 1536.639631] env[62519]: INFO nova.scheduler.client.report [None req-804e8c96-430f-4309-af0c-933dbd81ccef tempest-ServerShowV247Test-1912217433 tempest-ServerShowV247Test-1912217433-project-member] Deleted allocations for instance 42497ab5-cce9-4614-a6d1-dffbf6764d7b [ 1536.714135] env[62519]: DEBUG oslo_vmware.api [None req-6acc3e5a-9e65-48ca-804d-bd19df3814de tempest-FloatingIPsAssociationTestJSON-131832216 tempest-FloatingIPsAssociationTestJSON-131832216-project-member] Task: {'id': task-1802303, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.068706} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1536.714489] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-6acc3e5a-9e65-48ca-804d-bd19df3814de tempest-FloatingIPsAssociationTestJSON-131832216 tempest-FloatingIPsAssociationTestJSON-131832216-project-member] [instance: 09eefc1a-011b-4d2c-ab75-a1fcee740907] Extended root virtual disk {{(pid=62519) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1536.715176] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c23a694-39d7-43af-bbf8-20bd873dd4ef {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1536.742884] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-6acc3e5a-9e65-48ca-804d-bd19df3814de tempest-FloatingIPsAssociationTestJSON-131832216 tempest-FloatingIPsAssociationTestJSON-131832216-project-member] [instance: 09eefc1a-011b-4d2c-ab75-a1fcee740907] Reconfiguring VM instance instance-00000024 to attach disk [datastore1] 09eefc1a-011b-4d2c-ab75-a1fcee740907/09eefc1a-011b-4d2c-ab75-a1fcee740907.vmdk or device None with type sparse {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1536.746807] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8d11c990-0444-4f7b-a552-04f0dc81d012 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1536.771490] env[62519]: DEBUG oslo_vmware.api [None req-a75dcbb4-805f-4659-9cde-d52941f8a0cc tempest-ServersTestJSON-192126600 tempest-ServersTestJSON-192126600-project-member] Task: {'id': task-1802304, 'name': PowerOffVM_Task, 
'duration_secs': 0.265879} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1536.772872] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-a75dcbb4-805f-4659-9cde-d52941f8a0cc tempest-ServersTestJSON-192126600 tempest-ServersTestJSON-192126600-project-member] [instance: 4c336ad1-8ce6-4f89-843e-0baae0d0dbda] Powered off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1536.773333] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-a75dcbb4-805f-4659-9cde-d52941f8a0cc tempest-ServersTestJSON-192126600 tempest-ServersTestJSON-192126600-project-member] [instance: 4c336ad1-8ce6-4f89-843e-0baae0d0dbda] Unregistering the VM {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1536.773421] env[62519]: DEBUG oslo_vmware.api [None req-6acc3e5a-9e65-48ca-804d-bd19df3814de tempest-FloatingIPsAssociationTestJSON-131832216 tempest-FloatingIPsAssociationTestJSON-131832216-project-member] Waiting for the task: (returnval){ [ 1536.773421] env[62519]: value = "task-1802305" [ 1536.773421] env[62519]: _type = "Task" [ 1536.773421] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1536.773757] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-1b1999e8-aa34-4d7c-a448-2b2d7674f5be {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1536.784850] env[62519]: DEBUG oslo_vmware.api [None req-6acc3e5a-9e65-48ca-804d-bd19df3814de tempest-FloatingIPsAssociationTestJSON-131832216 tempest-FloatingIPsAssociationTestJSON-131832216-project-member] Task: {'id': task-1802305, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1536.883400] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-a75dcbb4-805f-4659-9cde-d52941f8a0cc tempest-ServersTestJSON-192126600 tempest-ServersTestJSON-192126600-project-member] [instance: 4c336ad1-8ce6-4f89-843e-0baae0d0dbda] Unregistered the VM {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1536.883540] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-a75dcbb4-805f-4659-9cde-d52941f8a0cc tempest-ServersTestJSON-192126600 tempest-ServersTestJSON-192126600-project-member] [instance: 4c336ad1-8ce6-4f89-843e-0baae0d0dbda] Deleting contents of the VM from datastore datastore1 {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1536.884079] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-a75dcbb4-805f-4659-9cde-d52941f8a0cc tempest-ServersTestJSON-192126600 tempest-ServersTestJSON-192126600-project-member] Deleting the datastore file [datastore1] 4c336ad1-8ce6-4f89-843e-0baae0d0dbda {{(pid=62519) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1536.884079] env[62519]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-519d7601-c8f2-4eba-af66-3a87b22ad31a {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1536.893013] env[62519]: DEBUG oslo_vmware.api [None req-a75dcbb4-805f-4659-9cde-d52941f8a0cc tempest-ServersTestJSON-192126600 tempest-ServersTestJSON-192126600-project-member] Waiting for the task: (returnval){ [ 1536.893013] env[62519]: value = "task-1802307" [ 1536.893013] env[62519]: _type = "Task" [ 1536.893013] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1536.902531] env[62519]: DEBUG oslo_vmware.api [None req-a75dcbb4-805f-4659-9cde-d52941f8a0cc tempest-ServersTestJSON-192126600 tempest-ServersTestJSON-192126600-project-member] Task: {'id': task-1802307, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1537.048761] env[62519]: DEBUG nova.network.neutron [None req-8bccbc50-fc90-49af-b28a-2ab70ae51360 tempest-ServerAddressesNegativeTestJSON-801003583 tempest-ServerAddressesNegativeTestJSON-801003583-project-member] [instance: 22380aef-c725-43a0-a957-06ced9518c21] Instance cache missing network info. {{(pid=62519) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1537.056609] env[62519]: DEBUG oslo_vmware.api [None req-abaf2164-2628-41b3-aa2a-dce555f7d236 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Task: {'id': task-1802301, 'name': CloneVM_Task, 'duration_secs': 1.740811} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1537.056854] env[62519]: INFO nova.virt.vmwareapi.vmops [None req-abaf2164-2628-41b3-aa2a-dce555f7d236 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] [instance: 3b506d10-a427-47b8-ab5f-c35e450b7eb1] Created linked-clone VM from snapshot [ 1537.057699] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1bf63632-fb76-4d28-8c39-e7dbd0c29f34 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1537.071668] env[62519]: DEBUG nova.virt.vmwareapi.images [None req-abaf2164-2628-41b3-aa2a-dce555f7d236 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] [instance: 3b506d10-a427-47b8-ab5f-c35e450b7eb1] Uploading image 5bc9589c-1176-4ec2-9bd9-420d0963e893 {{(pid=62519) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1537.095823] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Acquiring lock "refresh_cache-c8b7568b-ba07-4f65-818b-f84910209361" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1537.095823] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Acquired lock "refresh_cache-c8b7568b-ba07-4f65-818b-f84910209361" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1537.095823] env[62519]: DEBUG nova.network.neutron [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] [instance: c8b7568b-ba07-4f65-818b-f84910209361] Forcefully refreshing network info cache for instance {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2061}} [ 1537.105044] env[62519]: DEBUG oslo_vmware.rw_handles [None req-abaf2164-2628-41b3-aa2a-dce555f7d236 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1537.105044] env[62519]: value = "vm-373675" [ 1537.105044] env[62519]: _type = "VirtualMachine" [ 1537.105044] env[62519]: }. {{(pid=62519) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1537.105044] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-bfd5bea9-875f-46a4-929f-ce238bfb3b63 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1537.114062] env[62519]: DEBUG oslo_vmware.rw_handles [None req-abaf2164-2628-41b3-aa2a-dce555f7d236 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Lease: (returnval){ [ 1537.114062] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]529bfec0-5556-4e2e-eee3-8f7529a375ec" [ 1537.114062] env[62519]: _type = "HttpNfcLease" [ 1537.114062] env[62519]: } obtained for exporting VM: (result){ [ 1537.114062] env[62519]: value = "vm-373675" [ 1537.114062] env[62519]: _type = "VirtualMachine" [ 1537.114062] env[62519]: }. 
{{(pid=62519) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1537.115872] env[62519]: DEBUG oslo_vmware.api [None req-abaf2164-2628-41b3-aa2a-dce555f7d236 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Waiting for the lease: (returnval){ [ 1537.115872] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]529bfec0-5556-4e2e-eee3-8f7529a375ec" [ 1537.115872] env[62519]: _type = "HttpNfcLease" [ 1537.115872] env[62519]: } to be ready. {{(pid=62519) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1537.115872] env[62519]: DEBUG oslo_concurrency.lockutils [None req-ac03e3f5-b1cf-4968-b811-42fd40903176 tempest-ServerShowV254Test-1623148250 tempest-ServerShowV254Test-1623148250-project-member] Lock "1462d213-3f9a-4c60-8056-0b68f20a4939" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 37.291s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1537.121388] env[62519]: DEBUG oslo_concurrency.lockutils [None req-ed5a0cc8-9947-43e0-a22d-d24f2fbe8a1c tempest-DeleteServersAdminTestJSON-456585889 tempest-DeleteServersAdminTestJSON-456585889-project-member] Lock "40c7a9b8-d541-464a-ba87-76cfc183ae31" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 40.121s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1537.134015] env[62519]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1537.134015] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]529bfec0-5556-4e2e-eee3-8f7529a375ec" [ 1537.134015] env[62519]: _type = "HttpNfcLease" [ 1537.134015] env[62519]: } is ready. {{(pid=62519) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1537.134015] env[62519]: DEBUG oslo_vmware.rw_handles [None req-abaf2164-2628-41b3-aa2a-dce555f7d236 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1537.134015] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]529bfec0-5556-4e2e-eee3-8f7529a375ec" [ 1537.134015] env[62519]: _type = "HttpNfcLease" [ 1537.134015] env[62519]: }. {{(pid=62519) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1537.134015] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07905c54-45e1-405f-8c0b-f49b4642b7d1 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1537.142550] env[62519]: DEBUG oslo_vmware.rw_handles [None req-abaf2164-2628-41b3-aa2a-dce555f7d236 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5227b18a-0d4e-21a1-5a7b-6a9040892fac/disk-0.vmdk from lease info. {{(pid=62519) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1537.142883] env[62519]: DEBUG oslo_vmware.rw_handles [None req-abaf2164-2628-41b3-aa2a-dce555f7d236 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Opening URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5227b18a-0d4e-21a1-5a7b-6a9040892fac/disk-0.vmdk for reading. 
{{(pid=62519) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1537.221542] env[62519]: DEBUG oslo_concurrency.lockutils [None req-804e8c96-430f-4309-af0c-933dbd81ccef tempest-ServerShowV247Test-1912217433 tempest-ServerShowV247Test-1912217433-project-member] Lock "42497ab5-cce9-4614-a6d1-dffbf6764d7b" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 36.724s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1537.256687] env[62519]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-8c497540-22b4-42a1-93cc-76ce840e9290 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1537.291676] env[62519]: DEBUG oslo_vmware.api [None req-6acc3e5a-9e65-48ca-804d-bd19df3814de tempest-FloatingIPsAssociationTestJSON-131832216 tempest-FloatingIPsAssociationTestJSON-131832216-project-member] Task: {'id': task-1802305, 'name': ReconfigVM_Task, 'duration_secs': 0.294663} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1537.295467] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-6acc3e5a-9e65-48ca-804d-bd19df3814de tempest-FloatingIPsAssociationTestJSON-131832216 tempest-FloatingIPsAssociationTestJSON-131832216-project-member] [instance: 09eefc1a-011b-4d2c-ab75-a1fcee740907] Reconfigured VM instance instance-00000024 to attach disk [datastore1] 09eefc1a-011b-4d2c-ab75-a1fcee740907/09eefc1a-011b-4d2c-ab75-a1fcee740907.vmdk or device None with type sparse {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1537.296519] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-042a0fdf-9038-4a40-b594-714dc84bf39c {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1537.310438] env[62519]: DEBUG oslo_vmware.api [None req-6acc3e5a-9e65-48ca-804d-bd19df3814de tempest-FloatingIPsAssociationTestJSON-131832216 tempest-FloatingIPsAssociationTestJSON-131832216-project-member] Waiting for the task: (returnval){ [ 1537.310438] env[62519]: value = "task-1802309" [ 1537.310438] env[62519]: _type = "Task" [ 1537.310438] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1537.321268] env[62519]: DEBUG oslo_vmware.api [None req-6acc3e5a-9e65-48ca-804d-bd19df3814de tempest-FloatingIPsAssociationTestJSON-131832216 tempest-FloatingIPsAssociationTestJSON-131832216-project-member] Task: {'id': task-1802309, 'name': Rename_Task} progress is 10%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1537.327966] env[62519]: DEBUG nova.network.neutron [None req-8bccbc50-fc90-49af-b28a-2ab70ae51360 tempest-ServerAddressesNegativeTestJSON-801003583 tempest-ServerAddressesNegativeTestJSON-801003583-project-member] [instance: 22380aef-c725-43a0-a957-06ced9518c21] Updating instance_info_cache with network_info: [{"id": "ca6c27e1-d55f-43cc-8dee-29ddc604baad", "address": "fa:16:3e:dd:89:0e", "network": {"id": "7d9f5ffd-ddab-49e8-9121-a45e95fbd6f1", "bridge": "br-int", "label": "tempest-ServerAddressesNegativeTestJSON-947344625-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "640853b7caa24c45afca3f1db04ec053", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ac7039c0-3374-4c08-87fc-af2449b48b02", "external-id": "nsx-vlan-transportzone-592", "segmentation_id": 592, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapca6c27e1-d5", "ovs_interfaceid": "ca6c27e1-d55f-43cc-8dee-29ddc604baad", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1537.403197] env[62519]: DEBUG oslo_vmware.api [None req-a75dcbb4-805f-4659-9cde-d52941f8a0cc tempest-ServersTestJSON-192126600 tempest-ServersTestJSON-192126600-project-member] Task: {'id': task-1802307, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.180723} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1537.403545] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-a75dcbb4-805f-4659-9cde-d52941f8a0cc tempest-ServersTestJSON-192126600 tempest-ServersTestJSON-192126600-project-member] Deleted the datastore file {{(pid=62519) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1537.403740] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-a75dcbb4-805f-4659-9cde-d52941f8a0cc tempest-ServersTestJSON-192126600 tempest-ServersTestJSON-192126600-project-member] [instance: 4c336ad1-8ce6-4f89-843e-0baae0d0dbda] Deleted contents of the VM from datastore datastore1 {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1537.403982] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-a75dcbb4-805f-4659-9cde-d52941f8a0cc tempest-ServersTestJSON-192126600 tempest-ServersTestJSON-192126600-project-member] [instance: 4c336ad1-8ce6-4f89-843e-0baae0d0dbda] Instance destroyed {{(pid=62519) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1537.404137] env[62519]: INFO nova.compute.manager [None req-a75dcbb4-805f-4659-9cde-d52941f8a0cc tempest-ServersTestJSON-192126600 tempest-ServersTestJSON-192126600-project-member] [instance: 4c336ad1-8ce6-4f89-843e-0baae0d0dbda] Took 1.19 seconds to destroy the instance on the hypervisor. 
[ 1537.404338] env[62519]: DEBUG oslo.service.loopingcall [None req-a75dcbb4-805f-4659-9cde-d52941f8a0cc tempest-ServersTestJSON-192126600 tempest-ServersTestJSON-192126600-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62519) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1537.404529] env[62519]: DEBUG nova.compute.manager [-] [instance: 4c336ad1-8ce6-4f89-843e-0baae0d0dbda] Deallocating network for instance {{(pid=62519) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2297}} [ 1537.404623] env[62519]: DEBUG nova.network.neutron [-] [instance: 4c336ad1-8ce6-4f89-843e-0baae0d0dbda] deallocate_for_instance() {{(pid=62519) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1537.542668] env[62519]: DEBUG nova.compute.manager [None req-b9c19239-83b3-40a1-b398-a911c392dcc2 tempest-InstanceActionsTestJSON-366970633 tempest-InstanceActionsTestJSON-366970633-project-member] [instance: f19c860f-736a-4783-8ef5-8262040e53a3] Start spawning the instance on the hypervisor. {{(pid=62519) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2647}} [ 1537.578024] env[62519]: DEBUG nova.virt.hardware [None req-b9c19239-83b3-40a1-b398-a911c392dcc2 tempest-InstanceActionsTestJSON-366970633 tempest-InstanceActionsTestJSON-366970633-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T07:56:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T07:55:55Z,direct_url=,disk_format='vmdk',id=15793716-f1d9-4a86-9030-717adf498693,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4069337684d348e0a1ab4eb6a1a2b14d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T07:55:56Z,virtual_size=,visibility=), allow threads: False {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1537.578301] env[62519]: DEBUG nova.virt.hardware [None req-b9c19239-83b3-40a1-b398-a911c392dcc2 tempest-InstanceActionsTestJSON-366970633 tempest-InstanceActionsTestJSON-366970633-project-member] Flavor limits 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1537.578408] env[62519]: DEBUG nova.virt.hardware [None req-b9c19239-83b3-40a1-b398-a911c392dcc2 tempest-InstanceActionsTestJSON-366970633 tempest-InstanceActionsTestJSON-366970633-project-member] Image limits 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1537.579734] env[62519]: DEBUG nova.virt.hardware [None req-b9c19239-83b3-40a1-b398-a911c392dcc2 tempest-InstanceActionsTestJSON-366970633 tempest-InstanceActionsTestJSON-366970633-project-member] Flavor pref 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1537.579734] env[62519]: DEBUG nova.virt.hardware [None req-b9c19239-83b3-40a1-b398-a911c392dcc2 tempest-InstanceActionsTestJSON-366970633 tempest-InstanceActionsTestJSON-366970633-project-member] Image pref 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 
1537.579734] env[62519]: DEBUG nova.virt.hardware [None req-b9c19239-83b3-40a1-b398-a911c392dcc2 tempest-InstanceActionsTestJSON-366970633 tempest-InstanceActionsTestJSON-366970633-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1537.579734] env[62519]: DEBUG nova.virt.hardware [None req-b9c19239-83b3-40a1-b398-a911c392dcc2 tempest-InstanceActionsTestJSON-366970633 tempest-InstanceActionsTestJSON-366970633-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1537.579734] env[62519]: DEBUG nova.virt.hardware [None req-b9c19239-83b3-40a1-b398-a911c392dcc2 tempest-InstanceActionsTestJSON-366970633 tempest-InstanceActionsTestJSON-366970633-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62519) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1537.579734] env[62519]: DEBUG nova.virt.hardware [None req-b9c19239-83b3-40a1-b398-a911c392dcc2 tempest-InstanceActionsTestJSON-366970633 tempest-InstanceActionsTestJSON-366970633-project-member] Got 1 possible topologies {{(pid=62519) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1537.579734] env[62519]: DEBUG nova.virt.hardware [None req-b9c19239-83b3-40a1-b398-a911c392dcc2 tempest-InstanceActionsTestJSON-366970633 tempest-InstanceActionsTestJSON-366970633-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1537.580152] env[62519]: DEBUG nova.virt.hardware [None req-b9c19239-83b3-40a1-b398-a911c392dcc2 tempest-InstanceActionsTestJSON-366970633 tempest-InstanceActionsTestJSON-366970633-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1537.581096] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-045c6144-697b-4057-99ca-4ac7982831fc {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1537.592708] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61e27577-b91e-46f8-9b5c-3dee009affff {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1537.661838] env[62519]: DEBUG nova.network.neutron [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] [instance: c8b7568b-ba07-4f65-818b-f84910209361] Instance cache missing network info. 
{{(pid=62519) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1537.822208] env[62519]: DEBUG nova.compute.manager [None req-4cfc95a6-4681-435a-a83c-71f03554060d tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] [instance: 2b68e95a-1d93-43ee-b0a6-996c9042f5c7] Checking state {{(pid=62519) _get_power_state /opt/stack/nova/nova/compute/manager.py:1799}} [ 1537.824517] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc171b09-26fa-4846-82ae-a60a58f7f557 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1537.830516] env[62519]: DEBUG oslo_concurrency.lockutils [None req-8bccbc50-fc90-49af-b28a-2ab70ae51360 tempest-ServerAddressesNegativeTestJSON-801003583 tempest-ServerAddressesNegativeTestJSON-801003583-project-member] Releasing lock "refresh_cache-22380aef-c725-43a0-a957-06ced9518c21" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1537.830871] env[62519]: DEBUG nova.compute.manager [None req-8bccbc50-fc90-49af-b28a-2ab70ae51360 tempest-ServerAddressesNegativeTestJSON-801003583 tempest-ServerAddressesNegativeTestJSON-801003583-project-member] [instance: 22380aef-c725-43a0-a957-06ced9518c21] Instance network_info: |[{"id": "ca6c27e1-d55f-43cc-8dee-29ddc604baad", "address": "fa:16:3e:dd:89:0e", "network": {"id": "7d9f5ffd-ddab-49e8-9121-a45e95fbd6f1", "bridge": "br-int", "label": "tempest-ServerAddressesNegativeTestJSON-947344625-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "640853b7caa24c45afca3f1db04ec053", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ac7039c0-3374-4c08-87fc-af2449b48b02", "external-id": "nsx-vlan-transportzone-592", "segmentation_id": 592, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapca6c27e1-d5", "ovs_interfaceid": "ca6c27e1-d55f-43cc-8dee-29ddc604baad", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62519) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2004}} [ 1537.837424] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-8bccbc50-fc90-49af-b28a-2ab70ae51360 tempest-ServerAddressesNegativeTestJSON-801003583 tempest-ServerAddressesNegativeTestJSON-801003583-project-member] [instance: 22380aef-c725-43a0-a957-06ced9518c21] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:dd:89:0e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ac7039c0-3374-4c08-87fc-af2449b48b02', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'ca6c27e1-d55f-43cc-8dee-29ddc604baad', 'vif_model': 'vmxnet3'}] {{(pid=62519) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1537.847565] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-8bccbc50-fc90-49af-b28a-2ab70ae51360 tempest-ServerAddressesNegativeTestJSON-801003583 
tempest-ServerAddressesNegativeTestJSON-801003583-project-member] Creating folder: Project (640853b7caa24c45afca3f1db04ec053). Parent ref: group-v373567. {{(pid=62519) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1537.851523] env[62519]: DEBUG oslo_vmware.api [None req-6acc3e5a-9e65-48ca-804d-bd19df3814de tempest-FloatingIPsAssociationTestJSON-131832216 tempest-FloatingIPsAssociationTestJSON-131832216-project-member] Task: {'id': task-1802309, 'name': Rename_Task, 'duration_secs': 0.186021} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1537.857439] env[62519]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-517bb91c-386c-4fd8-ad54-dd83ac11a0ba {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1537.858053] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-6acc3e5a-9e65-48ca-804d-bd19df3814de tempest-FloatingIPsAssociationTestJSON-131832216 tempest-FloatingIPsAssociationTestJSON-131832216-project-member] [instance: 09eefc1a-011b-4d2c-ab75-a1fcee740907] Powering on the VM {{(pid=62519) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1537.860149] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e416716b-01be-4010-939a-b7af88fbdf61 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1537.872786] env[62519]: DEBUG oslo_vmware.api [None req-6acc3e5a-9e65-48ca-804d-bd19df3814de tempest-FloatingIPsAssociationTestJSON-131832216 tempest-FloatingIPsAssociationTestJSON-131832216-project-member] Waiting for the task: (returnval){ [ 1537.872786] env[62519]: value = "task-1802311" [ 1537.872786] env[62519]: _type = "Task" [ 1537.872786] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1537.881685] env[62519]: INFO nova.virt.vmwareapi.vm_util [None req-8bccbc50-fc90-49af-b28a-2ab70ae51360 tempest-ServerAddressesNegativeTestJSON-801003583 tempest-ServerAddressesNegativeTestJSON-801003583-project-member] Created folder: Project (640853b7caa24c45afca3f1db04ec053) in parent group-v373567. [ 1537.881983] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-8bccbc50-fc90-49af-b28a-2ab70ae51360 tempest-ServerAddressesNegativeTestJSON-801003583 tempest-ServerAddressesNegativeTestJSON-801003583-project-member] Creating folder: Instances. Parent ref: group-v373676. {{(pid=62519) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1537.885595] env[62519]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-60625f6b-082d-4bf1-b08c-65b859fe1700 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1537.892233] env[62519]: DEBUG oslo_vmware.api [None req-6acc3e5a-9e65-48ca-804d-bd19df3814de tempest-FloatingIPsAssociationTestJSON-131832216 tempest-FloatingIPsAssociationTestJSON-131832216-project-member] Task: {'id': task-1802311, 'name': PowerOnVM_Task} progress is 33%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1537.902255] env[62519]: INFO nova.virt.vmwareapi.vm_util [None req-8bccbc50-fc90-49af-b28a-2ab70ae51360 tempest-ServerAddressesNegativeTestJSON-801003583 tempest-ServerAddressesNegativeTestJSON-801003583-project-member] Created folder: Instances in parent group-v373676. [ 1537.902689] env[62519]: DEBUG oslo.service.loopingcall [None req-8bccbc50-fc90-49af-b28a-2ab70ae51360 tempest-ServerAddressesNegativeTestJSON-801003583 tempest-ServerAddressesNegativeTestJSON-801003583-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62519) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1537.903121] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 22380aef-c725-43a0-a957-06ced9518c21] Creating VM on the ESX host {{(pid=62519) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1537.903502] env[62519]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-3f661bd8-781f-4373-87ba-14913078cded {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1537.951260] env[62519]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1537.951260] env[62519]: value = "task-1802313" [ 1537.951260] env[62519]: _type = "Task" [ 1537.951260] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1537.965710] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1802313, 'name': CreateVM_Task} progress is 6%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1538.206337] env[62519]: DEBUG nova.compute.manager [req-5b8c0ff6-f960-484e-8a8b-cbd51064b151 req-76f705da-eb3f-4f64-b706-bfc5f371c9c6 service nova] [instance: 22380aef-c725-43a0-a957-06ced9518c21] Received event network-changed-ca6c27e1-d55f-43cc-8dee-29ddc604baad {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1538.206337] env[62519]: DEBUG nova.compute.manager [req-5b8c0ff6-f960-484e-8a8b-cbd51064b151 req-76f705da-eb3f-4f64-b706-bfc5f371c9c6 service nova] [instance: 22380aef-c725-43a0-a957-06ced9518c21] Refreshing instance network info cache due to event network-changed-ca6c27e1-d55f-43cc-8dee-29ddc604baad. 
{{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11628}} [ 1538.206337] env[62519]: DEBUG oslo_concurrency.lockutils [req-5b8c0ff6-f960-484e-8a8b-cbd51064b151 req-76f705da-eb3f-4f64-b706-bfc5f371c9c6 service nova] Acquiring lock "refresh_cache-22380aef-c725-43a0-a957-06ced9518c21" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1538.206337] env[62519]: DEBUG oslo_concurrency.lockutils [req-5b8c0ff6-f960-484e-8a8b-cbd51064b151 req-76f705da-eb3f-4f64-b706-bfc5f371c9c6 service nova] Acquired lock "refresh_cache-22380aef-c725-43a0-a957-06ced9518c21" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1538.206337] env[62519]: DEBUG nova.network.neutron [req-5b8c0ff6-f960-484e-8a8b-cbd51064b151 req-76f705da-eb3f-4f64-b706-bfc5f371c9c6 service nova] [instance: 22380aef-c725-43a0-a957-06ced9518c21] Refreshing network info cache for port ca6c27e1-d55f-43cc-8dee-29ddc604baad {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1538.345024] env[62519]: DEBUG nova.network.neutron [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] [instance: c8b7568b-ba07-4f65-818b-f84910209361] Updating instance_info_cache with network_info: [] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1538.355276] env[62519]: INFO nova.compute.manager [None req-4cfc95a6-4681-435a-a83c-71f03554060d tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] [instance: 2b68e95a-1d93-43ee-b0a6-996c9042f5c7] instance snapshotting [ 1538.358443] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-666d8324-47b0-4313-9d81-c5399949ad69 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1538.398206] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94df7d5c-6c97-4edf-b939-f356ecdc3245 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1538.409536] env[62519]: DEBUG oslo_vmware.api [None req-6acc3e5a-9e65-48ca-804d-bd19df3814de tempest-FloatingIPsAssociationTestJSON-131832216 tempest-FloatingIPsAssociationTestJSON-131832216-project-member] Task: {'id': task-1802311, 'name': PowerOnVM_Task} progress is 88%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1538.465645] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1802313, 'name': CreateVM_Task} progress is 25%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1538.487823] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd888810-12c8-41e4-b7c4-69e9ebe077d2 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1538.497402] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20539e13-5a5e-4d00-903f-175462392387 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1538.536800] env[62519]: DEBUG nova.network.neutron [None req-b9c19239-83b3-40a1-b398-a911c392dcc2 tempest-InstanceActionsTestJSON-366970633 tempest-InstanceActionsTestJSON-366970633-project-member] [instance: f19c860f-736a-4783-8ef5-8262040e53a3] Successfully updated port: e362cda5-6e64-4311-96a1-880e1f66ab32 {{(pid=62519) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1538.541118] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d661990a-9930-472c-aef3-31688035ae3f {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1538.549836] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd5b3bee-adf1-4f9a-8aa9-8e5e31673259 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1538.568907] env[62519]: DEBUG nova.compute.provider_tree [None req-51a5c574-31b5-4e20-8d04-338349c55e60 tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] Updating inventory in ProviderTree for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 157, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1538.608358] env[62519]: DEBUG nova.compute.manager [req-8ba6b5cf-2ea7-4280-b75d-bf0009bda882 req-194217c6-de9f-4316-a5c6-ce46896549d9 service nova] [instance: 4c336ad1-8ce6-4f89-843e-0baae0d0dbda] Received event network-vif-deleted-57170226-5721-470d-8c8b-652ddff02ff6 {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1538.608358] env[62519]: INFO nova.compute.manager [req-8ba6b5cf-2ea7-4280-b75d-bf0009bda882 req-194217c6-de9f-4316-a5c6-ce46896549d9 service nova] [instance: 4c336ad1-8ce6-4f89-843e-0baae0d0dbda] Neutron deleted interface 57170226-5721-470d-8c8b-652ddff02ff6; detaching it from the instance and deleting it from the info cache [ 1538.608358] env[62519]: DEBUG nova.network.neutron [req-8ba6b5cf-2ea7-4280-b75d-bf0009bda882 req-194217c6-de9f-4316-a5c6-ce46896549d9 service nova] [instance: 4c336ad1-8ce6-4f89-843e-0baae0d0dbda] Updating instance_info_cache with network_info: [] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1538.805793] env[62519]: DEBUG nova.network.neutron [-] [instance: 4c336ad1-8ce6-4f89-843e-0baae0d0dbda] Updating instance_info_cache with network_info: [] {{(pid=62519) 
update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1538.848220] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Releasing lock "refresh_cache-c8b7568b-ba07-4f65-818b-f84910209361" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1538.848220] env[62519]: DEBUG nova.compute.manager [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] [instance: c8b7568b-ba07-4f65-818b-f84910209361] Updated the network info_cache for instance {{(pid=62519) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10489}} [ 1538.848220] env[62519]: DEBUG oslo_service.periodic_task [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62519) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1538.848220] env[62519]: DEBUG oslo_service.periodic_task [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62519) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1538.848220] env[62519]: DEBUG oslo_service.periodic_task [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62519) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1538.849292] env[62519]: DEBUG oslo_service.periodic_task [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62519) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1538.849292] env[62519]: DEBUG oslo_service.periodic_task [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62519) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1538.849988] env[62519]: DEBUG oslo_service.periodic_task [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62519) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1538.850722] env[62519]: DEBUG nova.compute.manager [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=62519) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11037}} [ 1538.850930] env[62519]: DEBUG oslo_service.periodic_task [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Running periodic task ComputeManager.update_available_resource {{(pid=62519) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1538.901523] env[62519]: DEBUG oslo_vmware.api [None req-6acc3e5a-9e65-48ca-804d-bd19df3814de tempest-FloatingIPsAssociationTestJSON-131832216 tempest-FloatingIPsAssociationTestJSON-131832216-project-member] Task: {'id': task-1802311, 'name': PowerOnVM_Task, 'duration_secs': 0.731409} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1538.902227] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-6acc3e5a-9e65-48ca-804d-bd19df3814de tempest-FloatingIPsAssociationTestJSON-131832216 tempest-FloatingIPsAssociationTestJSON-131832216-project-member] [instance: 09eefc1a-011b-4d2c-ab75-a1fcee740907] Powered on the VM {{(pid=62519) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1538.902516] env[62519]: INFO nova.compute.manager [None req-6acc3e5a-9e65-48ca-804d-bd19df3814de tempest-FloatingIPsAssociationTestJSON-131832216 tempest-FloatingIPsAssociationTestJSON-131832216-project-member] [instance: 09eefc1a-011b-4d2c-ab75-a1fcee740907] Took 7.98 seconds to spawn the instance on the hypervisor. [ 1538.906027] env[62519]: DEBUG nova.compute.manager [None req-6acc3e5a-9e65-48ca-804d-bd19df3814de tempest-FloatingIPsAssociationTestJSON-131832216 tempest-FloatingIPsAssociationTestJSON-131832216-project-member] [instance: 09eefc1a-011b-4d2c-ab75-a1fcee740907] Checking state {{(pid=62519) _get_power_state /opt/stack/nova/nova/compute/manager.py:1799}} [ 1538.906027] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eae6e122-91ad-4dc0-b56c-5373bd8616b6 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1538.920854] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-4cfc95a6-4681-435a-a83c-71f03554060d tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] [instance: 2b68e95a-1d93-43ee-b0a6-996c9042f5c7] Creating Snapshot of the VM instance {{(pid=62519) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1538.925569] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-985a77ec-6e67-41c3-9001-481e05361ed8 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1538.933288] env[62519]: DEBUG oslo_vmware.api [None req-4cfc95a6-4681-435a-a83c-71f03554060d tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] Waiting for the task: (returnval){ [ 1538.933288] env[62519]: value = "task-1802314" [ 1538.933288] env[62519]: _type = "Task" [ 1538.933288] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1538.946315] env[62519]: DEBUG oslo_vmware.api [None req-4cfc95a6-4681-435a-a83c-71f03554060d tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] Task: {'id': task-1802314, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1538.963182] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1802313, 'name': CreateVM_Task, 'duration_secs': 0.716646} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1538.963420] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 22380aef-c725-43a0-a957-06ced9518c21] Created VM on the ESX host {{(pid=62519) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1538.964186] env[62519]: DEBUG oslo_concurrency.lockutils [None req-8bccbc50-fc90-49af-b28a-2ab70ae51360 tempest-ServerAddressesNegativeTestJSON-801003583 tempest-ServerAddressesNegativeTestJSON-801003583-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1538.964445] env[62519]: DEBUG oslo_concurrency.lockutils [None req-8bccbc50-fc90-49af-b28a-2ab70ae51360 tempest-ServerAddressesNegativeTestJSON-801003583 tempest-ServerAddressesNegativeTestJSON-801003583-project-member] Acquired lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1538.964853] env[62519]: DEBUG oslo_concurrency.lockutils [None req-8bccbc50-fc90-49af-b28a-2ab70ae51360 tempest-ServerAddressesNegativeTestJSON-801003583 tempest-ServerAddressesNegativeTestJSON-801003583-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1538.965955] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-84e1fe09-029a-4a73-9222-2ef77763e401 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1538.972516] env[62519]: DEBUG oslo_vmware.api [None req-8bccbc50-fc90-49af-b28a-2ab70ae51360 tempest-ServerAddressesNegativeTestJSON-801003583 tempest-ServerAddressesNegativeTestJSON-801003583-project-member] Waiting for the task: (returnval){ [ 1538.972516] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]52d3e780-a434-30cf-bc3e-92f2135c2f9a" [ 1538.972516] env[62519]: _type = "Task" [ 1538.972516] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1538.985383] env[62519]: DEBUG oslo_vmware.api [None req-8bccbc50-fc90-49af-b28a-2ab70ae51360 tempest-ServerAddressesNegativeTestJSON-801003583 tempest-ServerAddressesNegativeTestJSON-801003583-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52d3e780-a434-30cf-bc3e-92f2135c2f9a, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1539.046653] env[62519]: DEBUG oslo_concurrency.lockutils [None req-b9c19239-83b3-40a1-b398-a911c392dcc2 tempest-InstanceActionsTestJSON-366970633 tempest-InstanceActionsTestJSON-366970633-project-member] Acquiring lock "refresh_cache-f19c860f-736a-4783-8ef5-8262040e53a3" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1539.046789] env[62519]: DEBUG oslo_concurrency.lockutils [None req-b9c19239-83b3-40a1-b398-a911c392dcc2 tempest-InstanceActionsTestJSON-366970633 tempest-InstanceActionsTestJSON-366970633-project-member] Acquired lock "refresh_cache-f19c860f-736a-4783-8ef5-8262040e53a3" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1539.046991] env[62519]: DEBUG nova.network.neutron [None req-b9c19239-83b3-40a1-b398-a911c392dcc2 tempest-InstanceActionsTestJSON-366970633 tempest-InstanceActionsTestJSON-366970633-project-member] [instance: f19c860f-736a-4783-8ef5-8262040e53a3] Building network info cache for instance {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1539.110545] env[62519]: DEBUG nova.scheduler.client.report [None req-51a5c574-31b5-4e20-8d04-338349c55e60 tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] Updated inventory for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 with generation 69 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 157, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 1539.110851] env[62519]: DEBUG nova.compute.provider_tree [None req-51a5c574-31b5-4e20-8d04-338349c55e60 tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] Updating resource provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 generation from 69 to 70 during operation: update_inventory {{(pid=62519) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1539.111072] env[62519]: DEBUG nova.compute.provider_tree [None req-51a5c574-31b5-4e20-8d04-338349c55e60 tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] Updating inventory in ProviderTree for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 157, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1539.115093] env[62519]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-23758220-d647-4a92-bbfd-e09218091cf7 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1539.129344] env[62519]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f224713f-13a2-4e3f-a953-c8eda0b5b2dc {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1539.173386] env[62519]: DEBUG nova.compute.manager [req-8ba6b5cf-2ea7-4280-b75d-bf0009bda882 req-194217c6-de9f-4316-a5c6-ce46896549d9 service nova] [instance: 4c336ad1-8ce6-4f89-843e-0baae0d0dbda] Detach interface failed, port_id=57170226-5721-470d-8c8b-652ddff02ff6, reason: Instance 4c336ad1-8ce6-4f89-843e-0baae0d0dbda could not be found. {{(pid=62519) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11455}} [ 1539.181871] env[62519]: DEBUG nova.network.neutron [req-5b8c0ff6-f960-484e-8a8b-cbd51064b151 req-76f705da-eb3f-4f64-b706-bfc5f371c9c6 service nova] [instance: 22380aef-c725-43a0-a957-06ced9518c21] Updated VIF entry in instance network info cache for port ca6c27e1-d55f-43cc-8dee-29ddc604baad. {{(pid=62519) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1539.182363] env[62519]: DEBUG nova.network.neutron [req-5b8c0ff6-f960-484e-8a8b-cbd51064b151 req-76f705da-eb3f-4f64-b706-bfc5f371c9c6 service nova] [instance: 22380aef-c725-43a0-a957-06ced9518c21] Updating instance_info_cache with network_info: [{"id": "ca6c27e1-d55f-43cc-8dee-29ddc604baad", "address": "fa:16:3e:dd:89:0e", "network": {"id": "7d9f5ffd-ddab-49e8-9121-a45e95fbd6f1", "bridge": "br-int", "label": "tempest-ServerAddressesNegativeTestJSON-947344625-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "640853b7caa24c45afca3f1db04ec053", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ac7039c0-3374-4c08-87fc-af2449b48b02", "external-id": "nsx-vlan-transportzone-592", "segmentation_id": 592, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapca6c27e1-d5", "ovs_interfaceid": "ca6c27e1-d55f-43cc-8dee-29ddc604baad", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1539.311536] env[62519]: INFO nova.compute.manager [-] [instance: 4c336ad1-8ce6-4f89-843e-0baae0d0dbda] Took 1.90 seconds to deallocate network for instance. [ 1539.355262] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1539.426025] env[62519]: INFO nova.compute.manager [None req-6acc3e5a-9e65-48ca-804d-bd19df3814de tempest-FloatingIPsAssociationTestJSON-131832216 tempest-FloatingIPsAssociationTestJSON-131832216-project-member] [instance: 09eefc1a-011b-4d2c-ab75-a1fcee740907] Took 47.52 seconds to build instance. 
[ 1539.446015] env[62519]: DEBUG oslo_vmware.api [None req-4cfc95a6-4681-435a-a83c-71f03554060d tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] Task: {'id': task-1802314, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1539.484239] env[62519]: DEBUG oslo_vmware.api [None req-8bccbc50-fc90-49af-b28a-2ab70ae51360 tempest-ServerAddressesNegativeTestJSON-801003583 tempest-ServerAddressesNegativeTestJSON-801003583-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52d3e780-a434-30cf-bc3e-92f2135c2f9a, 'name': SearchDatastore_Task, 'duration_secs': 0.01363} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1539.484435] env[62519]: DEBUG oslo_concurrency.lockutils [None req-8bccbc50-fc90-49af-b28a-2ab70ae51360 tempest-ServerAddressesNegativeTestJSON-801003583 tempest-ServerAddressesNegativeTestJSON-801003583-project-member] Releasing lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1539.484520] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-8bccbc50-fc90-49af-b28a-2ab70ae51360 tempest-ServerAddressesNegativeTestJSON-801003583 tempest-ServerAddressesNegativeTestJSON-801003583-project-member] [instance: 22380aef-c725-43a0-a957-06ced9518c21] Processing image 15793716-f1d9-4a86-9030-717adf498693 {{(pid=62519) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1539.484760] env[62519]: DEBUG oslo_concurrency.lockutils [None req-8bccbc50-fc90-49af-b28a-2ab70ae51360 tempest-ServerAddressesNegativeTestJSON-801003583 tempest-ServerAddressesNegativeTestJSON-801003583-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1539.484896] env[62519]: DEBUG oslo_concurrency.lockutils [None req-8bccbc50-fc90-49af-b28a-2ab70ae51360 tempest-ServerAddressesNegativeTestJSON-801003583 tempest-ServerAddressesNegativeTestJSON-801003583-project-member] Acquired lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1539.485081] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-8bccbc50-fc90-49af-b28a-2ab70ae51360 tempest-ServerAddressesNegativeTestJSON-801003583 tempest-ServerAddressesNegativeTestJSON-801003583-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62519) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1539.485374] env[62519]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d5b3f8a0-97c9-44c6-a9bb-2280d2a4a23a {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1539.496918] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-8bccbc50-fc90-49af-b28a-2ab70ae51360 tempest-ServerAddressesNegativeTestJSON-801003583 tempest-ServerAddressesNegativeTestJSON-801003583-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62519) 
mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1539.496918] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-8bccbc50-fc90-49af-b28a-2ab70ae51360 tempest-ServerAddressesNegativeTestJSON-801003583 tempest-ServerAddressesNegativeTestJSON-801003583-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62519) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1539.501021] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c6e1643f-fa28-4e4f-a115-db55621b85e7 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1539.503351] env[62519]: DEBUG oslo_vmware.api [None req-8bccbc50-fc90-49af-b28a-2ab70ae51360 tempest-ServerAddressesNegativeTestJSON-801003583 tempest-ServerAddressesNegativeTestJSON-801003583-project-member] Waiting for the task: (returnval){ [ 1539.503351] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]52fa9c0d-db6a-c14e-bbf5-cba2c23b349b" [ 1539.503351] env[62519]: _type = "Task" [ 1539.503351] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1539.512251] env[62519]: DEBUG oslo_vmware.api [None req-8bccbc50-fc90-49af-b28a-2ab70ae51360 tempest-ServerAddressesNegativeTestJSON-801003583 tempest-ServerAddressesNegativeTestJSON-801003583-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52fa9c0d-db6a-c14e-bbf5-cba2c23b349b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1539.615128] env[62519]: DEBUG nova.network.neutron [None req-b9c19239-83b3-40a1-b398-a911c392dcc2 tempest-InstanceActionsTestJSON-366970633 tempest-InstanceActionsTestJSON-366970633-project-member] [instance: f19c860f-736a-4783-8ef5-8262040e53a3] Instance cache missing network info. {{(pid=62519) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1539.618691] env[62519]: DEBUG oslo_concurrency.lockutils [None req-51a5c574-31b5-4e20-8d04-338349c55e60 tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.069s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1539.619272] env[62519]: DEBUG nova.compute.manager [None req-51a5c574-31b5-4e20-8d04-338349c55e60 tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] [instance: 8070aa59-3547-460a-b914-0e84620023d0] Start building networks asynchronously for instance. 
{{(pid=62519) _build_resources /opt/stack/nova/nova/compute/manager.py:2838}} [ 1539.622504] env[62519]: DEBUG oslo_concurrency.lockutils [None req-d5d00165-6954-4f25-bfe4-c298fa7922a8 tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 30.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1539.623707] env[62519]: INFO nova.compute.claims [None req-d5d00165-6954-4f25-bfe4-c298fa7922a8 tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] [instance: 27f9e890-4733-43aa-9bf1-351d42d75418] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1539.686560] env[62519]: DEBUG oslo_concurrency.lockutils [req-5b8c0ff6-f960-484e-8a8b-cbd51064b151 req-76f705da-eb3f-4f64-b706-bfc5f371c9c6 service nova] Releasing lock "refresh_cache-22380aef-c725-43a0-a957-06ced9518c21" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1539.818181] env[62519]: DEBUG oslo_concurrency.lockutils [None req-a75dcbb4-805f-4659-9cde-d52941f8a0cc tempest-ServersTestJSON-192126600 tempest-ServersTestJSON-192126600-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1539.860580] env[62519]: DEBUG nova.network.neutron [None req-b9c19239-83b3-40a1-b398-a911c392dcc2 tempest-InstanceActionsTestJSON-366970633 tempest-InstanceActionsTestJSON-366970633-project-member] [instance: f19c860f-736a-4783-8ef5-8262040e53a3] Updating instance_info_cache with network_info: [{"id": "e362cda5-6e64-4311-96a1-880e1f66ab32", "address": "fa:16:3e:15:fb:b0", "network": {"id": "deb202ae-72d1-47f1-8354-3ea5d8bdcd2a", "bridge": "br-int", "label": "tempest-InstanceActionsTestJSON-1495521208-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d004a7d2ec074aa39666ae15861b9440", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "098df9b7-d759-47f7-b756-334848cb423b", "external-id": "nsx-vlan-transportzone-765", "segmentation_id": 765, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape362cda5-6e", "ovs_interfaceid": "e362cda5-6e64-4311-96a1-880e1f66ab32", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1539.946581] env[62519]: DEBUG oslo_vmware.api [None req-4cfc95a6-4681-435a-a83c-71f03554060d tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] Task: {'id': task-1802314, 'name': CreateSnapshot_Task, 'duration_secs': 0.579679} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1539.946864] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-4cfc95a6-4681-435a-a83c-71f03554060d tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] [instance: 2b68e95a-1d93-43ee-b0a6-996c9042f5c7] Created Snapshot of the VM instance {{(pid=62519) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1539.947721] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0b3f7f4-e11b-4fa2-91c1-feed1fe34652 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1540.013878] env[62519]: DEBUG oslo_vmware.api [None req-8bccbc50-fc90-49af-b28a-2ab70ae51360 tempest-ServerAddressesNegativeTestJSON-801003583 tempest-ServerAddressesNegativeTestJSON-801003583-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52fa9c0d-db6a-c14e-bbf5-cba2c23b349b, 'name': SearchDatastore_Task, 'duration_secs': 0.018108} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1540.014790] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1ec09433-60b6-46bc-b538-0bdb879b2f87 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1540.021078] env[62519]: DEBUG oslo_vmware.api [None req-8bccbc50-fc90-49af-b28a-2ab70ae51360 tempest-ServerAddressesNegativeTestJSON-801003583 tempest-ServerAddressesNegativeTestJSON-801003583-project-member] Waiting for the task: (returnval){ [ 1540.021078] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]5205ed6b-acd9-921c-c82e-93f0a5123c43" [ 1540.021078] env[62519]: _type = "Task" [ 1540.021078] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1540.029341] env[62519]: DEBUG oslo_vmware.api [None req-8bccbc50-fc90-49af-b28a-2ab70ae51360 tempest-ServerAddressesNegativeTestJSON-801003583 tempest-ServerAddressesNegativeTestJSON-801003583-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]5205ed6b-acd9-921c-c82e-93f0a5123c43, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1540.128641] env[62519]: DEBUG nova.compute.utils [None req-51a5c574-31b5-4e20-8d04-338349c55e60 tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] Using /dev/sd instead of None {{(pid=62519) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1540.132434] env[62519]: DEBUG nova.compute.manager [None req-51a5c574-31b5-4e20-8d04-338349c55e60 tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] [instance: 8070aa59-3547-460a-b914-0e84620023d0] Allocating IP information in the background. 
{{(pid=62519) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1989}} [ 1540.132610] env[62519]: DEBUG nova.network.neutron [None req-51a5c574-31b5-4e20-8d04-338349c55e60 tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] [instance: 8070aa59-3547-460a-b914-0e84620023d0] allocate_for_instance() {{(pid=62519) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1540.205558] env[62519]: DEBUG oslo_concurrency.lockutils [None req-77454b93-3f15-4824-8029-76272dd44c6c tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Acquiring lock "8b20b91d-d4e7-4ec2-88ec-76b444a8d8a8" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1540.205822] env[62519]: DEBUG oslo_concurrency.lockutils [None req-77454b93-3f15-4824-8029-76272dd44c6c tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Lock "8b20b91d-d4e7-4ec2-88ec-76b444a8d8a8" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1540.244635] env[62519]: DEBUG nova.policy [None req-51a5c574-31b5-4e20-8d04-338349c55e60 tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'cbf140b3b87d4f8ea80c5f91a9dd9c37', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f69f08ab17cc423a98f0ae56f706c62b', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62519) authorize /opt/stack/nova/nova/policy.py:192}} [ 1540.363290] env[62519]: DEBUG oslo_concurrency.lockutils [None req-b9c19239-83b3-40a1-b398-a911c392dcc2 tempest-InstanceActionsTestJSON-366970633 tempest-InstanceActionsTestJSON-366970633-project-member] Releasing lock "refresh_cache-f19c860f-736a-4783-8ef5-8262040e53a3" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1540.363829] env[62519]: DEBUG nova.compute.manager [None req-b9c19239-83b3-40a1-b398-a911c392dcc2 tempest-InstanceActionsTestJSON-366970633 tempest-InstanceActionsTestJSON-366970633-project-member] [instance: f19c860f-736a-4783-8ef5-8262040e53a3] Instance network_info: |[{"id": "e362cda5-6e64-4311-96a1-880e1f66ab32", "address": "fa:16:3e:15:fb:b0", "network": {"id": "deb202ae-72d1-47f1-8354-3ea5d8bdcd2a", "bridge": "br-int", "label": "tempest-InstanceActionsTestJSON-1495521208-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d004a7d2ec074aa39666ae15861b9440", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": 
true, "nsx-logical-switch-id": "098df9b7-d759-47f7-b756-334848cb423b", "external-id": "nsx-vlan-transportzone-765", "segmentation_id": 765, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape362cda5-6e", "ovs_interfaceid": "e362cda5-6e64-4311-96a1-880e1f66ab32", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62519) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2004}} [ 1540.364352] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-b9c19239-83b3-40a1-b398-a911c392dcc2 tempest-InstanceActionsTestJSON-366970633 tempest-InstanceActionsTestJSON-366970633-project-member] [instance: f19c860f-736a-4783-8ef5-8262040e53a3] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:15:fb:b0', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '098df9b7-d759-47f7-b756-334848cb423b', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'e362cda5-6e64-4311-96a1-880e1f66ab32', 'vif_model': 'vmxnet3'}] {{(pid=62519) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1540.373307] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-b9c19239-83b3-40a1-b398-a911c392dcc2 tempest-InstanceActionsTestJSON-366970633 tempest-InstanceActionsTestJSON-366970633-project-member] Creating folder: Project (d004a7d2ec074aa39666ae15861b9440). Parent ref: group-v373567. {{(pid=62519) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1540.373597] env[62519]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f28e7b39-c267-4f82-84ff-6a2b5d171049 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1540.385937] env[62519]: INFO nova.virt.vmwareapi.vm_util [None req-b9c19239-83b3-40a1-b398-a911c392dcc2 tempest-InstanceActionsTestJSON-366970633 tempest-InstanceActionsTestJSON-366970633-project-member] Created folder: Project (d004a7d2ec074aa39666ae15861b9440) in parent group-v373567. [ 1540.386158] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-b9c19239-83b3-40a1-b398-a911c392dcc2 tempest-InstanceActionsTestJSON-366970633 tempest-InstanceActionsTestJSON-366970633-project-member] Creating folder: Instances. Parent ref: group-v373680. {{(pid=62519) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1540.386433] env[62519]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-40a8f8ae-0c26-497b-8d19-5dc465ad3d56 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1540.399069] env[62519]: INFO nova.virt.vmwareapi.vm_util [None req-b9c19239-83b3-40a1-b398-a911c392dcc2 tempest-InstanceActionsTestJSON-366970633 tempest-InstanceActionsTestJSON-366970633-project-member] Created folder: Instances in parent group-v373680. [ 1540.399321] env[62519]: DEBUG oslo.service.loopingcall [None req-b9c19239-83b3-40a1-b398-a911c392dcc2 tempest-InstanceActionsTestJSON-366970633 tempest-InstanceActionsTestJSON-366970633-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62519) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1540.399515] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f19c860f-736a-4783-8ef5-8262040e53a3] Creating VM on the ESX host {{(pid=62519) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1540.399717] env[62519]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-175bfe5b-c5ad-434f-93fb-93c161ab83d6 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1540.422423] env[62519]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1540.422423] env[62519]: value = "task-1802317" [ 1540.422423] env[62519]: _type = "Task" [ 1540.422423] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1540.431313] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1802317, 'name': CreateVM_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1540.468094] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-4cfc95a6-4681-435a-a83c-71f03554060d tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] [instance: 2b68e95a-1d93-43ee-b0a6-996c9042f5c7] Creating linked-clone VM from snapshot {{(pid=62519) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1540.468977] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-dabe6769-8139-499e-8ce8-d6f12d81c061 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1540.477086] env[62519]: DEBUG oslo_vmware.api [None req-4cfc95a6-4681-435a-a83c-71f03554060d tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] Waiting for the task: (returnval){ [ 1540.477086] env[62519]: value = "task-1802318" [ 1540.477086] env[62519]: _type = "Task" [ 1540.477086] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1540.487291] env[62519]: DEBUG oslo_vmware.api [None req-4cfc95a6-4681-435a-a83c-71f03554060d tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] Task: {'id': task-1802318, 'name': CloneVM_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1540.533346] env[62519]: DEBUG oslo_vmware.api [None req-8bccbc50-fc90-49af-b28a-2ab70ae51360 tempest-ServerAddressesNegativeTestJSON-801003583 tempest-ServerAddressesNegativeTestJSON-801003583-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]5205ed6b-acd9-921c-c82e-93f0a5123c43, 'name': SearchDatastore_Task, 'duration_secs': 0.02947} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1540.533346] env[62519]: DEBUG oslo_concurrency.lockutils [None req-8bccbc50-fc90-49af-b28a-2ab70ae51360 tempest-ServerAddressesNegativeTestJSON-801003583 tempest-ServerAddressesNegativeTestJSON-801003583-project-member] Releasing lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1540.533346] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-8bccbc50-fc90-49af-b28a-2ab70ae51360 tempest-ServerAddressesNegativeTestJSON-801003583 tempest-ServerAddressesNegativeTestJSON-801003583-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk to [datastore1] 22380aef-c725-43a0-a957-06ced9518c21/22380aef-c725-43a0-a957-06ced9518c21.vmdk {{(pid=62519) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1540.533581] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-fb40a5d8-a825-41e8-b263-a05f07e1617e {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1540.541127] env[62519]: DEBUG oslo_vmware.api [None req-8bccbc50-fc90-49af-b28a-2ab70ae51360 tempest-ServerAddressesNegativeTestJSON-801003583 tempest-ServerAddressesNegativeTestJSON-801003583-project-member] Waiting for the task: (returnval){ [ 1540.541127] env[62519]: value = "task-1802319" [ 1540.541127] env[62519]: _type = "Task" [ 1540.541127] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1540.550081] env[62519]: DEBUG oslo_vmware.api [None req-8bccbc50-fc90-49af-b28a-2ab70ae51360 tempest-ServerAddressesNegativeTestJSON-801003583 tempest-ServerAddressesNegativeTestJSON-801003583-project-member] Task: {'id': task-1802319, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1540.643812] env[62519]: DEBUG nova.compute.manager [None req-51a5c574-31b5-4e20-8d04-338349c55e60 tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] [instance: 8070aa59-3547-460a-b914-0e84620023d0] Start building block device mappings for instance. 
{{(pid=62519) _build_resources /opt/stack/nova/nova/compute/manager.py:2873}} [ 1540.880253] env[62519]: DEBUG nova.compute.manager [req-d828ed71-3dcd-4eb3-bfa9-24c4bf8eb877 req-fab61921-f605-4a8e-a2ee-f176aea54c7f service nova] [instance: f19c860f-736a-4783-8ef5-8262040e53a3] Received event network-vif-plugged-e362cda5-6e64-4311-96a1-880e1f66ab32 {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1540.880474] env[62519]: DEBUG oslo_concurrency.lockutils [req-d828ed71-3dcd-4eb3-bfa9-24c4bf8eb877 req-fab61921-f605-4a8e-a2ee-f176aea54c7f service nova] Acquiring lock "f19c860f-736a-4783-8ef5-8262040e53a3-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1540.880679] env[62519]: DEBUG oslo_concurrency.lockutils [req-d828ed71-3dcd-4eb3-bfa9-24c4bf8eb877 req-fab61921-f605-4a8e-a2ee-f176aea54c7f service nova] Lock "f19c860f-736a-4783-8ef5-8262040e53a3-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1540.880839] env[62519]: DEBUG oslo_concurrency.lockutils [req-d828ed71-3dcd-4eb3-bfa9-24c4bf8eb877 req-fab61921-f605-4a8e-a2ee-f176aea54c7f service nova] Lock "f19c860f-736a-4783-8ef5-8262040e53a3-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1540.880998] env[62519]: DEBUG nova.compute.manager [req-d828ed71-3dcd-4eb3-bfa9-24c4bf8eb877 req-fab61921-f605-4a8e-a2ee-f176aea54c7f service nova] [instance: f19c860f-736a-4783-8ef5-8262040e53a3] No waiting events found dispatching network-vif-plugged-e362cda5-6e64-4311-96a1-880e1f66ab32 {{(pid=62519) pop_instance_event /opt/stack/nova/nova/compute/manager.py:323}} [ 1540.881438] env[62519]: WARNING nova.compute.manager [req-d828ed71-3dcd-4eb3-bfa9-24c4bf8eb877 req-fab61921-f605-4a8e-a2ee-f176aea54c7f service nova] [instance: f19c860f-736a-4783-8ef5-8262040e53a3] Received unexpected event network-vif-plugged-e362cda5-6e64-4311-96a1-880e1f66ab32 for instance with vm_state building and task_state spawning. [ 1540.881609] env[62519]: DEBUG nova.compute.manager [req-d828ed71-3dcd-4eb3-bfa9-24c4bf8eb877 req-fab61921-f605-4a8e-a2ee-f176aea54c7f service nova] [instance: f19c860f-736a-4783-8ef5-8262040e53a3] Received event network-changed-e362cda5-6e64-4311-96a1-880e1f66ab32 {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1540.881758] env[62519]: DEBUG nova.compute.manager [req-d828ed71-3dcd-4eb3-bfa9-24c4bf8eb877 req-fab61921-f605-4a8e-a2ee-f176aea54c7f service nova] [instance: f19c860f-736a-4783-8ef5-8262040e53a3] Refreshing instance network info cache due to event network-changed-e362cda5-6e64-4311-96a1-880e1f66ab32. 
{{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11628}} [ 1540.881939] env[62519]: DEBUG oslo_concurrency.lockutils [req-d828ed71-3dcd-4eb3-bfa9-24c4bf8eb877 req-fab61921-f605-4a8e-a2ee-f176aea54c7f service nova] Acquiring lock "refresh_cache-f19c860f-736a-4783-8ef5-8262040e53a3" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1540.882089] env[62519]: DEBUG oslo_concurrency.lockutils [req-d828ed71-3dcd-4eb3-bfa9-24c4bf8eb877 req-fab61921-f605-4a8e-a2ee-f176aea54c7f service nova] Acquired lock "refresh_cache-f19c860f-736a-4783-8ef5-8262040e53a3" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1540.882242] env[62519]: DEBUG nova.network.neutron [req-d828ed71-3dcd-4eb3-bfa9-24c4bf8eb877 req-fab61921-f605-4a8e-a2ee-f176aea54c7f service nova] [instance: f19c860f-736a-4783-8ef5-8262040e53a3] Refreshing network info cache for port e362cda5-6e64-4311-96a1-880e1f66ab32 {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1540.936479] env[62519]: DEBUG oslo_concurrency.lockutils [None req-6acc3e5a-9e65-48ca-804d-bd19df3814de tempest-FloatingIPsAssociationTestJSON-131832216 tempest-FloatingIPsAssociationTestJSON-131832216-project-member] Lock "09eefc1a-011b-4d2c-ab75-a1fcee740907" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 112.130s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1540.936982] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1802317, 'name': CreateVM_Task, 'duration_secs': 0.364393} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1540.940661] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f19c860f-736a-4783-8ef5-8262040e53a3] Created VM on the ESX host {{(pid=62519) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1540.942175] env[62519]: DEBUG oslo_concurrency.lockutils [None req-b9c19239-83b3-40a1-b398-a911c392dcc2 tempest-InstanceActionsTestJSON-366970633 tempest-InstanceActionsTestJSON-366970633-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1540.942272] env[62519]: DEBUG oslo_concurrency.lockutils [None req-b9c19239-83b3-40a1-b398-a911c392dcc2 tempest-InstanceActionsTestJSON-366970633 tempest-InstanceActionsTestJSON-366970633-project-member] Acquired lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1540.942548] env[62519]: DEBUG oslo_concurrency.lockutils [None req-b9c19239-83b3-40a1-b398-a911c392dcc2 tempest-InstanceActionsTestJSON-366970633 tempest-InstanceActionsTestJSON-366970633-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1540.942844] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f99d0218-4718-4663-9995-e394a557ae45 {{(pid=62519) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1540.948640] env[62519]: DEBUG oslo_vmware.api [None req-b9c19239-83b3-40a1-b398-a911c392dcc2 tempest-InstanceActionsTestJSON-366970633 tempest-InstanceActionsTestJSON-366970633-project-member] Waiting for the task: (returnval){ [ 1540.948640] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]52672081-4e14-e6be-b49c-e028a51b799b" [ 1540.948640] env[62519]: _type = "Task" [ 1540.948640] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1540.963364] env[62519]: DEBUG oslo_vmware.api [None req-b9c19239-83b3-40a1-b398-a911c392dcc2 tempest-InstanceActionsTestJSON-366970633 tempest-InstanceActionsTestJSON-366970633-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52672081-4e14-e6be-b49c-e028a51b799b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1540.993292] env[62519]: DEBUG oslo_vmware.api [None req-4cfc95a6-4681-435a-a83c-71f03554060d tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] Task: {'id': task-1802318, 'name': CloneVM_Task} progress is 94%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1541.006637] env[62519]: DEBUG nova.network.neutron [None req-51a5c574-31b5-4e20-8d04-338349c55e60 tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] [instance: 8070aa59-3547-460a-b914-0e84620023d0] Successfully created port: 11187365-8def-40f6-822e-05bb089dd16b {{(pid=62519) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1541.057408] env[62519]: DEBUG oslo_vmware.api [None req-8bccbc50-fc90-49af-b28a-2ab70ae51360 tempest-ServerAddressesNegativeTestJSON-801003583 tempest-ServerAddressesNegativeTestJSON-801003583-project-member] Task: {'id': task-1802319, 'name': CopyVirtualDisk_Task} progress is 25%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1541.296928] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fce46ce1-af98-45ac-b769-d01fce7718ca {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1541.305415] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44129c95-3d6c-4aa3-8d47-9fc4bcc1ef5b {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1541.337339] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5590fd9-b931-4032-8f8b-614bdc3dd424 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1541.346406] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6410627d-163e-42bf-b30e-791f54063716 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1541.360620] env[62519]: DEBUG nova.compute.provider_tree [None req-d5d00165-6954-4f25-bfe4-c298fa7922a8 tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] Inventory has not changed in ProviderTree for provider: f8ca0d98-9158-4b85-ae0e-b106f966dd44 {{(pid=62519) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1541.441955] env[62519]: DEBUG nova.compute.manager [None req-df0df1dc-dc85-41ce-be1d-c03bdfcf0ead tempest-ServersTestBootFromVolume-845595414 tempest-ServersTestBootFromVolume-845595414-project-member] [instance: 765cf18e-53a0-4cc6-ad0e-337a6f68915c] Starting instance... {{(pid=62519) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2441}} [ 1541.458412] env[62519]: DEBUG oslo_vmware.api [None req-b9c19239-83b3-40a1-b398-a911c392dcc2 tempest-InstanceActionsTestJSON-366970633 tempest-InstanceActionsTestJSON-366970633-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52672081-4e14-e6be-b49c-e028a51b799b, 'name': SearchDatastore_Task, 'duration_secs': 0.060763} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1541.459460] env[62519]: DEBUG oslo_concurrency.lockutils [None req-b9c19239-83b3-40a1-b398-a911c392dcc2 tempest-InstanceActionsTestJSON-366970633 tempest-InstanceActionsTestJSON-366970633-project-member] Releasing lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1541.459460] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-b9c19239-83b3-40a1-b398-a911c392dcc2 tempest-InstanceActionsTestJSON-366970633 tempest-InstanceActionsTestJSON-366970633-project-member] [instance: f19c860f-736a-4783-8ef5-8262040e53a3] Processing image 15793716-f1d9-4a86-9030-717adf498693 {{(pid=62519) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1541.459460] env[62519]: DEBUG oslo_concurrency.lockutils [None req-b9c19239-83b3-40a1-b398-a911c392dcc2 tempest-InstanceActionsTestJSON-366970633 tempest-InstanceActionsTestJSON-366970633-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1541.459460] env[62519]: DEBUG oslo_concurrency.lockutils [None req-b9c19239-83b3-40a1-b398-a911c392dcc2 tempest-InstanceActionsTestJSON-366970633 tempest-InstanceActionsTestJSON-366970633-project-member] Acquired lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1541.459460] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-b9c19239-83b3-40a1-b398-a911c392dcc2 tempest-InstanceActionsTestJSON-366970633 tempest-InstanceActionsTestJSON-366970633-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62519) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1541.459720] env[62519]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-65f27ecf-84d0-4a44-8190-64fd59a9337f {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1541.469832] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-b9c19239-83b3-40a1-b398-a911c392dcc2 tempest-InstanceActionsTestJSON-366970633 tempest-InstanceActionsTestJSON-366970633-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62519) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1541.469832] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-b9c19239-83b3-40a1-b398-a911c392dcc2 tempest-InstanceActionsTestJSON-366970633 tempest-InstanceActionsTestJSON-366970633-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62519) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1541.470655] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2449a6b6-1c2d-4146-9b68-97c204b0668c {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1541.477064] env[62519]: DEBUG oslo_vmware.api [None req-b9c19239-83b3-40a1-b398-a911c392dcc2 tempest-InstanceActionsTestJSON-366970633 tempest-InstanceActionsTestJSON-366970633-project-member] Waiting for the task: (returnval){ [ 1541.477064] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]52a8d261-3d7c-cf59-4526-ea9c8af56537" [ 1541.477064] env[62519]: _type = "Task" [ 1541.477064] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1541.490505] env[62519]: DEBUG oslo_vmware.api [None req-b9c19239-83b3-40a1-b398-a911c392dcc2 tempest-InstanceActionsTestJSON-366970633 tempest-InstanceActionsTestJSON-366970633-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52a8d261-3d7c-cf59-4526-ea9c8af56537, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1541.493555] env[62519]: DEBUG oslo_vmware.api [None req-4cfc95a6-4681-435a-a83c-71f03554060d tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] Task: {'id': task-1802318, 'name': CloneVM_Task} progress is 94%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1541.552714] env[62519]: DEBUG oslo_vmware.api [None req-8bccbc50-fc90-49af-b28a-2ab70ae51360 tempest-ServerAddressesNegativeTestJSON-801003583 tempest-ServerAddressesNegativeTestJSON-801003583-project-member] Task: {'id': task-1802319, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.720121} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1541.553035] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-8bccbc50-fc90-49af-b28a-2ab70ae51360 tempest-ServerAddressesNegativeTestJSON-801003583 tempest-ServerAddressesNegativeTestJSON-801003583-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk to [datastore1] 22380aef-c725-43a0-a957-06ced9518c21/22380aef-c725-43a0-a957-06ced9518c21.vmdk {{(pid=62519) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1541.553341] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-8bccbc50-fc90-49af-b28a-2ab70ae51360 tempest-ServerAddressesNegativeTestJSON-801003583 tempest-ServerAddressesNegativeTestJSON-801003583-project-member] [instance: 22380aef-c725-43a0-a957-06ced9518c21] Extending root virtual disk to 1048576 {{(pid=62519) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1541.553573] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-130ff090-ccc7-4670-93ec-2266331065a9 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1541.560481] env[62519]: DEBUG oslo_vmware.api [None req-8bccbc50-fc90-49af-b28a-2ab70ae51360 tempest-ServerAddressesNegativeTestJSON-801003583 tempest-ServerAddressesNegativeTestJSON-801003583-project-member] Waiting for the task: (returnval){ [ 1541.560481] env[62519]: value = "task-1802320" [ 1541.560481] env[62519]: _type = "Task" [ 1541.560481] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1541.574671] env[62519]: DEBUG oslo_vmware.api [None req-8bccbc50-fc90-49af-b28a-2ab70ae51360 tempest-ServerAddressesNegativeTestJSON-801003583 tempest-ServerAddressesNegativeTestJSON-801003583-project-member] Task: {'id': task-1802320, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1541.656845] env[62519]: DEBUG nova.compute.manager [None req-51a5c574-31b5-4e20-8d04-338349c55e60 tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] [instance: 8070aa59-3547-460a-b914-0e84620023d0] Start spawning the instance on the hypervisor. {{(pid=62519) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2647}} [ 1541.660217] env[62519]: DEBUG nova.network.neutron [req-d828ed71-3dcd-4eb3-bfa9-24c4bf8eb877 req-fab61921-f605-4a8e-a2ee-f176aea54c7f service nova] [instance: f19c860f-736a-4783-8ef5-8262040e53a3] Updated VIF entry in instance network info cache for port e362cda5-6e64-4311-96a1-880e1f66ab32. 
{{(pid=62519) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1541.660217] env[62519]: DEBUG nova.network.neutron [req-d828ed71-3dcd-4eb3-bfa9-24c4bf8eb877 req-fab61921-f605-4a8e-a2ee-f176aea54c7f service nova] [instance: f19c860f-736a-4783-8ef5-8262040e53a3] Updating instance_info_cache with network_info: [{"id": "e362cda5-6e64-4311-96a1-880e1f66ab32", "address": "fa:16:3e:15:fb:b0", "network": {"id": "deb202ae-72d1-47f1-8354-3ea5d8bdcd2a", "bridge": "br-int", "label": "tempest-InstanceActionsTestJSON-1495521208-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d004a7d2ec074aa39666ae15861b9440", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "098df9b7-d759-47f7-b756-334848cb423b", "external-id": "nsx-vlan-transportzone-765", "segmentation_id": 765, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape362cda5-6e", "ovs_interfaceid": "e362cda5-6e64-4311-96a1-880e1f66ab32", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1541.685431] env[62519]: DEBUG nova.virt.hardware [None req-51a5c574-31b5-4e20-8d04-338349c55e60 tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T07:56:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T07:55:55Z,direct_url=,disk_format='vmdk',id=15793716-f1d9-4a86-9030-717adf498693,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4069337684d348e0a1ab4eb6a1a2b14d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T07:55:56Z,virtual_size=,visibility=), allow threads: False {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1541.685729] env[62519]: DEBUG nova.virt.hardware [None req-51a5c574-31b5-4e20-8d04-338349c55e60 tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] Flavor limits 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1541.685935] env[62519]: DEBUG nova.virt.hardware [None req-51a5c574-31b5-4e20-8d04-338349c55e60 tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] Image limits 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1541.686169] env[62519]: DEBUG nova.virt.hardware [None req-51a5c574-31b5-4e20-8d04-338349c55e60 tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] Flavor pref 0:0:0 
{{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1541.686363] env[62519]: DEBUG nova.virt.hardware [None req-51a5c574-31b5-4e20-8d04-338349c55e60 tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] Image pref 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1541.686563] env[62519]: DEBUG nova.virt.hardware [None req-51a5c574-31b5-4e20-8d04-338349c55e60 tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1541.686756] env[62519]: DEBUG nova.virt.hardware [None req-51a5c574-31b5-4e20-8d04-338349c55e60 tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1541.687015] env[62519]: DEBUG nova.virt.hardware [None req-51a5c574-31b5-4e20-8d04-338349c55e60 tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62519) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1541.687311] env[62519]: DEBUG nova.virt.hardware [None req-51a5c574-31b5-4e20-8d04-338349c55e60 tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] Got 1 possible topologies {{(pid=62519) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1541.687622] env[62519]: DEBUG nova.virt.hardware [None req-51a5c574-31b5-4e20-8d04-338349c55e60 tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1541.687888] env[62519]: DEBUG nova.virt.hardware [None req-51a5c574-31b5-4e20-8d04-338349c55e60 tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1541.689152] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9bbef9d2-62d8-4385-b0dc-0aa381622e3a {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1541.700427] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ca8aef1-546a-4476-b418-57cbc55bb1d8 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1541.865142] env[62519]: DEBUG nova.scheduler.client.report [None req-d5d00165-6954-4f25-bfe4-c298fa7922a8 tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] Inventory has not changed for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 
'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 157, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1541.963399] env[62519]: DEBUG oslo_concurrency.lockutils [None req-df0df1dc-dc85-41ce-be1d-c03bdfcf0ead tempest-ServersTestBootFromVolume-845595414 tempest-ServersTestBootFromVolume-845595414-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1541.989736] env[62519]: DEBUG oslo_vmware.api [None req-b9c19239-83b3-40a1-b398-a911c392dcc2 tempest-InstanceActionsTestJSON-366970633 tempest-InstanceActionsTestJSON-366970633-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52a8d261-3d7c-cf59-4526-ea9c8af56537, 'name': SearchDatastore_Task, 'duration_secs': 0.012388} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1541.990996] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3f1ced6e-f4d1-4a1a-bebc-a79e233eca47 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1541.996516] env[62519]: DEBUG oslo_vmware.api [None req-4cfc95a6-4681-435a-a83c-71f03554060d tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] Task: {'id': task-1802318, 'name': CloneVM_Task} progress is 95%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1541.999771] env[62519]: DEBUG oslo_vmware.api [None req-b9c19239-83b3-40a1-b398-a911c392dcc2 tempest-InstanceActionsTestJSON-366970633 tempest-InstanceActionsTestJSON-366970633-project-member] Waiting for the task: (returnval){ [ 1541.999771] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]52f47d35-a09f-b527-eaae-f70f8d67722d" [ 1541.999771] env[62519]: _type = "Task" [ 1541.999771] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1542.007790] env[62519]: DEBUG oslo_vmware.api [None req-b9c19239-83b3-40a1-b398-a911c392dcc2 tempest-InstanceActionsTestJSON-366970633 tempest-InstanceActionsTestJSON-366970633-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52f47d35-a09f-b527-eaae-f70f8d67722d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1542.074404] env[62519]: DEBUG oslo_vmware.api [None req-8bccbc50-fc90-49af-b28a-2ab70ae51360 tempest-ServerAddressesNegativeTestJSON-801003583 tempest-ServerAddressesNegativeTestJSON-801003583-project-member] Task: {'id': task-1802320, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.065917} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1542.074673] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-8bccbc50-fc90-49af-b28a-2ab70ae51360 tempest-ServerAddressesNegativeTestJSON-801003583 tempest-ServerAddressesNegativeTestJSON-801003583-project-member] [instance: 22380aef-c725-43a0-a957-06ced9518c21] Extended root virtual disk {{(pid=62519) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1542.076023] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79219747-7768-4cf0-ac6c-782543368d56 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1542.100103] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-8bccbc50-fc90-49af-b28a-2ab70ae51360 tempest-ServerAddressesNegativeTestJSON-801003583 tempest-ServerAddressesNegativeTestJSON-801003583-project-member] [instance: 22380aef-c725-43a0-a957-06ced9518c21] Reconfiguring VM instance instance-00000025 to attach disk [datastore1] 22380aef-c725-43a0-a957-06ced9518c21/22380aef-c725-43a0-a957-06ced9518c21.vmdk or device None with type sparse {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1542.100103] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-25a4600e-72d8-4783-8356-0e24275159da {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1542.120286] env[62519]: DEBUG oslo_vmware.api [None req-8bccbc50-fc90-49af-b28a-2ab70ae51360 tempest-ServerAddressesNegativeTestJSON-801003583 tempest-ServerAddressesNegativeTestJSON-801003583-project-member] Waiting for the task: (returnval){ [ 1542.120286] env[62519]: value = "task-1802321" [ 1542.120286] env[62519]: _type = "Task" [ 1542.120286] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1542.129155] env[62519]: DEBUG oslo_vmware.api [None req-8bccbc50-fc90-49af-b28a-2ab70ae51360 tempest-ServerAddressesNegativeTestJSON-801003583 tempest-ServerAddressesNegativeTestJSON-801003583-project-member] Task: {'id': task-1802321, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1542.162554] env[62519]: DEBUG oslo_concurrency.lockutils [req-d828ed71-3dcd-4eb3-bfa9-24c4bf8eb877 req-fab61921-f605-4a8e-a2ee-f176aea54c7f service nova] Releasing lock "refresh_cache-f19c860f-736a-4783-8ef5-8262040e53a3" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1542.369032] env[62519]: DEBUG oslo_concurrency.lockutils [None req-d5d00165-6954-4f25-bfe4-c298fa7922a8 tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.747s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1542.369583] env[62519]: DEBUG nova.compute.manager [None req-d5d00165-6954-4f25-bfe4-c298fa7922a8 tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] [instance: 27f9e890-4733-43aa-9bf1-351d42d75418] Start building networks asynchronously for instance. 
{{(pid=62519) _build_resources /opt/stack/nova/nova/compute/manager.py:2838}} [ 1542.372786] env[62519]: DEBUG oslo_concurrency.lockutils [None req-069fe864-a843-4aad-b4f0-161d16fbc86a tempest-ImagesOneServerTestJSON-1959164190 tempest-ImagesOneServerTestJSON-1959164190-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 32.661s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1542.374269] env[62519]: INFO nova.compute.claims [None req-069fe864-a843-4aad-b4f0-161d16fbc86a tempest-ImagesOneServerTestJSON-1959164190 tempest-ImagesOneServerTestJSON-1959164190-project-member] [instance: 4e3dee19-b99a-4257-88da-1b0531e2c0f9] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1542.495093] env[62519]: DEBUG oslo_vmware.api [None req-4cfc95a6-4681-435a-a83c-71f03554060d tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] Task: {'id': task-1802318, 'name': CloneVM_Task, 'duration_secs': 1.667694} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1542.495401] env[62519]: INFO nova.virt.vmwareapi.vmops [None req-4cfc95a6-4681-435a-a83c-71f03554060d tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] [instance: 2b68e95a-1d93-43ee-b0a6-996c9042f5c7] Created linked-clone VM from snapshot [ 1542.496353] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-345d9711-ab2d-469f-ae21-abe83a262cbc {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1542.508517] env[62519]: DEBUG nova.virt.vmwareapi.images [None req-4cfc95a6-4681-435a-a83c-71f03554060d tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] [instance: 2b68e95a-1d93-43ee-b0a6-996c9042f5c7] Uploading image 59def757-f227-4fcd-966f-6e6296b8e4b8 {{(pid=62519) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1542.516921] env[62519]: DEBUG oslo_vmware.api [None req-b9c19239-83b3-40a1-b398-a911c392dcc2 tempest-InstanceActionsTestJSON-366970633 tempest-InstanceActionsTestJSON-366970633-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52f47d35-a09f-b527-eaae-f70f8d67722d, 'name': SearchDatastore_Task, 'duration_secs': 0.020119} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1542.517222] env[62519]: DEBUG oslo_concurrency.lockutils [None req-b9c19239-83b3-40a1-b398-a911c392dcc2 tempest-InstanceActionsTestJSON-366970633 tempest-InstanceActionsTestJSON-366970633-project-member] Releasing lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1542.517482] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-b9c19239-83b3-40a1-b398-a911c392dcc2 tempest-InstanceActionsTestJSON-366970633 tempest-InstanceActionsTestJSON-366970633-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk to [datastore1] f19c860f-736a-4783-8ef5-8262040e53a3/f19c860f-736a-4783-8ef5-8262040e53a3.vmdk {{(pid=62519) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1542.517768] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ece4f43b-8b8c-410d-8031-576eafb589bb {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1542.525372] env[62519]: DEBUG oslo_vmware.api [None req-b9c19239-83b3-40a1-b398-a911c392dcc2 tempest-InstanceActionsTestJSON-366970633 tempest-InstanceActionsTestJSON-366970633-project-member] Waiting for the task: (returnval){ [ 1542.525372] env[62519]: value = "task-1802322" [ 1542.525372] env[62519]: _type = "Task" [ 1542.525372] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1542.531177] env[62519]: DEBUG oslo_vmware.rw_handles [None req-4cfc95a6-4681-435a-a83c-71f03554060d tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1542.531177] env[62519]: value = "vm-373683" [ 1542.531177] env[62519]: _type = "VirtualMachine" [ 1542.531177] env[62519]: }. {{(pid=62519) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1542.531451] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-a563468f-c29f-4313-ac71-7ab1a7d11feb {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1542.539540] env[62519]: DEBUG oslo_vmware.api [None req-b9c19239-83b3-40a1-b398-a911c392dcc2 tempest-InstanceActionsTestJSON-366970633 tempest-InstanceActionsTestJSON-366970633-project-member] Task: {'id': task-1802322, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1542.539540] env[62519]: DEBUG oslo_vmware.rw_handles [None req-4cfc95a6-4681-435a-a83c-71f03554060d tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] Lease: (returnval){ [ 1542.539540] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]52b5add7-ba73-ee9a-6bd6-a8b2fdd8817c" [ 1542.539540] env[62519]: _type = "HttpNfcLease" [ 1542.539540] env[62519]: } obtained for exporting VM: (result){ [ 1542.539540] env[62519]: value = "vm-373683" [ 1542.539540] env[62519]: _type = "VirtualMachine" [ 1542.539540] env[62519]: }. 
{{(pid=62519) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1542.539917] env[62519]: DEBUG oslo_vmware.api [None req-4cfc95a6-4681-435a-a83c-71f03554060d tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] Waiting for the lease: (returnval){ [ 1542.539917] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]52b5add7-ba73-ee9a-6bd6-a8b2fdd8817c" [ 1542.539917] env[62519]: _type = "HttpNfcLease" [ 1542.539917] env[62519]: } to be ready. {{(pid=62519) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1542.546244] env[62519]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1542.546244] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]52b5add7-ba73-ee9a-6bd6-a8b2fdd8817c" [ 1542.546244] env[62519]: _type = "HttpNfcLease" [ 1542.546244] env[62519]: } is initializing. {{(pid=62519) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1542.631147] env[62519]: DEBUG oslo_vmware.api [None req-8bccbc50-fc90-49af-b28a-2ab70ae51360 tempest-ServerAddressesNegativeTestJSON-801003583 tempest-ServerAddressesNegativeTestJSON-801003583-project-member] Task: {'id': task-1802321, 'name': ReconfigVM_Task, 'duration_secs': 0.485676} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1542.631675] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-8bccbc50-fc90-49af-b28a-2ab70ae51360 tempest-ServerAddressesNegativeTestJSON-801003583 tempest-ServerAddressesNegativeTestJSON-801003583-project-member] [instance: 22380aef-c725-43a0-a957-06ced9518c21] Reconfigured VM instance instance-00000025 to attach disk [datastore1] 22380aef-c725-43a0-a957-06ced9518c21/22380aef-c725-43a0-a957-06ced9518c21.vmdk or device None with type sparse {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1542.632473] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-f340fcb4-e8af-411c-ba9a-fbeaec49a06b {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1542.641401] env[62519]: DEBUG oslo_vmware.api [None req-8bccbc50-fc90-49af-b28a-2ab70ae51360 tempest-ServerAddressesNegativeTestJSON-801003583 tempest-ServerAddressesNegativeTestJSON-801003583-project-member] Waiting for the task: (returnval){ [ 1542.641401] env[62519]: value = "task-1802324" [ 1542.641401] env[62519]: _type = "Task" [ 1542.641401] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1542.649130] env[62519]: DEBUG oslo_vmware.api [None req-8bccbc50-fc90-49af-b28a-2ab70ae51360 tempest-ServerAddressesNegativeTestJSON-801003583 tempest-ServerAddressesNegativeTestJSON-801003583-project-member] Task: {'id': task-1802324, 'name': Rename_Task} progress is 5%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1542.669758] env[62519]: DEBUG oslo_concurrency.lockutils [None req-b3ee3a4d-b7fb-4e51-adc1-0f63f32d64f4 tempest-FloatingIPsAssociationTestJSON-131832216 tempest-FloatingIPsAssociationTestJSON-131832216-project-member] Acquiring lock "9ac3344d-219a-487f-b83f-96c17cd86dad" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1542.670996] env[62519]: DEBUG oslo_concurrency.lockutils [None req-b3ee3a4d-b7fb-4e51-adc1-0f63f32d64f4 tempest-FloatingIPsAssociationTestJSON-131832216 tempest-FloatingIPsAssociationTestJSON-131832216-project-member] Lock "9ac3344d-219a-487f-b83f-96c17cd86dad" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1542.879542] env[62519]: DEBUG nova.compute.utils [None req-d5d00165-6954-4f25-bfe4-c298fa7922a8 tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] Using /dev/sd instead of None {{(pid=62519) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1542.886579] env[62519]: DEBUG nova.compute.manager [None req-d5d00165-6954-4f25-bfe4-c298fa7922a8 tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] [instance: 27f9e890-4733-43aa-9bf1-351d42d75418] Allocating IP information in the background. {{(pid=62519) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1989}} [ 1542.886579] env[62519]: DEBUG nova.network.neutron [None req-d5d00165-6954-4f25-bfe4-c298fa7922a8 tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] [instance: 27f9e890-4733-43aa-9bf1-351d42d75418] allocate_for_instance() {{(pid=62519) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1542.926805] env[62519]: DEBUG nova.policy [None req-d5d00165-6954-4f25-bfe4-c298fa7922a8 tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'cbf140b3b87d4f8ea80c5f91a9dd9c37', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f69f08ab17cc423a98f0ae56f706c62b', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62519) authorize /opt/stack/nova/nova/policy.py:192}} [ 1542.982592] env[62519]: DEBUG nova.compute.manager [req-a37f52ea-70c7-492f-ac0e-5707c152aa64 req-f531e1b2-e0aa-41e6-8984-ed266b0d5864 service nova] [instance: 8070aa59-3547-460a-b914-0e84620023d0] Received event network-vif-plugged-11187365-8def-40f6-822e-05bb089dd16b {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1542.983852] env[62519]: DEBUG oslo_concurrency.lockutils [req-a37f52ea-70c7-492f-ac0e-5707c152aa64 req-f531e1b2-e0aa-41e6-8984-ed266b0d5864 service nova] Acquiring lock "8070aa59-3547-460a-b914-0e84620023d0-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62519) 
inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1542.983852] env[62519]: DEBUG oslo_concurrency.lockutils [req-a37f52ea-70c7-492f-ac0e-5707c152aa64 req-f531e1b2-e0aa-41e6-8984-ed266b0d5864 service nova] Lock "8070aa59-3547-460a-b914-0e84620023d0-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1542.983852] env[62519]: DEBUG oslo_concurrency.lockutils [req-a37f52ea-70c7-492f-ac0e-5707c152aa64 req-f531e1b2-e0aa-41e6-8984-ed266b0d5864 service nova] Lock "8070aa59-3547-460a-b914-0e84620023d0-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1542.983852] env[62519]: DEBUG nova.compute.manager [req-a37f52ea-70c7-492f-ac0e-5707c152aa64 req-f531e1b2-e0aa-41e6-8984-ed266b0d5864 service nova] [instance: 8070aa59-3547-460a-b914-0e84620023d0] No waiting events found dispatching network-vif-plugged-11187365-8def-40f6-822e-05bb089dd16b {{(pid=62519) pop_instance_event /opt/stack/nova/nova/compute/manager.py:323}} [ 1542.983852] env[62519]: WARNING nova.compute.manager [req-a37f52ea-70c7-492f-ac0e-5707c152aa64 req-f531e1b2-e0aa-41e6-8984-ed266b0d5864 service nova] [instance: 8070aa59-3547-460a-b914-0e84620023d0] Received unexpected event network-vif-plugged-11187365-8def-40f6-822e-05bb089dd16b for instance with vm_state building and task_state spawning. [ 1543.042418] env[62519]: DEBUG oslo_vmware.api [None req-b9c19239-83b3-40a1-b398-a911c392dcc2 tempest-InstanceActionsTestJSON-366970633 tempest-InstanceActionsTestJSON-366970633-project-member] Task: {'id': task-1802322, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1543.056683] env[62519]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1543.056683] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]52b5add7-ba73-ee9a-6bd6-a8b2fdd8817c" [ 1543.056683] env[62519]: _type = "HttpNfcLease" [ 1543.056683] env[62519]: } is ready. {{(pid=62519) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1543.056683] env[62519]: DEBUG oslo_vmware.rw_handles [None req-4cfc95a6-4681-435a-a83c-71f03554060d tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1543.056683] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]52b5add7-ba73-ee9a-6bd6-a8b2fdd8817c" [ 1543.056683] env[62519]: _type = "HttpNfcLease" [ 1543.056683] env[62519]: }. 
{{(pid=62519) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1543.057837] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23b7419e-00a4-40cd-9d69-fb06f30a4a9f {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1543.062712] env[62519]: DEBUG nova.network.neutron [None req-51a5c574-31b5-4e20-8d04-338349c55e60 tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] [instance: 8070aa59-3547-460a-b914-0e84620023d0] Successfully updated port: 11187365-8def-40f6-822e-05bb089dd16b {{(pid=62519) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1543.070153] env[62519]: DEBUG oslo_vmware.rw_handles [None req-4cfc95a6-4681-435a-a83c-71f03554060d tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52b489a0-c28f-4c20-f852-e0fdaea03b98/disk-0.vmdk from lease info. {{(pid=62519) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1543.070446] env[62519]: DEBUG oslo_vmware.rw_handles [None req-4cfc95a6-4681-435a-a83c-71f03554060d tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] Opening URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52b489a0-c28f-4c20-f852-e0fdaea03b98/disk-0.vmdk for reading. {{(pid=62519) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1543.155011] env[62519]: DEBUG oslo_vmware.api [None req-8bccbc50-fc90-49af-b28a-2ab70ae51360 tempest-ServerAddressesNegativeTestJSON-801003583 tempest-ServerAddressesNegativeTestJSON-801003583-project-member] Task: {'id': task-1802324, 'name': Rename_Task, 'duration_secs': 0.21181} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1543.155011] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-8bccbc50-fc90-49af-b28a-2ab70ae51360 tempest-ServerAddressesNegativeTestJSON-801003583 tempest-ServerAddressesNegativeTestJSON-801003583-project-member] [instance: 22380aef-c725-43a0-a957-06ced9518c21] Powering on the VM {{(pid=62519) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1543.155011] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-eb96f479-eba6-4e5f-905f-2d7e61671442 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1543.164145] env[62519]: DEBUG oslo_vmware.api [None req-8bccbc50-fc90-49af-b28a-2ab70ae51360 tempest-ServerAddressesNegativeTestJSON-801003583 tempest-ServerAddressesNegativeTestJSON-801003583-project-member] Waiting for the task: (returnval){ [ 1543.164145] env[62519]: value = "task-1802325" [ 1543.164145] env[62519]: _type = "Task" [ 1543.164145] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1543.173442] env[62519]: DEBUG oslo_vmware.api [None req-8bccbc50-fc90-49af-b28a-2ab70ae51360 tempest-ServerAddressesNegativeTestJSON-801003583 tempest-ServerAddressesNegativeTestJSON-801003583-project-member] Task: {'id': task-1802325, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1543.224316] env[62519]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-cd420ac8-0f02-46c8-aa56-f62321d912dc {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1543.265601] env[62519]: DEBUG nova.network.neutron [None req-d5d00165-6954-4f25-bfe4-c298fa7922a8 tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] [instance: 27f9e890-4733-43aa-9bf1-351d42d75418] Successfully created port: 85d5a3eb-9d3d-46ac-b538-3e317a13e6fc {{(pid=62519) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1543.384886] env[62519]: DEBUG nova.compute.manager [None req-d5d00165-6954-4f25-bfe4-c298fa7922a8 tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] [instance: 27f9e890-4733-43aa-9bf1-351d42d75418] Start building block device mappings for instance. {{(pid=62519) _build_resources /opt/stack/nova/nova/compute/manager.py:2873}} [ 1543.538339] env[62519]: DEBUG oslo_vmware.api [None req-b9c19239-83b3-40a1-b398-a911c392dcc2 tempest-InstanceActionsTestJSON-366970633 tempest-InstanceActionsTestJSON-366970633-project-member] Task: {'id': task-1802322, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.78426} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1543.538656] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-b9c19239-83b3-40a1-b398-a911c392dcc2 tempest-InstanceActionsTestJSON-366970633 tempest-InstanceActionsTestJSON-366970633-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk to [datastore1] f19c860f-736a-4783-8ef5-8262040e53a3/f19c860f-736a-4783-8ef5-8262040e53a3.vmdk {{(pid=62519) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1543.538947] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-b9c19239-83b3-40a1-b398-a911c392dcc2 tempest-InstanceActionsTestJSON-366970633 tempest-InstanceActionsTestJSON-366970633-project-member] [instance: f19c860f-736a-4783-8ef5-8262040e53a3] Extending root virtual disk to 1048576 {{(pid=62519) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1543.539222] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-4556ae5d-e49c-45e4-962d-6e8dd0f1b4c5 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1543.546706] env[62519]: DEBUG oslo_vmware.api [None req-b9c19239-83b3-40a1-b398-a911c392dcc2 tempest-InstanceActionsTestJSON-366970633 tempest-InstanceActionsTestJSON-366970633-project-member] Waiting for the task: (returnval){ [ 1543.546706] env[62519]: value = "task-1802326" [ 1543.546706] env[62519]: _type = "Task" [ 1543.546706] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1543.560288] env[62519]: DEBUG oslo_vmware.api [None req-b9c19239-83b3-40a1-b398-a911c392dcc2 tempest-InstanceActionsTestJSON-366970633 tempest-InstanceActionsTestJSON-366970633-project-member] Task: {'id': task-1802326, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1543.566775] env[62519]: DEBUG oslo_concurrency.lockutils [None req-51a5c574-31b5-4e20-8d04-338349c55e60 tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] Acquiring lock "refresh_cache-8070aa59-3547-460a-b914-0e84620023d0" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1543.566902] env[62519]: DEBUG oslo_concurrency.lockutils [None req-51a5c574-31b5-4e20-8d04-338349c55e60 tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] Acquired lock "refresh_cache-8070aa59-3547-460a-b914-0e84620023d0" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1543.567055] env[62519]: DEBUG nova.network.neutron [None req-51a5c574-31b5-4e20-8d04-338349c55e60 tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] [instance: 8070aa59-3547-460a-b914-0e84620023d0] Building network info cache for instance {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1543.682979] env[62519]: DEBUG oslo_vmware.api [None req-8bccbc50-fc90-49af-b28a-2ab70ae51360 tempest-ServerAddressesNegativeTestJSON-801003583 tempest-ServerAddressesNegativeTestJSON-801003583-project-member] Task: {'id': task-1802325, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1544.034945] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b68879e3-c7e5-4b57-bc24-7be6f9defe86 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1544.043058] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76ddbb45-73da-4f7c-bfaa-969e6087aa56 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1544.079940] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e2c947c-7d38-47d2-84bf-ce95064f75c2 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1544.086221] env[62519]: DEBUG oslo_vmware.api [None req-b9c19239-83b3-40a1-b398-a911c392dcc2 tempest-InstanceActionsTestJSON-366970633 tempest-InstanceActionsTestJSON-366970633-project-member] Task: {'id': task-1802326, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.066867} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1544.086852] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-b9c19239-83b3-40a1-b398-a911c392dcc2 tempest-InstanceActionsTestJSON-366970633 tempest-InstanceActionsTestJSON-366970633-project-member] [instance: f19c860f-736a-4783-8ef5-8262040e53a3] Extended root virtual disk {{(pid=62519) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1544.087732] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b815d9a-6206-4fd3-84a7-ad180dbf8f27 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1544.094128] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d43d2a1-fb7f-46bd-b70c-46268ed2a7f0 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1544.114930] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-b9c19239-83b3-40a1-b398-a911c392dcc2 tempest-InstanceActionsTestJSON-366970633 tempest-InstanceActionsTestJSON-366970633-project-member] [instance: f19c860f-736a-4783-8ef5-8262040e53a3] Reconfiguring VM instance instance-00000026 to attach disk [datastore1] f19c860f-736a-4783-8ef5-8262040e53a3/f19c860f-736a-4783-8ef5-8262040e53a3.vmdk or device None with type sparse {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1544.115925] env[62519]: DEBUG nova.network.neutron [None req-51a5c574-31b5-4e20-8d04-338349c55e60 tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] [instance: 8070aa59-3547-460a-b914-0e84620023d0] Instance cache missing network info. {{(pid=62519) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1544.118158] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-34d01327-8284-4f63-a432-fabdab03c8e6 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1544.140893] env[62519]: DEBUG nova.compute.provider_tree [None req-069fe864-a843-4aad-b4f0-161d16fbc86a tempest-ImagesOneServerTestJSON-1959164190 tempest-ImagesOneServerTestJSON-1959164190-project-member] Inventory has not changed in ProviderTree for provider: f8ca0d98-9158-4b85-ae0e-b106f966dd44 {{(pid=62519) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1544.148352] env[62519]: DEBUG oslo_vmware.api [None req-b9c19239-83b3-40a1-b398-a911c392dcc2 tempest-InstanceActionsTestJSON-366970633 tempest-InstanceActionsTestJSON-366970633-project-member] Waiting for the task: (returnval){ [ 1544.148352] env[62519]: value = "task-1802327" [ 1544.148352] env[62519]: _type = "Task" [ 1544.148352] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1544.161686] env[62519]: DEBUG oslo_vmware.api [None req-b9c19239-83b3-40a1-b398-a911c392dcc2 tempest-InstanceActionsTestJSON-366970633 tempest-InstanceActionsTestJSON-366970633-project-member] Task: {'id': task-1802327, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1544.176605] env[62519]: DEBUG oslo_vmware.api [None req-8bccbc50-fc90-49af-b28a-2ab70ae51360 tempest-ServerAddressesNegativeTestJSON-801003583 tempest-ServerAddressesNegativeTestJSON-801003583-project-member] Task: {'id': task-1802325, 'name': PowerOnVM_Task, 'duration_secs': 0.601036} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1544.176880] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-8bccbc50-fc90-49af-b28a-2ab70ae51360 tempest-ServerAddressesNegativeTestJSON-801003583 tempest-ServerAddressesNegativeTestJSON-801003583-project-member] [instance: 22380aef-c725-43a0-a957-06ced9518c21] Powered on the VM {{(pid=62519) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1544.177103] env[62519]: INFO nova.compute.manager [None req-8bccbc50-fc90-49af-b28a-2ab70ae51360 tempest-ServerAddressesNegativeTestJSON-801003583 tempest-ServerAddressesNegativeTestJSON-801003583-project-member] [instance: 22380aef-c725-43a0-a957-06ced9518c21] Took 9.29 seconds to spawn the instance on the hypervisor. [ 1544.177278] env[62519]: DEBUG nova.compute.manager [None req-8bccbc50-fc90-49af-b28a-2ab70ae51360 tempest-ServerAddressesNegativeTestJSON-801003583 tempest-ServerAddressesNegativeTestJSON-801003583-project-member] [instance: 22380aef-c725-43a0-a957-06ced9518c21] Checking state {{(pid=62519) _get_power_state /opt/stack/nova/nova/compute/manager.py:1799}} [ 1544.178166] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f77e574-909f-44cf-8ea3-3c09a2b96490 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1544.367753] env[62519]: DEBUG nova.network.neutron [None req-51a5c574-31b5-4e20-8d04-338349c55e60 tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] [instance: 8070aa59-3547-460a-b914-0e84620023d0] Updating instance_info_cache with network_info: [{"id": "11187365-8def-40f6-822e-05bb089dd16b", "address": "fa:16:3e:54:e8:ea", "network": {"id": "4c7928be-c05e-40d8-8744-53752aa1c802", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-1081889914-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f69f08ab17cc423a98f0ae56f706c62b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c4712af2-45ef-4652-8d2c-482ec70056d0", "external-id": "nsx-vlan-transportzone-826", "segmentation_id": 826, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap11187365-8d", "ovs_interfaceid": "11187365-8def-40f6-822e-05bb089dd16b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1544.405773] env[62519]: DEBUG nova.compute.manager [None req-d5d00165-6954-4f25-bfe4-c298fa7922a8 
tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] [instance: 27f9e890-4733-43aa-9bf1-351d42d75418] Start spawning the instance on the hypervisor. {{(pid=62519) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2647}} [ 1544.646391] env[62519]: DEBUG nova.scheduler.client.report [None req-069fe864-a843-4aad-b4f0-161d16fbc86a tempest-ImagesOneServerTestJSON-1959164190 tempest-ImagesOneServerTestJSON-1959164190-project-member] Inventory has not changed for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 157, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1544.669702] env[62519]: DEBUG oslo_vmware.api [None req-b9c19239-83b3-40a1-b398-a911c392dcc2 tempest-InstanceActionsTestJSON-366970633 tempest-InstanceActionsTestJSON-366970633-project-member] Task: {'id': task-1802327, 'name': ReconfigVM_Task, 'duration_secs': 0.289847} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1544.675048] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-b9c19239-83b3-40a1-b398-a911c392dcc2 tempest-InstanceActionsTestJSON-366970633 tempest-InstanceActionsTestJSON-366970633-project-member] [instance: f19c860f-736a-4783-8ef5-8262040e53a3] Reconfigured VM instance instance-00000026 to attach disk [datastore1] f19c860f-736a-4783-8ef5-8262040e53a3/f19c860f-736a-4783-8ef5-8262040e53a3.vmdk or device None with type sparse {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1544.675233] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-93441885-1bbd-4fff-8e01-214793bcd0c5 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1544.682319] env[62519]: DEBUG oslo_vmware.api [None req-b9c19239-83b3-40a1-b398-a911c392dcc2 tempest-InstanceActionsTestJSON-366970633 tempest-InstanceActionsTestJSON-366970633-project-member] Waiting for the task: (returnval){ [ 1544.682319] env[62519]: value = "task-1802328" [ 1544.682319] env[62519]: _type = "Task" [ 1544.682319] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1544.703607] env[62519]: DEBUG oslo_vmware.api [None req-b9c19239-83b3-40a1-b398-a911c392dcc2 tempest-InstanceActionsTestJSON-366970633 tempest-InstanceActionsTestJSON-366970633-project-member] Task: {'id': task-1802328, 'name': Rename_Task} progress is 5%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1544.706368] env[62519]: INFO nova.compute.manager [None req-8bccbc50-fc90-49af-b28a-2ab70ae51360 tempest-ServerAddressesNegativeTestJSON-801003583 tempest-ServerAddressesNegativeTestJSON-801003583-project-member] [instance: 22380aef-c725-43a0-a957-06ced9518c21] Took 50.23 seconds to build instance. 
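[editor's note] The spawn path traced above is a chain of vCenter tasks (CopyVirtualDisk_Task, ExtendVirtualDisk_Task, ReconfigVM_Task to attach the disk, Rename_Task, then PowerOnVM_Task), each started through the vSphere API and polled until done, which is what the recurring "Waiting for the task", "progress is N%" and "completed successfully" lines record. Below is a minimal sketch of that polling pattern using oslo.vmware's public session API; the host, credentials, retry count and poll interval are placeholders, not values from this log.

    # Sketch only: the invoke_api + wait_for_task pattern behind the
    # "Waiting for the task ... progress is N%" lines above.
    # Host and credentials are placeholders, not from this deployment.
    from oslo_vmware import api as vmware_api

    # Constructing the session logs in to vCenter.
    session = vmware_api.VMwareAPISession(
        'vcenter.example.org', 'administrator@vsphere.local', 'secret',
        10,    # api_retry_count
        0.5)   # task_poll_interval, in seconds

    def power_on(vm_ref):
        # PowerOnVM_Task returns a Task managed object reference;
        # wait_for_task polls it and raises if the task ends in error.
        task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
        return session.wait_for_task(task)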
[ 1544.871672] env[62519]: DEBUG oslo_concurrency.lockutils [None req-51a5c574-31b5-4e20-8d04-338349c55e60 tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] Releasing lock "refresh_cache-8070aa59-3547-460a-b914-0e84620023d0" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1544.872106] env[62519]: DEBUG nova.compute.manager [None req-51a5c574-31b5-4e20-8d04-338349c55e60 tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] [instance: 8070aa59-3547-460a-b914-0e84620023d0] Instance network_info: |[{"id": "11187365-8def-40f6-822e-05bb089dd16b", "address": "fa:16:3e:54:e8:ea", "network": {"id": "4c7928be-c05e-40d8-8744-53752aa1c802", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-1081889914-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f69f08ab17cc423a98f0ae56f706c62b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c4712af2-45ef-4652-8d2c-482ec70056d0", "external-id": "nsx-vlan-transportzone-826", "segmentation_id": 826, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap11187365-8d", "ovs_interfaceid": "11187365-8def-40f6-822e-05bb089dd16b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62519) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2004}} [ 1544.875873] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-51a5c574-31b5-4e20-8d04-338349c55e60 tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] [instance: 8070aa59-3547-460a-b914-0e84620023d0] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:54:e8:ea', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c4712af2-45ef-4652-8d2c-482ec70056d0', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '11187365-8def-40f6-822e-05bb089dd16b', 'vif_model': 'vmxnet3'}] {{(pid=62519) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1544.885471] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-51a5c574-31b5-4e20-8d04-338349c55e60 tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] Creating folder: Project (f69f08ab17cc423a98f0ae56f706c62b). Parent ref: group-v373567. {{(pid=62519) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1544.885556] env[62519]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-bd6e0fcc-2087-45f6-b0c0-d11cab6e1392 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1544.897730] env[62519]: INFO nova.virt.vmwareapi.vm_util [None req-51a5c574-31b5-4e20-8d04-338349c55e60 tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] Created folder: Project (f69f08ab17cc423a98f0ae56f706c62b) in parent group-v373567. 
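[editor's note] The network_info blob cached above for instance 8070aa59-3547-460a-b914-0e84620023d0 is a list of VIF dicts, which the vmwareapi driver then reduces to the compact "Instance VIF info" entry (network_ref, MAC address, iface_id, vif_model). Purely as a reading aid, here is a self-contained sketch that pulls the port ID, MAC and fixed IPs out of a VIF shaped like the logged one; the helper name is illustrative and not a Nova API.

    # Illustrative helper (not part of Nova): summarize a network_info VIF
    # entry shaped like the one cached in the log above.
    def summarize_vif(vif):
        ips = [ip['address']
               for subnet in vif['network']['subnets']
               for ip in subnet['ips']]
        return {'port_id': vif['id'], 'mac': vif['address'], 'fixed_ips': ips}

    vif = {
        'id': '11187365-8def-40f6-822e-05bb089dd16b',
        'address': 'fa:16:3e:54:e8:ea',
        'network': {'subnets': [{'cidr': '192.168.128.0/28',
                                 'ips': [{'address': '192.168.128.11'}]}]},
    }
    print(summarize_vif(vif))
    # prints the port ID, the MAC, and fixed_ips == ['192.168.128.11']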
[ 1544.898083] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-51a5c574-31b5-4e20-8d04-338349c55e60 tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] Creating folder: Instances. Parent ref: group-v373684. {{(pid=62519) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1544.898335] env[62519]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-237e2762-850d-4632-8829-8430928548f0 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1544.907297] env[62519]: INFO nova.virt.vmwareapi.vm_util [None req-51a5c574-31b5-4e20-8d04-338349c55e60 tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] Created folder: Instances in parent group-v373684. [ 1544.907589] env[62519]: DEBUG oslo.service.loopingcall [None req-51a5c574-31b5-4e20-8d04-338349c55e60 tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62519) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1544.908452] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8070aa59-3547-460a-b914-0e84620023d0] Creating VM on the ESX host {{(pid=62519) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1544.908452] env[62519]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-3e2ecaf5-af84-44ff-9e1f-bc200f8bf694 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1544.928736] env[62519]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1544.928736] env[62519]: value = "task-1802331" [ 1544.928736] env[62519]: _type = "Task" [ 1544.928736] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1544.941901] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1802331, 'name': CreateVM_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1545.139184] env[62519]: DEBUG nova.network.neutron [None req-d5d00165-6954-4f25-bfe4-c298fa7922a8 tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] [instance: 27f9e890-4733-43aa-9bf1-351d42d75418] Successfully updated port: 85d5a3eb-9d3d-46ac-b538-3e317a13e6fc {{(pid=62519) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1545.155406] env[62519]: DEBUG oslo_concurrency.lockutils [None req-069fe864-a843-4aad-b4f0-161d16fbc86a tempest-ImagesOneServerTestJSON-1959164190 tempest-ImagesOneServerTestJSON-1959164190-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.782s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1545.156114] env[62519]: DEBUG nova.compute.manager [None req-069fe864-a843-4aad-b4f0-161d16fbc86a tempest-ImagesOneServerTestJSON-1959164190 tempest-ImagesOneServerTestJSON-1959164190-project-member] [instance: 4e3dee19-b99a-4257-88da-1b0531e2c0f9] Start building networks asynchronously for instance. 
{{(pid=62519) _build_resources /opt/stack/nova/nova/compute/manager.py:2838}} [ 1545.159534] env[62519]: DEBUG oslo_concurrency.lockutils [None req-b10b0bf2-5335-4999-b79b-72a39bf6242d tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 34.954s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1545.159943] env[62519]: DEBUG oslo_concurrency.lockutils [None req-b10b0bf2-5335-4999-b79b-72a39bf6242d tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1545.163685] env[62519]: DEBUG oslo_concurrency.lockutils [None req-9eaae7f7-bdee-49ed-a285-41553171929e tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 34.853s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1545.164184] env[62519]: DEBUG oslo_concurrency.lockutils [None req-9eaae7f7-bdee-49ed-a285-41553171929e tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.001s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1545.168307] env[62519]: DEBUG oslo_concurrency.lockutils [None req-24e41ff3-6996-4210-8329-73785737b570 tempest-ServersTestMultiNic-797441404 tempest-ServersTestMultiNic-797441404-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 31.134s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1545.168307] env[62519]: DEBUG oslo_concurrency.lockutils [None req-24e41ff3-6996-4210-8329-73785737b570 tempest-ServersTestMultiNic-797441404 tempest-ServersTestMultiNic-797441404-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1545.173677] env[62519]: DEBUG oslo_concurrency.lockutils [None req-d66de93a-aada-4f3a-931f-5f8ddbb5f1e6 tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 30.480s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1545.175883] env[62519]: INFO nova.compute.claims [None req-d66de93a-aada-4f3a-931f-5f8ddbb5f1e6 tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] [instance: 24cb49c8-b2ef-4ede-aea6-6e34081beca1] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1545.197248] env[62519]: DEBUG oslo_vmware.api [None req-b9c19239-83b3-40a1-b398-a911c392dcc2 
tempest-InstanceActionsTestJSON-366970633 tempest-InstanceActionsTestJSON-366970633-project-member] Task: {'id': task-1802328, 'name': Rename_Task, 'duration_secs': 0.159143} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1545.197516] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-b9c19239-83b3-40a1-b398-a911c392dcc2 tempest-InstanceActionsTestJSON-366970633 tempest-InstanceActionsTestJSON-366970633-project-member] [instance: f19c860f-736a-4783-8ef5-8262040e53a3] Powering on the VM {{(pid=62519) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1545.197721] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-7e0b510b-4dc3-4671-baf7-eb2479528e8f {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1545.208040] env[62519]: DEBUG oslo_vmware.api [None req-b9c19239-83b3-40a1-b398-a911c392dcc2 tempest-InstanceActionsTestJSON-366970633 tempest-InstanceActionsTestJSON-366970633-project-member] Waiting for the task: (returnval){ [ 1545.208040] env[62519]: value = "task-1802332" [ 1545.208040] env[62519]: _type = "Task" [ 1545.208040] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1545.226662] env[62519]: DEBUG oslo_vmware.api [None req-b9c19239-83b3-40a1-b398-a911c392dcc2 tempest-InstanceActionsTestJSON-366970633 tempest-InstanceActionsTestJSON-366970633-project-member] Task: {'id': task-1802332, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1545.230464] env[62519]: INFO nova.scheduler.client.report [None req-9eaae7f7-bdee-49ed-a285-41553171929e tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] Deleted allocations for instance 3aab3adb-b28b-45dd-880f-b1cfbaeeed0c [ 1545.248597] env[62519]: INFO nova.scheduler.client.report [None req-b10b0bf2-5335-4999-b79b-72a39bf6242d tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] Deleted allocations for instance 56790738-4759-468a-9f43-f9c2bc2de23a [ 1545.268559] env[62519]: INFO nova.scheduler.client.report [None req-24e41ff3-6996-4210-8329-73785737b570 tempest-ServersTestMultiNic-797441404 tempest-ServersTestMultiNic-797441404-project-member] Deleted allocations for instance cfefa7c8-4986-4ad0-ac20-8784ee44a737 [ 1545.440182] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1802331, 'name': CreateVM_Task} progress is 99%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1545.644468] env[62519]: DEBUG oslo_concurrency.lockutils [None req-d5d00165-6954-4f25-bfe4-c298fa7922a8 tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] Acquiring lock "refresh_cache-27f9e890-4733-43aa-9bf1-351d42d75418" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1545.644468] env[62519]: DEBUG oslo_concurrency.lockutils [None req-d5d00165-6954-4f25-bfe4-c298fa7922a8 tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] Acquired lock "refresh_cache-27f9e890-4733-43aa-9bf1-351d42d75418" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1545.644468] env[62519]: DEBUG nova.network.neutron [None req-d5d00165-6954-4f25-bfe4-c298fa7922a8 tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] [instance: 27f9e890-4733-43aa-9bf1-351d42d75418] Building network info cache for instance {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1545.684032] env[62519]: DEBUG nova.compute.utils [None req-069fe864-a843-4aad-b4f0-161d16fbc86a tempest-ImagesOneServerTestJSON-1959164190 tempest-ImagesOneServerTestJSON-1959164190-project-member] Using /dev/sd instead of None {{(pid=62519) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1545.688411] env[62519]: DEBUG nova.compute.manager [None req-069fe864-a843-4aad-b4f0-161d16fbc86a tempest-ImagesOneServerTestJSON-1959164190 tempest-ImagesOneServerTestJSON-1959164190-project-member] [instance: 4e3dee19-b99a-4257-88da-1b0531e2c0f9] Allocating IP information in the background. {{(pid=62519) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1989}} [ 1545.688681] env[62519]: DEBUG nova.network.neutron [None req-069fe864-a843-4aad-b4f0-161d16fbc86a tempest-ImagesOneServerTestJSON-1959164190 tempest-ImagesOneServerTestJSON-1959164190-project-member] [instance: 4e3dee19-b99a-4257-88da-1b0531e2c0f9] allocate_for_instance() {{(pid=62519) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1545.721912] env[62519]: DEBUG oslo_vmware.api [None req-b9c19239-83b3-40a1-b398-a911c392dcc2 tempest-InstanceActionsTestJSON-366970633 tempest-InstanceActionsTestJSON-366970633-project-member] Task: {'id': task-1802332, 'name': PowerOnVM_Task, 'duration_secs': 0.503818} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1545.722701] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-b9c19239-83b3-40a1-b398-a911c392dcc2 tempest-InstanceActionsTestJSON-366970633 tempest-InstanceActionsTestJSON-366970633-project-member] [instance: f19c860f-736a-4783-8ef5-8262040e53a3] Powered on the VM {{(pid=62519) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1545.722701] env[62519]: INFO nova.compute.manager [None req-b9c19239-83b3-40a1-b398-a911c392dcc2 tempest-InstanceActionsTestJSON-366970633 tempest-InstanceActionsTestJSON-366970633-project-member] [instance: f19c860f-736a-4783-8ef5-8262040e53a3] Took 8.18 seconds to spawn the instance on the hypervisor. 
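Note: the "compute_resources" lines above record how long each caller waited for the lock and how long it was held (e.g. "waited 34.954s ... held 0.000s"). That bookkeeping comes from oslo.concurrency's lock wrapper; the snippet below is a simplified stand-in using threading.Lock, purely to illustrate the waited/held timing, and is not the oslo.concurrency implementation.

import contextlib
import threading
import time

_locks = {}
_registry_guard = threading.Lock()


@contextlib.contextmanager
def timed_lock(name):
    # One named lock per resource, created lazily under a small registry guard.
    with _registry_guard:
        lock = _locks.setdefault(name, threading.Lock())
    t0 = time.monotonic()
    lock.acquire()
    waited = time.monotonic() - t0
    print('Lock "%s" acquired :: waited %.3fs' % (name, waited))
    t1 = time.monotonic()
    try:
        yield
    finally:
        lock.release()
        print('Lock "%s" released :: held %.3fs' % (name, time.monotonic() - t1))


if __name__ == "__main__":
    with timed_lock("compute_resources"):
        time.sleep(0.05)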
[ 1545.722886] env[62519]: DEBUG nova.compute.manager [None req-b9c19239-83b3-40a1-b398-a911c392dcc2 tempest-InstanceActionsTestJSON-366970633 tempest-InstanceActionsTestJSON-366970633-project-member] [instance: f19c860f-736a-4783-8ef5-8262040e53a3] Checking state {{(pid=62519) _get_power_state /opt/stack/nova/nova/compute/manager.py:1799}} [ 1545.723718] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eea2afef-ffc4-4579-8f6d-fbc963e01c3f {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1545.736714] env[62519]: DEBUG nova.policy [None req-069fe864-a843-4aad-b4f0-161d16fbc86a tempest-ImagesOneServerTestJSON-1959164190 tempest-ImagesOneServerTestJSON-1959164190-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '9d142fff29f945669e31da144e9cd9e2', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '10b805c99a6945f3bcc685e1c2f9a816', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62519) authorize /opt/stack/nova/nova/policy.py:192}} [ 1545.744025] env[62519]: DEBUG oslo_concurrency.lockutils [None req-9eaae7f7-bdee-49ed-a285-41553171929e tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] Lock "3aab3adb-b28b-45dd-880f-b1cfbaeeed0c" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 39.065s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1545.759856] env[62519]: DEBUG oslo_concurrency.lockutils [None req-b10b0bf2-5335-4999-b79b-72a39bf6242d tempest-ListServersNegativeTestJSON-11413003 tempest-ListServersNegativeTestJSON-11413003-project-member] Lock "56790738-4759-468a-9f43-f9c2bc2de23a" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 39.176s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1545.780404] env[62519]: DEBUG oslo_concurrency.lockutils [None req-24e41ff3-6996-4210-8329-73785737b570 tempest-ServersTestMultiNic-797441404 tempest-ServersTestMultiNic-797441404-project-member] Lock "cfefa7c8-4986-4ad0-ac20-8784ee44a737" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 37.191s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1545.942682] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1802331, 'name': CreateVM_Task} progress is 99%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1546.053241] env[62519]: DEBUG nova.network.neutron [None req-069fe864-a843-4aad-b4f0-161d16fbc86a tempest-ImagesOneServerTestJSON-1959164190 tempest-ImagesOneServerTestJSON-1959164190-project-member] [instance: 4e3dee19-b99a-4257-88da-1b0531e2c0f9] Successfully created port: d23f0b24-455b-4112-8518-1eaca05eb428 {{(pid=62519) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1546.186871] env[62519]: DEBUG nova.network.neutron [None req-d5d00165-6954-4f25-bfe4-c298fa7922a8 tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] [instance: 27f9e890-4733-43aa-9bf1-351d42d75418] Instance cache missing network info. {{(pid=62519) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1546.192645] env[62519]: DEBUG nova.compute.manager [None req-069fe864-a843-4aad-b4f0-161d16fbc86a tempest-ImagesOneServerTestJSON-1959164190 tempest-ImagesOneServerTestJSON-1959164190-project-member] [instance: 4e3dee19-b99a-4257-88da-1b0531e2c0f9] Start building block device mappings for instance. {{(pid=62519) _build_resources /opt/stack/nova/nova/compute/manager.py:2873}} [ 1546.223598] env[62519]: DEBUG oslo_concurrency.lockutils [None req-8bccbc50-fc90-49af-b28a-2ab70ae51360 tempest-ServerAddressesNegativeTestJSON-801003583 tempest-ServerAddressesNegativeTestJSON-801003583-project-member] Lock "22380aef-c725-43a0-a957-06ced9518c21" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 116.328s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1546.250263] env[62519]: INFO nova.compute.manager [None req-b9c19239-83b3-40a1-b398-a911c392dcc2 tempest-InstanceActionsTestJSON-366970633 tempest-InstanceActionsTestJSON-366970633-project-member] [instance: f19c860f-736a-4783-8ef5-8262040e53a3] Took 48.24 seconds to build instance. 
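Note: the network_info blobs logged in this run carry, per VIF, the port id, MAC address, devname and fixed IPs. The helper below pulls those fields out of one VIF entry shaped like the records above; the field names are taken from the log, but the helper itself is illustrative and not Nova's own network model.

def summarize_vif(vif):
    """Return the port id, MAC, devname and fixed IPs of one VIF dict."""
    fixed_ips = [
        ip["address"]
        for subnet in vif["network"]["subnets"]
        for ip in subnet["ips"]
        if ip.get("type") == "fixed"
    ]
    return {
        "port_id": vif["id"],
        "mac": vif["address"],
        "devname": vif.get("devname"),
        "fixed_ips": fixed_ips,
    }


if __name__ == "__main__":
    # Example values copied from the instance_info_cache update above.
    vif = {
        "id": "85d5a3eb-9d3d-46ac-b538-3e317a13e6fc",
        "address": "fa:16:3e:74:ba:4e",
        "devname": "tap85d5a3eb-9d",
        "network": {
            "subnets": [
                {"ips": [{"address": "192.168.128.7", "type": "fixed"}]}
            ]
        },
    }
    print(summarize_vif(vif))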
[ 1546.371187] env[62519]: DEBUG nova.network.neutron [None req-d5d00165-6954-4f25-bfe4-c298fa7922a8 tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] [instance: 27f9e890-4733-43aa-9bf1-351d42d75418] Updating instance_info_cache with network_info: [{"id": "85d5a3eb-9d3d-46ac-b538-3e317a13e6fc", "address": "fa:16:3e:74:ba:4e", "network": {"id": "4c7928be-c05e-40d8-8744-53752aa1c802", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-1081889914-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f69f08ab17cc423a98f0ae56f706c62b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c4712af2-45ef-4652-8d2c-482ec70056d0", "external-id": "nsx-vlan-transportzone-826", "segmentation_id": 826, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap85d5a3eb-9d", "ovs_interfaceid": "85d5a3eb-9d3d-46ac-b538-3e317a13e6fc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1546.439555] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1802331, 'name': CreateVM_Task} progress is 99%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1546.730518] env[62519]: DEBUG nova.compute.manager [None req-b80a0845-960b-4b9e-b305-46f7cb345916 tempest-SecurityGroupsTestJSON-38868017 tempest-SecurityGroupsTestJSON-38868017-project-member] [instance: ad374dd9-a92d-4b76-9609-7562346e05a8] Starting instance... 
{{(pid=62519) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2441}} [ 1546.761160] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee63d99e-59c6-45e7-9c21-28b44eef26d2 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1546.771867] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-352d03bf-3616-422e-8260-4e74f28d6c67 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1547.510023] env[62519]: DEBUG oslo_concurrency.lockutils [None req-d5d00165-6954-4f25-bfe4-c298fa7922a8 tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] Releasing lock "refresh_cache-27f9e890-4733-43aa-9bf1-351d42d75418" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1547.510023] env[62519]: DEBUG nova.compute.manager [None req-d5d00165-6954-4f25-bfe4-c298fa7922a8 tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] [instance: 27f9e890-4733-43aa-9bf1-351d42d75418] Instance network_info: |[{"id": "85d5a3eb-9d3d-46ac-b538-3e317a13e6fc", "address": "fa:16:3e:74:ba:4e", "network": {"id": "4c7928be-c05e-40d8-8744-53752aa1c802", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-1081889914-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f69f08ab17cc423a98f0ae56f706c62b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c4712af2-45ef-4652-8d2c-482ec70056d0", "external-id": "nsx-vlan-transportzone-826", "segmentation_id": 826, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap85d5a3eb-9d", "ovs_interfaceid": "85d5a3eb-9d3d-46ac-b538-3e317a13e6fc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62519) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2004}} [ 1547.510023] env[62519]: DEBUG nova.compute.manager [None req-069fe864-a843-4aad-b4f0-161d16fbc86a tempest-ImagesOneServerTestJSON-1959164190 tempest-ImagesOneServerTestJSON-1959164190-project-member] [instance: 4e3dee19-b99a-4257-88da-1b0531e2c0f9] Start spawning the instance on the hypervisor. 
{{(pid=62519) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2647}} [ 1547.520034] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7be5d0e-1637-46f7-bb8e-cf1d194d0fb4 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1547.532234] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-538ae4db-af84-4aa3-b9db-b459fd8290b9 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1547.537279] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1802331, 'name': CreateVM_Task, 'duration_secs': 1.537833} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1547.537279] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8070aa59-3547-460a-b914-0e84620023d0] Created VM on the ESX host {{(pid=62519) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1547.537970] env[62519]: DEBUG oslo_concurrency.lockutils [None req-51a5c574-31b5-4e20-8d04-338349c55e60 tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1547.538194] env[62519]: DEBUG oslo_concurrency.lockutils [None req-51a5c574-31b5-4e20-8d04-338349c55e60 tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] Acquired lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1547.538600] env[62519]: DEBUG oslo_concurrency.lockutils [None req-51a5c574-31b5-4e20-8d04-338349c55e60 tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1547.547518] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-db2e59b7-3b2a-4560-a995-6b6fe6b88185 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1547.549724] env[62519]: DEBUG nova.compute.provider_tree [None req-d66de93a-aada-4f3a-931f-5f8ddbb5f1e6 tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] Inventory has not changed in ProviderTree for provider: f8ca0d98-9158-4b85-ae0e-b106f966dd44 {{(pid=62519) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1547.555050] env[62519]: DEBUG oslo_concurrency.lockutils [None req-b80a0845-960b-4b9e-b305-46f7cb345916 tempest-SecurityGroupsTestJSON-38868017 tempest-SecurityGroupsTestJSON-38868017-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1547.557023] env[62519]: DEBUG oslo_vmware.api [None req-51a5c574-31b5-4e20-8d04-338349c55e60 
tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] Waiting for the task: (returnval){ [ 1547.557023] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]522bcb08-a962-8b30-ab18-e4b6a2e47a6a" [ 1547.557023] env[62519]: _type = "Task" [ 1547.557023] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1547.565716] env[62519]: DEBUG oslo_vmware.api [None req-51a5c574-31b5-4e20-8d04-338349c55e60 tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]522bcb08-a962-8b30-ab18-e4b6a2e47a6a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1547.609852] env[62519]: DEBUG nova.virt.hardware [None req-d5d00165-6954-4f25-bfe4-c298fa7922a8 tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T07:56:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T07:55:55Z,direct_url=,disk_format='vmdk',id=15793716-f1d9-4a86-9030-717adf498693,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4069337684d348e0a1ab4eb6a1a2b14d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T07:55:56Z,virtual_size=,visibility=), allow threads: False {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1547.610128] env[62519]: DEBUG nova.virt.hardware [None req-d5d00165-6954-4f25-bfe4-c298fa7922a8 tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] Flavor limits 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1547.610262] env[62519]: DEBUG nova.virt.hardware [None req-d5d00165-6954-4f25-bfe4-c298fa7922a8 tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] Image limits 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1547.610646] env[62519]: DEBUG nova.virt.hardware [None req-d5d00165-6954-4f25-bfe4-c298fa7922a8 tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] Flavor pref 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1547.610646] env[62519]: DEBUG nova.virt.hardware [None req-d5d00165-6954-4f25-bfe4-c298fa7922a8 tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] Image pref 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1547.610753] env[62519]: DEBUG nova.virt.hardware [None req-d5d00165-6954-4f25-bfe4-c298fa7922a8 tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62519) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1547.610940] env[62519]: DEBUG nova.virt.hardware [None req-d5d00165-6954-4f25-bfe4-c298fa7922a8 tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1547.611103] env[62519]: DEBUG nova.virt.hardware [None req-d5d00165-6954-4f25-bfe4-c298fa7922a8 tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62519) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1547.611260] env[62519]: DEBUG nova.virt.hardware [None req-d5d00165-6954-4f25-bfe4-c298fa7922a8 tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] Got 1 possible topologies {{(pid=62519) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1547.611412] env[62519]: DEBUG nova.virt.hardware [None req-d5d00165-6954-4f25-bfe4-c298fa7922a8 tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1547.611571] env[62519]: DEBUG nova.virt.hardware [None req-d5d00165-6954-4f25-bfe4-c298fa7922a8 tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1547.614485] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2538dd8-f532-4406-a2c6-73ff623172b1 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1547.623948] env[62519]: DEBUG nova.virt.hardware [None req-069fe864-a843-4aad-b4f0-161d16fbc86a tempest-ImagesOneServerTestJSON-1959164190 tempest-ImagesOneServerTestJSON-1959164190-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T07:56:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T07:55:55Z,direct_url=,disk_format='vmdk',id=15793716-f1d9-4a86-9030-717adf498693,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4069337684d348e0a1ab4eb6a1a2b14d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T07:55:56Z,virtual_size=,visibility=), allow threads: False {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1547.624139] env[62519]: DEBUG nova.virt.hardware [None req-069fe864-a843-4aad-b4f0-161d16fbc86a tempest-ImagesOneServerTestJSON-1959164190 tempest-ImagesOneServerTestJSON-1959164190-project-member] Flavor limits 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1547.624291] 
env[62519]: DEBUG nova.virt.hardware [None req-069fe864-a843-4aad-b4f0-161d16fbc86a tempest-ImagesOneServerTestJSON-1959164190 tempest-ImagesOneServerTestJSON-1959164190-project-member] Image limits 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1547.624613] env[62519]: DEBUG nova.virt.hardware [None req-069fe864-a843-4aad-b4f0-161d16fbc86a tempest-ImagesOneServerTestJSON-1959164190 tempest-ImagesOneServerTestJSON-1959164190-project-member] Flavor pref 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1547.624684] env[62519]: DEBUG nova.virt.hardware [None req-069fe864-a843-4aad-b4f0-161d16fbc86a tempest-ImagesOneServerTestJSON-1959164190 tempest-ImagesOneServerTestJSON-1959164190-project-member] Image pref 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1547.624924] env[62519]: DEBUG nova.virt.hardware [None req-069fe864-a843-4aad-b4f0-161d16fbc86a tempest-ImagesOneServerTestJSON-1959164190 tempest-ImagesOneServerTestJSON-1959164190-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1547.625072] env[62519]: DEBUG nova.virt.hardware [None req-069fe864-a843-4aad-b4f0-161d16fbc86a tempest-ImagesOneServerTestJSON-1959164190 tempest-ImagesOneServerTestJSON-1959164190-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1547.625231] env[62519]: DEBUG nova.virt.hardware [None req-069fe864-a843-4aad-b4f0-161d16fbc86a tempest-ImagesOneServerTestJSON-1959164190 tempest-ImagesOneServerTestJSON-1959164190-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62519) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1547.625398] env[62519]: DEBUG nova.virt.hardware [None req-069fe864-a843-4aad-b4f0-161d16fbc86a tempest-ImagesOneServerTestJSON-1959164190 tempest-ImagesOneServerTestJSON-1959164190-project-member] Got 1 possible topologies {{(pid=62519) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1547.625550] env[62519]: DEBUG nova.virt.hardware [None req-069fe864-a843-4aad-b4f0-161d16fbc86a tempest-ImagesOneServerTestJSON-1959164190 tempest-ImagesOneServerTestJSON-1959164190-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1547.625708] env[62519]: DEBUG nova.virt.hardware [None req-069fe864-a843-4aad-b4f0-161d16fbc86a tempest-ImagesOneServerTestJSON-1959164190 tempest-ImagesOneServerTestJSON-1959164190-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1547.627074] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40cb50ed-956b-493b-8e3b-4d2d0e88ff33 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1547.633359] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c978bbb1-2d51-4011-b5d2-8352286a6df2 {{(pid=62519) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1547.642523] env[62519]: DEBUG oslo_vmware.rw_handles [None req-abaf2164-2628-41b3-aa2a-dce555f7d236 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5227b18a-0d4e-21a1-5a7b-6a9040892fac/disk-0.vmdk. {{(pid=62519) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1547.651585] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53f81755-f753-47ef-ab40-3bdb2b06a514 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1547.656426] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-051dd9a2-af6a-4378-946e-92e458dbc331 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1547.659244] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-d5d00165-6954-4f25-bfe4-c298fa7922a8 tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] [instance: 27f9e890-4733-43aa-9bf1-351d42d75418] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:74:ba:4e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c4712af2-45ef-4652-8d2c-482ec70056d0', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '85d5a3eb-9d3d-46ac-b538-3e317a13e6fc', 'vif_model': 'vmxnet3'}] {{(pid=62519) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1547.666500] env[62519]: DEBUG oslo.service.loopingcall [None req-d5d00165-6954-4f25-bfe4-c298fa7922a8 tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62519) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1547.666812] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 27f9e890-4733-43aa-9bf1-351d42d75418] Creating VM on the ESX host {{(pid=62519) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1547.667951] env[62519]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-4fddafe3-1125-4188-8e10-e4dd0811e5a1 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1547.692142] env[62519]: DEBUG oslo_vmware.rw_handles [None req-abaf2164-2628-41b3-aa2a-dce555f7d236 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5227b18a-0d4e-21a1-5a7b-6a9040892fac/disk-0.vmdk is in state: ready. {{(pid=62519) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1547.696019] env[62519]: ERROR oslo_vmware.rw_handles [None req-abaf2164-2628-41b3-aa2a-dce555f7d236 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Aborting lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5227b18a-0d4e-21a1-5a7b-6a9040892fac/disk-0.vmdk due to incomplete transfer. 
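Note: the rw_handles records above show a read lease being aborted because the VMDK transfer did not finish, even though the lease itself was in state "ready". A minimal sketch of that complete-on-success / abort-on-incomplete cleanup pattern follows; the Lease object and stream_disk helper are hypothetical, the real logic lives in oslo.vmware's rw_handles.

class Lease:
    """Hypothetical lease handle with the two outcomes seen in the log."""

    def complete(self):
        print("lease completed")

    def abort(self):
        print("lease aborted due to incomplete transfer")


def stream_disk(lease, chunks, expected_chunks):
    """Copy chunks under the lease; abort the lease if the copy falls short."""
    copied = 0
    try:
        for _chunk in chunks:
            copied += 1  # a real implementation would write the bytes here
        if copied == expected_chunks:
            lease.complete()
        else:
            lease.abort()
    except Exception:
        lease.abort()
        raise
    return copied


if __name__ == "__main__":
    stream_disk(Lease(), chunks=iter(range(2)), expected_chunks=3)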
[ 1547.696019] env[62519]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-d87028fd-3a19-41f1-8d2e-a5bc115aec9d {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1547.697074] env[62519]: DEBUG nova.network.neutron [None req-069fe864-a843-4aad-b4f0-161d16fbc86a tempest-ImagesOneServerTestJSON-1959164190 tempest-ImagesOneServerTestJSON-1959164190-project-member] [instance: 4e3dee19-b99a-4257-88da-1b0531e2c0f9] Successfully updated port: d23f0b24-455b-4112-8518-1eaca05eb428 {{(pid=62519) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1547.699219] env[62519]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1547.699219] env[62519]: value = "task-1802333" [ 1547.699219] env[62519]: _type = "Task" [ 1547.699219] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1547.709534] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1802333, 'name': CreateVM_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1547.710771] env[62519]: DEBUG oslo_vmware.rw_handles [None req-abaf2164-2628-41b3-aa2a-dce555f7d236 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Closed VMDK read handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5227b18a-0d4e-21a1-5a7b-6a9040892fac/disk-0.vmdk. {{(pid=62519) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1547.710959] env[62519]: DEBUG nova.virt.vmwareapi.images [None req-abaf2164-2628-41b3-aa2a-dce555f7d236 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] [instance: 3b506d10-a427-47b8-ab5f-c35e450b7eb1] Uploaded image 5bc9589c-1176-4ec2-9bd9-420d0963e893 to the Glance image server {{(pid=62519) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1547.713497] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-abaf2164-2628-41b3-aa2a-dce555f7d236 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] [instance: 3b506d10-a427-47b8-ab5f-c35e450b7eb1] Destroying the VM {{(pid=62519) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1547.714381] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-d7bbccbc-c571-4fa1-8023-800aa0e75df3 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1547.720779] env[62519]: DEBUG oslo_vmware.api [None req-abaf2164-2628-41b3-aa2a-dce555f7d236 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Waiting for the task: (returnval){ [ 1547.720779] env[62519]: value = "task-1802334" [ 1547.720779] env[62519]: _type = "Task" [ 1547.720779] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1547.731536] env[62519]: DEBUG oslo_vmware.api [None req-abaf2164-2628-41b3-aa2a-dce555f7d236 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Task: {'id': task-1802334, 'name': Destroy_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1548.016166] env[62519]: DEBUG oslo_concurrency.lockutils [None req-b9c19239-83b3-40a1-b398-a911c392dcc2 tempest-InstanceActionsTestJSON-366970633 tempest-InstanceActionsTestJSON-366970633-project-member] Lock "f19c860f-736a-4783-8ef5-8262040e53a3" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 112.545s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1548.053985] env[62519]: DEBUG nova.scheduler.client.report [None req-d66de93a-aada-4f3a-931f-5f8ddbb5f1e6 tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] Inventory has not changed for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 157, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1548.069967] env[62519]: DEBUG oslo_vmware.api [None req-51a5c574-31b5-4e20-8d04-338349c55e60 tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]522bcb08-a962-8b30-ab18-e4b6a2e47a6a, 'name': SearchDatastore_Task, 'duration_secs': 0.018653} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1548.069967] env[62519]: DEBUG oslo_concurrency.lockutils [None req-51a5c574-31b5-4e20-8d04-338349c55e60 tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] Releasing lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1548.069967] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-51a5c574-31b5-4e20-8d04-338349c55e60 tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] [instance: 8070aa59-3547-460a-b914-0e84620023d0] Processing image 15793716-f1d9-4a86-9030-717adf498693 {{(pid=62519) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1548.069967] env[62519]: DEBUG oslo_concurrency.lockutils [None req-51a5c574-31b5-4e20-8d04-338349c55e60 tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1548.069967] env[62519]: DEBUG oslo_concurrency.lockutils [None req-51a5c574-31b5-4e20-8d04-338349c55e60 tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] Acquired lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 
1548.070180] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-51a5c574-31b5-4e20-8d04-338349c55e60 tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62519) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1548.070456] env[62519]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-64069805-cc1b-42df-bdb4-e0089bbb97f4 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1548.082351] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-51a5c574-31b5-4e20-8d04-338349c55e60 tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62519) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1548.082351] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-51a5c574-31b5-4e20-8d04-338349c55e60 tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62519) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1548.082615] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2ddc52c6-607f-4c9c-aed0-836c8b2a014f {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1548.093107] env[62519]: DEBUG nova.compute.manager [req-11446dfd-9285-468e-a008-bae103b2e9f4 req-780a1622-8083-4b3d-a80d-fe1503462166 service nova] [instance: 27f9e890-4733-43aa-9bf1-351d42d75418] Received event network-vif-plugged-85d5a3eb-9d3d-46ac-b538-3e317a13e6fc {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1548.093107] env[62519]: DEBUG oslo_concurrency.lockutils [req-11446dfd-9285-468e-a008-bae103b2e9f4 req-780a1622-8083-4b3d-a80d-fe1503462166 service nova] Acquiring lock "27f9e890-4733-43aa-9bf1-351d42d75418-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1548.093107] env[62519]: DEBUG oslo_concurrency.lockutils [req-11446dfd-9285-468e-a008-bae103b2e9f4 req-780a1622-8083-4b3d-a80d-fe1503462166 service nova] Lock "27f9e890-4733-43aa-9bf1-351d42d75418-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1548.093107] env[62519]: DEBUG oslo_concurrency.lockutils [req-11446dfd-9285-468e-a008-bae103b2e9f4 req-780a1622-8083-4b3d-a80d-fe1503462166 service nova] Lock "27f9e890-4733-43aa-9bf1-351d42d75418-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1548.093107] env[62519]: DEBUG nova.compute.manager [req-11446dfd-9285-468e-a008-bae103b2e9f4 req-780a1622-8083-4b3d-a80d-fe1503462166 service nova] [instance: 27f9e890-4733-43aa-9bf1-351d42d75418] No waiting events found dispatching network-vif-plugged-85d5a3eb-9d3d-46ac-b538-3e317a13e6fc {{(pid=62519) pop_instance_event /opt/stack/nova/nova/compute/manager.py:323}} [ 
1548.093107] env[62519]: WARNING nova.compute.manager [req-11446dfd-9285-468e-a008-bae103b2e9f4 req-780a1622-8083-4b3d-a80d-fe1503462166 service nova] [instance: 27f9e890-4733-43aa-9bf1-351d42d75418] Received unexpected event network-vif-plugged-85d5a3eb-9d3d-46ac-b538-3e317a13e6fc for instance with vm_state building and task_state spawning. [ 1548.098300] env[62519]: DEBUG oslo_vmware.api [None req-51a5c574-31b5-4e20-8d04-338349c55e60 tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] Waiting for the task: (returnval){ [ 1548.098300] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]520bbb0b-7730-5698-f07c-eb73d46b9868" [ 1548.098300] env[62519]: _type = "Task" [ 1548.098300] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1548.114917] env[62519]: DEBUG oslo_vmware.api [None req-51a5c574-31b5-4e20-8d04-338349c55e60 tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]520bbb0b-7730-5698-f07c-eb73d46b9868, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1548.171084] env[62519]: DEBUG nova.compute.manager [req-3acb6577-48f6-4181-8cff-16998f0bcadd req-f0e02c04-9c6e-444f-838e-7b99a5435c75 service nova] [instance: 8070aa59-3547-460a-b914-0e84620023d0] Received event network-changed-11187365-8def-40f6-822e-05bb089dd16b {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1548.171339] env[62519]: DEBUG nova.compute.manager [req-3acb6577-48f6-4181-8cff-16998f0bcadd req-f0e02c04-9c6e-444f-838e-7b99a5435c75 service nova] [instance: 8070aa59-3547-460a-b914-0e84620023d0] Refreshing instance network info cache due to event network-changed-11187365-8def-40f6-822e-05bb089dd16b. 
{{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11628}} [ 1548.171572] env[62519]: DEBUG oslo_concurrency.lockutils [req-3acb6577-48f6-4181-8cff-16998f0bcadd req-f0e02c04-9c6e-444f-838e-7b99a5435c75 service nova] Acquiring lock "refresh_cache-8070aa59-3547-460a-b914-0e84620023d0" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1548.171751] env[62519]: DEBUG oslo_concurrency.lockutils [req-3acb6577-48f6-4181-8cff-16998f0bcadd req-f0e02c04-9c6e-444f-838e-7b99a5435c75 service nova] Acquired lock "refresh_cache-8070aa59-3547-460a-b914-0e84620023d0" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1548.171913] env[62519]: DEBUG nova.network.neutron [req-3acb6577-48f6-4181-8cff-16998f0bcadd req-f0e02c04-9c6e-444f-838e-7b99a5435c75 service nova] [instance: 8070aa59-3547-460a-b914-0e84620023d0] Refreshing network info cache for port 11187365-8def-40f6-822e-05bb089dd16b {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1548.200242] env[62519]: DEBUG oslo_concurrency.lockutils [None req-069fe864-a843-4aad-b4f0-161d16fbc86a tempest-ImagesOneServerTestJSON-1959164190 tempest-ImagesOneServerTestJSON-1959164190-project-member] Acquiring lock "refresh_cache-4e3dee19-b99a-4257-88da-1b0531e2c0f9" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1548.200422] env[62519]: DEBUG oslo_concurrency.lockutils [None req-069fe864-a843-4aad-b4f0-161d16fbc86a tempest-ImagesOneServerTestJSON-1959164190 tempest-ImagesOneServerTestJSON-1959164190-project-member] Acquired lock "refresh_cache-4e3dee19-b99a-4257-88da-1b0531e2c0f9" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1548.200636] env[62519]: DEBUG nova.network.neutron [None req-069fe864-a843-4aad-b4f0-161d16fbc86a tempest-ImagesOneServerTestJSON-1959164190 tempest-ImagesOneServerTestJSON-1959164190-project-member] [instance: 4e3dee19-b99a-4257-88da-1b0531e2c0f9] Building network info cache for instance {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1548.218210] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1802333, 'name': CreateVM_Task, 'duration_secs': 0.476359} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1548.218384] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 27f9e890-4733-43aa-9bf1-351d42d75418] Created VM on the ESX host {{(pid=62519) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1548.219230] env[62519]: DEBUG oslo_concurrency.lockutils [None req-d5d00165-6954-4f25-bfe4-c298fa7922a8 tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1548.219462] env[62519]: DEBUG oslo_concurrency.lockutils [None req-d5d00165-6954-4f25-bfe4-c298fa7922a8 tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] Acquired lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1548.219868] env[62519]: DEBUG oslo_concurrency.lockutils [None req-d5d00165-6954-4f25-bfe4-c298fa7922a8 tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1548.220950] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7376f3c0-66d7-4ff6-b2da-909022b0b5f3 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1548.234388] env[62519]: DEBUG oslo_vmware.api [None req-d5d00165-6954-4f25-bfe4-c298fa7922a8 tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] Waiting for the task: (returnval){ [ 1548.234388] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]52e1b48d-69c4-a25a-165b-4d4132e1d4a2" [ 1548.234388] env[62519]: _type = "Task" [ 1548.234388] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1548.238818] env[62519]: DEBUG oslo_vmware.api [None req-abaf2164-2628-41b3-aa2a-dce555f7d236 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Task: {'id': task-1802334, 'name': Destroy_Task, 'duration_secs': 0.492318} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1548.244183] env[62519]: INFO nova.virt.vmwareapi.vm_util [None req-abaf2164-2628-41b3-aa2a-dce555f7d236 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] [instance: 3b506d10-a427-47b8-ab5f-c35e450b7eb1] Destroyed the VM [ 1548.244545] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-abaf2164-2628-41b3-aa2a-dce555f7d236 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] [instance: 3b506d10-a427-47b8-ab5f-c35e450b7eb1] Deleting Snapshot of the VM instance {{(pid=62519) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1548.245281] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-a8d06ad8-41cd-4b61-a8d2-cb483246dc99 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1548.255521] env[62519]: DEBUG oslo_vmware.api [None req-d5d00165-6954-4f25-bfe4-c298fa7922a8 tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52e1b48d-69c4-a25a-165b-4d4132e1d4a2, 'name': SearchDatastore_Task, 'duration_secs': 0.011484} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1548.257519] env[62519]: DEBUG oslo_concurrency.lockutils [None req-d5d00165-6954-4f25-bfe4-c298fa7922a8 tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] Releasing lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1548.257860] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-d5d00165-6954-4f25-bfe4-c298fa7922a8 tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] [instance: 27f9e890-4733-43aa-9bf1-351d42d75418] Processing image 15793716-f1d9-4a86-9030-717adf498693 {{(pid=62519) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1548.258221] env[62519]: DEBUG oslo_concurrency.lockutils [None req-d5d00165-6954-4f25-bfe4-c298fa7922a8 tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1548.258657] env[62519]: DEBUG oslo_vmware.api [None req-abaf2164-2628-41b3-aa2a-dce555f7d236 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Waiting for the task: (returnval){ [ 1548.258657] env[62519]: value = "task-1802335" [ 1548.258657] env[62519]: _type = "Task" [ 1548.258657] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1548.270667] env[62519]: DEBUG oslo_vmware.api [None req-abaf2164-2628-41b3-aa2a-dce555f7d236 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Task: {'id': task-1802335, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1548.518976] env[62519]: DEBUG nova.compute.manager [None req-07784f7b-8852-4eb7-b75c-baa3510afa4f tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] [instance: dac173ff-1807-405f-a59c-bb2efef62a0c] Starting instance... {{(pid=62519) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2441}} [ 1548.563721] env[62519]: DEBUG oslo_concurrency.lockutils [None req-d66de93a-aada-4f3a-931f-5f8ddbb5f1e6 tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.390s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1548.566170] env[62519]: DEBUG nova.compute.manager [None req-d66de93a-aada-4f3a-931f-5f8ddbb5f1e6 tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] [instance: 24cb49c8-b2ef-4ede-aea6-6e34081beca1] Start building networks asynchronously for instance. {{(pid=62519) _build_resources /opt/stack/nova/nova/compute/manager.py:2838}} [ 1548.569613] env[62519]: DEBUG oslo_concurrency.lockutils [None req-799af596-e4ed-43b4-bbf9-d5b7ae219998 tempest-ServersAdminNegativeTestJSON-2096466474 tempest-ServersAdminNegativeTestJSON-2096466474-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 31.504s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1548.569613] env[62519]: DEBUG nova.objects.instance [None req-799af596-e4ed-43b4-bbf9-d5b7ae219998 tempest-ServersAdminNegativeTestJSON-2096466474 tempest-ServersAdminNegativeTestJSON-2096466474-project-member] Lazy-loading 'resources' on Instance uuid c8b7568b-ba07-4f65-818b-f84910209361 {{(pid=62519) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1548.610657] env[62519]: DEBUG oslo_vmware.api [None req-51a5c574-31b5-4e20-8d04-338349c55e60 tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]520bbb0b-7730-5698-f07c-eb73d46b9868, 'name': SearchDatastore_Task, 'duration_secs': 0.015863} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1548.612146] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-74855d61-794b-49cd-8243-2580b4f400b8 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1548.618197] env[62519]: DEBUG oslo_vmware.api [None req-51a5c574-31b5-4e20-8d04-338349c55e60 tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] Waiting for the task: (returnval){ [ 1548.618197] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]5236c15e-090e-8e06-3395-7229deaf9113" [ 1548.618197] env[62519]: _type = "Task" [ 1548.618197] env[62519]: } to complete. 
{{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1548.629460] env[62519]: DEBUG oslo_vmware.api [None req-51a5c574-31b5-4e20-8d04-338349c55e60 tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]5236c15e-090e-8e06-3395-7229deaf9113, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1548.756558] env[62519]: DEBUG nova.network.neutron [None req-069fe864-a843-4aad-b4f0-161d16fbc86a tempest-ImagesOneServerTestJSON-1959164190 tempest-ImagesOneServerTestJSON-1959164190-project-member] [instance: 4e3dee19-b99a-4257-88da-1b0531e2c0f9] Instance cache missing network info. {{(pid=62519) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1548.773031] env[62519]: DEBUG oslo_vmware.api [None req-abaf2164-2628-41b3-aa2a-dce555f7d236 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Task: {'id': task-1802335, 'name': RemoveSnapshot_Task} progress is 100%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1549.043764] env[62519]: DEBUG oslo_concurrency.lockutils [None req-07784f7b-8852-4eb7-b75c-baa3510afa4f tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1549.064453] env[62519]: DEBUG nova.network.neutron [req-3acb6577-48f6-4181-8cff-16998f0bcadd req-f0e02c04-9c6e-444f-838e-7b99a5435c75 service nova] [instance: 8070aa59-3547-460a-b914-0e84620023d0] Updated VIF entry in instance network info cache for port 11187365-8def-40f6-822e-05bb089dd16b. 
{{(pid=62519) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1549.064830] env[62519]: DEBUG nova.network.neutron [req-3acb6577-48f6-4181-8cff-16998f0bcadd req-f0e02c04-9c6e-444f-838e-7b99a5435c75 service nova] [instance: 8070aa59-3547-460a-b914-0e84620023d0] Updating instance_info_cache with network_info: [{"id": "11187365-8def-40f6-822e-05bb089dd16b", "address": "fa:16:3e:54:e8:ea", "network": {"id": "4c7928be-c05e-40d8-8744-53752aa1c802", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-1081889914-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f69f08ab17cc423a98f0ae56f706c62b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c4712af2-45ef-4652-8d2c-482ec70056d0", "external-id": "nsx-vlan-transportzone-826", "segmentation_id": 826, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap11187365-8d", "ovs_interfaceid": "11187365-8def-40f6-822e-05bb089dd16b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1549.077507] env[62519]: DEBUG nova.compute.utils [None req-d66de93a-aada-4f3a-931f-5f8ddbb5f1e6 tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] Using /dev/sd instead of None {{(pid=62519) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1549.079626] env[62519]: DEBUG nova.compute.manager [None req-d66de93a-aada-4f3a-931f-5f8ddbb5f1e6 tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] [instance: 24cb49c8-b2ef-4ede-aea6-6e34081beca1] Allocating IP information in the background. {{(pid=62519) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1989}} [ 1549.079837] env[62519]: DEBUG nova.network.neutron [None req-d66de93a-aada-4f3a-931f-5f8ddbb5f1e6 tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] [instance: 24cb49c8-b2ef-4ede-aea6-6e34081beca1] allocate_for_instance() {{(pid=62519) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1549.130551] env[62519]: DEBUG oslo_vmware.api [None req-51a5c574-31b5-4e20-8d04-338349c55e60 tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]5236c15e-090e-8e06-3395-7229deaf9113, 'name': SearchDatastore_Task, 'duration_secs': 0.012122} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1549.130870] env[62519]: DEBUG oslo_concurrency.lockutils [None req-51a5c574-31b5-4e20-8d04-338349c55e60 tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] Releasing lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1549.131435] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-51a5c574-31b5-4e20-8d04-338349c55e60 tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk to [datastore1] 8070aa59-3547-460a-b914-0e84620023d0/8070aa59-3547-460a-b914-0e84620023d0.vmdk {{(pid=62519) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1549.131785] env[62519]: DEBUG oslo_concurrency.lockutils [None req-d5d00165-6954-4f25-bfe4-c298fa7922a8 tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] Acquired lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1549.132038] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-d5d00165-6954-4f25-bfe4-c298fa7922a8 tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62519) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1549.132395] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ec644da7-5ecb-4400-ab34-760057b9abe1 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1549.135120] env[62519]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-4ada333f-3ad3-4168-8b30-464ffc202779 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1549.142013] env[62519]: DEBUG oslo_vmware.api [None req-51a5c574-31b5-4e20-8d04-338349c55e60 tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] Waiting for the task: (returnval){ [ 1549.142013] env[62519]: value = "task-1802336" [ 1549.142013] env[62519]: _type = "Task" [ 1549.142013] env[62519]: } to complete. 
{{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1549.146960] env[62519]: DEBUG nova.policy [None req-d66de93a-aada-4f3a-931f-5f8ddbb5f1e6 tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'cbf140b3b87d4f8ea80c5f91a9dd9c37', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f69f08ab17cc423a98f0ae56f706c62b', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62519) authorize /opt/stack/nova/nova/policy.py:192}} [ 1549.162238] env[62519]: DEBUG oslo_concurrency.lockutils [None req-d4f64c01-4859-459d-a49e-3e38bfd7e3ac tempest-ServerAddressesNegativeTestJSON-801003583 tempest-ServerAddressesNegativeTestJSON-801003583-project-member] Acquiring lock "22380aef-c725-43a0-a957-06ced9518c21" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1549.162238] env[62519]: DEBUG oslo_concurrency.lockutils [None req-d4f64c01-4859-459d-a49e-3e38bfd7e3ac tempest-ServerAddressesNegativeTestJSON-801003583 tempest-ServerAddressesNegativeTestJSON-801003583-project-member] Lock "22380aef-c725-43a0-a957-06ced9518c21" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1549.162338] env[62519]: DEBUG oslo_concurrency.lockutils [None req-d4f64c01-4859-459d-a49e-3e38bfd7e3ac tempest-ServerAddressesNegativeTestJSON-801003583 tempest-ServerAddressesNegativeTestJSON-801003583-project-member] Acquiring lock "22380aef-c725-43a0-a957-06ced9518c21-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1549.162947] env[62519]: DEBUG oslo_concurrency.lockutils [None req-d4f64c01-4859-459d-a49e-3e38bfd7e3ac tempest-ServerAddressesNegativeTestJSON-801003583 tempest-ServerAddressesNegativeTestJSON-801003583-project-member] Lock "22380aef-c725-43a0-a957-06ced9518c21-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1549.163248] env[62519]: DEBUG oslo_concurrency.lockutils [None req-d4f64c01-4859-459d-a49e-3e38bfd7e3ac tempest-ServerAddressesNegativeTestJSON-801003583 tempest-ServerAddressesNegativeTestJSON-801003583-project-member] Lock "22380aef-c725-43a0-a957-06ced9518c21-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.001s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1549.165664] env[62519]: DEBUG oslo_vmware.api [None req-51a5c574-31b5-4e20-8d04-338349c55e60 tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] Task: {'id': task-1802336, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1549.166665] env[62519]: INFO nova.compute.manager [None req-d4f64c01-4859-459d-a49e-3e38bfd7e3ac tempest-ServerAddressesNegativeTestJSON-801003583 tempest-ServerAddressesNegativeTestJSON-801003583-project-member] [instance: 22380aef-c725-43a0-a957-06ced9518c21] Terminating instance [ 1549.229861] env[62519]: DEBUG oslo_concurrency.lockutils [None req-570a2219-1aaf-48cc-963b-0fa534407194 tempest-InstanceActionsTestJSON-366970633 tempest-InstanceActionsTestJSON-366970633-project-member] Acquiring lock "f19c860f-736a-4783-8ef5-8262040e53a3" by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1549.230174] env[62519]: DEBUG oslo_concurrency.lockutils [None req-570a2219-1aaf-48cc-963b-0fa534407194 tempest-InstanceActionsTestJSON-366970633 tempest-InstanceActionsTestJSON-366970633-project-member] Lock "f19c860f-736a-4783-8ef5-8262040e53a3" acquired by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" :: waited 0.001s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1549.230443] env[62519]: INFO nova.compute.manager [None req-570a2219-1aaf-48cc-963b-0fa534407194 tempest-InstanceActionsTestJSON-366970633 tempest-InstanceActionsTestJSON-366970633-project-member] [instance: f19c860f-736a-4783-8ef5-8262040e53a3] Rebooting instance [ 1549.252826] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-d5d00165-6954-4f25-bfe4-c298fa7922a8 tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62519) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1549.253119] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-d5d00165-6954-4f25-bfe4-c298fa7922a8 tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62519) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1549.257189] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fe9f3b67-a682-4103-b4f4-91c495966d21 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1549.266143] env[62519]: DEBUG oslo_vmware.api [None req-d5d00165-6954-4f25-bfe4-c298fa7922a8 tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] Waiting for the task: (returnval){ [ 1549.266143] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]521789df-8dfe-0124-d747-88f88478950a" [ 1549.266143] env[62519]: _type = "Task" [ 1549.266143] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1549.288271] env[62519]: DEBUG oslo_vmware.api [None req-abaf2164-2628-41b3-aa2a-dce555f7d236 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Task: {'id': task-1802335, 'name': RemoveSnapshot_Task, 'duration_secs': 0.578546} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1549.289659] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-abaf2164-2628-41b3-aa2a-dce555f7d236 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] [instance: 3b506d10-a427-47b8-ab5f-c35e450b7eb1] Deleted Snapshot of the VM instance {{(pid=62519) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1549.290047] env[62519]: INFO nova.compute.manager [None req-abaf2164-2628-41b3-aa2a-dce555f7d236 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] [instance: 3b506d10-a427-47b8-ab5f-c35e450b7eb1] Took 16.38 seconds to snapshot the instance on the hypervisor. [ 1549.300477] env[62519]: DEBUG oslo_vmware.api [None req-d5d00165-6954-4f25-bfe4-c298fa7922a8 tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]521789df-8dfe-0124-d747-88f88478950a, 'name': SearchDatastore_Task, 'duration_secs': 0.017348} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1549.302947] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-63e8bc37-d058-4e80-9b71-0c54de7b3ed7 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1549.305035] env[62519]: DEBUG oslo_concurrency.lockutils [None req-89d0611e-f35d-463c-a92b-756efdb9d348 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Acquiring lock "3b506d10-a427-47b8-ab5f-c35e450b7eb1" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1549.305279] env[62519]: DEBUG oslo_concurrency.lockutils [None req-89d0611e-f35d-463c-a92b-756efdb9d348 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Lock "3b506d10-a427-47b8-ab5f-c35e450b7eb1" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1549.305495] env[62519]: DEBUG oslo_concurrency.lockutils [None req-89d0611e-f35d-463c-a92b-756efdb9d348 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Acquiring lock "3b506d10-a427-47b8-ab5f-c35e450b7eb1-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1549.306047] env[62519]: DEBUG oslo_concurrency.lockutils [None req-89d0611e-f35d-463c-a92b-756efdb9d348 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Lock "3b506d10-a427-47b8-ab5f-c35e450b7eb1-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1549.306047] env[62519]: DEBUG oslo_concurrency.lockutils [None req-89d0611e-f35d-463c-a92b-756efdb9d348 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Lock "3b506d10-a427-47b8-ab5f-c35e450b7eb1-events" "released" by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1549.309286] env[62519]: INFO nova.compute.manager [None req-89d0611e-f35d-463c-a92b-756efdb9d348 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] [instance: 3b506d10-a427-47b8-ab5f-c35e450b7eb1] Terminating instance [ 1549.312756] env[62519]: DEBUG oslo_vmware.api [None req-d5d00165-6954-4f25-bfe4-c298fa7922a8 tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] Waiting for the task: (returnval){ [ 1549.312756] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]52a935fa-a677-081f-1eb3-a2a3ce182c96" [ 1549.312756] env[62519]: _type = "Task" [ 1549.312756] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1549.331021] env[62519]: DEBUG oslo_vmware.api [None req-d5d00165-6954-4f25-bfe4-c298fa7922a8 tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52a935fa-a677-081f-1eb3-a2a3ce182c96, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1549.452137] env[62519]: DEBUG nova.network.neutron [None req-069fe864-a843-4aad-b4f0-161d16fbc86a tempest-ImagesOneServerTestJSON-1959164190 tempest-ImagesOneServerTestJSON-1959164190-project-member] [instance: 4e3dee19-b99a-4257-88da-1b0531e2c0f9] Updating instance_info_cache with network_info: [{"id": "d23f0b24-455b-4112-8518-1eaca05eb428", "address": "fa:16:3e:74:68:82", "network": {"id": "e5c6f3a0-57c8-45d8-b5d5-93fd51d9adc6", "bridge": "br-int", "label": "tempest-ImagesOneServerTestJSON-1890020553-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "10b805c99a6945f3bcc685e1c2f9a816", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "56b944d8-803d-43f2-945d-0f334ee4ea1c", "external-id": "nsx-vlan-transportzone-799", "segmentation_id": 799, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd23f0b24-45", "ovs_interfaceid": "d23f0b24-455b-4112-8518-1eaca05eb428", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1549.567946] env[62519]: DEBUG oslo_concurrency.lockutils [req-3acb6577-48f6-4181-8cff-16998f0bcadd req-f0e02c04-9c6e-444f-838e-7b99a5435c75 service nova] Releasing lock "refresh_cache-8070aa59-3547-460a-b914-0e84620023d0" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1549.582739] env[62519]: DEBUG nova.compute.manager [None req-d66de93a-aada-4f3a-931f-5f8ddbb5f1e6 tempest-ListServerFiltersTestJSON-2084970753 
tempest-ListServerFiltersTestJSON-2084970753-project-member] [instance: 24cb49c8-b2ef-4ede-aea6-6e34081beca1] Start building block device mappings for instance. {{(pid=62519) _build_resources /opt/stack/nova/nova/compute/manager.py:2873}} [ 1549.605887] env[62519]: DEBUG nova.network.neutron [None req-d66de93a-aada-4f3a-931f-5f8ddbb5f1e6 tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] [instance: 24cb49c8-b2ef-4ede-aea6-6e34081beca1] Successfully created port: 0ac63864-3cdd-498e-b28b-054b97ccd1db {{(pid=62519) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1549.650378] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba3281d3-6b96-4421-a077-94e60a9dc880 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1549.663535] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d8483c3-445a-4f24-bc17-0a7046f30226 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1549.668459] env[62519]: DEBUG oslo_vmware.api [None req-51a5c574-31b5-4e20-8d04-338349c55e60 tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] Task: {'id': task-1802336, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1549.701549] env[62519]: DEBUG nova.compute.manager [None req-d4f64c01-4859-459d-a49e-3e38bfd7e3ac tempest-ServerAddressesNegativeTestJSON-801003583 tempest-ServerAddressesNegativeTestJSON-801003583-project-member] [instance: 22380aef-c725-43a0-a957-06ced9518c21] Start destroying the instance on the hypervisor. 
{{(pid=62519) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3166}} [ 1549.701798] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-d4f64c01-4859-459d-a49e-3e38bfd7e3ac tempest-ServerAddressesNegativeTestJSON-801003583 tempest-ServerAddressesNegativeTestJSON-801003583-project-member] [instance: 22380aef-c725-43a0-a957-06ced9518c21] Destroying instance {{(pid=62519) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1549.703473] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37699bd0-a20b-4e6b-8693-c32cc0a5481b {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1549.707231] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47f91c61-2e77-499a-93ab-2ca868895ff1 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1549.718690] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8058504c-54e6-4960-91ed-7c5982e90973 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1549.723822] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-d4f64c01-4859-459d-a49e-3e38bfd7e3ac tempest-ServerAddressesNegativeTestJSON-801003583 tempest-ServerAddressesNegativeTestJSON-801003583-project-member] [instance: 22380aef-c725-43a0-a957-06ced9518c21] Powering off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1549.724204] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-fd938535-2bd4-48cb-bf1a-bb467009f300 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1549.740955] env[62519]: DEBUG nova.compute.provider_tree [None req-799af596-e4ed-43b4-bbf9-d5b7ae219998 tempest-ServersAdminNegativeTestJSON-2096466474 tempest-ServersAdminNegativeTestJSON-2096466474-project-member] Inventory has not changed in ProviderTree for provider: f8ca0d98-9158-4b85-ae0e-b106f966dd44 {{(pid=62519) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1549.746553] env[62519]: DEBUG oslo_vmware.api [None req-d4f64c01-4859-459d-a49e-3e38bfd7e3ac tempest-ServerAddressesNegativeTestJSON-801003583 tempest-ServerAddressesNegativeTestJSON-801003583-project-member] Waiting for the task: (returnval){ [ 1549.746553] env[62519]: value = "task-1802337" [ 1549.746553] env[62519]: _type = "Task" [ 1549.746553] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1549.756865] env[62519]: DEBUG oslo_vmware.api [None req-d4f64c01-4859-459d-a49e-3e38bfd7e3ac tempest-ServerAddressesNegativeTestJSON-801003583 tempest-ServerAddressesNegativeTestJSON-801003583-project-member] Task: {'id': task-1802337, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1549.772751] env[62519]: DEBUG oslo_concurrency.lockutils [None req-570a2219-1aaf-48cc-963b-0fa534407194 tempest-InstanceActionsTestJSON-366970633 tempest-InstanceActionsTestJSON-366970633-project-member] Acquiring lock "refresh_cache-f19c860f-736a-4783-8ef5-8262040e53a3" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1549.772937] env[62519]: DEBUG oslo_concurrency.lockutils [None req-570a2219-1aaf-48cc-963b-0fa534407194 tempest-InstanceActionsTestJSON-366970633 tempest-InstanceActionsTestJSON-366970633-project-member] Acquired lock "refresh_cache-f19c860f-736a-4783-8ef5-8262040e53a3" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1549.773164] env[62519]: DEBUG nova.network.neutron [None req-570a2219-1aaf-48cc-963b-0fa534407194 tempest-InstanceActionsTestJSON-366970633 tempest-InstanceActionsTestJSON-366970633-project-member] [instance: f19c860f-736a-4783-8ef5-8262040e53a3] Building network info cache for instance {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1549.802452] env[62519]: DEBUG nova.compute.manager [None req-abaf2164-2628-41b3-aa2a-dce555f7d236 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] [instance: 3b506d10-a427-47b8-ab5f-c35e450b7eb1] Instance disappeared during snapshot {{(pid=62519) _snapshot_instance /opt/stack/nova/nova/compute/manager.py:4607}} [ 1549.825464] env[62519]: DEBUG nova.compute.manager [None req-89d0611e-f35d-463c-a92b-756efdb9d348 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] [instance: 3b506d10-a427-47b8-ab5f-c35e450b7eb1] Start destroying the instance on the hypervisor. {{(pid=62519) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3166}} [ 1549.825802] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-89d0611e-f35d-463c-a92b-756efdb9d348 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] [instance: 3b506d10-a427-47b8-ab5f-c35e450b7eb1] Destroying instance {{(pid=62519) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1549.826165] env[62519]: DEBUG oslo_vmware.api [None req-d5d00165-6954-4f25-bfe4-c298fa7922a8 tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52a935fa-a677-081f-1eb3-a2a3ce182c96, 'name': SearchDatastore_Task, 'duration_secs': 0.02528} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1549.827652] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1487857-1a17-42ba-b267-ba2be832c9ca {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1549.835023] env[62519]: DEBUG oslo_concurrency.lockutils [None req-d5d00165-6954-4f25-bfe4-c298fa7922a8 tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] Releasing lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1549.835023] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-d5d00165-6954-4f25-bfe4-c298fa7922a8 tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk to [datastore1] 27f9e890-4733-43aa-9bf1-351d42d75418/27f9e890-4733-43aa-9bf1-351d42d75418.vmdk {{(pid=62519) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1549.835023] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-3de67b4b-69fb-4198-8ed7-f34977410ab3 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1549.839845] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-89d0611e-f35d-463c-a92b-756efdb9d348 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] [instance: 3b506d10-a427-47b8-ab5f-c35e450b7eb1] Unregistering the VM {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1549.841624] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-98372dc6-43a1-42f6-98d4-72147ff3a920 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1549.844045] env[62519]: DEBUG oslo_vmware.api [None req-d5d00165-6954-4f25-bfe4-c298fa7922a8 tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] Waiting for the task: (returnval){ [ 1549.844045] env[62519]: value = "task-1802338" [ 1549.844045] env[62519]: _type = "Task" [ 1549.844045] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1549.853438] env[62519]: DEBUG oslo_vmware.api [None req-d5d00165-6954-4f25-bfe4-c298fa7922a8 tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] Task: {'id': task-1802338, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1549.854734] env[62519]: DEBUG nova.compute.manager [None req-abaf2164-2628-41b3-aa2a-dce555f7d236 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Image not found during clean up 5bc9589c-1176-4ec2-9bd9-420d0963e893 {{(pid=62519) _snapshot_instance /opt/stack/nova/nova/compute/manager.py:4613}} [ 1549.928814] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-89d0611e-f35d-463c-a92b-756efdb9d348 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] [instance: 3b506d10-a427-47b8-ab5f-c35e450b7eb1] Unregistered the VM {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1549.929061] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-89d0611e-f35d-463c-a92b-756efdb9d348 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] [instance: 3b506d10-a427-47b8-ab5f-c35e450b7eb1] Deleting contents of the VM from datastore datastore1 {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1549.929263] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-89d0611e-f35d-463c-a92b-756efdb9d348 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Deleting the datastore file [datastore1] 3b506d10-a427-47b8-ab5f-c35e450b7eb1 {{(pid=62519) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1549.929550] env[62519]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-1bb7e6f9-f70f-4c25-b3be-b9e66bebab66 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1549.936938] env[62519]: DEBUG oslo_vmware.api [None req-89d0611e-f35d-463c-a92b-756efdb9d348 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Waiting for the task: (returnval){ [ 1549.936938] env[62519]: value = "task-1802340" [ 1549.936938] env[62519]: _type = "Task" [ 1549.936938] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1549.947695] env[62519]: DEBUG oslo_vmware.api [None req-89d0611e-f35d-463c-a92b-756efdb9d348 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Task: {'id': task-1802340, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1549.955732] env[62519]: DEBUG oslo_concurrency.lockutils [None req-069fe864-a843-4aad-b4f0-161d16fbc86a tempest-ImagesOneServerTestJSON-1959164190 tempest-ImagesOneServerTestJSON-1959164190-project-member] Releasing lock "refresh_cache-4e3dee19-b99a-4257-88da-1b0531e2c0f9" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1549.955732] env[62519]: DEBUG nova.compute.manager [None req-069fe864-a843-4aad-b4f0-161d16fbc86a tempest-ImagesOneServerTestJSON-1959164190 tempest-ImagesOneServerTestJSON-1959164190-project-member] [instance: 4e3dee19-b99a-4257-88da-1b0531e2c0f9] Instance network_info: |[{"id": "d23f0b24-455b-4112-8518-1eaca05eb428", "address": "fa:16:3e:74:68:82", "network": {"id": "e5c6f3a0-57c8-45d8-b5d5-93fd51d9adc6", "bridge": "br-int", "label": "tempest-ImagesOneServerTestJSON-1890020553-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "10b805c99a6945f3bcc685e1c2f9a816", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "56b944d8-803d-43f2-945d-0f334ee4ea1c", "external-id": "nsx-vlan-transportzone-799", "segmentation_id": 799, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd23f0b24-45", "ovs_interfaceid": "d23f0b24-455b-4112-8518-1eaca05eb428", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62519) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2004}} [ 1549.955732] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-069fe864-a843-4aad-b4f0-161d16fbc86a tempest-ImagesOneServerTestJSON-1959164190 tempest-ImagesOneServerTestJSON-1959164190-project-member] [instance: 4e3dee19-b99a-4257-88da-1b0531e2c0f9] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:74:68:82', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '56b944d8-803d-43f2-945d-0f334ee4ea1c', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'd23f0b24-455b-4112-8518-1eaca05eb428', 'vif_model': 'vmxnet3'}] {{(pid=62519) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1549.965858] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-069fe864-a843-4aad-b4f0-161d16fbc86a tempest-ImagesOneServerTestJSON-1959164190 tempest-ImagesOneServerTestJSON-1959164190-project-member] Creating folder: Project (10b805c99a6945f3bcc685e1c2f9a816). Parent ref: group-v373567. 
{{(pid=62519) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1549.966034] env[62519]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-6a6c9e52-8cb0-4b23-a59c-99d3b3cd4a83 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1549.976791] env[62519]: INFO nova.virt.vmwareapi.vm_util [None req-069fe864-a843-4aad-b4f0-161d16fbc86a tempest-ImagesOneServerTestJSON-1959164190 tempest-ImagesOneServerTestJSON-1959164190-project-member] Created folder: Project (10b805c99a6945f3bcc685e1c2f9a816) in parent group-v373567. [ 1549.976929] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-069fe864-a843-4aad-b4f0-161d16fbc86a tempest-ImagesOneServerTestJSON-1959164190 tempest-ImagesOneServerTestJSON-1959164190-project-member] Creating folder: Instances. Parent ref: group-v373688. {{(pid=62519) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1549.977690] env[62519]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-fdfba9a3-e574-4a21-b784-05ff0e11c803 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1549.987074] env[62519]: INFO nova.virt.vmwareapi.vm_util [None req-069fe864-a843-4aad-b4f0-161d16fbc86a tempest-ImagesOneServerTestJSON-1959164190 tempest-ImagesOneServerTestJSON-1959164190-project-member] Created folder: Instances in parent group-v373688. [ 1549.987418] env[62519]: DEBUG oslo.service.loopingcall [None req-069fe864-a843-4aad-b4f0-161d16fbc86a tempest-ImagesOneServerTestJSON-1959164190 tempest-ImagesOneServerTestJSON-1959164190-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62519) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1549.988041] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4e3dee19-b99a-4257-88da-1b0531e2c0f9] Creating VM on the ESX host {{(pid=62519) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1549.988336] env[62519]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-7d687fbd-0965-4749-b7cf-49707dbcce10 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1550.010828] env[62519]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1550.010828] env[62519]: value = "task-1802343" [ 1550.010828] env[62519]: _type = "Task" [ 1550.010828] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1550.022301] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1802343, 'name': CreateVM_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1550.157297] env[62519]: DEBUG oslo_vmware.api [None req-51a5c574-31b5-4e20-8d04-338349c55e60 tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] Task: {'id': task-1802336, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.76465} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1550.158124] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-51a5c574-31b5-4e20-8d04-338349c55e60 tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk to [datastore1] 8070aa59-3547-460a-b914-0e84620023d0/8070aa59-3547-460a-b914-0e84620023d0.vmdk {{(pid=62519) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1550.158124] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-51a5c574-31b5-4e20-8d04-338349c55e60 tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] [instance: 8070aa59-3547-460a-b914-0e84620023d0] Extending root virtual disk to 1048576 {{(pid=62519) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1550.159551] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-5916e5a3-881f-4ee7-99c7-d55b82f613c0 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1550.166796] env[62519]: DEBUG oslo_vmware.api [None req-51a5c574-31b5-4e20-8d04-338349c55e60 tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] Waiting for the task: (returnval){ [ 1550.166796] env[62519]: value = "task-1802344" [ 1550.166796] env[62519]: _type = "Task" [ 1550.166796] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1550.178332] env[62519]: DEBUG oslo_vmware.api [None req-51a5c574-31b5-4e20-8d04-338349c55e60 tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] Task: {'id': task-1802344, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1550.253157] env[62519]: DEBUG nova.scheduler.client.report [None req-799af596-e4ed-43b4-bbf9-d5b7ae219998 tempest-ServersAdminNegativeTestJSON-2096466474 tempest-ServersAdminNegativeTestJSON-2096466474-project-member] Inventory has not changed for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 157, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1550.270506] env[62519]: DEBUG oslo_vmware.api [None req-d4f64c01-4859-459d-a49e-3e38bfd7e3ac tempest-ServerAddressesNegativeTestJSON-801003583 tempest-ServerAddressesNegativeTestJSON-801003583-project-member] Task: {'id': task-1802337, 'name': PowerOffVM_Task, 'duration_secs': 0.325102} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1550.271193] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-d4f64c01-4859-459d-a49e-3e38bfd7e3ac tempest-ServerAddressesNegativeTestJSON-801003583 tempest-ServerAddressesNegativeTestJSON-801003583-project-member] [instance: 22380aef-c725-43a0-a957-06ced9518c21] Powered off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1550.271390] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-d4f64c01-4859-459d-a49e-3e38bfd7e3ac tempest-ServerAddressesNegativeTestJSON-801003583 tempest-ServerAddressesNegativeTestJSON-801003583-project-member] [instance: 22380aef-c725-43a0-a957-06ced9518c21] Unregistering the VM {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1550.271700] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d6f292c4-3490-4b07-a914-29dc0ea48991 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1550.357490] env[62519]: DEBUG oslo_vmware.api [None req-d5d00165-6954-4f25-bfe4-c298fa7922a8 tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] Task: {'id': task-1802338, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1550.367743] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-d4f64c01-4859-459d-a49e-3e38bfd7e3ac tempest-ServerAddressesNegativeTestJSON-801003583 tempest-ServerAddressesNegativeTestJSON-801003583-project-member] [instance: 22380aef-c725-43a0-a957-06ced9518c21] Unregistered the VM {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1550.367979] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-d4f64c01-4859-459d-a49e-3e38bfd7e3ac tempest-ServerAddressesNegativeTestJSON-801003583 tempest-ServerAddressesNegativeTestJSON-801003583-project-member] [instance: 22380aef-c725-43a0-a957-06ced9518c21] Deleting contents of the VM from datastore datastore1 {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1550.368163] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-d4f64c01-4859-459d-a49e-3e38bfd7e3ac tempest-ServerAddressesNegativeTestJSON-801003583 tempest-ServerAddressesNegativeTestJSON-801003583-project-member] Deleting the datastore file [datastore1] 22380aef-c725-43a0-a957-06ced9518c21 {{(pid=62519) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1550.369141] env[62519]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-84dfeab6-c17a-4c7b-80c3-ae278a07102b {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1550.375494] env[62519]: DEBUG oslo_vmware.api [None req-d4f64c01-4859-459d-a49e-3e38bfd7e3ac tempest-ServerAddressesNegativeTestJSON-801003583 tempest-ServerAddressesNegativeTestJSON-801003583-project-member] Waiting for the task: (returnval){ [ 1550.375494] env[62519]: value = "task-1802346" [ 1550.375494] env[62519]: _type = "Task" [ 1550.375494] env[62519]: } to complete. 
{{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1550.385439] env[62519]: DEBUG oslo_vmware.api [None req-d4f64c01-4859-459d-a49e-3e38bfd7e3ac tempest-ServerAddressesNegativeTestJSON-801003583 tempest-ServerAddressesNegativeTestJSON-801003583-project-member] Task: {'id': task-1802346, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1550.447774] env[62519]: DEBUG oslo_vmware.api [None req-89d0611e-f35d-463c-a92b-756efdb9d348 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Task: {'id': task-1802340, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.472817} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1550.448244] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-89d0611e-f35d-463c-a92b-756efdb9d348 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Deleted the datastore file {{(pid=62519) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1550.448467] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-89d0611e-f35d-463c-a92b-756efdb9d348 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] [instance: 3b506d10-a427-47b8-ab5f-c35e450b7eb1] Deleted contents of the VM from datastore datastore1 {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1550.449469] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-89d0611e-f35d-463c-a92b-756efdb9d348 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] [instance: 3b506d10-a427-47b8-ab5f-c35e450b7eb1] Instance destroyed {{(pid=62519) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1550.449469] env[62519]: INFO nova.compute.manager [None req-89d0611e-f35d-463c-a92b-756efdb9d348 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] [instance: 3b506d10-a427-47b8-ab5f-c35e450b7eb1] Took 0.62 seconds to destroy the instance on the hypervisor. [ 1550.449469] env[62519]: DEBUG oslo.service.loopingcall [None req-89d0611e-f35d-463c-a92b-756efdb9d348 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62519) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1550.449469] env[62519]: DEBUG nova.compute.manager [-] [instance: 3b506d10-a427-47b8-ab5f-c35e450b7eb1] Deallocating network for instance {{(pid=62519) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2297}} [ 1550.449469] env[62519]: DEBUG nova.network.neutron [-] [instance: 3b506d10-a427-47b8-ab5f-c35e450b7eb1] deallocate_for_instance() {{(pid=62519) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1550.526749] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1802343, 'name': CreateVM_Task, 'duration_secs': 0.444549} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1550.526937] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4e3dee19-b99a-4257-88da-1b0531e2c0f9] Created VM on the ESX host {{(pid=62519) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1550.527689] env[62519]: DEBUG oslo_concurrency.lockutils [None req-069fe864-a843-4aad-b4f0-161d16fbc86a tempest-ImagesOneServerTestJSON-1959164190 tempest-ImagesOneServerTestJSON-1959164190-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1550.527883] env[62519]: DEBUG oslo_concurrency.lockutils [None req-069fe864-a843-4aad-b4f0-161d16fbc86a tempest-ImagesOneServerTestJSON-1959164190 tempest-ImagesOneServerTestJSON-1959164190-project-member] Acquired lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1550.528267] env[62519]: DEBUG oslo_concurrency.lockutils [None req-069fe864-a843-4aad-b4f0-161d16fbc86a tempest-ImagesOneServerTestJSON-1959164190 tempest-ImagesOneServerTestJSON-1959164190-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1550.529015] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-df2739d4-5dea-422d-aa63-47a83af2fe70 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1550.533988] env[62519]: DEBUG oslo_vmware.api [None req-069fe864-a843-4aad-b4f0-161d16fbc86a tempest-ImagesOneServerTestJSON-1959164190 tempest-ImagesOneServerTestJSON-1959164190-project-member] Waiting for the task: (returnval){ [ 1550.533988] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]52498ac7-e84c-7dc8-b332-a47bb5b7f603" [ 1550.533988] env[62519]: _type = "Task" [ 1550.533988] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1550.555345] env[62519]: DEBUG oslo_vmware.api [None req-069fe864-a843-4aad-b4f0-161d16fbc86a tempest-ImagesOneServerTestJSON-1959164190 tempest-ImagesOneServerTestJSON-1959164190-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52498ac7-e84c-7dc8-b332-a47bb5b7f603, 'name': SearchDatastore_Task, 'duration_secs': 0.011132} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1550.555931] env[62519]: DEBUG oslo_concurrency.lockutils [None req-069fe864-a843-4aad-b4f0-161d16fbc86a tempest-ImagesOneServerTestJSON-1959164190 tempest-ImagesOneServerTestJSON-1959164190-project-member] Releasing lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1550.555931] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-069fe864-a843-4aad-b4f0-161d16fbc86a tempest-ImagesOneServerTestJSON-1959164190 tempest-ImagesOneServerTestJSON-1959164190-project-member] [instance: 4e3dee19-b99a-4257-88da-1b0531e2c0f9] Processing image 15793716-f1d9-4a86-9030-717adf498693 {{(pid=62519) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1550.556149] env[62519]: DEBUG oslo_concurrency.lockutils [None req-069fe864-a843-4aad-b4f0-161d16fbc86a tempest-ImagesOneServerTestJSON-1959164190 tempest-ImagesOneServerTestJSON-1959164190-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1550.556653] env[62519]: DEBUG oslo_concurrency.lockutils [None req-069fe864-a843-4aad-b4f0-161d16fbc86a tempest-ImagesOneServerTestJSON-1959164190 tempest-ImagesOneServerTestJSON-1959164190-project-member] Acquired lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1550.556653] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-069fe864-a843-4aad-b4f0-161d16fbc86a tempest-ImagesOneServerTestJSON-1959164190 tempest-ImagesOneServerTestJSON-1959164190-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62519) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1550.556741] env[62519]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-543c3a4a-96d6-4b3c-bb43-3692fde43832 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1550.565267] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-069fe864-a843-4aad-b4f0-161d16fbc86a tempest-ImagesOneServerTestJSON-1959164190 tempest-ImagesOneServerTestJSON-1959164190-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62519) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1550.565267] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-069fe864-a843-4aad-b4f0-161d16fbc86a tempest-ImagesOneServerTestJSON-1959164190 tempest-ImagesOneServerTestJSON-1959164190-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62519) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1550.565994] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-de66a043-bf4c-422b-a035-4e8216bef4a6 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1550.571658] env[62519]: DEBUG oslo_vmware.api [None req-069fe864-a843-4aad-b4f0-161d16fbc86a tempest-ImagesOneServerTestJSON-1959164190 tempest-ImagesOneServerTestJSON-1959164190-project-member] Waiting for the task: (returnval){ [ 1550.571658] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]52b725cc-ed3c-101d-bc4c-4778d14d28ff" [ 1550.571658] env[62519]: _type = "Task" [ 1550.571658] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1550.580664] env[62519]: DEBUG oslo_vmware.api [None req-069fe864-a843-4aad-b4f0-161d16fbc86a tempest-ImagesOneServerTestJSON-1959164190 tempest-ImagesOneServerTestJSON-1959164190-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52b725cc-ed3c-101d-bc4c-4778d14d28ff, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1550.594478] env[62519]: DEBUG nova.compute.manager [None req-d66de93a-aada-4f3a-931f-5f8ddbb5f1e6 tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] [instance: 24cb49c8-b2ef-4ede-aea6-6e34081beca1] Start spawning the instance on the hypervisor. {{(pid=62519) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2647}} [ 1550.631721] env[62519]: DEBUG nova.virt.hardware [None req-d66de93a-aada-4f3a-931f-5f8ddbb5f1e6 tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T07:56:15Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T07:55:55Z,direct_url=,disk_format='vmdk',id=15793716-f1d9-4a86-9030-717adf498693,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4069337684d348e0a1ab4eb6a1a2b14d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T07:55:56Z,virtual_size=,visibility=), allow threads: False {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1550.631976] env[62519]: DEBUG nova.virt.hardware [None req-d66de93a-aada-4f3a-931f-5f8ddbb5f1e6 tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] Flavor limits 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1550.632154] env[62519]: DEBUG nova.virt.hardware [None req-d66de93a-aada-4f3a-931f-5f8ddbb5f1e6 tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] Image limits 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1550.632359] env[62519]: DEBUG nova.virt.hardware [None req-d66de93a-aada-4f3a-931f-5f8ddbb5f1e6 
tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] Flavor pref 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1550.633826] env[62519]: DEBUG nova.virt.hardware [None req-d66de93a-aada-4f3a-931f-5f8ddbb5f1e6 tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] Image pref 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1550.634066] env[62519]: DEBUG nova.virt.hardware [None req-d66de93a-aada-4f3a-931f-5f8ddbb5f1e6 tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1550.634339] env[62519]: DEBUG nova.virt.hardware [None req-d66de93a-aada-4f3a-931f-5f8ddbb5f1e6 tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1550.634583] env[62519]: DEBUG nova.virt.hardware [None req-d66de93a-aada-4f3a-931f-5f8ddbb5f1e6 tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62519) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1550.634716] env[62519]: DEBUG nova.virt.hardware [None req-d66de93a-aada-4f3a-931f-5f8ddbb5f1e6 tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] Got 1 possible topologies {{(pid=62519) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1550.634856] env[62519]: DEBUG nova.virt.hardware [None req-d66de93a-aada-4f3a-931f-5f8ddbb5f1e6 tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1550.635053] env[62519]: DEBUG nova.virt.hardware [None req-d66de93a-aada-4f3a-931f-5f8ddbb5f1e6 tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1550.639394] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63affc84-1282-408a-a8a7-d5388bd5d03d {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1550.654091] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b287c42-825c-4b8f-bd91-4c0ffeeffe5c {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1550.682950] env[62519]: DEBUG oslo_vmware.api [None req-51a5c574-31b5-4e20-8d04-338349c55e60 tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] Task: {'id': task-1802344, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.093639} completed 
successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1550.683329] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-51a5c574-31b5-4e20-8d04-338349c55e60 tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] [instance: 8070aa59-3547-460a-b914-0e84620023d0] Extended root virtual disk {{(pid=62519) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1550.684099] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae6685c6-9c37-4ca0-99d6-8cbb59ff200f {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1550.708453] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-51a5c574-31b5-4e20-8d04-338349c55e60 tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] [instance: 8070aa59-3547-460a-b914-0e84620023d0] Reconfiguring VM instance instance-00000027 to attach disk [datastore1] 8070aa59-3547-460a-b914-0e84620023d0/8070aa59-3547-460a-b914-0e84620023d0.vmdk or device None with type sparse {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1550.710153] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1c5057b9-95b6-41d2-af6e-30742b93b79f {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1550.728548] env[62519]: DEBUG oslo_concurrency.lockutils [None req-94572272-cfe8-4ebd-bbc6-1659ba6f617f tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Acquiring lock "c60f5d73-9d6d-4b5f-b71b-00b6b787d482" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1550.728837] env[62519]: DEBUG oslo_concurrency.lockutils [None req-94572272-cfe8-4ebd-bbc6-1659ba6f617f tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Lock "c60f5d73-9d6d-4b5f-b71b-00b6b787d482" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1550.734561] env[62519]: DEBUG oslo_vmware.api [None req-51a5c574-31b5-4e20-8d04-338349c55e60 tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] Waiting for the task: (returnval){ [ 1550.734561] env[62519]: value = "task-1802349" [ 1550.734561] env[62519]: _type = "Task" [ 1550.734561] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1550.746304] env[62519]: DEBUG oslo_vmware.api [None req-51a5c574-31b5-4e20-8d04-338349c55e60 tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] Task: {'id': task-1802349, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1550.747160] env[62519]: DEBUG nova.network.neutron [None req-570a2219-1aaf-48cc-963b-0fa534407194 tempest-InstanceActionsTestJSON-366970633 tempest-InstanceActionsTestJSON-366970633-project-member] [instance: f19c860f-736a-4783-8ef5-8262040e53a3] Updating instance_info_cache with network_info: [{"id": "e362cda5-6e64-4311-96a1-880e1f66ab32", "address": "fa:16:3e:15:fb:b0", "network": {"id": "deb202ae-72d1-47f1-8354-3ea5d8bdcd2a", "bridge": "br-int", "label": "tempest-InstanceActionsTestJSON-1495521208-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d004a7d2ec074aa39666ae15861b9440", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "098df9b7-d759-47f7-b756-334848cb423b", "external-id": "nsx-vlan-transportzone-765", "segmentation_id": 765, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape362cda5-6e", "ovs_interfaceid": "e362cda5-6e64-4311-96a1-880e1f66ab32", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1550.767993] env[62519]: DEBUG nova.compute.manager [req-cac18f16-8c07-4910-83bf-b0b071494ba7 req-9175ebc7-e498-4b1c-b017-74eb8d949e00 service nova] [instance: 27f9e890-4733-43aa-9bf1-351d42d75418] Received event network-changed-85d5a3eb-9d3d-46ac-b538-3e317a13e6fc {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1550.768272] env[62519]: DEBUG nova.compute.manager [req-cac18f16-8c07-4910-83bf-b0b071494ba7 req-9175ebc7-e498-4b1c-b017-74eb8d949e00 service nova] [instance: 27f9e890-4733-43aa-9bf1-351d42d75418] Refreshing instance network info cache due to event network-changed-85d5a3eb-9d3d-46ac-b538-3e317a13e6fc. 
{{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11628}} [ 1550.769658] env[62519]: DEBUG oslo_concurrency.lockutils [req-cac18f16-8c07-4910-83bf-b0b071494ba7 req-9175ebc7-e498-4b1c-b017-74eb8d949e00 service nova] Acquiring lock "refresh_cache-27f9e890-4733-43aa-9bf1-351d42d75418" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1550.769658] env[62519]: DEBUG oslo_concurrency.lockutils [req-cac18f16-8c07-4910-83bf-b0b071494ba7 req-9175ebc7-e498-4b1c-b017-74eb8d949e00 service nova] Acquired lock "refresh_cache-27f9e890-4733-43aa-9bf1-351d42d75418" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1550.769658] env[62519]: DEBUG nova.network.neutron [req-cac18f16-8c07-4910-83bf-b0b071494ba7 req-9175ebc7-e498-4b1c-b017-74eb8d949e00 service nova] [instance: 27f9e890-4733-43aa-9bf1-351d42d75418] Refreshing network info cache for port 85d5a3eb-9d3d-46ac-b538-3e317a13e6fc {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1550.771699] env[62519]: DEBUG oslo_concurrency.lockutils [None req-799af596-e4ed-43b4-bbf9-d5b7ae219998 tempest-ServersAdminNegativeTestJSON-2096466474 tempest-ServersAdminNegativeTestJSON-2096466474-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.203s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1550.775531] env[62519]: DEBUG oslo_concurrency.lockutils [None req-7525886b-c35e-4300-bd7c-cb7c69519285 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 31.091s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1550.776403] env[62519]: INFO nova.compute.claims [None req-7525886b-c35e-4300-bd7c-cb7c69519285 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] [instance: d8780c40-0099-4ccc-84ae-72fbb14fa1ee] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1550.809974] env[62519]: INFO nova.scheduler.client.report [None req-799af596-e4ed-43b4-bbf9-d5b7ae219998 tempest-ServersAdminNegativeTestJSON-2096466474 tempest-ServersAdminNegativeTestJSON-2096466474-project-member] Deleted allocations for instance c8b7568b-ba07-4f65-818b-f84910209361 [ 1550.855597] env[62519]: DEBUG oslo_vmware.api [None req-d5d00165-6954-4f25-bfe4-c298fa7922a8 tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] Task: {'id': task-1802338, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.570663} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1550.855891] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-d5d00165-6954-4f25-bfe4-c298fa7922a8 tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk to [datastore1] 27f9e890-4733-43aa-9bf1-351d42d75418/27f9e890-4733-43aa-9bf1-351d42d75418.vmdk {{(pid=62519) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1550.856128] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-d5d00165-6954-4f25-bfe4-c298fa7922a8 tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] [instance: 27f9e890-4733-43aa-9bf1-351d42d75418] Extending root virtual disk to 1048576 {{(pid=62519) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1550.856434] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-db125946-4bb3-4b5c-a872-8a2d61948a85 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1550.868205] env[62519]: DEBUG oslo_vmware.api [None req-d5d00165-6954-4f25-bfe4-c298fa7922a8 tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] Waiting for the task: (returnval){ [ 1550.868205] env[62519]: value = "task-1802351" [ 1550.868205] env[62519]: _type = "Task" [ 1550.868205] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1550.881332] env[62519]: DEBUG oslo_vmware.api [None req-d5d00165-6954-4f25-bfe4-c298fa7922a8 tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] Task: {'id': task-1802351, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1550.894058] env[62519]: DEBUG oslo_vmware.api [None req-d4f64c01-4859-459d-a49e-3e38bfd7e3ac tempest-ServerAddressesNegativeTestJSON-801003583 tempest-ServerAddressesNegativeTestJSON-801003583-project-member] Task: {'id': task-1802346, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.179007} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1550.894058] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-d4f64c01-4859-459d-a49e-3e38bfd7e3ac tempest-ServerAddressesNegativeTestJSON-801003583 tempest-ServerAddressesNegativeTestJSON-801003583-project-member] Deleted the datastore file {{(pid=62519) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1550.894058] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-d4f64c01-4859-459d-a49e-3e38bfd7e3ac tempest-ServerAddressesNegativeTestJSON-801003583 tempest-ServerAddressesNegativeTestJSON-801003583-project-member] [instance: 22380aef-c725-43a0-a957-06ced9518c21] Deleted contents of the VM from datastore datastore1 {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1550.894058] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-d4f64c01-4859-459d-a49e-3e38bfd7e3ac tempest-ServerAddressesNegativeTestJSON-801003583 tempest-ServerAddressesNegativeTestJSON-801003583-project-member] [instance: 22380aef-c725-43a0-a957-06ced9518c21] Instance destroyed {{(pid=62519) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1550.894058] env[62519]: INFO nova.compute.manager [None req-d4f64c01-4859-459d-a49e-3e38bfd7e3ac tempest-ServerAddressesNegativeTestJSON-801003583 tempest-ServerAddressesNegativeTestJSON-801003583-project-member] [instance: 22380aef-c725-43a0-a957-06ced9518c21] Took 1.19 seconds to destroy the instance on the hypervisor. [ 1550.894058] env[62519]: DEBUG oslo.service.loopingcall [None req-d4f64c01-4859-459d-a49e-3e38bfd7e3ac tempest-ServerAddressesNegativeTestJSON-801003583 tempest-ServerAddressesNegativeTestJSON-801003583-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62519) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1550.894881] env[62519]: DEBUG nova.compute.manager [-] [instance: 22380aef-c725-43a0-a957-06ced9518c21] Deallocating network for instance {{(pid=62519) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2297}} [ 1550.895044] env[62519]: DEBUG nova.network.neutron [-] [instance: 22380aef-c725-43a0-a957-06ced9518c21] deallocate_for_instance() {{(pid=62519) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1551.082666] env[62519]: DEBUG oslo_vmware.api [None req-069fe864-a843-4aad-b4f0-161d16fbc86a tempest-ImagesOneServerTestJSON-1959164190 tempest-ImagesOneServerTestJSON-1959164190-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52b725cc-ed3c-101d-bc4c-4778d14d28ff, 'name': SearchDatastore_Task, 'duration_secs': 0.009687} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1551.083546] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-77fa0b5a-5878-4dde-8674-4f691624a636 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1551.089056] env[62519]: DEBUG oslo_vmware.api [None req-069fe864-a843-4aad-b4f0-161d16fbc86a tempest-ImagesOneServerTestJSON-1959164190 tempest-ImagesOneServerTestJSON-1959164190-project-member] Waiting for the task: (returnval){ [ 1551.089056] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]52c60088-9414-d79f-d48c-187a80ae8803" [ 1551.089056] env[62519]: _type = "Task" [ 1551.089056] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1551.098204] env[62519]: DEBUG oslo_vmware.api [None req-069fe864-a843-4aad-b4f0-161d16fbc86a tempest-ImagesOneServerTestJSON-1959164190 tempest-ImagesOneServerTestJSON-1959164190-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52c60088-9414-d79f-d48c-187a80ae8803, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1551.247342] env[62519]: DEBUG oslo_vmware.api [None req-51a5c574-31b5-4e20-8d04-338349c55e60 tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] Task: {'id': task-1802349, 'name': ReconfigVM_Task, 'duration_secs': 0.36462} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1551.247342] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-51a5c574-31b5-4e20-8d04-338349c55e60 tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] [instance: 8070aa59-3547-460a-b914-0e84620023d0] Reconfigured VM instance instance-00000027 to attach disk [datastore1] 8070aa59-3547-460a-b914-0e84620023d0/8070aa59-3547-460a-b914-0e84620023d0.vmdk or device None with type sparse {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1551.247667] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-bf2c6c7f-edbf-442a-aa27-9b23f0b53c40 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1551.249855] env[62519]: DEBUG oslo_concurrency.lockutils [None req-570a2219-1aaf-48cc-963b-0fa534407194 tempest-InstanceActionsTestJSON-366970633 tempest-InstanceActionsTestJSON-366970633-project-member] Releasing lock "refresh_cache-f19c860f-736a-4783-8ef5-8262040e53a3" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1551.257876] env[62519]: DEBUG oslo_vmware.api [None req-51a5c574-31b5-4e20-8d04-338349c55e60 tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] Waiting for the task: (returnval){ [ 1551.257876] env[62519]: value = "task-1802352" [ 1551.257876] env[62519]: _type = "Task" [ 1551.257876] env[62519]: } to complete. 
{{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1551.270413] env[62519]: DEBUG oslo_vmware.api [None req-51a5c574-31b5-4e20-8d04-338349c55e60 tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] Task: {'id': task-1802352, 'name': Rename_Task} progress is 5%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1551.326415] env[62519]: DEBUG oslo_concurrency.lockutils [None req-799af596-e4ed-43b4-bbf9-d5b7ae219998 tempest-ServersAdminNegativeTestJSON-2096466474 tempest-ServersAdminNegativeTestJSON-2096466474-project-member] Lock "c8b7568b-ba07-4f65-818b-f84910209361" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 37.695s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1551.379864] env[62519]: DEBUG oslo_vmware.api [None req-d5d00165-6954-4f25-bfe4-c298fa7922a8 tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] Task: {'id': task-1802351, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.08352} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1551.380161] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-d5d00165-6954-4f25-bfe4-c298fa7922a8 tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] [instance: 27f9e890-4733-43aa-9bf1-351d42d75418] Extended root virtual disk {{(pid=62519) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1551.381248] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-999ad845-1efe-4617-9b1b-5b8d25c7206c {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1551.407779] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-d5d00165-6954-4f25-bfe4-c298fa7922a8 tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] [instance: 27f9e890-4733-43aa-9bf1-351d42d75418] Reconfiguring VM instance instance-00000028 to attach disk [datastore1] 27f9e890-4733-43aa-9bf1-351d42d75418/27f9e890-4733-43aa-9bf1-351d42d75418.vmdk or device None with type sparse {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1551.410948] env[62519]: DEBUG nova.network.neutron [-] [instance: 3b506d10-a427-47b8-ab5f-c35e450b7eb1] Updating instance_info_cache with network_info: [] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1551.412186] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d8cb7b21-4f20-4a5c-901c-80308dd11cee {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1551.431184] env[62519]: INFO nova.compute.manager [-] [instance: 3b506d10-a427-47b8-ab5f-c35e450b7eb1] Took 0.98 seconds to deallocate network for instance. 
[ 1551.441022] env[62519]: DEBUG oslo_vmware.api [None req-d5d00165-6954-4f25-bfe4-c298fa7922a8 tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] Waiting for the task: (returnval){ [ 1551.441022] env[62519]: value = "task-1802353" [ 1551.441022] env[62519]: _type = "Task" [ 1551.441022] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1551.458462] env[62519]: DEBUG oslo_vmware.api [None req-d5d00165-6954-4f25-bfe4-c298fa7922a8 tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] Task: {'id': task-1802353, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1551.575628] env[62519]: DEBUG nova.network.neutron [None req-d66de93a-aada-4f3a-931f-5f8ddbb5f1e6 tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] [instance: 24cb49c8-b2ef-4ede-aea6-6e34081beca1] Successfully updated port: 0ac63864-3cdd-498e-b28b-054b97ccd1db {{(pid=62519) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1551.605232] env[62519]: DEBUG oslo_vmware.api [None req-069fe864-a843-4aad-b4f0-161d16fbc86a tempest-ImagesOneServerTestJSON-1959164190 tempest-ImagesOneServerTestJSON-1959164190-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52c60088-9414-d79f-d48c-187a80ae8803, 'name': SearchDatastore_Task, 'duration_secs': 0.011694} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1551.605548] env[62519]: DEBUG oslo_concurrency.lockutils [None req-069fe864-a843-4aad-b4f0-161d16fbc86a tempest-ImagesOneServerTestJSON-1959164190 tempest-ImagesOneServerTestJSON-1959164190-project-member] Releasing lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1551.606478] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-069fe864-a843-4aad-b4f0-161d16fbc86a tempest-ImagesOneServerTestJSON-1959164190 tempest-ImagesOneServerTestJSON-1959164190-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk to [datastore1] 4e3dee19-b99a-4257-88da-1b0531e2c0f9/4e3dee19-b99a-4257-88da-1b0531e2c0f9.vmdk {{(pid=62519) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1551.606478] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-0859f2d1-f692-43e2-aada-a77de9717547 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1551.613581] env[62519]: DEBUG oslo_vmware.api [None req-069fe864-a843-4aad-b4f0-161d16fbc86a tempest-ImagesOneServerTestJSON-1959164190 tempest-ImagesOneServerTestJSON-1959164190-project-member] Waiting for the task: (returnval){ [ 1551.613581] env[62519]: value = "task-1802354" [ 1551.613581] env[62519]: _type = "Task" [ 1551.613581] env[62519]: } to complete. 
{{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1551.622316] env[62519]: DEBUG oslo_vmware.api [None req-069fe864-a843-4aad-b4f0-161d16fbc86a tempest-ImagesOneServerTestJSON-1959164190 tempest-ImagesOneServerTestJSON-1959164190-project-member] Task: {'id': task-1802354, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1551.651354] env[62519]: DEBUG nova.network.neutron [req-cac18f16-8c07-4910-83bf-b0b071494ba7 req-9175ebc7-e498-4b1c-b017-74eb8d949e00 service nova] [instance: 27f9e890-4733-43aa-9bf1-351d42d75418] Updated VIF entry in instance network info cache for port 85d5a3eb-9d3d-46ac-b538-3e317a13e6fc. {{(pid=62519) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1551.651748] env[62519]: DEBUG nova.network.neutron [req-cac18f16-8c07-4910-83bf-b0b071494ba7 req-9175ebc7-e498-4b1c-b017-74eb8d949e00 service nova] [instance: 27f9e890-4733-43aa-9bf1-351d42d75418] Updating instance_info_cache with network_info: [{"id": "85d5a3eb-9d3d-46ac-b538-3e317a13e6fc", "address": "fa:16:3e:74:ba:4e", "network": {"id": "4c7928be-c05e-40d8-8744-53752aa1c802", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-1081889914-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f69f08ab17cc423a98f0ae56f706c62b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c4712af2-45ef-4652-8d2c-482ec70056d0", "external-id": "nsx-vlan-transportzone-826", "segmentation_id": 826, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap85d5a3eb-9d", "ovs_interfaceid": "85d5a3eb-9d3d-46ac-b538-3e317a13e6fc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1551.760078] env[62519]: DEBUG nova.compute.manager [None req-570a2219-1aaf-48cc-963b-0fa534407194 tempest-InstanceActionsTestJSON-366970633 tempest-InstanceActionsTestJSON-366970633-project-member] [instance: f19c860f-736a-4783-8ef5-8262040e53a3] Checking state {{(pid=62519) _get_power_state /opt/stack/nova/nova/compute/manager.py:1799}} [ 1551.760078] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6126e94-d0ca-44f0-aa3b-dd7d81b55ed7 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1551.773962] env[62519]: DEBUG oslo_vmware.api [None req-51a5c574-31b5-4e20-8d04-338349c55e60 tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] Task: {'id': task-1802352, 'name': Rename_Task, 'duration_secs': 0.152269} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1551.777145] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-51a5c574-31b5-4e20-8d04-338349c55e60 tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] [instance: 8070aa59-3547-460a-b914-0e84620023d0] Powering on the VM {{(pid=62519) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1551.777145] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-86ec8def-7e00-47b5-8674-5faefdceec1a {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1551.784372] env[62519]: DEBUG oslo_vmware.api [None req-51a5c574-31b5-4e20-8d04-338349c55e60 tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] Waiting for the task: (returnval){ [ 1551.784372] env[62519]: value = "task-1802355" [ 1551.784372] env[62519]: _type = "Task" [ 1551.784372] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1551.802569] env[62519]: DEBUG oslo_vmware.api [None req-51a5c574-31b5-4e20-8d04-338349c55e60 tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] Task: {'id': task-1802355, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1551.942775] env[62519]: DEBUG oslo_concurrency.lockutils [None req-89d0611e-f35d-463c-a92b-756efdb9d348 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1551.950962] env[62519]: DEBUG nova.compute.manager [req-ee34fd1d-c898-4f78-89e1-4b4845497c04 req-dc0a9a59-984d-4b30-9c06-4bf724d04e1a service nova] [instance: 22380aef-c725-43a0-a957-06ced9518c21] Received event network-vif-deleted-ca6c27e1-d55f-43cc-8dee-29ddc604baad {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1551.951178] env[62519]: INFO nova.compute.manager [req-ee34fd1d-c898-4f78-89e1-4b4845497c04 req-dc0a9a59-984d-4b30-9c06-4bf724d04e1a service nova] [instance: 22380aef-c725-43a0-a957-06ced9518c21] Neutron deleted interface ca6c27e1-d55f-43cc-8dee-29ddc604baad; detaching it from the instance and deleting it from the info cache [ 1551.951350] env[62519]: DEBUG nova.network.neutron [req-ee34fd1d-c898-4f78-89e1-4b4845497c04 req-dc0a9a59-984d-4b30-9c06-4bf724d04e1a service nova] [instance: 22380aef-c725-43a0-a957-06ced9518c21] Updating instance_info_cache with network_info: [] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1551.964022] env[62519]: DEBUG oslo_vmware.api [None req-d5d00165-6954-4f25-bfe4-c298fa7922a8 tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] Task: {'id': task-1802353, 'name': ReconfigVM_Task, 'duration_secs': 0.504531} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1551.969331] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-d5d00165-6954-4f25-bfe4-c298fa7922a8 tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] [instance: 27f9e890-4733-43aa-9bf1-351d42d75418] Reconfigured VM instance instance-00000028 to attach disk [datastore1] 27f9e890-4733-43aa-9bf1-351d42d75418/27f9e890-4733-43aa-9bf1-351d42d75418.vmdk or device None with type sparse {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1551.970926] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-f1b26881-3f0f-4e52-9a05-7be0ef88188d {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1551.979696] env[62519]: DEBUG oslo_vmware.api [None req-d5d00165-6954-4f25-bfe4-c298fa7922a8 tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] Waiting for the task: (returnval){ [ 1551.979696] env[62519]: value = "task-1802356" [ 1551.979696] env[62519]: _type = "Task" [ 1551.979696] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1551.996923] env[62519]: DEBUG oslo_vmware.api [None req-d5d00165-6954-4f25-bfe4-c298fa7922a8 tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] Task: {'id': task-1802356, 'name': Rename_Task} progress is 10%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1552.020218] env[62519]: DEBUG nova.network.neutron [-] [instance: 22380aef-c725-43a0-a957-06ced9518c21] Updating instance_info_cache with network_info: [] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1552.081259] env[62519]: DEBUG oslo_concurrency.lockutils [None req-d66de93a-aada-4f3a-931f-5f8ddbb5f1e6 tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] Acquiring lock "refresh_cache-24cb49c8-b2ef-4ede-aea6-6e34081beca1" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1552.082081] env[62519]: DEBUG oslo_concurrency.lockutils [None req-d66de93a-aada-4f3a-931f-5f8ddbb5f1e6 tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] Acquired lock "refresh_cache-24cb49c8-b2ef-4ede-aea6-6e34081beca1" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1552.082517] env[62519]: DEBUG nova.network.neutron [None req-d66de93a-aada-4f3a-931f-5f8ddbb5f1e6 tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] [instance: 24cb49c8-b2ef-4ede-aea6-6e34081beca1] Building network info cache for instance {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1552.127927] env[62519]: DEBUG oslo_vmware.api [None req-069fe864-a843-4aad-b4f0-161d16fbc86a tempest-ImagesOneServerTestJSON-1959164190 tempest-ImagesOneServerTestJSON-1959164190-project-member] Task: {'id': task-1802354, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.493896} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1552.128318] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-069fe864-a843-4aad-b4f0-161d16fbc86a tempest-ImagesOneServerTestJSON-1959164190 tempest-ImagesOneServerTestJSON-1959164190-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk to [datastore1] 4e3dee19-b99a-4257-88da-1b0531e2c0f9/4e3dee19-b99a-4257-88da-1b0531e2c0f9.vmdk {{(pid=62519) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1552.128591] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-069fe864-a843-4aad-b4f0-161d16fbc86a tempest-ImagesOneServerTestJSON-1959164190 tempest-ImagesOneServerTestJSON-1959164190-project-member] [instance: 4e3dee19-b99a-4257-88da-1b0531e2c0f9] Extending root virtual disk to 1048576 {{(pid=62519) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1552.128883] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-f8663cab-cf00-4ee4-a7a7-76b892f9df9e {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1552.139422] env[62519]: DEBUG oslo_vmware.api [None req-069fe864-a843-4aad-b4f0-161d16fbc86a tempest-ImagesOneServerTestJSON-1959164190 tempest-ImagesOneServerTestJSON-1959164190-project-member] Waiting for the task: (returnval){ [ 1552.139422] env[62519]: value = "task-1802357" [ 1552.139422] env[62519]: _type = "Task" [ 1552.139422] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1552.151030] env[62519]: DEBUG oslo_vmware.api [None req-069fe864-a843-4aad-b4f0-161d16fbc86a tempest-ImagesOneServerTestJSON-1959164190 tempest-ImagesOneServerTestJSON-1959164190-project-member] Task: {'id': task-1802357, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1552.157867] env[62519]: DEBUG oslo_concurrency.lockutils [req-cac18f16-8c07-4910-83bf-b0b071494ba7 req-9175ebc7-e498-4b1c-b017-74eb8d949e00 service nova] Releasing lock "refresh_cache-27f9e890-4733-43aa-9bf1-351d42d75418" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1552.158504] env[62519]: DEBUG nova.compute.manager [req-cac18f16-8c07-4910-83bf-b0b071494ba7 req-9175ebc7-e498-4b1c-b017-74eb8d949e00 service nova] [instance: 4e3dee19-b99a-4257-88da-1b0531e2c0f9] Received event network-vif-plugged-d23f0b24-455b-4112-8518-1eaca05eb428 {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1552.158504] env[62519]: DEBUG oslo_concurrency.lockutils [req-cac18f16-8c07-4910-83bf-b0b071494ba7 req-9175ebc7-e498-4b1c-b017-74eb8d949e00 service nova] Acquiring lock "4e3dee19-b99a-4257-88da-1b0531e2c0f9-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1552.158652] env[62519]: DEBUG oslo_concurrency.lockutils [req-cac18f16-8c07-4910-83bf-b0b071494ba7 req-9175ebc7-e498-4b1c-b017-74eb8d949e00 service nova] Lock "4e3dee19-b99a-4257-88da-1b0531e2c0f9-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1552.159187] env[62519]: DEBUG oslo_concurrency.lockutils [req-cac18f16-8c07-4910-83bf-b0b071494ba7 req-9175ebc7-e498-4b1c-b017-74eb8d949e00 service nova] Lock "4e3dee19-b99a-4257-88da-1b0531e2c0f9-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1552.159187] env[62519]: DEBUG nova.compute.manager [req-cac18f16-8c07-4910-83bf-b0b071494ba7 req-9175ebc7-e498-4b1c-b017-74eb8d949e00 service nova] [instance: 4e3dee19-b99a-4257-88da-1b0531e2c0f9] No waiting events found dispatching network-vif-plugged-d23f0b24-455b-4112-8518-1eaca05eb428 {{(pid=62519) pop_instance_event /opt/stack/nova/nova/compute/manager.py:323}} [ 1552.159187] env[62519]: WARNING nova.compute.manager [req-cac18f16-8c07-4910-83bf-b0b071494ba7 req-9175ebc7-e498-4b1c-b017-74eb8d949e00 service nova] [instance: 4e3dee19-b99a-4257-88da-1b0531e2c0f9] Received unexpected event network-vif-plugged-d23f0b24-455b-4112-8518-1eaca05eb428 for instance with vm_state building and task_state spawning. [ 1552.159336] env[62519]: DEBUG nova.compute.manager [req-cac18f16-8c07-4910-83bf-b0b071494ba7 req-9175ebc7-e498-4b1c-b017-74eb8d949e00 service nova] [instance: 4e3dee19-b99a-4257-88da-1b0531e2c0f9] Received event network-changed-d23f0b24-455b-4112-8518-1eaca05eb428 {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1552.159568] env[62519]: DEBUG nova.compute.manager [req-cac18f16-8c07-4910-83bf-b0b071494ba7 req-9175ebc7-e498-4b1c-b017-74eb8d949e00 service nova] [instance: 4e3dee19-b99a-4257-88da-1b0531e2c0f9] Refreshing instance network info cache due to event network-changed-d23f0b24-455b-4112-8518-1eaca05eb428. 
{{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11628}} [ 1552.159892] env[62519]: DEBUG oslo_concurrency.lockutils [req-cac18f16-8c07-4910-83bf-b0b071494ba7 req-9175ebc7-e498-4b1c-b017-74eb8d949e00 service nova] Acquiring lock "refresh_cache-4e3dee19-b99a-4257-88da-1b0531e2c0f9" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1552.160217] env[62519]: DEBUG oslo_concurrency.lockutils [req-cac18f16-8c07-4910-83bf-b0b071494ba7 req-9175ebc7-e498-4b1c-b017-74eb8d949e00 service nova] Acquired lock "refresh_cache-4e3dee19-b99a-4257-88da-1b0531e2c0f9" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1552.160217] env[62519]: DEBUG nova.network.neutron [req-cac18f16-8c07-4910-83bf-b0b071494ba7 req-9175ebc7-e498-4b1c-b017-74eb8d949e00 service nova] [instance: 4e3dee19-b99a-4257-88da-1b0531e2c0f9] Refreshing network info cache for port d23f0b24-455b-4112-8518-1eaca05eb428 {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1552.297050] env[62519]: DEBUG oslo_vmware.api [None req-51a5c574-31b5-4e20-8d04-338349c55e60 tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] Task: {'id': task-1802355, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1552.343512] env[62519]: DEBUG oslo_concurrency.lockutils [None req-075439ca-d8c3-416a-962f-d9ac15fdb3e2 tempest-VolumesAdminNegativeTest-694306887 tempest-VolumesAdminNegativeTest-694306887-project-member] Acquiring lock "c61c893f-826b-4874-b253-de6fbffa9e5a" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1552.345264] env[62519]: DEBUG oslo_concurrency.lockutils [None req-075439ca-d8c3-416a-962f-d9ac15fdb3e2 tempest-VolumesAdminNegativeTest-694306887 tempest-VolumesAdminNegativeTest-694306887-project-member] Lock "c61c893f-826b-4874-b253-de6fbffa9e5a" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1552.387984] env[62519]: DEBUG oslo_concurrency.lockutils [None req-eb7b956f-b805-4696-91cf-83b33daf0afe tempest-ServersTestMultiNic-797441404 tempest-ServersTestMultiNic-797441404-project-member] Acquiring lock "d5b6b7b4-ebc4-4d3d-8e16-bf0a4e974d67" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1552.388343] env[62519]: DEBUG oslo_concurrency.lockutils [None req-eb7b956f-b805-4696-91cf-83b33daf0afe tempest-ServersTestMultiNic-797441404 tempest-ServersTestMultiNic-797441404-project-member] Lock "d5b6b7b4-ebc4-4d3d-8e16-bf0a4e974d67" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1552.457616] env[62519]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-5b25b7b2-1534-45cf-8536-2fce285dbf22 {{(pid=62519) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1552.467543] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c6c111c-0f04-482b-bdc6-e8deaf771258 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1552.614632] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-113c81b1-9d97-4cd1-8e48-c5fe383a5f7b {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1552.614632] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af2b25ce-0bb2-4f1b-8e03-8bef55d0c992 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1552.614632] env[62519]: DEBUG oslo_vmware.api [None req-d5d00165-6954-4f25-bfe4-c298fa7922a8 tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] Task: {'id': task-1802356, 'name': Rename_Task, 'duration_secs': 0.219008} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1552.614632] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-d5d00165-6954-4f25-bfe4-c298fa7922a8 tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] [instance: 27f9e890-4733-43aa-9bf1-351d42d75418] Powering on the VM {{(pid=62519) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1552.614632] env[62519]: DEBUG nova.compute.manager [req-ee34fd1d-c898-4f78-89e1-4b4845497c04 req-dc0a9a59-984d-4b30-9c06-4bf724d04e1a service nova] [instance: 22380aef-c725-43a0-a957-06ced9518c21] Detach interface failed, port_id=ca6c27e1-d55f-43cc-8dee-29ddc604baad, reason: Instance 22380aef-c725-43a0-a957-06ced9518c21 could not be found. {{(pid=62519) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11455}} [ 1552.614632] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a11c16fb-0ccc-461f-b6b1-6d8c9dbe34cb {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1552.614632] env[62519]: INFO nova.compute.manager [-] [instance: 22380aef-c725-43a0-a957-06ced9518c21] Took 1.65 seconds to deallocate network for instance. [ 1552.614632] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1188d602-4140-4134-8b32-f7ce185974df {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1552.614632] env[62519]: DEBUG oslo_vmware.api [None req-d5d00165-6954-4f25-bfe4-c298fa7922a8 tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] Waiting for the task: (returnval){ [ 1552.614632] env[62519]: value = "task-1802358" [ 1552.614632] env[62519]: _type = "Task" [ 1552.614632] env[62519]: } to complete. 
{{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1552.614632] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1925a9d2-28a9-4b63-9eaa-a464a6ae40d6 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1552.614632] env[62519]: DEBUG oslo_vmware.api [None req-d5d00165-6954-4f25-bfe4-c298fa7922a8 tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] Task: {'id': task-1802358, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1552.614632] env[62519]: DEBUG nova.compute.provider_tree [None req-7525886b-c35e-4300-bd7c-cb7c69519285 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Inventory has not changed in ProviderTree for provider: f8ca0d98-9158-4b85-ae0e-b106f966dd44 {{(pid=62519) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1552.652057] env[62519]: DEBUG oslo_vmware.api [None req-069fe864-a843-4aad-b4f0-161d16fbc86a tempest-ImagesOneServerTestJSON-1959164190 tempest-ImagesOneServerTestJSON-1959164190-project-member] Task: {'id': task-1802357, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.082924} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1552.652057] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-069fe864-a843-4aad-b4f0-161d16fbc86a tempest-ImagesOneServerTestJSON-1959164190 tempest-ImagesOneServerTestJSON-1959164190-project-member] [instance: 4e3dee19-b99a-4257-88da-1b0531e2c0f9] Extended root virtual disk {{(pid=62519) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1552.652057] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24d1c12d-9406-4760-9cd4-2981cb5432e1 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1552.677576] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-069fe864-a843-4aad-b4f0-161d16fbc86a tempest-ImagesOneServerTestJSON-1959164190 tempest-ImagesOneServerTestJSON-1959164190-project-member] [instance: 4e3dee19-b99a-4257-88da-1b0531e2c0f9] Reconfiguring VM instance instance-00000029 to attach disk [datastore1] 4e3dee19-b99a-4257-88da-1b0531e2c0f9/4e3dee19-b99a-4257-88da-1b0531e2c0f9.vmdk or device None with type sparse {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1552.678590] env[62519]: DEBUG nova.network.neutron [None req-d66de93a-aada-4f3a-931f-5f8ddbb5f1e6 tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] [instance: 24cb49c8-b2ef-4ede-aea6-6e34081beca1] Instance cache missing network info. 
{{(pid=62519) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1552.681525] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-bfc87845-3264-4cce-9674-856279d1eba6 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1552.702964] env[62519]: DEBUG oslo_vmware.api [None req-069fe864-a843-4aad-b4f0-161d16fbc86a tempest-ImagesOneServerTestJSON-1959164190 tempest-ImagesOneServerTestJSON-1959164190-project-member] Waiting for the task: (returnval){ [ 1552.702964] env[62519]: value = "task-1802359" [ 1552.702964] env[62519]: _type = "Task" [ 1552.702964] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1552.713493] env[62519]: DEBUG oslo_vmware.api [None req-069fe864-a843-4aad-b4f0-161d16fbc86a tempest-ImagesOneServerTestJSON-1959164190 tempest-ImagesOneServerTestJSON-1959164190-project-member] Task: {'id': task-1802359, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1552.790735] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a333444-4fc3-4bc9-92ea-b60c8175aed1 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1552.802632] env[62519]: DEBUG oslo_vmware.api [None req-51a5c574-31b5-4e20-8d04-338349c55e60 tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] Task: {'id': task-1802355, 'name': PowerOnVM_Task, 'duration_secs': 0.718728} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1552.804993] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-51a5c574-31b5-4e20-8d04-338349c55e60 tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] [instance: 8070aa59-3547-460a-b914-0e84620023d0] Powered on the VM {{(pid=62519) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1552.805250] env[62519]: INFO nova.compute.manager [None req-51a5c574-31b5-4e20-8d04-338349c55e60 tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] [instance: 8070aa59-3547-460a-b914-0e84620023d0] Took 11.15 seconds to spawn the instance on the hypervisor. 
[ 1552.805491] env[62519]: DEBUG nova.compute.manager [None req-51a5c574-31b5-4e20-8d04-338349c55e60 tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] [instance: 8070aa59-3547-460a-b914-0e84620023d0] Checking state {{(pid=62519) _get_power_state /opt/stack/nova/nova/compute/manager.py:1799}} [ 1552.805768] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-570a2219-1aaf-48cc-963b-0fa534407194 tempest-InstanceActionsTestJSON-366970633 tempest-InstanceActionsTestJSON-366970633-project-member] [instance: f19c860f-736a-4783-8ef5-8262040e53a3] Doing hard reboot of VM {{(pid=62519) reboot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1063}} [ 1552.807635] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86af7a3b-814e-456f-84ad-5b56e1f8ed41 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1552.809451] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ResetVM_Task with opID=oslo.vmware-e618c2b1-f693-4545-b1f0-9d05344dc266 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1552.820488] env[62519]: DEBUG oslo_vmware.api [None req-570a2219-1aaf-48cc-963b-0fa534407194 tempest-InstanceActionsTestJSON-366970633 tempest-InstanceActionsTestJSON-366970633-project-member] Waiting for the task: (returnval){ [ 1552.820488] env[62519]: value = "task-1802360" [ 1552.820488] env[62519]: _type = "Task" [ 1552.820488] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1552.831512] env[62519]: DEBUG oslo_vmware.api [None req-570a2219-1aaf-48cc-963b-0fa534407194 tempest-InstanceActionsTestJSON-366970633 tempest-InstanceActionsTestJSON-366970633-project-member] Task: {'id': task-1802360, 'name': ResetVM_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1552.852487] env[62519]: DEBUG nova.compute.utils [None req-075439ca-d8c3-416a-962f-d9ac15fdb3e2 tempest-VolumesAdminNegativeTest-694306887 tempest-VolumesAdminNegativeTest-694306887-project-member] Using /dev/sd instead of None {{(pid=62519) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1552.982893] env[62519]: DEBUG nova.network.neutron [None req-d66de93a-aada-4f3a-931f-5f8ddbb5f1e6 tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] [instance: 24cb49c8-b2ef-4ede-aea6-6e34081beca1] Updating instance_info_cache with network_info: [{"id": "0ac63864-3cdd-498e-b28b-054b97ccd1db", "address": "fa:16:3e:ba:0f:0f", "network": {"id": "4c7928be-c05e-40d8-8744-53752aa1c802", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-1081889914-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f69f08ab17cc423a98f0ae56f706c62b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c4712af2-45ef-4652-8d2c-482ec70056d0", "external-id": "nsx-vlan-transportzone-826", "segmentation_id": 826, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0ac63864-3c", "ovs_interfaceid": "0ac63864-3cdd-498e-b28b-054b97ccd1db", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1553.054352] env[62519]: DEBUG oslo_concurrency.lockutils [None req-d4f64c01-4859-459d-a49e-3e38bfd7e3ac tempest-ServerAddressesNegativeTestJSON-801003583 tempest-ServerAddressesNegativeTestJSON-801003583-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1553.058163] env[62519]: DEBUG nova.network.neutron [req-cac18f16-8c07-4910-83bf-b0b071494ba7 req-9175ebc7-e498-4b1c-b017-74eb8d949e00 service nova] [instance: 4e3dee19-b99a-4257-88da-1b0531e2c0f9] Updated VIF entry in instance network info cache for port d23f0b24-455b-4112-8518-1eaca05eb428. 
{{(pid=62519) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1553.058306] env[62519]: DEBUG nova.network.neutron [req-cac18f16-8c07-4910-83bf-b0b071494ba7 req-9175ebc7-e498-4b1c-b017-74eb8d949e00 service nova] [instance: 4e3dee19-b99a-4257-88da-1b0531e2c0f9] Updating instance_info_cache with network_info: [{"id": "d23f0b24-455b-4112-8518-1eaca05eb428", "address": "fa:16:3e:74:68:82", "network": {"id": "e5c6f3a0-57c8-45d8-b5d5-93fd51d9adc6", "bridge": "br-int", "label": "tempest-ImagesOneServerTestJSON-1890020553-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "10b805c99a6945f3bcc685e1c2f9a816", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "56b944d8-803d-43f2-945d-0f334ee4ea1c", "external-id": "nsx-vlan-transportzone-799", "segmentation_id": 799, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd23f0b24-45", "ovs_interfaceid": "d23f0b24-455b-4112-8518-1eaca05eb428", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1553.077151] env[62519]: DEBUG oslo_vmware.api [None req-d5d00165-6954-4f25-bfe4-c298fa7922a8 tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] Task: {'id': task-1802358, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1553.093570] env[62519]: DEBUG nova.scheduler.client.report [None req-7525886b-c35e-4300-bd7c-cb7c69519285 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Inventory has not changed for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 157, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1553.115821] env[62519]: DEBUG nova.compute.manager [req-5fae947c-bc3f-4c3e-a9c7-f30485b897c9 req-af1daaf4-077c-4021-853d-d1ad3e37a5f8 service nova] [instance: 3b506d10-a427-47b8-ab5f-c35e450b7eb1] Received event network-vif-deleted-cd903c9d-5093-4f0a-9439-683a3b25eef0 {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1553.116227] env[62519]: DEBUG nova.compute.manager [req-5fae947c-bc3f-4c3e-a9c7-f30485b897c9 req-af1daaf4-077c-4021-853d-d1ad3e37a5f8 service nova] [instance: 24cb49c8-b2ef-4ede-aea6-6e34081beca1] Received event network-vif-plugged-0ac63864-3cdd-498e-b28b-054b97ccd1db {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1553.116485] env[62519]: DEBUG oslo_concurrency.lockutils [req-5fae947c-bc3f-4c3e-a9c7-f30485b897c9 req-af1daaf4-077c-4021-853d-d1ad3e37a5f8 service nova] Acquiring lock "24cb49c8-b2ef-4ede-aea6-6e34081beca1-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1553.116787] env[62519]: DEBUG oslo_concurrency.lockutils [req-5fae947c-bc3f-4c3e-a9c7-f30485b897c9 req-af1daaf4-077c-4021-853d-d1ad3e37a5f8 service nova] Lock "24cb49c8-b2ef-4ede-aea6-6e34081beca1-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1553.117062] env[62519]: DEBUG oslo_concurrency.lockutils [req-5fae947c-bc3f-4c3e-a9c7-f30485b897c9 req-af1daaf4-077c-4021-853d-d1ad3e37a5f8 service nova] Lock "24cb49c8-b2ef-4ede-aea6-6e34081beca1-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1553.117379] env[62519]: DEBUG nova.compute.manager [req-5fae947c-bc3f-4c3e-a9c7-f30485b897c9 req-af1daaf4-077c-4021-853d-d1ad3e37a5f8 service nova] [instance: 24cb49c8-b2ef-4ede-aea6-6e34081beca1] No waiting events found dispatching network-vif-plugged-0ac63864-3cdd-498e-b28b-054b97ccd1db {{(pid=62519) pop_instance_event /opt/stack/nova/nova/compute/manager.py:323}} [ 1553.117620] env[62519]: WARNING nova.compute.manager [req-5fae947c-bc3f-4c3e-a9c7-f30485b897c9 req-af1daaf4-077c-4021-853d-d1ad3e37a5f8 service nova] [instance: 24cb49c8-b2ef-4ede-aea6-6e34081beca1] Received unexpected event network-vif-plugged-0ac63864-3cdd-498e-b28b-054b97ccd1db for instance with vm_state building and task_state spawning. 
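The inventory payload reported above is what the scheduler report client reconciles with placement; the usable capacity of each resource class follows (total - reserved) * allocation_ratio. A short worked sketch with the logged numbers (the helper name is only for illustration):

# Effective capacity per resource class, using the inventory reported above.
inventory = {
    "VCPU":      {"total": 48,     "reserved": 0,   "allocation_ratio": 4.0},
    "MEMORY_MB": {"total": 196590, "reserved": 512, "allocation_ratio": 1.0},
    "DISK_GB":   {"total": 400,    "reserved": 0,   "allocation_ratio": 1.0},
}

def capacity(inv: dict) -> float:
    # Placement-style capacity: (total - reserved) * allocation_ratio.
    return (inv["total"] - inv["reserved"]) * inv["allocation_ratio"]

for rc, inv in inventory.items():
    print(rc, capacity(inv))
# VCPU 192.0, MEMORY_MB 196078.0, DISK_GB 400.0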
[ 1553.117835] env[62519]: DEBUG nova.compute.manager [req-5fae947c-bc3f-4c3e-a9c7-f30485b897c9 req-af1daaf4-077c-4021-853d-d1ad3e37a5f8 service nova] [instance: 24cb49c8-b2ef-4ede-aea6-6e34081beca1] Received event network-changed-0ac63864-3cdd-498e-b28b-054b97ccd1db {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1553.118044] env[62519]: DEBUG nova.compute.manager [req-5fae947c-bc3f-4c3e-a9c7-f30485b897c9 req-af1daaf4-077c-4021-853d-d1ad3e37a5f8 service nova] [instance: 24cb49c8-b2ef-4ede-aea6-6e34081beca1] Refreshing instance network info cache due to event network-changed-0ac63864-3cdd-498e-b28b-054b97ccd1db. {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11628}} [ 1553.118244] env[62519]: DEBUG oslo_concurrency.lockutils [req-5fae947c-bc3f-4c3e-a9c7-f30485b897c9 req-af1daaf4-077c-4021-853d-d1ad3e37a5f8 service nova] Acquiring lock "refresh_cache-24cb49c8-b2ef-4ede-aea6-6e34081beca1" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1553.212367] env[62519]: DEBUG oslo_vmware.api [None req-069fe864-a843-4aad-b4f0-161d16fbc86a tempest-ImagesOneServerTestJSON-1959164190 tempest-ImagesOneServerTestJSON-1959164190-project-member] Task: {'id': task-1802359, 'name': ReconfigVM_Task, 'duration_secs': 0.482272} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1553.212730] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-069fe864-a843-4aad-b4f0-161d16fbc86a tempest-ImagesOneServerTestJSON-1959164190 tempest-ImagesOneServerTestJSON-1959164190-project-member] [instance: 4e3dee19-b99a-4257-88da-1b0531e2c0f9] Reconfigured VM instance instance-00000029 to attach disk [datastore1] 4e3dee19-b99a-4257-88da-1b0531e2c0f9/4e3dee19-b99a-4257-88da-1b0531e2c0f9.vmdk or device None with type sparse {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1553.213417] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-2347cc61-e2f3-40e7-ae61-d3548e81d31f {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1553.220080] env[62519]: DEBUG oslo_vmware.api [None req-069fe864-a843-4aad-b4f0-161d16fbc86a tempest-ImagesOneServerTestJSON-1959164190 tempest-ImagesOneServerTestJSON-1959164190-project-member] Waiting for the task: (returnval){ [ 1553.220080] env[62519]: value = "task-1802361" [ 1553.220080] env[62519]: _type = "Task" [ 1553.220080] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1553.228900] env[62519]: DEBUG oslo_vmware.api [None req-069fe864-a843-4aad-b4f0-161d16fbc86a tempest-ImagesOneServerTestJSON-1959164190 tempest-ImagesOneServerTestJSON-1959164190-project-member] Task: {'id': task-1802361, 'name': Rename_Task} progress is 5%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1553.334426] env[62519]: INFO nova.compute.manager [None req-51a5c574-31b5-4e20-8d04-338349c55e60 tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] [instance: 8070aa59-3547-460a-b914-0e84620023d0] Took 47.72 seconds to build instance. 
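The lockutils entries throughout this trace record, for each named lock, how long the caller waited to acquire it and how long it was held. A minimal timing wrapper in the same spirit, built on threading.Lock rather than the real oslo.concurrency internals:

import threading
import time
from contextlib import contextmanager

_locks: dict[str, threading.Lock] = {}
_registry_guard = threading.Lock()

@contextmanager
def timed_lock(name: str):
    """Acquire a named lock and report waited/held times, echoing the
    'acquired ... waited' / '"released" ... held' entries in this trace."""
    with _registry_guard:
        lock = _locks.setdefault(name, threading.Lock())
    start = time.monotonic()
    lock.acquire()
    acquired = time.monotonic()
    print(f'Lock "{name}" acquired :: waited {acquired - start:.3f}s')
    try:
        yield
    finally:
        lock.release()
        print(f'Lock "{name}" "released" :: held {time.monotonic() - acquired:.3f}s')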
[ 1553.343844] env[62519]: DEBUG oslo_vmware.api [None req-570a2219-1aaf-48cc-963b-0fa534407194 tempest-InstanceActionsTestJSON-366970633 tempest-InstanceActionsTestJSON-366970633-project-member] Task: {'id': task-1802360, 'name': ResetVM_Task, 'duration_secs': 0.103217} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1553.345526] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-570a2219-1aaf-48cc-963b-0fa534407194 tempest-InstanceActionsTestJSON-366970633 tempest-InstanceActionsTestJSON-366970633-project-member] [instance: f19c860f-736a-4783-8ef5-8262040e53a3] Did hard reboot of VM {{(pid=62519) reboot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1067}} [ 1553.346943] env[62519]: DEBUG nova.compute.manager [None req-570a2219-1aaf-48cc-963b-0fa534407194 tempest-InstanceActionsTestJSON-366970633 tempest-InstanceActionsTestJSON-366970633-project-member] [instance: f19c860f-736a-4783-8ef5-8262040e53a3] Checking state {{(pid=62519) _get_power_state /opt/stack/nova/nova/compute/manager.py:1799}} [ 1553.348028] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f1a7847-eb13-43eb-94f2-b72ec0a9d233 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1553.361517] env[62519]: DEBUG oslo_concurrency.lockutils [None req-075439ca-d8c3-416a-962f-d9ac15fdb3e2 tempest-VolumesAdminNegativeTest-694306887 tempest-VolumesAdminNegativeTest-694306887-project-member] Lock "c61c893f-826b-4874-b253-de6fbffa9e5a" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.014s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1553.486931] env[62519]: DEBUG oslo_concurrency.lockutils [None req-d66de93a-aada-4f3a-931f-5f8ddbb5f1e6 tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] Releasing lock "refresh_cache-24cb49c8-b2ef-4ede-aea6-6e34081beca1" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1553.487837] env[62519]: DEBUG nova.compute.manager [None req-d66de93a-aada-4f3a-931f-5f8ddbb5f1e6 tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] [instance: 24cb49c8-b2ef-4ede-aea6-6e34081beca1] Instance network_info: |[{"id": "0ac63864-3cdd-498e-b28b-054b97ccd1db", "address": "fa:16:3e:ba:0f:0f", "network": {"id": "4c7928be-c05e-40d8-8744-53752aa1c802", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-1081889914-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f69f08ab17cc423a98f0ae56f706c62b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c4712af2-45ef-4652-8d2c-482ec70056d0", "external-id": "nsx-vlan-transportzone-826", "segmentation_id": 826, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0ac63864-3c", "ovs_interfaceid": "0ac63864-3cdd-498e-b28b-054b97ccd1db", "qbh_params": null, "qbg_params": 
null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62519) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2004}} [ 1553.489458] env[62519]: DEBUG oslo_concurrency.lockutils [req-5fae947c-bc3f-4c3e-a9c7-f30485b897c9 req-af1daaf4-077c-4021-853d-d1ad3e37a5f8 service nova] Acquired lock "refresh_cache-24cb49c8-b2ef-4ede-aea6-6e34081beca1" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1553.489458] env[62519]: DEBUG nova.network.neutron [req-5fae947c-bc3f-4c3e-a9c7-f30485b897c9 req-af1daaf4-077c-4021-853d-d1ad3e37a5f8 service nova] [instance: 24cb49c8-b2ef-4ede-aea6-6e34081beca1] Refreshing network info cache for port 0ac63864-3cdd-498e-b28b-054b97ccd1db {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1553.492497] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-d66de93a-aada-4f3a-931f-5f8ddbb5f1e6 tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] [instance: 24cb49c8-b2ef-4ede-aea6-6e34081beca1] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ba:0f:0f', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c4712af2-45ef-4652-8d2c-482ec70056d0', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '0ac63864-3cdd-498e-b28b-054b97ccd1db', 'vif_model': 'vmxnet3'}] {{(pid=62519) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1553.503355] env[62519]: DEBUG oslo.service.loopingcall [None req-d66de93a-aada-4f3a-931f-5f8ddbb5f1e6 tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62519) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1553.504298] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 24cb49c8-b2ef-4ede-aea6-6e34081beca1] Creating VM on the ESX host {{(pid=62519) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1553.504545] env[62519]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-ea37e024-3ab3-4354-b1b1-f43fb0dba082 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1553.527636] env[62519]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1553.527636] env[62519]: value = "task-1802362" [ 1553.527636] env[62519]: _type = "Task" [ 1553.527636] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1553.537679] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1802362, 'name': CreateVM_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1553.566849] env[62519]: DEBUG oslo_vmware.rw_handles [None req-4cfc95a6-4681-435a-a83c-71f03554060d tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52b489a0-c28f-4c20-f852-e0fdaea03b98/disk-0.vmdk. 
{{(pid=62519) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1553.567398] env[62519]: DEBUG oslo_concurrency.lockutils [req-cac18f16-8c07-4910-83bf-b0b071494ba7 req-9175ebc7-e498-4b1c-b017-74eb8d949e00 service nova] Releasing lock "refresh_cache-4e3dee19-b99a-4257-88da-1b0531e2c0f9" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1553.568646] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65857b81-2cfb-4a73-aec1-bcad51c8124d {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1553.574353] env[62519]: DEBUG oslo_vmware.api [None req-d5d00165-6954-4f25-bfe4-c298fa7922a8 tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] Task: {'id': task-1802358, 'name': PowerOnVM_Task, 'duration_secs': 0.702943} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1553.575996] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-d5d00165-6954-4f25-bfe4-c298fa7922a8 tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] [instance: 27f9e890-4733-43aa-9bf1-351d42d75418] Powered on the VM {{(pid=62519) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1553.576355] env[62519]: INFO nova.compute.manager [None req-d5d00165-6954-4f25-bfe4-c298fa7922a8 tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] [instance: 27f9e890-4733-43aa-9bf1-351d42d75418] Took 9.17 seconds to spawn the instance on the hypervisor. 
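The instance_info_cache updates above carry the full Neutron network_info model as a list of nested VIF dicts. A small sketch that pulls out the fields most often needed (port id, MAC, bridge, fixed IPs, MTU); the example dict is trimmed from the 24cb49c8-b2ef-4ede-aea6-6e34081beca1 entry logged above:

# Trimmed copy of one network_info VIF entry from the instance cache above.
vif = {
    "id": "0ac63864-3cdd-498e-b28b-054b97ccd1db",
    "address": "fa:16:3e:ba:0f:0f",
    "type": "ovs",
    "devname": "tap0ac63864-3c",
    "network": {
        "id": "4c7928be-c05e-40d8-8744-53752aa1c802",
        "bridge": "br-int",
        "subnets": [{
            "cidr": "192.168.128.0/28",
            "gateway": {"address": "192.168.128.1"},
            "ips": [{"address": "192.168.128.12", "type": "fixed"}],
        }],
        "meta": {"mtu": 8950},
    },
}

def summarize_vif(vif: dict) -> dict:
    """Flatten the nested VIF model into the handful of fields usually needed."""
    fixed_ips = [
        ip["address"]
        for subnet in vif["network"]["subnets"]
        for ip in subnet["ips"]
        if ip.get("type") == "fixed"
    ]
    return {
        "port_id": vif["id"],
        "mac": vif["address"],
        "bridge": vif["network"]["bridge"],
        "fixed_ips": fixed_ips,
        "mtu": vif["network"]["meta"]["mtu"],
    }

print(summarize_vif(vif))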
[ 1553.576579] env[62519]: DEBUG nova.compute.manager [None req-d5d00165-6954-4f25-bfe4-c298fa7922a8 tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] [instance: 27f9e890-4733-43aa-9bf1-351d42d75418] Checking state {{(pid=62519) _get_power_state /opt/stack/nova/nova/compute/manager.py:1799}} [ 1553.581443] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75c0d437-2255-4702-b2fb-981b49a48a77 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1553.584547] env[62519]: DEBUG oslo_concurrency.lockutils [None req-76861171-414a-4622-8909-66c97ea75102 tempest-ServersListShow296Test-771773119 tempest-ServersListShow296Test-771773119-project-member] Acquiring lock "c28254d5-90ec-421a-b7a5-5b6f16cb9268" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1553.585348] env[62519]: DEBUG oslo_concurrency.lockutils [None req-76861171-414a-4622-8909-66c97ea75102 tempest-ServersListShow296Test-771773119 tempest-ServersListShow296Test-771773119-project-member] Lock "c28254d5-90ec-421a-b7a5-5b6f16cb9268" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1553.585348] env[62519]: DEBUG oslo_vmware.rw_handles [None req-4cfc95a6-4681-435a-a83c-71f03554060d tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52b489a0-c28f-4c20-f852-e0fdaea03b98/disk-0.vmdk is in state: ready. {{(pid=62519) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1553.585348] env[62519]: ERROR oslo_vmware.rw_handles [None req-4cfc95a6-4681-435a-a83c-71f03554060d tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] Aborting lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52b489a0-c28f-4c20-f852-e0fdaea03b98/disk-0.vmdk due to incomplete transfer. [ 1553.585658] env[62519]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-58417150-3e3c-47e9-989e-17249d671152 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1553.595577] env[62519]: DEBUG oslo_concurrency.lockutils [None req-7525886b-c35e-4300-bd7c-cb7c69519285 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.821s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1553.596940] env[62519]: DEBUG nova.compute.manager [None req-7525886b-c35e-4300-bd7c-cb7c69519285 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] [instance: d8780c40-0099-4ccc-84ae-72fbb14fa1ee] Start building networks asynchronously for instance. 
{{(pid=62519) _build_resources /opt/stack/nova/nova/compute/manager.py:2838}} [ 1553.598868] env[62519]: DEBUG oslo_vmware.rw_handles [None req-4cfc95a6-4681-435a-a83c-71f03554060d tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] Closed VMDK read handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52b489a0-c28f-4c20-f852-e0fdaea03b98/disk-0.vmdk. {{(pid=62519) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1553.599194] env[62519]: DEBUG nova.virt.vmwareapi.images [None req-4cfc95a6-4681-435a-a83c-71f03554060d tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] [instance: 2b68e95a-1d93-43ee-b0a6-996c9042f5c7] Uploaded image 59def757-f227-4fcd-966f-6e6296b8e4b8 to the Glance image server {{(pid=62519) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1553.600785] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-4cfc95a6-4681-435a-a83c-71f03554060d tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] [instance: 2b68e95a-1d93-43ee-b0a6-996c9042f5c7] Destroying the VM {{(pid=62519) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1553.601508] env[62519]: DEBUG oslo_concurrency.lockutils [None req-6a3f141b-7806-43a2-8afd-6877e3266010 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 31.575s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1553.602956] env[62519]: INFO nova.compute.claims [None req-6a3f141b-7806-43a2-8afd-6877e3266010 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] [instance: a1551278-a306-4534-8d8d-3b3a003dde04] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1553.606298] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-3a4bd093-f335-4eb2-9f08-8b010cf8d043 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1553.614118] env[62519]: DEBUG oslo_vmware.api [None req-4cfc95a6-4681-435a-a83c-71f03554060d tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] Waiting for the task: (returnval){ [ 1553.614118] env[62519]: value = "task-1802363" [ 1553.614118] env[62519]: _type = "Task" [ 1553.614118] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1553.623752] env[62519]: DEBUG oslo_vmware.api [None req-4cfc95a6-4681-435a-a83c-71f03554060d tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] Task: {'id': task-1802363, 'name': Destroy_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1553.731851] env[62519]: DEBUG oslo_vmware.api [None req-069fe864-a843-4aad-b4f0-161d16fbc86a tempest-ImagesOneServerTestJSON-1959164190 tempest-ImagesOneServerTestJSON-1959164190-project-member] Task: {'id': task-1802361, 'name': Rename_Task, 'duration_secs': 0.191786} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1553.732684] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-069fe864-a843-4aad-b4f0-161d16fbc86a tempest-ImagesOneServerTestJSON-1959164190 tempest-ImagesOneServerTestJSON-1959164190-project-member] [instance: 4e3dee19-b99a-4257-88da-1b0531e2c0f9] Powering on the VM {{(pid=62519) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1553.733456] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-fb5d9ebe-24c7-416f-a268-78623e585fde {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1553.740634] env[62519]: DEBUG oslo_vmware.api [None req-069fe864-a843-4aad-b4f0-161d16fbc86a tempest-ImagesOneServerTestJSON-1959164190 tempest-ImagesOneServerTestJSON-1959164190-project-member] Waiting for the task: (returnval){ [ 1553.740634] env[62519]: value = "task-1802364" [ 1553.740634] env[62519]: _type = "Task" [ 1553.740634] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1553.750813] env[62519]: DEBUG oslo_vmware.api [None req-069fe864-a843-4aad-b4f0-161d16fbc86a tempest-ImagesOneServerTestJSON-1959164190 tempest-ImagesOneServerTestJSON-1959164190-project-member] Task: {'id': task-1802364, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1553.862942] env[62519]: DEBUG oslo_concurrency.lockutils [None req-570a2219-1aaf-48cc-963b-0fa534407194 tempest-InstanceActionsTestJSON-366970633 tempest-InstanceActionsTestJSON-366970633-project-member] Lock "f19c860f-736a-4783-8ef5-8262040e53a3" "released" by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" :: held 4.633s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1554.040711] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1802362, 'name': CreateVM_Task} progress is 25%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1554.106654] env[62519]: DEBUG nova.compute.utils [None req-7525886b-c35e-4300-bd7c-cb7c69519285 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Using /dev/sd instead of None {{(pid=62519) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1554.112166] env[62519]: DEBUG nova.compute.manager [None req-7525886b-c35e-4300-bd7c-cb7c69519285 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] [instance: d8780c40-0099-4ccc-84ae-72fbb14fa1ee] Allocating IP information in the background. 
{{(pid=62519) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1989}} [ 1554.112166] env[62519]: DEBUG nova.network.neutron [None req-7525886b-c35e-4300-bd7c-cb7c69519285 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] [instance: d8780c40-0099-4ccc-84ae-72fbb14fa1ee] allocate_for_instance() {{(pid=62519) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1554.113703] env[62519]: INFO nova.compute.manager [None req-d5d00165-6954-4f25-bfe4-c298fa7922a8 tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] [instance: 27f9e890-4733-43aa-9bf1-351d42d75418] Took 44.51 seconds to build instance. [ 1554.123339] env[62519]: DEBUG oslo_vmware.api [None req-4cfc95a6-4681-435a-a83c-71f03554060d tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] Task: {'id': task-1802363, 'name': Destroy_Task, 'duration_secs': 0.417689} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1554.124474] env[62519]: INFO nova.virt.vmwareapi.vm_util [None req-4cfc95a6-4681-435a-a83c-71f03554060d tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] [instance: 2b68e95a-1d93-43ee-b0a6-996c9042f5c7] Destroyed the VM [ 1554.124735] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-4cfc95a6-4681-435a-a83c-71f03554060d tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] [instance: 2b68e95a-1d93-43ee-b0a6-996c9042f5c7] Deleting Snapshot of the VM instance {{(pid=62519) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1554.124998] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-d71a34b3-b5c0-4a96-a007-21ef7d8d6728 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1554.133524] env[62519]: DEBUG oslo_vmware.api [None req-4cfc95a6-4681-435a-a83c-71f03554060d tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] Waiting for the task: (returnval){ [ 1554.133524] env[62519]: value = "task-1802366" [ 1554.133524] env[62519]: _type = "Task" [ 1554.133524] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1554.142101] env[62519]: DEBUG oslo_vmware.api [None req-4cfc95a6-4681-435a-a83c-71f03554060d tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] Task: {'id': task-1802366, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1554.211139] env[62519]: DEBUG nova.policy [None req-7525886b-c35e-4300-bd7c-cb7c69519285 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b04fa80750184b97a16ec1880e0a585c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '683d60927fdf424386ffcfaa344a7af6', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62519) authorize /opt/stack/nova/nova/policy.py:192}} [ 1554.253657] env[62519]: DEBUG oslo_vmware.api [None req-069fe864-a843-4aad-b4f0-161d16fbc86a tempest-ImagesOneServerTestJSON-1959164190 tempest-ImagesOneServerTestJSON-1959164190-project-member] Task: {'id': task-1802364, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1554.355364] env[62519]: DEBUG nova.network.neutron [req-5fae947c-bc3f-4c3e-a9c7-f30485b897c9 req-af1daaf4-077c-4021-853d-d1ad3e37a5f8 service nova] [instance: 24cb49c8-b2ef-4ede-aea6-6e34081beca1] Updated VIF entry in instance network info cache for port 0ac63864-3cdd-498e-b28b-054b97ccd1db. {{(pid=62519) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1554.355744] env[62519]: DEBUG nova.network.neutron [req-5fae947c-bc3f-4c3e-a9c7-f30485b897c9 req-af1daaf4-077c-4021-853d-d1ad3e37a5f8 service nova] [instance: 24cb49c8-b2ef-4ede-aea6-6e34081beca1] Updating instance_info_cache with network_info: [{"id": "0ac63864-3cdd-498e-b28b-054b97ccd1db", "address": "fa:16:3e:ba:0f:0f", "network": {"id": "4c7928be-c05e-40d8-8744-53752aa1c802", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-1081889914-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f69f08ab17cc423a98f0ae56f706c62b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c4712af2-45ef-4652-8d2c-482ec70056d0", "external-id": "nsx-vlan-transportzone-826", "segmentation_id": 826, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0ac63864-3c", "ovs_interfaceid": "0ac63864-3cdd-498e-b28b-054b97ccd1db", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1554.434315] env[62519]: DEBUG oslo_concurrency.lockutils [None req-075439ca-d8c3-416a-962f-d9ac15fdb3e2 tempest-VolumesAdminNegativeTest-694306887 tempest-VolumesAdminNegativeTest-694306887-project-member] Acquiring lock "c61c893f-826b-4874-b253-de6fbffa9e5a" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=62519) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1554.434690] env[62519]: DEBUG oslo_concurrency.lockutils [None req-075439ca-d8c3-416a-962f-d9ac15fdb3e2 tempest-VolumesAdminNegativeTest-694306887 tempest-VolumesAdminNegativeTest-694306887-project-member] Lock "c61c893f-826b-4874-b253-de6fbffa9e5a" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.001s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1554.434988] env[62519]: INFO nova.compute.manager [None req-075439ca-d8c3-416a-962f-d9ac15fdb3e2 tempest-VolumesAdminNegativeTest-694306887 tempest-VolumesAdminNegativeTest-694306887-project-member] [instance: c61c893f-826b-4874-b253-de6fbffa9e5a] Attaching volume 158562d2-c9d2-4053-924e-e9c2c4cb6e65 to /dev/sdb [ 1554.478884] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe784dc3-ac19-45d6-bb37-6d9b2b650ef6 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1554.485902] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-041051af-ad0e-4bd4-b15d-6e6a3d81e3ef {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1554.499786] env[62519]: DEBUG nova.virt.block_device [None req-075439ca-d8c3-416a-962f-d9ac15fdb3e2 tempest-VolumesAdminNegativeTest-694306887 tempest-VolumesAdminNegativeTest-694306887-project-member] [instance: c61c893f-826b-4874-b253-de6fbffa9e5a] Updating existing volume attachment record: 944c6eef-1bb9-47d5-a383-e89a64eabc2f {{(pid=62519) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1554.545829] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1802362, 'name': CreateVM_Task, 'duration_secs': 0.66797} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1554.545829] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 24cb49c8-b2ef-4ede-aea6-6e34081beca1] Created VM on the ESX host {{(pid=62519) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1554.545829] env[62519]: DEBUG oslo_concurrency.lockutils [None req-d66de93a-aada-4f3a-931f-5f8ddbb5f1e6 tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1554.545829] env[62519]: DEBUG oslo_concurrency.lockutils [None req-d66de93a-aada-4f3a-931f-5f8ddbb5f1e6 tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] Acquired lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1554.545829] env[62519]: DEBUG oslo_concurrency.lockutils [None req-d66de93a-aada-4f3a-931f-5f8ddbb5f1e6 tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1554.545829] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2995bd05-71b9-4add-802f-c3413020bc0d {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1554.550100] env[62519]: DEBUG oslo_vmware.api [None req-d66de93a-aada-4f3a-931f-5f8ddbb5f1e6 tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] Waiting for the task: (returnval){ [ 1554.550100] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]52ff7b2f-700e-f143-5fdc-ca9ddb76a0a9" [ 1554.550100] env[62519]: _type = "Task" [ 1554.550100] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1554.561194] env[62519]: DEBUG oslo_vmware.api [None req-d66de93a-aada-4f3a-931f-5f8ddbb5f1e6 tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52ff7b2f-700e-f143-5fdc-ca9ddb76a0a9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1554.577962] env[62519]: DEBUG nova.network.neutron [None req-7525886b-c35e-4300-bd7c-cb7c69519285 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] [instance: d8780c40-0099-4ccc-84ae-72fbb14fa1ee] Successfully created port: d13d36cf-2bb2-4dde-a6de-51669dc83351 {{(pid=62519) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1554.615951] env[62519]: DEBUG nova.compute.manager [None req-7525886b-c35e-4300-bd7c-cb7c69519285 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] [instance: d8780c40-0099-4ccc-84ae-72fbb14fa1ee] Start building block device mappings for instance. 
{{(pid=62519) _build_resources /opt/stack/nova/nova/compute/manager.py:2873}} [ 1554.656883] env[62519]: DEBUG oslo_vmware.api [None req-4cfc95a6-4681-435a-a83c-71f03554060d tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] Task: {'id': task-1802366, 'name': RemoveSnapshot_Task} progress is 36%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1554.753052] env[62519]: DEBUG oslo_vmware.api [None req-069fe864-a843-4aad-b4f0-161d16fbc86a tempest-ImagesOneServerTestJSON-1959164190 tempest-ImagesOneServerTestJSON-1959164190-project-member] Task: {'id': task-1802364, 'name': PowerOnVM_Task, 'duration_secs': 0.891765} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1554.753052] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-069fe864-a843-4aad-b4f0-161d16fbc86a tempest-ImagesOneServerTestJSON-1959164190 tempest-ImagesOneServerTestJSON-1959164190-project-member] [instance: 4e3dee19-b99a-4257-88da-1b0531e2c0f9] Powered on the VM {{(pid=62519) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1554.753052] env[62519]: INFO nova.compute.manager [None req-069fe864-a843-4aad-b4f0-161d16fbc86a tempest-ImagesOneServerTestJSON-1959164190 tempest-ImagesOneServerTestJSON-1959164190-project-member] [instance: 4e3dee19-b99a-4257-88da-1b0531e2c0f9] Took 7.24 seconds to spawn the instance on the hypervisor. [ 1554.753403] env[62519]: DEBUG nova.compute.manager [None req-069fe864-a843-4aad-b4f0-161d16fbc86a tempest-ImagesOneServerTestJSON-1959164190 tempest-ImagesOneServerTestJSON-1959164190-project-member] [instance: 4e3dee19-b99a-4257-88da-1b0531e2c0f9] Checking state {{(pid=62519) _get_power_state /opt/stack/nova/nova/compute/manager.py:1799}} [ 1554.755033] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fec6275f-0e53-49e5-ba99-85d5f0a7fabb {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1554.851837] env[62519]: DEBUG oslo_concurrency.lockutils [None req-51a5c574-31b5-4e20-8d04-338349c55e60 tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] Lock "8070aa59-3547-460a-b914-0e84620023d0" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 102.431s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1554.858335] env[62519]: DEBUG oslo_concurrency.lockutils [req-5fae947c-bc3f-4c3e-a9c7-f30485b897c9 req-af1daaf4-077c-4021-853d-d1ad3e37a5f8 service nova] Releasing lock "refresh_cache-24cb49c8-b2ef-4ede-aea6-6e34081beca1" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1555.063204] env[62519]: DEBUG oslo_vmware.api [None req-d66de93a-aada-4f3a-931f-5f8ddbb5f1e6 tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52ff7b2f-700e-f143-5fdc-ca9ddb76a0a9, 'name': SearchDatastore_Task, 'duration_secs': 0.036087} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1555.063616] env[62519]: DEBUG oslo_concurrency.lockutils [None req-d66de93a-aada-4f3a-931f-5f8ddbb5f1e6 tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] Releasing lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1555.064671] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-d66de93a-aada-4f3a-931f-5f8ddbb5f1e6 tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] [instance: 24cb49c8-b2ef-4ede-aea6-6e34081beca1] Processing image 15793716-f1d9-4a86-9030-717adf498693 {{(pid=62519) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1555.064671] env[62519]: DEBUG oslo_concurrency.lockutils [None req-d66de93a-aada-4f3a-931f-5f8ddbb5f1e6 tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1555.064671] env[62519]: DEBUG oslo_concurrency.lockutils [None req-d66de93a-aada-4f3a-931f-5f8ddbb5f1e6 tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] Acquired lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1555.064671] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-d66de93a-aada-4f3a-931f-5f8ddbb5f1e6 tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62519) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1555.064859] env[62519]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c69a7be8-584c-43d2-b14d-08d1d85c3930 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1555.075854] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-d66de93a-aada-4f3a-931f-5f8ddbb5f1e6 tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62519) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1555.077363] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-d66de93a-aada-4f3a-931f-5f8ddbb5f1e6 tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62519) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1555.078031] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b444d501-4b91-42b2-a00b-bffe575b79c7 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1555.083819] env[62519]: DEBUG oslo_vmware.api [None req-d66de93a-aada-4f3a-931f-5f8ddbb5f1e6 tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] Waiting for the task: (returnval){ [ 1555.083819] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]52dbd6f3-d795-cd09-6938-0f6831b0c32c" [ 1555.083819] env[62519]: _type = "Task" [ 1555.083819] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1555.092524] env[62519]: DEBUG oslo_vmware.api [None req-d66de93a-aada-4f3a-931f-5f8ddbb5f1e6 tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52dbd6f3-d795-cd09-6938-0f6831b0c32c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1555.139642] env[62519]: DEBUG oslo_concurrency.lockutils [None req-852acb1c-a919-4af2-9465-5ce6a7ca4001 tempest-TenantUsagesTestJSON-1732852836 tempest-TenantUsagesTestJSON-1732852836-project-member] Acquiring lock "540167be-5295-4e28-9b25-16317746dd0e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1555.139881] env[62519]: DEBUG oslo_concurrency.lockutils [None req-852acb1c-a919-4af2-9465-5ce6a7ca4001 tempest-TenantUsagesTestJSON-1732852836 tempest-TenantUsagesTestJSON-1732852836-project-member] Lock "540167be-5295-4e28-9b25-16317746dd0e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1555.152281] env[62519]: DEBUG oslo_vmware.api [None req-4cfc95a6-4681-435a-a83c-71f03554060d tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] Task: {'id': task-1802366, 'name': RemoveSnapshot_Task, 'duration_secs': 0.614672} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1555.152646] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-4cfc95a6-4681-435a-a83c-71f03554060d tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] [instance: 2b68e95a-1d93-43ee-b0a6-996c9042f5c7] Deleted Snapshot of the VM instance {{(pid=62519) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1555.152916] env[62519]: INFO nova.compute.manager [None req-4cfc95a6-4681-435a-a83c-71f03554060d tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] [instance: 2b68e95a-1d93-43ee-b0a6-996c9042f5c7] Took 16.80 seconds to snapshot the instance on the hypervisor. 
[ 1555.278409] env[62519]: INFO nova.compute.manager [None req-069fe864-a843-4aad-b4f0-161d16fbc86a tempest-ImagesOneServerTestJSON-1959164190 tempest-ImagesOneServerTestJSON-1959164190-project-member] [instance: 4e3dee19-b99a-4257-88da-1b0531e2c0f9] Took 45.58 seconds to build instance. [ 1555.317946] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7bc6410f-8a70-4874-91f4-8276573c6bf6 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1555.325676] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0cdee19-d0ba-48a5-9f2e-27c93a5169d7 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1555.358782] env[62519]: DEBUG nova.compute.manager [None req-040512d1-e180-4bd4-99a9-22e550a80706 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] [instance: 80ef3fd4-b9ef-4fd2-a991-feec78a0c81d] Starting instance... {{(pid=62519) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2441}} [ 1555.365031] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dceb2da2-a43d-4ae3-9270-4a7e5110b2d6 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1555.371270] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3de0f04a-2aa6-4a86-9bb9-43000f1ee650 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1555.385774] env[62519]: DEBUG nova.compute.provider_tree [None req-6a3f141b-7806-43a2-8afd-6877e3266010 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Inventory has not changed in ProviderTree for provider: f8ca0d98-9158-4b85-ae0e-b106f966dd44 {{(pid=62519) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1555.507561] env[62519]: DEBUG oslo_concurrency.lockutils [None req-428b10c4-d0cd-466c-a6ce-0e461fcf51cf tempest-InstanceActionsTestJSON-366970633 tempest-InstanceActionsTestJSON-366970633-project-member] Acquiring lock "f19c860f-736a-4783-8ef5-8262040e53a3" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1555.507852] env[62519]: DEBUG oslo_concurrency.lockutils [None req-428b10c4-d0cd-466c-a6ce-0e461fcf51cf tempest-InstanceActionsTestJSON-366970633 tempest-InstanceActionsTestJSON-366970633-project-member] Lock "f19c860f-736a-4783-8ef5-8262040e53a3" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1555.508514] env[62519]: DEBUG oslo_concurrency.lockutils [None req-428b10c4-d0cd-466c-a6ce-0e461fcf51cf tempest-InstanceActionsTestJSON-366970633 tempest-InstanceActionsTestJSON-366970633-project-member] Acquiring lock "f19c860f-736a-4783-8ef5-8262040e53a3-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1555.508717] env[62519]: DEBUG oslo_concurrency.lockutils [None 
req-428b10c4-d0cd-466c-a6ce-0e461fcf51cf tempest-InstanceActionsTestJSON-366970633 tempest-InstanceActionsTestJSON-366970633-project-member] Lock "f19c860f-736a-4783-8ef5-8262040e53a3-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1555.508889] env[62519]: DEBUG oslo_concurrency.lockutils [None req-428b10c4-d0cd-466c-a6ce-0e461fcf51cf tempest-InstanceActionsTestJSON-366970633 tempest-InstanceActionsTestJSON-366970633-project-member] Lock "f19c860f-736a-4783-8ef5-8262040e53a3-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1555.511613] env[62519]: INFO nova.compute.manager [None req-428b10c4-d0cd-466c-a6ce-0e461fcf51cf tempest-InstanceActionsTestJSON-366970633 tempest-InstanceActionsTestJSON-366970633-project-member] [instance: f19c860f-736a-4783-8ef5-8262040e53a3] Terminating instance [ 1555.597213] env[62519]: DEBUG oslo_vmware.api [None req-d66de93a-aada-4f3a-931f-5f8ddbb5f1e6 tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52dbd6f3-d795-cd09-6938-0f6831b0c32c, 'name': SearchDatastore_Task, 'duration_secs': 0.012183} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1555.598106] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-94d134c8-5990-4340-a5d0-ad4d6e1f5ce5 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1555.603666] env[62519]: DEBUG oslo_vmware.api [None req-d66de93a-aada-4f3a-931f-5f8ddbb5f1e6 tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] Waiting for the task: (returnval){ [ 1555.603666] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]520bb158-77a1-057b-977f-75caa81a50fd" [ 1555.603666] env[62519]: _type = "Task" [ 1555.603666] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1555.611996] env[62519]: DEBUG oslo_vmware.api [None req-d66de93a-aada-4f3a-931f-5f8ddbb5f1e6 tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]520bb158-77a1-057b-977f-75caa81a50fd, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1555.630799] env[62519]: DEBUG nova.compute.manager [None req-7525886b-c35e-4300-bd7c-cb7c69519285 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] [instance: d8780c40-0099-4ccc-84ae-72fbb14fa1ee] Start spawning the instance on the hypervisor. 
{{(pid=62519) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2647}} [ 1555.633274] env[62519]: DEBUG oslo_concurrency.lockutils [None req-d5d00165-6954-4f25-bfe4-c298fa7922a8 tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] Lock "27f9e890-4733-43aa-9bf1-351d42d75418" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 102.782s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1555.659668] env[62519]: DEBUG nova.virt.hardware [None req-7525886b-c35e-4300-bd7c-cb7c69519285 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T07:56:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T07:55:55Z,direct_url=,disk_format='vmdk',id=15793716-f1d9-4a86-9030-717adf498693,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4069337684d348e0a1ab4eb6a1a2b14d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T07:55:56Z,virtual_size=,visibility=), allow threads: False {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1555.659783] env[62519]: DEBUG nova.virt.hardware [None req-7525886b-c35e-4300-bd7c-cb7c69519285 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Flavor limits 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1555.659834] env[62519]: DEBUG nova.virt.hardware [None req-7525886b-c35e-4300-bd7c-cb7c69519285 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Image limits 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1555.660039] env[62519]: DEBUG nova.virt.hardware [None req-7525886b-c35e-4300-bd7c-cb7c69519285 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Flavor pref 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1555.660170] env[62519]: DEBUG nova.virt.hardware [None req-7525886b-c35e-4300-bd7c-cb7c69519285 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Image pref 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1555.660365] env[62519]: DEBUG nova.virt.hardware [None req-7525886b-c35e-4300-bd7c-cb7c69519285 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1555.660504] env[62519]: DEBUG nova.virt.hardware [None req-7525886b-c35e-4300-bd7c-cb7c69519285 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum 
VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1555.661073] env[62519]: DEBUG nova.virt.hardware [None req-7525886b-c35e-4300-bd7c-cb7c69519285 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62519) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1555.661073] env[62519]: DEBUG nova.virt.hardware [None req-7525886b-c35e-4300-bd7c-cb7c69519285 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Got 1 possible topologies {{(pid=62519) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1555.661073] env[62519]: DEBUG nova.virt.hardware [None req-7525886b-c35e-4300-bd7c-cb7c69519285 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1555.661283] env[62519]: DEBUG nova.virt.hardware [None req-7525886b-c35e-4300-bd7c-cb7c69519285 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1555.664762] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff5bc54d-02da-4dab-b924-14485b813293 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1555.674793] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92bc98ea-6f67-41bf-837d-c7b6245d0e13 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1555.889052] env[62519]: DEBUG oslo_concurrency.lockutils [None req-040512d1-e180-4bd4-99a9-22e550a80706 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1555.890028] env[62519]: DEBUG nova.scheduler.client.report [None req-6a3f141b-7806-43a2-8afd-6877e3266010 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Inventory has not changed for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 157, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1556.015835] env[62519]: DEBUG nova.compute.manager [None req-428b10c4-d0cd-466c-a6ce-0e461fcf51cf tempest-InstanceActionsTestJSON-366970633 tempest-InstanceActionsTestJSON-366970633-project-member] [instance: f19c860f-736a-4783-8ef5-8262040e53a3] Start destroying the instance on the hypervisor. 
{{(pid=62519) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3166}} [ 1556.016050] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-428b10c4-d0cd-466c-a6ce-0e461fcf51cf tempest-InstanceActionsTestJSON-366970633 tempest-InstanceActionsTestJSON-366970633-project-member] [instance: f19c860f-736a-4783-8ef5-8262040e53a3] Destroying instance {{(pid=62519) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1556.017078] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c2a66af-18b5-4e8b-9c08-a4d57e1ed5bb {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1556.026454] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-428b10c4-d0cd-466c-a6ce-0e461fcf51cf tempest-InstanceActionsTestJSON-366970633 tempest-InstanceActionsTestJSON-366970633-project-member] [instance: f19c860f-736a-4783-8ef5-8262040e53a3] Powering off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1556.026659] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b5710d1c-e9ea-47bf-8198-52f173bb1cd7 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1556.034466] env[62519]: DEBUG oslo_vmware.api [None req-428b10c4-d0cd-466c-a6ce-0e461fcf51cf tempest-InstanceActionsTestJSON-366970633 tempest-InstanceActionsTestJSON-366970633-project-member] Waiting for the task: (returnval){ [ 1556.034466] env[62519]: value = "task-1802371" [ 1556.034466] env[62519]: _type = "Task" [ 1556.034466] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1556.044275] env[62519]: DEBUG oslo_vmware.api [None req-428b10c4-d0cd-466c-a6ce-0e461fcf51cf tempest-InstanceActionsTestJSON-366970633 tempest-InstanceActionsTestJSON-366970633-project-member] Task: {'id': task-1802371, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1556.117936] env[62519]: DEBUG oslo_vmware.api [None req-d66de93a-aada-4f3a-931f-5f8ddbb5f1e6 tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]520bb158-77a1-057b-977f-75caa81a50fd, 'name': SearchDatastore_Task, 'duration_secs': 0.0224} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1556.117936] env[62519]: DEBUG oslo_concurrency.lockutils [None req-d66de93a-aada-4f3a-931f-5f8ddbb5f1e6 tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] Releasing lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1556.117936] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-d66de93a-aada-4f3a-931f-5f8ddbb5f1e6 tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk to [datastore1] 24cb49c8-b2ef-4ede-aea6-6e34081beca1/24cb49c8-b2ef-4ede-aea6-6e34081beca1.vmdk {{(pid=62519) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1556.117936] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-2ec97a25-9d30-42bc-aca1-a18eed8db9c3 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1556.127617] env[62519]: DEBUG oslo_vmware.api [None req-d66de93a-aada-4f3a-931f-5f8ddbb5f1e6 tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] Waiting for the task: (returnval){ [ 1556.127617] env[62519]: value = "task-1802372" [ 1556.127617] env[62519]: _type = "Task" [ 1556.127617] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1556.138165] env[62519]: DEBUG nova.compute.manager [None req-7f6d51ef-3fb2-4405-8ddb-ee0d7b872e58 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] [instance: 302edcd3-bd6e-41da-b731-4d4c1bb5c3c1] Starting instance... {{(pid=62519) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2441}} [ 1556.144987] env[62519]: DEBUG oslo_vmware.api [None req-d66de93a-aada-4f3a-931f-5f8ddbb5f1e6 tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] Task: {'id': task-1802372, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1556.402354] env[62519]: DEBUG oslo_concurrency.lockutils [None req-6a3f141b-7806-43a2-8afd-6877e3266010 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.800s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1556.406425] env[62519]: DEBUG nova.compute.manager [None req-6a3f141b-7806-43a2-8afd-6877e3266010 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] [instance: a1551278-a306-4534-8d8d-3b3a003dde04] Start building networks asynchronously for instance. 
{{(pid=62519) _build_resources /opt/stack/nova/nova/compute/manager.py:2838}} [ 1556.411076] env[62519]: DEBUG oslo_concurrency.lockutils [None req-84de915f-3960-4f49-9d37-c92acb68e404 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 32.311s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1556.411076] env[62519]: DEBUG nova.objects.instance [None req-84de915f-3960-4f49-9d37-c92acb68e404 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] [instance: 51eaac08-75fd-49f9-9b1a-cc2a2d799634] Trying to apply a migration context that does not seem to be set for this instance {{(pid=62519) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1556.545567] env[62519]: DEBUG oslo_vmware.api [None req-428b10c4-d0cd-466c-a6ce-0e461fcf51cf tempest-InstanceActionsTestJSON-366970633 tempest-InstanceActionsTestJSON-366970633-project-member] Task: {'id': task-1802371, 'name': PowerOffVM_Task, 'duration_secs': 0.243533} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1556.545914] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-428b10c4-d0cd-466c-a6ce-0e461fcf51cf tempest-InstanceActionsTestJSON-366970633 tempest-InstanceActionsTestJSON-366970633-project-member] [instance: f19c860f-736a-4783-8ef5-8262040e53a3] Powered off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1556.546144] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-428b10c4-d0cd-466c-a6ce-0e461fcf51cf tempest-InstanceActionsTestJSON-366970633 tempest-InstanceActionsTestJSON-366970633-project-member] [instance: f19c860f-736a-4783-8ef5-8262040e53a3] Unregistering the VM {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1556.546459] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-7d587667-ae1e-4ff6-93e3-adbaa88580e8 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1556.561300] env[62519]: DEBUG nova.network.neutron [None req-7525886b-c35e-4300-bd7c-cb7c69519285 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] [instance: d8780c40-0099-4ccc-84ae-72fbb14fa1ee] Successfully updated port: d13d36cf-2bb2-4dde-a6de-51669dc83351 {{(pid=62519) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1556.635528] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-428b10c4-d0cd-466c-a6ce-0e461fcf51cf tempest-InstanceActionsTestJSON-366970633 tempest-InstanceActionsTestJSON-366970633-project-member] [instance: f19c860f-736a-4783-8ef5-8262040e53a3] Unregistered the VM {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1556.635748] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-428b10c4-d0cd-466c-a6ce-0e461fcf51cf tempest-InstanceActionsTestJSON-366970633 tempest-InstanceActionsTestJSON-366970633-project-member] [instance: f19c860f-736a-4783-8ef5-8262040e53a3] Deleting contents of the VM from datastore datastore1 {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1556.635932] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None 
req-428b10c4-d0cd-466c-a6ce-0e461fcf51cf tempest-InstanceActionsTestJSON-366970633 tempest-InstanceActionsTestJSON-366970633-project-member] Deleting the datastore file [datastore1] f19c860f-736a-4783-8ef5-8262040e53a3 {{(pid=62519) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1556.636714] env[62519]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-4edd3256-36d4-4118-81e6-cc08f38165c6 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1556.646020] env[62519]: DEBUG oslo_vmware.api [None req-d66de93a-aada-4f3a-931f-5f8ddbb5f1e6 tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] Task: {'id': task-1802372, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1556.647773] env[62519]: DEBUG oslo_vmware.api [None req-428b10c4-d0cd-466c-a6ce-0e461fcf51cf tempest-InstanceActionsTestJSON-366970633 tempest-InstanceActionsTestJSON-366970633-project-member] Waiting for the task: (returnval){ [ 1556.647773] env[62519]: value = "task-1802374" [ 1556.647773] env[62519]: _type = "Task" [ 1556.647773] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1556.662300] env[62519]: DEBUG oslo_vmware.api [None req-428b10c4-d0cd-466c-a6ce-0e461fcf51cf tempest-InstanceActionsTestJSON-366970633 tempest-InstanceActionsTestJSON-366970633-project-member] Task: {'id': task-1802374, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1556.663781] env[62519]: DEBUG nova.compute.manager [req-5814c089-546e-4925-923f-373331d917c2 req-c954084f-9b54-41d9-a8a7-db7a849ba8b6 service nova] [instance: d8780c40-0099-4ccc-84ae-72fbb14fa1ee] Received event network-vif-plugged-d13d36cf-2bb2-4dde-a6de-51669dc83351 {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1556.664038] env[62519]: DEBUG oslo_concurrency.lockutils [req-5814c089-546e-4925-923f-373331d917c2 req-c954084f-9b54-41d9-a8a7-db7a849ba8b6 service nova] Acquiring lock "d8780c40-0099-4ccc-84ae-72fbb14fa1ee-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1556.664249] env[62519]: DEBUG oslo_concurrency.lockutils [req-5814c089-546e-4925-923f-373331d917c2 req-c954084f-9b54-41d9-a8a7-db7a849ba8b6 service nova] Lock "d8780c40-0099-4ccc-84ae-72fbb14fa1ee-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1556.664412] env[62519]: DEBUG oslo_concurrency.lockutils [req-5814c089-546e-4925-923f-373331d917c2 req-c954084f-9b54-41d9-a8a7-db7a849ba8b6 service nova] Lock "d8780c40-0099-4ccc-84ae-72fbb14fa1ee-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1556.664589] env[62519]: DEBUG nova.compute.manager [req-5814c089-546e-4925-923f-373331d917c2 req-c954084f-9b54-41d9-a8a7-db7a849ba8b6 service nova] [instance: 
d8780c40-0099-4ccc-84ae-72fbb14fa1ee] No waiting events found dispatching network-vif-plugged-d13d36cf-2bb2-4dde-a6de-51669dc83351 {{(pid=62519) pop_instance_event /opt/stack/nova/nova/compute/manager.py:323}} [ 1556.664765] env[62519]: WARNING nova.compute.manager [req-5814c089-546e-4925-923f-373331d917c2 req-c954084f-9b54-41d9-a8a7-db7a849ba8b6 service nova] [instance: d8780c40-0099-4ccc-84ae-72fbb14fa1ee] Received unexpected event network-vif-plugged-d13d36cf-2bb2-4dde-a6de-51669dc83351 for instance with vm_state building and task_state spawning. [ 1556.681561] env[62519]: DEBUG oslo_concurrency.lockutils [None req-7f6d51ef-3fb2-4405-8ddb-ee0d7b872e58 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1556.792619] env[62519]: DEBUG oslo_concurrency.lockutils [None req-069fe864-a843-4aad-b4f0-161d16fbc86a tempest-ImagesOneServerTestJSON-1959164190 tempest-ImagesOneServerTestJSON-1959164190-project-member] Lock "4e3dee19-b99a-4257-88da-1b0531e2c0f9" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 103.655s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1556.919244] env[62519]: DEBUG nova.compute.utils [None req-6a3f141b-7806-43a2-8afd-6877e3266010 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Using /dev/sd instead of None {{(pid=62519) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1556.922861] env[62519]: DEBUG nova.compute.manager [None req-6a3f141b-7806-43a2-8afd-6877e3266010 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] [instance: a1551278-a306-4534-8d8d-3b3a003dde04] Allocating IP information in the background. 
{{(pid=62519) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1989}} [ 1556.923204] env[62519]: DEBUG nova.network.neutron [None req-6a3f141b-7806-43a2-8afd-6877e3266010 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] [instance: a1551278-a306-4534-8d8d-3b3a003dde04] allocate_for_instance() {{(pid=62519) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1557.027043] env[62519]: DEBUG nova.policy [None req-6a3f141b-7806-43a2-8afd-6877e3266010 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'a45b289ccc56467b85f612a102eebf72', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ae1756c7a3124e6aa1099cf99d6f6285', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62519) authorize /opt/stack/nova/nova/policy.py:192}} [ 1557.071167] env[62519]: DEBUG oslo_concurrency.lockutils [None req-7525886b-c35e-4300-bd7c-cb7c69519285 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Acquiring lock "refresh_cache-d8780c40-0099-4ccc-84ae-72fbb14fa1ee" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1557.071167] env[62519]: DEBUG oslo_concurrency.lockutils [None req-7525886b-c35e-4300-bd7c-cb7c69519285 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Acquired lock "refresh_cache-d8780c40-0099-4ccc-84ae-72fbb14fa1ee" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1557.071167] env[62519]: DEBUG nova.network.neutron [None req-7525886b-c35e-4300-bd7c-cb7c69519285 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] [instance: d8780c40-0099-4ccc-84ae-72fbb14fa1ee] Building network info cache for instance {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1557.087187] env[62519]: DEBUG nova.compute.manager [None req-37d16fe4-3717-4983-a91a-6f9d45bf9fb5 tempest-ImagesOneServerTestJSON-1959164190 tempest-ImagesOneServerTestJSON-1959164190-project-member] [instance: 4e3dee19-b99a-4257-88da-1b0531e2c0f9] Checking state {{(pid=62519) _get_power_state /opt/stack/nova/nova/compute/manager.py:1799}} [ 1557.088598] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d85316ce-3014-44ac-8397-581e07274641 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1557.138616] env[62519]: DEBUG oslo_vmware.api [None req-d66de93a-aada-4f3a-931f-5f8ddbb5f1e6 tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] Task: {'id': task-1802372, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.706484} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1557.139036] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-d66de93a-aada-4f3a-931f-5f8ddbb5f1e6 tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk to [datastore1] 24cb49c8-b2ef-4ede-aea6-6e34081beca1/24cb49c8-b2ef-4ede-aea6-6e34081beca1.vmdk {{(pid=62519) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1557.139119] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-d66de93a-aada-4f3a-931f-5f8ddbb5f1e6 tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] [instance: 24cb49c8-b2ef-4ede-aea6-6e34081beca1] Extending root virtual disk to 1048576 {{(pid=62519) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1557.139323] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-aea73cbb-8972-4d19-b013-c9e72e25cc20 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1557.146693] env[62519]: DEBUG oslo_vmware.api [None req-d66de93a-aada-4f3a-931f-5f8ddbb5f1e6 tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] Waiting for the task: (returnval){ [ 1557.146693] env[62519]: value = "task-1802376" [ 1557.146693] env[62519]: _type = "Task" [ 1557.146693] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1557.162868] env[62519]: DEBUG oslo_vmware.api [None req-428b10c4-d0cd-466c-a6ce-0e461fcf51cf tempest-InstanceActionsTestJSON-366970633 tempest-InstanceActionsTestJSON-366970633-project-member] Task: {'id': task-1802374, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1557.162868] env[62519]: DEBUG oslo_vmware.api [None req-d66de93a-aada-4f3a-931f-5f8ddbb5f1e6 tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] Task: {'id': task-1802376, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1557.294012] env[62519]: DEBUG nova.compute.manager [None req-a05ef864-7b2b-4646-a83f-596c8b3440a9 tempest-ServersWithSpecificFlavorTestJSON-2102225660 tempest-ServersWithSpecificFlavorTestJSON-2102225660-project-member] [instance: f0925a44-c15b-4415-99bc-1b2366292fe4] Starting instance... 
{{(pid=62519) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2441}} [ 1557.426194] env[62519]: DEBUG oslo_concurrency.lockutils [None req-84de915f-3960-4f49-9d37-c92acb68e404 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.017s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1557.427578] env[62519]: DEBUG nova.compute.manager [None req-6a3f141b-7806-43a2-8afd-6877e3266010 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] [instance: a1551278-a306-4534-8d8d-3b3a003dde04] Start building block device mappings for instance. {{(pid=62519) _build_resources /opt/stack/nova/nova/compute/manager.py:2873}} [ 1557.431413] env[62519]: DEBUG oslo_concurrency.lockutils [None req-3b2ada86-b023-43f6-9978-a85f83bd40ba tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 29.046s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1557.431646] env[62519]: DEBUG nova.objects.instance [None req-3b2ada86-b023-43f6-9978-a85f83bd40ba tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Lazy-loading 'resources' on Instance uuid 51eaac08-75fd-49f9-9b1a-cc2a2d799634 {{(pid=62519) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1557.464070] env[62519]: DEBUG nova.compute.manager [None req-9ac93790-d8fc-4625-a913-ddc460237e69 tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] [instance: 1d4b14d3-8832-457e-aaed-462236555f57] Checking state {{(pid=62519) _get_power_state /opt/stack/nova/nova/compute/manager.py:1799}} [ 1557.465063] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a62d4415-8102-407a-80d3-22d95a6d8521 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1557.498608] env[62519]: DEBUG nova.network.neutron [None req-6a3f141b-7806-43a2-8afd-6877e3266010 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] [instance: a1551278-a306-4534-8d8d-3b3a003dde04] Successfully created port: 1cf55e56-b406-4e45-9b63-912d4587f930 {{(pid=62519) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1557.599529] env[62519]: INFO nova.compute.manager [None req-37d16fe4-3717-4983-a91a-6f9d45bf9fb5 tempest-ImagesOneServerTestJSON-1959164190 tempest-ImagesOneServerTestJSON-1959164190-project-member] [instance: 4e3dee19-b99a-4257-88da-1b0531e2c0f9] instance snapshotting [ 1557.603489] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2569441-099a-4368-9f23-510ce4aec9b3 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1557.626887] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21686446-4678-4826-8a1c-b06d66724d96 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1557.640030] env[62519]: DEBUG 
nova.network.neutron [None req-7525886b-c35e-4300-bd7c-cb7c69519285 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] [instance: d8780c40-0099-4ccc-84ae-72fbb14fa1ee] Instance cache missing network info. {{(pid=62519) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1557.661643] env[62519]: DEBUG oslo_vmware.api [None req-d66de93a-aada-4f3a-931f-5f8ddbb5f1e6 tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] Task: {'id': task-1802376, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.070242} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1557.664705] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-d66de93a-aada-4f3a-931f-5f8ddbb5f1e6 tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] [instance: 24cb49c8-b2ef-4ede-aea6-6e34081beca1] Extended root virtual disk {{(pid=62519) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1557.665028] env[62519]: DEBUG oslo_vmware.api [None req-428b10c4-d0cd-466c-a6ce-0e461fcf51cf tempest-InstanceActionsTestJSON-366970633 tempest-InstanceActionsTestJSON-366970633-project-member] Task: {'id': task-1802374, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.609831} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1557.665766] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9cdb2f84-3204-4870-bf31-3d1e62afa47f {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1557.668490] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-428b10c4-d0cd-466c-a6ce-0e461fcf51cf tempest-InstanceActionsTestJSON-366970633 tempest-InstanceActionsTestJSON-366970633-project-member] Deleted the datastore file {{(pid=62519) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1557.668667] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-428b10c4-d0cd-466c-a6ce-0e461fcf51cf tempest-InstanceActionsTestJSON-366970633 tempest-InstanceActionsTestJSON-366970633-project-member] [instance: f19c860f-736a-4783-8ef5-8262040e53a3] Deleted contents of the VM from datastore datastore1 {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1557.668758] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-428b10c4-d0cd-466c-a6ce-0e461fcf51cf tempest-InstanceActionsTestJSON-366970633 tempest-InstanceActionsTestJSON-366970633-project-member] [instance: f19c860f-736a-4783-8ef5-8262040e53a3] Instance destroyed {{(pid=62519) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1557.668985] env[62519]: INFO nova.compute.manager [None req-428b10c4-d0cd-466c-a6ce-0e461fcf51cf tempest-InstanceActionsTestJSON-366970633 tempest-InstanceActionsTestJSON-366970633-project-member] [instance: f19c860f-736a-4783-8ef5-8262040e53a3] Took 1.65 seconds to destroy the instance on the hypervisor. [ 1557.669251] env[62519]: DEBUG oslo.service.loopingcall [None req-428b10c4-d0cd-466c-a6ce-0e461fcf51cf tempest-InstanceActionsTestJSON-366970633 tempest-InstanceActionsTestJSON-366970633-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62519) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1557.669459] env[62519]: DEBUG nova.compute.manager [-] [instance: f19c860f-736a-4783-8ef5-8262040e53a3] Deallocating network for instance {{(pid=62519) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2297}} [ 1557.669627] env[62519]: DEBUG nova.network.neutron [-] [instance: f19c860f-736a-4783-8ef5-8262040e53a3] deallocate_for_instance() {{(pid=62519) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1557.694872] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-d66de93a-aada-4f3a-931f-5f8ddbb5f1e6 tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] [instance: 24cb49c8-b2ef-4ede-aea6-6e34081beca1] Reconfiguring VM instance instance-0000002a to attach disk [datastore1] 24cb49c8-b2ef-4ede-aea6-6e34081beca1/24cb49c8-b2ef-4ede-aea6-6e34081beca1.vmdk or device None with type sparse {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1557.695682] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e864d9bc-5b99-46d1-8d14-1e4f314a81e1 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1557.717278] env[62519]: DEBUG oslo_vmware.api [None req-d66de93a-aada-4f3a-931f-5f8ddbb5f1e6 tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] Waiting for the task: (returnval){ [ 1557.717278] env[62519]: value = "task-1802377" [ 1557.717278] env[62519]: _type = "Task" [ 1557.717278] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1557.727325] env[62519]: DEBUG oslo_vmware.api [None req-d66de93a-aada-4f3a-931f-5f8ddbb5f1e6 tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] Task: {'id': task-1802377, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1557.837022] env[62519]: DEBUG oslo_concurrency.lockutils [None req-a05ef864-7b2b-4646-a83f-596c8b3440a9 tempest-ServersWithSpecificFlavorTestJSON-2102225660 tempest-ServersWithSpecificFlavorTestJSON-2102225660-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1557.978445] env[62519]: INFO nova.compute.manager [None req-9ac93790-d8fc-4625-a913-ddc460237e69 tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] [instance: 1d4b14d3-8832-457e-aaed-462236555f57] instance snapshotting [ 1557.990176] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bff298b7-ed7f-46bf-aa43-d03adb32c6af {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1558.011167] env[62519]: DEBUG nova.network.neutron [None req-7525886b-c35e-4300-bd7c-cb7c69519285 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] [instance: d8780c40-0099-4ccc-84ae-72fbb14fa1ee] Updating instance_info_cache with network_info: [{"id": "d13d36cf-2bb2-4dde-a6de-51669dc83351", "address": "fa:16:3e:bc:fe:0c", "network": {"id": "f9454e9e-4102-407f-acc0-7afee44ec876", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-684678313-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "683d60927fdf424386ffcfaa344a7af6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "11b669be-fb26-4ef8-bdb6-c77ab9d06daf", "external-id": "nsx-vlan-transportzone-633", "segmentation_id": 633, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd13d36cf-2b", "ovs_interfaceid": "d13d36cf-2bb2-4dde-a6de-51669dc83351", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1558.018944] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0eca82a0-e8c8-4a5e-bdf3-9474af24ebc5 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1558.144284] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-37d16fe4-3717-4983-a91a-6f9d45bf9fb5 tempest-ImagesOneServerTestJSON-1959164190 tempest-ImagesOneServerTestJSON-1959164190-project-member] [instance: 4e3dee19-b99a-4257-88da-1b0531e2c0f9] Creating Snapshot of the VM instance {{(pid=62519) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1558.144595] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-f1545505-683f-4372-815d-b1a2ef43768c {{(pid=62519) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1558.149606] env[62519]: DEBUG nova.compute.manager [req-612754a7-ecb0-44f4-9fd1-4aa4aa59e8ec req-c13f5c74-0f12-4047-ba08-7f23366ffc42 service nova] [instance: f19c860f-736a-4783-8ef5-8262040e53a3] Received event network-vif-deleted-e362cda5-6e64-4311-96a1-880e1f66ab32 {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1558.149606] env[62519]: INFO nova.compute.manager [req-612754a7-ecb0-44f4-9fd1-4aa4aa59e8ec req-c13f5c74-0f12-4047-ba08-7f23366ffc42 service nova] [instance: f19c860f-736a-4783-8ef5-8262040e53a3] Neutron deleted interface e362cda5-6e64-4311-96a1-880e1f66ab32; detaching it from the instance and deleting it from the info cache [ 1558.149739] env[62519]: DEBUG nova.network.neutron [req-612754a7-ecb0-44f4-9fd1-4aa4aa59e8ec req-c13f5c74-0f12-4047-ba08-7f23366ffc42 service nova] [instance: f19c860f-736a-4783-8ef5-8262040e53a3] Updating instance_info_cache with network_info: [] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1558.159747] env[62519]: DEBUG oslo_vmware.api [None req-37d16fe4-3717-4983-a91a-6f9d45bf9fb5 tempest-ImagesOneServerTestJSON-1959164190 tempest-ImagesOneServerTestJSON-1959164190-project-member] Waiting for the task: (returnval){ [ 1558.159747] env[62519]: value = "task-1802379" [ 1558.159747] env[62519]: _type = "Task" [ 1558.159747] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1558.170716] env[62519]: DEBUG oslo_vmware.api [None req-37d16fe4-3717-4983-a91a-6f9d45bf9fb5 tempest-ImagesOneServerTestJSON-1959164190 tempest-ImagesOneServerTestJSON-1959164190-project-member] Task: {'id': task-1802379, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1558.238019] env[62519]: DEBUG oslo_vmware.api [None req-d66de93a-aada-4f3a-931f-5f8ddbb5f1e6 tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] Task: {'id': task-1802377, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1558.445376] env[62519]: DEBUG nova.compute.manager [None req-6a3f141b-7806-43a2-8afd-6877e3266010 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] [instance: a1551278-a306-4534-8d8d-3b3a003dde04] Start spawning the instance on the hypervisor. 
{{(pid=62519) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2647}} [ 1558.484584] env[62519]: DEBUG nova.virt.hardware [None req-6a3f141b-7806-43a2-8afd-6877e3266010 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T08:12:36Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='95834530-4e39-49f6-a893-5ac841902368',id=37,is_public=True,memory_mb=192,name='tempest-test_resize_flavor_-2145697929',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T07:55:55Z,direct_url=,disk_format='vmdk',id=15793716-f1d9-4a86-9030-717adf498693,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4069337684d348e0a1ab4eb6a1a2b14d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T07:55:56Z,virtual_size=,visibility=), allow threads: False {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1558.484732] env[62519]: DEBUG nova.virt.hardware [None req-6a3f141b-7806-43a2-8afd-6877e3266010 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Flavor limits 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1558.485138] env[62519]: DEBUG nova.virt.hardware [None req-6a3f141b-7806-43a2-8afd-6877e3266010 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Image limits 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1558.485138] env[62519]: DEBUG nova.virt.hardware [None req-6a3f141b-7806-43a2-8afd-6877e3266010 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Flavor pref 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1558.485268] env[62519]: DEBUG nova.virt.hardware [None req-6a3f141b-7806-43a2-8afd-6877e3266010 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Image pref 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1558.485694] env[62519]: DEBUG nova.virt.hardware [None req-6a3f141b-7806-43a2-8afd-6877e3266010 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1558.485694] env[62519]: DEBUG nova.virt.hardware [None req-6a3f141b-7806-43a2-8afd-6877e3266010 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1558.485694] env[62519]: DEBUG nova.virt.hardware [None req-6a3f141b-7806-43a2-8afd-6877e3266010 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62519) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1558.485832] env[62519]: 
DEBUG nova.virt.hardware [None req-6a3f141b-7806-43a2-8afd-6877e3266010 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Got 1 possible topologies {{(pid=62519) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1558.485960] env[62519]: DEBUG nova.virt.hardware [None req-6a3f141b-7806-43a2-8afd-6877e3266010 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1558.486135] env[62519]: DEBUG nova.virt.hardware [None req-6a3f141b-7806-43a2-8afd-6877e3266010 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1558.487634] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2b96f54-94df-414e-ac98-1ddd2abbbf84 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1558.499677] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-138fd291-a627-481b-9d03-9cd4efbd9b9b {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1558.523508] env[62519]: DEBUG oslo_concurrency.lockutils [None req-7525886b-c35e-4300-bd7c-cb7c69519285 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Releasing lock "refresh_cache-d8780c40-0099-4ccc-84ae-72fbb14fa1ee" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1558.523827] env[62519]: DEBUG nova.compute.manager [None req-7525886b-c35e-4300-bd7c-cb7c69519285 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] [instance: d8780c40-0099-4ccc-84ae-72fbb14fa1ee] Instance network_info: |[{"id": "d13d36cf-2bb2-4dde-a6de-51669dc83351", "address": "fa:16:3e:bc:fe:0c", "network": {"id": "f9454e9e-4102-407f-acc0-7afee44ec876", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-684678313-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "683d60927fdf424386ffcfaa344a7af6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "11b669be-fb26-4ef8-bdb6-c77ab9d06daf", "external-id": "nsx-vlan-transportzone-633", "segmentation_id": 633, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd13d36cf-2b", "ovs_interfaceid": "d13d36cf-2bb2-4dde-a6de-51669dc83351", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62519) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2004}} [ 1558.524453] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-7525886b-c35e-4300-bd7c-cb7c69519285 
tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] [instance: d8780c40-0099-4ccc-84ae-72fbb14fa1ee] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:bc:fe:0c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '11b669be-fb26-4ef8-bdb6-c77ab9d06daf', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'd13d36cf-2bb2-4dde-a6de-51669dc83351', 'vif_model': 'vmxnet3'}] {{(pid=62519) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1558.531842] env[62519]: DEBUG oslo.service.loopingcall [None req-7525886b-c35e-4300-bd7c-cb7c69519285 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62519) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1558.533194] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-9ac93790-d8fc-4625-a913-ddc460237e69 tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] [instance: 1d4b14d3-8832-457e-aaed-462236555f57] Creating Snapshot of the VM instance {{(pid=62519) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1558.533438] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d8780c40-0099-4ccc-84ae-72fbb14fa1ee] Creating VM on the ESX host {{(pid=62519) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1558.533646] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-3a6aff01-88e9-474e-8c99-3907d7863cb1 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1558.535677] env[62519]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-7b218e5e-496f-430d-b206-23c7d8faef0d {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1558.560611] env[62519]: DEBUG oslo_vmware.api [None req-9ac93790-d8fc-4625-a913-ddc460237e69 tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] Waiting for the task: (returnval){ [ 1558.560611] env[62519]: value = "task-1802380" [ 1558.560611] env[62519]: _type = "Task" [ 1558.560611] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1558.561618] env[62519]: DEBUG nova.network.neutron [-] [instance: f19c860f-736a-4783-8ef5-8262040e53a3] Updating instance_info_cache with network_info: [] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1558.562836] env[62519]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1558.562836] env[62519]: value = "task-1802381" [ 1558.562836] env[62519]: _type = "Task" [ 1558.562836] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1558.580830] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1802381, 'name': CreateVM_Task} progress is 6%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1558.581129] env[62519]: DEBUG oslo_vmware.api [None req-9ac93790-d8fc-4625-a913-ddc460237e69 tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] Task: {'id': task-1802380, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1558.631097] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51533878-e8f9-49de-a3ed-b41c03ff4ad7 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1558.648295] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c9afa4f-f5e2-4b2f-8cb1-fd24f957127a {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1558.682678] env[62519]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-078e22b6-96cc-452f-9fac-4b197f458811 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1558.687404] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-620b35d4-e1d0-49b6-b1ec-1fdd37062742 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1558.698528] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25e80beb-aa3e-48ca-b4a5-37cee2e4c177 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1558.703044] env[62519]: DEBUG oslo_vmware.api [None req-37d16fe4-3717-4983-a91a-6f9d45bf9fb5 tempest-ImagesOneServerTestJSON-1959164190 tempest-ImagesOneServerTestJSON-1959164190-project-member] Task: {'id': task-1802379, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1558.706040] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4226fda-de56-4e55-b19a-1fef1bfe23d5 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1558.727404] env[62519]: DEBUG nova.compute.provider_tree [None req-3b2ada86-b023-43f6-9978-a85f83bd40ba tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Inventory has not changed in ProviderTree for provider: f8ca0d98-9158-4b85-ae0e-b106f966dd44 {{(pid=62519) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1558.744134] env[62519]: DEBUG nova.compute.manager [req-612754a7-ecb0-44f4-9fd1-4aa4aa59e8ec req-c13f5c74-0f12-4047-ba08-7f23366ffc42 service nova] [instance: f19c860f-736a-4783-8ef5-8262040e53a3] Detach interface failed, port_id=e362cda5-6e64-4311-96a1-880e1f66ab32, reason: Instance f19c860f-736a-4783-8ef5-8262040e53a3 could not be found. 
{{(pid=62519) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11455}} [ 1558.745983] env[62519]: DEBUG nova.scheduler.client.report [None req-3b2ada86-b023-43f6-9978-a85f83bd40ba tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Inventory has not changed for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 157, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1558.755010] env[62519]: DEBUG oslo_vmware.api [None req-d66de93a-aada-4f3a-931f-5f8ddbb5f1e6 tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] Task: {'id': task-1802377, 'name': ReconfigVM_Task, 'duration_secs': 0.694545} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1558.755291] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-d66de93a-aada-4f3a-931f-5f8ddbb5f1e6 tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] [instance: 24cb49c8-b2ef-4ede-aea6-6e34081beca1] Reconfigured VM instance instance-0000002a to attach disk [datastore1] 24cb49c8-b2ef-4ede-aea6-6e34081beca1/24cb49c8-b2ef-4ede-aea6-6e34081beca1.vmdk or device None with type sparse {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1558.755906] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-37ac82f8-f2cc-4653-93de-b8b9809bcba3 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1558.763174] env[62519]: DEBUG oslo_vmware.api [None req-d66de93a-aada-4f3a-931f-5f8ddbb5f1e6 tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] Waiting for the task: (returnval){ [ 1558.763174] env[62519]: value = "task-1802382" [ 1558.763174] env[62519]: _type = "Task" [ 1558.763174] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1558.771861] env[62519]: DEBUG oslo_vmware.api [None req-d66de93a-aada-4f3a-931f-5f8ddbb5f1e6 tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] Task: {'id': task-1802382, 'name': Rename_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1558.955244] env[62519]: DEBUG nova.compute.manager [req-8f6a553d-d03b-41ae-b6ec-07a180bd1e06 req-fcd4ee0a-8f96-4842-8fca-3d9e9b6f96bc service nova] [instance: d8780c40-0099-4ccc-84ae-72fbb14fa1ee] Received event network-changed-d13d36cf-2bb2-4dde-a6de-51669dc83351 {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1558.955430] env[62519]: DEBUG nova.compute.manager [req-8f6a553d-d03b-41ae-b6ec-07a180bd1e06 req-fcd4ee0a-8f96-4842-8fca-3d9e9b6f96bc service nova] [instance: d8780c40-0099-4ccc-84ae-72fbb14fa1ee] Refreshing instance network info cache due to event network-changed-d13d36cf-2bb2-4dde-a6de-51669dc83351. {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11628}} [ 1558.955648] env[62519]: DEBUG oslo_concurrency.lockutils [req-8f6a553d-d03b-41ae-b6ec-07a180bd1e06 req-fcd4ee0a-8f96-4842-8fca-3d9e9b6f96bc service nova] Acquiring lock "refresh_cache-d8780c40-0099-4ccc-84ae-72fbb14fa1ee" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1558.955785] env[62519]: DEBUG oslo_concurrency.lockutils [req-8f6a553d-d03b-41ae-b6ec-07a180bd1e06 req-fcd4ee0a-8f96-4842-8fca-3d9e9b6f96bc service nova] Acquired lock "refresh_cache-d8780c40-0099-4ccc-84ae-72fbb14fa1ee" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1558.955936] env[62519]: DEBUG nova.network.neutron [req-8f6a553d-d03b-41ae-b6ec-07a180bd1e06 req-fcd4ee0a-8f96-4842-8fca-3d9e9b6f96bc service nova] [instance: d8780c40-0099-4ccc-84ae-72fbb14fa1ee] Refreshing network info cache for port d13d36cf-2bb2-4dde-a6de-51669dc83351 {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1559.066916] env[62519]: INFO nova.compute.manager [-] [instance: f19c860f-736a-4783-8ef5-8262040e53a3] Took 1.40 seconds to deallocate network for instance. [ 1559.086532] env[62519]: DEBUG oslo_vmware.api [None req-9ac93790-d8fc-4625-a913-ddc460237e69 tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] Task: {'id': task-1802380, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1559.094787] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1802381, 'name': CreateVM_Task, 'duration_secs': 0.391177} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1559.094905] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d8780c40-0099-4ccc-84ae-72fbb14fa1ee] Created VM on the ESX host {{(pid=62519) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1559.095909] env[62519]: DEBUG oslo_concurrency.lockutils [None req-7525886b-c35e-4300-bd7c-cb7c69519285 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1559.096120] env[62519]: DEBUG oslo_concurrency.lockutils [None req-7525886b-c35e-4300-bd7c-cb7c69519285 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Acquired lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1559.096430] env[62519]: DEBUG oslo_concurrency.lockutils [None req-7525886b-c35e-4300-bd7c-cb7c69519285 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1559.096721] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4d0b5353-96c6-4516-b010-f3a88e8e7d0b {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1559.105670] env[62519]: DEBUG oslo_vmware.api [None req-7525886b-c35e-4300-bd7c-cb7c69519285 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Waiting for the task: (returnval){ [ 1559.105670] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]52e61a37-4653-c30b-15b1-8576e6e6a9ab" [ 1559.105670] env[62519]: _type = "Task" [ 1559.105670] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1559.119065] env[62519]: DEBUG oslo_vmware.api [None req-7525886b-c35e-4300-bd7c-cb7c69519285 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52e61a37-4653-c30b-15b1-8576e6e6a9ab, 'name': SearchDatastore_Task, 'duration_secs': 0.010785} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1559.119065] env[62519]: DEBUG oslo_concurrency.lockutils [None req-7525886b-c35e-4300-bd7c-cb7c69519285 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Releasing lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1559.119065] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-7525886b-c35e-4300-bd7c-cb7c69519285 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] [instance: d8780c40-0099-4ccc-84ae-72fbb14fa1ee] Processing image 15793716-f1d9-4a86-9030-717adf498693 {{(pid=62519) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1559.119237] env[62519]: DEBUG oslo_concurrency.lockutils [None req-7525886b-c35e-4300-bd7c-cb7c69519285 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1559.119316] env[62519]: DEBUG oslo_concurrency.lockutils [None req-7525886b-c35e-4300-bd7c-cb7c69519285 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Acquired lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1559.119495] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-7525886b-c35e-4300-bd7c-cb7c69519285 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62519) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1559.119712] env[62519]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-6b4945cb-bf46-4ff6-a4cd-97515e642ea2 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1559.130018] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-7525886b-c35e-4300-bd7c-cb7c69519285 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62519) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1559.130237] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-7525886b-c35e-4300-bd7c-cb7c69519285 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62519) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1559.131229] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8d8a5974-8832-4142-b783-fbb8b5bae9cf {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1559.137607] env[62519]: DEBUG oslo_vmware.api [None req-7525886b-c35e-4300-bd7c-cb7c69519285 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Waiting for the task: (returnval){ [ 1559.137607] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]52a80259-3867-9cd1-088f-75644e0a4cf2" [ 1559.137607] env[62519]: _type = "Task" [ 1559.137607] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1559.147953] env[62519]: DEBUG oslo_vmware.api [None req-7525886b-c35e-4300-bd7c-cb7c69519285 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52a80259-3867-9cd1-088f-75644e0a4cf2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1559.198039] env[62519]: DEBUG oslo_vmware.api [None req-37d16fe4-3717-4983-a91a-6f9d45bf9fb5 tempest-ImagesOneServerTestJSON-1959164190 tempest-ImagesOneServerTestJSON-1959164190-project-member] Task: {'id': task-1802379, 'name': CreateSnapshot_Task, 'duration_secs': 0.789851} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1559.198201] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-37d16fe4-3717-4983-a91a-6f9d45bf9fb5 tempest-ImagesOneServerTestJSON-1959164190 tempest-ImagesOneServerTestJSON-1959164190-project-member] [instance: 4e3dee19-b99a-4257-88da-1b0531e2c0f9] Created Snapshot of the VM instance {{(pid=62519) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1559.199258] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0477dd16-7d4b-4a5d-b1f9-71840456737a {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1559.252162] env[62519]: DEBUG oslo_concurrency.lockutils [None req-3b2ada86-b023-43f6-9978-a85f83bd40ba tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.821s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1559.256032] env[62519]: DEBUG oslo_concurrency.lockutils [None req-d69cb494-e284-4152-aa11-3bbc61cb4189 tempest-ServersWithSpecificFlavorTestJSON-2102225660 tempest-ServersWithSpecificFlavorTestJSON-2102225660-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 30.033s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1559.256191] env[62519]: DEBUG nova.objects.instance [None req-d69cb494-e284-4152-aa11-3bbc61cb4189 tempest-ServersWithSpecificFlavorTestJSON-2102225660 tempest-ServersWithSpecificFlavorTestJSON-2102225660-project-member] Lazy-loading 'resources' on Instance uuid 
029ee07c-705d-452b-9b14-385d69f2fbbb {{(pid=62519) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1559.278022] env[62519]: DEBUG oslo_vmware.api [None req-d66de93a-aada-4f3a-931f-5f8ddbb5f1e6 tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] Task: {'id': task-1802382, 'name': Rename_Task, 'duration_secs': 0.150836} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1559.278345] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-d66de93a-aada-4f3a-931f-5f8ddbb5f1e6 tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] [instance: 24cb49c8-b2ef-4ede-aea6-6e34081beca1] Powering on the VM {{(pid=62519) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1559.279348] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-7d1d3a2a-c3b2-4c7d-ad10-0bdd1088d1f8 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1559.286023] env[62519]: INFO nova.scheduler.client.report [None req-3b2ada86-b023-43f6-9978-a85f83bd40ba tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Deleted allocations for instance 51eaac08-75fd-49f9-9b1a-cc2a2d799634 [ 1559.287711] env[62519]: DEBUG oslo_vmware.api [None req-d66de93a-aada-4f3a-931f-5f8ddbb5f1e6 tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] Waiting for the task: (returnval){ [ 1559.287711] env[62519]: value = "task-1802383" [ 1559.287711] env[62519]: _type = "Task" [ 1559.287711] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1559.297373] env[62519]: DEBUG oslo_vmware.api [None req-d66de93a-aada-4f3a-931f-5f8ddbb5f1e6 tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] Task: {'id': task-1802383, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1559.491942] env[62519]: DEBUG nova.network.neutron [None req-6a3f141b-7806-43a2-8afd-6877e3266010 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] [instance: a1551278-a306-4534-8d8d-3b3a003dde04] Successfully updated port: 1cf55e56-b406-4e45-9b63-912d4587f930 {{(pid=62519) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1559.556828] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-075439ca-d8c3-416a-962f-d9ac15fdb3e2 tempest-VolumesAdminNegativeTest-694306887 tempest-VolumesAdminNegativeTest-694306887-project-member] [instance: c61c893f-826b-4874-b253-de6fbffa9e5a] Volume attach. 
Driver type: vmdk {{(pid=62519) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1559.557082] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-075439ca-d8c3-416a-962f-d9ac15fdb3e2 tempest-VolumesAdminNegativeTest-694306887 tempest-VolumesAdminNegativeTest-694306887-project-member] [instance: c61c893f-826b-4874-b253-de6fbffa9e5a] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-373696', 'volume_id': '158562d2-c9d2-4053-924e-e9c2c4cb6e65', 'name': 'volume-158562d2-c9d2-4053-924e-e9c2c4cb6e65', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'c61c893f-826b-4874-b253-de6fbffa9e5a', 'attached_at': '', 'detached_at': '', 'volume_id': '158562d2-c9d2-4053-924e-e9c2c4cb6e65', 'serial': '158562d2-c9d2-4053-924e-e9c2c4cb6e65'} {{(pid=62519) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1559.557979] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d53d4759-63f1-4f24-9314-90f49e99b562 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1559.577973] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9a0b3a1-c1bc-45b9-be17-a4942a291fac {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1559.586064] env[62519]: DEBUG oslo_vmware.api [None req-9ac93790-d8fc-4625-a913-ddc460237e69 tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] Task: {'id': task-1802380, 'name': CreateSnapshot_Task, 'duration_secs': 0.670074} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1559.586064] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-9ac93790-d8fc-4625-a913-ddc460237e69 tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] [instance: 1d4b14d3-8832-457e-aaed-462236555f57] Created Snapshot of the VM instance {{(pid=62519) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1559.586064] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-940ac0ae-17fd-4f5e-9f64-7fe326da9ee1 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1559.601807] env[62519]: DEBUG oslo_concurrency.lockutils [None req-428b10c4-d0cd-466c-a6ce-0e461fcf51cf tempest-InstanceActionsTestJSON-366970633 tempest-InstanceActionsTestJSON-366970633-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1559.609322] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-075439ca-d8c3-416a-962f-d9ac15fdb3e2 tempest-VolumesAdminNegativeTest-694306887 tempest-VolumesAdminNegativeTest-694306887-project-member] [instance: c61c893f-826b-4874-b253-de6fbffa9e5a] Reconfiguring VM instance instance-00000010 to attach disk [datastore1] volume-158562d2-c9d2-4053-924e-e9c2c4cb6e65/volume-158562d2-c9d2-4053-924e-e9c2c4cb6e65.vmdk or device None with type thin {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1559.610175] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b5b24f68-f5cd-49a4-8522-0920be208972 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1559.643766] env[62519]: DEBUG oslo_vmware.api [None req-075439ca-d8c3-416a-962f-d9ac15fdb3e2 tempest-VolumesAdminNegativeTest-694306887 tempest-VolumesAdminNegativeTest-694306887-project-member] Waiting for the task: (returnval){ [ 1559.643766] env[62519]: value = "task-1802384" [ 1559.643766] env[62519]: _type = "Task" [ 1559.643766] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1559.650802] env[62519]: DEBUG oslo_vmware.api [None req-7525886b-c35e-4300-bd7c-cb7c69519285 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52a80259-3867-9cd1-088f-75644e0a4cf2, 'name': SearchDatastore_Task, 'duration_secs': 0.016271} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1559.651939] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-346e0b8d-4575-4e9f-add0-8c35622b00cf {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1559.657214] env[62519]: DEBUG oslo_vmware.api [None req-075439ca-d8c3-416a-962f-d9ac15fdb3e2 tempest-VolumesAdminNegativeTest-694306887 tempest-VolumesAdminNegativeTest-694306887-project-member] Task: {'id': task-1802384, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1559.661188] env[62519]: DEBUG oslo_vmware.api [None req-7525886b-c35e-4300-bd7c-cb7c69519285 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Waiting for the task: (returnval){ [ 1559.661188] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]520bac4f-0f94-9535-6d2d-a8a01efae131" [ 1559.661188] env[62519]: _type = "Task" [ 1559.661188] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1559.670097] env[62519]: DEBUG oslo_vmware.api [None req-7525886b-c35e-4300-bd7c-cb7c69519285 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]520bac4f-0f94-9535-6d2d-a8a01efae131, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1559.728452] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-37d16fe4-3717-4983-a91a-6f9d45bf9fb5 tempest-ImagesOneServerTestJSON-1959164190 tempest-ImagesOneServerTestJSON-1959164190-project-member] [instance: 4e3dee19-b99a-4257-88da-1b0531e2c0f9] Creating linked-clone VM from snapshot {{(pid=62519) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1559.728780] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-36118ee9-537a-4276-9adb-9f012dcef05f {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1559.740980] env[62519]: DEBUG oslo_vmware.api [None req-37d16fe4-3717-4983-a91a-6f9d45bf9fb5 tempest-ImagesOneServerTestJSON-1959164190 tempest-ImagesOneServerTestJSON-1959164190-project-member] Waiting for the task: (returnval){ [ 1559.740980] env[62519]: value = "task-1802385" [ 1559.740980] env[62519]: _type = "Task" [ 1559.740980] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1559.750334] env[62519]: DEBUG oslo_vmware.api [None req-37d16fe4-3717-4983-a91a-6f9d45bf9fb5 tempest-ImagesOneServerTestJSON-1959164190 tempest-ImagesOneServerTestJSON-1959164190-project-member] Task: {'id': task-1802385, 'name': CloneVM_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1559.794984] env[62519]: DEBUG oslo_concurrency.lockutils [None req-3b2ada86-b023-43f6-9978-a85f83bd40ba tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Lock "51eaac08-75fd-49f9-9b1a-cc2a2d799634" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 34.883s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1559.803820] env[62519]: DEBUG oslo_vmware.api [None req-d66de93a-aada-4f3a-931f-5f8ddbb5f1e6 tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] Task: {'id': task-1802383, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1559.864725] env[62519]: DEBUG nova.network.neutron [req-8f6a553d-d03b-41ae-b6ec-07a180bd1e06 req-fcd4ee0a-8f96-4842-8fca-3d9e9b6f96bc service nova] [instance: d8780c40-0099-4ccc-84ae-72fbb14fa1ee] Updated VIF entry in instance network info cache for port d13d36cf-2bb2-4dde-a6de-51669dc83351. {{(pid=62519) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1559.865258] env[62519]: DEBUG nova.network.neutron [req-8f6a553d-d03b-41ae-b6ec-07a180bd1e06 req-fcd4ee0a-8f96-4842-8fca-3d9e9b6f96bc service nova] [instance: d8780c40-0099-4ccc-84ae-72fbb14fa1ee] Updating instance_info_cache with network_info: [{"id": "d13d36cf-2bb2-4dde-a6de-51669dc83351", "address": "fa:16:3e:bc:fe:0c", "network": {"id": "f9454e9e-4102-407f-acc0-7afee44ec876", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-684678313-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "683d60927fdf424386ffcfaa344a7af6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "11b669be-fb26-4ef8-bdb6-c77ab9d06daf", "external-id": "nsx-vlan-transportzone-633", "segmentation_id": 633, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd13d36cf-2b", "ovs_interfaceid": "d13d36cf-2bb2-4dde-a6de-51669dc83351", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1559.998220] env[62519]: DEBUG oslo_concurrency.lockutils [None req-6a3f141b-7806-43a2-8afd-6877e3266010 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Acquiring lock "refresh_cache-a1551278-a306-4534-8d8d-3b3a003dde04" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1559.998220] env[62519]: DEBUG oslo_concurrency.lockutils [None req-6a3f141b-7806-43a2-8afd-6877e3266010 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Acquired lock "refresh_cache-a1551278-a306-4534-8d8d-3b3a003dde04" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1559.998220] env[62519]: DEBUG nova.network.neutron [None req-6a3f141b-7806-43a2-8afd-6877e3266010 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] [instance: a1551278-a306-4534-8d8d-3b3a003dde04] Building network info cache for instance {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1560.146403] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-9ac93790-d8fc-4625-a913-ddc460237e69 tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] [instance: 1d4b14d3-8832-457e-aaed-462236555f57] Creating linked-clone VM from snapshot {{(pid=62519) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1560.148461] env[62519]: DEBUG 
oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-f537375d-8078-47d2-a20c-c4819160b099 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1560.166971] env[62519]: DEBUG oslo_vmware.api [None req-075439ca-d8c3-416a-962f-d9ac15fdb3e2 tempest-VolumesAdminNegativeTest-694306887 tempest-VolumesAdminNegativeTest-694306887-project-member] Task: {'id': task-1802384, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1560.174849] env[62519]: DEBUG oslo_vmware.api [None req-9ac93790-d8fc-4625-a913-ddc460237e69 tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] Waiting for the task: (returnval){ [ 1560.174849] env[62519]: value = "task-1802387" [ 1560.174849] env[62519]: _type = "Task" [ 1560.174849] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1560.186337] env[62519]: DEBUG oslo_vmware.api [None req-7525886b-c35e-4300-bd7c-cb7c69519285 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]520bac4f-0f94-9535-6d2d-a8a01efae131, 'name': SearchDatastore_Task, 'duration_secs': 0.013562} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1560.187193] env[62519]: DEBUG oslo_concurrency.lockutils [None req-7525886b-c35e-4300-bd7c-cb7c69519285 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Releasing lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1560.187478] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-7525886b-c35e-4300-bd7c-cb7c69519285 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk to [datastore1] d8780c40-0099-4ccc-84ae-72fbb14fa1ee/d8780c40-0099-4ccc-84ae-72fbb14fa1ee.vmdk {{(pid=62519) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1560.187760] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-9fd3f1aa-b1ef-49d9-9d02-e1066aad5021 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1560.196993] env[62519]: DEBUG oslo_vmware.api [None req-9ac93790-d8fc-4625-a913-ddc460237e69 tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] Task: {'id': task-1802387, 'name': CloneVM_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1560.211428] env[62519]: DEBUG oslo_vmware.api [None req-7525886b-c35e-4300-bd7c-cb7c69519285 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Waiting for the task: (returnval){ [ 1560.211428] env[62519]: value = "task-1802388" [ 1560.211428] env[62519]: _type = "Task" [ 1560.211428] env[62519]: } to complete. 
{{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1560.228614] env[62519]: DEBUG oslo_vmware.api [None req-7525886b-c35e-4300-bd7c-cb7c69519285 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Task: {'id': task-1802388, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1560.260044] env[62519]: DEBUG oslo_vmware.api [None req-37d16fe4-3717-4983-a91a-6f9d45bf9fb5 tempest-ImagesOneServerTestJSON-1959164190 tempest-ImagesOneServerTestJSON-1959164190-project-member] Task: {'id': task-1802385, 'name': CloneVM_Task} progress is 94%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1560.306288] env[62519]: DEBUG oslo_vmware.api [None req-d66de93a-aada-4f3a-931f-5f8ddbb5f1e6 tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] Task: {'id': task-1802383, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1560.368552] env[62519]: DEBUG oslo_concurrency.lockutils [req-8f6a553d-d03b-41ae-b6ec-07a180bd1e06 req-fcd4ee0a-8f96-4842-8fca-3d9e9b6f96bc service nova] Releasing lock "refresh_cache-d8780c40-0099-4ccc-84ae-72fbb14fa1ee" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1560.473892] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-985571a7-3cbb-4a34-861b-0b595a272bc5 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1560.484696] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13c24885-d7e1-4615-8f98-c4289b18bf69 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1560.526333] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6fd92182-8818-4c5e-872b-fc4fa8d37d9f {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1560.537040] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5574b45b-aea0-4c70-8057-d57e1e311e52 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1560.554523] env[62519]: DEBUG nova.compute.provider_tree [None req-d69cb494-e284-4152-aa11-3bbc61cb4189 tempest-ServersWithSpecificFlavorTestJSON-2102225660 tempest-ServersWithSpecificFlavorTestJSON-2102225660-project-member] Inventory has not changed in ProviderTree for provider: f8ca0d98-9158-4b85-ae0e-b106f966dd44 {{(pid=62519) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1560.591065] env[62519]: DEBUG nova.network.neutron [None req-6a3f141b-7806-43a2-8afd-6877e3266010 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] [instance: a1551278-a306-4534-8d8d-3b3a003dde04] Instance cache missing network info. 
{{(pid=62519) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1560.663555] env[62519]: DEBUG oslo_vmware.api [None req-075439ca-d8c3-416a-962f-d9ac15fdb3e2 tempest-VolumesAdminNegativeTest-694306887 tempest-VolumesAdminNegativeTest-694306887-project-member] Task: {'id': task-1802384, 'name': ReconfigVM_Task, 'duration_secs': 0.611632} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1560.663555] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-075439ca-d8c3-416a-962f-d9ac15fdb3e2 tempest-VolumesAdminNegativeTest-694306887 tempest-VolumesAdminNegativeTest-694306887-project-member] [instance: c61c893f-826b-4874-b253-de6fbffa9e5a] Reconfigured VM instance instance-00000010 to attach disk [datastore1] volume-158562d2-c9d2-4053-924e-e9c2c4cb6e65/volume-158562d2-c9d2-4053-924e-e9c2c4cb6e65.vmdk or device None with type thin {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1560.670871] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c1e85e1d-80bc-4272-bf77-00a4953dbfc4 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1560.704847] env[62519]: DEBUG oslo_vmware.api [None req-9ac93790-d8fc-4625-a913-ddc460237e69 tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] Task: {'id': task-1802387, 'name': CloneVM_Task} progress is 94%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1560.705372] env[62519]: DEBUG oslo_vmware.api [None req-075439ca-d8c3-416a-962f-d9ac15fdb3e2 tempest-VolumesAdminNegativeTest-694306887 tempest-VolumesAdminNegativeTest-694306887-project-member] Waiting for the task: (returnval){ [ 1560.705372] env[62519]: value = "task-1802389" [ 1560.705372] env[62519]: _type = "Task" [ 1560.705372] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1560.722162] env[62519]: DEBUG oslo_vmware.api [None req-075439ca-d8c3-416a-962f-d9ac15fdb3e2 tempest-VolumesAdminNegativeTest-694306887 tempest-VolumesAdminNegativeTest-694306887-project-member] Task: {'id': task-1802389, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1560.729503] env[62519]: DEBUG oslo_vmware.api [None req-7525886b-c35e-4300-bd7c-cb7c69519285 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Task: {'id': task-1802388, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1560.757051] env[62519]: DEBUG oslo_vmware.api [None req-37d16fe4-3717-4983-a91a-6f9d45bf9fb5 tempest-ImagesOneServerTestJSON-1959164190 tempest-ImagesOneServerTestJSON-1959164190-project-member] Task: {'id': task-1802385, 'name': CloneVM_Task} progress is 94%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1560.806068] env[62519]: DEBUG oslo_vmware.api [None req-d66de93a-aada-4f3a-931f-5f8ddbb5f1e6 tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] Task: {'id': task-1802383, 'name': PowerOnVM_Task, 'duration_secs': 1.391143} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1560.806408] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-d66de93a-aada-4f3a-931f-5f8ddbb5f1e6 tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] [instance: 24cb49c8-b2ef-4ede-aea6-6e34081beca1] Powered on the VM {{(pid=62519) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1560.806663] env[62519]: INFO nova.compute.manager [None req-d66de93a-aada-4f3a-931f-5f8ddbb5f1e6 tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] [instance: 24cb49c8-b2ef-4ede-aea6-6e34081beca1] Took 10.21 seconds to spawn the instance on the hypervisor. [ 1560.806854] env[62519]: DEBUG nova.compute.manager [None req-d66de93a-aada-4f3a-931f-5f8ddbb5f1e6 tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] [instance: 24cb49c8-b2ef-4ede-aea6-6e34081beca1] Checking state {{(pid=62519) _get_power_state /opt/stack/nova/nova/compute/manager.py:1799}} [ 1560.811444] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a5b376c-ca86-48c5-8674-13d4fd0e7a2d {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1560.936296] env[62519]: DEBUG nova.network.neutron [None req-6a3f141b-7806-43a2-8afd-6877e3266010 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] [instance: a1551278-a306-4534-8d8d-3b3a003dde04] Updating instance_info_cache with network_info: [{"id": "1cf55e56-b406-4e45-9b63-912d4587f930", "address": "fa:16:3e:57:b8:10", "network": {"id": "b4c20e4e-b4f0-4757-9ab0-b74eef10b678", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.171", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "4069337684d348e0a1ab4eb6a1a2b14d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fe99da4f-5630-4afd-918b-b327193d8489", "external-id": "nsx-vlan-transportzone-688", "segmentation_id": 688, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1cf55e56-b4", "ovs_interfaceid": "1cf55e56-b406-4e45-9b63-912d4587f930", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1561.059862] env[62519]: DEBUG nova.scheduler.client.report [None req-d69cb494-e284-4152-aa11-3bbc61cb4189 tempest-ServersWithSpecificFlavorTestJSON-2102225660 tempest-ServersWithSpecificFlavorTestJSON-2102225660-project-member] Inventory has not changed for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 
'max_unit': 157, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1561.208630] env[62519]: DEBUG oslo_vmware.api [None req-9ac93790-d8fc-4625-a913-ddc460237e69 tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] Task: {'id': task-1802387, 'name': CloneVM_Task} progress is 94%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1561.222413] env[62519]: DEBUG oslo_vmware.api [None req-075439ca-d8c3-416a-962f-d9ac15fdb3e2 tempest-VolumesAdminNegativeTest-694306887 tempest-VolumesAdminNegativeTest-694306887-project-member] Task: {'id': task-1802389, 'name': ReconfigVM_Task, 'duration_secs': 0.481345} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1561.223691] env[62519]: DEBUG nova.compute.manager [req-d2af92b5-4efb-45c1-8335-5069afca8617 req-d4098585-1b4f-4814-86ab-2fd0b2a56674 service nova] [instance: a1551278-a306-4534-8d8d-3b3a003dde04] Received event network-vif-plugged-1cf55e56-b406-4e45-9b63-912d4587f930 {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1561.223913] env[62519]: DEBUG oslo_concurrency.lockutils [req-d2af92b5-4efb-45c1-8335-5069afca8617 req-d4098585-1b4f-4814-86ab-2fd0b2a56674 service nova] Acquiring lock "a1551278-a306-4534-8d8d-3b3a003dde04-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1561.224133] env[62519]: DEBUG oslo_concurrency.lockutils [req-d2af92b5-4efb-45c1-8335-5069afca8617 req-d4098585-1b4f-4814-86ab-2fd0b2a56674 service nova] Lock "a1551278-a306-4534-8d8d-3b3a003dde04-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1561.224495] env[62519]: DEBUG oslo_concurrency.lockutils [req-d2af92b5-4efb-45c1-8335-5069afca8617 req-d4098585-1b4f-4814-86ab-2fd0b2a56674 service nova] Lock "a1551278-a306-4534-8d8d-3b3a003dde04-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1561.224495] env[62519]: DEBUG nova.compute.manager [req-d2af92b5-4efb-45c1-8335-5069afca8617 req-d4098585-1b4f-4814-86ab-2fd0b2a56674 service nova] [instance: a1551278-a306-4534-8d8d-3b3a003dde04] No waiting events found dispatching network-vif-plugged-1cf55e56-b406-4e45-9b63-912d4587f930 {{(pid=62519) pop_instance_event /opt/stack/nova/nova/compute/manager.py:323}} [ 1561.225058] env[62519]: WARNING nova.compute.manager [req-d2af92b5-4efb-45c1-8335-5069afca8617 req-d4098585-1b4f-4814-86ab-2fd0b2a56674 service nova] [instance: a1551278-a306-4534-8d8d-3b3a003dde04] Received unexpected event network-vif-plugged-1cf55e56-b406-4e45-9b63-912d4587f930 for instance with vm_state building and task_state spawning. 
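Note on the scheduler/report-client entries above: the inventory dict they print is what placement uses to size this provider, and the schedulable capacity per resource class follows the standard formula (total - reserved) * allocation_ratio, with max_unit capping any single allocation. The short snippet below is illustrative only, not code from this log or from Nova; it just recomputes those figures from the dict exactly as logged (VCPU: (48 - 0) * 4.0 = 192, MEMORY_MB: (196590 - 512) * 1.0 = 196078, DISK_GB: (400 - 0) * 1.0 = 400).

    # Illustrative sketch: recompute placement capacity from the inventory
    # values printed in the report-client entries above.
    inventory = {
        'VCPU': {'total': 48, 'reserved': 0, 'max_unit': 16, 'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'max_unit': 65530, 'allocation_ratio': 1.0},
        'DISK_GB': {'total': 400, 'reserved': 0, 'max_unit': 157, 'allocation_ratio': 1.0},
    }

    for rc, inv in inventory.items():
        # Capacity visible to the scheduler: (total - reserved) * allocation_ratio.
        capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
        # max_unit bounds what one instance may consume from this resource class.
        print(f"{rc}: capacity={capacity:g}, largest single allocation={inv['max_unit']}")
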
[ 1561.225260] env[62519]: DEBUG nova.compute.manager [req-d2af92b5-4efb-45c1-8335-5069afca8617 req-d4098585-1b4f-4814-86ab-2fd0b2a56674 service nova] [instance: a1551278-a306-4534-8d8d-3b3a003dde04] Received event network-changed-1cf55e56-b406-4e45-9b63-912d4587f930 {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1561.225499] env[62519]: DEBUG nova.compute.manager [req-d2af92b5-4efb-45c1-8335-5069afca8617 req-d4098585-1b4f-4814-86ab-2fd0b2a56674 service nova] [instance: a1551278-a306-4534-8d8d-3b3a003dde04] Refreshing instance network info cache due to event network-changed-1cf55e56-b406-4e45-9b63-912d4587f930. {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11628}} [ 1561.225690] env[62519]: DEBUG oslo_concurrency.lockutils [req-d2af92b5-4efb-45c1-8335-5069afca8617 req-d4098585-1b4f-4814-86ab-2fd0b2a56674 service nova] Acquiring lock "refresh_cache-a1551278-a306-4534-8d8d-3b3a003dde04" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1561.227280] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-075439ca-d8c3-416a-962f-d9ac15fdb3e2 tempest-VolumesAdminNegativeTest-694306887 tempest-VolumesAdminNegativeTest-694306887-project-member] [instance: c61c893f-826b-4874-b253-de6fbffa9e5a] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-373696', 'volume_id': '158562d2-c9d2-4053-924e-e9c2c4cb6e65', 'name': 'volume-158562d2-c9d2-4053-924e-e9c2c4cb6e65', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'c61c893f-826b-4874-b253-de6fbffa9e5a', 'attached_at': '', 'detached_at': '', 'volume_id': '158562d2-c9d2-4053-924e-e9c2c4cb6e65', 'serial': '158562d2-c9d2-4053-924e-e9c2c4cb6e65'} {{(pid=62519) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1561.235300] env[62519]: DEBUG oslo_vmware.api [None req-7525886b-c35e-4300-bd7c-cb7c69519285 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Task: {'id': task-1802388, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.813403} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1561.235300] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-7525886b-c35e-4300-bd7c-cb7c69519285 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk to [datastore1] d8780c40-0099-4ccc-84ae-72fbb14fa1ee/d8780c40-0099-4ccc-84ae-72fbb14fa1ee.vmdk {{(pid=62519) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1561.235300] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-7525886b-c35e-4300-bd7c-cb7c69519285 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] [instance: d8780c40-0099-4ccc-84ae-72fbb14fa1ee] Extending root virtual disk to 1048576 {{(pid=62519) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1561.235521] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-99a693ba-0bc0-4eec-b84a-6a0e01e15905 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1561.250157] env[62519]: DEBUG oslo_vmware.api [None req-7525886b-c35e-4300-bd7c-cb7c69519285 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Waiting for the task: (returnval){ [ 1561.250157] env[62519]: value = "task-1802390" [ 1561.250157] env[62519]: _type = "Task" [ 1561.250157] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1561.257734] env[62519]: DEBUG oslo_vmware.api [None req-37d16fe4-3717-4983-a91a-6f9d45bf9fb5 tempest-ImagesOneServerTestJSON-1959164190 tempest-ImagesOneServerTestJSON-1959164190-project-member] Task: {'id': task-1802385, 'name': CloneVM_Task} progress is 95%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1561.265157] env[62519]: DEBUG oslo_vmware.api [None req-7525886b-c35e-4300-bd7c-cb7c69519285 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Task: {'id': task-1802390, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1561.335232] env[62519]: INFO nova.compute.manager [None req-d66de93a-aada-4f3a-931f-5f8ddbb5f1e6 tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] [instance: 24cb49c8-b2ef-4ede-aea6-6e34081beca1] Took 46.66 seconds to build instance. 
[ 1561.442022] env[62519]: DEBUG oslo_concurrency.lockutils [None req-6a3f141b-7806-43a2-8afd-6877e3266010 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Releasing lock "refresh_cache-a1551278-a306-4534-8d8d-3b3a003dde04" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1561.442022] env[62519]: DEBUG nova.compute.manager [None req-6a3f141b-7806-43a2-8afd-6877e3266010 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] [instance: a1551278-a306-4534-8d8d-3b3a003dde04] Instance network_info: |[{"id": "1cf55e56-b406-4e45-9b63-912d4587f930", "address": "fa:16:3e:57:b8:10", "network": {"id": "b4c20e4e-b4f0-4757-9ab0-b74eef10b678", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.171", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "4069337684d348e0a1ab4eb6a1a2b14d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fe99da4f-5630-4afd-918b-b327193d8489", "external-id": "nsx-vlan-transportzone-688", "segmentation_id": 688, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1cf55e56-b4", "ovs_interfaceid": "1cf55e56-b406-4e45-9b63-912d4587f930", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62519) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2004}} [ 1561.442022] env[62519]: DEBUG oslo_concurrency.lockutils [req-d2af92b5-4efb-45c1-8335-5069afca8617 req-d4098585-1b4f-4814-86ab-2fd0b2a56674 service nova] Acquired lock "refresh_cache-a1551278-a306-4534-8d8d-3b3a003dde04" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1561.442022] env[62519]: DEBUG nova.network.neutron [req-d2af92b5-4efb-45c1-8335-5069afca8617 req-d4098585-1b4f-4814-86ab-2fd0b2a56674 service nova] [instance: a1551278-a306-4534-8d8d-3b3a003dde04] Refreshing network info cache for port 1cf55e56-b406-4e45-9b63-912d4587f930 {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1561.442022] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-6a3f141b-7806-43a2-8afd-6877e3266010 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] [instance: a1551278-a306-4534-8d8d-3b3a003dde04] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:57:b8:10', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'fe99da4f-5630-4afd-918b-b327193d8489', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '1cf55e56-b406-4e45-9b63-912d4587f930', 'vif_model': 'vmxnet3'}] {{(pid=62519) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1561.450717] env[62519]: DEBUG oslo.service.loopingcall [None req-6a3f141b-7806-43a2-8afd-6877e3266010 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62519) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1561.452079] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a1551278-a306-4534-8d8d-3b3a003dde04] Creating VM on the ESX host {{(pid=62519) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1561.452498] env[62519]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d5da0c4a-4e4a-4543-8208-3994ead5c04c {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1561.476270] env[62519]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1561.476270] env[62519]: value = "task-1802391" [ 1561.476270] env[62519]: _type = "Task" [ 1561.476270] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1561.486189] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1802391, 'name': CreateVM_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1561.566159] env[62519]: DEBUG oslo_concurrency.lockutils [None req-d69cb494-e284-4152-aa11-3bbc61cb4189 tempest-ServersWithSpecificFlavorTestJSON-2102225660 tempest-ServersWithSpecificFlavorTestJSON-2102225660-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.310s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1561.568931] env[62519]: DEBUG oslo_concurrency.lockutils [None req-46f36def-b286-445c-9c6e-71911fda02f6 tempest-ServersV294TestFqdnHostnames-312089260 tempest-ServersV294TestFqdnHostnames-312089260-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 31.730s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1561.568931] env[62519]: DEBUG nova.objects.instance [None req-46f36def-b286-445c-9c6e-71911fda02f6 tempest-ServersV294TestFqdnHostnames-312089260 tempest-ServersV294TestFqdnHostnames-312089260-project-member] Lazy-loading 'resources' on Instance uuid 2b391628-18a2-4606-8c59-58ba642cee50 {{(pid=62519) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1561.588316] env[62519]: INFO nova.scheduler.client.report [None req-d69cb494-e284-4152-aa11-3bbc61cb4189 tempest-ServersWithSpecificFlavorTestJSON-2102225660 tempest-ServersWithSpecificFlavorTestJSON-2102225660-project-member] Deleted allocations for instance 029ee07c-705d-452b-9b14-385d69f2fbbb [ 1561.706771] env[62519]: DEBUG oslo_vmware.api [None req-9ac93790-d8fc-4625-a913-ddc460237e69 tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] Task: {'id': task-1802387, 'name': CloneVM_Task} progress is 94%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1561.767989] env[62519]: DEBUG oslo_vmware.api [None req-7525886b-c35e-4300-bd7c-cb7c69519285 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Task: {'id': task-1802390, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.093068} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1561.773175] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-7525886b-c35e-4300-bd7c-cb7c69519285 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] [instance: d8780c40-0099-4ccc-84ae-72fbb14fa1ee] Extended root virtual disk {{(pid=62519) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1561.773453] env[62519]: DEBUG oslo_vmware.api [None req-37d16fe4-3717-4983-a91a-6f9d45bf9fb5 tempest-ImagesOneServerTestJSON-1959164190 tempest-ImagesOneServerTestJSON-1959164190-project-member] Task: {'id': task-1802385, 'name': CloneVM_Task, 'duration_secs': 1.872795} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1561.774244] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-856527f7-916a-45f1-a79b-75889b5889a8 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1561.780196] env[62519]: INFO nova.virt.vmwareapi.vmops [None req-37d16fe4-3717-4983-a91a-6f9d45bf9fb5 tempest-ImagesOneServerTestJSON-1959164190 tempest-ImagesOneServerTestJSON-1959164190-project-member] [instance: 4e3dee19-b99a-4257-88da-1b0531e2c0f9] Created linked-clone VM from snapshot [ 1561.780196] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f25a47ac-72ee-4296-a2f0-1aebe44b85e5 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1561.809991] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-7525886b-c35e-4300-bd7c-cb7c69519285 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] [instance: d8780c40-0099-4ccc-84ae-72fbb14fa1ee] Reconfiguring VM instance instance-0000002b to attach disk [datastore1] d8780c40-0099-4ccc-84ae-72fbb14fa1ee/d8780c40-0099-4ccc-84ae-72fbb14fa1ee.vmdk or device None with type sparse {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1561.815701] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-74da3c5f-e9a3-473c-bd8d-dc8a079c7cd8 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1561.836049] env[62519]: DEBUG nova.virt.vmwareapi.images [None req-37d16fe4-3717-4983-a91a-6f9d45bf9fb5 tempest-ImagesOneServerTestJSON-1959164190 tempest-ImagesOneServerTestJSON-1959164190-project-member] [instance: 4e3dee19-b99a-4257-88da-1b0531e2c0f9] Uploading image a92e4537-8ad4-4d13-800c-5431b02914af {{(pid=62519) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1561.837591] env[62519]: DEBUG oslo_concurrency.lockutils [None req-484beabb-56ac-464b-a93a-122561b99fee tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Acquiring lock "46b3a0fb-29f6-4b66-a091-2d125b69d109" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1561.837906] env[62519]: DEBUG oslo_concurrency.lockutils [None req-484beabb-56ac-464b-a93a-122561b99fee tempest-AttachVolumeNegativeTest-889445199 
tempest-AttachVolumeNegativeTest-889445199-project-member] Lock "46b3a0fb-29f6-4b66-a091-2d125b69d109" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1561.846617] env[62519]: DEBUG oslo_vmware.api [None req-7525886b-c35e-4300-bd7c-cb7c69519285 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Waiting for the task: (returnval){ [ 1561.846617] env[62519]: value = "task-1802392" [ 1561.846617] env[62519]: _type = "Task" [ 1561.846617] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1561.859477] env[62519]: DEBUG oslo_vmware.api [None req-7525886b-c35e-4300-bd7c-cb7c69519285 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Task: {'id': task-1802392, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1561.862405] env[62519]: DEBUG oslo_vmware.rw_handles [None req-37d16fe4-3717-4983-a91a-6f9d45bf9fb5 tempest-ImagesOneServerTestJSON-1959164190 tempest-ImagesOneServerTestJSON-1959164190-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1561.862405] env[62519]: value = "vm-373700" [ 1561.862405] env[62519]: _type = "VirtualMachine" [ 1561.862405] env[62519]: }. {{(pid=62519) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1561.862840] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-8b6be9a5-22b7-411d-b8f9-4b935b90f368 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1561.872040] env[62519]: DEBUG oslo_vmware.rw_handles [None req-37d16fe4-3717-4983-a91a-6f9d45bf9fb5 tempest-ImagesOneServerTestJSON-1959164190 tempest-ImagesOneServerTestJSON-1959164190-project-member] Lease: (returnval){ [ 1561.872040] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]52e2de84-e44f-7ce3-57cb-c965ec802399" [ 1561.872040] env[62519]: _type = "HttpNfcLease" [ 1561.872040] env[62519]: } obtained for exporting VM: (result){ [ 1561.872040] env[62519]: value = "vm-373700" [ 1561.872040] env[62519]: _type = "VirtualMachine" [ 1561.872040] env[62519]: }. {{(pid=62519) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1561.872442] env[62519]: DEBUG oslo_vmware.api [None req-37d16fe4-3717-4983-a91a-6f9d45bf9fb5 tempest-ImagesOneServerTestJSON-1959164190 tempest-ImagesOneServerTestJSON-1959164190-project-member] Waiting for the lease: (returnval){ [ 1561.872442] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]52e2de84-e44f-7ce3-57cb-c965ec802399" [ 1561.872442] env[62519]: _type = "HttpNfcLease" [ 1561.872442] env[62519]: } to be ready. {{(pid=62519) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1561.882900] env[62519]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1561.882900] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]52e2de84-e44f-7ce3-57cb-c965ec802399" [ 1561.882900] env[62519]: _type = "HttpNfcLease" [ 1561.882900] env[62519]: } is initializing. 
{{(pid=62519) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1561.986306] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1802391, 'name': CreateVM_Task} progress is 25%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1562.097085] env[62519]: DEBUG oslo_concurrency.lockutils [None req-d69cb494-e284-4152-aa11-3bbc61cb4189 tempest-ServersWithSpecificFlavorTestJSON-2102225660 tempest-ServersWithSpecificFlavorTestJSON-2102225660-project-member] Lock "029ee07c-705d-452b-9b14-385d69f2fbbb" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 37.246s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1562.209123] env[62519]: DEBUG oslo_vmware.api [None req-9ac93790-d8fc-4625-a913-ddc460237e69 tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] Task: {'id': task-1802387, 'name': CloneVM_Task} progress is 95%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1562.285719] env[62519]: DEBUG nova.objects.instance [None req-075439ca-d8c3-416a-962f-d9ac15fdb3e2 tempest-VolumesAdminNegativeTest-694306887 tempest-VolumesAdminNegativeTest-694306887-project-member] Lazy-loading 'flavor' on Instance uuid c61c893f-826b-4874-b253-de6fbffa9e5a {{(pid=62519) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1562.332340] env[62519]: DEBUG nova.network.neutron [req-d2af92b5-4efb-45c1-8335-5069afca8617 req-d4098585-1b4f-4814-86ab-2fd0b2a56674 service nova] [instance: a1551278-a306-4534-8d8d-3b3a003dde04] Updated VIF entry in instance network info cache for port 1cf55e56-b406-4e45-9b63-912d4587f930. 
{{(pid=62519) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1562.332789] env[62519]: DEBUG nova.network.neutron [req-d2af92b5-4efb-45c1-8335-5069afca8617 req-d4098585-1b4f-4814-86ab-2fd0b2a56674 service nova] [instance: a1551278-a306-4534-8d8d-3b3a003dde04] Updating instance_info_cache with network_info: [{"id": "1cf55e56-b406-4e45-9b63-912d4587f930", "address": "fa:16:3e:57:b8:10", "network": {"id": "b4c20e4e-b4f0-4757-9ab0-b74eef10b678", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.171", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "4069337684d348e0a1ab4eb6a1a2b14d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fe99da4f-5630-4afd-918b-b327193d8489", "external-id": "nsx-vlan-transportzone-688", "segmentation_id": 688, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1cf55e56-b4", "ovs_interfaceid": "1cf55e56-b406-4e45-9b63-912d4587f930", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1562.362105] env[62519]: DEBUG oslo_vmware.api [None req-7525886b-c35e-4300-bd7c-cb7c69519285 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Task: {'id': task-1802392, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1562.381529] env[62519]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1562.381529] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]52e2de84-e44f-7ce3-57cb-c965ec802399" [ 1562.381529] env[62519]: _type = "HttpNfcLease" [ 1562.381529] env[62519]: } is ready. {{(pid=62519) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1562.381833] env[62519]: DEBUG oslo_vmware.rw_handles [None req-37d16fe4-3717-4983-a91a-6f9d45bf9fb5 tempest-ImagesOneServerTestJSON-1959164190 tempest-ImagesOneServerTestJSON-1959164190-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1562.381833] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]52e2de84-e44f-7ce3-57cb-c965ec802399" [ 1562.381833] env[62519]: _type = "HttpNfcLease" [ 1562.381833] env[62519]: }. {{(pid=62519) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1562.382598] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f891547b-e8c6-4285-bbc4-a2365a82cee0 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1562.394766] env[62519]: DEBUG oslo_vmware.rw_handles [None req-37d16fe4-3717-4983-a91a-6f9d45bf9fb5 tempest-ImagesOneServerTestJSON-1959164190 tempest-ImagesOneServerTestJSON-1959164190-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52cc16c9-945a-df38-d2ec-e61f75ee6b08/disk-0.vmdk from lease info. 
{{(pid=62519) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1562.394766] env[62519]: DEBUG oslo_vmware.rw_handles [None req-37d16fe4-3717-4983-a91a-6f9d45bf9fb5 tempest-ImagesOneServerTestJSON-1959164190 tempest-ImagesOneServerTestJSON-1959164190-project-member] Opening URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52cc16c9-945a-df38-d2ec-e61f75ee6b08/disk-0.vmdk for reading. {{(pid=62519) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1562.494775] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1802391, 'name': CreateVM_Task, 'duration_secs': 0.755535} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1562.495067] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a1551278-a306-4534-8d8d-3b3a003dde04] Created VM on the ESX host {{(pid=62519) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1562.495862] env[62519]: DEBUG oslo_concurrency.lockutils [None req-6a3f141b-7806-43a2-8afd-6877e3266010 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1562.496043] env[62519]: DEBUG oslo_concurrency.lockutils [None req-6a3f141b-7806-43a2-8afd-6877e3266010 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Acquired lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1562.496366] env[62519]: DEBUG oslo_concurrency.lockutils [None req-6a3f141b-7806-43a2-8afd-6877e3266010 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1562.496708] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-eb2f33a2-50a2-4346-8038-647e70b6fdee {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1562.505575] env[62519]: DEBUG oslo_vmware.api [None req-6a3f141b-7806-43a2-8afd-6877e3266010 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Waiting for the task: (returnval){ [ 1562.505575] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]528726ac-dc1c-7081-90b2-876f656410f1" [ 1562.505575] env[62519]: _type = "Task" [ 1562.505575] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1562.514334] env[62519]: DEBUG oslo_vmware.api [None req-6a3f141b-7806-43a2-8afd-6877e3266010 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]528726ac-dc1c-7081-90b2-876f656410f1, 'name': SearchDatastore_Task} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1562.514613] env[62519]: DEBUG oslo_concurrency.lockutils [None req-6a3f141b-7806-43a2-8afd-6877e3266010 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Releasing lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1562.514912] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-6a3f141b-7806-43a2-8afd-6877e3266010 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] [instance: a1551278-a306-4534-8d8d-3b3a003dde04] Processing image 15793716-f1d9-4a86-9030-717adf498693 {{(pid=62519) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1562.515095] env[62519]: DEBUG oslo_concurrency.lockutils [None req-6a3f141b-7806-43a2-8afd-6877e3266010 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1562.515252] env[62519]: DEBUG oslo_concurrency.lockutils [None req-6a3f141b-7806-43a2-8afd-6877e3266010 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Acquired lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1562.515425] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-6a3f141b-7806-43a2-8afd-6877e3266010 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62519) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1562.517905] env[62519]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-0881a47f-26eb-41e9-aa78-b54f81396ccb {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1562.527293] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-6a3f141b-7806-43a2-8afd-6877e3266010 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62519) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1562.527514] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-6a3f141b-7806-43a2-8afd-6877e3266010 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62519) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1562.528245] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-587b163c-07f6-4000-876b-7dcd0703b2b2 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1562.533778] env[62519]: DEBUG oslo_vmware.api [None req-6a3f141b-7806-43a2-8afd-6877e3266010 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Waiting for the task: (returnval){ [ 1562.533778] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]5244ab43-a94b-0569-3a38-3685e1702433" [ 1562.533778] env[62519]: _type = "Task" [ 1562.533778] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1562.545243] env[62519]: DEBUG oslo_vmware.api [None req-6a3f141b-7806-43a2-8afd-6877e3266010 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]5244ab43-a94b-0569-3a38-3685e1702433, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1562.592566] env[62519]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-d442a815-c3b4-4ec5-8e64-e2b984b89ed4 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1562.685850] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02d28901-e98e-4db4-8b92-8c6c808c8f7a {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1562.695029] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-adaa1cbc-3784-44b5-99db-bcf3c5c6774e {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1562.707291] env[62519]: DEBUG oslo_vmware.api [None req-9ac93790-d8fc-4625-a913-ddc460237e69 tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] Task: {'id': task-1802387, 'name': CloneVM_Task, 'duration_secs': 2.193642} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1562.733245] env[62519]: INFO nova.virt.vmwareapi.vmops [None req-9ac93790-d8fc-4625-a913-ddc460237e69 tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] [instance: 1d4b14d3-8832-457e-aaed-462236555f57] Created linked-clone VM from snapshot [ 1562.734280] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65a6a692-6580-4c4d-83cd-a947c3aa938c {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1562.737386] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-941b47f0-de8c-4247-b03e-9216c5093f8d {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1562.750528] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a82859a-4e18-4d24-9b2d-7510f5868ede {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1562.756326] env[62519]: DEBUG nova.virt.vmwareapi.images [None req-9ac93790-d8fc-4625-a913-ddc460237e69 tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] [instance: 1d4b14d3-8832-457e-aaed-462236555f57] Uploading image c9199067-5b3c-428f-8215-64d5188a0c37 {{(pid=62519) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1562.770390] env[62519]: DEBUG nova.compute.provider_tree [None req-46f36def-b286-445c-9c6e-71911fda02f6 tempest-ServersV294TestFqdnHostnames-312089260 tempest-ServersV294TestFqdnHostnames-312089260-project-member] Inventory has not changed in ProviderTree for provider: f8ca0d98-9158-4b85-ae0e-b106f966dd44 {{(pid=62519) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1562.781913] env[62519]: DEBUG oslo_vmware.rw_handles [None req-9ac93790-d8fc-4625-a913-ddc460237e69 tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1562.781913] env[62519]: value = "vm-373702" [ 1562.781913] env[62519]: _type = "VirtualMachine" [ 1562.781913] env[62519]: }. 
{{(pid=62519) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1562.782833] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-ed172d44-5605-4a96-9c69-f5e1f49fa1d0 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1562.791385] env[62519]: DEBUG oslo_concurrency.lockutils [None req-075439ca-d8c3-416a-962f-d9ac15fdb3e2 tempest-VolumesAdminNegativeTest-694306887 tempest-VolumesAdminNegativeTest-694306887-project-member] Lock "c61c893f-826b-4874-b253-de6fbffa9e5a" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 8.357s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1562.793833] env[62519]: DEBUG oslo_vmware.rw_handles [None req-9ac93790-d8fc-4625-a913-ddc460237e69 tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] Lease: (returnval){ [ 1562.793833] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]5261c1e0-4580-25b3-2af4-3e4f6167e0ac" [ 1562.793833] env[62519]: _type = "HttpNfcLease" [ 1562.793833] env[62519]: } obtained for exporting VM: (result){ [ 1562.793833] env[62519]: value = "vm-373702" [ 1562.793833] env[62519]: _type = "VirtualMachine" [ 1562.793833] env[62519]: }. {{(pid=62519) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1562.793833] env[62519]: DEBUG oslo_vmware.api [None req-9ac93790-d8fc-4625-a913-ddc460237e69 tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] Waiting for the lease: (returnval){ [ 1562.793833] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]5261c1e0-4580-25b3-2af4-3e4f6167e0ac" [ 1562.793833] env[62519]: _type = "HttpNfcLease" [ 1562.793833] env[62519]: } to be ready. {{(pid=62519) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1562.802284] env[62519]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1562.802284] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]5261c1e0-4580-25b3-2af4-3e4f6167e0ac" [ 1562.802284] env[62519]: _type = "HttpNfcLease" [ 1562.802284] env[62519]: } is initializing. 
{{(pid=62519) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1562.836149] env[62519]: DEBUG oslo_concurrency.lockutils [req-d2af92b5-4efb-45c1-8335-5069afca8617 req-d4098585-1b4f-4814-86ab-2fd0b2a56674 service nova] Releasing lock "refresh_cache-a1551278-a306-4534-8d8d-3b3a003dde04" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1562.844527] env[62519]: DEBUG oslo_concurrency.lockutils [None req-d66de93a-aada-4f3a-931f-5f8ddbb5f1e6 tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] Lock "24cb49c8-b2ef-4ede-aea6-6e34081beca1" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 109.232s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1562.863031] env[62519]: DEBUG oslo_vmware.api [None req-7525886b-c35e-4300-bd7c-cb7c69519285 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Task: {'id': task-1802392, 'name': ReconfigVM_Task, 'duration_secs': 0.6448} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1562.863924] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-7525886b-c35e-4300-bd7c-cb7c69519285 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] [instance: d8780c40-0099-4ccc-84ae-72fbb14fa1ee] Reconfigured VM instance instance-0000002b to attach disk [datastore1] d8780c40-0099-4ccc-84ae-72fbb14fa1ee/d8780c40-0099-4ccc-84ae-72fbb14fa1ee.vmdk or device None with type sparse {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1562.865133] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-e514c339-6d1c-48e5-a65e-bef87a4650a3 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1562.877984] env[62519]: DEBUG oslo_vmware.api [None req-7525886b-c35e-4300-bd7c-cb7c69519285 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Waiting for the task: (returnval){ [ 1562.877984] env[62519]: value = "task-1802396" [ 1562.877984] env[62519]: _type = "Task" [ 1562.877984] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1562.891338] env[62519]: DEBUG oslo_vmware.api [None req-7525886b-c35e-4300-bd7c-cb7c69519285 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Task: {'id': task-1802396, 'name': Rename_Task} progress is 5%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1563.048462] env[62519]: DEBUG oslo_vmware.api [None req-6a3f141b-7806-43a2-8afd-6877e3266010 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]5244ab43-a94b-0569-3a38-3685e1702433, 'name': SearchDatastore_Task, 'duration_secs': 0.016024} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1563.049690] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ef8ec9a4-a3e3-4cd7-aa7f-9b970b4cc639 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1563.057439] env[62519]: DEBUG oslo_vmware.api [None req-6a3f141b-7806-43a2-8afd-6877e3266010 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Waiting for the task: (returnval){ [ 1563.057439] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]521629b0-2667-0b58-ba74-40db6668d805" [ 1563.057439] env[62519]: _type = "Task" [ 1563.057439] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1563.068453] env[62519]: DEBUG oslo_vmware.api [None req-6a3f141b-7806-43a2-8afd-6877e3266010 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]521629b0-2667-0b58-ba74-40db6668d805, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1563.278352] env[62519]: DEBUG nova.scheduler.client.report [None req-46f36def-b286-445c-9c6e-71911fda02f6 tempest-ServersV294TestFqdnHostnames-312089260 tempest-ServersV294TestFqdnHostnames-312089260-project-member] Inventory has not changed for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 157, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1563.311571] env[62519]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1563.311571] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]5261c1e0-4580-25b3-2af4-3e4f6167e0ac" [ 1563.311571] env[62519]: _type = "HttpNfcLease" [ 1563.311571] env[62519]: } is ready. {{(pid=62519) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1563.312583] env[62519]: DEBUG oslo_vmware.rw_handles [None req-9ac93790-d8fc-4625-a913-ddc460237e69 tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1563.312583] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]5261c1e0-4580-25b3-2af4-3e4f6167e0ac" [ 1563.312583] env[62519]: _type = "HttpNfcLease" [ 1563.312583] env[62519]: }. 
{{(pid=62519) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1563.313792] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f059b844-36f8-4d17-bd52-2658980989c5 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1563.326326] env[62519]: DEBUG oslo_vmware.rw_handles [None req-9ac93790-d8fc-4625-a913-ddc460237e69 tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52fb5ba8-516d-8ff6-7d23-fda0206caf65/disk-0.vmdk from lease info. {{(pid=62519) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1563.326913] env[62519]: DEBUG oslo_vmware.rw_handles [None req-9ac93790-d8fc-4625-a913-ddc460237e69 tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] Opening URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52fb5ba8-516d-8ff6-7d23-fda0206caf65/disk-0.vmdk for reading. {{(pid=62519) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1563.387382] env[62519]: DEBUG nova.compute.manager [None req-77454b93-3f15-4824-8029-76272dd44c6c tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] [instance: 8b20b91d-d4e7-4ec2-88ec-76b444a8d8a8] Starting instance... {{(pid=62519) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2441}} [ 1563.409375] env[62519]: DEBUG oslo_vmware.api [None req-7525886b-c35e-4300-bd7c-cb7c69519285 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Task: {'id': task-1802396, 'name': Rename_Task, 'duration_secs': 0.176588} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1563.410175] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-7525886b-c35e-4300-bd7c-cb7c69519285 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] [instance: d8780c40-0099-4ccc-84ae-72fbb14fa1ee] Powering on the VM {{(pid=62519) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1563.410651] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b63d384f-11fd-4e94-a15a-0795ce8ffdaf {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1563.421191] env[62519]: DEBUG oslo_vmware.api [None req-7525886b-c35e-4300-bd7c-cb7c69519285 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Waiting for the task: (returnval){ [ 1563.421191] env[62519]: value = "task-1802397" [ 1563.421191] env[62519]: _type = "Task" [ 1563.421191] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1563.430852] env[62519]: DEBUG oslo_vmware.api [None req-7525886b-c35e-4300-bd7c-cb7c69519285 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Task: {'id': task-1802397, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1563.442321] env[62519]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-c29f21f2-374e-4dcd-8afe-48636ad51727 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1563.530200] env[62519]: DEBUG oslo_concurrency.lockutils [None req-0faa435d-2a45-4d3f-8816-480eac7f98db tempest-VolumesAdminNegativeTest-694306887 tempest-VolumesAdminNegativeTest-694306887-project-member] Acquiring lock "c61c893f-826b-4874-b253-de6fbffa9e5a" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1563.530200] env[62519]: DEBUG oslo_concurrency.lockutils [None req-0faa435d-2a45-4d3f-8816-480eac7f98db tempest-VolumesAdminNegativeTest-694306887 tempest-VolumesAdminNegativeTest-694306887-project-member] Lock "c61c893f-826b-4874-b253-de6fbffa9e5a" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.001s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1563.570349] env[62519]: DEBUG oslo_vmware.api [None req-6a3f141b-7806-43a2-8afd-6877e3266010 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]521629b0-2667-0b58-ba74-40db6668d805, 'name': SearchDatastore_Task, 'duration_secs': 0.010499} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1563.570652] env[62519]: DEBUG oslo_concurrency.lockutils [None req-6a3f141b-7806-43a2-8afd-6877e3266010 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Releasing lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1563.570947] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-6a3f141b-7806-43a2-8afd-6877e3266010 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk to [datastore1] a1551278-a306-4534-8d8d-3b3a003dde04/a1551278-a306-4534-8d8d-3b3a003dde04.vmdk {{(pid=62519) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1563.571627] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-afd2e9ff-82f2-4de2-b259-9896884ecbe6 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1563.581371] env[62519]: DEBUG oslo_vmware.api [None req-6a3f141b-7806-43a2-8afd-6877e3266010 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Waiting for the task: (returnval){ [ 1563.581371] env[62519]: value = "task-1802398" [ 1563.581371] env[62519]: _type = "Task" [ 1563.581371] env[62519]: } to complete. 
{{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1563.592200] env[62519]: DEBUG oslo_vmware.api [None req-6a3f141b-7806-43a2-8afd-6877e3266010 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Task: {'id': task-1802398, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1563.786263] env[62519]: DEBUG oslo_concurrency.lockutils [None req-46f36def-b286-445c-9c6e-71911fda02f6 tempest-ServersV294TestFqdnHostnames-312089260 tempest-ServersV294TestFqdnHostnames-312089260-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.217s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1563.789048] env[62519]: DEBUG oslo_concurrency.lockutils [None req-4073dc01-e5b2-4480-9b24-60e6c1491ad2 tempest-ImagesNegativeTestJSON-1194326905 tempest-ImagesNegativeTestJSON-1194326905-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 33.737s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1563.791056] env[62519]: INFO nova.compute.claims [None req-4073dc01-e5b2-4480-9b24-60e6c1491ad2 tempest-ImagesNegativeTestJSON-1194326905 tempest-ImagesNegativeTestJSON-1194326905-project-member] [instance: 1b4efc0f-474f-4a32-b199-c14f27b183e2] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1563.816656] env[62519]: INFO nova.scheduler.client.report [None req-46f36def-b286-445c-9c6e-71911fda02f6 tempest-ServersV294TestFqdnHostnames-312089260 tempest-ServersV294TestFqdnHostnames-312089260-project-member] Deleted allocations for instance 2b391628-18a2-4606-8c59-58ba642cee50 [ 1563.915416] env[62519]: DEBUG oslo_concurrency.lockutils [None req-77454b93-3f15-4824-8029-76272dd44c6c tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1563.935156] env[62519]: DEBUG oslo_vmware.api [None req-7525886b-c35e-4300-bd7c-cb7c69519285 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Task: {'id': task-1802397, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1564.033871] env[62519]: INFO nova.compute.manager [None req-0faa435d-2a45-4d3f-8816-480eac7f98db tempest-VolumesAdminNegativeTest-694306887 tempest-VolumesAdminNegativeTest-694306887-project-member] [instance: c61c893f-826b-4874-b253-de6fbffa9e5a] Detaching volume 158562d2-c9d2-4053-924e-e9c2c4cb6e65 [ 1564.077715] env[62519]: INFO nova.virt.block_device [None req-0faa435d-2a45-4d3f-8816-480eac7f98db tempest-VolumesAdminNegativeTest-694306887 tempest-VolumesAdminNegativeTest-694306887-project-member] [instance: c61c893f-826b-4874-b253-de6fbffa9e5a] Attempting to driver detach volume 158562d2-c9d2-4053-924e-e9c2c4cb6e65 from mountpoint /dev/sdb [ 1564.077993] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-0faa435d-2a45-4d3f-8816-480eac7f98db tempest-VolumesAdminNegativeTest-694306887 tempest-VolumesAdminNegativeTest-694306887-project-member] [instance: c61c893f-826b-4874-b253-de6fbffa9e5a] Volume detach. Driver type: vmdk {{(pid=62519) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1564.078365] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-0faa435d-2a45-4d3f-8816-480eac7f98db tempest-VolumesAdminNegativeTest-694306887 tempest-VolumesAdminNegativeTest-694306887-project-member] [instance: c61c893f-826b-4874-b253-de6fbffa9e5a] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-373696', 'volume_id': '158562d2-c9d2-4053-924e-e9c2c4cb6e65', 'name': 'volume-158562d2-c9d2-4053-924e-e9c2c4cb6e65', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'c61c893f-826b-4874-b253-de6fbffa9e5a', 'attached_at': '', 'detached_at': '', 'volume_id': '158562d2-c9d2-4053-924e-e9c2c4cb6e65', 'serial': '158562d2-c9d2-4053-924e-e9c2c4cb6e65'} {{(pid=62519) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1564.079626] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88477f72-5d50-4f74-8632-e31a5c6f29b3 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1564.096587] env[62519]: DEBUG oslo_vmware.api [None req-6a3f141b-7806-43a2-8afd-6877e3266010 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Task: {'id': task-1802398, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1564.117628] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7261ff06-ac24-46a8-aac3-0d781f91a12e {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1564.130492] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e3cb41b-4600-4bab-9e53-87dd10dc10d6 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1564.156182] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36fc2832-1be9-4918-bb3c-1537c6afa9f1 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1564.178311] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-0faa435d-2a45-4d3f-8816-480eac7f98db tempest-VolumesAdminNegativeTest-694306887 tempest-VolumesAdminNegativeTest-694306887-project-member] The volume has not been displaced from its original location: [datastore1] volume-158562d2-c9d2-4053-924e-e9c2c4cb6e65/volume-158562d2-c9d2-4053-924e-e9c2c4cb6e65.vmdk. No consolidation needed. {{(pid=62519) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1564.184099] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-0faa435d-2a45-4d3f-8816-480eac7f98db tempest-VolumesAdminNegativeTest-694306887 tempest-VolumesAdminNegativeTest-694306887-project-member] [instance: c61c893f-826b-4874-b253-de6fbffa9e5a] Reconfiguring VM instance instance-00000010 to detach disk 2001 {{(pid=62519) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1564.184637] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9c3a9603-9cf7-4144-b530-1abdd709be3f {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1564.212246] env[62519]: DEBUG oslo_vmware.api [None req-0faa435d-2a45-4d3f-8816-480eac7f98db tempest-VolumesAdminNegativeTest-694306887 tempest-VolumesAdminNegativeTest-694306887-project-member] Waiting for the task: (returnval){ [ 1564.212246] env[62519]: value = "task-1802400" [ 1564.212246] env[62519]: _type = "Task" [ 1564.212246] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1564.227482] env[62519]: DEBUG oslo_vmware.api [None req-0faa435d-2a45-4d3f-8816-480eac7f98db tempest-VolumesAdminNegativeTest-694306887 tempest-VolumesAdminNegativeTest-694306887-project-member] Task: {'id': task-1802400, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1564.328498] env[62519]: DEBUG oslo_concurrency.lockutils [None req-46f36def-b286-445c-9c6e-71911fda02f6 tempest-ServersV294TestFqdnHostnames-312089260 tempest-ServersV294TestFqdnHostnames-312089260-project-member] Lock "2b391628-18a2-4606-8c59-58ba642cee50" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 39.298s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1564.432666] env[62519]: DEBUG oslo_vmware.api [None req-7525886b-c35e-4300-bd7c-cb7c69519285 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Task: {'id': task-1802397, 'name': PowerOnVM_Task, 'duration_secs': 0.845814} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1564.433122] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-7525886b-c35e-4300-bd7c-cb7c69519285 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] [instance: d8780c40-0099-4ccc-84ae-72fbb14fa1ee] Powered on the VM {{(pid=62519) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1564.433439] env[62519]: INFO nova.compute.manager [None req-7525886b-c35e-4300-bd7c-cb7c69519285 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] [instance: d8780c40-0099-4ccc-84ae-72fbb14fa1ee] Took 8.80 seconds to spawn the instance on the hypervisor. [ 1564.433697] env[62519]: DEBUG nova.compute.manager [None req-7525886b-c35e-4300-bd7c-cb7c69519285 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] [instance: d8780c40-0099-4ccc-84ae-72fbb14fa1ee] Checking state {{(pid=62519) _get_power_state /opt/stack/nova/nova/compute/manager.py:1799}} [ 1564.434693] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da84a4ed-b944-4bd8-a284-ee1100c53cfa {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1564.443146] env[62519]: DEBUG oslo_concurrency.lockutils [None req-a77564dc-93ee-4bfb-b5f3-39cf51dc2528 tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] Acquiring lock "8070aa59-3547-460a-b914-0e84620023d0" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1564.445339] env[62519]: DEBUG oslo_concurrency.lockutils [None req-a77564dc-93ee-4bfb-b5f3-39cf51dc2528 tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] Lock "8070aa59-3547-460a-b914-0e84620023d0" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.001s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1564.445339] env[62519]: DEBUG nova.compute.manager [None req-a77564dc-93ee-4bfb-b5f3-39cf51dc2528 tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] [instance: 8070aa59-3547-460a-b914-0e84620023d0] Checking state {{(pid=62519) _get_power_state /opt/stack/nova/nova/compute/manager.py:1799}} [ 1564.450316] env[62519]: DEBUG 
oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63b50ad8-25e4-43bc-83f3-ed58e0780eef {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1564.456336] env[62519]: DEBUG nova.compute.manager [None req-a77564dc-93ee-4bfb-b5f3-39cf51dc2528 tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] [instance: 8070aa59-3547-460a-b914-0e84620023d0] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=62519) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3408}} [ 1564.458507] env[62519]: DEBUG nova.objects.instance [None req-a77564dc-93ee-4bfb-b5f3-39cf51dc2528 tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] Lazy-loading 'flavor' on Instance uuid 8070aa59-3547-460a-b914-0e84620023d0 {{(pid=62519) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1564.600192] env[62519]: DEBUG oslo_vmware.api [None req-6a3f141b-7806-43a2-8afd-6877e3266010 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Task: {'id': task-1802398, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.577596} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1564.600548] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-6a3f141b-7806-43a2-8afd-6877e3266010 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk to [datastore1] a1551278-a306-4534-8d8d-3b3a003dde04/a1551278-a306-4534-8d8d-3b3a003dde04.vmdk {{(pid=62519) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1564.600824] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-6a3f141b-7806-43a2-8afd-6877e3266010 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] [instance: a1551278-a306-4534-8d8d-3b3a003dde04] Extending root virtual disk to 1048576 {{(pid=62519) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1564.601182] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-8999ecd2-12e5-41e1-8342-4279a5fe2c65 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1564.610791] env[62519]: DEBUG oslo_vmware.api [None req-6a3f141b-7806-43a2-8afd-6877e3266010 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Waiting for the task: (returnval){ [ 1564.610791] env[62519]: value = "task-1802401" [ 1564.610791] env[62519]: _type = "Task" [ 1564.610791] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1564.626743] env[62519]: DEBUG oslo_vmware.api [None req-6a3f141b-7806-43a2-8afd-6877e3266010 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Task: {'id': task-1802401, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1564.723722] env[62519]: DEBUG oslo_vmware.api [None req-0faa435d-2a45-4d3f-8816-480eac7f98db tempest-VolumesAdminNegativeTest-694306887 tempest-VolumesAdminNegativeTest-694306887-project-member] Task: {'id': task-1802400, 'name': ReconfigVM_Task, 'duration_secs': 0.35658} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1564.724138] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-0faa435d-2a45-4d3f-8816-480eac7f98db tempest-VolumesAdminNegativeTest-694306887 tempest-VolumesAdminNegativeTest-694306887-project-member] [instance: c61c893f-826b-4874-b253-de6fbffa9e5a] Reconfigured VM instance instance-00000010 to detach disk 2001 {{(pid=62519) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1564.729551] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-33f18eb1-6987-4e5f-a711-88c8af032b87 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1564.748452] env[62519]: DEBUG oslo_vmware.api [None req-0faa435d-2a45-4d3f-8816-480eac7f98db tempest-VolumesAdminNegativeTest-694306887 tempest-VolumesAdminNegativeTest-694306887-project-member] Waiting for the task: (returnval){ [ 1564.748452] env[62519]: value = "task-1802402" [ 1564.748452] env[62519]: _type = "Task" [ 1564.748452] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1564.758467] env[62519]: DEBUG oslo_vmware.api [None req-0faa435d-2a45-4d3f-8816-480eac7f98db tempest-VolumesAdminNegativeTest-694306887 tempest-VolumesAdminNegativeTest-694306887-project-member] Task: {'id': task-1802402, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1564.965249] env[62519]: INFO nova.compute.manager [None req-7525886b-c35e-4300-bd7c-cb7c69519285 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] [instance: d8780c40-0099-4ccc-84ae-72fbb14fa1ee] Took 45.30 seconds to build instance. [ 1565.125474] env[62519]: DEBUG oslo_vmware.api [None req-6a3f141b-7806-43a2-8afd-6877e3266010 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Task: {'id': task-1802401, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.074336} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1565.128472] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-6a3f141b-7806-43a2-8afd-6877e3266010 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] [instance: a1551278-a306-4534-8d8d-3b3a003dde04] Extended root virtual disk {{(pid=62519) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1565.132964] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83563a21-fe02-46cb-833e-b9996c2fd9b7 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1565.159207] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-6a3f141b-7806-43a2-8afd-6877e3266010 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] [instance: a1551278-a306-4534-8d8d-3b3a003dde04] Reconfiguring VM instance instance-0000002c to attach disk [datastore1] a1551278-a306-4534-8d8d-3b3a003dde04/a1551278-a306-4534-8d8d-3b3a003dde04.vmdk or device None with type sparse {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1565.162347] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5390d2d7-2cd7-47ff-97af-1bcaaa826027 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1565.186754] env[62519]: DEBUG oslo_vmware.api [None req-6a3f141b-7806-43a2-8afd-6877e3266010 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Waiting for the task: (returnval){ [ 1565.186754] env[62519]: value = "task-1802403" [ 1565.186754] env[62519]: _type = "Task" [ 1565.186754] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1565.201025] env[62519]: DEBUG oslo_vmware.api [None req-6a3f141b-7806-43a2-8afd-6877e3266010 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Task: {'id': task-1802403, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1565.259223] env[62519]: DEBUG oslo_vmware.api [None req-0faa435d-2a45-4d3f-8816-480eac7f98db tempest-VolumesAdminNegativeTest-694306887 tempest-VolumesAdminNegativeTest-694306887-project-member] Task: {'id': task-1802402, 'name': ReconfigVM_Task, 'duration_secs': 0.200856} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1565.259619] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-0faa435d-2a45-4d3f-8816-480eac7f98db tempest-VolumesAdminNegativeTest-694306887 tempest-VolumesAdminNegativeTest-694306887-project-member] [instance: c61c893f-826b-4874-b253-de6fbffa9e5a] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-373696', 'volume_id': '158562d2-c9d2-4053-924e-e9c2c4cb6e65', 'name': 'volume-158562d2-c9d2-4053-924e-e9c2c4cb6e65', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'c61c893f-826b-4874-b253-de6fbffa9e5a', 'attached_at': '', 'detached_at': '', 'volume_id': '158562d2-c9d2-4053-924e-e9c2c4cb6e65', 'serial': '158562d2-c9d2-4053-924e-e9c2c4cb6e65'} {{(pid=62519) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1565.371196] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2cfd050-b2b1-4989-83f1-c8cf29bc97cf {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1565.380080] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2fed393a-ef6b-436b-9e61-a4a4f12d9f2c {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1565.413308] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d47ff4f4-c16e-4708-97ed-dfe56fe02a0a {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1565.422424] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-850ef7fb-aecf-4e20-9373-2063e19896b4 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1565.437842] env[62519]: DEBUG nova.compute.provider_tree [None req-4073dc01-e5b2-4480-9b24-60e6c1491ad2 tempest-ImagesNegativeTestJSON-1194326905 tempest-ImagesNegativeTestJSON-1194326905-project-member] Inventory has not changed in ProviderTree for provider: f8ca0d98-9158-4b85-ae0e-b106f966dd44 {{(pid=62519) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1565.473127] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-a77564dc-93ee-4bfb-b5f3-39cf51dc2528 tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] [instance: 8070aa59-3547-460a-b914-0e84620023d0] Powering off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1565.473127] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-fa1e6ae0-ce6b-4215-9786-7f904d08f084 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1565.481271] env[62519]: DEBUG oslo_vmware.api [None req-a77564dc-93ee-4bfb-b5f3-39cf51dc2528 tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] Waiting for the task: (returnval){ [ 1565.481271] env[62519]: value = "task-1802404" [ 1565.481271] env[62519]: _type = "Task" [ 1565.481271] env[62519]: } to complete. 
{{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1565.492187] env[62519]: DEBUG oslo_vmware.api [None req-a77564dc-93ee-4bfb-b5f3-39cf51dc2528 tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] Task: {'id': task-1802404, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1565.699980] env[62519]: DEBUG oslo_vmware.api [None req-6a3f141b-7806-43a2-8afd-6877e3266010 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Task: {'id': task-1802403, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1565.851506] env[62519]: DEBUG nova.objects.instance [None req-0faa435d-2a45-4d3f-8816-480eac7f98db tempest-VolumesAdminNegativeTest-694306887 tempest-VolumesAdminNegativeTest-694306887-project-member] Lazy-loading 'flavor' on Instance uuid c61c893f-826b-4874-b253-de6fbffa9e5a {{(pid=62519) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1565.943092] env[62519]: DEBUG nova.scheduler.client.report [None req-4073dc01-e5b2-4480-9b24-60e6c1491ad2 tempest-ImagesNegativeTestJSON-1194326905 tempest-ImagesNegativeTestJSON-1194326905-project-member] Inventory has not changed for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 157, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1565.998713] env[62519]: DEBUG oslo_vmware.api [None req-a77564dc-93ee-4bfb-b5f3-39cf51dc2528 tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] Task: {'id': task-1802404, 'name': PowerOffVM_Task, 'duration_secs': 0.344709} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1565.999245] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-a77564dc-93ee-4bfb-b5f3-39cf51dc2528 tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] [instance: 8070aa59-3547-460a-b914-0e84620023d0] Powered off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1565.999643] env[62519]: DEBUG nova.compute.manager [None req-a77564dc-93ee-4bfb-b5f3-39cf51dc2528 tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] [instance: 8070aa59-3547-460a-b914-0e84620023d0] Checking state {{(pid=62519) _get_power_state /opt/stack/nova/nova/compute/manager.py:1799}} [ 1566.000883] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-564384bc-ac55-447f-bcad-95328f8b438a {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1566.203482] env[62519]: DEBUG oslo_vmware.api [None req-6a3f141b-7806-43a2-8afd-6877e3266010 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Task: {'id': task-1802403, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1566.446985] env[62519]: DEBUG oslo_concurrency.lockutils [None req-4073dc01-e5b2-4480-9b24-60e6c1491ad2 tempest-ImagesNegativeTestJSON-1194326905 tempest-ImagesNegativeTestJSON-1194326905-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.658s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1566.447661] env[62519]: DEBUG nova.compute.manager [None req-4073dc01-e5b2-4480-9b24-60e6c1491ad2 tempest-ImagesNegativeTestJSON-1194326905 tempest-ImagesNegativeTestJSON-1194326905-project-member] [instance: 1b4efc0f-474f-4a32-b199-c14f27b183e2] Start building networks asynchronously for instance. 
{{(pid=62519) _build_resources /opt/stack/nova/nova/compute/manager.py:2838}} [ 1566.450684] env[62519]: DEBUG oslo_concurrency.lockutils [None req-b5a4e29f-dafc-4df8-a00f-bb9bcadbb03e tempest-VolumesAssistedSnapshotsTest-904167312 tempest-VolumesAssistedSnapshotsTest-904167312-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 35.363s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1566.450910] env[62519]: DEBUG nova.objects.instance [None req-b5a4e29f-dafc-4df8-a00f-bb9bcadbb03e tempest-VolumesAssistedSnapshotsTest-904167312 tempest-VolumesAssistedSnapshotsTest-904167312-project-member] Lazy-loading 'resources' on Instance uuid 91902e7f-8c15-447b-a3a8-04433434b1b6 {{(pid=62519) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1566.475302] env[62519]: DEBUG oslo_concurrency.lockutils [None req-7525886b-c35e-4300-bd7c-cb7c69519285 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Lock "d8780c40-0099-4ccc-84ae-72fbb14fa1ee" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 102.233s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1566.518564] env[62519]: DEBUG oslo_concurrency.lockutils [None req-a77564dc-93ee-4bfb-b5f3-39cf51dc2528 tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] Lock "8070aa59-3547-460a-b914-0e84620023d0" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 2.075s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1566.701070] env[62519]: DEBUG oslo_vmware.api [None req-6a3f141b-7806-43a2-8afd-6877e3266010 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Task: {'id': task-1802403, 'name': ReconfigVM_Task, 'duration_secs': 1.217465} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1566.701386] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-6a3f141b-7806-43a2-8afd-6877e3266010 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] [instance: a1551278-a306-4534-8d8d-3b3a003dde04] Reconfigured VM instance instance-0000002c to attach disk [datastore1] a1551278-a306-4534-8d8d-3b3a003dde04/a1551278-a306-4534-8d8d-3b3a003dde04.vmdk or device None with type sparse {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1566.702034] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-7de66f2d-b7f9-46e3-92c9-ef1e84a730c3 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1566.710765] env[62519]: DEBUG oslo_vmware.api [None req-6a3f141b-7806-43a2-8afd-6877e3266010 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Waiting for the task: (returnval){ [ 1566.710765] env[62519]: value = "task-1802406" [ 1566.710765] env[62519]: _type = "Task" [ 1566.710765] env[62519]: } to complete. 
{{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1566.721852] env[62519]: DEBUG oslo_vmware.api [None req-6a3f141b-7806-43a2-8afd-6877e3266010 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Task: {'id': task-1802406, 'name': Rename_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1566.861392] env[62519]: DEBUG oslo_concurrency.lockutils [None req-0faa435d-2a45-4d3f-8816-480eac7f98db tempest-VolumesAdminNegativeTest-694306887 tempest-VolumesAdminNegativeTest-694306887-project-member] Lock "c61c893f-826b-4874-b253-de6fbffa9e5a" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.331s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1566.955295] env[62519]: DEBUG nova.compute.utils [None req-4073dc01-e5b2-4480-9b24-60e6c1491ad2 tempest-ImagesNegativeTestJSON-1194326905 tempest-ImagesNegativeTestJSON-1194326905-project-member] Using /dev/sd instead of None {{(pid=62519) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1566.959824] env[62519]: DEBUG nova.compute.manager [None req-4073dc01-e5b2-4480-9b24-60e6c1491ad2 tempest-ImagesNegativeTestJSON-1194326905 tempest-ImagesNegativeTestJSON-1194326905-project-member] [instance: 1b4efc0f-474f-4a32-b199-c14f27b183e2] Allocating IP information in the background. {{(pid=62519) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1989}} [ 1566.960062] env[62519]: DEBUG nova.network.neutron [None req-4073dc01-e5b2-4480-9b24-60e6c1491ad2 tempest-ImagesNegativeTestJSON-1194326905 tempest-ImagesNegativeTestJSON-1194326905-project-member] [instance: 1b4efc0f-474f-4a32-b199-c14f27b183e2] allocate_for_instance() {{(pid=62519) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1566.978312] env[62519]: DEBUG nova.compute.manager [None req-b3ee3a4d-b7fb-4e51-adc1-0f63f32d64f4 tempest-FloatingIPsAssociationTestJSON-131832216 tempest-FloatingIPsAssociationTestJSON-131832216-project-member] [instance: 9ac3344d-219a-487f-b83f-96c17cd86dad] Starting instance... {{(pid=62519) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2441}} [ 1567.018686] env[62519]: DEBUG nova.policy [None req-4073dc01-e5b2-4480-9b24-60e6c1491ad2 tempest-ImagesNegativeTestJSON-1194326905 tempest-ImagesNegativeTestJSON-1194326905-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '650ef7a8a33a4ebe89732b387d4fb620', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'dde025495cf3419db731c5299f90879d', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62519) authorize /opt/stack/nova/nova/policy.py:192}} [ 1567.221912] env[62519]: DEBUG oslo_vmware.api [None req-6a3f141b-7806-43a2-8afd-6877e3266010 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Task: {'id': task-1802406, 'name': Rename_Task} progress is 99%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1567.441891] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d4570fe-c2b0-4dfb-b08b-462090cbf2fc {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1567.453812] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4510ce91-a789-4c8c-bdfa-77ae91944db2 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1567.460644] env[62519]: DEBUG nova.compute.manager [None req-4073dc01-e5b2-4480-9b24-60e6c1491ad2 tempest-ImagesNegativeTestJSON-1194326905 tempest-ImagesNegativeTestJSON-1194326905-project-member] [instance: 1b4efc0f-474f-4a32-b199-c14f27b183e2] Start building block device mappings for instance. {{(pid=62519) _build_resources /opt/stack/nova/nova/compute/manager.py:2873}} [ 1567.500314] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44a93ce9-c92a-4a84-a8b3-509402b7d3f5 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1567.508058] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c6988df-94ed-4200-aa1a-1bf46fccf0e4 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1567.528049] env[62519]: DEBUG nova.compute.provider_tree [None req-b5a4e29f-dafc-4df8-a00f-bb9bcadbb03e tempest-VolumesAssistedSnapshotsTest-904167312 tempest-VolumesAssistedSnapshotsTest-904167312-project-member] Inventory has not changed in ProviderTree for provider: f8ca0d98-9158-4b85-ae0e-b106f966dd44 {{(pid=62519) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1567.530501] env[62519]: DEBUG nova.network.neutron [None req-4073dc01-e5b2-4480-9b24-60e6c1491ad2 tempest-ImagesNegativeTestJSON-1194326905 tempest-ImagesNegativeTestJSON-1194326905-project-member] [instance: 1b4efc0f-474f-4a32-b199-c14f27b183e2] Successfully created port: ec32b319-d783-4ebf-997e-88d3a805675f {{(pid=62519) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1567.533279] env[62519]: DEBUG oslo_concurrency.lockutils [None req-b3ee3a4d-b7fb-4e51-adc1-0f63f32d64f4 tempest-FloatingIPsAssociationTestJSON-131832216 tempest-FloatingIPsAssociationTestJSON-131832216-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1567.722390] env[62519]: DEBUG oslo_vmware.api [None req-6a3f141b-7806-43a2-8afd-6877e3266010 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Task: {'id': task-1802406, 'name': Rename_Task} progress is 99%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1568.035515] env[62519]: DEBUG nova.scheduler.client.report [None req-b5a4e29f-dafc-4df8-a00f-bb9bcadbb03e tempest-VolumesAssistedSnapshotsTest-904167312 tempest-VolumesAssistedSnapshotsTest-904167312-project-member] Inventory has not changed for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 157, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1568.223562] env[62519]: DEBUG oslo_vmware.api [None req-6a3f141b-7806-43a2-8afd-6877e3266010 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Task: {'id': task-1802406, 'name': Rename_Task, 'duration_secs': 1.288224} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1568.223917] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-6a3f141b-7806-43a2-8afd-6877e3266010 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] [instance: a1551278-a306-4534-8d8d-3b3a003dde04] Powering on the VM {{(pid=62519) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1568.224231] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-79e37613-a3a8-4eb3-9654-556ece1668ec {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1568.232774] env[62519]: DEBUG oslo_vmware.api [None req-6a3f141b-7806-43a2-8afd-6877e3266010 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Waiting for the task: (returnval){ [ 1568.232774] env[62519]: value = "task-1802407" [ 1568.232774] env[62519]: _type = "Task" [ 1568.232774] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1568.242025] env[62519]: DEBUG oslo_vmware.api [None req-6a3f141b-7806-43a2-8afd-6877e3266010 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Task: {'id': task-1802407, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1568.500967] env[62519]: DEBUG nova.compute.manager [None req-4073dc01-e5b2-4480-9b24-60e6c1491ad2 tempest-ImagesNegativeTestJSON-1194326905 tempest-ImagesNegativeTestJSON-1194326905-project-member] [instance: 1b4efc0f-474f-4a32-b199-c14f27b183e2] Start spawning the instance on the hypervisor. 
{{(pid=62519) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2647}} [ 1568.541742] env[62519]: DEBUG oslo_concurrency.lockutils [None req-b5a4e29f-dafc-4df8-a00f-bb9bcadbb03e tempest-VolumesAssistedSnapshotsTest-904167312 tempest-VolumesAssistedSnapshotsTest-904167312-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.091s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1568.544393] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 29.189s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1568.544593] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1568.544751] env[62519]: DEBUG nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62519) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1568.545068] env[62519]: DEBUG oslo_concurrency.lockutils [None req-a75dcbb4-805f-4659-9cde-d52941f8a0cc tempest-ServersTestJSON-192126600 tempest-ServersTestJSON-192126600-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 28.727s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1568.545292] env[62519]: DEBUG nova.objects.instance [None req-a75dcbb4-805f-4659-9cde-d52941f8a0cc tempest-ServersTestJSON-192126600 tempest-ServersTestJSON-192126600-project-member] Lazy-loading 'resources' on Instance uuid 4c336ad1-8ce6-4f89-843e-0baae0d0dbda {{(pid=62519) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1568.547158] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-441d5f5f-cd4b-4167-aedf-5c8b198943df {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1568.559472] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0c6aac0-291a-47a9-bfc2-ae6699ba1ddb {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1568.568457] env[62519]: INFO nova.scheduler.client.report [None req-b5a4e29f-dafc-4df8-a00f-bb9bcadbb03e tempest-VolumesAssistedSnapshotsTest-904167312 tempest-VolumesAssistedSnapshotsTest-904167312-project-member] Deleted allocations for instance 91902e7f-8c15-447b-a3a8-04433434b1b6 [ 1568.580625] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a447af2a-b982-4801-898e-13984833eecc {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1568.590424] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-5ea68506-de08-4f9c-8fb3-fd27a031a8d2 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1568.625829] env[62519]: DEBUG nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=179043MB free_disk=157GB free_vcpus=48 pci_devices=None {{(pid=62519) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1568.626056] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1568.743718] env[62519]: DEBUG oslo_vmware.api [None req-6a3f141b-7806-43a2-8afd-6877e3266010 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Task: {'id': task-1802407, 'name': PowerOnVM_Task} progress is 88%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1569.062788] env[62519]: DEBUG nova.network.neutron [None req-4073dc01-e5b2-4480-9b24-60e6c1491ad2 tempest-ImagesNegativeTestJSON-1194326905 tempest-ImagesNegativeTestJSON-1194326905-project-member] [instance: 1b4efc0f-474f-4a32-b199-c14f27b183e2] Successfully updated port: ec32b319-d783-4ebf-997e-88d3a805675f {{(pid=62519) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1569.090806] env[62519]: DEBUG oslo_concurrency.lockutils [None req-b5a4e29f-dafc-4df8-a00f-bb9bcadbb03e tempest-VolumesAssistedSnapshotsTest-904167312 tempest-VolumesAssistedSnapshotsTest-904167312-project-member] Lock "91902e7f-8c15-447b-a3a8-04433434b1b6" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 41.285s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1569.245205] env[62519]: DEBUG oslo_vmware.api [None req-6a3f141b-7806-43a2-8afd-6877e3266010 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Task: {'id': task-1802407, 'name': PowerOnVM_Task, 'duration_secs': 0.835644} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1569.248047] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-6a3f141b-7806-43a2-8afd-6877e3266010 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] [instance: a1551278-a306-4534-8d8d-3b3a003dde04] Powered on the VM {{(pid=62519) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1569.248292] env[62519]: INFO nova.compute.manager [None req-6a3f141b-7806-43a2-8afd-6877e3266010 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] [instance: a1551278-a306-4534-8d8d-3b3a003dde04] Took 10.80 seconds to spawn the instance on the hypervisor. 
[ 1569.248472] env[62519]: DEBUG nova.compute.manager [None req-6a3f141b-7806-43a2-8afd-6877e3266010 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] [instance: a1551278-a306-4534-8d8d-3b3a003dde04] Checking state {{(pid=62519) _get_power_state /opt/stack/nova/nova/compute/manager.py:1799}} [ 1569.249531] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ebed8f46-8acf-4c9b-8999-e148ee40f801 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1569.507469] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf80965a-d78b-4191-9991-cdab79ca39b1 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1569.515980] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b02bff99-9625-417f-a54a-40f6896d6399 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1569.549217] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4402cab-b35c-46c0-9a50-0c47008cfbb2 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1569.559100] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7040e60-4aa3-4a46-aba5-6e98a735e131 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1569.565198] env[62519]: DEBUG oslo_concurrency.lockutils [None req-4073dc01-e5b2-4480-9b24-60e6c1491ad2 tempest-ImagesNegativeTestJSON-1194326905 tempest-ImagesNegativeTestJSON-1194326905-project-member] Acquiring lock "refresh_cache-1b4efc0f-474f-4a32-b199-c14f27b183e2" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1569.565353] env[62519]: DEBUG oslo_concurrency.lockutils [None req-4073dc01-e5b2-4480-9b24-60e6c1491ad2 tempest-ImagesNegativeTestJSON-1194326905 tempest-ImagesNegativeTestJSON-1194326905-project-member] Acquired lock "refresh_cache-1b4efc0f-474f-4a32-b199-c14f27b183e2" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1569.565590] env[62519]: DEBUG nova.network.neutron [None req-4073dc01-e5b2-4480-9b24-60e6c1491ad2 tempest-ImagesNegativeTestJSON-1194326905 tempest-ImagesNegativeTestJSON-1194326905-project-member] [instance: 1b4efc0f-474f-4a32-b199-c14f27b183e2] Building network info cache for instance {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1569.578374] env[62519]: DEBUG nova.compute.provider_tree [None req-a75dcbb4-805f-4659-9cde-d52941f8a0cc tempest-ServersTestJSON-192126600 tempest-ServersTestJSON-192126600-project-member] Inventory has not changed in ProviderTree for provider: f8ca0d98-9158-4b85-ae0e-b106f966dd44 {{(pid=62519) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1569.771067] env[62519]: INFO nova.compute.manager [None req-6a3f141b-7806-43a2-8afd-6877e3266010 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] [instance: a1551278-a306-4534-8d8d-3b3a003dde04] Took 47.77 seconds to build instance. 
[ 1570.081909] env[62519]: DEBUG nova.scheduler.client.report [None req-a75dcbb4-805f-4659-9cde-d52941f8a0cc tempest-ServersTestJSON-192126600 tempest-ServersTestJSON-192126600-project-member] Inventory has not changed for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 157, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1570.099671] env[62519]: DEBUG nova.network.neutron [None req-4073dc01-e5b2-4480-9b24-60e6c1491ad2 tempest-ImagesNegativeTestJSON-1194326905 tempest-ImagesNegativeTestJSON-1194326905-project-member] [instance: 1b4efc0f-474f-4a32-b199-c14f27b183e2] Instance cache missing network info. {{(pid=62519) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1570.240204] env[62519]: DEBUG nova.network.neutron [None req-4073dc01-e5b2-4480-9b24-60e6c1491ad2 tempest-ImagesNegativeTestJSON-1194326905 tempest-ImagesNegativeTestJSON-1194326905-project-member] [instance: 1b4efc0f-474f-4a32-b199-c14f27b183e2] Updating instance_info_cache with network_info: [{"id": "ec32b319-d783-4ebf-997e-88d3a805675f", "address": "fa:16:3e:a3:dc:c4", "network": {"id": "bf224c23-ba0a-4aeb-9499-d655798b9ef9", "bridge": "br-int", "label": "tempest-ImagesNegativeTestJSON-432746111-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "dde025495cf3419db731c5299f90879d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "78340140-126f-4ef8-a340-debaa64da3e5", "external-id": "nsx-vlan-transportzone-648", "segmentation_id": 648, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapec32b319-d7", "ovs_interfaceid": "ec32b319-d783-4ebf-997e-88d3a805675f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1570.587490] env[62519]: DEBUG oslo_concurrency.lockutils [None req-a75dcbb4-805f-4659-9cde-d52941f8a0cc tempest-ServersTestJSON-192126600 tempest-ServersTestJSON-192126600-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.042s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1570.590414] env[62519]: DEBUG oslo_concurrency.lockutils [None req-df0df1dc-dc85-41ce-be1d-c03bdfcf0ead tempest-ServersTestBootFromVolume-845595414 tempest-ServersTestBootFromVolume-845595414-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 28.627s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1570.592220] 
env[62519]: INFO nova.compute.claims [None req-df0df1dc-dc85-41ce-be1d-c03bdfcf0ead tempest-ServersTestBootFromVolume-845595414 tempest-ServersTestBootFromVolume-845595414-project-member] [instance: 765cf18e-53a0-4cc6-ad0e-337a6f68915c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1570.609951] env[62519]: INFO nova.scheduler.client.report [None req-a75dcbb4-805f-4659-9cde-d52941f8a0cc tempest-ServersTestJSON-192126600 tempest-ServersTestJSON-192126600-project-member] Deleted allocations for instance 4c336ad1-8ce6-4f89-843e-0baae0d0dbda [ 1570.743493] env[62519]: DEBUG oslo_concurrency.lockutils [None req-4073dc01-e5b2-4480-9b24-60e6c1491ad2 tempest-ImagesNegativeTestJSON-1194326905 tempest-ImagesNegativeTestJSON-1194326905-project-member] Releasing lock "refresh_cache-1b4efc0f-474f-4a32-b199-c14f27b183e2" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1570.743829] env[62519]: DEBUG nova.compute.manager [None req-4073dc01-e5b2-4480-9b24-60e6c1491ad2 tempest-ImagesNegativeTestJSON-1194326905 tempest-ImagesNegativeTestJSON-1194326905-project-member] [instance: 1b4efc0f-474f-4a32-b199-c14f27b183e2] Instance network_info: |[{"id": "ec32b319-d783-4ebf-997e-88d3a805675f", "address": "fa:16:3e:a3:dc:c4", "network": {"id": "bf224c23-ba0a-4aeb-9499-d655798b9ef9", "bridge": "br-int", "label": "tempest-ImagesNegativeTestJSON-432746111-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "dde025495cf3419db731c5299f90879d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "78340140-126f-4ef8-a340-debaa64da3e5", "external-id": "nsx-vlan-transportzone-648", "segmentation_id": 648, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapec32b319-d7", "ovs_interfaceid": "ec32b319-d783-4ebf-997e-88d3a805675f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62519) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2004}} [ 1571.117445] env[62519]: DEBUG oslo_concurrency.lockutils [None req-a75dcbb4-805f-4659-9cde-d52941f8a0cc tempest-ServersTestJSON-192126600 tempest-ServersTestJSON-192126600-project-member] Lock "4c336ad1-8ce6-4f89-843e-0baae0d0dbda" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 35.420s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1571.277251] env[62519]: DEBUG oslo_concurrency.lockutils [None req-6a3f141b-7806-43a2-8afd-6877e3266010 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Lock "a1551278-a306-4534-8d8d-3b3a003dde04" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 97.274s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1571.703878] env[62519]: DEBUG nova.virt.hardware [None req-4073dc01-e5b2-4480-9b24-60e6c1491ad2 
tempest-ImagesNegativeTestJSON-1194326905 tempest-ImagesNegativeTestJSON-1194326905-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T07:56:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T07:55:55Z,direct_url=,disk_format='vmdk',id=15793716-f1d9-4a86-9030-717adf498693,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4069337684d348e0a1ab4eb6a1a2b14d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T07:55:56Z,virtual_size=,visibility=), allow threads: False {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1571.704245] env[62519]: DEBUG nova.virt.hardware [None req-4073dc01-e5b2-4480-9b24-60e6c1491ad2 tempest-ImagesNegativeTestJSON-1194326905 tempest-ImagesNegativeTestJSON-1194326905-project-member] Flavor limits 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1571.704402] env[62519]: DEBUG nova.virt.hardware [None req-4073dc01-e5b2-4480-9b24-60e6c1491ad2 tempest-ImagesNegativeTestJSON-1194326905 tempest-ImagesNegativeTestJSON-1194326905-project-member] Image limits 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1571.704641] env[62519]: DEBUG nova.virt.hardware [None req-4073dc01-e5b2-4480-9b24-60e6c1491ad2 tempest-ImagesNegativeTestJSON-1194326905 tempest-ImagesNegativeTestJSON-1194326905-project-member] Flavor pref 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1571.704803] env[62519]: DEBUG nova.virt.hardware [None req-4073dc01-e5b2-4480-9b24-60e6c1491ad2 tempest-ImagesNegativeTestJSON-1194326905 tempest-ImagesNegativeTestJSON-1194326905-project-member] Image pref 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1571.704949] env[62519]: DEBUG nova.virt.hardware [None req-4073dc01-e5b2-4480-9b24-60e6c1491ad2 tempest-ImagesNegativeTestJSON-1194326905 tempest-ImagesNegativeTestJSON-1194326905-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1571.705204] env[62519]: DEBUG nova.virt.hardware [None req-4073dc01-e5b2-4480-9b24-60e6c1491ad2 tempest-ImagesNegativeTestJSON-1194326905 tempest-ImagesNegativeTestJSON-1194326905-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1571.705366] env[62519]: DEBUG nova.virt.hardware [None req-4073dc01-e5b2-4480-9b24-60e6c1491ad2 tempest-ImagesNegativeTestJSON-1194326905 tempest-ImagesNegativeTestJSON-1194326905-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62519) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1571.705530] env[62519]: DEBUG nova.virt.hardware [None req-4073dc01-e5b2-4480-9b24-60e6c1491ad2 tempest-ImagesNegativeTestJSON-1194326905 tempest-ImagesNegativeTestJSON-1194326905-project-member] Got 1 possible topologies 
{{(pid=62519) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1571.705688] env[62519]: DEBUG nova.virt.hardware [None req-4073dc01-e5b2-4480-9b24-60e6c1491ad2 tempest-ImagesNegativeTestJSON-1194326905 tempest-ImagesNegativeTestJSON-1194326905-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1571.706095] env[62519]: DEBUG nova.virt.hardware [None req-4073dc01-e5b2-4480-9b24-60e6c1491ad2 tempest-ImagesNegativeTestJSON-1194326905 tempest-ImagesNegativeTestJSON-1194326905-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1571.708450] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-319cdd8a-8125-4ea3-8ee2-438a5977e9af {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1571.723035] env[62519]: DEBUG oslo_vmware.rw_handles [None req-37d16fe4-3717-4983-a91a-6f9d45bf9fb5 tempest-ImagesOneServerTestJSON-1959164190 tempest-ImagesOneServerTestJSON-1959164190-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52cc16c9-945a-df38-d2ec-e61f75ee6b08/disk-0.vmdk. {{(pid=62519) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1571.724108] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-543798c0-db2e-4106-af68-f59fbb122af0 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1571.729098] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4bb2620f-994a-47d9-9e2b-47f85bb691c4 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1571.747594] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-4073dc01-e5b2-4480-9b24-60e6c1491ad2 tempest-ImagesNegativeTestJSON-1194326905 tempest-ImagesNegativeTestJSON-1194326905-project-member] [instance: 1b4efc0f-474f-4a32-b199-c14f27b183e2] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:a3:dc:c4', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '78340140-126f-4ef8-a340-debaa64da3e5', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'ec32b319-d783-4ebf-997e-88d3a805675f', 'vif_model': 'vmxnet3'}] {{(pid=62519) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1571.755908] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-4073dc01-e5b2-4480-9b24-60e6c1491ad2 tempest-ImagesNegativeTestJSON-1194326905 tempest-ImagesNegativeTestJSON-1194326905-project-member] Creating folder: Project (dde025495cf3419db731c5299f90879d). Parent ref: group-v373567. 
{{(pid=62519) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1571.760588] env[62519]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-01356775-41ec-48c1-b1cb-7eaae10165e7 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1571.762387] env[62519]: DEBUG oslo_vmware.rw_handles [None req-37d16fe4-3717-4983-a91a-6f9d45bf9fb5 tempest-ImagesOneServerTestJSON-1959164190 tempest-ImagesOneServerTestJSON-1959164190-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52cc16c9-945a-df38-d2ec-e61f75ee6b08/disk-0.vmdk is in state: ready. {{(pid=62519) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1571.762618] env[62519]: ERROR oslo_vmware.rw_handles [None req-37d16fe4-3717-4983-a91a-6f9d45bf9fb5 tempest-ImagesOneServerTestJSON-1959164190 tempest-ImagesOneServerTestJSON-1959164190-project-member] Aborting lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52cc16c9-945a-df38-d2ec-e61f75ee6b08/disk-0.vmdk due to incomplete transfer. [ 1571.763380] env[62519]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-4308cf8b-94a4-452c-98c0-6b77c195caa5 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1571.772311] env[62519]: DEBUG oslo_vmware.rw_handles [None req-37d16fe4-3717-4983-a91a-6f9d45bf9fb5 tempest-ImagesOneServerTestJSON-1959164190 tempest-ImagesOneServerTestJSON-1959164190-project-member] Closed VMDK read handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52cc16c9-945a-df38-d2ec-e61f75ee6b08/disk-0.vmdk. {{(pid=62519) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1571.772537] env[62519]: DEBUG nova.virt.vmwareapi.images [None req-37d16fe4-3717-4983-a91a-6f9d45bf9fb5 tempest-ImagesOneServerTestJSON-1959164190 tempest-ImagesOneServerTestJSON-1959164190-project-member] [instance: 4e3dee19-b99a-4257-88da-1b0531e2c0f9] Uploaded image a92e4537-8ad4-4d13-800c-5431b02914af to the Glance image server {{(pid=62519) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1571.775305] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-37d16fe4-3717-4983-a91a-6f9d45bf9fb5 tempest-ImagesOneServerTestJSON-1959164190 tempest-ImagesOneServerTestJSON-1959164190-project-member] [instance: 4e3dee19-b99a-4257-88da-1b0531e2c0f9] Destroying the VM {{(pid=62519) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1571.779877] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-18500ec4-a2df-4912-964d-f1e7b9e58ebb {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1571.781553] env[62519]: INFO nova.virt.vmwareapi.vm_util [None req-4073dc01-e5b2-4480-9b24-60e6c1491ad2 tempest-ImagesNegativeTestJSON-1194326905 tempest-ImagesNegativeTestJSON-1194326905-project-member] Created folder: Project (dde025495cf3419db731c5299f90879d) in parent group-v373567. [ 1571.781734] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-4073dc01-e5b2-4480-9b24-60e6c1491ad2 tempest-ImagesNegativeTestJSON-1194326905 tempest-ImagesNegativeTestJSON-1194326905-project-member] Creating folder: Instances. Parent ref: group-v373704. 
{{(pid=62519) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1571.782099] env[62519]: DEBUG nova.compute.manager [None req-94572272-cfe8-4ebd-bbc6-1659ba6f617f tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] [instance: c60f5d73-9d6d-4b5f-b71b-00b6b787d482] Starting instance... {{(pid=62519) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2441}} [ 1571.784800] env[62519]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-0f9f523d-da78-45d2-a5ac-5d1b17daaa9d {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1571.792816] env[62519]: DEBUG oslo_vmware.api [None req-37d16fe4-3717-4983-a91a-6f9d45bf9fb5 tempest-ImagesOneServerTestJSON-1959164190 tempest-ImagesOneServerTestJSON-1959164190-project-member] Waiting for the task: (returnval){ [ 1571.792816] env[62519]: value = "task-1802410" [ 1571.792816] env[62519]: _type = "Task" [ 1571.792816] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1571.800865] env[62519]: INFO nova.virt.vmwareapi.vm_util [None req-4073dc01-e5b2-4480-9b24-60e6c1491ad2 tempest-ImagesNegativeTestJSON-1194326905 tempest-ImagesNegativeTestJSON-1194326905-project-member] Created folder: Instances in parent group-v373704. [ 1571.801144] env[62519]: DEBUG oslo.service.loopingcall [None req-4073dc01-e5b2-4480-9b24-60e6c1491ad2 tempest-ImagesNegativeTestJSON-1194326905 tempest-ImagesNegativeTestJSON-1194326905-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62519) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1571.801365] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1b4efc0f-474f-4a32-b199-c14f27b183e2] Creating VM on the ESX host {{(pid=62519) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1571.801608] env[62519]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d43ba9cb-b318-4e2f-8993-15230f62511e {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1571.823615] env[62519]: DEBUG oslo_vmware.api [None req-37d16fe4-3717-4983-a91a-6f9d45bf9fb5 tempest-ImagesOneServerTestJSON-1959164190 tempest-ImagesOneServerTestJSON-1959164190-project-member] Task: {'id': task-1802410, 'name': Destroy_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1571.830558] env[62519]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1571.830558] env[62519]: value = "task-1802411" [ 1571.830558] env[62519]: _type = "Task" [ 1571.830558] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1571.843095] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1802411, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1572.110861] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c5ab401-be12-4ec3-89e8-0763e65c6057 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1572.120033] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae4494ab-3a1d-4747-a845-8157e08d044d {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1572.158745] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-417544af-e136-4f3a-9c44-76fb594623c1 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1572.170562] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23eae58f-2d89-4d76-9c71-8de7d2b12237 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1572.193086] env[62519]: DEBUG nova.compute.provider_tree [None req-df0df1dc-dc85-41ce-be1d-c03bdfcf0ead tempest-ServersTestBootFromVolume-845595414 tempest-ServersTestBootFromVolume-845595414-project-member] Inventory has not changed in ProviderTree for provider: f8ca0d98-9158-4b85-ae0e-b106f966dd44 {{(pid=62519) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1572.310497] env[62519]: DEBUG oslo_vmware.api [None req-37d16fe4-3717-4983-a91a-6f9d45bf9fb5 tempest-ImagesOneServerTestJSON-1959164190 tempest-ImagesOneServerTestJSON-1959164190-project-member] Task: {'id': task-1802410, 'name': Destroy_Task} progress is 33%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1572.313731] env[62519]: DEBUG oslo_concurrency.lockutils [None req-94572272-cfe8-4ebd-bbc6-1659ba6f617f tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1572.345335] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1802411, 'name': CreateVM_Task} progress is 99%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1572.502379] env[62519]: DEBUG nova.compute.manager [req-3702b6c4-d068-4182-be91-8187b02370eb req-669a1e32-2417-4b57-bdea-fbd57b6b91d9 service nova] [instance: 1b4efc0f-474f-4a32-b199-c14f27b183e2] Received event network-vif-plugged-ec32b319-d783-4ebf-997e-88d3a805675f {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1572.502847] env[62519]: DEBUG oslo_concurrency.lockutils [req-3702b6c4-d068-4182-be91-8187b02370eb req-669a1e32-2417-4b57-bdea-fbd57b6b91d9 service nova] Acquiring lock "1b4efc0f-474f-4a32-b199-c14f27b183e2-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1572.503240] env[62519]: DEBUG oslo_concurrency.lockutils [req-3702b6c4-d068-4182-be91-8187b02370eb req-669a1e32-2417-4b57-bdea-fbd57b6b91d9 service nova] Lock "1b4efc0f-474f-4a32-b199-c14f27b183e2-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1572.503350] env[62519]: DEBUG oslo_concurrency.lockutils [req-3702b6c4-d068-4182-be91-8187b02370eb req-669a1e32-2417-4b57-bdea-fbd57b6b91d9 service nova] Lock "1b4efc0f-474f-4a32-b199-c14f27b183e2-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1572.503622] env[62519]: DEBUG nova.compute.manager [req-3702b6c4-d068-4182-be91-8187b02370eb req-669a1e32-2417-4b57-bdea-fbd57b6b91d9 service nova] [instance: 1b4efc0f-474f-4a32-b199-c14f27b183e2] No waiting events found dispatching network-vif-plugged-ec32b319-d783-4ebf-997e-88d3a805675f {{(pid=62519) pop_instance_event /opt/stack/nova/nova/compute/manager.py:323}} [ 1572.503885] env[62519]: WARNING nova.compute.manager [req-3702b6c4-d068-4182-be91-8187b02370eb req-669a1e32-2417-4b57-bdea-fbd57b6b91d9 service nova] [instance: 1b4efc0f-474f-4a32-b199-c14f27b183e2] Received unexpected event network-vif-plugged-ec32b319-d783-4ebf-997e-88d3a805675f for instance with vm_state building and task_state spawning. [ 1572.696408] env[62519]: DEBUG nova.scheduler.client.report [None req-df0df1dc-dc85-41ce-be1d-c03bdfcf0ead tempest-ServersTestBootFromVolume-845595414 tempest-ServersTestBootFromVolume-845595414-project-member] Inventory has not changed for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 157, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1572.808596] env[62519]: DEBUG oslo_vmware.api [None req-37d16fe4-3717-4983-a91a-6f9d45bf9fb5 tempest-ImagesOneServerTestJSON-1959164190 tempest-ImagesOneServerTestJSON-1959164190-project-member] Task: {'id': task-1802410, 'name': Destroy_Task, 'duration_secs': 0.958171} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1572.809012] env[62519]: INFO nova.virt.vmwareapi.vm_util [None req-37d16fe4-3717-4983-a91a-6f9d45bf9fb5 tempest-ImagesOneServerTestJSON-1959164190 tempest-ImagesOneServerTestJSON-1959164190-project-member] [instance: 4e3dee19-b99a-4257-88da-1b0531e2c0f9] Destroyed the VM [ 1572.809353] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-37d16fe4-3717-4983-a91a-6f9d45bf9fb5 tempest-ImagesOneServerTestJSON-1959164190 tempest-ImagesOneServerTestJSON-1959164190-project-member] [instance: 4e3dee19-b99a-4257-88da-1b0531e2c0f9] Deleting Snapshot of the VM instance {{(pid=62519) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1572.809447] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-caa56203-f36e-41db-821e-473d95efd53d {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1572.819211] env[62519]: DEBUG oslo_vmware.api [None req-37d16fe4-3717-4983-a91a-6f9d45bf9fb5 tempest-ImagesOneServerTestJSON-1959164190 tempest-ImagesOneServerTestJSON-1959164190-project-member] Waiting for the task: (returnval){ [ 1572.819211] env[62519]: value = "task-1802412" [ 1572.819211] env[62519]: _type = "Task" [ 1572.819211] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1572.828841] env[62519]: DEBUG oslo_vmware.api [None req-37d16fe4-3717-4983-a91a-6f9d45bf9fb5 tempest-ImagesOneServerTestJSON-1959164190 tempest-ImagesOneServerTestJSON-1959164190-project-member] Task: {'id': task-1802412, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1572.843496] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1802411, 'name': CreateVM_Task} progress is 99%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1573.164268] env[62519]: DEBUG oslo_vmware.rw_handles [None req-9ac93790-d8fc-4625-a913-ddc460237e69 tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52fb5ba8-516d-8ff6-7d23-fda0206caf65/disk-0.vmdk. {{(pid=62519) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1573.165663] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-acbfd5e3-bd95-401d-a3a1-99ec83ce6e3f {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1573.176893] env[62519]: DEBUG oslo_vmware.rw_handles [None req-9ac93790-d8fc-4625-a913-ddc460237e69 tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52fb5ba8-516d-8ff6-7d23-fda0206caf65/disk-0.vmdk is in state: ready. 
{{(pid=62519) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1573.177122] env[62519]: ERROR oslo_vmware.rw_handles [None req-9ac93790-d8fc-4625-a913-ddc460237e69 tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] Aborting lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52fb5ba8-516d-8ff6-7d23-fda0206caf65/disk-0.vmdk due to incomplete transfer. [ 1573.177410] env[62519]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-3f62a30e-4f9b-440f-b092-3ede4cf55ac8 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1573.187365] env[62519]: DEBUG oslo_vmware.rw_handles [None req-9ac93790-d8fc-4625-a913-ddc460237e69 tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] Closed VMDK read handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52fb5ba8-516d-8ff6-7d23-fda0206caf65/disk-0.vmdk. {{(pid=62519) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1573.188068] env[62519]: DEBUG nova.virt.vmwareapi.images [None req-9ac93790-d8fc-4625-a913-ddc460237e69 tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] [instance: 1d4b14d3-8832-457e-aaed-462236555f57] Uploaded image c9199067-5b3c-428f-8215-64d5188a0c37 to the Glance image server {{(pid=62519) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1573.190242] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-9ac93790-d8fc-4625-a913-ddc460237e69 tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] [instance: 1d4b14d3-8832-457e-aaed-462236555f57] Destroying the VM {{(pid=62519) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1573.190508] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-0407b844-a353-4641-a0b9-f6a099c7c076 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1573.199661] env[62519]: DEBUG oslo_vmware.api [None req-9ac93790-d8fc-4625-a913-ddc460237e69 tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] Waiting for the task: (returnval){ [ 1573.199661] env[62519]: value = "task-1802413" [ 1573.199661] env[62519]: _type = "Task" [ 1573.199661] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1573.204612] env[62519]: DEBUG oslo_concurrency.lockutils [None req-df0df1dc-dc85-41ce-be1d-c03bdfcf0ead tempest-ServersTestBootFromVolume-845595414 tempest-ServersTestBootFromVolume-845595414-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.614s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1573.205312] env[62519]: DEBUG nova.compute.manager [None req-df0df1dc-dc85-41ce-be1d-c03bdfcf0ead tempest-ServersTestBootFromVolume-845595414 tempest-ServersTestBootFromVolume-845595414-project-member] [instance: 765cf18e-53a0-4cc6-ad0e-337a6f68915c] Start building networks asynchronously for instance. 
{{(pid=62519) _build_resources /opt/stack/nova/nova/compute/manager.py:2838}} [ 1573.208651] env[62519]: DEBUG oslo_concurrency.lockutils [None req-b80a0845-960b-4b9e-b305-46f7cb345916 tempest-SecurityGroupsTestJSON-38868017 tempest-SecurityGroupsTestJSON-38868017-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 25.657s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1573.210670] env[62519]: INFO nova.compute.claims [None req-b80a0845-960b-4b9e-b305-46f7cb345916 tempest-SecurityGroupsTestJSON-38868017 tempest-SecurityGroupsTestJSON-38868017-project-member] [instance: ad374dd9-a92d-4b76-9609-7562346e05a8] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1573.219930] env[62519]: DEBUG oslo_vmware.api [None req-9ac93790-d8fc-4625-a913-ddc460237e69 tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] Task: {'id': task-1802413, 'name': Destroy_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1573.331317] env[62519]: DEBUG oslo_vmware.api [None req-37d16fe4-3717-4983-a91a-6f9d45bf9fb5 tempest-ImagesOneServerTestJSON-1959164190 tempest-ImagesOneServerTestJSON-1959164190-project-member] Task: {'id': task-1802412, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1573.344936] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1802411, 'name': CreateVM_Task, 'duration_secs': 1.506983} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1573.345160] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1b4efc0f-474f-4a32-b199-c14f27b183e2] Created VM on the ESX host {{(pid=62519) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1573.346021] env[62519]: DEBUG oslo_concurrency.lockutils [None req-4073dc01-e5b2-4480-9b24-60e6c1491ad2 tempest-ImagesNegativeTestJSON-1194326905 tempest-ImagesNegativeTestJSON-1194326905-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1573.346330] env[62519]: DEBUG oslo_concurrency.lockutils [None req-4073dc01-e5b2-4480-9b24-60e6c1491ad2 tempest-ImagesNegativeTestJSON-1194326905 tempest-ImagesNegativeTestJSON-1194326905-project-member] Acquired lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1573.346557] env[62519]: DEBUG oslo_concurrency.lockutils [None req-4073dc01-e5b2-4480-9b24-60e6c1491ad2 tempest-ImagesNegativeTestJSON-1194326905 tempest-ImagesNegativeTestJSON-1194326905-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1573.346905] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2aea1038-f74e-4ba3-a6e5-b4d150989a26 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1573.353987] 
env[62519]: DEBUG oslo_vmware.api [None req-4073dc01-e5b2-4480-9b24-60e6c1491ad2 tempest-ImagesNegativeTestJSON-1194326905 tempest-ImagesNegativeTestJSON-1194326905-project-member] Waiting for the task: (returnval){ [ 1573.353987] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]52d7d60c-f924-5285-adab-96d13887b01a" [ 1573.353987] env[62519]: _type = "Task" [ 1573.353987] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1573.364613] env[62519]: DEBUG oslo_vmware.api [None req-4073dc01-e5b2-4480-9b24-60e6c1491ad2 tempest-ImagesNegativeTestJSON-1194326905 tempest-ImagesNegativeTestJSON-1194326905-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52d7d60c-f924-5285-adab-96d13887b01a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1573.683736] env[62519]: DEBUG oslo_concurrency.lockutils [None req-efec4a27-e1ca-4ade-8ea9-071f6e4c700f tempest-ServerActionsV293TestJSON-760697799 tempest-ServerActionsV293TestJSON-760697799-project-member] Acquiring lock "bace23b3-b7f4-4f3b-8986-0076440d096d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1573.683736] env[62519]: DEBUG oslo_concurrency.lockutils [None req-efec4a27-e1ca-4ade-8ea9-071f6e4c700f tempest-ServerActionsV293TestJSON-760697799 tempest-ServerActionsV293TestJSON-760697799-project-member] Lock "bace23b3-b7f4-4f3b-8986-0076440d096d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1573.712724] env[62519]: DEBUG oslo_vmware.api [None req-9ac93790-d8fc-4625-a913-ddc460237e69 tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] Task: {'id': task-1802413, 'name': Destroy_Task} progress is 33%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1573.715343] env[62519]: DEBUG nova.compute.utils [None req-df0df1dc-dc85-41ce-be1d-c03bdfcf0ead tempest-ServersTestBootFromVolume-845595414 tempest-ServersTestBootFromVolume-845595414-project-member] Using /dev/sd instead of None {{(pid=62519) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1573.721129] env[62519]: DEBUG nova.compute.manager [None req-df0df1dc-dc85-41ce-be1d-c03bdfcf0ead tempest-ServersTestBootFromVolume-845595414 tempest-ServersTestBootFromVolume-845595414-project-member] [instance: 765cf18e-53a0-4cc6-ad0e-337a6f68915c] Allocating IP information in the background. 
{{(pid=62519) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1989}} [ 1573.721129] env[62519]: DEBUG nova.network.neutron [None req-df0df1dc-dc85-41ce-be1d-c03bdfcf0ead tempest-ServersTestBootFromVolume-845595414 tempest-ServersTestBootFromVolume-845595414-project-member] [instance: 765cf18e-53a0-4cc6-ad0e-337a6f68915c] allocate_for_instance() {{(pid=62519) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1573.825020] env[62519]: DEBUG nova.policy [None req-df0df1dc-dc85-41ce-be1d-c03bdfcf0ead tempest-ServersTestBootFromVolume-845595414 tempest-ServersTestBootFromVolume-845595414-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '0fed2ec144f947198aebcaba3ffe6a6f', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ffa09c2044954d848a83de9c87fff94e', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62519) authorize /opt/stack/nova/nova/policy.py:192}} [ 1573.833495] env[62519]: DEBUG oslo_vmware.api [None req-37d16fe4-3717-4983-a91a-6f9d45bf9fb5 tempest-ImagesOneServerTestJSON-1959164190 tempest-ImagesOneServerTestJSON-1959164190-project-member] Task: {'id': task-1802412, 'name': RemoveSnapshot_Task} progress is 80%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1573.870226] env[62519]: DEBUG oslo_vmware.api [None req-4073dc01-e5b2-4480-9b24-60e6c1491ad2 tempest-ImagesNegativeTestJSON-1194326905 tempest-ImagesNegativeTestJSON-1194326905-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52d7d60c-f924-5285-adab-96d13887b01a, 'name': SearchDatastore_Task, 'duration_secs': 0.019812} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1573.870226] env[62519]: DEBUG oslo_concurrency.lockutils [None req-4073dc01-e5b2-4480-9b24-60e6c1491ad2 tempest-ImagesNegativeTestJSON-1194326905 tempest-ImagesNegativeTestJSON-1194326905-project-member] Releasing lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1573.870226] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-4073dc01-e5b2-4480-9b24-60e6c1491ad2 tempest-ImagesNegativeTestJSON-1194326905 tempest-ImagesNegativeTestJSON-1194326905-project-member] [instance: 1b4efc0f-474f-4a32-b199-c14f27b183e2] Processing image 15793716-f1d9-4a86-9030-717adf498693 {{(pid=62519) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1573.870226] env[62519]: DEBUG oslo_concurrency.lockutils [None req-4073dc01-e5b2-4480-9b24-60e6c1491ad2 tempest-ImagesNegativeTestJSON-1194326905 tempest-ImagesNegativeTestJSON-1194326905-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1573.870226] env[62519]: DEBUG oslo_concurrency.lockutils [None req-4073dc01-e5b2-4480-9b24-60e6c1491ad2 tempest-ImagesNegativeTestJSON-1194326905 tempest-ImagesNegativeTestJSON-1194326905-project-member] Acquired lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1573.870226] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-4073dc01-e5b2-4480-9b24-60e6c1491ad2 tempest-ImagesNegativeTestJSON-1194326905 tempest-ImagesNegativeTestJSON-1194326905-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62519) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1573.870226] env[62519]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-4b490e89-bf13-4fdd-94ca-fb17ae2ce577 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1573.882999] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-4073dc01-e5b2-4480-9b24-60e6c1491ad2 tempest-ImagesNegativeTestJSON-1194326905 tempest-ImagesNegativeTestJSON-1194326905-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62519) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1573.883556] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-4073dc01-e5b2-4480-9b24-60e6c1491ad2 tempest-ImagesNegativeTestJSON-1194326905 tempest-ImagesNegativeTestJSON-1194326905-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62519) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1573.886383] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-75e0125b-cf08-48c1-a96c-630f5e7dbd2e {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1573.895024] env[62519]: DEBUG oslo_vmware.api [None req-4073dc01-e5b2-4480-9b24-60e6c1491ad2 tempest-ImagesNegativeTestJSON-1194326905 tempest-ImagesNegativeTestJSON-1194326905-project-member] Waiting for the task: (returnval){ [ 1573.895024] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]52a3dd97-d210-b32a-67c5-9ea9aee50dd5" [ 1573.895024] env[62519]: _type = "Task" [ 1573.895024] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1573.903427] env[62519]: DEBUG oslo_vmware.api [None req-4073dc01-e5b2-4480-9b24-60e6c1491ad2 tempest-ImagesNegativeTestJSON-1194326905 tempest-ImagesNegativeTestJSON-1194326905-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52a3dd97-d210-b32a-67c5-9ea9aee50dd5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1573.967017] env[62519]: DEBUG oslo_concurrency.lockutils [None req-6e183849-567e-4bbd-988c-d4ca57b40836 tempest-VolumesAdminNegativeTest-694306887 tempest-VolumesAdminNegativeTest-694306887-project-member] Acquiring lock "ed716912-752e-4c6d-b6c6-fb349668fa93" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1573.967017] env[62519]: DEBUG oslo_concurrency.lockutils [None req-6e183849-567e-4bbd-988c-d4ca57b40836 tempest-VolumesAdminNegativeTest-694306887 tempest-VolumesAdminNegativeTest-694306887-project-member] Lock "ed716912-752e-4c6d-b6c6-fb349668fa93" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1573.996172] env[62519]: DEBUG oslo_concurrency.lockutils [None req-e418db9a-80a0-48d0-b7f9-5c74a86504d6 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Acquiring lock "d8780c40-0099-4ccc-84ae-72fbb14fa1ee" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1573.998040] env[62519]: DEBUG oslo_concurrency.lockutils [None req-e418db9a-80a0-48d0-b7f9-5c74a86504d6 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Lock "d8780c40-0099-4ccc-84ae-72fbb14fa1ee" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.001s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1574.000634] env[62519]: DEBUG nova.objects.instance [None req-571d3639-ba38-4525-8b7d-0e38c143d5ca tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] Lazy-loading 'flavor' on Instance uuid 8070aa59-3547-460a-b914-0e84620023d0 {{(pid=62519) obj_load_attr 
/opt/stack/nova/nova/objects/instance.py:1141}} [ 1574.217580] env[62519]: DEBUG oslo_vmware.api [None req-9ac93790-d8fc-4625-a913-ddc460237e69 tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] Task: {'id': task-1802413, 'name': Destroy_Task, 'duration_secs': 0.770756} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1574.218446] env[62519]: INFO nova.virt.vmwareapi.vm_util [None req-9ac93790-d8fc-4625-a913-ddc460237e69 tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] [instance: 1d4b14d3-8832-457e-aaed-462236555f57] Destroyed the VM [ 1574.218671] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-9ac93790-d8fc-4625-a913-ddc460237e69 tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] [instance: 1d4b14d3-8832-457e-aaed-462236555f57] Deleting Snapshot of the VM instance {{(pid=62519) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1574.219158] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-48be3f4b-a211-4b6f-b7cd-78e144b968aa {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1574.221402] env[62519]: DEBUG nova.compute.manager [None req-df0df1dc-dc85-41ce-be1d-c03bdfcf0ead tempest-ServersTestBootFromVolume-845595414 tempest-ServersTestBootFromVolume-845595414-project-member] [instance: 765cf18e-53a0-4cc6-ad0e-337a6f68915c] Start building block device mappings for instance. {{(pid=62519) _build_resources /opt/stack/nova/nova/compute/manager.py:2873}} [ 1574.236407] env[62519]: DEBUG oslo_vmware.api [None req-9ac93790-d8fc-4625-a913-ddc460237e69 tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] Waiting for the task: (returnval){ [ 1574.236407] env[62519]: value = "task-1802414" [ 1574.236407] env[62519]: _type = "Task" [ 1574.236407] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1574.251067] env[62519]: DEBUG oslo_vmware.api [None req-9ac93790-d8fc-4625-a913-ddc460237e69 tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] Task: {'id': task-1802414, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1574.333324] env[62519]: DEBUG oslo_vmware.api [None req-37d16fe4-3717-4983-a91a-6f9d45bf9fb5 tempest-ImagesOneServerTestJSON-1959164190 tempest-ImagesOneServerTestJSON-1959164190-project-member] Task: {'id': task-1802412, 'name': RemoveSnapshot_Task, 'duration_secs': 1.514621} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1574.333868] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-37d16fe4-3717-4983-a91a-6f9d45bf9fb5 tempest-ImagesOneServerTestJSON-1959164190 tempest-ImagesOneServerTestJSON-1959164190-project-member] [instance: 4e3dee19-b99a-4257-88da-1b0531e2c0f9] Deleted Snapshot of the VM instance {{(pid=62519) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1574.334369] env[62519]: INFO nova.compute.manager [None req-37d16fe4-3717-4983-a91a-6f9d45bf9fb5 tempest-ImagesOneServerTestJSON-1959164190 tempest-ImagesOneServerTestJSON-1959164190-project-member] [instance: 4e3dee19-b99a-4257-88da-1b0531e2c0f9] Took 16.73 seconds to snapshot the instance on the hypervisor. [ 1574.412051] env[62519]: DEBUG oslo_vmware.api [None req-4073dc01-e5b2-4480-9b24-60e6c1491ad2 tempest-ImagesNegativeTestJSON-1194326905 tempest-ImagesNegativeTestJSON-1194326905-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52a3dd97-d210-b32a-67c5-9ea9aee50dd5, 'name': SearchDatastore_Task, 'duration_secs': 0.041868} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1574.413870] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d9710a11-9c0a-4900-94e1-287dd4cb59fc {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1574.421045] env[62519]: DEBUG oslo_vmware.api [None req-4073dc01-e5b2-4480-9b24-60e6c1491ad2 tempest-ImagesNegativeTestJSON-1194326905 tempest-ImagesNegativeTestJSON-1194326905-project-member] Waiting for the task: (returnval){ [ 1574.421045] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]52741675-413d-f3bf-7b15-a9915fb83e0c" [ 1574.421045] env[62519]: _type = "Task" [ 1574.421045] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1574.433078] env[62519]: DEBUG oslo_vmware.api [None req-4073dc01-e5b2-4480-9b24-60e6c1491ad2 tempest-ImagesNegativeTestJSON-1194326905 tempest-ImagesNegativeTestJSON-1194326905-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52741675-413d-f3bf-7b15-a9915fb83e0c, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1574.466480] env[62519]: DEBUG nova.network.neutron [None req-df0df1dc-dc85-41ce-be1d-c03bdfcf0ead tempest-ServersTestBootFromVolume-845595414 tempest-ServersTestBootFromVolume-845595414-project-member] [instance: 765cf18e-53a0-4cc6-ad0e-337a6f68915c] Successfully created port: 53564e7d-5a36-43d6-a79a-d13c12623d01 {{(pid=62519) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1574.501965] env[62519]: DEBUG nova.compute.utils [None req-e418db9a-80a0-48d0-b7f9-5c74a86504d6 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Using /dev/sd instead of None {{(pid=62519) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1574.505534] env[62519]: DEBUG oslo_concurrency.lockutils [None req-571d3639-ba38-4525-8b7d-0e38c143d5ca tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] Acquiring lock "refresh_cache-8070aa59-3547-460a-b914-0e84620023d0" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1574.505703] env[62519]: DEBUG oslo_concurrency.lockutils [None req-571d3639-ba38-4525-8b7d-0e38c143d5ca tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] Acquired lock "refresh_cache-8070aa59-3547-460a-b914-0e84620023d0" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1574.505872] env[62519]: DEBUG nova.network.neutron [None req-571d3639-ba38-4525-8b7d-0e38c143d5ca tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] [instance: 8070aa59-3547-460a-b914-0e84620023d0] Building network info cache for instance {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1574.506052] env[62519]: DEBUG nova.objects.instance [None req-571d3639-ba38-4525-8b7d-0e38c143d5ca tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] Lazy-loading 'info_cache' on Instance uuid 8070aa59-3547-460a-b914-0e84620023d0 {{(pid=62519) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1574.734157] env[62519]: INFO nova.virt.block_device [None req-df0df1dc-dc85-41ce-be1d-c03bdfcf0ead tempest-ServersTestBootFromVolume-845595414 tempest-ServersTestBootFromVolume-845595414-project-member] [instance: 765cf18e-53a0-4cc6-ad0e-337a6f68915c] Booting with volume 9692c2cd-3d11-41b6-88ba-eae1158b8c33 at /dev/sda [ 1574.750432] env[62519]: DEBUG oslo_vmware.api [None req-9ac93790-d8fc-4625-a913-ddc460237e69 tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] Task: {'id': task-1802414, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1574.789963] env[62519]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-3c6dd00f-b810-4a10-bcb0-6b2d500b386e {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1574.803257] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-330ada3d-9fbd-403b-b95e-966e3d07b4d5 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1574.851397] env[62519]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-faf573bc-0841-49e5-b2fe-41ac3dad2e06 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1574.854156] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d2257d2-7d9c-4b87-95d7-bcbe7bcdfa80 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1574.864673] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-695d9d4c-8bfc-4158-aa70-3baf4ae7dfe1 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1574.870584] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d35df0cd-a181-44af-b264-b7a53880644b {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1574.932632] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6f7d505-845b-456f-a3fa-238d4cfaf9e9 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1574.939507] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17a9b803-862a-493b-b793-d522fc45e364 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1574.949100] env[62519]: DEBUG oslo_vmware.api [None req-4073dc01-e5b2-4480-9b24-60e6c1491ad2 tempest-ImagesNegativeTestJSON-1194326905 tempest-ImagesNegativeTestJSON-1194326905-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52741675-413d-f3bf-7b15-a9915fb83e0c, 'name': SearchDatastore_Task, 'duration_secs': 0.015668} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1574.954146] env[62519]: DEBUG oslo_concurrency.lockutils [None req-4073dc01-e5b2-4480-9b24-60e6c1491ad2 tempest-ImagesNegativeTestJSON-1194326905 tempest-ImagesNegativeTestJSON-1194326905-project-member] Releasing lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1574.954146] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-4073dc01-e5b2-4480-9b24-60e6c1491ad2 tempest-ImagesNegativeTestJSON-1194326905 tempest-ImagesNegativeTestJSON-1194326905-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk to [datastore1] 1b4efc0f-474f-4a32-b199-c14f27b183e2/1b4efc0f-474f-4a32-b199-c14f27b183e2.vmdk {{(pid=62519) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1574.954689] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-865c49c2-98ca-47e7-a4e0-794bd50e61f5 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1574.957670] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf772da6-65b7-4250-8b39-01d39ffc88f7 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1574.962226] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1504fe13-bb23-4e80-a841-466e26b06833 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1574.977304] env[62519]: DEBUG nova.compute.provider_tree [None req-b80a0845-960b-4b9e-b305-46f7cb345916 tempest-SecurityGroupsTestJSON-38868017 tempest-SecurityGroupsTestJSON-38868017-project-member] Inventory has not changed in ProviderTree for provider: f8ca0d98-9158-4b85-ae0e-b106f966dd44 {{(pid=62519) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1574.983015] env[62519]: DEBUG oslo_vmware.api [None req-4073dc01-e5b2-4480-9b24-60e6c1491ad2 tempest-ImagesNegativeTestJSON-1194326905 tempest-ImagesNegativeTestJSON-1194326905-project-member] Waiting for the task: (returnval){ [ 1574.983015] env[62519]: value = "task-1802415" [ 1574.983015] env[62519]: _type = "Task" [ 1574.983015] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1574.983332] env[62519]: DEBUG nova.virt.block_device [None req-df0df1dc-dc85-41ce-be1d-c03bdfcf0ead tempest-ServersTestBootFromVolume-845595414 tempest-ServersTestBootFromVolume-845595414-project-member] [instance: 765cf18e-53a0-4cc6-ad0e-337a6f68915c] Updating existing volume attachment record: 0411480b-968e-44bd-ace8-a04da3a97f45 {{(pid=62519) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1574.996765] env[62519]: DEBUG oslo_vmware.api [None req-4073dc01-e5b2-4480-9b24-60e6c1491ad2 tempest-ImagesNegativeTestJSON-1194326905 tempest-ImagesNegativeTestJSON-1194326905-project-member] Task: {'id': task-1802415, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1575.006056] env[62519]: DEBUG oslo_concurrency.lockutils [None req-e418db9a-80a0-48d0-b7f9-5c74a86504d6 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Lock "d8780c40-0099-4ccc-84ae-72fbb14fa1ee" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.009s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1575.008773] env[62519]: DEBUG nova.objects.base [None req-571d3639-ba38-4525-8b7d-0e38c143d5ca tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] Object Instance<8070aa59-3547-460a-b914-0e84620023d0> lazy-loaded attributes: flavor,info_cache {{(pid=62519) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1575.046462] env[62519]: DEBUG nova.compute.manager [req-0dee5e7f-9559-4c6d-a777-a3079bff8005 req-238a4cbd-2264-4163-b3ed-5e7ccff11d1e service nova] [instance: 1b4efc0f-474f-4a32-b199-c14f27b183e2] Received event network-changed-ec32b319-d783-4ebf-997e-88d3a805675f {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1575.046462] env[62519]: DEBUG nova.compute.manager [req-0dee5e7f-9559-4c6d-a777-a3079bff8005 req-238a4cbd-2264-4163-b3ed-5e7ccff11d1e service nova] [instance: 1b4efc0f-474f-4a32-b199-c14f27b183e2] Refreshing instance network info cache due to event network-changed-ec32b319-d783-4ebf-997e-88d3a805675f. {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11628}} [ 1575.046462] env[62519]: DEBUG oslo_concurrency.lockutils [req-0dee5e7f-9559-4c6d-a777-a3079bff8005 req-238a4cbd-2264-4163-b3ed-5e7ccff11d1e service nova] Acquiring lock "refresh_cache-1b4efc0f-474f-4a32-b199-c14f27b183e2" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1575.046462] env[62519]: DEBUG oslo_concurrency.lockutils [req-0dee5e7f-9559-4c6d-a777-a3079bff8005 req-238a4cbd-2264-4163-b3ed-5e7ccff11d1e service nova] Acquired lock "refresh_cache-1b4efc0f-474f-4a32-b199-c14f27b183e2" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1575.046462] env[62519]: DEBUG nova.network.neutron [req-0dee5e7f-9559-4c6d-a777-a3079bff8005 req-238a4cbd-2264-4163-b3ed-5e7ccff11d1e service nova] [instance: 1b4efc0f-474f-4a32-b199-c14f27b183e2] Refreshing network info cache for port ec32b319-d783-4ebf-997e-88d3a805675f {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1575.254099] env[62519]: DEBUG oslo_vmware.api [None req-9ac93790-d8fc-4625-a913-ddc460237e69 tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] Task: {'id': task-1802414, 'name': RemoveSnapshot_Task, 'duration_secs': 1.008605} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1575.254659] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-9ac93790-d8fc-4625-a913-ddc460237e69 tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] [instance: 1d4b14d3-8832-457e-aaed-462236555f57] Deleted Snapshot of the VM instance {{(pid=62519) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1575.254835] env[62519]: INFO nova.compute.manager [None req-9ac93790-d8fc-4625-a913-ddc460237e69 tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] [instance: 1d4b14d3-8832-457e-aaed-462236555f57] Took 17.27 seconds to snapshot the instance on the hypervisor. [ 1575.487229] env[62519]: DEBUG nova.scheduler.client.report [None req-b80a0845-960b-4b9e-b305-46f7cb345916 tempest-SecurityGroupsTestJSON-38868017 tempest-SecurityGroupsTestJSON-38868017-project-member] Inventory has not changed for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 157, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1575.507900] env[62519]: DEBUG oslo_vmware.api [None req-4073dc01-e5b2-4480-9b24-60e6c1491ad2 tempest-ImagesNegativeTestJSON-1194326905 tempest-ImagesNegativeTestJSON-1194326905-project-member] Task: {'id': task-1802415, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1575.996017] env[62519]: DEBUG oslo_concurrency.lockutils [None req-b80a0845-960b-4b9e-b305-46f7cb345916 tempest-SecurityGroupsTestJSON-38868017 tempest-SecurityGroupsTestJSON-38868017-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.787s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1575.996578] env[62519]: DEBUG nova.compute.manager [None req-b80a0845-960b-4b9e-b305-46f7cb345916 tempest-SecurityGroupsTestJSON-38868017 tempest-SecurityGroupsTestJSON-38868017-project-member] [instance: ad374dd9-a92d-4b76-9609-7562346e05a8] Start building networks asynchronously for instance. 
{{(pid=62519) _build_resources /opt/stack/nova/nova/compute/manager.py:2838}} [ 1575.999704] env[62519]: DEBUG oslo_concurrency.lockutils [None req-07784f7b-8852-4eb7-b75c-baa3510afa4f tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 26.956s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1576.001254] env[62519]: INFO nova.compute.claims [None req-07784f7b-8852-4eb7-b75c-baa3510afa4f tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] [instance: dac173ff-1807-405f-a59c-bb2efef62a0c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1576.017711] env[62519]: DEBUG oslo_vmware.api [None req-4073dc01-e5b2-4480-9b24-60e6c1491ad2 tempest-ImagesNegativeTestJSON-1194326905 tempest-ImagesNegativeTestJSON-1194326905-project-member] Task: {'id': task-1802415, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.638854} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1576.017977] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-4073dc01-e5b2-4480-9b24-60e6c1491ad2 tempest-ImagesNegativeTestJSON-1194326905 tempest-ImagesNegativeTestJSON-1194326905-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk to [datastore1] 1b4efc0f-474f-4a32-b199-c14f27b183e2/1b4efc0f-474f-4a32-b199-c14f27b183e2.vmdk {{(pid=62519) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1576.019014] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-4073dc01-e5b2-4480-9b24-60e6c1491ad2 tempest-ImagesNegativeTestJSON-1194326905 tempest-ImagesNegativeTestJSON-1194326905-project-member] [instance: 1b4efc0f-474f-4a32-b199-c14f27b183e2] Extending root virtual disk to 1048576 {{(pid=62519) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1576.019014] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-19620baf-693a-4062-8f68-f19a1aa7261a {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1576.035607] env[62519]: DEBUG oslo_vmware.api [None req-4073dc01-e5b2-4480-9b24-60e6c1491ad2 tempest-ImagesNegativeTestJSON-1194326905 tempest-ImagesNegativeTestJSON-1194326905-project-member] Waiting for the task: (returnval){ [ 1576.035607] env[62519]: value = "task-1802416" [ 1576.035607] env[62519]: _type = "Task" [ 1576.035607] env[62519]: } to complete. 
{{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1576.042074] env[62519]: DEBUG nova.network.neutron [None req-571d3639-ba38-4525-8b7d-0e38c143d5ca tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] [instance: 8070aa59-3547-460a-b914-0e84620023d0] Updating instance_info_cache with network_info: [{"id": "11187365-8def-40f6-822e-05bb089dd16b", "address": "fa:16:3e:54:e8:ea", "network": {"id": "4c7928be-c05e-40d8-8744-53752aa1c802", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-1081889914-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f69f08ab17cc423a98f0ae56f706c62b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c4712af2-45ef-4652-8d2c-482ec70056d0", "external-id": "nsx-vlan-transportzone-826", "segmentation_id": 826, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap11187365-8d", "ovs_interfaceid": "11187365-8def-40f6-822e-05bb089dd16b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1576.050942] env[62519]: DEBUG oslo_vmware.api [None req-4073dc01-e5b2-4480-9b24-60e6c1491ad2 tempest-ImagesNegativeTestJSON-1194326905 tempest-ImagesNegativeTestJSON-1194326905-project-member] Task: {'id': task-1802416, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1576.082334] env[62519]: DEBUG nova.network.neutron [req-0dee5e7f-9559-4c6d-a777-a3079bff8005 req-238a4cbd-2264-4163-b3ed-5e7ccff11d1e service nova] [instance: 1b4efc0f-474f-4a32-b199-c14f27b183e2] Updated VIF entry in instance network info cache for port ec32b319-d783-4ebf-997e-88d3a805675f. 
{{(pid=62519) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1576.082691] env[62519]: DEBUG nova.network.neutron [req-0dee5e7f-9559-4c6d-a777-a3079bff8005 req-238a4cbd-2264-4163-b3ed-5e7ccff11d1e service nova] [instance: 1b4efc0f-474f-4a32-b199-c14f27b183e2] Updating instance_info_cache with network_info: [{"id": "ec32b319-d783-4ebf-997e-88d3a805675f", "address": "fa:16:3e:a3:dc:c4", "network": {"id": "bf224c23-ba0a-4aeb-9499-d655798b9ef9", "bridge": "br-int", "label": "tempest-ImagesNegativeTestJSON-432746111-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "dde025495cf3419db731c5299f90879d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "78340140-126f-4ef8-a340-debaa64da3e5", "external-id": "nsx-vlan-transportzone-648", "segmentation_id": 648, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapec32b319-d7", "ovs_interfaceid": "ec32b319-d783-4ebf-997e-88d3a805675f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1576.102986] env[62519]: DEBUG oslo_concurrency.lockutils [None req-e418db9a-80a0-48d0-b7f9-5c74a86504d6 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Acquiring lock "d8780c40-0099-4ccc-84ae-72fbb14fa1ee" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1576.103261] env[62519]: DEBUG oslo_concurrency.lockutils [None req-e418db9a-80a0-48d0-b7f9-5c74a86504d6 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Lock "d8780c40-0099-4ccc-84ae-72fbb14fa1ee" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1576.103484] env[62519]: INFO nova.compute.manager [None req-e418db9a-80a0-48d0-b7f9-5c74a86504d6 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] [instance: d8780c40-0099-4ccc-84ae-72fbb14fa1ee] Attaching volume 326b2c58-a69e-42e3-a1c0-532dd3c5b724 to /dev/sdb [ 1576.146647] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8af4f3b2-0bd5-41bc-86fa-edbaa1270fa4 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1576.157955] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5431bb25-eead-4779-96f0-bc0d6ecbb89b {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1576.176521] env[62519]: DEBUG nova.virt.block_device [None req-e418db9a-80a0-48d0-b7f9-5c74a86504d6 tempest-DeleteServersTestJSON-1737011426 
tempest-DeleteServersTestJSON-1737011426-project-member] [instance: d8780c40-0099-4ccc-84ae-72fbb14fa1ee] Updating existing volume attachment record: feb39f16-7f8c-42d9-a2ae-261e41e55807 {{(pid=62519) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1576.514160] env[62519]: DEBUG nova.compute.utils [None req-b80a0845-960b-4b9e-b305-46f7cb345916 tempest-SecurityGroupsTestJSON-38868017 tempest-SecurityGroupsTestJSON-38868017-project-member] Using /dev/sd instead of None {{(pid=62519) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1576.516573] env[62519]: DEBUG nova.compute.manager [None req-b80a0845-960b-4b9e-b305-46f7cb345916 tempest-SecurityGroupsTestJSON-38868017 tempest-SecurityGroupsTestJSON-38868017-project-member] [instance: ad374dd9-a92d-4b76-9609-7562346e05a8] Allocating IP information in the background. {{(pid=62519) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1989}} [ 1576.518227] env[62519]: DEBUG nova.network.neutron [None req-b80a0845-960b-4b9e-b305-46f7cb345916 tempest-SecurityGroupsTestJSON-38868017 tempest-SecurityGroupsTestJSON-38868017-project-member] [instance: ad374dd9-a92d-4b76-9609-7562346e05a8] allocate_for_instance() {{(pid=62519) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1576.551064] env[62519]: DEBUG oslo_concurrency.lockutils [None req-571d3639-ba38-4525-8b7d-0e38c143d5ca tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] Releasing lock "refresh_cache-8070aa59-3547-460a-b914-0e84620023d0" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1576.551422] env[62519]: DEBUG oslo_vmware.api [None req-4073dc01-e5b2-4480-9b24-60e6c1491ad2 tempest-ImagesNegativeTestJSON-1194326905 tempest-ImagesNegativeTestJSON-1194326905-project-member] Task: {'id': task-1802416, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.091664} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1576.551932] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-4073dc01-e5b2-4480-9b24-60e6c1491ad2 tempest-ImagesNegativeTestJSON-1194326905 tempest-ImagesNegativeTestJSON-1194326905-project-member] [instance: 1b4efc0f-474f-4a32-b199-c14f27b183e2] Extended root virtual disk {{(pid=62519) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1576.553099] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e7e309e-1ade-4743-8d9f-fcd69363bc17 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1576.580989] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-4073dc01-e5b2-4480-9b24-60e6c1491ad2 tempest-ImagesNegativeTestJSON-1194326905 tempest-ImagesNegativeTestJSON-1194326905-project-member] [instance: 1b4efc0f-474f-4a32-b199-c14f27b183e2] Reconfiguring VM instance instance-0000002d to attach disk [datastore1] 1b4efc0f-474f-4a32-b199-c14f27b183e2/1b4efc0f-474f-4a32-b199-c14f27b183e2.vmdk or device None with type sparse {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1576.581959] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-247313f1-7859-4446-a651-306c0bc35c31 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1576.598218] env[62519]: DEBUG oslo_concurrency.lockutils [req-0dee5e7f-9559-4c6d-a777-a3079bff8005 req-238a4cbd-2264-4163-b3ed-5e7ccff11d1e service nova] Releasing lock "refresh_cache-1b4efc0f-474f-4a32-b199-c14f27b183e2" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1576.609772] env[62519]: DEBUG oslo_vmware.api [None req-4073dc01-e5b2-4480-9b24-60e6c1491ad2 tempest-ImagesNegativeTestJSON-1194326905 tempest-ImagesNegativeTestJSON-1194326905-project-member] Waiting for the task: (returnval){ [ 1576.609772] env[62519]: value = "task-1802419" [ 1576.609772] env[62519]: _type = "Task" [ 1576.609772] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1576.621520] env[62519]: DEBUG oslo_vmware.api [None req-4073dc01-e5b2-4480-9b24-60e6c1491ad2 tempest-ImagesNegativeTestJSON-1194326905 tempest-ImagesNegativeTestJSON-1194326905-project-member] Task: {'id': task-1802419, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1576.659249] env[62519]: DEBUG nova.policy [None req-b80a0845-960b-4b9e-b305-46f7cb345916 tempest-SecurityGroupsTestJSON-38868017 tempest-SecurityGroupsTestJSON-38868017-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '317da34374674698bc67ec498455a09d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'd044d408a8674580b0f5cd52ca6e756d', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62519) authorize /opt/stack/nova/nova/policy.py:192}} [ 1576.726018] env[62519]: DEBUG nova.compute.manager [None req-64126044-6a15-4d18-9748-0958bc8df406 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] [instance: a1551278-a306-4534-8d8d-3b3a003dde04] Stashing vm_state: active {{(pid=62519) _prep_resize /opt/stack/nova/nova/compute/manager.py:6081}} [ 1576.798534] env[62519]: DEBUG nova.network.neutron [None req-df0df1dc-dc85-41ce-be1d-c03bdfcf0ead tempest-ServersTestBootFromVolume-845595414 tempest-ServersTestBootFromVolume-845595414-project-member] [instance: 765cf18e-53a0-4cc6-ad0e-337a6f68915c] Successfully updated port: 53564e7d-5a36-43d6-a79a-d13c12623d01 {{(pid=62519) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1577.021026] env[62519]: DEBUG nova.compute.manager [None req-b80a0845-960b-4b9e-b305-46f7cb345916 tempest-SecurityGroupsTestJSON-38868017 tempest-SecurityGroupsTestJSON-38868017-project-member] [instance: ad374dd9-a92d-4b76-9609-7562346e05a8] Start building block device mappings for instance. {{(pid=62519) _build_resources /opt/stack/nova/nova/compute/manager.py:2873}} [ 1577.107039] env[62519]: DEBUG nova.compute.manager [None req-df0df1dc-dc85-41ce-be1d-c03bdfcf0ead tempest-ServersTestBootFromVolume-845595414 tempest-ServersTestBootFromVolume-845595414-project-member] [instance: 765cf18e-53a0-4cc6-ad0e-337a6f68915c] Start spawning the instance on the hypervisor. 
{{(pid=62519) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2647}} [ 1577.108998] env[62519]: DEBUG nova.virt.hardware [None req-df0df1dc-dc85-41ce-be1d-c03bdfcf0ead tempest-ServersTestBootFromVolume-845595414 tempest-ServersTestBootFromVolume-845595414-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T07:56:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=0,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=1073741824,status='active',tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1577.108998] env[62519]: DEBUG nova.virt.hardware [None req-df0df1dc-dc85-41ce-be1d-c03bdfcf0ead tempest-ServersTestBootFromVolume-845595414 tempest-ServersTestBootFromVolume-845595414-project-member] Flavor limits 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1577.108998] env[62519]: DEBUG nova.virt.hardware [None req-df0df1dc-dc85-41ce-be1d-c03bdfcf0ead tempest-ServersTestBootFromVolume-845595414 tempest-ServersTestBootFromVolume-845595414-project-member] Image limits 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1577.108998] env[62519]: DEBUG nova.virt.hardware [None req-df0df1dc-dc85-41ce-be1d-c03bdfcf0ead tempest-ServersTestBootFromVolume-845595414 tempest-ServersTestBootFromVolume-845595414-project-member] Flavor pref 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1577.108998] env[62519]: DEBUG nova.virt.hardware [None req-df0df1dc-dc85-41ce-be1d-c03bdfcf0ead tempest-ServersTestBootFromVolume-845595414 tempest-ServersTestBootFromVolume-845595414-project-member] Image pref 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1577.108998] env[62519]: DEBUG nova.virt.hardware [None req-df0df1dc-dc85-41ce-be1d-c03bdfcf0ead tempest-ServersTestBootFromVolume-845595414 tempest-ServersTestBootFromVolume-845595414-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1577.109533] env[62519]: DEBUG nova.virt.hardware [None req-df0df1dc-dc85-41ce-be1d-c03bdfcf0ead tempest-ServersTestBootFromVolume-845595414 tempest-ServersTestBootFromVolume-845595414-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1577.109826] env[62519]: DEBUG nova.virt.hardware [None req-df0df1dc-dc85-41ce-be1d-c03bdfcf0ead tempest-ServersTestBootFromVolume-845595414 tempest-ServersTestBootFromVolume-845595414-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62519) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1577.110425] env[62519]: DEBUG nova.virt.hardware [None req-df0df1dc-dc85-41ce-be1d-c03bdfcf0ead tempest-ServersTestBootFromVolume-845595414 tempest-ServersTestBootFromVolume-845595414-project-member] Got 1 
possible topologies {{(pid=62519) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1577.114018] env[62519]: DEBUG nova.virt.hardware [None req-df0df1dc-dc85-41ce-be1d-c03bdfcf0ead tempest-ServersTestBootFromVolume-845595414 tempest-ServersTestBootFromVolume-845595414-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1577.114018] env[62519]: DEBUG nova.virt.hardware [None req-df0df1dc-dc85-41ce-be1d-c03bdfcf0ead tempest-ServersTestBootFromVolume-845595414 tempest-ServersTestBootFromVolume-845595414-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1577.115242] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3826e365-3243-4234-a737-acba7d875c18 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1577.132833] env[62519]: DEBUG oslo_vmware.api [None req-4073dc01-e5b2-4480-9b24-60e6c1491ad2 tempest-ImagesNegativeTestJSON-1194326905 tempest-ImagesNegativeTestJSON-1194326905-project-member] Task: {'id': task-1802419, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1577.138974] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d129f50b-7ad0-42a0-9923-565e56632d15 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1577.235740] env[62519]: DEBUG nova.network.neutron [None req-b80a0845-960b-4b9e-b305-46f7cb345916 tempest-SecurityGroupsTestJSON-38868017 tempest-SecurityGroupsTestJSON-38868017-project-member] [instance: ad374dd9-a92d-4b76-9609-7562346e05a8] Successfully created port: 8d20f83b-f706-40ea-bba7-461aba73113f {{(pid=62519) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1577.250832] env[62519]: DEBUG oslo_concurrency.lockutils [None req-64126044-6a15-4d18-9748-0958bc8df406 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1577.301568] env[62519]: DEBUG oslo_concurrency.lockutils [None req-df0df1dc-dc85-41ce-be1d-c03bdfcf0ead tempest-ServersTestBootFromVolume-845595414 tempest-ServersTestBootFromVolume-845595414-project-member] Acquiring lock "refresh_cache-765cf18e-53a0-4cc6-ad0e-337a6f68915c" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1577.301662] env[62519]: DEBUG oslo_concurrency.lockutils [None req-df0df1dc-dc85-41ce-be1d-c03bdfcf0ead tempest-ServersTestBootFromVolume-845595414 tempest-ServersTestBootFromVolume-845595414-project-member] Acquired lock "refresh_cache-765cf18e-53a0-4cc6-ad0e-337a6f68915c" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1577.301736] env[62519]: DEBUG nova.network.neutron [None req-df0df1dc-dc85-41ce-be1d-c03bdfcf0ead tempest-ServersTestBootFromVolume-845595414 tempest-ServersTestBootFromVolume-845595414-project-member] [instance: 
765cf18e-53a0-4cc6-ad0e-337a6f68915c] Building network info cache for instance {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1577.555354] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-571d3639-ba38-4525-8b7d-0e38c143d5ca tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] [instance: 8070aa59-3547-460a-b914-0e84620023d0] Powering on the VM {{(pid=62519) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1577.555839] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-4e050a7e-adcc-41cb-971d-9b15b1984f37 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1577.565707] env[62519]: DEBUG oslo_vmware.api [None req-571d3639-ba38-4525-8b7d-0e38c143d5ca tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] Waiting for the task: (returnval){ [ 1577.565707] env[62519]: value = "task-1802421" [ 1577.565707] env[62519]: _type = "Task" [ 1577.565707] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1577.584201] env[62519]: DEBUG oslo_vmware.api [None req-571d3639-ba38-4525-8b7d-0e38c143d5ca tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] Task: {'id': task-1802421, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1577.625322] env[62519]: DEBUG oslo_vmware.api [None req-4073dc01-e5b2-4480-9b24-60e6c1491ad2 tempest-ImagesNegativeTestJSON-1194326905 tempest-ImagesNegativeTestJSON-1194326905-project-member] Task: {'id': task-1802419, 'name': ReconfigVM_Task, 'duration_secs': 0.857052} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1577.629219] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-4073dc01-e5b2-4480-9b24-60e6c1491ad2 tempest-ImagesNegativeTestJSON-1194326905 tempest-ImagesNegativeTestJSON-1194326905-project-member] [instance: 1b4efc0f-474f-4a32-b199-c14f27b183e2] Reconfigured VM instance instance-0000002d to attach disk [datastore1] 1b4efc0f-474f-4a32-b199-c14f27b183e2/1b4efc0f-474f-4a32-b199-c14f27b183e2.vmdk or device None with type sparse {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1577.630963] env[62519]: DEBUG nova.compute.manager [req-4070b419-a7c1-4ba4-8f99-0ad84e9404fe req-09c1915b-06f4-4619-8f00-80a3e659e74b service nova] [instance: 765cf18e-53a0-4cc6-ad0e-337a6f68915c] Received event network-vif-plugged-53564e7d-5a36-43d6-a79a-d13c12623d01 {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1577.631182] env[62519]: DEBUG oslo_concurrency.lockutils [req-4070b419-a7c1-4ba4-8f99-0ad84e9404fe req-09c1915b-06f4-4619-8f00-80a3e659e74b service nova] Acquiring lock "765cf18e-53a0-4cc6-ad0e-337a6f68915c-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1577.631406] env[62519]: DEBUG oslo_concurrency.lockutils [req-4070b419-a7c1-4ba4-8f99-0ad84e9404fe req-09c1915b-06f4-4619-8f00-80a3e659e74b service nova] Lock "765cf18e-53a0-4cc6-ad0e-337a6f68915c-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1577.631542] env[62519]: DEBUG oslo_concurrency.lockutils [req-4070b419-a7c1-4ba4-8f99-0ad84e9404fe req-09c1915b-06f4-4619-8f00-80a3e659e74b service nova] Lock "765cf18e-53a0-4cc6-ad0e-337a6f68915c-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1577.631724] env[62519]: DEBUG nova.compute.manager [req-4070b419-a7c1-4ba4-8f99-0ad84e9404fe req-09c1915b-06f4-4619-8f00-80a3e659e74b service nova] [instance: 765cf18e-53a0-4cc6-ad0e-337a6f68915c] No waiting events found dispatching network-vif-plugged-53564e7d-5a36-43d6-a79a-d13c12623d01 {{(pid=62519) pop_instance_event /opt/stack/nova/nova/compute/manager.py:323}} [ 1577.631913] env[62519]: WARNING nova.compute.manager [req-4070b419-a7c1-4ba4-8f99-0ad84e9404fe req-09c1915b-06f4-4619-8f00-80a3e659e74b service nova] [instance: 765cf18e-53a0-4cc6-ad0e-337a6f68915c] Received unexpected event network-vif-plugged-53564e7d-5a36-43d6-a79a-d13c12623d01 for instance with vm_state building and task_state spawning. 
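(Editor's note, illustrative aside.) The repeated "Waiting for the task: (returnval){ value = "task-18024xx" ... }" and "progress is N%" pairs in this stretch of the log come from the driver submitting a vCenter task (PowerOnVM_Task, Rename_Task, ReconfigVM_Task, CreateVM_Task, RelocateVM_Task, ...) and then blocking on oslo_vmware's wait_for_task, which polls the task state until it reports success or error. The sketch below is a minimal, hypothetical reconstruction of that polling loop, not the oslo_vmware implementation; TaskProgress, poll_task_info and TaskFailed are stand-ins for the real PropertyCollector round-trips and error types the library uses.

```python
# Illustrative sketch only -- NOT the oslo_vmware implementation. It mimics the
# wait_for_task/_poll_task behaviour visible in the log: poll a vCenter task,
# log its progress, return on success, raise on error. TaskProgress,
# poll_task_info and TaskFailed are hypothetical stand-ins.
import time
from dataclasses import dataclass


@dataclass
class TaskProgress:            # hypothetical: shape of one poll result
    state: str                 # 'running', 'success' or 'error'
    progress: int              # percent complete, as logged ("progress is 87%")
    error: str | None = None


class TaskFailed(Exception):   # hypothetical error type for a failed task
    pass


def wait_for_task(task_id, poll_task_info, interval=0.5):
    """Block until the task completes, logging progress along the way."""
    start = time.monotonic()
    while True:
        info = poll_task_info(task_id)   # one round-trip to the vSphere API
        if info.state == "success":
            duration = time.monotonic() - start
            print(f"Task {task_id} completed successfully in {duration:.3f}s")
            return
        if info.state == "error":
            raise TaskFailed(f"Task {task_id} failed: {info.error}")
        print(f"Task {task_id} progress is {info.progress}%")
        time.sleep(interval)             # analogous to the task poll interval


# Tiny demo with a fake task that finishes after three polls.
if __name__ == "__main__":
    states = iter([TaskProgress("running", 0),
                   TaskProgress("running", 87),
                   TaskProgress("success", 100)])
    wait_for_task("task-1802423", lambda _id: next(states), interval=0.01)
```

The same pattern explains the interleaving seen here: while one request waits on its task, other request contexts keep logging their own polls, which is why progress lines for task-1802421, task-1802422 and task-1802423 alternate in the entries that follow.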
[ 1577.632074] env[62519]: DEBUG nova.compute.manager [req-4070b419-a7c1-4ba4-8f99-0ad84e9404fe req-09c1915b-06f4-4619-8f00-80a3e659e74b service nova] [instance: 765cf18e-53a0-4cc6-ad0e-337a6f68915c] Received event network-changed-53564e7d-5a36-43d6-a79a-d13c12623d01 {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1577.632227] env[62519]: DEBUG nova.compute.manager [req-4070b419-a7c1-4ba4-8f99-0ad84e9404fe req-09c1915b-06f4-4619-8f00-80a3e659e74b service nova] [instance: 765cf18e-53a0-4cc6-ad0e-337a6f68915c] Refreshing instance network info cache due to event network-changed-53564e7d-5a36-43d6-a79a-d13c12623d01. {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11628}} [ 1577.632416] env[62519]: DEBUG oslo_concurrency.lockutils [req-4070b419-a7c1-4ba4-8f99-0ad84e9404fe req-09c1915b-06f4-4619-8f00-80a3e659e74b service nova] Acquiring lock "refresh_cache-765cf18e-53a0-4cc6-ad0e-337a6f68915c" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1577.633118] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-31c85140-dbb7-4a51-bbc8-65f9c78d65a8 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1577.643269] env[62519]: DEBUG oslo_vmware.api [None req-4073dc01-e5b2-4480-9b24-60e6c1491ad2 tempest-ImagesNegativeTestJSON-1194326905 tempest-ImagesNegativeTestJSON-1194326905-project-member] Waiting for the task: (returnval){ [ 1577.643269] env[62519]: value = "task-1802422" [ 1577.643269] env[62519]: _type = "Task" [ 1577.643269] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1577.660575] env[62519]: DEBUG oslo_vmware.api [None req-4073dc01-e5b2-4480-9b24-60e6c1491ad2 tempest-ImagesNegativeTestJSON-1194326905 tempest-ImagesNegativeTestJSON-1194326905-project-member] Task: {'id': task-1802422, 'name': Rename_Task} progress is 6%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1577.665780] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8bf80c67-b837-4130-ab5d-6d22feec4745 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1577.675160] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0b7bd79-88da-4807-bea1-e21a706a8e34 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1577.711295] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6dc4412-d4b8-4124-9e7b-d22d9eb82131 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1577.720281] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-372d5d16-f3ad-4b70-aad4-34ba80d720a0 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1577.737046] env[62519]: DEBUG nova.compute.provider_tree [None req-07784f7b-8852-4eb7-b75c-baa3510afa4f tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Inventory has not changed in ProviderTree for provider: f8ca0d98-9158-4b85-ae0e-b106f966dd44 {{(pid=62519) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1577.918712] env[62519]: DEBUG nova.network.neutron [None req-df0df1dc-dc85-41ce-be1d-c03bdfcf0ead tempest-ServersTestBootFromVolume-845595414 tempest-ServersTestBootFromVolume-845595414-project-member] [instance: 765cf18e-53a0-4cc6-ad0e-337a6f68915c] Instance cache missing network info. {{(pid=62519) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1578.040257] env[62519]: DEBUG nova.compute.manager [None req-b80a0845-960b-4b9e-b305-46f7cb345916 tempest-SecurityGroupsTestJSON-38868017 tempest-SecurityGroupsTestJSON-38868017-project-member] [instance: ad374dd9-a92d-4b76-9609-7562346e05a8] Start spawning the instance on the hypervisor. 
{{(pid=62519) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2647}} [ 1578.064324] env[62519]: DEBUG nova.virt.hardware [None req-b80a0845-960b-4b9e-b305-46f7cb345916 tempest-SecurityGroupsTestJSON-38868017 tempest-SecurityGroupsTestJSON-38868017-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T07:56:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T07:55:55Z,direct_url=,disk_format='vmdk',id=15793716-f1d9-4a86-9030-717adf498693,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4069337684d348e0a1ab4eb6a1a2b14d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T07:55:56Z,virtual_size=,visibility=), allow threads: False {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1578.064583] env[62519]: DEBUG nova.virt.hardware [None req-b80a0845-960b-4b9e-b305-46f7cb345916 tempest-SecurityGroupsTestJSON-38868017 tempest-SecurityGroupsTestJSON-38868017-project-member] Flavor limits 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1578.064740] env[62519]: DEBUG nova.virt.hardware [None req-b80a0845-960b-4b9e-b305-46f7cb345916 tempest-SecurityGroupsTestJSON-38868017 tempest-SecurityGroupsTestJSON-38868017-project-member] Image limits 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1578.064927] env[62519]: DEBUG nova.virt.hardware [None req-b80a0845-960b-4b9e-b305-46f7cb345916 tempest-SecurityGroupsTestJSON-38868017 tempest-SecurityGroupsTestJSON-38868017-project-member] Flavor pref 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1578.065082] env[62519]: DEBUG nova.virt.hardware [None req-b80a0845-960b-4b9e-b305-46f7cb345916 tempest-SecurityGroupsTestJSON-38868017 tempest-SecurityGroupsTestJSON-38868017-project-member] Image pref 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1578.065237] env[62519]: DEBUG nova.virt.hardware [None req-b80a0845-960b-4b9e-b305-46f7cb345916 tempest-SecurityGroupsTestJSON-38868017 tempest-SecurityGroupsTestJSON-38868017-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1578.065443] env[62519]: DEBUG nova.virt.hardware [None req-b80a0845-960b-4b9e-b305-46f7cb345916 tempest-SecurityGroupsTestJSON-38868017 tempest-SecurityGroupsTestJSON-38868017-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1578.065597] env[62519]: DEBUG nova.virt.hardware [None req-b80a0845-960b-4b9e-b305-46f7cb345916 tempest-SecurityGroupsTestJSON-38868017 tempest-SecurityGroupsTestJSON-38868017-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62519) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1578.065762] env[62519]: DEBUG nova.virt.hardware [None 
req-b80a0845-960b-4b9e-b305-46f7cb345916 tempest-SecurityGroupsTestJSON-38868017 tempest-SecurityGroupsTestJSON-38868017-project-member] Got 1 possible topologies {{(pid=62519) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1578.065921] env[62519]: DEBUG nova.virt.hardware [None req-b80a0845-960b-4b9e-b305-46f7cb345916 tempest-SecurityGroupsTestJSON-38868017 tempest-SecurityGroupsTestJSON-38868017-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1578.066101] env[62519]: DEBUG nova.virt.hardware [None req-b80a0845-960b-4b9e-b305-46f7cb345916 tempest-SecurityGroupsTestJSON-38868017 tempest-SecurityGroupsTestJSON-38868017-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1578.069522] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83c3da50-9351-4b1c-9d68-883f727d9d4e {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1578.082633] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d686e44d-2d32-40d4-996d-29b12ac2a410 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1578.088495] env[62519]: DEBUG oslo_vmware.api [None req-571d3639-ba38-4525-8b7d-0e38c143d5ca tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] Task: {'id': task-1802421, 'name': PowerOnVM_Task} progress is 100%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1578.152908] env[62519]: DEBUG oslo_vmware.api [None req-4073dc01-e5b2-4480-9b24-60e6c1491ad2 tempest-ImagesNegativeTestJSON-1194326905 tempest-ImagesNegativeTestJSON-1194326905-project-member] Task: {'id': task-1802422, 'name': Rename_Task, 'duration_secs': 0.238652} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1578.153333] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-4073dc01-e5b2-4480-9b24-60e6c1491ad2 tempest-ImagesNegativeTestJSON-1194326905 tempest-ImagesNegativeTestJSON-1194326905-project-member] [instance: 1b4efc0f-474f-4a32-b199-c14f27b183e2] Powering on the VM {{(pid=62519) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1578.153487] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-0acb79f8-acd4-4bf0-a496-927dbcf97cbc {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1578.161287] env[62519]: DEBUG oslo_vmware.api [None req-4073dc01-e5b2-4480-9b24-60e6c1491ad2 tempest-ImagesNegativeTestJSON-1194326905 tempest-ImagesNegativeTestJSON-1194326905-project-member] Waiting for the task: (returnval){ [ 1578.161287] env[62519]: value = "task-1802423" [ 1578.161287] env[62519]: _type = "Task" [ 1578.161287] env[62519]: } to complete. 
{{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1578.171810] env[62519]: DEBUG oslo_vmware.api [None req-4073dc01-e5b2-4480-9b24-60e6c1491ad2 tempest-ImagesNegativeTestJSON-1194326905 tempest-ImagesNegativeTestJSON-1194326905-project-member] Task: {'id': task-1802423, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1578.241325] env[62519]: DEBUG nova.scheduler.client.report [None req-07784f7b-8852-4eb7-b75c-baa3510afa4f tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Inventory has not changed for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 157, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1578.412906] env[62519]: DEBUG nova.network.neutron [None req-df0df1dc-dc85-41ce-be1d-c03bdfcf0ead tempest-ServersTestBootFromVolume-845595414 tempest-ServersTestBootFromVolume-845595414-project-member] [instance: 765cf18e-53a0-4cc6-ad0e-337a6f68915c] Updating instance_info_cache with network_info: [{"id": "53564e7d-5a36-43d6-a79a-d13c12623d01", "address": "fa:16:3e:c5:8c:e0", "network": {"id": "3cf98c73-7859-4700-ba4c-831fd0971624", "bridge": "br-int", "label": "tempest-ServersTestBootFromVolume-1479507906-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ffa09c2044954d848a83de9c87fff94e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ed9dc063-5c7a-4591-ba7d-b58b861d7f63", "external-id": "nsx-vlan-transportzone-37", "segmentation_id": 37, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap53564e7d-5a", "ovs_interfaceid": "53564e7d-5a36-43d6-a79a-d13c12623d01", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1578.579709] env[62519]: DEBUG oslo_vmware.api [None req-571d3639-ba38-4525-8b7d-0e38c143d5ca tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] Task: {'id': task-1802421, 'name': PowerOnVM_Task, 'duration_secs': 0.549153} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1578.580099] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-571d3639-ba38-4525-8b7d-0e38c143d5ca tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] [instance: 8070aa59-3547-460a-b914-0e84620023d0] Powered on the VM {{(pid=62519) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1578.580351] env[62519]: DEBUG nova.compute.manager [None req-571d3639-ba38-4525-8b7d-0e38c143d5ca tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] [instance: 8070aa59-3547-460a-b914-0e84620023d0] Checking state {{(pid=62519) _get_power_state /opt/stack/nova/nova/compute/manager.py:1799}} [ 1578.581186] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be18713a-eda6-41a8-98a2-fa56dd6d35fa {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1578.672721] env[62519]: DEBUG oslo_vmware.api [None req-4073dc01-e5b2-4480-9b24-60e6c1491ad2 tempest-ImagesNegativeTestJSON-1194326905 tempest-ImagesNegativeTestJSON-1194326905-project-member] Task: {'id': task-1802423, 'name': PowerOnVM_Task} progress is 87%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1578.750667] env[62519]: DEBUG oslo_concurrency.lockutils [None req-07784f7b-8852-4eb7-b75c-baa3510afa4f tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.751s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1578.751302] env[62519]: DEBUG nova.compute.manager [None req-07784f7b-8852-4eb7-b75c-baa3510afa4f tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] [instance: dac173ff-1807-405f-a59c-bb2efef62a0c] Start building networks asynchronously for instance. 
{{(pid=62519) _build_resources /opt/stack/nova/nova/compute/manager.py:2838}} [ 1578.754896] env[62519]: DEBUG oslo_concurrency.lockutils [None req-89d0611e-f35d-463c-a92b-756efdb9d348 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 26.812s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1578.755207] env[62519]: DEBUG nova.objects.instance [None req-89d0611e-f35d-463c-a92b-756efdb9d348 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Lazy-loading 'resources' on Instance uuid 3b506d10-a427-47b8-ab5f-c35e450b7eb1 {{(pid=62519) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1578.914495] env[62519]: DEBUG oslo_concurrency.lockutils [None req-df0df1dc-dc85-41ce-be1d-c03bdfcf0ead tempest-ServersTestBootFromVolume-845595414 tempest-ServersTestBootFromVolume-845595414-project-member] Releasing lock "refresh_cache-765cf18e-53a0-4cc6-ad0e-337a6f68915c" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1578.915580] env[62519]: DEBUG nova.compute.manager [None req-df0df1dc-dc85-41ce-be1d-c03bdfcf0ead tempest-ServersTestBootFromVolume-845595414 tempest-ServersTestBootFromVolume-845595414-project-member] [instance: 765cf18e-53a0-4cc6-ad0e-337a6f68915c] Instance network_info: |[{"id": "53564e7d-5a36-43d6-a79a-d13c12623d01", "address": "fa:16:3e:c5:8c:e0", "network": {"id": "3cf98c73-7859-4700-ba4c-831fd0971624", "bridge": "br-int", "label": "tempest-ServersTestBootFromVolume-1479507906-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ffa09c2044954d848a83de9c87fff94e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ed9dc063-5c7a-4591-ba7d-b58b861d7f63", "external-id": "nsx-vlan-transportzone-37", "segmentation_id": 37, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap53564e7d-5a", "ovs_interfaceid": "53564e7d-5a36-43d6-a79a-d13c12623d01", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62519) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2004}} [ 1578.915957] env[62519]: DEBUG oslo_concurrency.lockutils [req-4070b419-a7c1-4ba4-8f99-0ad84e9404fe req-09c1915b-06f4-4619-8f00-80a3e659e74b service nova] Acquired lock "refresh_cache-765cf18e-53a0-4cc6-ad0e-337a6f68915c" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1578.916269] env[62519]: DEBUG nova.network.neutron [req-4070b419-a7c1-4ba4-8f99-0ad84e9404fe req-09c1915b-06f4-4619-8f00-80a3e659e74b service nova] [instance: 765cf18e-53a0-4cc6-ad0e-337a6f68915c] Refreshing network info cache for port 53564e7d-5a36-43d6-a79a-d13c12623d01 {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1578.921707] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None 
req-df0df1dc-dc85-41ce-be1d-c03bdfcf0ead tempest-ServersTestBootFromVolume-845595414 tempest-ServersTestBootFromVolume-845595414-project-member] [instance: 765cf18e-53a0-4cc6-ad0e-337a6f68915c] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:c5:8c:e0', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ed9dc063-5c7a-4591-ba7d-b58b861d7f63', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '53564e7d-5a36-43d6-a79a-d13c12623d01', 'vif_model': 'vmxnet3'}] {{(pid=62519) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1578.931406] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-df0df1dc-dc85-41ce-be1d-c03bdfcf0ead tempest-ServersTestBootFromVolume-845595414 tempest-ServersTestBootFromVolume-845595414-project-member] Creating folder: Project (ffa09c2044954d848a83de9c87fff94e). Parent ref: group-v373567. {{(pid=62519) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1578.933930] env[62519]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-69662e8b-528d-4ffb-b3a9-0f5f83b0fb21 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1578.954848] env[62519]: WARNING suds.client [-] Web service reported a SOAP processing fault using an unexpected HTTP status code 200. Reporting as an internal server error. [ 1578.954848] env[62519]: DEBUG oslo_vmware.api [-] Fault list: [DuplicateName] {{(pid=62519) _invoke_api /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:337}} [ 1578.955037] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-df0df1dc-dc85-41ce-be1d-c03bdfcf0ead tempest-ServersTestBootFromVolume-845595414 tempest-ServersTestBootFromVolume-845595414-project-member] Folder already exists: Project (ffa09c2044954d848a83de9c87fff94e). Parent ref: group-v373567. {{(pid=62519) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1609}} [ 1578.955243] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-df0df1dc-dc85-41ce-be1d-c03bdfcf0ead tempest-ServersTestBootFromVolume-845595414 tempest-ServersTestBootFromVolume-845595414-project-member] Creating folder: Instances. Parent ref: group-v373631. {{(pid=62519) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1578.955511] env[62519]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-6f04ff65-2692-4a91-b894-bbcc413df798 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1578.968143] env[62519]: INFO nova.virt.vmwareapi.vm_util [None req-df0df1dc-dc85-41ce-be1d-c03bdfcf0ead tempest-ServersTestBootFromVolume-845595414 tempest-ServersTestBootFromVolume-845595414-project-member] Created folder: Instances in parent group-v373631. [ 1578.968419] env[62519]: DEBUG oslo.service.loopingcall [None req-df0df1dc-dc85-41ce-be1d-c03bdfcf0ead tempest-ServersTestBootFromVolume-845595414 tempest-ServersTestBootFromVolume-845595414-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62519) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1578.968621] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 765cf18e-53a0-4cc6-ad0e-337a6f68915c] Creating VM on the ESX host {{(pid=62519) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1578.969218] env[62519]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f0fb7c2d-95fd-4a76-9df3-1675cde3205f {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1578.993789] env[62519]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1578.993789] env[62519]: value = "task-1802427" [ 1578.993789] env[62519]: _type = "Task" [ 1578.993789] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1579.007815] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1802427, 'name': CreateVM_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1579.182835] env[62519]: DEBUG oslo_vmware.api [None req-4073dc01-e5b2-4480-9b24-60e6c1491ad2 tempest-ImagesNegativeTestJSON-1194326905 tempest-ImagesNegativeTestJSON-1194326905-project-member] Task: {'id': task-1802423, 'name': PowerOnVM_Task, 'duration_secs': 0.824424} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1579.183265] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-4073dc01-e5b2-4480-9b24-60e6c1491ad2 tempest-ImagesNegativeTestJSON-1194326905 tempest-ImagesNegativeTestJSON-1194326905-project-member] [instance: 1b4efc0f-474f-4a32-b199-c14f27b183e2] Powered on the VM {{(pid=62519) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1579.183517] env[62519]: INFO nova.compute.manager [None req-4073dc01-e5b2-4480-9b24-60e6c1491ad2 tempest-ImagesNegativeTestJSON-1194326905 tempest-ImagesNegativeTestJSON-1194326905-project-member] [instance: 1b4efc0f-474f-4a32-b199-c14f27b183e2] Took 10.68 seconds to spawn the instance on the hypervisor. [ 1579.183766] env[62519]: DEBUG nova.compute.manager [None req-4073dc01-e5b2-4480-9b24-60e6c1491ad2 tempest-ImagesNegativeTestJSON-1194326905 tempest-ImagesNegativeTestJSON-1194326905-project-member] [instance: 1b4efc0f-474f-4a32-b199-c14f27b183e2] Checking state {{(pid=62519) _get_power_state /opt/stack/nova/nova/compute/manager.py:1799}} [ 1579.184956] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac6d9cb5-91ed-4dbc-9e99-003888d39b79 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1579.262467] env[62519]: DEBUG nova.compute.utils [None req-07784f7b-8852-4eb7-b75c-baa3510afa4f tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Using /dev/sd instead of None {{(pid=62519) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1579.268053] env[62519]: DEBUG nova.compute.manager [None req-07784f7b-8852-4eb7-b75c-baa3510afa4f tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] [instance: dac173ff-1807-405f-a59c-bb2efef62a0c] Allocating IP information in the background. 
{{(pid=62519) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1989}} [ 1579.268053] env[62519]: DEBUG nova.network.neutron [None req-07784f7b-8852-4eb7-b75c-baa3510afa4f tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] [instance: dac173ff-1807-405f-a59c-bb2efef62a0c] allocate_for_instance() {{(pid=62519) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1579.367026] env[62519]: DEBUG nova.network.neutron [None req-b80a0845-960b-4b9e-b305-46f7cb345916 tempest-SecurityGroupsTestJSON-38868017 tempest-SecurityGroupsTestJSON-38868017-project-member] [instance: ad374dd9-a92d-4b76-9609-7562346e05a8] Successfully updated port: 8d20f83b-f706-40ea-bba7-461aba73113f {{(pid=62519) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1579.447754] env[62519]: DEBUG nova.policy [None req-07784f7b-8852-4eb7-b75c-baa3510afa4f tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c8355a912c5b4aef8ad7f30c2e64fdc4', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '555fa612766f4b5fa173664ca3fa496c', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62519) authorize /opt/stack/nova/nova/policy.py:192}} [ 1579.508324] env[62519]: DEBUG nova.compute.manager [None req-b8aad1ad-699a-4e07-8919-0f4cbeaeb95e tempest-ImagesOneServerTestJSON-1959164190 tempest-ImagesOneServerTestJSON-1959164190-project-member] [instance: 4e3dee19-b99a-4257-88da-1b0531e2c0f9] Checking state {{(pid=62519) _get_power_state /opt/stack/nova/nova/compute/manager.py:1799}} [ 1579.509681] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c554b640-8c14-48b8-850b-e103d5430e02 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1579.530097] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1802427, 'name': CreateVM_Task} progress is 99%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1579.718028] env[62519]: INFO nova.compute.manager [None req-4073dc01-e5b2-4480-9b24-60e6c1491ad2 tempest-ImagesNegativeTestJSON-1194326905 tempest-ImagesNegativeTestJSON-1194326905-project-member] [instance: 1b4efc0f-474f-4a32-b199-c14f27b183e2] Took 49.69 seconds to build instance. [ 1579.765206] env[62519]: DEBUG nova.compute.manager [None req-07784f7b-8852-4eb7-b75c-baa3510afa4f tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] [instance: dac173ff-1807-405f-a59c-bb2efef62a0c] Start building block device mappings for instance. {{(pid=62519) _build_resources /opt/stack/nova/nova/compute/manager.py:2873}} [ 1579.776987] env[62519]: DEBUG nova.network.neutron [req-4070b419-a7c1-4ba4-8f99-0ad84e9404fe req-09c1915b-06f4-4619-8f00-80a3e659e74b service nova] [instance: 765cf18e-53a0-4cc6-ad0e-337a6f68915c] Updated VIF entry in instance network info cache for port 53564e7d-5a36-43d6-a79a-d13c12623d01. 
{{(pid=62519) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1579.778895] env[62519]: DEBUG nova.network.neutron [req-4070b419-a7c1-4ba4-8f99-0ad84e9404fe req-09c1915b-06f4-4619-8f00-80a3e659e74b service nova] [instance: 765cf18e-53a0-4cc6-ad0e-337a6f68915c] Updating instance_info_cache with network_info: [{"id": "53564e7d-5a36-43d6-a79a-d13c12623d01", "address": "fa:16:3e:c5:8c:e0", "network": {"id": "3cf98c73-7859-4700-ba4c-831fd0971624", "bridge": "br-int", "label": "tempest-ServersTestBootFromVolume-1479507906-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ffa09c2044954d848a83de9c87fff94e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ed9dc063-5c7a-4591-ba7d-b58b861d7f63", "external-id": "nsx-vlan-transportzone-37", "segmentation_id": 37, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap53564e7d-5a", "ovs_interfaceid": "53564e7d-5a36-43d6-a79a-d13c12623d01", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1579.790595] env[62519]: DEBUG nova.compute.manager [req-33ff19ad-2023-4ebf-a070-d27108b0bd96 req-1c679e91-d18c-4dee-921c-c3fbf1d6ed5b service nova] [instance: ad374dd9-a92d-4b76-9609-7562346e05a8] Received event network-vif-plugged-8d20f83b-f706-40ea-bba7-461aba73113f {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1579.790804] env[62519]: DEBUG oslo_concurrency.lockutils [req-33ff19ad-2023-4ebf-a070-d27108b0bd96 req-1c679e91-d18c-4dee-921c-c3fbf1d6ed5b service nova] Acquiring lock "ad374dd9-a92d-4b76-9609-7562346e05a8-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1579.791027] env[62519]: DEBUG oslo_concurrency.lockutils [req-33ff19ad-2023-4ebf-a070-d27108b0bd96 req-1c679e91-d18c-4dee-921c-c3fbf1d6ed5b service nova] Lock "ad374dd9-a92d-4b76-9609-7562346e05a8-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1579.791190] env[62519]: DEBUG oslo_concurrency.lockutils [req-33ff19ad-2023-4ebf-a070-d27108b0bd96 req-1c679e91-d18c-4dee-921c-c3fbf1d6ed5b service nova] Lock "ad374dd9-a92d-4b76-9609-7562346e05a8-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1579.791353] env[62519]: DEBUG nova.compute.manager [req-33ff19ad-2023-4ebf-a070-d27108b0bd96 req-1c679e91-d18c-4dee-921c-c3fbf1d6ed5b service nova] [instance: ad374dd9-a92d-4b76-9609-7562346e05a8] No waiting events found dispatching network-vif-plugged-8d20f83b-f706-40ea-bba7-461aba73113f {{(pid=62519) pop_instance_event 
/opt/stack/nova/nova/compute/manager.py:323}} [ 1579.791512] env[62519]: WARNING nova.compute.manager [req-33ff19ad-2023-4ebf-a070-d27108b0bd96 req-1c679e91-d18c-4dee-921c-c3fbf1d6ed5b service nova] [instance: ad374dd9-a92d-4b76-9609-7562346e05a8] Received unexpected event network-vif-plugged-8d20f83b-f706-40ea-bba7-461aba73113f for instance with vm_state building and task_state spawning. [ 1579.870144] env[62519]: DEBUG oslo_concurrency.lockutils [None req-b80a0845-960b-4b9e-b305-46f7cb345916 tempest-SecurityGroupsTestJSON-38868017 tempest-SecurityGroupsTestJSON-38868017-project-member] Acquiring lock "refresh_cache-ad374dd9-a92d-4b76-9609-7562346e05a8" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1579.870565] env[62519]: DEBUG oslo_concurrency.lockutils [None req-b80a0845-960b-4b9e-b305-46f7cb345916 tempest-SecurityGroupsTestJSON-38868017 tempest-SecurityGroupsTestJSON-38868017-project-member] Acquired lock "refresh_cache-ad374dd9-a92d-4b76-9609-7562346e05a8" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1579.870724] env[62519]: DEBUG nova.network.neutron [None req-b80a0845-960b-4b9e-b305-46f7cb345916 tempest-SecurityGroupsTestJSON-38868017 tempest-SecurityGroupsTestJSON-38868017-project-member] [instance: ad374dd9-a92d-4b76-9609-7562346e05a8] Building network info cache for instance {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1579.947189] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ec67c57-7717-407a-a7b4-f454d6ab7b70 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1579.956563] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7acd1aea-386e-4ba3-8dc5-61fe2a6e583b {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1579.990766] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a293e39-a5c7-42ec-bad2-30731c3cffe1 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1580.001574] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b94e32b-cc90-46a9-a931-9de686bc738e {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1580.008835] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1802427, 'name': CreateVM_Task} progress is 99%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1580.019712] env[62519]: DEBUG nova.compute.provider_tree [None req-89d0611e-f35d-463c-a92b-756efdb9d348 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Inventory has not changed in ProviderTree for provider: f8ca0d98-9158-4b85-ae0e-b106f966dd44 {{(pid=62519) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1580.035486] env[62519]: INFO nova.compute.manager [None req-b8aad1ad-699a-4e07-8919-0f4cbeaeb95e tempest-ImagesOneServerTestJSON-1959164190 tempest-ImagesOneServerTestJSON-1959164190-project-member] [instance: 4e3dee19-b99a-4257-88da-1b0531e2c0f9] instance snapshotting [ 1580.040016] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49ebc1a0-0e3d-425a-9a18-b84c19e6bdda {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1580.060427] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b1212c7-9cee-4175-bc5f-ed79df5b74c8 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1580.286192] env[62519]: DEBUG oslo_concurrency.lockutils [req-4070b419-a7c1-4ba4-8f99-0ad84e9404fe req-09c1915b-06f4-4619-8f00-80a3e659e74b service nova] Releasing lock "refresh_cache-765cf18e-53a0-4cc6-ad0e-337a6f68915c" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1580.423861] env[62519]: DEBUG nova.network.neutron [None req-b80a0845-960b-4b9e-b305-46f7cb345916 tempest-SecurityGroupsTestJSON-38868017 tempest-SecurityGroupsTestJSON-38868017-project-member] [instance: ad374dd9-a92d-4b76-9609-7562346e05a8] Instance cache missing network info. {{(pid=62519) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1580.506120] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1802427, 'name': CreateVM_Task, 'duration_secs': 1.444698} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1580.506288] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 765cf18e-53a0-4cc6-ad0e-337a6f68915c] Created VM on the ESX host {{(pid=62519) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1580.506995] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-df0df1dc-dc85-41ce-be1d-c03bdfcf0ead tempest-ServersTestBootFromVolume-845595414 tempest-ServersTestBootFromVolume-845595414-project-member] [instance: 765cf18e-53a0-4cc6-ad0e-337a6f68915c] Block device information present: {'root_device_name': '/dev/sda', 'image': [], 'ephemerals': [], 'block_device_mapping': [{'connection_info': {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-373638', 'volume_id': '9692c2cd-3d11-41b6-88ba-eae1158b8c33', 'name': 'volume-9692c2cd-3d11-41b6-88ba-eae1158b8c33', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '765cf18e-53a0-4cc6-ad0e-337a6f68915c', 'attached_at': '', 'detached_at': '', 'volume_id': '9692c2cd-3d11-41b6-88ba-eae1158b8c33', 'serial': '9692c2cd-3d11-41b6-88ba-eae1158b8c33'}, 'guest_format': None, 'boot_index': 0, 'delete_on_termination': True, 'device_type': None, 'disk_bus': None, 'mount_device': '/dev/sda', 'attachment_id': '0411480b-968e-44bd-ace8-a04da3a97f45', 'volume_type': None}], 'swap': None} {{(pid=62519) spawn /opt/stack/nova/nova/virt/vmwareapi/vmops.py:799}} [ 1580.507223] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-df0df1dc-dc85-41ce-be1d-c03bdfcf0ead tempest-ServersTestBootFromVolume-845595414 tempest-ServersTestBootFromVolume-845595414-project-member] [instance: 765cf18e-53a0-4cc6-ad0e-337a6f68915c] Root volume attach. 
Driver type: vmdk {{(pid=62519) attach_root_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:661}} [ 1580.508022] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a5c1dcb2-30fc-410e-92cb-01074b05f68d {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1580.517713] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9de2f7c-d364-452e-8e72-d73ef0493360 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1580.524837] env[62519]: DEBUG nova.scheduler.client.report [None req-89d0611e-f35d-463c-a92b-756efdb9d348 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Inventory has not changed for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 157, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1580.533606] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-048b629d-34eb-41e3-87b3-30bf2ae8ee95 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1580.550892] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.RelocateVM_Task with opID=oslo.vmware-08b318ab-e417-44fc-bd2b-9ab0f36d8fcb {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1580.562878] env[62519]: DEBUG oslo_vmware.api [None req-df0df1dc-dc85-41ce-be1d-c03bdfcf0ead tempest-ServersTestBootFromVolume-845595414 tempest-ServersTestBootFromVolume-845595414-project-member] Waiting for the task: (returnval){ [ 1580.562878] env[62519]: value = "task-1802428" [ 1580.562878] env[62519]: _type = "Task" [ 1580.562878] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1580.573175] env[62519]: DEBUG oslo_vmware.api [None req-df0df1dc-dc85-41ce-be1d-c03bdfcf0ead tempest-ServersTestBootFromVolume-845595414 tempest-ServersTestBootFromVolume-845595414-project-member] Task: {'id': task-1802428, 'name': RelocateVM_Task} progress is 5%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1580.574185] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-b8aad1ad-699a-4e07-8919-0f4cbeaeb95e tempest-ImagesOneServerTestJSON-1959164190 tempest-ImagesOneServerTestJSON-1959164190-project-member] [instance: 4e3dee19-b99a-4257-88da-1b0531e2c0f9] Creating Snapshot of the VM instance {{(pid=62519) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1580.574611] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-698e0783-7438-4dc8-943d-18255ff009a7 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1580.582524] env[62519]: DEBUG oslo_vmware.api [None req-b8aad1ad-699a-4e07-8919-0f4cbeaeb95e tempest-ImagesOneServerTestJSON-1959164190 tempest-ImagesOneServerTestJSON-1959164190-project-member] Waiting for the task: (returnval){ [ 1580.582524] env[62519]: value = "task-1802429" [ 1580.582524] env[62519]: _type = "Task" [ 1580.582524] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1580.587655] env[62519]: DEBUG nova.network.neutron [None req-b80a0845-960b-4b9e-b305-46f7cb345916 tempest-SecurityGroupsTestJSON-38868017 tempest-SecurityGroupsTestJSON-38868017-project-member] [instance: ad374dd9-a92d-4b76-9609-7562346e05a8] Updating instance_info_cache with network_info: [{"id": "8d20f83b-f706-40ea-bba7-461aba73113f", "address": "fa:16:3e:c3:bc:6c", "network": {"id": "9dc148fb-a4d7-47bd-b004-56a2552812eb", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-1607343584-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d044d408a8674580b0f5cd52ca6e756d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1f762954-6ca5-4da5-bf0a-5d31c51ec570", "external-id": "nsx-vlan-transportzone-930", "segmentation_id": 930, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8d20f83b-f7", "ovs_interfaceid": "8d20f83b-f706-40ea-bba7-461aba73113f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1580.596346] env[62519]: DEBUG oslo_vmware.api [None req-b8aad1ad-699a-4e07-8919-0f4cbeaeb95e tempest-ImagesOneServerTestJSON-1959164190 tempest-ImagesOneServerTestJSON-1959164190-project-member] Task: {'id': task-1802429, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1580.721029] env[62519]: DEBUG nova.network.neutron [None req-07784f7b-8852-4eb7-b75c-baa3510afa4f tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] [instance: dac173ff-1807-405f-a59c-bb2efef62a0c] Successfully created port: bb52070a-ffaf-478e-b8c5-9273d58a0b34 {{(pid=62519) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1580.786909] env[62519]: DEBUG nova.compute.manager [None req-07784f7b-8852-4eb7-b75c-baa3510afa4f tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] [instance: dac173ff-1807-405f-a59c-bb2efef62a0c] Start spawning the instance on the hypervisor. {{(pid=62519) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2647}} [ 1580.817774] env[62519]: DEBUG nova.virt.hardware [None req-07784f7b-8852-4eb7-b75c-baa3510afa4f tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T07:56:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T07:55:55Z,direct_url=,disk_format='vmdk',id=15793716-f1d9-4a86-9030-717adf498693,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4069337684d348e0a1ab4eb6a1a2b14d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T07:55:56Z,virtual_size=,visibility=), allow threads: False {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1580.818238] env[62519]: DEBUG nova.virt.hardware [None req-07784f7b-8852-4eb7-b75c-baa3510afa4f tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Flavor limits 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1580.822343] env[62519]: DEBUG nova.virt.hardware [None req-07784f7b-8852-4eb7-b75c-baa3510afa4f tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Image limits 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1580.823135] env[62519]: DEBUG nova.virt.hardware [None req-07784f7b-8852-4eb7-b75c-baa3510afa4f tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Flavor pref 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1580.823135] env[62519]: DEBUG nova.virt.hardware [None req-07784f7b-8852-4eb7-b75c-baa3510afa4f tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Image pref 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1580.823135] env[62519]: DEBUG nova.virt.hardware [None req-07784f7b-8852-4eb7-b75c-baa3510afa4f tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1580.823356] env[62519]: DEBUG 
nova.virt.hardware [None req-07784f7b-8852-4eb7-b75c-baa3510afa4f tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1580.823570] env[62519]: DEBUG nova.virt.hardware [None req-07784f7b-8852-4eb7-b75c-baa3510afa4f tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62519) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1580.823857] env[62519]: DEBUG nova.virt.hardware [None req-07784f7b-8852-4eb7-b75c-baa3510afa4f tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Got 1 possible topologies {{(pid=62519) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1580.824497] env[62519]: DEBUG nova.virt.hardware [None req-07784f7b-8852-4eb7-b75c-baa3510afa4f tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1580.824497] env[62519]: DEBUG nova.virt.hardware [None req-07784f7b-8852-4eb7-b75c-baa3510afa4f tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1580.826629] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67c61696-2fb1-4db7-81ca-595ebfede825 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1580.838916] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a71ca384-0782-4ce0-b1d1-d2d9243b74c4 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1580.925194] env[62519]: DEBUG oslo_concurrency.lockutils [None req-9f05e4f9-be1d-429d-8b5d-c079517a8a2e tempest-ImagesNegativeTestJSON-1194326905 tempest-ImagesNegativeTestJSON-1194326905-project-member] Acquiring lock "1b4efc0f-474f-4a32-b199-c14f27b183e2" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1581.043318] env[62519]: DEBUG oslo_concurrency.lockutils [None req-89d0611e-f35d-463c-a92b-756efdb9d348 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.289s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1581.046297] env[62519]: DEBUG oslo_concurrency.lockutils [None req-d4f64c01-4859-459d-a49e-3e38bfd7e3ac tempest-ServerAddressesNegativeTestJSON-801003583 tempest-ServerAddressesNegativeTestJSON-801003583-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 27.992s {{(pid=62519) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1581.046692] env[62519]: DEBUG nova.objects.instance [None req-d4f64c01-4859-459d-a49e-3e38bfd7e3ac tempest-ServerAddressesNegativeTestJSON-801003583 tempest-ServerAddressesNegativeTestJSON-801003583-project-member] Lazy-loading 'resources' on Instance uuid 22380aef-c725-43a0-a957-06ced9518c21 {{(pid=62519) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1581.071291] env[62519]: INFO nova.scheduler.client.report [None req-89d0611e-f35d-463c-a92b-756efdb9d348 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Deleted allocations for instance 3b506d10-a427-47b8-ab5f-c35e450b7eb1 [ 1581.078694] env[62519]: DEBUG oslo_vmware.api [None req-df0df1dc-dc85-41ce-be1d-c03bdfcf0ead tempest-ServersTestBootFromVolume-845595414 tempest-ServersTestBootFromVolume-845595414-project-member] Task: {'id': task-1802428, 'name': RelocateVM_Task, 'duration_secs': 0.488889} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1581.082642] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-df0df1dc-dc85-41ce-be1d-c03bdfcf0ead tempest-ServersTestBootFromVolume-845595414 tempest-ServersTestBootFromVolume-845595414-project-member] [instance: 765cf18e-53a0-4cc6-ad0e-337a6f68915c] Volume attach. Driver type: vmdk {{(pid=62519) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1581.082642] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-df0df1dc-dc85-41ce-be1d-c03bdfcf0ead tempest-ServersTestBootFromVolume-845595414 tempest-ServersTestBootFromVolume-845595414-project-member] [instance: 765cf18e-53a0-4cc6-ad0e-337a6f68915c] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-373638', 'volume_id': '9692c2cd-3d11-41b6-88ba-eae1158b8c33', 'name': 'volume-9692c2cd-3d11-41b6-88ba-eae1158b8c33', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '765cf18e-53a0-4cc6-ad0e-337a6f68915c', 'attached_at': '', 'detached_at': '', 'volume_id': '9692c2cd-3d11-41b6-88ba-eae1158b8c33', 'serial': '9692c2cd-3d11-41b6-88ba-eae1158b8c33'} {{(pid=62519) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1581.085790] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e0cb55c-829c-407c-ba3d-ab6f11ed6975 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1581.092391] env[62519]: DEBUG oslo_concurrency.lockutils [None req-b80a0845-960b-4b9e-b305-46f7cb345916 tempest-SecurityGroupsTestJSON-38868017 tempest-SecurityGroupsTestJSON-38868017-project-member] Releasing lock "refresh_cache-ad374dd9-a92d-4b76-9609-7562346e05a8" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1581.093836] env[62519]: DEBUG nova.compute.manager [None req-b80a0845-960b-4b9e-b305-46f7cb345916 tempest-SecurityGroupsTestJSON-38868017 tempest-SecurityGroupsTestJSON-38868017-project-member] [instance: ad374dd9-a92d-4b76-9609-7562346e05a8] Instance network_info: |[{"id": "8d20f83b-f706-40ea-bba7-461aba73113f", "address": "fa:16:3e:c3:bc:6c", "network": {"id": "9dc148fb-a4d7-47bd-b004-56a2552812eb", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-1607343584-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], 
"gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d044d408a8674580b0f5cd52ca6e756d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1f762954-6ca5-4da5-bf0a-5d31c51ec570", "external-id": "nsx-vlan-transportzone-930", "segmentation_id": 930, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8d20f83b-f7", "ovs_interfaceid": "8d20f83b-f706-40ea-bba7-461aba73113f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62519) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2004}} [ 1581.107880] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-b80a0845-960b-4b9e-b305-46f7cb345916 tempest-SecurityGroupsTestJSON-38868017 tempest-SecurityGroupsTestJSON-38868017-project-member] [instance: ad374dd9-a92d-4b76-9609-7562346e05a8] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:c3:bc:6c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '1f762954-6ca5-4da5-bf0a-5d31c51ec570', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '8d20f83b-f706-40ea-bba7-461aba73113f', 'vif_model': 'vmxnet3'}] {{(pid=62519) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1581.116439] env[62519]: DEBUG oslo.service.loopingcall [None req-b80a0845-960b-4b9e-b305-46f7cb345916 tempest-SecurityGroupsTestJSON-38868017 tempest-SecurityGroupsTestJSON-38868017-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62519) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1581.118196] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ad374dd9-a92d-4b76-9609-7562346e05a8] Creating VM on the ESX host {{(pid=62519) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1581.119088] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13064eda-842c-4a26-9719-8782e9fd5e2e {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1581.125787] env[62519]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-af164ea4-d590-4530-a417-f13bba893bd0 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1581.142217] env[62519]: DEBUG oslo_vmware.api [None req-b8aad1ad-699a-4e07-8919-0f4cbeaeb95e tempest-ImagesOneServerTestJSON-1959164190 tempest-ImagesOneServerTestJSON-1959164190-project-member] Task: {'id': task-1802429, 'name': CreateSnapshot_Task, 'duration_secs': 0.472013} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1581.143592] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-b8aad1ad-699a-4e07-8919-0f4cbeaeb95e tempest-ImagesOneServerTestJSON-1959164190 tempest-ImagesOneServerTestJSON-1959164190-project-member] [instance: 4e3dee19-b99a-4257-88da-1b0531e2c0f9] Created Snapshot of the VM instance {{(pid=62519) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1581.144718] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22a77932-bd25-4a3e-a829-f65f76090f71 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1581.182279] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-df0df1dc-dc85-41ce-be1d-c03bdfcf0ead tempest-ServersTestBootFromVolume-845595414 tempest-ServersTestBootFromVolume-845595414-project-member] [instance: 765cf18e-53a0-4cc6-ad0e-337a6f68915c] Reconfiguring VM instance instance-0000002e to attach disk [datastore1] volume-9692c2cd-3d11-41b6-88ba-eae1158b8c33/volume-9692c2cd-3d11-41b6-88ba-eae1158b8c33.vmdk or device None with type thin {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1581.186017] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-99c5b42b-4e4b-41f7-b8ca-605dd5e8342c {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1581.211653] env[62519]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1581.211653] env[62519]: value = "task-1802430" [ 1581.211653] env[62519]: _type = "Task" [ 1581.211653] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1581.229659] env[62519]: DEBUG oslo_concurrency.lockutils [None req-4073dc01-e5b2-4480-9b24-60e6c1491ad2 tempest-ImagesNegativeTestJSON-1194326905 tempest-ImagesNegativeTestJSON-1194326905-project-member] Lock "1b4efc0f-474f-4a32-b199-c14f27b183e2" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 103.168s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1581.229659] env[62519]: DEBUG oslo_vmware.api [None req-df0df1dc-dc85-41ce-be1d-c03bdfcf0ead tempest-ServersTestBootFromVolume-845595414 tempest-ServersTestBootFromVolume-845595414-project-member] Waiting for the task: (returnval){ [ 1581.229659] env[62519]: value = "task-1802431" [ 1581.229659] env[62519]: _type = "Task" [ 1581.229659] env[62519]: } to complete. 
{{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1581.232071] env[62519]: DEBUG oslo_concurrency.lockutils [None req-9f05e4f9-be1d-429d-8b5d-c079517a8a2e tempest-ImagesNegativeTestJSON-1194326905 tempest-ImagesNegativeTestJSON-1194326905-project-member] Lock "1b4efc0f-474f-4a32-b199-c14f27b183e2" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.307s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1581.232396] env[62519]: DEBUG oslo_concurrency.lockutils [None req-9f05e4f9-be1d-429d-8b5d-c079517a8a2e tempest-ImagesNegativeTestJSON-1194326905 tempest-ImagesNegativeTestJSON-1194326905-project-member] Acquiring lock "1b4efc0f-474f-4a32-b199-c14f27b183e2-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1581.232647] env[62519]: DEBUG oslo_concurrency.lockutils [None req-9f05e4f9-be1d-429d-8b5d-c079517a8a2e tempest-ImagesNegativeTestJSON-1194326905 tempest-ImagesNegativeTestJSON-1194326905-project-member] Lock "1b4efc0f-474f-4a32-b199-c14f27b183e2-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1581.232863] env[62519]: DEBUG oslo_concurrency.lockutils [None req-9f05e4f9-be1d-429d-8b5d-c079517a8a2e tempest-ImagesNegativeTestJSON-1194326905 tempest-ImagesNegativeTestJSON-1194326905-project-member] Lock "1b4efc0f-474f-4a32-b199-c14f27b183e2-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1581.235106] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1802430, 'name': CreateVM_Task} progress is 25%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1581.239480] env[62519]: INFO nova.compute.manager [None req-9f05e4f9-be1d-429d-8b5d-c079517a8a2e tempest-ImagesNegativeTestJSON-1194326905 tempest-ImagesNegativeTestJSON-1194326905-project-member] [instance: 1b4efc0f-474f-4a32-b199-c14f27b183e2] Terminating instance [ 1581.242711] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-e418db9a-80a0-48d0-b7f9-5c74a86504d6 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] [instance: d8780c40-0099-4ccc-84ae-72fbb14fa1ee] Volume attach. 
Driver type: vmdk {{(pid=62519) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1581.242926] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-e418db9a-80a0-48d0-b7f9-5c74a86504d6 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] [instance: d8780c40-0099-4ccc-84ae-72fbb14fa1ee] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-373708', 'volume_id': '326b2c58-a69e-42e3-a1c0-532dd3c5b724', 'name': 'volume-326b2c58-a69e-42e3-a1c0-532dd3c5b724', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'd8780c40-0099-4ccc-84ae-72fbb14fa1ee', 'attached_at': '', 'detached_at': '', 'volume_id': '326b2c58-a69e-42e3-a1c0-532dd3c5b724', 'serial': '326b2c58-a69e-42e3-a1c0-532dd3c5b724'} {{(pid=62519) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1581.244494] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e8c37f1-a06e-4a72-ae0e-5920317e63f5 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1581.252321] env[62519]: DEBUG oslo_vmware.api [None req-df0df1dc-dc85-41ce-be1d-c03bdfcf0ead tempest-ServersTestBootFromVolume-845595414 tempest-ServersTestBootFromVolume-845595414-project-member] Task: {'id': task-1802431, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1581.268986] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1daa97a3-873e-4a4e-a60f-6ac4c5a11f81 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1581.298872] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-e418db9a-80a0-48d0-b7f9-5c74a86504d6 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] [instance: d8780c40-0099-4ccc-84ae-72fbb14fa1ee] Reconfiguring VM instance instance-0000002b to attach disk [datastore1] volume-326b2c58-a69e-42e3-a1c0-532dd3c5b724/volume-326b2c58-a69e-42e3-a1c0-532dd3c5b724.vmdk or device None with type thin {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1581.299672] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-cb75b4a9-1171-4ae7-a811-338525e3e682 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1581.320013] env[62519]: DEBUG oslo_vmware.api [None req-e418db9a-80a0-48d0-b7f9-5c74a86504d6 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Waiting for the task: (returnval){ [ 1581.320013] env[62519]: value = "task-1802432" [ 1581.320013] env[62519]: _type = "Task" [ 1581.320013] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1581.329456] env[62519]: DEBUG oslo_vmware.api [None req-e418db9a-80a0-48d0-b7f9-5c74a86504d6 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Task: {'id': task-1802432, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1581.587565] env[62519]: DEBUG oslo_concurrency.lockutils [None req-89d0611e-f35d-463c-a92b-756efdb9d348 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Lock "3b506d10-a427-47b8-ab5f-c35e450b7eb1" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 32.282s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1581.757543] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-b8aad1ad-699a-4e07-8919-0f4cbeaeb95e tempest-ImagesOneServerTestJSON-1959164190 tempest-ImagesOneServerTestJSON-1959164190-project-member] [instance: 4e3dee19-b99a-4257-88da-1b0531e2c0f9] Creating linked-clone VM from snapshot {{(pid=62519) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1581.761580] env[62519]: DEBUG nova.compute.manager [None req-eb7b956f-b805-4696-91cf-83b33daf0afe tempest-ServersTestMultiNic-797441404 tempest-ServersTestMultiNic-797441404-project-member] [instance: d5b6b7b4-ebc4-4d3d-8e16-bf0a4e974d67] Starting instance... {{(pid=62519) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2441}} [ 1581.766333] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-dc150a74-e74d-42b9-8607-9a6ec1d1ad53 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1581.780392] env[62519]: DEBUG nova.compute.manager [None req-9f05e4f9-be1d-429d-8b5d-c079517a8a2e tempest-ImagesNegativeTestJSON-1194326905 tempest-ImagesNegativeTestJSON-1194326905-project-member] [instance: 1b4efc0f-474f-4a32-b199-c14f27b183e2] Start destroying the instance on the hypervisor. {{(pid=62519) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3166}} [ 1581.780642] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-9f05e4f9-be1d-429d-8b5d-c079517a8a2e tempest-ImagesNegativeTestJSON-1194326905 tempest-ImagesNegativeTestJSON-1194326905-project-member] [instance: 1b4efc0f-474f-4a32-b199-c14f27b183e2] Destroying instance {{(pid=62519) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1581.781524] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c321185c-aaf3-4483-9f5a-4221c8539cae {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1581.805659] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1802430, 'name': CreateVM_Task, 'duration_secs': 0.377799} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1581.815979] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ad374dd9-a92d-4b76-9609-7562346e05a8] Created VM on the ESX host {{(pid=62519) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1581.815979] env[62519]: DEBUG oslo_vmware.api [None req-df0df1dc-dc85-41ce-be1d-c03bdfcf0ead tempest-ServersTestBootFromVolume-845595414 tempest-ServersTestBootFromVolume-845595414-project-member] Task: {'id': task-1802431, 'name': ReconfigVM_Task, 'duration_secs': 0.292779} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1581.816835] env[62519]: DEBUG oslo_vmware.api [None req-b8aad1ad-699a-4e07-8919-0f4cbeaeb95e tempest-ImagesOneServerTestJSON-1959164190 tempest-ImagesOneServerTestJSON-1959164190-project-member] Waiting for the task: (returnval){ [ 1581.816835] env[62519]: value = "task-1802433" [ 1581.816835] env[62519]: _type = "Task" [ 1581.816835] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1581.817022] env[62519]: DEBUG oslo_concurrency.lockutils [None req-b80a0845-960b-4b9e-b305-46f7cb345916 tempest-SecurityGroupsTestJSON-38868017 tempest-SecurityGroupsTestJSON-38868017-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1581.818509] env[62519]: DEBUG oslo_concurrency.lockutils [None req-b80a0845-960b-4b9e-b305-46f7cb345916 tempest-SecurityGroupsTestJSON-38868017 tempest-SecurityGroupsTestJSON-38868017-project-member] Acquired lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1581.818876] env[62519]: DEBUG oslo_concurrency.lockutils [None req-b80a0845-960b-4b9e-b305-46f7cb345916 tempest-SecurityGroupsTestJSON-38868017 tempest-SecurityGroupsTestJSON-38868017-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1581.819205] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-df0df1dc-dc85-41ce-be1d-c03bdfcf0ead tempest-ServersTestBootFromVolume-845595414 tempest-ServersTestBootFromVolume-845595414-project-member] [instance: 765cf18e-53a0-4cc6-ad0e-337a6f68915c] Reconfigured VM instance instance-0000002e to attach disk [datastore1] volume-9692c2cd-3d11-41b6-88ba-eae1158b8c33/volume-9692c2cd-3d11-41b6-88ba-eae1158b8c33.vmdk or device None with type thin {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1581.829046] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-79e39560-d317-4726-b268-f291e4a707b8 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1581.830918] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0ade0899-db7b-4124-ba1c-8c7aa7f76104 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1581.857176] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-9f05e4f9-be1d-429d-8b5d-c079517a8a2e tempest-ImagesNegativeTestJSON-1194326905 tempest-ImagesNegativeTestJSON-1194326905-project-member] [instance: 1b4efc0f-474f-4a32-b199-c14f27b183e2] Powering off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1581.871549] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-02432761-5ed1-4aaf-bc00-fd25ce72ff2b {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1581.882380] env[62519]: DEBUG oslo_vmware.api [None 
req-b80a0845-960b-4b9e-b305-46f7cb345916 tempest-SecurityGroupsTestJSON-38868017 tempest-SecurityGroupsTestJSON-38868017-project-member] Waiting for the task: (returnval){ [ 1581.882380] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]5216b184-ebee-d640-3288-fcadd36b128a" [ 1581.882380] env[62519]: _type = "Task" [ 1581.882380] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1581.890427] env[62519]: DEBUG oslo_vmware.api [None req-df0df1dc-dc85-41ce-be1d-c03bdfcf0ead tempest-ServersTestBootFromVolume-845595414 tempest-ServersTestBootFromVolume-845595414-project-member] Waiting for the task: (returnval){ [ 1581.890427] env[62519]: value = "task-1802434" [ 1581.890427] env[62519]: _type = "Task" [ 1581.890427] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1581.891062] env[62519]: DEBUG oslo_vmware.api [None req-b8aad1ad-699a-4e07-8919-0f4cbeaeb95e tempest-ImagesOneServerTestJSON-1959164190 tempest-ImagesOneServerTestJSON-1959164190-project-member] Task: {'id': task-1802433, 'name': CloneVM_Task} progress is 23%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1581.891353] env[62519]: DEBUG oslo_vmware.api [None req-e418db9a-80a0-48d0-b7f9-5c74a86504d6 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Task: {'id': task-1802432, 'name': ReconfigVM_Task, 'duration_secs': 0.538953} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1581.897809] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-e418db9a-80a0-48d0-b7f9-5c74a86504d6 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] [instance: d8780c40-0099-4ccc-84ae-72fbb14fa1ee] Reconfigured VM instance instance-0000002b to attach disk [datastore1] volume-326b2c58-a69e-42e3-a1c0-532dd3c5b724/volume-326b2c58-a69e-42e3-a1c0-532dd3c5b724.vmdk or device None with type thin {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1581.907624] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9aa67e51-8cae-4660-9243-2506d638553a {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1581.918699] env[62519]: DEBUG oslo_vmware.api [None req-9f05e4f9-be1d-429d-8b5d-c079517a8a2e tempest-ImagesNegativeTestJSON-1194326905 tempest-ImagesNegativeTestJSON-1194326905-project-member] Waiting for the task: (returnval){ [ 1581.918699] env[62519]: value = "task-1802435" [ 1581.918699] env[62519]: _type = "Task" [ 1581.918699] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1581.934405] env[62519]: DEBUG oslo_vmware.api [None req-b80a0845-960b-4b9e-b305-46f7cb345916 tempest-SecurityGroupsTestJSON-38868017 tempest-SecurityGroupsTestJSON-38868017-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]5216b184-ebee-d640-3288-fcadd36b128a, 'name': SearchDatastore_Task, 'duration_secs': 0.020154} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1581.934695] env[62519]: DEBUG oslo_vmware.api [None req-df0df1dc-dc85-41ce-be1d-c03bdfcf0ead tempest-ServersTestBootFromVolume-845595414 tempest-ServersTestBootFromVolume-845595414-project-member] Task: {'id': task-1802434, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1581.936555] env[62519]: DEBUG oslo_concurrency.lockutils [None req-b80a0845-960b-4b9e-b305-46f7cb345916 tempest-SecurityGroupsTestJSON-38868017 tempest-SecurityGroupsTestJSON-38868017-project-member] Releasing lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1581.936847] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-b80a0845-960b-4b9e-b305-46f7cb345916 tempest-SecurityGroupsTestJSON-38868017 tempest-SecurityGroupsTestJSON-38868017-project-member] [instance: ad374dd9-a92d-4b76-9609-7562346e05a8] Processing image 15793716-f1d9-4a86-9030-717adf498693 {{(pid=62519) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1581.937140] env[62519]: DEBUG oslo_concurrency.lockutils [None req-b80a0845-960b-4b9e-b305-46f7cb345916 tempest-SecurityGroupsTestJSON-38868017 tempest-SecurityGroupsTestJSON-38868017-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1581.938300] env[62519]: DEBUG oslo_concurrency.lockutils [None req-b80a0845-960b-4b9e-b305-46f7cb345916 tempest-SecurityGroupsTestJSON-38868017 tempest-SecurityGroupsTestJSON-38868017-project-member] Acquired lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1581.938300] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-b80a0845-960b-4b9e-b305-46f7cb345916 tempest-SecurityGroupsTestJSON-38868017 tempest-SecurityGroupsTestJSON-38868017-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62519) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1581.938300] env[62519]: DEBUG oslo_vmware.api [None req-e418db9a-80a0-48d0-b7f9-5c74a86504d6 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Waiting for the task: (returnval){ [ 1581.938300] env[62519]: value = "task-1802436" [ 1581.938300] env[62519]: _type = "Task" [ 1581.938300] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1581.941026] env[62519]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-cc5556fd-c0f7-4758-9f4c-eef8275ac8d7 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1581.951095] env[62519]: DEBUG oslo_vmware.api [None req-9f05e4f9-be1d-429d-8b5d-c079517a8a2e tempest-ImagesNegativeTestJSON-1194326905 tempest-ImagesNegativeTestJSON-1194326905-project-member] Task: {'id': task-1802435, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1581.957454] env[62519]: DEBUG oslo_vmware.api [None req-e418db9a-80a0-48d0-b7f9-5c74a86504d6 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Task: {'id': task-1802436, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1581.960030] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-b80a0845-960b-4b9e-b305-46f7cb345916 tempest-SecurityGroupsTestJSON-38868017 tempest-SecurityGroupsTestJSON-38868017-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62519) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1581.960030] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-b80a0845-960b-4b9e-b305-46f7cb345916 tempest-SecurityGroupsTestJSON-38868017 tempest-SecurityGroupsTestJSON-38868017-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62519) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1581.960662] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8185c1d8-ad1a-413b-8991-099c62869b5c {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1581.971880] env[62519]: DEBUG oslo_vmware.api [None req-b80a0845-960b-4b9e-b305-46f7cb345916 tempest-SecurityGroupsTestJSON-38868017 tempest-SecurityGroupsTestJSON-38868017-project-member] Waiting for the task: (returnval){ [ 1581.971880] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]526845f3-02f5-8655-a8ff-8bffe3731ab6" [ 1581.971880] env[62519]: _type = "Task" [ 1581.971880] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1581.985720] env[62519]: DEBUG oslo_vmware.api [None req-b80a0845-960b-4b9e-b305-46f7cb345916 tempest-SecurityGroupsTestJSON-38868017 tempest-SecurityGroupsTestJSON-38868017-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]526845f3-02f5-8655-a8ff-8bffe3731ab6, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1582.306862] env[62519]: DEBUG oslo_concurrency.lockutils [None req-eb7b956f-b805-4696-91cf-83b33daf0afe tempest-ServersTestMultiNic-797441404 tempest-ServersTestMultiNic-797441404-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1582.317711] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1847879-f8ad-4a5d-8273-19408847b21e {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1582.331975] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3dcab75b-dfda-4323-9758-8d33549718ba {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1582.342317] env[62519]: DEBUG oslo_vmware.api [None req-b8aad1ad-699a-4e07-8919-0f4cbeaeb95e tempest-ImagesOneServerTestJSON-1959164190 tempest-ImagesOneServerTestJSON-1959164190-project-member] Task: {'id': task-1802433, 'name': CloneVM_Task} progress is 94%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1582.377483] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-85e15fb9-e865-40a9-82d6-9d96df06ba1c {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1582.387193] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55714109-5ea6-48b8-9087-a32c11571c60 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1582.404088] env[62519]: DEBUG nova.compute.provider_tree [None req-d4f64c01-4859-459d-a49e-3e38bfd7e3ac tempest-ServerAddressesNegativeTestJSON-801003583 tempest-ServerAddressesNegativeTestJSON-801003583-project-member] Inventory has not changed in ProviderTree for provider: f8ca0d98-9158-4b85-ae0e-b106f966dd44 {{(pid=62519) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1582.414571] env[62519]: DEBUG oslo_vmware.api [None req-df0df1dc-dc85-41ce-be1d-c03bdfcf0ead tempest-ServersTestBootFromVolume-845595414 tempest-ServersTestBootFromVolume-845595414-project-member] Task: {'id': task-1802434, 'name': ReconfigVM_Task, 'duration_secs': 0.167742} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1582.415620] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-df0df1dc-dc85-41ce-be1d-c03bdfcf0ead tempest-ServersTestBootFromVolume-845595414 tempest-ServersTestBootFromVolume-845595414-project-member] [instance: 765cf18e-53a0-4cc6-ad0e-337a6f68915c] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-373638', 'volume_id': '9692c2cd-3d11-41b6-88ba-eae1158b8c33', 'name': 'volume-9692c2cd-3d11-41b6-88ba-eae1158b8c33', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '765cf18e-53a0-4cc6-ad0e-337a6f68915c', 'attached_at': '', 'detached_at': '', 'volume_id': '9692c2cd-3d11-41b6-88ba-eae1158b8c33', 'serial': '9692c2cd-3d11-41b6-88ba-eae1158b8c33'} {{(pid=62519) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1582.416244] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-23129a32-7cb3-451a-a2a2-248a016a691a {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1582.427905] env[62519]: DEBUG oslo_vmware.api [None req-df0df1dc-dc85-41ce-be1d-c03bdfcf0ead tempest-ServersTestBootFromVolume-845595414 tempest-ServersTestBootFromVolume-845595414-project-member] Waiting for the task: (returnval){ [ 1582.427905] env[62519]: value = "task-1802437" [ 1582.427905] env[62519]: _type = "Task" [ 1582.427905] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1582.435711] env[62519]: DEBUG oslo_vmware.api [None req-9f05e4f9-be1d-429d-8b5d-c079517a8a2e tempest-ImagesNegativeTestJSON-1194326905 tempest-ImagesNegativeTestJSON-1194326905-project-member] Task: {'id': task-1802435, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1582.441509] env[62519]: DEBUG oslo_vmware.api [None req-df0df1dc-dc85-41ce-be1d-c03bdfcf0ead tempest-ServersTestBootFromVolume-845595414 tempest-ServersTestBootFromVolume-845595414-project-member] Task: {'id': task-1802437, 'name': Rename_Task} progress is 6%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1582.452845] env[62519]: DEBUG oslo_vmware.api [None req-e418db9a-80a0-48d0-b7f9-5c74a86504d6 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Task: {'id': task-1802436, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1582.459660] env[62519]: DEBUG nova.compute.manager [req-6dac46bc-ad8d-4713-b1bb-695de1e9ec97 req-acb8de35-533b-4889-8da8-46ae1b46d6dc service nova] [instance: ad374dd9-a92d-4b76-9609-7562346e05a8] Received event network-changed-8d20f83b-f706-40ea-bba7-461aba73113f {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1582.459927] env[62519]: DEBUG nova.compute.manager [req-6dac46bc-ad8d-4713-b1bb-695de1e9ec97 req-acb8de35-533b-4889-8da8-46ae1b46d6dc service nova] [instance: ad374dd9-a92d-4b76-9609-7562346e05a8] Refreshing instance network info cache due to event network-changed-8d20f83b-f706-40ea-bba7-461aba73113f. 
{{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11628}} [ 1582.460407] env[62519]: DEBUG oslo_concurrency.lockutils [req-6dac46bc-ad8d-4713-b1bb-695de1e9ec97 req-acb8de35-533b-4889-8da8-46ae1b46d6dc service nova] Acquiring lock "refresh_cache-ad374dd9-a92d-4b76-9609-7562346e05a8" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1582.460622] env[62519]: DEBUG oslo_concurrency.lockutils [req-6dac46bc-ad8d-4713-b1bb-695de1e9ec97 req-acb8de35-533b-4889-8da8-46ae1b46d6dc service nova] Acquired lock "refresh_cache-ad374dd9-a92d-4b76-9609-7562346e05a8" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1582.460879] env[62519]: DEBUG nova.network.neutron [req-6dac46bc-ad8d-4713-b1bb-695de1e9ec97 req-acb8de35-533b-4889-8da8-46ae1b46d6dc service nova] [instance: ad374dd9-a92d-4b76-9609-7562346e05a8] Refreshing network info cache for port 8d20f83b-f706-40ea-bba7-461aba73113f {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1582.484696] env[62519]: DEBUG oslo_vmware.api [None req-b80a0845-960b-4b9e-b305-46f7cb345916 tempest-SecurityGroupsTestJSON-38868017 tempest-SecurityGroupsTestJSON-38868017-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]526845f3-02f5-8655-a8ff-8bffe3731ab6, 'name': SearchDatastore_Task, 'duration_secs': 0.012447} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1582.485556] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e3bed798-d85d-4264-9151-7ff1c6dd3ed5 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1582.492493] env[62519]: DEBUG oslo_vmware.api [None req-b80a0845-960b-4b9e-b305-46f7cb345916 tempest-SecurityGroupsTestJSON-38868017 tempest-SecurityGroupsTestJSON-38868017-project-member] Waiting for the task: (returnval){ [ 1582.492493] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]522fbc1f-c160-df80-288f-a0fa9cee8152" [ 1582.492493] env[62519]: _type = "Task" [ 1582.492493] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1582.502400] env[62519]: DEBUG oslo_vmware.api [None req-b80a0845-960b-4b9e-b305-46f7cb345916 tempest-SecurityGroupsTestJSON-38868017 tempest-SecurityGroupsTestJSON-38868017-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]522fbc1f-c160-df80-288f-a0fa9cee8152, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1582.842260] env[62519]: DEBUG oslo_vmware.api [None req-b8aad1ad-699a-4e07-8919-0f4cbeaeb95e tempest-ImagesOneServerTestJSON-1959164190 tempest-ImagesOneServerTestJSON-1959164190-project-member] Task: {'id': task-1802433, 'name': CloneVM_Task} progress is 94%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1582.912022] env[62519]: DEBUG nova.scheduler.client.report [None req-d4f64c01-4859-459d-a49e-3e38bfd7e3ac tempest-ServerAddressesNegativeTestJSON-801003583 tempest-ServerAddressesNegativeTestJSON-801003583-project-member] Inventory has not changed for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 157, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1582.941810] env[62519]: DEBUG oslo_vmware.api [None req-9f05e4f9-be1d-429d-8b5d-c079517a8a2e tempest-ImagesNegativeTestJSON-1194326905 tempest-ImagesNegativeTestJSON-1194326905-project-member] Task: {'id': task-1802435, 'name': PowerOffVM_Task, 'duration_secs': 0.862383} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1582.944635] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-9f05e4f9-be1d-429d-8b5d-c079517a8a2e tempest-ImagesNegativeTestJSON-1194326905 tempest-ImagesNegativeTestJSON-1194326905-project-member] [instance: 1b4efc0f-474f-4a32-b199-c14f27b183e2] Powered off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1582.944635] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-9f05e4f9-be1d-429d-8b5d-c079517a8a2e tempest-ImagesNegativeTestJSON-1194326905 tempest-ImagesNegativeTestJSON-1194326905-project-member] [instance: 1b4efc0f-474f-4a32-b199-c14f27b183e2] Unregistering the VM {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1582.944635] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d3af9716-d139-4a8d-96e8-69919e3fd814 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1582.953019] env[62519]: DEBUG oslo_vmware.api [None req-df0df1dc-dc85-41ce-be1d-c03bdfcf0ead tempest-ServersTestBootFromVolume-845595414 tempest-ServersTestBootFromVolume-845595414-project-member] Task: {'id': task-1802437, 'name': Rename_Task, 'duration_secs': 0.210924} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1582.953269] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-df0df1dc-dc85-41ce-be1d-c03bdfcf0ead tempest-ServersTestBootFromVolume-845595414 tempest-ServersTestBootFromVolume-845595414-project-member] [instance: 765cf18e-53a0-4cc6-ad0e-337a6f68915c] Powering on the VM {{(pid=62519) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1582.954171] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-116199ea-8031-45f8-9b4b-96f431562cc6 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1582.968708] env[62519]: DEBUG oslo_vmware.api [None req-e418db9a-80a0-48d0-b7f9-5c74a86504d6 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Task: {'id': task-1802436, 'name': ReconfigVM_Task, 'duration_secs': 0.818347} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1582.969565] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-e418db9a-80a0-48d0-b7f9-5c74a86504d6 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] [instance: d8780c40-0099-4ccc-84ae-72fbb14fa1ee] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-373708', 'volume_id': '326b2c58-a69e-42e3-a1c0-532dd3c5b724', 'name': 'volume-326b2c58-a69e-42e3-a1c0-532dd3c5b724', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'd8780c40-0099-4ccc-84ae-72fbb14fa1ee', 'attached_at': '', 'detached_at': '', 'volume_id': '326b2c58-a69e-42e3-a1c0-532dd3c5b724', 'serial': '326b2c58-a69e-42e3-a1c0-532dd3c5b724'} {{(pid=62519) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1582.978399] env[62519]: DEBUG oslo_vmware.api [None req-df0df1dc-dc85-41ce-be1d-c03bdfcf0ead tempest-ServersTestBootFromVolume-845595414 tempest-ServersTestBootFromVolume-845595414-project-member] Waiting for the task: (returnval){ [ 1582.978399] env[62519]: value = "task-1802439" [ 1582.978399] env[62519]: _type = "Task" [ 1582.978399] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1582.989635] env[62519]: DEBUG oslo_vmware.api [None req-df0df1dc-dc85-41ce-be1d-c03bdfcf0ead tempest-ServersTestBootFromVolume-845595414 tempest-ServersTestBootFromVolume-845595414-project-member] Task: {'id': task-1802439, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1583.008767] env[62519]: DEBUG oslo_vmware.api [None req-b80a0845-960b-4b9e-b305-46f7cb345916 tempest-SecurityGroupsTestJSON-38868017 tempest-SecurityGroupsTestJSON-38868017-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]522fbc1f-c160-df80-288f-a0fa9cee8152, 'name': SearchDatastore_Task, 'duration_secs': 0.129352} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1583.009314] env[62519]: DEBUG oslo_concurrency.lockutils [None req-b80a0845-960b-4b9e-b305-46f7cb345916 tempest-SecurityGroupsTestJSON-38868017 tempest-SecurityGroupsTestJSON-38868017-project-member] Releasing lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1583.009874] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-b80a0845-960b-4b9e-b305-46f7cb345916 tempest-SecurityGroupsTestJSON-38868017 tempest-SecurityGroupsTestJSON-38868017-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk to [datastore1] ad374dd9-a92d-4b76-9609-7562346e05a8/ad374dd9-a92d-4b76-9609-7562346e05a8.vmdk {{(pid=62519) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1583.010450] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ed945b01-8cab-401e-b7be-bfd48b161043 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1583.028028] env[62519]: DEBUG oslo_vmware.api [None req-b80a0845-960b-4b9e-b305-46f7cb345916 tempest-SecurityGroupsTestJSON-38868017 tempest-SecurityGroupsTestJSON-38868017-project-member] Waiting for the task: (returnval){ [ 1583.028028] env[62519]: value = "task-1802440" [ 1583.028028] env[62519]: _type = "Task" [ 1583.028028] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1583.036160] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-9f05e4f9-be1d-429d-8b5d-c079517a8a2e tempest-ImagesNegativeTestJSON-1194326905 tempest-ImagesNegativeTestJSON-1194326905-project-member] [instance: 1b4efc0f-474f-4a32-b199-c14f27b183e2] Unregistered the VM {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1583.037564] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-9f05e4f9-be1d-429d-8b5d-c079517a8a2e tempest-ImagesNegativeTestJSON-1194326905 tempest-ImagesNegativeTestJSON-1194326905-project-member] [instance: 1b4efc0f-474f-4a32-b199-c14f27b183e2] Deleting contents of the VM from datastore datastore1 {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1583.037564] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-9f05e4f9-be1d-429d-8b5d-c079517a8a2e tempest-ImagesNegativeTestJSON-1194326905 tempest-ImagesNegativeTestJSON-1194326905-project-member] Deleting the datastore file [datastore1] 1b4efc0f-474f-4a32-b199-c14f27b183e2 {{(pid=62519) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1583.037564] env[62519]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-abd9b603-4044-47aa-9509-78bca76bf109 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1583.042067] env[62519]: DEBUG oslo_vmware.api [None req-b80a0845-960b-4b9e-b305-46f7cb345916 tempest-SecurityGroupsTestJSON-38868017 tempest-SecurityGroupsTestJSON-38868017-project-member] Task: {'id': task-1802440, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1583.051419] env[62519]: DEBUG oslo_vmware.api [None req-9f05e4f9-be1d-429d-8b5d-c079517a8a2e tempest-ImagesNegativeTestJSON-1194326905 tempest-ImagesNegativeTestJSON-1194326905-project-member] Waiting for the task: (returnval){ [ 1583.051419] env[62519]: value = "task-1802441" [ 1583.051419] env[62519]: _type = "Task" [ 1583.051419] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1583.060415] env[62519]: DEBUG oslo_vmware.api [None req-9f05e4f9-be1d-429d-8b5d-c079517a8a2e tempest-ImagesNegativeTestJSON-1194326905 tempest-ImagesNegativeTestJSON-1194326905-project-member] Task: {'id': task-1802441, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1583.283905] env[62519]: DEBUG nova.network.neutron [req-6dac46bc-ad8d-4713-b1bb-695de1e9ec97 req-acb8de35-533b-4889-8da8-46ae1b46d6dc service nova] [instance: ad374dd9-a92d-4b76-9609-7562346e05a8] Updated VIF entry in instance network info cache for port 8d20f83b-f706-40ea-bba7-461aba73113f. {{(pid=62519) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1583.283905] env[62519]: DEBUG nova.network.neutron [req-6dac46bc-ad8d-4713-b1bb-695de1e9ec97 req-acb8de35-533b-4889-8da8-46ae1b46d6dc service nova] [instance: ad374dd9-a92d-4b76-9609-7562346e05a8] Updating instance_info_cache with network_info: [{"id": "8d20f83b-f706-40ea-bba7-461aba73113f", "address": "fa:16:3e:c3:bc:6c", "network": {"id": "9dc148fb-a4d7-47bd-b004-56a2552812eb", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-1607343584-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d044d408a8674580b0f5cd52ca6e756d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1f762954-6ca5-4da5-bf0a-5d31c51ec570", "external-id": "nsx-vlan-transportzone-930", "segmentation_id": 930, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8d20f83b-f7", "ovs_interfaceid": "8d20f83b-f706-40ea-bba7-461aba73113f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1583.339549] env[62519]: DEBUG oslo_vmware.api [None req-b8aad1ad-699a-4e07-8919-0f4cbeaeb95e tempest-ImagesOneServerTestJSON-1959164190 tempest-ImagesOneServerTestJSON-1959164190-project-member] Task: {'id': task-1802433, 'name': CloneVM_Task, 'duration_secs': 1.50545} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1583.339907] env[62519]: INFO nova.virt.vmwareapi.vmops [None req-b8aad1ad-699a-4e07-8919-0f4cbeaeb95e tempest-ImagesOneServerTestJSON-1959164190 tempest-ImagesOneServerTestJSON-1959164190-project-member] [instance: 4e3dee19-b99a-4257-88da-1b0531e2c0f9] Created linked-clone VM from snapshot [ 1583.340581] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1aa3d7a-276b-400f-828f-702f77cdabe5 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1583.352393] env[62519]: DEBUG nova.virt.vmwareapi.images [None req-b8aad1ad-699a-4e07-8919-0f4cbeaeb95e tempest-ImagesOneServerTestJSON-1959164190 tempest-ImagesOneServerTestJSON-1959164190-project-member] [instance: 4e3dee19-b99a-4257-88da-1b0531e2c0f9] Uploading image 544d45fd-dca6-4eff-834b-684a6a46bb55 {{(pid=62519) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1583.385416] env[62519]: DEBUG oslo_vmware.rw_handles [None req-b8aad1ad-699a-4e07-8919-0f4cbeaeb95e tempest-ImagesOneServerTestJSON-1959164190 tempest-ImagesOneServerTestJSON-1959164190-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1583.385416] env[62519]: value = "vm-373713" [ 1583.385416] env[62519]: _type = "VirtualMachine" [ 1583.385416] env[62519]: }. {{(pid=62519) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1583.385739] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-fa2ae661-b351-4fee-9cf0-5e7b49adc13a {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1583.394868] env[62519]: DEBUG oslo_vmware.rw_handles [None req-b8aad1ad-699a-4e07-8919-0f4cbeaeb95e tempest-ImagesOneServerTestJSON-1959164190 tempest-ImagesOneServerTestJSON-1959164190-project-member] Lease: (returnval){ [ 1583.394868] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]52563370-c2de-6c4c-39c6-8126c10de101" [ 1583.394868] env[62519]: _type = "HttpNfcLease" [ 1583.394868] env[62519]: } obtained for exporting VM: (result){ [ 1583.394868] env[62519]: value = "vm-373713" [ 1583.394868] env[62519]: _type = "VirtualMachine" [ 1583.394868] env[62519]: }. {{(pid=62519) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1583.395417] env[62519]: DEBUG oslo_vmware.api [None req-b8aad1ad-699a-4e07-8919-0f4cbeaeb95e tempest-ImagesOneServerTestJSON-1959164190 tempest-ImagesOneServerTestJSON-1959164190-project-member] Waiting for the lease: (returnval){ [ 1583.395417] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]52563370-c2de-6c4c-39c6-8126c10de101" [ 1583.395417] env[62519]: _type = "HttpNfcLease" [ 1583.395417] env[62519]: } to be ready. {{(pid=62519) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1583.405550] env[62519]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1583.405550] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]52563370-c2de-6c4c-39c6-8126c10de101" [ 1583.405550] env[62519]: _type = "HttpNfcLease" [ 1583.405550] env[62519]: } is initializing. 
{{(pid=62519) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1583.416124] env[62519]: DEBUG oslo_concurrency.lockutils [None req-d4f64c01-4859-459d-a49e-3e38bfd7e3ac tempest-ServerAddressesNegativeTestJSON-801003583 tempest-ServerAddressesNegativeTestJSON-801003583-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.370s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1583.418807] env[62519]: DEBUG oslo_concurrency.lockutils [None req-040512d1-e180-4bd4-99a9-22e550a80706 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 27.530s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1583.421020] env[62519]: INFO nova.compute.claims [None req-040512d1-e180-4bd4-99a9-22e550a80706 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] [instance: 80ef3fd4-b9ef-4fd2-a991-feec78a0c81d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1583.463053] env[62519]: INFO nova.scheduler.client.report [None req-d4f64c01-4859-459d-a49e-3e38bfd7e3ac tempest-ServerAddressesNegativeTestJSON-801003583 tempest-ServerAddressesNegativeTestJSON-801003583-project-member] Deleted allocations for instance 22380aef-c725-43a0-a957-06ced9518c21 [ 1583.494863] env[62519]: DEBUG oslo_vmware.api [None req-df0df1dc-dc85-41ce-be1d-c03bdfcf0ead tempest-ServersTestBootFromVolume-845595414 tempest-ServersTestBootFromVolume-845595414-project-member] Task: {'id': task-1802439, 'name': PowerOnVM_Task} progress is 88%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1583.544930] env[62519]: DEBUG oslo_vmware.api [None req-b80a0845-960b-4b9e-b305-46f7cb345916 tempest-SecurityGroupsTestJSON-38868017 tempest-SecurityGroupsTestJSON-38868017-project-member] Task: {'id': task-1802440, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1583.564965] env[62519]: DEBUG oslo_vmware.api [None req-9f05e4f9-be1d-429d-8b5d-c079517a8a2e tempest-ImagesNegativeTestJSON-1194326905 tempest-ImagesNegativeTestJSON-1194326905-project-member] Task: {'id': task-1802441, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.237305} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1583.565425] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-9f05e4f9-be1d-429d-8b5d-c079517a8a2e tempest-ImagesNegativeTestJSON-1194326905 tempest-ImagesNegativeTestJSON-1194326905-project-member] Deleted the datastore file {{(pid=62519) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1583.565700] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-9f05e4f9-be1d-429d-8b5d-c079517a8a2e tempest-ImagesNegativeTestJSON-1194326905 tempest-ImagesNegativeTestJSON-1194326905-project-member] [instance: 1b4efc0f-474f-4a32-b199-c14f27b183e2] Deleted contents of the VM from datastore datastore1 {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1583.565700] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-9f05e4f9-be1d-429d-8b5d-c079517a8a2e tempest-ImagesNegativeTestJSON-1194326905 tempest-ImagesNegativeTestJSON-1194326905-project-member] [instance: 1b4efc0f-474f-4a32-b199-c14f27b183e2] Instance destroyed {{(pid=62519) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1583.565870] env[62519]: INFO nova.compute.manager [None req-9f05e4f9-be1d-429d-8b5d-c079517a8a2e tempest-ImagesNegativeTestJSON-1194326905 tempest-ImagesNegativeTestJSON-1194326905-project-member] [instance: 1b4efc0f-474f-4a32-b199-c14f27b183e2] Took 1.79 seconds to destroy the instance on the hypervisor. [ 1583.567131] env[62519]: DEBUG oslo.service.loopingcall [None req-9f05e4f9-be1d-429d-8b5d-c079517a8a2e tempest-ImagesNegativeTestJSON-1194326905 tempest-ImagesNegativeTestJSON-1194326905-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62519) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1583.567131] env[62519]: DEBUG nova.compute.manager [-] [instance: 1b4efc0f-474f-4a32-b199-c14f27b183e2] Deallocating network for instance {{(pid=62519) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2297}} [ 1583.567131] env[62519]: DEBUG nova.network.neutron [-] [instance: 1b4efc0f-474f-4a32-b199-c14f27b183e2] deallocate_for_instance() {{(pid=62519) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1583.679908] env[62519]: DEBUG nova.network.neutron [None req-07784f7b-8852-4eb7-b75c-baa3510afa4f tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] [instance: dac173ff-1807-405f-a59c-bb2efef62a0c] Successfully updated port: bb52070a-ffaf-478e-b8c5-9273d58a0b34 {{(pid=62519) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1583.788251] env[62519]: DEBUG oslo_concurrency.lockutils [req-6dac46bc-ad8d-4713-b1bb-695de1e9ec97 req-acb8de35-533b-4889-8da8-46ae1b46d6dc service nova] Releasing lock "refresh_cache-ad374dd9-a92d-4b76-9609-7562346e05a8" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1583.904311] env[62519]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1583.904311] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]52563370-c2de-6c4c-39c6-8126c10de101" [ 1583.904311] env[62519]: _type = "HttpNfcLease" [ 1583.904311] env[62519]: } is ready. 
{{(pid=62519) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1583.904576] env[62519]: DEBUG oslo_vmware.rw_handles [None req-b8aad1ad-699a-4e07-8919-0f4cbeaeb95e tempest-ImagesOneServerTestJSON-1959164190 tempest-ImagesOneServerTestJSON-1959164190-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1583.904576] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]52563370-c2de-6c4c-39c6-8126c10de101" [ 1583.904576] env[62519]: _type = "HttpNfcLease" [ 1583.904576] env[62519]: }. {{(pid=62519) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1583.907408] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0ee8779-4253-442e-8d0b-bcf8a7e43c3d {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1583.917778] env[62519]: DEBUG oslo_vmware.rw_handles [None req-b8aad1ad-699a-4e07-8919-0f4cbeaeb95e tempest-ImagesOneServerTestJSON-1959164190 tempest-ImagesOneServerTestJSON-1959164190-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/524d7817-13b0-51b3-660b-e68448fe2431/disk-0.vmdk from lease info. {{(pid=62519) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1583.917778] env[62519]: DEBUG oslo_vmware.rw_handles [None req-b8aad1ad-699a-4e07-8919-0f4cbeaeb95e tempest-ImagesOneServerTestJSON-1959164190 tempest-ImagesOneServerTestJSON-1959164190-project-member] Opening URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/524d7817-13b0-51b3-660b-e68448fe2431/disk-0.vmdk for reading. {{(pid=62519) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1583.995055] env[62519]: DEBUG oslo_concurrency.lockutils [None req-d4f64c01-4859-459d-a49e-3e38bfd7e3ac tempest-ServerAddressesNegativeTestJSON-801003583 tempest-ServerAddressesNegativeTestJSON-801003583-project-member] Lock "22380aef-c725-43a0-a957-06ced9518c21" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 34.833s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1584.001991] env[62519]: DEBUG oslo_vmware.api [None req-df0df1dc-dc85-41ce-be1d-c03bdfcf0ead tempest-ServersTestBootFromVolume-845595414 tempest-ServersTestBootFromVolume-845595414-project-member] Task: {'id': task-1802439, 'name': PowerOnVM_Task, 'duration_secs': 0.729986} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1584.002324] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-df0df1dc-dc85-41ce-be1d-c03bdfcf0ead tempest-ServersTestBootFromVolume-845595414 tempest-ServersTestBootFromVolume-845595414-project-member] [instance: 765cf18e-53a0-4cc6-ad0e-337a6f68915c] Powered on the VM {{(pid=62519) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1584.002548] env[62519]: INFO nova.compute.manager [None req-df0df1dc-dc85-41ce-be1d-c03bdfcf0ead tempest-ServersTestBootFromVolume-845595414 tempest-ServersTestBootFromVolume-845595414-project-member] [instance: 765cf18e-53a0-4cc6-ad0e-337a6f68915c] Took 6.89 seconds to spawn the instance on the hypervisor. 
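The preceding records (ExportVm, HttpNfcLease "initializing" then "ready", "Found VMDK URL ... from lease info") are the standard oslo.vmware export flow that backs the stream-optimized image upload. A minimal sketch of that flow, assuming an already-established oslo_vmware VMwareAPISession named `session` and a VirtualMachine moref `vm_ref` (both placeholders, not values taken from this log):

```python
# Sketch of the ExportVm -> HttpNfcLease -> VMDK URL pattern seen above.
# `session` and `vm_ref` are assumed to exist; this is illustrative only.
from oslo_vmware import vim_util


def find_export_vmdk_url(session, vm_ref):
    # ExportVm returns an HttpNfcLease managed-object reference.
    lease = session.invoke_api(session.vim, 'ExportVm', vm_ref)
    # Poll until the lease leaves the "initializing" state logged above.
    session.wait_for_lease_ready(lease)
    # Read the lease info and locate the disk device URL (the
    # ".../nfc/<id>/disk-0.vmdk" URL that the read handle later opens).
    lease_info = session.invoke_api(vim_util, 'get_object_property',
                                    session.vim, lease, 'info')
    for device_url in lease_info.deviceUrl:
        if device_url.disk:
            return device_url.url
    return None
```

In the log, oslo_vmware.rw_handles performs this lookup itself before opening the returned URL for reading.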
[ 1584.002791] env[62519]: DEBUG nova.compute.manager [None req-df0df1dc-dc85-41ce-be1d-c03bdfcf0ead tempest-ServersTestBootFromVolume-845595414 tempest-ServersTestBootFromVolume-845595414-project-member] [instance: 765cf18e-53a0-4cc6-ad0e-337a6f68915c] Checking state {{(pid=62519) _get_power_state /opt/stack/nova/nova/compute/manager.py:1799}} [ 1584.003589] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a40cd21-5dc8-4b78-86dd-389496b52009 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1584.039157] env[62519]: DEBUG oslo_vmware.api [None req-b80a0845-960b-4b9e-b305-46f7cb345916 tempest-SecurityGroupsTestJSON-38868017 tempest-SecurityGroupsTestJSON-38868017-project-member] Task: {'id': task-1802440, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.699231} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1584.039463] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-b80a0845-960b-4b9e-b305-46f7cb345916 tempest-SecurityGroupsTestJSON-38868017 tempest-SecurityGroupsTestJSON-38868017-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk to [datastore1] ad374dd9-a92d-4b76-9609-7562346e05a8/ad374dd9-a92d-4b76-9609-7562346e05a8.vmdk {{(pid=62519) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1584.039697] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-b80a0845-960b-4b9e-b305-46f7cb345916 tempest-SecurityGroupsTestJSON-38868017 tempest-SecurityGroupsTestJSON-38868017-project-member] [instance: ad374dd9-a92d-4b76-9609-7562346e05a8] Extending root virtual disk to 1048576 {{(pid=62519) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1584.039969] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-bf3b0012-f8cf-4a6d-8933-5e5e1df63309 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1584.045527] env[62519]: DEBUG nova.objects.instance [None req-e418db9a-80a0-48d0-b7f9-5c74a86504d6 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Lazy-loading 'flavor' on Instance uuid d8780c40-0099-4ccc-84ae-72fbb14fa1ee {{(pid=62519) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1584.048644] env[62519]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-8d180875-41dc-4852-b123-669fe2e193cb {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1584.054970] env[62519]: DEBUG oslo_vmware.api [None req-b80a0845-960b-4b9e-b305-46f7cb345916 tempest-SecurityGroupsTestJSON-38868017 tempest-SecurityGroupsTestJSON-38868017-project-member] Waiting for the task: (returnval){ [ 1584.054970] env[62519]: value = "task-1802443" [ 1584.054970] env[62519]: _type = "Task" [ 1584.054970] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1584.069588] env[62519]: DEBUG oslo_vmware.api [None req-b80a0845-960b-4b9e-b305-46f7cb345916 tempest-SecurityGroupsTestJSON-38868017 tempest-SecurityGroupsTestJSON-38868017-project-member] Task: {'id': task-1802443, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1584.186307] env[62519]: DEBUG oslo_concurrency.lockutils [None req-07784f7b-8852-4eb7-b75c-baa3510afa4f tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Acquiring lock "refresh_cache-dac173ff-1807-405f-a59c-bb2efef62a0c" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1584.187400] env[62519]: DEBUG oslo_concurrency.lockutils [None req-07784f7b-8852-4eb7-b75c-baa3510afa4f tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Acquired lock "refresh_cache-dac173ff-1807-405f-a59c-bb2efef62a0c" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1584.187400] env[62519]: DEBUG nova.network.neutron [None req-07784f7b-8852-4eb7-b75c-baa3510afa4f tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] [instance: dac173ff-1807-405f-a59c-bb2efef62a0c] Building network info cache for instance {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1584.412774] env[62519]: DEBUG nova.network.neutron [-] [instance: 1b4efc0f-474f-4a32-b199-c14f27b183e2] Updating instance_info_cache with network_info: [] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1584.476711] env[62519]: DEBUG nova.compute.manager [req-526da486-c0d5-4dc7-97f0-6a14b0f6bb65 req-1ee9724e-8983-4f0d-b455-7c7e73ecff2d service nova] [instance: dac173ff-1807-405f-a59c-bb2efef62a0c] Received event network-vif-plugged-bb52070a-ffaf-478e-b8c5-9273d58a0b34 {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1584.479043] env[62519]: DEBUG oslo_concurrency.lockutils [req-526da486-c0d5-4dc7-97f0-6a14b0f6bb65 req-1ee9724e-8983-4f0d-b455-7c7e73ecff2d service nova] Acquiring lock "dac173ff-1807-405f-a59c-bb2efef62a0c-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1584.479043] env[62519]: DEBUG oslo_concurrency.lockutils [req-526da486-c0d5-4dc7-97f0-6a14b0f6bb65 req-1ee9724e-8983-4f0d-b455-7c7e73ecff2d service nova] Lock "dac173ff-1807-405f-a59c-bb2efef62a0c-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1584.479043] env[62519]: DEBUG oslo_concurrency.lockutils [req-526da486-c0d5-4dc7-97f0-6a14b0f6bb65 req-1ee9724e-8983-4f0d-b455-7c7e73ecff2d service nova] Lock "dac173ff-1807-405f-a59c-bb2efef62a0c-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1584.479043] env[62519]: DEBUG nova.compute.manager [req-526da486-c0d5-4dc7-97f0-6a14b0f6bb65 req-1ee9724e-8983-4f0d-b455-7c7e73ecff2d service nova] [instance: dac173ff-1807-405f-a59c-bb2efef62a0c] No waiting events found dispatching network-vif-plugged-bb52070a-ffaf-478e-b8c5-9273d58a0b34 {{(pid=62519) pop_instance_event /opt/stack/nova/nova/compute/manager.py:323}} [ 1584.479043] env[62519]: WARNING nova.compute.manager [req-526da486-c0d5-4dc7-97f0-6a14b0f6bb65 
req-1ee9724e-8983-4f0d-b455-7c7e73ecff2d service nova] [instance: dac173ff-1807-405f-a59c-bb2efef62a0c] Received unexpected event network-vif-plugged-bb52070a-ffaf-478e-b8c5-9273d58a0b34 for instance with vm_state building and task_state spawning. [ 1584.525761] env[62519]: INFO nova.compute.manager [None req-df0df1dc-dc85-41ce-be1d-c03bdfcf0ead tempest-ServersTestBootFromVolume-845595414 tempest-ServersTestBootFromVolume-845595414-project-member] [instance: 765cf18e-53a0-4cc6-ad0e-337a6f68915c] Took 42.58 seconds to build instance. [ 1584.559647] env[62519]: DEBUG oslo_concurrency.lockutils [None req-e418db9a-80a0-48d0-b7f9-5c74a86504d6 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Lock "d8780c40-0099-4ccc-84ae-72fbb14fa1ee" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 8.456s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1584.582149] env[62519]: DEBUG oslo_vmware.api [None req-b80a0845-960b-4b9e-b305-46f7cb345916 tempest-SecurityGroupsTestJSON-38868017 tempest-SecurityGroupsTestJSON-38868017-project-member] Task: {'id': task-1802443, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.255056} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1584.582149] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-b80a0845-960b-4b9e-b305-46f7cb345916 tempest-SecurityGroupsTestJSON-38868017 tempest-SecurityGroupsTestJSON-38868017-project-member] [instance: ad374dd9-a92d-4b76-9609-7562346e05a8] Extended root virtual disk {{(pid=62519) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1584.582386] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9630497d-9f83-489c-aaa9-d310ecc5897c {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1584.613349] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-b80a0845-960b-4b9e-b305-46f7cb345916 tempest-SecurityGroupsTestJSON-38868017 tempest-SecurityGroupsTestJSON-38868017-project-member] [instance: ad374dd9-a92d-4b76-9609-7562346e05a8] Reconfiguring VM instance instance-0000002f to attach disk [datastore1] ad374dd9-a92d-4b76-9609-7562346e05a8/ad374dd9-a92d-4b76-9609-7562346e05a8.vmdk or device None with type sparse {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1584.618590] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-82924b06-6eb1-4638-b4c2-b5067bc99c35 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1584.644422] env[62519]: DEBUG oslo_vmware.api [None req-b80a0845-960b-4b9e-b305-46f7cb345916 tempest-SecurityGroupsTestJSON-38868017 tempest-SecurityGroupsTestJSON-38868017-project-member] Waiting for the task: (returnval){ [ 1584.644422] env[62519]: value = "task-1802444" [ 1584.644422] env[62519]: _type = "Task" [ 1584.644422] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1584.661832] env[62519]: DEBUG oslo_vmware.api [None req-b80a0845-960b-4b9e-b305-46f7cb345916 tempest-SecurityGroupsTestJSON-38868017 tempest-SecurityGroupsTestJSON-38868017-project-member] Task: {'id': task-1802444, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1584.701448] env[62519]: DEBUG oslo_concurrency.lockutils [None req-9f4c1900-996e-42d1-9657-410816fb70d2 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Acquiring lock "417588f8-6288-4ecd-9764-dbc923549c5d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1584.701791] env[62519]: DEBUG oslo_concurrency.lockutils [None req-9f4c1900-996e-42d1-9657-410816fb70d2 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Lock "417588f8-6288-4ecd-9764-dbc923549c5d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1584.737453] env[62519]: DEBUG nova.network.neutron [None req-07784f7b-8852-4eb7-b75c-baa3510afa4f tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] [instance: dac173ff-1807-405f-a59c-bb2efef62a0c] Instance cache missing network info. {{(pid=62519) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1584.910997] env[62519]: DEBUG nova.network.neutron [None req-07784f7b-8852-4eb7-b75c-baa3510afa4f tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] [instance: dac173ff-1807-405f-a59c-bb2efef62a0c] Updating instance_info_cache with network_info: [{"id": "bb52070a-ffaf-478e-b8c5-9273d58a0b34", "address": "fa:16:3e:9c:1f:3e", "network": {"id": "f59f09e8-6201-4c07-9809-ef4c213de15f", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-906781315-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "555fa612766f4b5fa173664ca3fa496c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8bc3fa06-9d5b-4ab1-8113-6ed8942d23b6", "external-id": "nsx-vlan-transportzone-72", "segmentation_id": 72, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbb52070a-ff", "ovs_interfaceid": "bb52070a-ffaf-478e-b8c5-9273d58a0b34", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1584.917535] env[62519]: INFO nova.compute.manager [-] [instance: 1b4efc0f-474f-4a32-b199-c14f27b183e2] Took 1.35 seconds to deallocate network for instance. [ 1585.155949] env[62519]: DEBUG oslo_vmware.api [None req-b80a0845-960b-4b9e-b305-46f7cb345916 tempest-SecurityGroupsTestJSON-38868017 tempest-SecurityGroupsTestJSON-38868017-project-member] Task: {'id': task-1802444, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1585.186421] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe7f4de1-c09b-4fab-a9fa-09d6b0bda10a {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1585.195288] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a740d950-50c4-4ef9-ac99-c80dce70e3d6 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1585.231857] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab93f277-14d2-4547-adcd-bf5e4b83f1fe {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1585.240238] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68a4274e-2b77-491e-b81d-0bbc511bdc7c {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1585.256315] env[62519]: DEBUG nova.compute.provider_tree [None req-040512d1-e180-4bd4-99a9-22e550a80706 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Inventory has not changed in ProviderTree for provider: f8ca0d98-9158-4b85-ae0e-b106f966dd44 {{(pid=62519) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1585.411816] env[62519]: DEBUG nova.compute.manager [req-97424ee9-161a-46a3-b3e3-524db0b0c4ba req-146dd9a7-685b-4a36-a980-0c9cfbc86182 service nova] [instance: 1b4efc0f-474f-4a32-b199-c14f27b183e2] Received event network-vif-deleted-ec32b319-d783-4ebf-997e-88d3a805675f {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1585.413963] env[62519]: DEBUG oslo_concurrency.lockutils [None req-07784f7b-8852-4eb7-b75c-baa3510afa4f tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Releasing lock "refresh_cache-dac173ff-1807-405f-a59c-bb2efef62a0c" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1585.414316] env[62519]: DEBUG nova.compute.manager [None req-07784f7b-8852-4eb7-b75c-baa3510afa4f tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] [instance: dac173ff-1807-405f-a59c-bb2efef62a0c] Instance network_info: |[{"id": "bb52070a-ffaf-478e-b8c5-9273d58a0b34", "address": "fa:16:3e:9c:1f:3e", "network": {"id": "f59f09e8-6201-4c07-9809-ef4c213de15f", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-906781315-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "555fa612766f4b5fa173664ca3fa496c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8bc3fa06-9d5b-4ab1-8113-6ed8942d23b6", "external-id": "nsx-vlan-transportzone-72", "segmentation_id": 72, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbb52070a-ff", "ovs_interfaceid": 
"bb52070a-ffaf-478e-b8c5-9273d58a0b34", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62519) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2004}} [ 1585.414746] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-07784f7b-8852-4eb7-b75c-baa3510afa4f tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] [instance: dac173ff-1807-405f-a59c-bb2efef62a0c] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:9c:1f:3e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '8bc3fa06-9d5b-4ab1-8113-6ed8942d23b6', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'bb52070a-ffaf-478e-b8c5-9273d58a0b34', 'vif_model': 'vmxnet3'}] {{(pid=62519) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1585.422735] env[62519]: DEBUG oslo.service.loopingcall [None req-07784f7b-8852-4eb7-b75c-baa3510afa4f tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62519) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1585.423336] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: dac173ff-1807-405f-a59c-bb2efef62a0c] Creating VM on the ESX host {{(pid=62519) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1585.423585] env[62519]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-50e07cd0-76bd-4a35-a2ef-d80ddb4bdce4 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1585.440999] env[62519]: DEBUG oslo_concurrency.lockutils [None req-9f05e4f9-be1d-429d-8b5d-c079517a8a2e tempest-ImagesNegativeTestJSON-1194326905 tempest-ImagesNegativeTestJSON-1194326905-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1585.448269] env[62519]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1585.448269] env[62519]: value = "task-1802445" [ 1585.448269] env[62519]: _type = "Task" [ 1585.448269] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1585.458137] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1802445, 'name': CreateVM_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1585.658425] env[62519]: DEBUG oslo_vmware.api [None req-b80a0845-960b-4b9e-b305-46f7cb345916 tempest-SecurityGroupsTestJSON-38868017 tempest-SecurityGroupsTestJSON-38868017-project-member] Task: {'id': task-1802444, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1585.759759] env[62519]: DEBUG nova.scheduler.client.report [None req-040512d1-e180-4bd4-99a9-22e550a80706 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Inventory has not changed for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 157, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1585.822412] env[62519]: DEBUG oslo_concurrency.lockutils [None req-c02b397a-4feb-4365-8cb0-fba53cb05cb2 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Acquiring lock "d8780c40-0099-4ccc-84ae-72fbb14fa1ee" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1585.822692] env[62519]: DEBUG oslo_concurrency.lockutils [None req-c02b397a-4feb-4365-8cb0-fba53cb05cb2 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Lock "d8780c40-0099-4ccc-84ae-72fbb14fa1ee" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1585.822911] env[62519]: DEBUG oslo_concurrency.lockutils [None req-c02b397a-4feb-4365-8cb0-fba53cb05cb2 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Acquiring lock "d8780c40-0099-4ccc-84ae-72fbb14fa1ee-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1585.823112] env[62519]: DEBUG oslo_concurrency.lockutils [None req-c02b397a-4feb-4365-8cb0-fba53cb05cb2 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Lock "d8780c40-0099-4ccc-84ae-72fbb14fa1ee-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1585.823283] env[62519]: DEBUG oslo_concurrency.lockutils [None req-c02b397a-4feb-4365-8cb0-fba53cb05cb2 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Lock "d8780c40-0099-4ccc-84ae-72fbb14fa1ee-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1585.825858] env[62519]: INFO nova.compute.manager [None req-c02b397a-4feb-4365-8cb0-fba53cb05cb2 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] [instance: d8780c40-0099-4ccc-84ae-72fbb14fa1ee] Terminating instance [ 1585.961468] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1802445, 'name': CreateVM_Task} progress is 99%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1586.034565] env[62519]: DEBUG oslo_concurrency.lockutils [None req-df0df1dc-dc85-41ce-be1d-c03bdfcf0ead tempest-ServersTestBootFromVolume-845595414 tempest-ServersTestBootFromVolume-845595414-project-member] Lock "765cf18e-53a0-4cc6-ad0e-337a6f68915c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 104.742s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1586.160607] env[62519]: DEBUG oslo_vmware.api [None req-b80a0845-960b-4b9e-b305-46f7cb345916 tempest-SecurityGroupsTestJSON-38868017 tempest-SecurityGroupsTestJSON-38868017-project-member] Task: {'id': task-1802444, 'name': ReconfigVM_Task, 'duration_secs': 1.197743} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1586.160913] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-b80a0845-960b-4b9e-b305-46f7cb345916 tempest-SecurityGroupsTestJSON-38868017 tempest-SecurityGroupsTestJSON-38868017-project-member] [instance: ad374dd9-a92d-4b76-9609-7562346e05a8] Reconfigured VM instance instance-0000002f to attach disk [datastore1] ad374dd9-a92d-4b76-9609-7562346e05a8/ad374dd9-a92d-4b76-9609-7562346e05a8.vmdk or device None with type sparse {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1586.162636] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-acfdc879-0ea0-4a95-9612-294548e73474 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1586.170522] env[62519]: DEBUG oslo_vmware.api [None req-b80a0845-960b-4b9e-b305-46f7cb345916 tempest-SecurityGroupsTestJSON-38868017 tempest-SecurityGroupsTestJSON-38868017-project-member] Waiting for the task: (returnval){ [ 1586.170522] env[62519]: value = "task-1802446" [ 1586.170522] env[62519]: _type = "Task" [ 1586.170522] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1586.188185] env[62519]: DEBUG oslo_vmware.api [None req-b80a0845-960b-4b9e-b305-46f7cb345916 tempest-SecurityGroupsTestJSON-38868017 tempest-SecurityGroupsTestJSON-38868017-project-member] Task: {'id': task-1802446, 'name': Rename_Task} progress is 10%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1586.267355] env[62519]: DEBUG oslo_concurrency.lockutils [None req-040512d1-e180-4bd4-99a9-22e550a80706 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.848s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1586.268891] env[62519]: DEBUG nova.compute.manager [None req-040512d1-e180-4bd4-99a9-22e550a80706 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] [instance: 80ef3fd4-b9ef-4fd2-a991-feec78a0c81d] Start building networks asynchronously for instance. 
{{(pid=62519) _build_resources /opt/stack/nova/nova/compute/manager.py:2838}} [ 1586.273405] env[62519]: DEBUG oslo_concurrency.lockutils [None req-7f6d51ef-3fb2-4405-8ddb-ee0d7b872e58 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 29.593s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1586.275445] env[62519]: INFO nova.compute.claims [None req-7f6d51ef-3fb2-4405-8ddb-ee0d7b872e58 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] [instance: 302edcd3-bd6e-41da-b731-4d4c1bb5c3c1] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1586.331095] env[62519]: DEBUG nova.compute.manager [None req-c02b397a-4feb-4365-8cb0-fba53cb05cb2 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] [instance: d8780c40-0099-4ccc-84ae-72fbb14fa1ee] Start destroying the instance on the hypervisor. {{(pid=62519) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3166}} [ 1586.331406] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-c02b397a-4feb-4365-8cb0-fba53cb05cb2 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] [instance: d8780c40-0099-4ccc-84ae-72fbb14fa1ee] Powering off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1586.332017] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-29dab76c-7acc-449f-9589-b46ff1e2e40d {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1586.345037] env[62519]: DEBUG oslo_vmware.api [None req-c02b397a-4feb-4365-8cb0-fba53cb05cb2 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Waiting for the task: (returnval){ [ 1586.345037] env[62519]: value = "task-1802447" [ 1586.345037] env[62519]: _type = "Task" [ 1586.345037] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1586.354962] env[62519]: DEBUG oslo_vmware.api [None req-c02b397a-4feb-4365-8cb0-fba53cb05cb2 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Task: {'id': task-1802447, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1586.461355] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1802445, 'name': CreateVM_Task, 'duration_secs': 0.521673} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1586.461688] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: dac173ff-1807-405f-a59c-bb2efef62a0c] Created VM on the ESX host {{(pid=62519) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1586.462341] env[62519]: DEBUG oslo_concurrency.lockutils [None req-07784f7b-8852-4eb7-b75c-baa3510afa4f tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1586.462560] env[62519]: DEBUG oslo_concurrency.lockutils [None req-07784f7b-8852-4eb7-b75c-baa3510afa4f tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Acquired lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1586.463044] env[62519]: DEBUG oslo_concurrency.lockutils [None req-07784f7b-8852-4eb7-b75c-baa3510afa4f tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1586.463445] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-93919493-7be7-484c-ac1f-dd8659c4b7c1 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1586.468937] env[62519]: DEBUG oslo_vmware.api [None req-07784f7b-8852-4eb7-b75c-baa3510afa4f tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Waiting for the task: (returnval){ [ 1586.468937] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]52f8f153-5e92-6a1c-47a1-7ec18525c600" [ 1586.468937] env[62519]: _type = "Task" [ 1586.468937] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1586.478632] env[62519]: DEBUG oslo_vmware.api [None req-07784f7b-8852-4eb7-b75c-baa3510afa4f tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52f8f153-5e92-6a1c-47a1-7ec18525c600, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1586.538427] env[62519]: DEBUG nova.compute.manager [None req-76861171-414a-4622-8909-66c97ea75102 tempest-ServersListShow296Test-771773119 tempest-ServersListShow296Test-771773119-project-member] [instance: c28254d5-90ec-421a-b7a5-5b6f16cb9268] Starting instance... {{(pid=62519) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2441}} [ 1586.683347] env[62519]: DEBUG oslo_vmware.api [None req-b80a0845-960b-4b9e-b305-46f7cb345916 tempest-SecurityGroupsTestJSON-38868017 tempest-SecurityGroupsTestJSON-38868017-project-member] Task: {'id': task-1802446, 'name': Rename_Task, 'duration_secs': 0.174283} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1586.683637] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-b80a0845-960b-4b9e-b305-46f7cb345916 tempest-SecurityGroupsTestJSON-38868017 tempest-SecurityGroupsTestJSON-38868017-project-member] [instance: ad374dd9-a92d-4b76-9609-7562346e05a8] Powering on the VM {{(pid=62519) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1586.683889] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-25a571fc-40ec-43e2-916c-d6ad4f5d1b06 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1586.691869] env[62519]: DEBUG oslo_vmware.api [None req-b80a0845-960b-4b9e-b305-46f7cb345916 tempest-SecurityGroupsTestJSON-38868017 tempest-SecurityGroupsTestJSON-38868017-project-member] Waiting for the task: (returnval){ [ 1586.691869] env[62519]: value = "task-1802448" [ 1586.691869] env[62519]: _type = "Task" [ 1586.691869] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1586.709500] env[62519]: DEBUG oslo_vmware.api [None req-b80a0845-960b-4b9e-b305-46f7cb345916 tempest-SecurityGroupsTestJSON-38868017 tempest-SecurityGroupsTestJSON-38868017-project-member] Task: {'id': task-1802448, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1586.760108] env[62519]: DEBUG oslo_concurrency.lockutils [None req-b0f1427a-eff6-4068-bbe3-05ff2c38cc24 tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] Acquiring lock "99f22198-1a65-4d0d-b665-90c7063dbdb9" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1586.760500] env[62519]: DEBUG oslo_concurrency.lockutils [None req-b0f1427a-eff6-4068-bbe3-05ff2c38cc24 tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] Lock "99f22198-1a65-4d0d-b665-90c7063dbdb9" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1586.775263] env[62519]: DEBUG nova.compute.utils [None req-040512d1-e180-4bd4-99a9-22e550a80706 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Using /dev/sd instead of None {{(pid=62519) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1586.776708] env[62519]: DEBUG nova.compute.manager [None req-040512d1-e180-4bd4-99a9-22e550a80706 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] [instance: 80ef3fd4-b9ef-4fd2-a991-feec78a0c81d] Allocating IP information in the background. 
{{(pid=62519) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1989}} [ 1586.776940] env[62519]: DEBUG nova.network.neutron [None req-040512d1-e180-4bd4-99a9-22e550a80706 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] [instance: 80ef3fd4-b9ef-4fd2-a991-feec78a0c81d] allocate_for_instance() {{(pid=62519) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1586.840869] env[62519]: DEBUG nova.policy [None req-040512d1-e180-4bd4-99a9-22e550a80706 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '81bb350c0ff54453b99b45ac84a82935', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '549cc35f5ff249f6bf22c67872883db0', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62519) authorize /opt/stack/nova/nova/policy.py:192}} [ 1586.861180] env[62519]: DEBUG oslo_vmware.api [None req-c02b397a-4feb-4365-8cb0-fba53cb05cb2 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Task: {'id': task-1802447, 'name': PowerOffVM_Task, 'duration_secs': 0.203331} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1586.861180] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-c02b397a-4feb-4365-8cb0-fba53cb05cb2 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] [instance: d8780c40-0099-4ccc-84ae-72fbb14fa1ee] Powered off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1586.861180] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-c02b397a-4feb-4365-8cb0-fba53cb05cb2 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] [instance: d8780c40-0099-4ccc-84ae-72fbb14fa1ee] Volume detach. 
Driver type: vmdk {{(pid=62519) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1586.861180] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-c02b397a-4feb-4365-8cb0-fba53cb05cb2 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] [instance: d8780c40-0099-4ccc-84ae-72fbb14fa1ee] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-373708', 'volume_id': '326b2c58-a69e-42e3-a1c0-532dd3c5b724', 'name': 'volume-326b2c58-a69e-42e3-a1c0-532dd3c5b724', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'd8780c40-0099-4ccc-84ae-72fbb14fa1ee', 'attached_at': '', 'detached_at': '', 'volume_id': '326b2c58-a69e-42e3-a1c0-532dd3c5b724', 'serial': '326b2c58-a69e-42e3-a1c0-532dd3c5b724'} {{(pid=62519) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1586.861180] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9dbaf77-02a9-4326-8193-a722d4ef25e1 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1586.883838] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5c257d5-21ab-4f85-b8f0-91b06077c055 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1586.897055] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-faab2b6f-0c4e-4ea2-a7ec-884b0de00e6a {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1586.922450] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2871b8b9-80d3-42ef-8e4e-34738a5c8d7c {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1586.946085] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-c02b397a-4feb-4365-8cb0-fba53cb05cb2 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] The volume has not been displaced from its original location: [datastore1] volume-326b2c58-a69e-42e3-a1c0-532dd3c5b724/volume-326b2c58-a69e-42e3-a1c0-532dd3c5b724.vmdk. No consolidation needed. 
{{(pid=62519) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1586.952019] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-c02b397a-4feb-4365-8cb0-fba53cb05cb2 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] [instance: d8780c40-0099-4ccc-84ae-72fbb14fa1ee] Reconfiguring VM instance instance-0000002b to detach disk 2001 {{(pid=62519) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1586.952825] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f2197ddf-e33a-42f6-a888-1a2d7b5d100f {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1586.981557] env[62519]: DEBUG oslo_vmware.api [None req-07784f7b-8852-4eb7-b75c-baa3510afa4f tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52f8f153-5e92-6a1c-47a1-7ec18525c600, 'name': SearchDatastore_Task, 'duration_secs': 0.009858} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1586.984226] env[62519]: DEBUG oslo_concurrency.lockutils [None req-07784f7b-8852-4eb7-b75c-baa3510afa4f tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Releasing lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1586.984226] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-07784f7b-8852-4eb7-b75c-baa3510afa4f tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] [instance: dac173ff-1807-405f-a59c-bb2efef62a0c] Processing image 15793716-f1d9-4a86-9030-717adf498693 {{(pid=62519) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1586.984226] env[62519]: DEBUG oslo_concurrency.lockutils [None req-07784f7b-8852-4eb7-b75c-baa3510afa4f tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1586.984226] env[62519]: DEBUG oslo_concurrency.lockutils [None req-07784f7b-8852-4eb7-b75c-baa3510afa4f tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Acquired lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1586.984226] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-07784f7b-8852-4eb7-b75c-baa3510afa4f tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62519) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1586.984572] env[62519]: DEBUG oslo_vmware.api [None req-c02b397a-4feb-4365-8cb0-fba53cb05cb2 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Waiting for the task: (returnval){ [ 1586.984572] env[62519]: value = "task-1802449" [ 1586.984572] 
env[62519]: _type = "Task" [ 1586.984572] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1586.984572] env[62519]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-5908370f-1c11-4343-a326-6ee6a80b3b61 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1586.995891] env[62519]: DEBUG oslo_vmware.api [None req-c02b397a-4feb-4365-8cb0-fba53cb05cb2 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Task: {'id': task-1802449, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1587.002105] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-07784f7b-8852-4eb7-b75c-baa3510afa4f tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62519) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1587.002169] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-07784f7b-8852-4eb7-b75c-baa3510afa4f tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62519) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1587.003016] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5ae9a8e9-f0e0-436c-bc54-d964238a606b {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1587.009315] env[62519]: DEBUG oslo_vmware.api [None req-07784f7b-8852-4eb7-b75c-baa3510afa4f tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Waiting for the task: (returnval){ [ 1587.009315] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]52917e4a-f8d0-2fac-7d9a-03f7ffa5a4e2" [ 1587.009315] env[62519]: _type = "Task" [ 1587.009315] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1587.028644] env[62519]: DEBUG oslo_vmware.api [None req-07784f7b-8852-4eb7-b75c-baa3510afa4f tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52917e4a-f8d0-2fac-7d9a-03f7ffa5a4e2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1587.044537] env[62519]: DEBUG nova.compute.manager [None req-76861171-414a-4622-8909-66c97ea75102 tempest-ServersListShow296Test-771773119 tempest-ServersListShow296Test-771773119-project-member] [instance: c28254d5-90ec-421a-b7a5-5b6f16cb9268] Instance disappeared before build. 
{{(pid=62519) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2448}} [ 1587.177199] env[62519]: DEBUG nova.compute.manager [req-d2dfba29-427b-4b0f-ae3d-337298efdf96 req-edf6726f-d9ae-4104-9ba9-6c7e50248e73 service nova] [instance: dac173ff-1807-405f-a59c-bb2efef62a0c] Received event network-changed-bb52070a-ffaf-478e-b8c5-9273d58a0b34 {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1587.177395] env[62519]: DEBUG nova.compute.manager [req-d2dfba29-427b-4b0f-ae3d-337298efdf96 req-edf6726f-d9ae-4104-9ba9-6c7e50248e73 service nova] [instance: dac173ff-1807-405f-a59c-bb2efef62a0c] Refreshing instance network info cache due to event network-changed-bb52070a-ffaf-478e-b8c5-9273d58a0b34. {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11628}} [ 1587.177609] env[62519]: DEBUG oslo_concurrency.lockutils [req-d2dfba29-427b-4b0f-ae3d-337298efdf96 req-edf6726f-d9ae-4104-9ba9-6c7e50248e73 service nova] Acquiring lock "refresh_cache-dac173ff-1807-405f-a59c-bb2efef62a0c" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1587.177884] env[62519]: DEBUG oslo_concurrency.lockutils [req-d2dfba29-427b-4b0f-ae3d-337298efdf96 req-edf6726f-d9ae-4104-9ba9-6c7e50248e73 service nova] Acquired lock "refresh_cache-dac173ff-1807-405f-a59c-bb2efef62a0c" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1587.178054] env[62519]: DEBUG nova.network.neutron [req-d2dfba29-427b-4b0f-ae3d-337298efdf96 req-edf6726f-d9ae-4104-9ba9-6c7e50248e73 service nova] [instance: dac173ff-1807-405f-a59c-bb2efef62a0c] Refreshing network info cache for port bb52070a-ffaf-478e-b8c5-9273d58a0b34 {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1587.202403] env[62519]: DEBUG oslo_vmware.api [None req-b80a0845-960b-4b9e-b305-46f7cb345916 tempest-SecurityGroupsTestJSON-38868017 tempest-SecurityGroupsTestJSON-38868017-project-member] Task: {'id': task-1802448, 'name': PowerOnVM_Task} progress is 88%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1587.258779] env[62519]: DEBUG nova.network.neutron [None req-040512d1-e180-4bd4-99a9-22e550a80706 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] [instance: 80ef3fd4-b9ef-4fd2-a991-feec78a0c81d] Successfully created port: 3dd85b46-bf0d-4e78-8975-e0c0cb8b29d8 {{(pid=62519) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1587.282936] env[62519]: DEBUG nova.compute.manager [None req-040512d1-e180-4bd4-99a9-22e550a80706 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] [instance: 80ef3fd4-b9ef-4fd2-a991-feec78a0c81d] Start building block device mappings for instance. {{(pid=62519) _build_resources /opt/stack/nova/nova/compute/manager.py:2873}} [ 1587.503031] env[62519]: DEBUG oslo_vmware.api [None req-c02b397a-4feb-4365-8cb0-fba53cb05cb2 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Task: {'id': task-1802449, 'name': ReconfigVM_Task, 'duration_secs': 0.410141} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1587.503369] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-c02b397a-4feb-4365-8cb0-fba53cb05cb2 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] [instance: d8780c40-0099-4ccc-84ae-72fbb14fa1ee] Reconfigured VM instance instance-0000002b to detach disk 2001 {{(pid=62519) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1587.512269] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-91d3d253-84c2-4089-9032-3166bb3aa2f1 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1587.537457] env[62519]: DEBUG oslo_vmware.api [None req-07784f7b-8852-4eb7-b75c-baa3510afa4f tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52917e4a-f8d0-2fac-7d9a-03f7ffa5a4e2, 'name': SearchDatastore_Task, 'duration_secs': 0.017498} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1587.539433] env[62519]: DEBUG oslo_vmware.api [None req-c02b397a-4feb-4365-8cb0-fba53cb05cb2 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Waiting for the task: (returnval){ [ 1587.539433] env[62519]: value = "task-1802450" [ 1587.539433] env[62519]: _type = "Task" [ 1587.539433] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1587.542159] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-12f103d0-2253-4d38-881b-0d6f7f7abfec {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1587.551624] env[62519]: DEBUG oslo_vmware.api [None req-07784f7b-8852-4eb7-b75c-baa3510afa4f tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Waiting for the task: (returnval){ [ 1587.551624] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]52078344-70ae-0681-4be7-508246ba2c5b" [ 1587.551624] env[62519]: _type = "Task" [ 1587.551624] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1587.556319] env[62519]: DEBUG oslo_vmware.api [None req-c02b397a-4feb-4365-8cb0-fba53cb05cb2 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Task: {'id': task-1802450, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1587.565197] env[62519]: DEBUG oslo_vmware.api [None req-07784f7b-8852-4eb7-b75c-baa3510afa4f tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52078344-70ae-0681-4be7-508246ba2c5b, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1587.702386] env[62519]: DEBUG nova.compute.manager [req-0cb1800f-7a81-4e86-ba85-15e55e446db3 req-0fb65836-f5b8-4f13-acb6-c27b4978742e service nova] [instance: 765cf18e-53a0-4cc6-ad0e-337a6f68915c] Received event network-changed-53564e7d-5a36-43d6-a79a-d13c12623d01 {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1587.702386] env[62519]: DEBUG nova.compute.manager [req-0cb1800f-7a81-4e86-ba85-15e55e446db3 req-0fb65836-f5b8-4f13-acb6-c27b4978742e service nova] [instance: 765cf18e-53a0-4cc6-ad0e-337a6f68915c] Refreshing instance network info cache due to event network-changed-53564e7d-5a36-43d6-a79a-d13c12623d01. {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11628}} [ 1587.703310] env[62519]: DEBUG oslo_concurrency.lockutils [req-0cb1800f-7a81-4e86-ba85-15e55e446db3 req-0fb65836-f5b8-4f13-acb6-c27b4978742e service nova] Acquiring lock "refresh_cache-765cf18e-53a0-4cc6-ad0e-337a6f68915c" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1587.703310] env[62519]: DEBUG oslo_concurrency.lockutils [req-0cb1800f-7a81-4e86-ba85-15e55e446db3 req-0fb65836-f5b8-4f13-acb6-c27b4978742e service nova] Acquired lock "refresh_cache-765cf18e-53a0-4cc6-ad0e-337a6f68915c" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1587.703310] env[62519]: DEBUG nova.network.neutron [req-0cb1800f-7a81-4e86-ba85-15e55e446db3 req-0fb65836-f5b8-4f13-acb6-c27b4978742e service nova] [instance: 765cf18e-53a0-4cc6-ad0e-337a6f68915c] Refreshing network info cache for port 53564e7d-5a36-43d6-a79a-d13c12623d01 {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1587.708927] env[62519]: DEBUG oslo_vmware.api [None req-b80a0845-960b-4b9e-b305-46f7cb345916 tempest-SecurityGroupsTestJSON-38868017 tempest-SecurityGroupsTestJSON-38868017-project-member] Task: {'id': task-1802448, 'name': PowerOnVM_Task, 'duration_secs': 0.749025} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1587.709189] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-b80a0845-960b-4b9e-b305-46f7cb345916 tempest-SecurityGroupsTestJSON-38868017 tempest-SecurityGroupsTestJSON-38868017-project-member] [instance: ad374dd9-a92d-4b76-9609-7562346e05a8] Powered on the VM {{(pid=62519) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1587.709493] env[62519]: INFO nova.compute.manager [None req-b80a0845-960b-4b9e-b305-46f7cb345916 tempest-SecurityGroupsTestJSON-38868017 tempest-SecurityGroupsTestJSON-38868017-project-member] [instance: ad374dd9-a92d-4b76-9609-7562346e05a8] Took 9.67 seconds to spawn the instance on the hypervisor. 
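The SearchDatastore_Task, ReconfigVM_Task and PowerOnVM_Task entries above all follow the same wait_for_task pattern: the driver submits a vCenter task, then polls it (api.py:397/434) until it reports completion together with a duration_secs value (api.py:444). The sketch below is a minimal, standard-library-only illustration of that poll-until-done loop; the FakeTask class, its field names and the poll interval are assumptions for illustration and are not oslo.vmware's actual implementation.

import time
from dataclasses import dataclass


# Hypothetical stand-in for a vCenter task handle; the real driver polls the
# vSphere API through oslo.vmware rather than an in-memory object.
@dataclass
class FakeTask:
    name: str
    progress: int = 0

    def refresh(self) -> None:
        # Pretend the backend advances the task a bit on every poll.
        self.progress = min(100, self.progress + 25)

    @property
    def done(self) -> bool:
        return self.progress >= 100


def wait_for_task(task: FakeTask, poll_interval: float = 0.5) -> float:
    """Poll `task` until it completes, reporting progress like the records above.

    Returns the elapsed time, mirroring the 'duration_secs' field that is
    logged when a task completes successfully.
    """
    start = time.monotonic()
    while True:
        task.refresh()
        if task.done:
            duration = time.monotonic() - start
            print(f"Task {task.name} completed successfully in {duration:.3f}s")
            return duration
        print(f"Task {task.name} progress is {task.progress}%")
        time.sleep(poll_interval)


if __name__ == "__main__":
    wait_for_task(FakeTask(name="ReconfigVM_Task"), poll_interval=0.1)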
[ 1587.709714] env[62519]: DEBUG nova.compute.manager [None req-b80a0845-960b-4b9e-b305-46f7cb345916 tempest-SecurityGroupsTestJSON-38868017 tempest-SecurityGroupsTestJSON-38868017-project-member] [instance: ad374dd9-a92d-4b76-9609-7562346e05a8] Checking state {{(pid=62519) _get_power_state /opt/stack/nova/nova/compute/manager.py:1799}} [ 1587.710577] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1291766f-1c14-4ecc-9699-5fe89f30e55f {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1587.988500] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47017d78-5364-4617-bada-63289c84fb18 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1587.999504] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f5d30bc-fb6e-44b3-8b31-98e907edc7cf {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1588.030342] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cfd9afd4-c89c-4cae-aa1c-7eddfa9c7018 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1588.038260] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a07c1802-a374-4051-b116-2c8f74246ae0 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1588.054740] env[62519]: DEBUG nova.compute.provider_tree [None req-7f6d51ef-3fb2-4405-8ddb-ee0d7b872e58 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Inventory has not changed in ProviderTree for provider: f8ca0d98-9158-4b85-ae0e-b106f966dd44 {{(pid=62519) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1588.056773] env[62519]: DEBUG nova.network.neutron [req-d2dfba29-427b-4b0f-ae3d-337298efdf96 req-edf6726f-d9ae-4104-9ba9-6c7e50248e73 service nova] [instance: dac173ff-1807-405f-a59c-bb2efef62a0c] Updated VIF entry in instance network info cache for port bb52070a-ffaf-478e-b8c5-9273d58a0b34. 
{{(pid=62519) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1588.057093] env[62519]: DEBUG nova.network.neutron [req-d2dfba29-427b-4b0f-ae3d-337298efdf96 req-edf6726f-d9ae-4104-9ba9-6c7e50248e73 service nova] [instance: dac173ff-1807-405f-a59c-bb2efef62a0c] Updating instance_info_cache with network_info: [{"id": "bb52070a-ffaf-478e-b8c5-9273d58a0b34", "address": "fa:16:3e:9c:1f:3e", "network": {"id": "f59f09e8-6201-4c07-9809-ef4c213de15f", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-906781315-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "555fa612766f4b5fa173664ca3fa496c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8bc3fa06-9d5b-4ab1-8113-6ed8942d23b6", "external-id": "nsx-vlan-transportzone-72", "segmentation_id": 72, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbb52070a-ff", "ovs_interfaceid": "bb52070a-ffaf-478e-b8c5-9273d58a0b34", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1588.074935] env[62519]: DEBUG oslo_vmware.api [None req-07784f7b-8852-4eb7-b75c-baa3510afa4f tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52078344-70ae-0681-4be7-508246ba2c5b, 'name': SearchDatastore_Task, 'duration_secs': 0.029203} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1588.075290] env[62519]: DEBUG oslo_vmware.api [None req-c02b397a-4feb-4365-8cb0-fba53cb05cb2 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Task: {'id': task-1802450, 'name': ReconfigVM_Task, 'duration_secs': 0.262282} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1588.077028] env[62519]: DEBUG oslo_concurrency.lockutils [None req-07784f7b-8852-4eb7-b75c-baa3510afa4f tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Releasing lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1588.077311] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-07784f7b-8852-4eb7-b75c-baa3510afa4f tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk to [datastore1] dac173ff-1807-405f-a59c-bb2efef62a0c/dac173ff-1807-405f-a59c-bb2efef62a0c.vmdk {{(pid=62519) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1588.077669] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-c02b397a-4feb-4365-8cb0-fba53cb05cb2 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] [instance: d8780c40-0099-4ccc-84ae-72fbb14fa1ee] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-373708', 'volume_id': '326b2c58-a69e-42e3-a1c0-532dd3c5b724', 'name': 'volume-326b2c58-a69e-42e3-a1c0-532dd3c5b724', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'd8780c40-0099-4ccc-84ae-72fbb14fa1ee', 'attached_at': '', 'detached_at': '', 'volume_id': '326b2c58-a69e-42e3-a1c0-532dd3c5b724', 'serial': '326b2c58-a69e-42e3-a1c0-532dd3c5b724'} {{(pid=62519) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1588.078013] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-c02b397a-4feb-4365-8cb0-fba53cb05cb2 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] [instance: d8780c40-0099-4ccc-84ae-72fbb14fa1ee] Destroying instance {{(pid=62519) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1588.078317] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-956953fb-09f4-4ebb-929c-c2296507b2f1 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1588.081374] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2de551f-100f-4a16-b3a1-9645188e1c49 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1588.090557] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-c02b397a-4feb-4365-8cb0-fba53cb05cb2 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] [instance: d8780c40-0099-4ccc-84ae-72fbb14fa1ee] Unregistering the VM {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1588.094183] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d162084f-3ec8-4d4a-bd86-422124d4e37a {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1588.096227] env[62519]: DEBUG oslo_vmware.api [None req-07784f7b-8852-4eb7-b75c-baa3510afa4f 
tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Waiting for the task: (returnval){ [ 1588.096227] env[62519]: value = "task-1802451" [ 1588.096227] env[62519]: _type = "Task" [ 1588.096227] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1588.105880] env[62519]: DEBUG oslo_vmware.api [None req-07784f7b-8852-4eb7-b75c-baa3510afa4f tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Task: {'id': task-1802451, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1588.172392] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-c02b397a-4feb-4365-8cb0-fba53cb05cb2 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] [instance: d8780c40-0099-4ccc-84ae-72fbb14fa1ee] Unregistered the VM {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1588.172625] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-c02b397a-4feb-4365-8cb0-fba53cb05cb2 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] [instance: d8780c40-0099-4ccc-84ae-72fbb14fa1ee] Deleting contents of the VM from datastore datastore1 {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1588.172810] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-c02b397a-4feb-4365-8cb0-fba53cb05cb2 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Deleting the datastore file [datastore1] d8780c40-0099-4ccc-84ae-72fbb14fa1ee {{(pid=62519) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1588.173090] env[62519]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-64a9a5ba-7c24-404e-b0ff-14028d19372e {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1588.179794] env[62519]: DEBUG oslo_vmware.api [None req-c02b397a-4feb-4365-8cb0-fba53cb05cb2 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Waiting for the task: (returnval){ [ 1588.179794] env[62519]: value = "task-1802453" [ 1588.179794] env[62519]: _type = "Task" [ 1588.179794] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1588.189290] env[62519]: DEBUG oslo_vmware.api [None req-c02b397a-4feb-4365-8cb0-fba53cb05cb2 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Task: {'id': task-1802453, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1588.241952] env[62519]: INFO nova.compute.manager [None req-b80a0845-960b-4b9e-b305-46f7cb345916 tempest-SecurityGroupsTestJSON-38868017 tempest-SecurityGroupsTestJSON-38868017-project-member] [instance: ad374dd9-a92d-4b76-9609-7562346e05a8] Took 40.73 seconds to build instance. [ 1588.303742] env[62519]: DEBUG nova.compute.manager [None req-040512d1-e180-4bd4-99a9-22e550a80706 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] [instance: 80ef3fd4-b9ef-4fd2-a991-feec78a0c81d] Start spawning the instance on the hypervisor. 
{{(pid=62519) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2647}} [ 1588.346244] env[62519]: DEBUG nova.virt.hardware [None req-040512d1-e180-4bd4-99a9-22e550a80706 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T07:56:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T07:55:55Z,direct_url=,disk_format='vmdk',id=15793716-f1d9-4a86-9030-717adf498693,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4069337684d348e0a1ab4eb6a1a2b14d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T07:55:56Z,virtual_size=,visibility=), allow threads: False {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1588.346506] env[62519]: DEBUG nova.virt.hardware [None req-040512d1-e180-4bd4-99a9-22e550a80706 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Flavor limits 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1588.346663] env[62519]: DEBUG nova.virt.hardware [None req-040512d1-e180-4bd4-99a9-22e550a80706 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Image limits 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1588.346906] env[62519]: DEBUG nova.virt.hardware [None req-040512d1-e180-4bd4-99a9-22e550a80706 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Flavor pref 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1588.347146] env[62519]: DEBUG nova.virt.hardware [None req-040512d1-e180-4bd4-99a9-22e550a80706 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Image pref 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1588.347368] env[62519]: DEBUG nova.virt.hardware [None req-040512d1-e180-4bd4-99a9-22e550a80706 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1588.347639] env[62519]: DEBUG nova.virt.hardware [None req-040512d1-e180-4bd4-99a9-22e550a80706 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1588.347892] env[62519]: DEBUG nova.virt.hardware [None req-040512d1-e180-4bd4-99a9-22e550a80706 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62519) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1588.348112] env[62519]: DEBUG 
nova.virt.hardware [None req-040512d1-e180-4bd4-99a9-22e550a80706 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Got 1 possible topologies {{(pid=62519) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1588.348252] env[62519]: DEBUG nova.virt.hardware [None req-040512d1-e180-4bd4-99a9-22e550a80706 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1588.348421] env[62519]: DEBUG nova.virt.hardware [None req-040512d1-e180-4bd4-99a9-22e550a80706 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1588.349389] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66e35408-c78b-4801-85f4-be5493ced276 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1588.358286] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b9178ab-9827-470e-a07d-ffebf4bf8037 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1588.562716] env[62519]: DEBUG oslo_concurrency.lockutils [req-d2dfba29-427b-4b0f-ae3d-337298efdf96 req-edf6726f-d9ae-4104-9ba9-6c7e50248e73 service nova] Releasing lock "refresh_cache-dac173ff-1807-405f-a59c-bb2efef62a0c" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1588.563753] env[62519]: DEBUG nova.scheduler.client.report [None req-7f6d51ef-3fb2-4405-8ddb-ee0d7b872e58 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Inventory has not changed for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 157, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1588.580368] env[62519]: DEBUG oslo_concurrency.lockutils [None req-76861171-414a-4622-8909-66c97ea75102 tempest-ServersListShow296Test-771773119 tempest-ServersListShow296Test-771773119-project-member] Lock "c28254d5-90ec-421a-b7a5-5b6f16cb9268" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 34.994s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1588.608303] env[62519]: DEBUG oslo_vmware.api [None req-07784f7b-8852-4eb7-b75c-baa3510afa4f tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Task: {'id': task-1802451, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1588.693873] env[62519]: DEBUG oslo_vmware.api [None req-c02b397a-4feb-4365-8cb0-fba53cb05cb2 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Task: {'id': task-1802453, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.283597} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1588.694587] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-c02b397a-4feb-4365-8cb0-fba53cb05cb2 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Deleted the datastore file {{(pid=62519) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1588.694839] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-c02b397a-4feb-4365-8cb0-fba53cb05cb2 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] [instance: d8780c40-0099-4ccc-84ae-72fbb14fa1ee] Deleted contents of the VM from datastore datastore1 {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1588.694976] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-c02b397a-4feb-4365-8cb0-fba53cb05cb2 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] [instance: d8780c40-0099-4ccc-84ae-72fbb14fa1ee] Instance destroyed {{(pid=62519) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1588.695183] env[62519]: INFO nova.compute.manager [None req-c02b397a-4feb-4365-8cb0-fba53cb05cb2 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] [instance: d8780c40-0099-4ccc-84ae-72fbb14fa1ee] Took 2.36 seconds to destroy the instance on the hypervisor. [ 1588.695646] env[62519]: DEBUG oslo.service.loopingcall [None req-c02b397a-4feb-4365-8cb0-fba53cb05cb2 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62519) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1588.695984] env[62519]: DEBUG nova.compute.manager [-] [instance: d8780c40-0099-4ccc-84ae-72fbb14fa1ee] Deallocating network for instance {{(pid=62519) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2297}} [ 1588.696106] env[62519]: DEBUG nova.network.neutron [-] [instance: d8780c40-0099-4ccc-84ae-72fbb14fa1ee] deallocate_for_instance() {{(pid=62519) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1588.728813] env[62519]: DEBUG nova.network.neutron [req-0cb1800f-7a81-4e86-ba85-15e55e446db3 req-0fb65836-f5b8-4f13-acb6-c27b4978742e service nova] [instance: 765cf18e-53a0-4cc6-ad0e-337a6f68915c] Updated VIF entry in instance network info cache for port 53564e7d-5a36-43d6-a79a-d13c12623d01. 
{{(pid=62519) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1588.729851] env[62519]: DEBUG nova.network.neutron [req-0cb1800f-7a81-4e86-ba85-15e55e446db3 req-0fb65836-f5b8-4f13-acb6-c27b4978742e service nova] [instance: 765cf18e-53a0-4cc6-ad0e-337a6f68915c] Updating instance_info_cache with network_info: [{"id": "53564e7d-5a36-43d6-a79a-d13c12623d01", "address": "fa:16:3e:c5:8c:e0", "network": {"id": "3cf98c73-7859-4700-ba4c-831fd0971624", "bridge": "br-int", "label": "tempest-ServersTestBootFromVolume-1479507906-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.210", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ffa09c2044954d848a83de9c87fff94e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ed9dc063-5c7a-4591-ba7d-b58b861d7f63", "external-id": "nsx-vlan-transportzone-37", "segmentation_id": 37, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap53564e7d-5a", "ovs_interfaceid": "53564e7d-5a36-43d6-a79a-d13c12623d01", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1589.071325] env[62519]: DEBUG oslo_concurrency.lockutils [None req-7f6d51ef-3fb2-4405-8ddb-ee0d7b872e58 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.798s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1589.071984] env[62519]: DEBUG nova.compute.manager [None req-7f6d51ef-3fb2-4405-8ddb-ee0d7b872e58 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] [instance: 302edcd3-bd6e-41da-b731-4d4c1bb5c3c1] Start building networks asynchronously for instance. 
{{(pid=62519) _build_resources /opt/stack/nova/nova/compute/manager.py:2838}} [ 1589.076619] env[62519]: DEBUG oslo_concurrency.lockutils [None req-a05ef864-7b2b-4646-a83f-596c8b3440a9 tempest-ServersWithSpecificFlavorTestJSON-2102225660 tempest-ServersWithSpecificFlavorTestJSON-2102225660-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 31.241s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1589.078757] env[62519]: INFO nova.compute.claims [None req-a05ef864-7b2b-4646-a83f-596c8b3440a9 tempest-ServersWithSpecificFlavorTestJSON-2102225660 tempest-ServersWithSpecificFlavorTestJSON-2102225660-project-member] [instance: f0925a44-c15b-4415-99bc-1b2366292fe4] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1589.084279] env[62519]: DEBUG nova.compute.manager [None req-852acb1c-a919-4af2-9465-5ce6a7ca4001 tempest-TenantUsagesTestJSON-1732852836 tempest-TenantUsagesTestJSON-1732852836-project-member] [instance: 540167be-5295-4e28-9b25-16317746dd0e] Starting instance... {{(pid=62519) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2441}} [ 1589.113522] env[62519]: DEBUG oslo_vmware.api [None req-07784f7b-8852-4eb7-b75c-baa3510afa4f tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Task: {'id': task-1802451, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1589.233048] env[62519]: DEBUG oslo_concurrency.lockutils [req-0cb1800f-7a81-4e86-ba85-15e55e446db3 req-0fb65836-f5b8-4f13-acb6-c27b4978742e service nova] Releasing lock "refresh_cache-765cf18e-53a0-4cc6-ad0e-337a6f68915c" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1589.369791] env[62519]: DEBUG nova.network.neutron [None req-040512d1-e180-4bd4-99a9-22e550a80706 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] [instance: 80ef3fd4-b9ef-4fd2-a991-feec78a0c81d] Successfully updated port: 3dd85b46-bf0d-4e78-8975-e0c0cb8b29d8 {{(pid=62519) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1589.586032] env[62519]: DEBUG nova.compute.utils [None req-7f6d51ef-3fb2-4405-8ddb-ee0d7b872e58 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Using /dev/sd instead of None {{(pid=62519) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1589.588227] env[62519]: DEBUG nova.compute.manager [None req-7f6d51ef-3fb2-4405-8ddb-ee0d7b872e58 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] [instance: 302edcd3-bd6e-41da-b731-4d4c1bb5c3c1] Allocating IP information in the background. 
{{(pid=62519) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1989}} [ 1589.588632] env[62519]: DEBUG nova.network.neutron [None req-7f6d51ef-3fb2-4405-8ddb-ee0d7b872e58 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] [instance: 302edcd3-bd6e-41da-b731-4d4c1bb5c3c1] allocate_for_instance() {{(pid=62519) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1589.613105] env[62519]: DEBUG oslo_vmware.api [None req-07784f7b-8852-4eb7-b75c-baa3510afa4f tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Task: {'id': task-1802451, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.220952} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1589.613105] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-07784f7b-8852-4eb7-b75c-baa3510afa4f tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk to [datastore1] dac173ff-1807-405f-a59c-bb2efef62a0c/dac173ff-1807-405f-a59c-bb2efef62a0c.vmdk {{(pid=62519) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1589.613105] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-07784f7b-8852-4eb7-b75c-baa3510afa4f tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] [instance: dac173ff-1807-405f-a59c-bb2efef62a0c] Extending root virtual disk to 1048576 {{(pid=62519) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1589.613105] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-340c49dd-b09f-4222-944c-1c44b870461b {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1589.615383] env[62519]: DEBUG oslo_concurrency.lockutils [None req-852acb1c-a919-4af2-9465-5ce6a7ca4001 tempest-TenantUsagesTestJSON-1732852836 tempest-TenantUsagesTestJSON-1732852836-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1589.617682] env[62519]: DEBUG oslo_vmware.api [None req-07784f7b-8852-4eb7-b75c-baa3510afa4f tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Waiting for the task: (returnval){ [ 1589.617682] env[62519]: value = "task-1802454" [ 1589.617682] env[62519]: _type = "Task" [ 1589.617682] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1589.627709] env[62519]: DEBUG oslo_vmware.api [None req-07784f7b-8852-4eb7-b75c-baa3510afa4f tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Task: {'id': task-1802454, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1589.659493] env[62519]: DEBUG oslo_concurrency.lockutils [None req-4e8faa11-5a84-4b80-9100-d3b897a22912 tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] Acquiring lock "2b68e95a-1d93-43ee-b0a6-996c9042f5c7" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1589.659834] env[62519]: DEBUG oslo_concurrency.lockutils [None req-4e8faa11-5a84-4b80-9100-d3b897a22912 tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] Lock "2b68e95a-1d93-43ee-b0a6-996c9042f5c7" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1589.660107] env[62519]: DEBUG oslo_concurrency.lockutils [None req-4e8faa11-5a84-4b80-9100-d3b897a22912 tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] Acquiring lock "2b68e95a-1d93-43ee-b0a6-996c9042f5c7-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1589.660349] env[62519]: DEBUG oslo_concurrency.lockutils [None req-4e8faa11-5a84-4b80-9100-d3b897a22912 tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] Lock "2b68e95a-1d93-43ee-b0a6-996c9042f5c7-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1589.660557] env[62519]: DEBUG oslo_concurrency.lockutils [None req-4e8faa11-5a84-4b80-9100-d3b897a22912 tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] Lock "2b68e95a-1d93-43ee-b0a6-996c9042f5c7-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1589.662979] env[62519]: INFO nova.compute.manager [None req-4e8faa11-5a84-4b80-9100-d3b897a22912 tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] [instance: 2b68e95a-1d93-43ee-b0a6-996c9042f5c7] Terminating instance [ 1589.748271] env[62519]: DEBUG oslo_concurrency.lockutils [None req-b80a0845-960b-4b9e-b305-46f7cb345916 tempest-SecurityGroupsTestJSON-38868017 tempest-SecurityGroupsTestJSON-38868017-project-member] Lock "ad374dd9-a92d-4b76-9609-7562346e05a8" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 97.335s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1589.762152] env[62519]: DEBUG nova.compute.manager [req-b59a9399-30fa-46b1-963d-abe7c6f34f52 req-96711da0-886e-4d7f-abe8-0e9c664b1641 service nova] [instance: ad374dd9-a92d-4b76-9609-7562346e05a8] Received event network-changed-8d20f83b-f706-40ea-bba7-461aba73113f {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 
1589.762152] env[62519]: DEBUG nova.compute.manager [req-b59a9399-30fa-46b1-963d-abe7c6f34f52 req-96711da0-886e-4d7f-abe8-0e9c664b1641 service nova] [instance: ad374dd9-a92d-4b76-9609-7562346e05a8] Refreshing instance network info cache due to event network-changed-8d20f83b-f706-40ea-bba7-461aba73113f. {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11628}} [ 1589.762152] env[62519]: DEBUG oslo_concurrency.lockutils [req-b59a9399-30fa-46b1-963d-abe7c6f34f52 req-96711da0-886e-4d7f-abe8-0e9c664b1641 service nova] Acquiring lock "refresh_cache-ad374dd9-a92d-4b76-9609-7562346e05a8" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1589.762152] env[62519]: DEBUG oslo_concurrency.lockutils [req-b59a9399-30fa-46b1-963d-abe7c6f34f52 req-96711da0-886e-4d7f-abe8-0e9c664b1641 service nova] Acquired lock "refresh_cache-ad374dd9-a92d-4b76-9609-7562346e05a8" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1589.762152] env[62519]: DEBUG nova.network.neutron [req-b59a9399-30fa-46b1-963d-abe7c6f34f52 req-96711da0-886e-4d7f-abe8-0e9c664b1641 service nova] [instance: ad374dd9-a92d-4b76-9609-7562346e05a8] Refreshing network info cache for port 8d20f83b-f706-40ea-bba7-461aba73113f {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1589.810156] env[62519]: DEBUG nova.network.neutron [-] [instance: d8780c40-0099-4ccc-84ae-72fbb14fa1ee] Updating instance_info_cache with network_info: [] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1589.835615] env[62519]: DEBUG nova.policy [None req-7f6d51ef-3fb2-4405-8ddb-ee0d7b872e58 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '12d88579b24941a0be744afe44126360', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'df6727c290724a8ebef5188c77e91399', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62519) authorize /opt/stack/nova/nova/policy.py:192}} [ 1589.873145] env[62519]: DEBUG oslo_concurrency.lockutils [None req-040512d1-e180-4bd4-99a9-22e550a80706 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Acquiring lock "refresh_cache-80ef3fd4-b9ef-4fd2-a991-feec78a0c81d" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1589.873354] env[62519]: DEBUG oslo_concurrency.lockutils [None req-040512d1-e180-4bd4-99a9-22e550a80706 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Acquired lock "refresh_cache-80ef3fd4-b9ef-4fd2-a991-feec78a0c81d" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1589.873501] env[62519]: DEBUG nova.network.neutron [None req-040512d1-e180-4bd4-99a9-22e550a80706 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] [instance: 80ef3fd4-b9ef-4fd2-a991-feec78a0c81d] Building network info cache for instance {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 
1589.952658] env[62519]: DEBUG nova.compute.manager [req-54bc3903-4c00-46c8-8dd4-6bab77b83a58 req-07416d67-0a74-4038-babe-0194f81034f4 service nova] [instance: 80ef3fd4-b9ef-4fd2-a991-feec78a0c81d] Received event network-vif-plugged-3dd85b46-bf0d-4e78-8975-e0c0cb8b29d8 {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1589.953022] env[62519]: DEBUG oslo_concurrency.lockutils [req-54bc3903-4c00-46c8-8dd4-6bab77b83a58 req-07416d67-0a74-4038-babe-0194f81034f4 service nova] Acquiring lock "80ef3fd4-b9ef-4fd2-a991-feec78a0c81d-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1589.953416] env[62519]: DEBUG oslo_concurrency.lockutils [req-54bc3903-4c00-46c8-8dd4-6bab77b83a58 req-07416d67-0a74-4038-babe-0194f81034f4 service nova] Lock "80ef3fd4-b9ef-4fd2-a991-feec78a0c81d-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1589.953579] env[62519]: DEBUG oslo_concurrency.lockutils [req-54bc3903-4c00-46c8-8dd4-6bab77b83a58 req-07416d67-0a74-4038-babe-0194f81034f4 service nova] Lock "80ef3fd4-b9ef-4fd2-a991-feec78a0c81d-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1589.953805] env[62519]: DEBUG nova.compute.manager [req-54bc3903-4c00-46c8-8dd4-6bab77b83a58 req-07416d67-0a74-4038-babe-0194f81034f4 service nova] [instance: 80ef3fd4-b9ef-4fd2-a991-feec78a0c81d] No waiting events found dispatching network-vif-plugged-3dd85b46-bf0d-4e78-8975-e0c0cb8b29d8 {{(pid=62519) pop_instance_event /opt/stack/nova/nova/compute/manager.py:323}} [ 1589.953956] env[62519]: WARNING nova.compute.manager [req-54bc3903-4c00-46c8-8dd4-6bab77b83a58 req-07416d67-0a74-4038-babe-0194f81034f4 service nova] [instance: 80ef3fd4-b9ef-4fd2-a991-feec78a0c81d] Received unexpected event network-vif-plugged-3dd85b46-bf0d-4e78-8975-e0c0cb8b29d8 for instance with vm_state building and task_state spawning. [ 1589.954180] env[62519]: DEBUG nova.compute.manager [req-54bc3903-4c00-46c8-8dd4-6bab77b83a58 req-07416d67-0a74-4038-babe-0194f81034f4 service nova] [instance: d8780c40-0099-4ccc-84ae-72fbb14fa1ee] Received event network-vif-deleted-d13d36cf-2bb2-4dde-a6de-51669dc83351 {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1589.954366] env[62519]: DEBUG nova.compute.manager [req-54bc3903-4c00-46c8-8dd4-6bab77b83a58 req-07416d67-0a74-4038-babe-0194f81034f4 service nova] [instance: 80ef3fd4-b9ef-4fd2-a991-feec78a0c81d] Received event network-changed-3dd85b46-bf0d-4e78-8975-e0c0cb8b29d8 {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1589.954540] env[62519]: DEBUG nova.compute.manager [req-54bc3903-4c00-46c8-8dd4-6bab77b83a58 req-07416d67-0a74-4038-babe-0194f81034f4 service nova] [instance: 80ef3fd4-b9ef-4fd2-a991-feec78a0c81d] Refreshing instance network info cache due to event network-changed-3dd85b46-bf0d-4e78-8975-e0c0cb8b29d8. 
{{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11628}} [ 1589.954752] env[62519]: DEBUG oslo_concurrency.lockutils [req-54bc3903-4c00-46c8-8dd4-6bab77b83a58 req-07416d67-0a74-4038-babe-0194f81034f4 service nova] Acquiring lock "refresh_cache-80ef3fd4-b9ef-4fd2-a991-feec78a0c81d" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1590.089131] env[62519]: DEBUG nova.compute.manager [None req-7f6d51ef-3fb2-4405-8ddb-ee0d7b872e58 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] [instance: 302edcd3-bd6e-41da-b731-4d4c1bb5c3c1] Start building block device mappings for instance. {{(pid=62519) _build_resources /opt/stack/nova/nova/compute/manager.py:2873}} [ 1590.133361] env[62519]: DEBUG oslo_vmware.api [None req-07784f7b-8852-4eb7-b75c-baa3510afa4f tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Task: {'id': task-1802454, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.092815} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1590.136147] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-07784f7b-8852-4eb7-b75c-baa3510afa4f tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] [instance: dac173ff-1807-405f-a59c-bb2efef62a0c] Extended root virtual disk {{(pid=62519) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1590.136147] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8daac6f-7c1e-464d-b300-5f4ddb040f42 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1590.162649] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-07784f7b-8852-4eb7-b75c-baa3510afa4f tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] [instance: dac173ff-1807-405f-a59c-bb2efef62a0c] Reconfiguring VM instance instance-00000030 to attach disk [datastore1] dac173ff-1807-405f-a59c-bb2efef62a0c/dac173ff-1807-405f-a59c-bb2efef62a0c.vmdk or device None with type sparse {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1590.166512] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-71573cc8-83f9-4fe3-b13f-9b5def8b4690 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1590.183664] env[62519]: DEBUG nova.compute.manager [None req-4e8faa11-5a84-4b80-9100-d3b897a22912 tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] [instance: 2b68e95a-1d93-43ee-b0a6-996c9042f5c7] Start destroying the instance on the hypervisor. 
{{(pid=62519) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3166}} [ 1590.186068] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-4e8faa11-5a84-4b80-9100-d3b897a22912 tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] [instance: 2b68e95a-1d93-43ee-b0a6-996c9042f5c7] Destroying instance {{(pid=62519) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1590.186068] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-267e0406-a23b-4eea-94e1-b6bd509f20b5 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1590.194479] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-4e8faa11-5a84-4b80-9100-d3b897a22912 tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] [instance: 2b68e95a-1d93-43ee-b0a6-996c9042f5c7] Powering off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1590.198847] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b6e4e7c0-092e-44e3-a365-f3ce9cc26202 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1590.200837] env[62519]: DEBUG oslo_vmware.api [None req-07784f7b-8852-4eb7-b75c-baa3510afa4f tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Waiting for the task: (returnval){ [ 1590.200837] env[62519]: value = "task-1802455" [ 1590.200837] env[62519]: _type = "Task" [ 1590.200837] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1590.208366] env[62519]: DEBUG oslo_vmware.api [None req-4e8faa11-5a84-4b80-9100-d3b897a22912 tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] Waiting for the task: (returnval){ [ 1590.208366] env[62519]: value = "task-1802456" [ 1590.208366] env[62519]: _type = "Task" [ 1590.208366] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1590.212397] env[62519]: DEBUG oslo_vmware.api [None req-07784f7b-8852-4eb7-b75c-baa3510afa4f tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Task: {'id': task-1802455, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1590.226692] env[62519]: DEBUG oslo_vmware.api [None req-4e8faa11-5a84-4b80-9100-d3b897a22912 tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] Task: {'id': task-1802456, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1590.251438] env[62519]: DEBUG nova.compute.manager [None req-484beabb-56ac-464b-a93a-122561b99fee tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] [instance: 46b3a0fb-29f6-4b66-a091-2d125b69d109] Starting instance... {{(pid=62519) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2441}} [ 1590.314200] env[62519]: INFO nova.compute.manager [-] [instance: d8780c40-0099-4ccc-84ae-72fbb14fa1ee] Took 1.62 seconds to deallocate network for instance. 
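Taken together, the records for instance d8780c40-0099-4ccc-84ae-72fbb14fa1ee trace the driver-level teardown order: detach the attached VMDK volume, unregister the VM, delete its datastore directory, then deallocate the Neutron ports. The following is a rough sketch of that ordering only; the callables are placeholders standing in for the real vmops/ds_util/neutron steps, and none of the names are Nova's actual API.

from typing import Callable


def destroy_instance(
    detach_volumes: Callable[[], None],
    unregister_vm: Callable[[], None],
    delete_datastore_files: Callable[[], None],
    deallocate_network: Callable[[], None],
) -> None:
    """Illustrative teardown ordering inferred from the log records above.

    Each callable is a placeholder for the corresponding driver step; the real
    code paths live in nova.virt.vmwareapi.vmops / ds_util and
    nova.network.neutron.
    """
    detach_volumes()          # ReconfigVM_Task to detach the disk, then 'Detached VMDK'
    unregister_vm()           # VirtualMachine.UnregisterVM
    delete_datastore_files()  # FileManager.DeleteDatastoreFile_Task on the instance directory
    deallocate_network()      # deallocate_for_instance(), leaving an empty network_info


if __name__ == "__main__":
    # Usage example with no-op steps, just to show the call order.
    destroy_instance(
        detach_volumes=lambda: print("detach volumes"),
        unregister_vm=lambda: print("unregister VM"),
        delete_datastore_files=lambda: print("delete datastore files"),
        deallocate_network=lambda: print("deallocate network"),
    )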
[ 1590.416150] env[62519]: DEBUG nova.network.neutron [None req-7f6d51ef-3fb2-4405-8ddb-ee0d7b872e58 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] [instance: 302edcd3-bd6e-41da-b731-4d4c1bb5c3c1] Successfully created port: b3cdcca9-9468-453b-894e-326f90b3cb34 {{(pid=62519) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1590.454734] env[62519]: DEBUG nova.network.neutron [None req-040512d1-e180-4bd4-99a9-22e550a80706 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] [instance: 80ef3fd4-b9ef-4fd2-a991-feec78a0c81d] Instance cache missing network info. {{(pid=62519) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1590.483981] env[62519]: DEBUG oslo_concurrency.lockutils [None req-4b5f69a0-fec4-435b-810a-e826d7caf87e tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] Acquiring lock "24cb49c8-b2ef-4ede-aea6-6e34081beca1" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1590.484305] env[62519]: DEBUG oslo_concurrency.lockutils [None req-4b5f69a0-fec4-435b-810a-e826d7caf87e tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] Lock "24cb49c8-b2ef-4ede-aea6-6e34081beca1" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1590.484444] env[62519]: DEBUG oslo_concurrency.lockutils [None req-4b5f69a0-fec4-435b-810a-e826d7caf87e tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] Acquiring lock "24cb49c8-b2ef-4ede-aea6-6e34081beca1-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1590.484700] env[62519]: DEBUG oslo_concurrency.lockutils [None req-4b5f69a0-fec4-435b-810a-e826d7caf87e tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] Lock "24cb49c8-b2ef-4ede-aea6-6e34081beca1-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1590.484896] env[62519]: DEBUG oslo_concurrency.lockutils [None req-4b5f69a0-fec4-435b-810a-e826d7caf87e tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] Lock "24cb49c8-b2ef-4ede-aea6-6e34081beca1-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1590.488150] env[62519]: INFO nova.compute.manager [None req-4b5f69a0-fec4-435b-810a-e826d7caf87e tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] [instance: 24cb49c8-b2ef-4ede-aea6-6e34081beca1] Terminating instance [ 1590.560948] env[62519]: DEBUG oslo_concurrency.lockutils [None req-5ace482d-d654-44d6-95b3-9912a2e4b9f6 tempest-SecurityGroupsTestJSON-38868017 
tempest-SecurityGroupsTestJSON-38868017-project-member] Acquiring lock "ad374dd9-a92d-4b76-9609-7562346e05a8" by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1590.561194] env[62519]: DEBUG oslo_concurrency.lockutils [None req-5ace482d-d654-44d6-95b3-9912a2e4b9f6 tempest-SecurityGroupsTestJSON-38868017 tempest-SecurityGroupsTestJSON-38868017-project-member] Lock "ad374dd9-a92d-4b76-9609-7562346e05a8" acquired by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1590.561416] env[62519]: INFO nova.compute.manager [None req-5ace482d-d654-44d6-95b3-9912a2e4b9f6 tempest-SecurityGroupsTestJSON-38868017 tempest-SecurityGroupsTestJSON-38868017-project-member] [instance: ad374dd9-a92d-4b76-9609-7562346e05a8] Rebooting instance [ 1590.711727] env[62519]: DEBUG oslo_vmware.api [None req-07784f7b-8852-4eb7-b75c-baa3510afa4f tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Task: {'id': task-1802455, 'name': ReconfigVM_Task, 'duration_secs': 0.447273} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1590.712074] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-07784f7b-8852-4eb7-b75c-baa3510afa4f tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] [instance: dac173ff-1807-405f-a59c-bb2efef62a0c] Reconfigured VM instance instance-00000030 to attach disk [datastore1] dac173ff-1807-405f-a59c-bb2efef62a0c/dac173ff-1807-405f-a59c-bb2efef62a0c.vmdk or device None with type sparse {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1590.712753] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-c2f7ca2c-fc62-442a-8266-4910d6ad5bf8 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1590.727023] env[62519]: DEBUG oslo_vmware.api [None req-4e8faa11-5a84-4b80-9100-d3b897a22912 tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] Task: {'id': task-1802456, 'name': PowerOffVM_Task, 'duration_secs': 0.24302} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1590.728977] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-4e8faa11-5a84-4b80-9100-d3b897a22912 tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] [instance: 2b68e95a-1d93-43ee-b0a6-996c9042f5c7] Powered off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1590.729193] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-4e8faa11-5a84-4b80-9100-d3b897a22912 tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] [instance: 2b68e95a-1d93-43ee-b0a6-996c9042f5c7] Unregistering the VM {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1590.730107] env[62519]: DEBUG oslo_vmware.api [None req-07784f7b-8852-4eb7-b75c-baa3510afa4f tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Waiting for the task: (returnval){ [ 1590.730107] env[62519]: value = "task-1802457" [ 1590.730107] env[62519]: _type = "Task" [ 1590.730107] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1590.730626] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-38679826-6a5c-4ef7-a87d-f5aaf3eb4590 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1590.746548] env[62519]: DEBUG oslo_vmware.api [None req-07784f7b-8852-4eb7-b75c-baa3510afa4f tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Task: {'id': task-1802457, 'name': Rename_Task} progress is 14%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1590.754028] env[62519]: DEBUG nova.network.neutron [None req-040512d1-e180-4bd4-99a9-22e550a80706 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] [instance: 80ef3fd4-b9ef-4fd2-a991-feec78a0c81d] Updating instance_info_cache with network_info: [{"id": "3dd85b46-bf0d-4e78-8975-e0c0cb8b29d8", "address": "fa:16:3e:b4:eb:90", "network": {"id": "ddacabe4-9723-4a0b-9642-4c3ca54cd05f", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1355796152-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "549cc35f5ff249f6bf22c67872883db0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bc1e16db-ad3b-4b7f-ab64-4609c87abac0", "external-id": "nsx-vlan-transportzone-500", "segmentation_id": 500, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3dd85b46-bf", "ovs_interfaceid": "3dd85b46-bf0d-4e78-8975-e0c0cb8b29d8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1590.782732] env[62519]: DEBUG oslo_concurrency.lockutils [None req-484beabb-56ac-464b-a93a-122561b99fee tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1590.808949] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6d72ac1-7a42-4af0-85ea-8063d032d5af {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1590.814853] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-4e8faa11-5a84-4b80-9100-d3b897a22912 tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] [instance: 2b68e95a-1d93-43ee-b0a6-996c9042f5c7] Unregistered the VM {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1590.815314] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-4e8faa11-5a84-4b80-9100-d3b897a22912 tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] [instance: 2b68e95a-1d93-43ee-b0a6-996c9042f5c7] Deleting contents of the VM from datastore datastore1 {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1590.815539] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-4e8faa11-5a84-4b80-9100-d3b897a22912 tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] Deleting the datastore file [datastore1] 2b68e95a-1d93-43ee-b0a6-996c9042f5c7 {{(pid=62519) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1590.816280] env[62519]: DEBUG oslo_vmware.service [-] 
Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-537fd858-f0e8-4be1-9ca8-fc345d377877 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1590.825056] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-761169f2-325a-4893-8174-08ace7a1345c {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1590.832528] env[62519]: DEBUG oslo_vmware.api [None req-4e8faa11-5a84-4b80-9100-d3b897a22912 tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] Waiting for the task: (returnval){ [ 1590.832528] env[62519]: value = "task-1802459" [ 1590.832528] env[62519]: _type = "Task" [ 1590.832528] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1590.866449] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c2c1165-003a-49cc-8717-d151bc8ffcdc {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1590.873713] env[62519]: DEBUG oslo_vmware.api [None req-4e8faa11-5a84-4b80-9100-d3b897a22912 tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] Task: {'id': task-1802459, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1590.878550] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-247c40cb-4510-4883-892b-9c12ae57fc9c {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1590.895296] env[62519]: DEBUG nova.compute.provider_tree [None req-a05ef864-7b2b-4646-a83f-596c8b3440a9 tempest-ServersWithSpecificFlavorTestJSON-2102225660 tempest-ServersWithSpecificFlavorTestJSON-2102225660-project-member] Inventory has not changed in ProviderTree for provider: f8ca0d98-9158-4b85-ae0e-b106f966dd44 {{(pid=62519) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1590.900524] env[62519]: INFO nova.compute.manager [None req-c02b397a-4feb-4365-8cb0-fba53cb05cb2 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] [instance: d8780c40-0099-4ccc-84ae-72fbb14fa1ee] Took 0.59 seconds to detach 1 volumes for instance. [ 1590.992783] env[62519]: DEBUG nova.compute.manager [None req-4b5f69a0-fec4-435b-810a-e826d7caf87e tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] [instance: 24cb49c8-b2ef-4ede-aea6-6e34081beca1] Start destroying the instance on the hypervisor. 
{{(pid=62519) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3166}} [ 1590.993096] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-4b5f69a0-fec4-435b-810a-e826d7caf87e tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] [instance: 24cb49c8-b2ef-4ede-aea6-6e34081beca1] Destroying instance {{(pid=62519) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1590.993996] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-829928fe-8ee6-408d-b9bc-b633bf93145a {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1591.002272] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-4b5f69a0-fec4-435b-810a-e826d7caf87e tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] [instance: 24cb49c8-b2ef-4ede-aea6-6e34081beca1] Powering off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1591.002272] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-3df12e46-7240-4431-946b-9b62f595e5f7 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1591.010813] env[62519]: DEBUG oslo_vmware.api [None req-4b5f69a0-fec4-435b-810a-e826d7caf87e tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] Waiting for the task: (returnval){ [ 1591.010813] env[62519]: value = "task-1802460" [ 1591.010813] env[62519]: _type = "Task" [ 1591.010813] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1591.016651] env[62519]: DEBUG nova.network.neutron [req-b59a9399-30fa-46b1-963d-abe7c6f34f52 req-96711da0-886e-4d7f-abe8-0e9c664b1641 service nova] [instance: ad374dd9-a92d-4b76-9609-7562346e05a8] Updated VIF entry in instance network info cache for port 8d20f83b-f706-40ea-bba7-461aba73113f. 
{{(pid=62519) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1591.017022] env[62519]: DEBUG nova.network.neutron [req-b59a9399-30fa-46b1-963d-abe7c6f34f52 req-96711da0-886e-4d7f-abe8-0e9c664b1641 service nova] [instance: ad374dd9-a92d-4b76-9609-7562346e05a8] Updating instance_info_cache with network_info: [{"id": "8d20f83b-f706-40ea-bba7-461aba73113f", "address": "fa:16:3e:c3:bc:6c", "network": {"id": "9dc148fb-a4d7-47bd-b004-56a2552812eb", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-1607343584-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d044d408a8674580b0f5cd52ca6e756d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1f762954-6ca5-4da5-bf0a-5d31c51ec570", "external-id": "nsx-vlan-transportzone-930", "segmentation_id": 930, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8d20f83b-f7", "ovs_interfaceid": "8d20f83b-f706-40ea-bba7-461aba73113f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1591.022627] env[62519]: DEBUG oslo_vmware.api [None req-4b5f69a0-fec4-435b-810a-e826d7caf87e tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] Task: {'id': task-1802460, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1591.083013] env[62519]: DEBUG oslo_concurrency.lockutils [None req-5ace482d-d654-44d6-95b3-9912a2e4b9f6 tempest-SecurityGroupsTestJSON-38868017 tempest-SecurityGroupsTestJSON-38868017-project-member] Acquiring lock "refresh_cache-ad374dd9-a92d-4b76-9609-7562346e05a8" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1591.102587] env[62519]: DEBUG nova.compute.manager [None req-7f6d51ef-3fb2-4405-8ddb-ee0d7b872e58 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] [instance: 302edcd3-bd6e-41da-b731-4d4c1bb5c3c1] Start spawning the instance on the hypervisor. 
{{(pid=62519) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2647}} [ 1591.130650] env[62519]: DEBUG nova.virt.hardware [None req-7f6d51ef-3fb2-4405-8ddb-ee0d7b872e58 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T07:56:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T07:55:55Z,direct_url=,disk_format='vmdk',id=15793716-f1d9-4a86-9030-717adf498693,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4069337684d348e0a1ab4eb6a1a2b14d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T07:55:56Z,virtual_size=,visibility=), allow threads: False {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1591.130975] env[62519]: DEBUG nova.virt.hardware [None req-7f6d51ef-3fb2-4405-8ddb-ee0d7b872e58 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Flavor limits 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1591.131061] env[62519]: DEBUG nova.virt.hardware [None req-7f6d51ef-3fb2-4405-8ddb-ee0d7b872e58 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Image limits 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1591.131263] env[62519]: DEBUG nova.virt.hardware [None req-7f6d51ef-3fb2-4405-8ddb-ee0d7b872e58 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Flavor pref 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1591.131461] env[62519]: DEBUG nova.virt.hardware [None req-7f6d51ef-3fb2-4405-8ddb-ee0d7b872e58 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Image pref 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1591.131639] env[62519]: DEBUG nova.virt.hardware [None req-7f6d51ef-3fb2-4405-8ddb-ee0d7b872e58 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1591.131950] env[62519]: DEBUG nova.virt.hardware [None req-7f6d51ef-3fb2-4405-8ddb-ee0d7b872e58 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1591.132259] env[62519]: DEBUG nova.virt.hardware [None req-7f6d51ef-3fb2-4405-8ddb-ee0d7b872e58 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62519) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1591.132406] 
env[62519]: DEBUG nova.virt.hardware [None req-7f6d51ef-3fb2-4405-8ddb-ee0d7b872e58 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Got 1 possible topologies {{(pid=62519) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1591.132610] env[62519]: DEBUG nova.virt.hardware [None req-7f6d51ef-3fb2-4405-8ddb-ee0d7b872e58 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1591.132832] env[62519]: DEBUG nova.virt.hardware [None req-7f6d51ef-3fb2-4405-8ddb-ee0d7b872e58 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1591.134040] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1835bd00-ed6e-4c06-8f7c-06cc0404d743 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1591.142930] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70a27505-9e53-4bd9-a0f6-7fc23064bedd {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1591.245149] env[62519]: DEBUG oslo_vmware.api [None req-07784f7b-8852-4eb7-b75c-baa3510afa4f tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Task: {'id': task-1802457, 'name': Rename_Task, 'duration_secs': 0.230152} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1591.245431] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-07784f7b-8852-4eb7-b75c-baa3510afa4f tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] [instance: dac173ff-1807-405f-a59c-bb2efef62a0c] Powering on the VM {{(pid=62519) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1591.245732] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-72501680-b477-442d-8ba8-f6ce7278ad4e {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1591.252645] env[62519]: DEBUG oslo_vmware.api [None req-07784f7b-8852-4eb7-b75c-baa3510afa4f tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Waiting for the task: (returnval){ [ 1591.252645] env[62519]: value = "task-1802461" [ 1591.252645] env[62519]: _type = "Task" [ 1591.252645] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1591.262524] env[62519]: DEBUG oslo_vmware.api [None req-07784f7b-8852-4eb7-b75c-baa3510afa4f tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Task: {'id': task-1802461, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1591.263527] env[62519]: DEBUG oslo_concurrency.lockutils [None req-040512d1-e180-4bd4-99a9-22e550a80706 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Releasing lock "refresh_cache-80ef3fd4-b9ef-4fd2-a991-feec78a0c81d" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1591.264348] env[62519]: DEBUG nova.compute.manager [None req-040512d1-e180-4bd4-99a9-22e550a80706 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] [instance: 80ef3fd4-b9ef-4fd2-a991-feec78a0c81d] Instance network_info: |[{"id": "3dd85b46-bf0d-4e78-8975-e0c0cb8b29d8", "address": "fa:16:3e:b4:eb:90", "network": {"id": "ddacabe4-9723-4a0b-9642-4c3ca54cd05f", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1355796152-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "549cc35f5ff249f6bf22c67872883db0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bc1e16db-ad3b-4b7f-ab64-4609c87abac0", "external-id": "nsx-vlan-transportzone-500", "segmentation_id": 500, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3dd85b46-bf", "ovs_interfaceid": "3dd85b46-bf0d-4e78-8975-e0c0cb8b29d8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62519) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2004}} [ 1591.264563] env[62519]: DEBUG oslo_concurrency.lockutils [req-54bc3903-4c00-46c8-8dd4-6bab77b83a58 req-07416d67-0a74-4038-babe-0194f81034f4 service nova] Acquired lock "refresh_cache-80ef3fd4-b9ef-4fd2-a991-feec78a0c81d" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1591.264801] env[62519]: DEBUG nova.network.neutron [req-54bc3903-4c00-46c8-8dd4-6bab77b83a58 req-07416d67-0a74-4038-babe-0194f81034f4 service nova] [instance: 80ef3fd4-b9ef-4fd2-a991-feec78a0c81d] Refreshing network info cache for port 3dd85b46-bf0d-4e78-8975-e0c0cb8b29d8 {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1591.267137] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-040512d1-e180-4bd4-99a9-22e550a80706 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] [instance: 80ef3fd4-b9ef-4fd2-a991-feec78a0c81d] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:b4:eb:90', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'bc1e16db-ad3b-4b7f-ab64-4609c87abac0', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '3dd85b46-bf0d-4e78-8975-e0c0cb8b29d8', 'vif_model': 'vmxnet3'}] {{(pid=62519) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1591.275692] env[62519]: DEBUG oslo.service.loopingcall [None req-040512d1-e180-4bd4-99a9-22e550a80706 tempest-ServerActionsTestOtherA-311064147 
tempest-ServerActionsTestOtherA-311064147-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62519) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1591.276873] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 80ef3fd4-b9ef-4fd2-a991-feec78a0c81d] Creating VM on the ESX host {{(pid=62519) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1591.276873] env[62519]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-4aba46c9-b6cd-45e5-be9e-36eabd402138 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1591.300866] env[62519]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1591.300866] env[62519]: value = "task-1802462" [ 1591.300866] env[62519]: _type = "Task" [ 1591.300866] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1591.315569] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1802462, 'name': CreateVM_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1591.344152] env[62519]: DEBUG oslo_vmware.api [None req-4e8faa11-5a84-4b80-9100-d3b897a22912 tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] Task: {'id': task-1802459, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1591.399739] env[62519]: DEBUG nova.scheduler.client.report [None req-a05ef864-7b2b-4646-a83f-596c8b3440a9 tempest-ServersWithSpecificFlavorTestJSON-2102225660 tempest-ServersWithSpecificFlavorTestJSON-2102225660-project-member] Inventory has not changed for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 157, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1591.408828] env[62519]: DEBUG oslo_concurrency.lockutils [None req-c02b397a-4feb-4365-8cb0-fba53cb05cb2 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1591.522246] env[62519]: DEBUG oslo_concurrency.lockutils [req-b59a9399-30fa-46b1-963d-abe7c6f34f52 req-96711da0-886e-4d7f-abe8-0e9c664b1641 service nova] Releasing lock "refresh_cache-ad374dd9-a92d-4b76-9609-7562346e05a8" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1591.523093] env[62519]: DEBUG oslo_vmware.api [None req-4b5f69a0-fec4-435b-810a-e826d7caf87e tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] Task: {'id': task-1802460, 'name': PowerOffVM_Task, 'duration_secs': 0.25229} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1591.523619] env[62519]: DEBUG oslo_concurrency.lockutils [None req-5ace482d-d654-44d6-95b3-9912a2e4b9f6 tempest-SecurityGroupsTestJSON-38868017 tempest-SecurityGroupsTestJSON-38868017-project-member] Acquired lock "refresh_cache-ad374dd9-a92d-4b76-9609-7562346e05a8" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1591.523914] env[62519]: DEBUG nova.network.neutron [None req-5ace482d-d654-44d6-95b3-9912a2e4b9f6 tempest-SecurityGroupsTestJSON-38868017 tempest-SecurityGroupsTestJSON-38868017-project-member] [instance: ad374dd9-a92d-4b76-9609-7562346e05a8] Building network info cache for instance {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1591.525585] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-4b5f69a0-fec4-435b-810a-e826d7caf87e tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] [instance: 24cb49c8-b2ef-4ede-aea6-6e34081beca1] Powered off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1591.525836] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-4b5f69a0-fec4-435b-810a-e826d7caf87e tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] [instance: 24cb49c8-b2ef-4ede-aea6-6e34081beca1] Unregistering the VM {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1591.526938] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-38ccb490-5248-4f95-9185-319758c753dc {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1591.658346] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-4b5f69a0-fec4-435b-810a-e826d7caf87e tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] [instance: 24cb49c8-b2ef-4ede-aea6-6e34081beca1] Unregistered the VM {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1591.658594] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-4b5f69a0-fec4-435b-810a-e826d7caf87e tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] [instance: 24cb49c8-b2ef-4ede-aea6-6e34081beca1] Deleting contents of the VM from datastore datastore1 {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1591.658782] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-4b5f69a0-fec4-435b-810a-e826d7caf87e tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] Deleting the datastore file [datastore1] 24cb49c8-b2ef-4ede-aea6-6e34081beca1 {{(pid=62519) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1591.659092] env[62519]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-fed6fb42-b321-4ffd-9f43-0ef71c85d0b7 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1591.666974] env[62519]: DEBUG oslo_vmware.api [None req-4b5f69a0-fec4-435b-810a-e826d7caf87e tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] Waiting for the task: (returnval){ [ 1591.666974] env[62519]: value = "task-1802464" [ 1591.666974] 
env[62519]: _type = "Task" [ 1591.666974] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1591.677296] env[62519]: DEBUG oslo_vmware.api [None req-4b5f69a0-fec4-435b-810a-e826d7caf87e tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] Task: {'id': task-1802464, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1591.765586] env[62519]: DEBUG oslo_vmware.api [None req-07784f7b-8852-4eb7-b75c-baa3510afa4f tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Task: {'id': task-1802461, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1591.814144] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1802462, 'name': CreateVM_Task, 'duration_secs': 0.505424} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1591.814144] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 80ef3fd4-b9ef-4fd2-a991-feec78a0c81d] Created VM on the ESX host {{(pid=62519) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1591.814632] env[62519]: DEBUG oslo_concurrency.lockutils [None req-040512d1-e180-4bd4-99a9-22e550a80706 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1591.814833] env[62519]: DEBUG oslo_concurrency.lockutils [None req-040512d1-e180-4bd4-99a9-22e550a80706 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Acquired lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1591.815223] env[62519]: DEBUG oslo_concurrency.lockutils [None req-040512d1-e180-4bd4-99a9-22e550a80706 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1591.815565] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6566aceb-b55e-470d-be1e-323ad3bf5396 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1591.823243] env[62519]: DEBUG oslo_vmware.api [None req-040512d1-e180-4bd4-99a9-22e550a80706 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Waiting for the task: (returnval){ [ 1591.823243] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]52b13ee6-d09e-eccf-e034-04e8fdd6ec68" [ 1591.823243] env[62519]: _type = "Task" [ 1591.823243] env[62519]: } to complete. 
{{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1591.834677] env[62519]: DEBUG oslo_vmware.api [None req-040512d1-e180-4bd4-99a9-22e550a80706 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52b13ee6-d09e-eccf-e034-04e8fdd6ec68, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1591.845133] env[62519]: DEBUG oslo_vmware.api [None req-4e8faa11-5a84-4b80-9100-d3b897a22912 tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] Task: {'id': task-1802459, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.634846} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1591.845472] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-4e8faa11-5a84-4b80-9100-d3b897a22912 tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] Deleted the datastore file {{(pid=62519) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1591.845704] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-4e8faa11-5a84-4b80-9100-d3b897a22912 tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] [instance: 2b68e95a-1d93-43ee-b0a6-996c9042f5c7] Deleted contents of the VM from datastore datastore1 {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1591.845943] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-4e8faa11-5a84-4b80-9100-d3b897a22912 tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] [instance: 2b68e95a-1d93-43ee-b0a6-996c9042f5c7] Instance destroyed {{(pid=62519) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1591.846170] env[62519]: INFO nova.compute.manager [None req-4e8faa11-5a84-4b80-9100-d3b897a22912 tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] [instance: 2b68e95a-1d93-43ee-b0a6-996c9042f5c7] Took 1.66 seconds to destroy the instance on the hypervisor. [ 1591.846456] env[62519]: DEBUG oslo.service.loopingcall [None req-4e8faa11-5a84-4b80-9100-d3b897a22912 tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62519) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1591.846694] env[62519]: DEBUG nova.compute.manager [-] [instance: 2b68e95a-1d93-43ee-b0a6-996c9042f5c7] Deallocating network for instance {{(pid=62519) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2297}} [ 1591.846876] env[62519]: DEBUG nova.network.neutron [-] [instance: 2b68e95a-1d93-43ee-b0a6-996c9042f5c7] deallocate_for_instance() {{(pid=62519) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1591.910163] env[62519]: DEBUG oslo_concurrency.lockutils [None req-a05ef864-7b2b-4646-a83f-596c8b3440a9 tempest-ServersWithSpecificFlavorTestJSON-2102225660 tempest-ServersWithSpecificFlavorTestJSON-2102225660-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.833s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1591.910551] env[62519]: DEBUG nova.compute.manager [None req-a05ef864-7b2b-4646-a83f-596c8b3440a9 tempest-ServersWithSpecificFlavorTestJSON-2102225660 tempest-ServersWithSpecificFlavorTestJSON-2102225660-project-member] [instance: f0925a44-c15b-4415-99bc-1b2366292fe4] Start building networks asynchronously for instance. {{(pid=62519) _build_resources /opt/stack/nova/nova/compute/manager.py:2838}} [ 1591.916118] env[62519]: DEBUG oslo_concurrency.lockutils [None req-428b10c4-d0cd-466c-a6ce-0e461fcf51cf tempest-InstanceActionsTestJSON-366970633 tempest-InstanceActionsTestJSON-366970633-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 32.314s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1591.916359] env[62519]: DEBUG nova.objects.instance [None req-428b10c4-d0cd-466c-a6ce-0e461fcf51cf tempest-InstanceActionsTestJSON-366970633 tempest-InstanceActionsTestJSON-366970633-project-member] Lazy-loading 'resources' on Instance uuid f19c860f-736a-4783-8ef5-8262040e53a3 {{(pid=62519) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1592.182407] env[62519]: DEBUG oslo_vmware.api [None req-4b5f69a0-fec4-435b-810a-e826d7caf87e tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] Task: {'id': task-1802464, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1592.243642] env[62519]: DEBUG nova.network.neutron [req-54bc3903-4c00-46c8-8dd4-6bab77b83a58 req-07416d67-0a74-4038-babe-0194f81034f4 service nova] [instance: 80ef3fd4-b9ef-4fd2-a991-feec78a0c81d] Updated VIF entry in instance network info cache for port 3dd85b46-bf0d-4e78-8975-e0c0cb8b29d8. 
{{(pid=62519) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1592.243642] env[62519]: DEBUG nova.network.neutron [req-54bc3903-4c00-46c8-8dd4-6bab77b83a58 req-07416d67-0a74-4038-babe-0194f81034f4 service nova] [instance: 80ef3fd4-b9ef-4fd2-a991-feec78a0c81d] Updating instance_info_cache with network_info: [{"id": "3dd85b46-bf0d-4e78-8975-e0c0cb8b29d8", "address": "fa:16:3e:b4:eb:90", "network": {"id": "ddacabe4-9723-4a0b-9642-4c3ca54cd05f", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1355796152-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "549cc35f5ff249f6bf22c67872883db0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bc1e16db-ad3b-4b7f-ab64-4609c87abac0", "external-id": "nsx-vlan-transportzone-500", "segmentation_id": 500, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3dd85b46-bf", "ovs_interfaceid": "3dd85b46-bf0d-4e78-8975-e0c0cb8b29d8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1592.264385] env[62519]: DEBUG oslo_vmware.api [None req-07784f7b-8852-4eb7-b75c-baa3510afa4f tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Task: {'id': task-1802461, 'name': PowerOnVM_Task, 'duration_secs': 0.666123} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1592.265912] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-07784f7b-8852-4eb7-b75c-baa3510afa4f tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] [instance: dac173ff-1807-405f-a59c-bb2efef62a0c] Powered on the VM {{(pid=62519) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1592.265912] env[62519]: INFO nova.compute.manager [None req-07784f7b-8852-4eb7-b75c-baa3510afa4f tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] [instance: dac173ff-1807-405f-a59c-bb2efef62a0c] Took 11.48 seconds to spawn the instance on the hypervisor. 
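Many of the surrounding entries are oslo.concurrency lock bookkeeping: "Acquiring lock ... by ...", "Lock ... acquired ... waited Ns", and later '"released" ... held Ns', covering per-instance locks, the compute_resources lock, and the datastore image-cache lock. The sketch below shows the two usual lockutils idioms that produce such lines, assuming oslo.concurrency is installed; the lock names and guarded functions are illustrative placeholders, not Nova's actual code.

from oslo_concurrency import lockutils


@lockutils.synchronized('compute_resources')
def update_usage():
    # Resource-tracker style critical section: only one thread at a time may
    # mutate the host's claimed CPU/RAM/disk totals.
    pass


def terminate_instance(instance_uuid):
    # Per-instance serialization, analogous to the 'do_terminate_instance'
    # lock entries in the log.
    with lockutils.lock(instance_uuid):
        update_usage()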
[ 1592.266104] env[62519]: DEBUG nova.compute.manager [None req-07784f7b-8852-4eb7-b75c-baa3510afa4f tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] [instance: dac173ff-1807-405f-a59c-bb2efef62a0c] Checking state {{(pid=62519) _get_power_state /opt/stack/nova/nova/compute/manager.py:1799}} [ 1592.266948] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d91ad97-132a-426e-bd91-325c91b83c5f {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1592.335899] env[62519]: DEBUG oslo_vmware.api [None req-040512d1-e180-4bd4-99a9-22e550a80706 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52b13ee6-d09e-eccf-e034-04e8fdd6ec68, 'name': SearchDatastore_Task, 'duration_secs': 0.026205} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1592.336264] env[62519]: DEBUG oslo_concurrency.lockutils [None req-040512d1-e180-4bd4-99a9-22e550a80706 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Releasing lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1592.336523] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-040512d1-e180-4bd4-99a9-22e550a80706 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] [instance: 80ef3fd4-b9ef-4fd2-a991-feec78a0c81d] Processing image 15793716-f1d9-4a86-9030-717adf498693 {{(pid=62519) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1592.336814] env[62519]: DEBUG oslo_concurrency.lockutils [None req-040512d1-e180-4bd4-99a9-22e550a80706 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1592.337086] env[62519]: DEBUG oslo_concurrency.lockutils [None req-040512d1-e180-4bd4-99a9-22e550a80706 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Acquired lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1592.337282] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-040512d1-e180-4bd4-99a9-22e550a80706 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62519) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1592.337937] env[62519]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-36113b58-df35-4dc1-bff0-50a520a93299 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1592.420066] env[62519]: DEBUG nova.compute.utils [None req-a05ef864-7b2b-4646-a83f-596c8b3440a9 tempest-ServersWithSpecificFlavorTestJSON-2102225660 
tempest-ServersWithSpecificFlavorTestJSON-2102225660-project-member] Using /dev/sd instead of None {{(pid=62519) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1592.425045] env[62519]: DEBUG nova.compute.manager [None req-a05ef864-7b2b-4646-a83f-596c8b3440a9 tempest-ServersWithSpecificFlavorTestJSON-2102225660 tempest-ServersWithSpecificFlavorTestJSON-2102225660-project-member] [instance: f0925a44-c15b-4415-99bc-1b2366292fe4] Allocating IP information in the background. {{(pid=62519) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1989}} [ 1592.425135] env[62519]: DEBUG nova.network.neutron [None req-a05ef864-7b2b-4646-a83f-596c8b3440a9 tempest-ServersWithSpecificFlavorTestJSON-2102225660 tempest-ServersWithSpecificFlavorTestJSON-2102225660-project-member] [instance: f0925a44-c15b-4415-99bc-1b2366292fe4] allocate_for_instance() {{(pid=62519) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1592.468873] env[62519]: DEBUG nova.network.neutron [None req-5ace482d-d654-44d6-95b3-9912a2e4b9f6 tempest-SecurityGroupsTestJSON-38868017 tempest-SecurityGroupsTestJSON-38868017-project-member] [instance: ad374dd9-a92d-4b76-9609-7562346e05a8] Updating instance_info_cache with network_info: [{"id": "8d20f83b-f706-40ea-bba7-461aba73113f", "address": "fa:16:3e:c3:bc:6c", "network": {"id": "9dc148fb-a4d7-47bd-b004-56a2552812eb", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-1607343584-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d044d408a8674580b0f5cd52ca6e756d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1f762954-6ca5-4da5-bf0a-5d31c51ec570", "external-id": "nsx-vlan-transportzone-930", "segmentation_id": 930, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8d20f83b-f7", "ovs_interfaceid": "8d20f83b-f706-40ea-bba7-461aba73113f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1592.474850] env[62519]: DEBUG nova.compute.manager [req-43462618-c388-4d21-8b0b-4cb19f8ed46e req-73a83b5c-4753-4d44-8ed4-9b401f194a2a service nova] [instance: 2b68e95a-1d93-43ee-b0a6-996c9042f5c7] Received event network-vif-deleted-0a18744e-471e-4942-8a22-59aa90a680af {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1592.474850] env[62519]: INFO nova.compute.manager [req-43462618-c388-4d21-8b0b-4cb19f8ed46e req-73a83b5c-4753-4d44-8ed4-9b401f194a2a service nova] [instance: 2b68e95a-1d93-43ee-b0a6-996c9042f5c7] Neutron deleted interface 0a18744e-471e-4942-8a22-59aa90a680af; detaching it from the instance and deleting it from the info cache [ 1592.474850] env[62519]: DEBUG nova.network.neutron [req-43462618-c388-4d21-8b0b-4cb19f8ed46e req-73a83b5c-4753-4d44-8ed4-9b401f194a2a service nova] [instance: 2b68e95a-1d93-43ee-b0a6-996c9042f5c7] Updating instance_info_cache with network_info: [] {{(pid=62519) update_instance_cache_with_nw_info 
/opt/stack/nova/nova/network/neutron.py:116}} [ 1592.496840] env[62519]: DEBUG nova.policy [None req-a05ef864-7b2b-4646-a83f-596c8b3440a9 tempest-ServersWithSpecificFlavorTestJSON-2102225660 tempest-ServersWithSpecificFlavorTestJSON-2102225660-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '062e47ab0e39404b99924877d3900c7e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '8929cfd4892c4e399062483665500dd9', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62519) authorize /opt/stack/nova/nova/policy.py:192}} [ 1592.531691] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-040512d1-e180-4bd4-99a9-22e550a80706 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62519) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1592.531894] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-040512d1-e180-4bd4-99a9-22e550a80706 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62519) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1592.533124] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-815d17c5-8688-4d8b-8f4b-98461754bc45 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1592.539590] env[62519]: DEBUG oslo_vmware.api [None req-040512d1-e180-4bd4-99a9-22e550a80706 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Waiting for the task: (returnval){ [ 1592.539590] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]52d297d7-2bfb-d596-b758-53213396b1c1" [ 1592.539590] env[62519]: _type = "Task" [ 1592.539590] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1592.555798] env[62519]: DEBUG oslo_vmware.api [None req-040512d1-e180-4bd4-99a9-22e550a80706 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52d297d7-2bfb-d596-b758-53213396b1c1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1592.649201] env[62519]: DEBUG nova.network.neutron [None req-7f6d51ef-3fb2-4405-8ddb-ee0d7b872e58 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] [instance: 302edcd3-bd6e-41da-b731-4d4c1bb5c3c1] Successfully updated port: b3cdcca9-9468-453b-894e-326f90b3cb34 {{(pid=62519) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1592.682869] env[62519]: DEBUG oslo_vmware.api [None req-4b5f69a0-fec4-435b-810a-e826d7caf87e tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] Task: {'id': task-1802464, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.932228} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1592.682869] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-4b5f69a0-fec4-435b-810a-e826d7caf87e tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] Deleted the datastore file {{(pid=62519) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1592.682869] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-4b5f69a0-fec4-435b-810a-e826d7caf87e tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] [instance: 24cb49c8-b2ef-4ede-aea6-6e34081beca1] Deleted contents of the VM from datastore datastore1 {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1592.682869] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-4b5f69a0-fec4-435b-810a-e826d7caf87e tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] [instance: 24cb49c8-b2ef-4ede-aea6-6e34081beca1] Instance destroyed {{(pid=62519) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1592.682869] env[62519]: INFO nova.compute.manager [None req-4b5f69a0-fec4-435b-810a-e826d7caf87e tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] [instance: 24cb49c8-b2ef-4ede-aea6-6e34081beca1] Took 1.69 seconds to destroy the instance on the hypervisor. [ 1592.682869] env[62519]: DEBUG oslo.service.loopingcall [None req-4b5f69a0-fec4-435b-810a-e826d7caf87e tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62519) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1592.684293] env[62519]: DEBUG nova.compute.manager [-] [instance: 24cb49c8-b2ef-4ede-aea6-6e34081beca1] Deallocating network for instance {{(pid=62519) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2297}} [ 1592.684293] env[62519]: DEBUG nova.network.neutron [-] [instance: 24cb49c8-b2ef-4ede-aea6-6e34081beca1] deallocate_for_instance() {{(pid=62519) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1592.754933] env[62519]: DEBUG oslo_concurrency.lockutils [req-54bc3903-4c00-46c8-8dd4-6bab77b83a58 req-07416d67-0a74-4038-babe-0194f81034f4 service nova] Releasing lock "refresh_cache-80ef3fd4-b9ef-4fd2-a991-feec78a0c81d" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1592.755392] env[62519]: DEBUG nova.network.neutron [-] [instance: 2b68e95a-1d93-43ee-b0a6-996c9042f5c7] Updating instance_info_cache with network_info: [] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1592.792759] env[62519]: INFO nova.compute.manager [None req-07784f7b-8852-4eb7-b75c-baa3510afa4f tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] [instance: dac173ff-1807-405f-a59c-bb2efef62a0c] Took 43.77 seconds to build instance. 
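The teardown traces for instances 2b68e95a-... and 24cb49c8-... above follow the same order each time: power off, unregister the VM, delete its datastore contents, then deallocate the Neutron ports. Below is a condensed, hedged sketch of that sequencing only; the step names echo the operations in the log, but the callables are no-op placeholders rather than the real vmops/ds_util/neutron functions.

def destroy_instance(steps, instance):
    """Run the teardown steps in the same order the log shows."""
    for name, step in steps:
        print('[instance: %s] %s' % (instance, name))
        step(instance)


if __name__ == '__main__':
    noop = lambda instance: None  # placeholder for the real driver calls
    destroy_instance(
        [('PowerOffVM_Task', noop),
         ('UnregisterVM', noop),
         ('DeleteDatastoreFile_Task', noop),
         ('deallocate_for_instance', noop)],
        '24cb49c8-b2ef-4ede-aea6-6e34081beca1')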
[ 1592.923717] env[62519]: DEBUG nova.compute.utils [None req-a05ef864-7b2b-4646-a83f-596c8b3440a9 tempest-ServersWithSpecificFlavorTestJSON-2102225660 tempest-ServersWithSpecificFlavorTestJSON-2102225660-project-member] Using /dev/sd instead of None {{(pid=62519) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1592.976520] env[62519]: DEBUG oslo_concurrency.lockutils [None req-5ace482d-d654-44d6-95b3-9912a2e4b9f6 tempest-SecurityGroupsTestJSON-38868017 tempest-SecurityGroupsTestJSON-38868017-project-member] Releasing lock "refresh_cache-ad374dd9-a92d-4b76-9609-7562346e05a8" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1592.980553] env[62519]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-bc395c2b-12f0-4359-9ad8-470026f727d1 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1592.994818] env[62519]: DEBUG oslo_vmware.rw_handles [None req-b8aad1ad-699a-4e07-8919-0f4cbeaeb95e tempest-ImagesOneServerTestJSON-1959164190 tempest-ImagesOneServerTestJSON-1959164190-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/524d7817-13b0-51b3-660b-e68448fe2431/disk-0.vmdk. {{(pid=62519) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1592.995890] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-efc3a335-28bb-4641-a141-464c43c9fefb {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1593.007857] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f394da8-f0b0-4cbe-9efc-a5e519131a9a {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1593.026898] env[62519]: DEBUG oslo_vmware.rw_handles [None req-b8aad1ad-699a-4e07-8919-0f4cbeaeb95e tempest-ImagesOneServerTestJSON-1959164190 tempest-ImagesOneServerTestJSON-1959164190-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/524d7817-13b0-51b3-660b-e68448fe2431/disk-0.vmdk is in state: ready. {{(pid=62519) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1593.027101] env[62519]: ERROR oslo_vmware.rw_handles [None req-b8aad1ad-699a-4e07-8919-0f4cbeaeb95e tempest-ImagesOneServerTestJSON-1959164190 tempest-ImagesOneServerTestJSON-1959164190-project-member] Aborting lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/524d7817-13b0-51b3-660b-e68448fe2431/disk-0.vmdk due to incomplete transfer. [ 1593.027339] env[62519]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-9b2fbfd8-de93-4230-b876-ac6b02bbc565 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1593.035144] env[62519]: DEBUG oslo_vmware.rw_handles [None req-b8aad1ad-699a-4e07-8919-0f4cbeaeb95e tempest-ImagesOneServerTestJSON-1959164190 tempest-ImagesOneServerTestJSON-1959164190-project-member] Closed VMDK read handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/524d7817-13b0-51b3-660b-e68448fe2431/disk-0.vmdk. 
{{(pid=62519) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1593.035837] env[62519]: DEBUG nova.virt.vmwareapi.images [None req-b8aad1ad-699a-4e07-8919-0f4cbeaeb95e tempest-ImagesOneServerTestJSON-1959164190 tempest-ImagesOneServerTestJSON-1959164190-project-member] [instance: 4e3dee19-b99a-4257-88da-1b0531e2c0f9] Uploaded image 544d45fd-dca6-4eff-834b-684a6a46bb55 to the Glance image server {{(pid=62519) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1593.038714] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-b8aad1ad-699a-4e07-8919-0f4cbeaeb95e tempest-ImagesOneServerTestJSON-1959164190 tempest-ImagesOneServerTestJSON-1959164190-project-member] [instance: 4e3dee19-b99a-4257-88da-1b0531e2c0f9] Destroying the VM {{(pid=62519) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1593.057564] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-ff442162-1c7f-461b-ba7e-9d362963844e {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1593.060396] env[62519]: DEBUG nova.compute.manager [req-43462618-c388-4d21-8b0b-4cb19f8ed46e req-73a83b5c-4753-4d44-8ed4-9b401f194a2a service nova] [instance: 2b68e95a-1d93-43ee-b0a6-996c9042f5c7] Detach interface failed, port_id=0a18744e-471e-4942-8a22-59aa90a680af, reason: Instance 2b68e95a-1d93-43ee-b0a6-996c9042f5c7 could not be found. {{(pid=62519) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11455}} [ 1593.071639] env[62519]: DEBUG oslo_vmware.api [None req-040512d1-e180-4bd4-99a9-22e550a80706 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52d297d7-2bfb-d596-b758-53213396b1c1, 'name': SearchDatastore_Task, 'duration_secs': 0.022423} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1593.076192] env[62519]: DEBUG oslo_vmware.api [None req-b8aad1ad-699a-4e07-8919-0f4cbeaeb95e tempest-ImagesOneServerTestJSON-1959164190 tempest-ImagesOneServerTestJSON-1959164190-project-member] Waiting for the task: (returnval){ [ 1593.076192] env[62519]: value = "task-1802465" [ 1593.076192] env[62519]: _type = "Task" [ 1593.076192] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1593.076740] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-050428bc-6d92-4819-ab05-36ac1631f16b {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1593.087193] env[62519]: DEBUG oslo_vmware.api [None req-040512d1-e180-4bd4-99a9-22e550a80706 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Waiting for the task: (returnval){ [ 1593.087193] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]525c9c35-f047-b96c-1a99-af78249107d5" [ 1593.087193] env[62519]: _type = "Task" [ 1593.087193] env[62519]: } to complete. 
{{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1593.091235] env[62519]: DEBUG oslo_vmware.api [None req-b8aad1ad-699a-4e07-8919-0f4cbeaeb95e tempest-ImagesOneServerTestJSON-1959164190 tempest-ImagesOneServerTestJSON-1959164190-project-member] Task: {'id': task-1802465, 'name': Destroy_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1593.103675] env[62519]: DEBUG oslo_vmware.api [None req-040512d1-e180-4bd4-99a9-22e550a80706 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]525c9c35-f047-b96c-1a99-af78249107d5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1593.116518] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71213a25-882e-438a-a85c-9b103094a232 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1593.121590] env[62519]: DEBUG nova.compute.manager [req-8e2b56bf-9a6f-4be5-99df-756b7039aa71 req-baf0caf2-8aad-4a6c-ae62-9b6c0f26a9f7 service nova] [instance: 302edcd3-bd6e-41da-b731-4d4c1bb5c3c1] Received event network-vif-plugged-b3cdcca9-9468-453b-894e-326f90b3cb34 {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1593.121966] env[62519]: DEBUG oslo_concurrency.lockutils [req-8e2b56bf-9a6f-4be5-99df-756b7039aa71 req-baf0caf2-8aad-4a6c-ae62-9b6c0f26a9f7 service nova] Acquiring lock "302edcd3-bd6e-41da-b731-4d4c1bb5c3c1-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1593.122268] env[62519]: DEBUG oslo_concurrency.lockutils [req-8e2b56bf-9a6f-4be5-99df-756b7039aa71 req-baf0caf2-8aad-4a6c-ae62-9b6c0f26a9f7 service nova] Lock "302edcd3-bd6e-41da-b731-4d4c1bb5c3c1-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1593.122674] env[62519]: DEBUG oslo_concurrency.lockutils [req-8e2b56bf-9a6f-4be5-99df-756b7039aa71 req-baf0caf2-8aad-4a6c-ae62-9b6c0f26a9f7 service nova] Lock "302edcd3-bd6e-41da-b731-4d4c1bb5c3c1-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1593.122674] env[62519]: DEBUG nova.compute.manager [req-8e2b56bf-9a6f-4be5-99df-756b7039aa71 req-baf0caf2-8aad-4a6c-ae62-9b6c0f26a9f7 service nova] [instance: 302edcd3-bd6e-41da-b731-4d4c1bb5c3c1] No waiting events found dispatching network-vif-plugged-b3cdcca9-9468-453b-894e-326f90b3cb34 {{(pid=62519) pop_instance_event /opt/stack/nova/nova/compute/manager.py:323}} [ 1593.122674] env[62519]: WARNING nova.compute.manager [req-8e2b56bf-9a6f-4be5-99df-756b7039aa71 req-baf0caf2-8aad-4a6c-ae62-9b6c0f26a9f7 service nova] [instance: 302edcd3-bd6e-41da-b731-4d4c1bb5c3c1] Received unexpected event network-vif-plugged-b3cdcca9-9468-453b-894e-326f90b3cb34 for instance with vm_state building and task_state spawning. 
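The repeated lock triads in this trace (Acquiring lock, acquired after waiting N s, released after being held N s) come from oslo.concurrency's lockutils: entries tagged with "by <function>" and the location "inner" are produced by the synchronized decorator, while the plain entries tagged "lock" come from the lock() context manager, as with the refresh_cache and per-instance -events locks just above. A small sketch of both entry points; the lock names are copied from the log, but the function names and empty bodies are illustrative placeholders.

    from oslo_concurrency import lockutils

    # Decorator form: every call serializes on the named lock; entering and
    # leaving the wrapper emits the "by <function>" acquire/release lines.
    @lockutils.synchronized('302edcd3-bd6e-41da-b731-4d4c1bb5c3c1-events')
    def pop_instance_event():
        pass  # placeholder body

    # Context-manager form for an ad-hoc critical section; emits the plain
    # Acquiring/Releasing lines.
    def refresh_network_cache():
        with lockutils.lock('refresh_cache-302edcd3-bd6e-41da-b731-4d4c1bb5c3c1'):
            pass  # placeholder body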
[ 1593.128640] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-269fef0f-d6c8-4ed5-97d8-834c3fdd9993 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1593.162372] env[62519]: DEBUG oslo_concurrency.lockutils [None req-7f6d51ef-3fb2-4405-8ddb-ee0d7b872e58 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Acquiring lock "refresh_cache-302edcd3-bd6e-41da-b731-4d4c1bb5c3c1" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1593.162558] env[62519]: DEBUG oslo_concurrency.lockutils [None req-7f6d51ef-3fb2-4405-8ddb-ee0d7b872e58 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Acquired lock "refresh_cache-302edcd3-bd6e-41da-b731-4d4c1bb5c3c1" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1593.162702] env[62519]: DEBUG nova.network.neutron [None req-7f6d51ef-3fb2-4405-8ddb-ee0d7b872e58 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] [instance: 302edcd3-bd6e-41da-b731-4d4c1bb5c3c1] Building network info cache for instance {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1593.164773] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7cfe0c1-beb3-4b2b-8bcc-b71be6c38789 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1593.173540] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8b27465-6e89-4c17-8234-1f4014eb5155 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1593.188683] env[62519]: DEBUG nova.compute.provider_tree [None req-428b10c4-d0cd-466c-a6ce-0e461fcf51cf tempest-InstanceActionsTestJSON-366970633 tempest-InstanceActionsTestJSON-366970633-project-member] Inventory has not changed in ProviderTree for provider: f8ca0d98-9158-4b85-ae0e-b106f966dd44 {{(pid=62519) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1593.191037] env[62519]: DEBUG nova.network.neutron [None req-a05ef864-7b2b-4646-a83f-596c8b3440a9 tempest-ServersWithSpecificFlavorTestJSON-2102225660 tempest-ServersWithSpecificFlavorTestJSON-2102225660-project-member] [instance: f0925a44-c15b-4415-99bc-1b2366292fe4] Successfully created port: 4406b619-4b6f-453e-a6e6-7f9f83d1b8ff {{(pid=62519) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1593.260127] env[62519]: INFO nova.compute.manager [-] [instance: 2b68e95a-1d93-43ee-b0a6-996c9042f5c7] Took 1.41 seconds to deallocate network for instance. [ 1593.429958] env[62519]: DEBUG nova.compute.manager [None req-a05ef864-7b2b-4646-a83f-596c8b3440a9 tempest-ServersWithSpecificFlavorTestJSON-2102225660 tempest-ServersWithSpecificFlavorTestJSON-2102225660-project-member] [instance: f0925a44-c15b-4415-99bc-1b2366292fe4] Start building block device mappings for instance. 
{{(pid=62519) _build_resources /opt/stack/nova/nova/compute/manager.py:2873}} [ 1593.484839] env[62519]: DEBUG nova.compute.manager [None req-5ace482d-d654-44d6-95b3-9912a2e4b9f6 tempest-SecurityGroupsTestJSON-38868017 tempest-SecurityGroupsTestJSON-38868017-project-member] [instance: ad374dd9-a92d-4b76-9609-7562346e05a8] Checking state {{(pid=62519) _get_power_state /opt/stack/nova/nova/compute/manager.py:1799}} [ 1593.485785] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55f10bf5-a13c-4528-a069-5320f805b699 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1593.525652] env[62519]: DEBUG nova.network.neutron [-] [instance: 24cb49c8-b2ef-4ede-aea6-6e34081beca1] Updating instance_info_cache with network_info: [] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1593.591478] env[62519]: DEBUG oslo_vmware.api [None req-b8aad1ad-699a-4e07-8919-0f4cbeaeb95e tempest-ImagesOneServerTestJSON-1959164190 tempest-ImagesOneServerTestJSON-1959164190-project-member] Task: {'id': task-1802465, 'name': Destroy_Task, 'duration_secs': 0.521184} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1593.594594] env[62519]: INFO nova.virt.vmwareapi.vm_util [None req-b8aad1ad-699a-4e07-8919-0f4cbeaeb95e tempest-ImagesOneServerTestJSON-1959164190 tempest-ImagesOneServerTestJSON-1959164190-project-member] [instance: 4e3dee19-b99a-4257-88da-1b0531e2c0f9] Destroyed the VM [ 1593.594993] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-b8aad1ad-699a-4e07-8919-0f4cbeaeb95e tempest-ImagesOneServerTestJSON-1959164190 tempest-ImagesOneServerTestJSON-1959164190-project-member] [instance: 4e3dee19-b99a-4257-88da-1b0531e2c0f9] Deleting Snapshot of the VM instance {{(pid=62519) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1593.595271] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-0a2f8920-0100-467d-9e61-b4809de563f2 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1593.601469] env[62519]: DEBUG oslo_vmware.api [None req-040512d1-e180-4bd4-99a9-22e550a80706 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]525c9c35-f047-b96c-1a99-af78249107d5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1593.602689] env[62519]: DEBUG oslo_vmware.api [None req-b8aad1ad-699a-4e07-8919-0f4cbeaeb95e tempest-ImagesOneServerTestJSON-1959164190 tempest-ImagesOneServerTestJSON-1959164190-project-member] Waiting for the task: (returnval){ [ 1593.602689] env[62519]: value = "task-1802466" [ 1593.602689] env[62519]: _type = "Task" [ 1593.602689] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1593.609948] env[62519]: DEBUG oslo_vmware.api [None req-b8aad1ad-699a-4e07-8919-0f4cbeaeb95e tempest-ImagesOneServerTestJSON-1959164190 tempest-ImagesOneServerTestJSON-1959164190-project-member] Task: {'id': task-1802466, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1593.697155] env[62519]: DEBUG nova.scheduler.client.report [None req-428b10c4-d0cd-466c-a6ce-0e461fcf51cf tempest-InstanceActionsTestJSON-366970633 tempest-InstanceActionsTestJSON-366970633-project-member] Inventory has not changed for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 157, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1593.710873] env[62519]: DEBUG nova.network.neutron [None req-7f6d51ef-3fb2-4405-8ddb-ee0d7b872e58 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] [instance: 302edcd3-bd6e-41da-b731-4d4c1bb5c3c1] Instance cache missing network info. {{(pid=62519) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1593.769262] env[62519]: DEBUG oslo_concurrency.lockutils [None req-4e8faa11-5a84-4b80-9100-d3b897a22912 tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1593.903948] env[62519]: DEBUG oslo_concurrency.lockutils [None req-24267348-edd5-4ceb-9bdc-8a9f43b89725 tempest-ServerTagsTestJSON-1513198723 tempest-ServerTagsTestJSON-1513198723-project-member] Acquiring lock "8659f63a-5df9-4ff8-84dd-0722026dc820" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1593.903948] env[62519]: DEBUG oslo_concurrency.lockutils [None req-24267348-edd5-4ceb-9bdc-8a9f43b89725 tempest-ServerTagsTestJSON-1513198723 tempest-ServerTagsTestJSON-1513198723-project-member] Lock "8659f63a-5df9-4ff8-84dd-0722026dc820" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.002s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1593.947237] env[62519]: DEBUG nova.network.neutron [None req-7f6d51ef-3fb2-4405-8ddb-ee0d7b872e58 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] [instance: 302edcd3-bd6e-41da-b731-4d4c1bb5c3c1] Updating instance_info_cache with network_info: [{"id": "b3cdcca9-9468-453b-894e-326f90b3cb34", "address": "fa:16:3e:12:25:b1", "network": {"id": "14e63db8-c42b-42e5-bd96-cab60c163758", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1328688204-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "df6727c290724a8ebef5188c77e91399", "mtu": 8950, "physical_network": 
"default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2be3fdb5-359e-43bd-8c20-2ff00e81db55", "external-id": "nsx-vlan-transportzone-986", "segmentation_id": 986, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb3cdcca9-94", "ovs_interfaceid": "b3cdcca9-9468-453b-894e-326f90b3cb34", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1594.031210] env[62519]: INFO nova.compute.manager [-] [instance: 24cb49c8-b2ef-4ede-aea6-6e34081beca1] Took 1.35 seconds to deallocate network for instance. [ 1594.108642] env[62519]: DEBUG oslo_vmware.api [None req-040512d1-e180-4bd4-99a9-22e550a80706 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]525c9c35-f047-b96c-1a99-af78249107d5, 'name': SearchDatastore_Task, 'duration_secs': 0.846602} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1594.113344] env[62519]: DEBUG oslo_concurrency.lockutils [None req-040512d1-e180-4bd4-99a9-22e550a80706 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Releasing lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1594.113483] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-040512d1-e180-4bd4-99a9-22e550a80706 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk to [datastore1] 80ef3fd4-b9ef-4fd2-a991-feec78a0c81d/80ef3fd4-b9ef-4fd2-a991-feec78a0c81d.vmdk {{(pid=62519) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1594.113836] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-3a0bc38c-4a25-4a2a-80ef-882667569203 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1594.125381] env[62519]: DEBUG oslo_vmware.api [None req-b8aad1ad-699a-4e07-8919-0f4cbeaeb95e tempest-ImagesOneServerTestJSON-1959164190 tempest-ImagesOneServerTestJSON-1959164190-project-member] Task: {'id': task-1802466, 'name': RemoveSnapshot_Task} progress is 30%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1594.127837] env[62519]: DEBUG oslo_vmware.api [None req-040512d1-e180-4bd4-99a9-22e550a80706 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Waiting for the task: (returnval){ [ 1594.127837] env[62519]: value = "task-1802467" [ 1594.127837] env[62519]: _type = "Task" [ 1594.127837] env[62519]: } to complete. 
{{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1594.134750] env[62519]: DEBUG oslo_concurrency.lockutils [None req-0dc8c13a-a465-44ac-8355-c254139b86fe tempest-ServersAdminTestJSON-988532011 tempest-ServersAdminTestJSON-988532011-project-admin] Acquiring lock "refresh_cache-dac173ff-1807-405f-a59c-bb2efef62a0c" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1594.134750] env[62519]: DEBUG oslo_concurrency.lockutils [None req-0dc8c13a-a465-44ac-8355-c254139b86fe tempest-ServersAdminTestJSON-988532011 tempest-ServersAdminTestJSON-988532011-project-admin] Acquired lock "refresh_cache-dac173ff-1807-405f-a59c-bb2efef62a0c" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1594.135126] env[62519]: DEBUG nova.network.neutron [None req-0dc8c13a-a465-44ac-8355-c254139b86fe tempest-ServersAdminTestJSON-988532011 tempest-ServersAdminTestJSON-988532011-project-admin] [instance: dac173ff-1807-405f-a59c-bb2efef62a0c] Building network info cache for instance {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1594.144097] env[62519]: DEBUG oslo_vmware.api [None req-040512d1-e180-4bd4-99a9-22e550a80706 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Task: {'id': task-1802467, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1594.205813] env[62519]: DEBUG oslo_concurrency.lockutils [None req-428b10c4-d0cd-466c-a6ce-0e461fcf51cf tempest-InstanceActionsTestJSON-366970633 tempest-InstanceActionsTestJSON-366970633-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.288s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1594.208028] env[62519]: DEBUG oslo_concurrency.lockutils [None req-77454b93-3f15-4824-8029-76272dd44c6c tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 30.292s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1594.208752] env[62519]: INFO nova.compute.claims [None req-77454b93-3f15-4824-8029-76272dd44c6c tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] [instance: 8b20b91d-d4e7-4ec2-88ec-76b444a8d8a8] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1594.237248] env[62519]: INFO nova.scheduler.client.report [None req-428b10c4-d0cd-466c-a6ce-0e461fcf51cf tempest-InstanceActionsTestJSON-366970633 tempest-InstanceActionsTestJSON-366970633-project-member] Deleted allocations for instance f19c860f-736a-4783-8ef5-8262040e53a3 [ 1594.303698] env[62519]: DEBUG oslo_concurrency.lockutils [None req-07784f7b-8852-4eb7-b75c-baa3510afa4f tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Lock "dac173ff-1807-405f-a59c-bb2efef62a0c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 89.538s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1594.443317] env[62519]: DEBUG 
nova.compute.manager [None req-a05ef864-7b2b-4646-a83f-596c8b3440a9 tempest-ServersWithSpecificFlavorTestJSON-2102225660 tempest-ServersWithSpecificFlavorTestJSON-2102225660-project-member] [instance: f0925a44-c15b-4415-99bc-1b2366292fe4] Start spawning the instance on the hypervisor. {{(pid=62519) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2647}} [ 1594.451053] env[62519]: DEBUG oslo_concurrency.lockutils [None req-7f6d51ef-3fb2-4405-8ddb-ee0d7b872e58 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Releasing lock "refresh_cache-302edcd3-bd6e-41da-b731-4d4c1bb5c3c1" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1594.451514] env[62519]: DEBUG nova.compute.manager [None req-7f6d51ef-3fb2-4405-8ddb-ee0d7b872e58 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] [instance: 302edcd3-bd6e-41da-b731-4d4c1bb5c3c1] Instance network_info: |[{"id": "b3cdcca9-9468-453b-894e-326f90b3cb34", "address": "fa:16:3e:12:25:b1", "network": {"id": "14e63db8-c42b-42e5-bd96-cab60c163758", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1328688204-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "df6727c290724a8ebef5188c77e91399", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2be3fdb5-359e-43bd-8c20-2ff00e81db55", "external-id": "nsx-vlan-transportzone-986", "segmentation_id": 986, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb3cdcca9-94", "ovs_interfaceid": "b3cdcca9-9468-453b-894e-326f90b3cb34", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62519) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2004}} [ 1594.451848] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-7f6d51ef-3fb2-4405-8ddb-ee0d7b872e58 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] [instance: 302edcd3-bd6e-41da-b731-4d4c1bb5c3c1] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:12:25:b1', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '2be3fdb5-359e-43bd-8c20-2ff00e81db55', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'b3cdcca9-9468-453b-894e-326f90b3cb34', 'vif_model': 'vmxnet3'}] {{(pid=62519) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1594.465780] env[62519]: DEBUG oslo.service.loopingcall [None req-7f6d51ef-3fb2-4405-8ddb-ee0d7b872e58 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62519) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1594.467023] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 302edcd3-bd6e-41da-b731-4d4c1bb5c3c1] Creating VM on the ESX host {{(pid=62519) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1594.467449] env[62519]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-368325c2-9860-4219-923d-37a9854f887e {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1594.497194] env[62519]: DEBUG nova.virt.hardware [None req-a05ef864-7b2b-4646-a83f-596c8b3440a9 tempest-ServersWithSpecificFlavorTestJSON-2102225660 tempest-ServersWithSpecificFlavorTestJSON-2102225660-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T08:11:09Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=1,extra_specs={hw_rng:allowed='True'},flavorid='1142907630',id=21,is_public=True,memory_mb=192,name='tempest-flavor_with_ephemeral_1-499517051',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T07:55:55Z,direct_url=,disk_format='vmdk',id=15793716-f1d9-4a86-9030-717adf498693,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4069337684d348e0a1ab4eb6a1a2b14d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T07:55:56Z,virtual_size=,visibility=), allow threads: False {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1594.497194] env[62519]: DEBUG nova.virt.hardware [None req-a05ef864-7b2b-4646-a83f-596c8b3440a9 tempest-ServersWithSpecificFlavorTestJSON-2102225660 tempest-ServersWithSpecificFlavorTestJSON-2102225660-project-member] Flavor limits 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1594.497194] env[62519]: DEBUG nova.virt.hardware [None req-a05ef864-7b2b-4646-a83f-596c8b3440a9 tempest-ServersWithSpecificFlavorTestJSON-2102225660 tempest-ServersWithSpecificFlavorTestJSON-2102225660-project-member] Image limits 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1594.497194] env[62519]: DEBUG nova.virt.hardware [None req-a05ef864-7b2b-4646-a83f-596c8b3440a9 tempest-ServersWithSpecificFlavorTestJSON-2102225660 tempest-ServersWithSpecificFlavorTestJSON-2102225660-project-member] Flavor pref 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1594.497194] env[62519]: DEBUG nova.virt.hardware [None req-a05ef864-7b2b-4646-a83f-596c8b3440a9 tempest-ServersWithSpecificFlavorTestJSON-2102225660 tempest-ServersWithSpecificFlavorTestJSON-2102225660-project-member] Image pref 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1594.497634] env[62519]: DEBUG nova.virt.hardware [None req-a05ef864-7b2b-4646-a83f-596c8b3440a9 tempest-ServersWithSpecificFlavorTestJSON-2102225660 tempest-ServersWithSpecificFlavorTestJSON-2102225660-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1594.498643] env[62519]: DEBUG nova.virt.hardware [None req-a05ef864-7b2b-4646-a83f-596c8b3440a9 
tempest-ServersWithSpecificFlavorTestJSON-2102225660 tempest-ServersWithSpecificFlavorTestJSON-2102225660-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1594.498948] env[62519]: DEBUG nova.virt.hardware [None req-a05ef864-7b2b-4646-a83f-596c8b3440a9 tempest-ServersWithSpecificFlavorTestJSON-2102225660 tempest-ServersWithSpecificFlavorTestJSON-2102225660-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62519) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1594.499276] env[62519]: DEBUG nova.virt.hardware [None req-a05ef864-7b2b-4646-a83f-596c8b3440a9 tempest-ServersWithSpecificFlavorTestJSON-2102225660 tempest-ServersWithSpecificFlavorTestJSON-2102225660-project-member] Got 1 possible topologies {{(pid=62519) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1594.499567] env[62519]: DEBUG nova.virt.hardware [None req-a05ef864-7b2b-4646-a83f-596c8b3440a9 tempest-ServersWithSpecificFlavorTestJSON-2102225660 tempest-ServersWithSpecificFlavorTestJSON-2102225660-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1594.499891] env[62519]: DEBUG nova.virt.hardware [None req-a05ef864-7b2b-4646-a83f-596c8b3440a9 tempest-ServersWithSpecificFlavorTestJSON-2102225660 tempest-ServersWithSpecificFlavorTestJSON-2102225660-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1594.500934] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd4a76ef-094b-400a-8fae-e5fdad0eec86 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1594.506045] env[62519]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1594.506045] env[62519]: value = "task-1802468" [ 1594.506045] env[62519]: _type = "Task" [ 1594.506045] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1594.507073] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ef5015c-a9ca-4184-a03e-9c320b63ee2d {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1594.521038] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-495342c9-d05a-4841-b314-d77a5865753d {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1594.531056] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-5ace482d-d654-44d6-95b3-9912a2e4b9f6 tempest-SecurityGroupsTestJSON-38868017 tempest-SecurityGroupsTestJSON-38868017-project-member] [instance: ad374dd9-a92d-4b76-9609-7562346e05a8] Doing hard reboot of VM {{(pid=62519) reboot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1063}} [ 1594.531506] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1802468, 'name': CreateVM_Task} progress is 10%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1594.532277] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ResetVM_Task with opID=oslo.vmware-0eda8f77-fc93-46ce-a4ba-22356fe511cf {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1594.544941] env[62519]: DEBUG oslo_concurrency.lockutils [None req-4b5f69a0-fec4-435b-810a-e826d7caf87e tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1594.550344] env[62519]: DEBUG oslo_vmware.api [None req-5ace482d-d654-44d6-95b3-9912a2e4b9f6 tempest-SecurityGroupsTestJSON-38868017 tempest-SecurityGroupsTestJSON-38868017-project-member] Waiting for the task: (returnval){ [ 1594.550344] env[62519]: value = "task-1802469" [ 1594.550344] env[62519]: _type = "Task" [ 1594.550344] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1594.558953] env[62519]: DEBUG oslo_vmware.api [None req-5ace482d-d654-44d6-95b3-9912a2e4b9f6 tempest-SecurityGroupsTestJSON-38868017 tempest-SecurityGroupsTestJSON-38868017-project-member] Task: {'id': task-1802469, 'name': ResetVM_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1594.620090] env[62519]: DEBUG oslo_vmware.api [None req-b8aad1ad-699a-4e07-8919-0f4cbeaeb95e tempest-ImagesOneServerTestJSON-1959164190 tempest-ImagesOneServerTestJSON-1959164190-project-member] Task: {'id': task-1802466, 'name': RemoveSnapshot_Task, 'duration_secs': 0.614569} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1594.620430] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-b8aad1ad-699a-4e07-8919-0f4cbeaeb95e tempest-ImagesOneServerTestJSON-1959164190 tempest-ImagesOneServerTestJSON-1959164190-project-member] [instance: 4e3dee19-b99a-4257-88da-1b0531e2c0f9] Deleted Snapshot of the VM instance {{(pid=62519) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1594.620716] env[62519]: INFO nova.compute.manager [None req-b8aad1ad-699a-4e07-8919-0f4cbeaeb95e tempest-ImagesOneServerTestJSON-1959164190 tempest-ImagesOneServerTestJSON-1959164190-project-member] [instance: 4e3dee19-b99a-4257-88da-1b0531e2c0f9] Took 14.58 seconds to snapshot the instance on the hypervisor. [ 1594.642074] env[62519]: DEBUG oslo_vmware.api [None req-040512d1-e180-4bd4-99a9-22e550a80706 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Task: {'id': task-1802467, 'name': CopyVirtualDisk_Task} progress is 25%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1594.750918] env[62519]: DEBUG oslo_concurrency.lockutils [None req-428b10c4-d0cd-466c-a6ce-0e461fcf51cf tempest-InstanceActionsTestJSON-366970633 tempest-InstanceActionsTestJSON-366970633-project-member] Lock "f19c860f-736a-4783-8ef5-8262040e53a3" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 39.243s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1594.806475] env[62519]: DEBUG nova.compute.manager [None req-efec4a27-e1ca-4ade-8ea9-071f6e4c700f tempest-ServerActionsV293TestJSON-760697799 tempest-ServerActionsV293TestJSON-760697799-project-member] [instance: bace23b3-b7f4-4f3b-8986-0076440d096d] Starting instance... {{(pid=62519) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2441}} [ 1594.918352] env[62519]: DEBUG nova.network.neutron [None req-0dc8c13a-a465-44ac-8355-c254139b86fe tempest-ServersAdminTestJSON-988532011 tempest-ServersAdminTestJSON-988532011-project-admin] [instance: dac173ff-1807-405f-a59c-bb2efef62a0c] Updating instance_info_cache with network_info: [{"id": "bb52070a-ffaf-478e-b8c5-9273d58a0b34", "address": "fa:16:3e:9c:1f:3e", "network": {"id": "f59f09e8-6201-4c07-9809-ef4c213de15f", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-906781315-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "555fa612766f4b5fa173664ca3fa496c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8bc3fa06-9d5b-4ab1-8113-6ed8942d23b6", "external-id": "nsx-vlan-transportzone-72", "segmentation_id": 72, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbb52070a-ff", "ovs_interfaceid": "bb52070a-ffaf-478e-b8c5-9273d58a0b34", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1595.027184] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1802468, 'name': CreateVM_Task} progress is 25%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1595.060462] env[62519]: DEBUG oslo_vmware.api [None req-5ace482d-d654-44d6-95b3-9912a2e4b9f6 tempest-SecurityGroupsTestJSON-38868017 tempest-SecurityGroupsTestJSON-38868017-project-member] Task: {'id': task-1802469, 'name': ResetVM_Task, 'duration_secs': 0.186006} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1595.060725] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-5ace482d-d654-44d6-95b3-9912a2e4b9f6 tempest-SecurityGroupsTestJSON-38868017 tempest-SecurityGroupsTestJSON-38868017-project-member] [instance: ad374dd9-a92d-4b76-9609-7562346e05a8] Did hard reboot of VM {{(pid=62519) reboot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1067}} [ 1595.060914] env[62519]: DEBUG nova.compute.manager [None req-5ace482d-d654-44d6-95b3-9912a2e4b9f6 tempest-SecurityGroupsTestJSON-38868017 tempest-SecurityGroupsTestJSON-38868017-project-member] [instance: ad374dd9-a92d-4b76-9609-7562346e05a8] Checking state {{(pid=62519) _get_power_state /opt/stack/nova/nova/compute/manager.py:1799}} [ 1595.061666] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1986400-18db-4375-8203-e19e32ab254a {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1595.140370] env[62519]: DEBUG oslo_vmware.api [None req-040512d1-e180-4bd4-99a9-22e550a80706 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Task: {'id': task-1802467, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.70042} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1595.140667] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-040512d1-e180-4bd4-99a9-22e550a80706 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk to [datastore1] 80ef3fd4-b9ef-4fd2-a991-feec78a0c81d/80ef3fd4-b9ef-4fd2-a991-feec78a0c81d.vmdk {{(pid=62519) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1595.140880] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-040512d1-e180-4bd4-99a9-22e550a80706 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] [instance: 80ef3fd4-b9ef-4fd2-a991-feec78a0c81d] Extending root virtual disk to 1048576 {{(pid=62519) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1595.141611] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-441a1d04-eb2a-4bc3-9e9c-e848eebdf9a0 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1595.149225] env[62519]: DEBUG oslo_vmware.api [None req-040512d1-e180-4bd4-99a9-22e550a80706 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Waiting for the task: (returnval){ [ 1595.149225] env[62519]: value = "task-1802470" [ 1595.149225] env[62519]: _type = "Task" [ 1595.149225] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1595.157041] env[62519]: DEBUG oslo_vmware.api [None req-040512d1-e180-4bd4-99a9-22e550a80706 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Task: {'id': task-1802470, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1595.339775] env[62519]: DEBUG oslo_concurrency.lockutils [None req-efec4a27-e1ca-4ade-8ea9-071f6e4c700f tempest-ServerActionsV293TestJSON-760697799 tempest-ServerActionsV293TestJSON-760697799-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1595.420630] env[62519]: DEBUG oslo_concurrency.lockutils [None req-0dc8c13a-a465-44ac-8355-c254139b86fe tempest-ServersAdminTestJSON-988532011 tempest-ServersAdminTestJSON-988532011-project-admin] Releasing lock "refresh_cache-dac173ff-1807-405f-a59c-bb2efef62a0c" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1595.420880] env[62519]: DEBUG nova.compute.manager [None req-0dc8c13a-a465-44ac-8355-c254139b86fe tempest-ServersAdminTestJSON-988532011 tempest-ServersAdminTestJSON-988532011-project-admin] [instance: dac173ff-1807-405f-a59c-bb2efef62a0c] Inject network info {{(pid=62519) _inject_network_info /opt/stack/nova/nova/compute/manager.py:7696}} [ 1595.421150] env[62519]: DEBUG nova.compute.manager [None req-0dc8c13a-a465-44ac-8355-c254139b86fe tempest-ServersAdminTestJSON-988532011 tempest-ServersAdminTestJSON-988532011-project-admin] [instance: dac173ff-1807-405f-a59c-bb2efef62a0c] network_info to inject: |[{"id": "bb52070a-ffaf-478e-b8c5-9273d58a0b34", "address": "fa:16:3e:9c:1f:3e", "network": {"id": "f59f09e8-6201-4c07-9809-ef4c213de15f", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-906781315-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "555fa612766f4b5fa173664ca3fa496c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8bc3fa06-9d5b-4ab1-8113-6ed8942d23b6", "external-id": "nsx-vlan-transportzone-72", "segmentation_id": 72, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbb52070a-ff", "ovs_interfaceid": "bb52070a-ffaf-478e-b8c5-9273d58a0b34", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62519) _inject_network_info /opt/stack/nova/nova/compute/manager.py:7697}} [ 1595.425875] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-0dc8c13a-a465-44ac-8355-c254139b86fe tempest-ServersAdminTestJSON-988532011 tempest-ServersAdminTestJSON-988532011-project-admin] [instance: dac173ff-1807-405f-a59c-bb2efef62a0c] Reconfiguring VM instance to set the machine id {{(pid=62519) _set_machine_id /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1796}} [ 1595.428698] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-cd421a84-e9e3-4ac2-adc3-b3d51fc3ad22 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1595.444182] env[62519]: DEBUG oslo_vmware.api [None req-0dc8c13a-a465-44ac-8355-c254139b86fe tempest-ServersAdminTestJSON-988532011 
tempest-ServersAdminTestJSON-988532011-project-admin] Waiting for the task: (returnval){ [ 1595.444182] env[62519]: value = "task-1802471" [ 1595.444182] env[62519]: _type = "Task" [ 1595.444182] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1595.455317] env[62519]: DEBUG oslo_vmware.api [None req-0dc8c13a-a465-44ac-8355-c254139b86fe tempest-ServersAdminTestJSON-988532011 tempest-ServersAdminTestJSON-988532011-project-admin] Task: {'id': task-1802471, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1595.523607] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1802468, 'name': CreateVM_Task, 'duration_secs': 1.003365} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1595.523607] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 302edcd3-bd6e-41da-b731-4d4c1bb5c3c1] Created VM on the ESX host {{(pid=62519) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1595.524034] env[62519]: DEBUG oslo_concurrency.lockutils [None req-7f6d51ef-3fb2-4405-8ddb-ee0d7b872e58 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1595.524199] env[62519]: DEBUG oslo_concurrency.lockutils [None req-7f6d51ef-3fb2-4405-8ddb-ee0d7b872e58 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Acquired lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1595.524936] env[62519]: DEBUG oslo_concurrency.lockutils [None req-7f6d51ef-3fb2-4405-8ddb-ee0d7b872e58 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1595.524936] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-655d94f7-18d4-41a4-8446-bc6ee9dd3428 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1595.531805] env[62519]: DEBUG oslo_vmware.api [None req-7f6d51ef-3fb2-4405-8ddb-ee0d7b872e58 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Waiting for the task: (returnval){ [ 1595.531805] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]521642a8-cfed-9e3b-0eb1-e06d5238e7b5" [ 1595.531805] env[62519]: _type = "Task" [ 1595.531805] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1595.540059] env[62519]: DEBUG oslo_vmware.api [None req-7f6d51ef-3fb2-4405-8ddb-ee0d7b872e58 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]521642a8-cfed-9e3b-0eb1-e06d5238e7b5, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1595.574589] env[62519]: DEBUG oslo_concurrency.lockutils [None req-5ace482d-d654-44d6-95b3-9912a2e4b9f6 tempest-SecurityGroupsTestJSON-38868017 tempest-SecurityGroupsTestJSON-38868017-project-member] Lock "ad374dd9-a92d-4b76-9609-7562346e05a8" "released" by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" :: held 5.013s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1595.664077] env[62519]: DEBUG oslo_vmware.api [None req-040512d1-e180-4bd4-99a9-22e550a80706 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Task: {'id': task-1802470, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.073014} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1595.664357] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-040512d1-e180-4bd4-99a9-22e550a80706 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] [instance: 80ef3fd4-b9ef-4fd2-a991-feec78a0c81d] Extended root virtual disk {{(pid=62519) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1595.665223] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6760e9b-1f07-4f9a-b52a-c41b78bf26f1 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1595.691318] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-040512d1-e180-4bd4-99a9-22e550a80706 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] [instance: 80ef3fd4-b9ef-4fd2-a991-feec78a0c81d] Reconfiguring VM instance instance-00000031 to attach disk [datastore1] 80ef3fd4-b9ef-4fd2-a991-feec78a0c81d/80ef3fd4-b9ef-4fd2-a991-feec78a0c81d.vmdk or device None with type sparse {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1595.694257] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6593ab07-78db-4f5e-9592-ab705dfbfce4 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1595.720216] env[62519]: DEBUG oslo_vmware.api [None req-040512d1-e180-4bd4-99a9-22e550a80706 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Waiting for the task: (returnval){ [ 1595.720216] env[62519]: value = "task-1802472" [ 1595.720216] env[62519]: _type = "Task" [ 1595.720216] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1595.731665] env[62519]: DEBUG oslo_vmware.api [None req-040512d1-e180-4bd4-99a9-22e550a80706 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Task: {'id': task-1802472, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1595.761110] env[62519]: DEBUG nova.network.neutron [None req-a05ef864-7b2b-4646-a83f-596c8b3440a9 tempest-ServersWithSpecificFlavorTestJSON-2102225660 tempest-ServersWithSpecificFlavorTestJSON-2102225660-project-member] [instance: f0925a44-c15b-4415-99bc-1b2366292fe4] Successfully updated port: 4406b619-4b6f-453e-a6e6-7f9f83d1b8ff {{(pid=62519) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1595.831335] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bace35dd-4206-4f4d-b611-293ce3341c66 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1595.839599] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fead506f-7781-46bc-8346-bbcf152558f5 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1595.845859] env[62519]: DEBUG nova.compute.manager [req-1456c686-ceb6-4dc1-a17f-24f7ba6d0dc1 req-036f1fa4-1432-407a-b72e-c7f44e0bb6d0 service nova] [instance: 302edcd3-bd6e-41da-b731-4d4c1bb5c3c1] Received event network-changed-b3cdcca9-9468-453b-894e-326f90b3cb34 {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1595.846056] env[62519]: DEBUG nova.compute.manager [req-1456c686-ceb6-4dc1-a17f-24f7ba6d0dc1 req-036f1fa4-1432-407a-b72e-c7f44e0bb6d0 service nova] [instance: 302edcd3-bd6e-41da-b731-4d4c1bb5c3c1] Refreshing instance network info cache due to event network-changed-b3cdcca9-9468-453b-894e-326f90b3cb34. {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11628}} [ 1595.846278] env[62519]: DEBUG oslo_concurrency.lockutils [req-1456c686-ceb6-4dc1-a17f-24f7ba6d0dc1 req-036f1fa4-1432-407a-b72e-c7f44e0bb6d0 service nova] Acquiring lock "refresh_cache-302edcd3-bd6e-41da-b731-4d4c1bb5c3c1" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1595.846451] env[62519]: DEBUG oslo_concurrency.lockutils [req-1456c686-ceb6-4dc1-a17f-24f7ba6d0dc1 req-036f1fa4-1432-407a-b72e-c7f44e0bb6d0 service nova] Acquired lock "refresh_cache-302edcd3-bd6e-41da-b731-4d4c1bb5c3c1" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1595.847146] env[62519]: DEBUG nova.network.neutron [req-1456c686-ceb6-4dc1-a17f-24f7ba6d0dc1 req-036f1fa4-1432-407a-b72e-c7f44e0bb6d0 service nova] [instance: 302edcd3-bd6e-41da-b731-4d4c1bb5c3c1] Refreshing network info cache for port b3cdcca9-9468-453b-894e-326f90b3cb34 {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1595.880770] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b675da87-4d2d-41c0-bffb-283af614b25f {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1595.889577] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7eb10d0e-8d0d-4979-b3a1-d16d2afb796d {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1595.903717] env[62519]: DEBUG nova.compute.provider_tree [None req-77454b93-3f15-4824-8029-76272dd44c6c tempest-ServerActionsTestJSON-347062103 
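The disk pipeline above (CopyVirtualDisk_Task to clone the cached image, ExtendVirtualDisk_Task to grow the root disk to 1048576 KB, i.e. the flavor's 1 GB root disk, then ReconfigVM_Task to attach it) is driven with the same invoke-and-wait calls, this time against managers advertised in the vCenter service content. A sketch of the extend step only, assuming a live session; the function name, dc_ref and the vmdk path are placeholders rather than values recovered from this log.

    def extend_root_disk(session, dc_ref, vmdk_path, new_capacity_kb):
        # The VirtualDiskManager is exposed through the vCenter service content.
        disk_mgr = session.vim.service_content.virtualDiskManager
        task = session.invoke_api(
            session.vim, 'ExtendVirtualDisk_Task', disk_mgr,
            name=vmdk_path,            # e.g. a '[datastore1] <uuid>/<uuid>.vmdk' path
            datacenter=dc_ref,
            newCapacityKb=new_capacity_kb,
            eagerZero=False)
        # Block until the task reaches a terminal state, matching the
        # ExtendVirtualDisk_Task polling lines above.
        session.wait_for_task(task)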
tempest-ServerActionsTestJSON-347062103-project-member] Inventory has not changed in ProviderTree for provider: f8ca0d98-9158-4b85-ae0e-b106f966dd44 {{(pid=62519) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1595.955446] env[62519]: DEBUG oslo_vmware.api [None req-0dc8c13a-a465-44ac-8355-c254139b86fe tempest-ServersAdminTestJSON-988532011 tempest-ServersAdminTestJSON-988532011-project-admin] Task: {'id': task-1802471, 'name': ReconfigVM_Task, 'duration_secs': 0.349999} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1595.955717] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-0dc8c13a-a465-44ac-8355-c254139b86fe tempest-ServersAdminTestJSON-988532011 tempest-ServersAdminTestJSON-988532011-project-admin] [instance: dac173ff-1807-405f-a59c-bb2efef62a0c] Reconfigured VM instance to set the machine id {{(pid=62519) _set_machine_id /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1799}} [ 1595.974120] env[62519]: DEBUG nova.compute.manager [req-6b411fd4-735f-4c69-bdcc-389931a22c81 req-e70bacc8-55d2-4c05-abb5-c7adc1a15532 service nova] [instance: f0925a44-c15b-4415-99bc-1b2366292fe4] Received event network-vif-plugged-4406b619-4b6f-453e-a6e6-7f9f83d1b8ff {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1595.974388] env[62519]: DEBUG oslo_concurrency.lockutils [req-6b411fd4-735f-4c69-bdcc-389931a22c81 req-e70bacc8-55d2-4c05-abb5-c7adc1a15532 service nova] Acquiring lock "f0925a44-c15b-4415-99bc-1b2366292fe4-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1595.974551] env[62519]: DEBUG oslo_concurrency.lockutils [req-6b411fd4-735f-4c69-bdcc-389931a22c81 req-e70bacc8-55d2-4c05-abb5-c7adc1a15532 service nova] Lock "f0925a44-c15b-4415-99bc-1b2366292fe4-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1595.974762] env[62519]: DEBUG oslo_concurrency.lockutils [req-6b411fd4-735f-4c69-bdcc-389931a22c81 req-e70bacc8-55d2-4c05-abb5-c7adc1a15532 service nova] Lock "f0925a44-c15b-4415-99bc-1b2366292fe4-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1595.974946] env[62519]: DEBUG nova.compute.manager [req-6b411fd4-735f-4c69-bdcc-389931a22c81 req-e70bacc8-55d2-4c05-abb5-c7adc1a15532 service nova] [instance: f0925a44-c15b-4415-99bc-1b2366292fe4] No waiting events found dispatching network-vif-plugged-4406b619-4b6f-453e-a6e6-7f9f83d1b8ff {{(pid=62519) pop_instance_event /opt/stack/nova/nova/compute/manager.py:323}} [ 1595.975130] env[62519]: WARNING nova.compute.manager [req-6b411fd4-735f-4c69-bdcc-389931a22c81 req-e70bacc8-55d2-4c05-abb5-c7adc1a15532 service nova] [instance: f0925a44-c15b-4415-99bc-1b2366292fe4] Received unexpected event network-vif-plugged-4406b619-4b6f-453e-a6e6-7f9f83d1b8ff for instance with vm_state building and task_state spawning. 
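The entries just above show Neutron's external instance events (network-vif-plugged / network-changed) being matched against per-instance waiters under an "<uuid>-events" lock, ending in a WARNING because the event arrived while nothing was registered to wait for it. Below is a minimal, hypothetical sketch of that waiter/pop pattern, written only to illustrate the flow in these log lines; it is not Nova's actual nova.compute.manager.InstanceEvents code, and the class and method names are stand-ins.

import threading
from collections import defaultdict

class InstanceEventsSketch:
    """Toy per-instance table of expected external events (illustrative only)."""

    def __init__(self):
        self._lock = threading.Lock()       # stands in for the "<uuid>-events" lock in the log
        self._events = defaultdict(dict)    # instance_uuid -> {event_name: threading.Event}

    def prepare_for_event(self, instance_uuid, event_name):
        """Register a waiter before an operation that expects e.g. 'network-vif-plugged-<port>'."""
        waiter = threading.Event()
        with self._lock:
            self._events[instance_uuid][event_name] = waiter
        return waiter

    def pop_instance_event(self, instance_uuid, event_name):
        """Remove and return the waiter for an incoming event, or None if none was registered."""
        with self._lock:
            return self._events.get(instance_uuid, {}).pop(event_name, None)

def handle_external_event(tracker, instance_uuid, event_name):
    """Dispatch one Neutron-driven event, mirroring the 'No waiting events found' branch above."""
    waiter = tracker.pop_instance_event(instance_uuid, event_name)
    if waiter is None:
        # Corresponds to the WARNING: event received while the instance is still building/spawning.
        print(f"Received unexpected event {event_name} for instance {instance_uuid}")
    else:
        waiter.set()  # unblocks whoever called prepare_for_event(...).wait()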
[ 1596.044603] env[62519]: DEBUG oslo_vmware.api [None req-7f6d51ef-3fb2-4405-8ddb-ee0d7b872e58 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]521642a8-cfed-9e3b-0eb1-e06d5238e7b5, 'name': SearchDatastore_Task, 'duration_secs': 0.054123} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1596.044936] env[62519]: DEBUG oslo_concurrency.lockutils [None req-7f6d51ef-3fb2-4405-8ddb-ee0d7b872e58 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Releasing lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1596.045271] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-7f6d51ef-3fb2-4405-8ddb-ee0d7b872e58 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] [instance: 302edcd3-bd6e-41da-b731-4d4c1bb5c3c1] Processing image 15793716-f1d9-4a86-9030-717adf498693 {{(pid=62519) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1596.045415] env[62519]: DEBUG oslo_concurrency.lockutils [None req-7f6d51ef-3fb2-4405-8ddb-ee0d7b872e58 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1596.045559] env[62519]: DEBUG oslo_concurrency.lockutils [None req-7f6d51ef-3fb2-4405-8ddb-ee0d7b872e58 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Acquired lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1596.045736] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-7f6d51ef-3fb2-4405-8ddb-ee0d7b872e58 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62519) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1596.046007] env[62519]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-8ecffb75-760f-4fd9-908c-4e31e582eb26 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1596.057780] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-7f6d51ef-3fb2-4405-8ddb-ee0d7b872e58 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62519) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1596.058008] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-7f6d51ef-3fb2-4405-8ddb-ee0d7b872e58 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62519) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1596.058754] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fafd47c1-7c3e-4c28-a5be-f7c913217c62 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1596.064340] env[62519]: DEBUG oslo_vmware.api [None req-7f6d51ef-3fb2-4405-8ddb-ee0d7b872e58 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Waiting for the task: (returnval){ [ 1596.064340] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]5272d717-f7ae-8d79-d227-ba3725804782" [ 1596.064340] env[62519]: _type = "Task" [ 1596.064340] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1596.072337] env[62519]: DEBUG oslo_vmware.api [None req-7f6d51ef-3fb2-4405-8ddb-ee0d7b872e58 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]5272d717-f7ae-8d79-d227-ba3725804782, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1596.236149] env[62519]: DEBUG oslo_vmware.api [None req-040512d1-e180-4bd4-99a9-22e550a80706 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Task: {'id': task-1802472, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1596.265817] env[62519]: DEBUG oslo_concurrency.lockutils [None req-a05ef864-7b2b-4646-a83f-596c8b3440a9 tempest-ServersWithSpecificFlavorTestJSON-2102225660 tempest-ServersWithSpecificFlavorTestJSON-2102225660-project-member] Acquiring lock "refresh_cache-f0925a44-c15b-4415-99bc-1b2366292fe4" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1596.265974] env[62519]: DEBUG oslo_concurrency.lockutils [None req-a05ef864-7b2b-4646-a83f-596c8b3440a9 tempest-ServersWithSpecificFlavorTestJSON-2102225660 tempest-ServersWithSpecificFlavorTestJSON-2102225660-project-member] Acquired lock "refresh_cache-f0925a44-c15b-4415-99bc-1b2366292fe4" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1596.266188] env[62519]: DEBUG nova.network.neutron [None req-a05ef864-7b2b-4646-a83f-596c8b3440a9 tempest-ServersWithSpecificFlavorTestJSON-2102225660 tempest-ServersWithSpecificFlavorTestJSON-2102225660-project-member] [instance: f0925a44-c15b-4415-99bc-1b2366292fe4] Building network info cache for instance {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1596.410028] env[62519]: DEBUG nova.scheduler.client.report [None req-77454b93-3f15-4824-8029-76272dd44c6c tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Inventory has not changed for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 157, 'step_size': 1, 
'allocation_ratio': 1.0}} {{(pid=62519) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1596.582346] env[62519]: DEBUG oslo_vmware.api [None req-7f6d51ef-3fb2-4405-8ddb-ee0d7b872e58 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]5272d717-f7ae-8d79-d227-ba3725804782, 'name': SearchDatastore_Task, 'duration_secs': 0.022086} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1596.583447] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-98adc3d8-eed4-43a2-841f-ff85f5a67227 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1596.591124] env[62519]: DEBUG oslo_vmware.api [None req-7f6d51ef-3fb2-4405-8ddb-ee0d7b872e58 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Waiting for the task: (returnval){ [ 1596.591124] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]52a7b5e7-34c6-6f37-8443-900917a25ecc" [ 1596.591124] env[62519]: _type = "Task" [ 1596.591124] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1596.602637] env[62519]: DEBUG oslo_vmware.api [None req-7f6d51ef-3fb2-4405-8ddb-ee0d7b872e58 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52a7b5e7-34c6-6f37-8443-900917a25ecc, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1596.728305] env[62519]: DEBUG nova.network.neutron [req-1456c686-ceb6-4dc1-a17f-24f7ba6d0dc1 req-036f1fa4-1432-407a-b72e-c7f44e0bb6d0 service nova] [instance: 302edcd3-bd6e-41da-b731-4d4c1bb5c3c1] Updated VIF entry in instance network info cache for port b3cdcca9-9468-453b-894e-326f90b3cb34. 
{{(pid=62519) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1596.728859] env[62519]: DEBUG nova.network.neutron [req-1456c686-ceb6-4dc1-a17f-24f7ba6d0dc1 req-036f1fa4-1432-407a-b72e-c7f44e0bb6d0 service nova] [instance: 302edcd3-bd6e-41da-b731-4d4c1bb5c3c1] Updating instance_info_cache with network_info: [{"id": "b3cdcca9-9468-453b-894e-326f90b3cb34", "address": "fa:16:3e:12:25:b1", "network": {"id": "14e63db8-c42b-42e5-bd96-cab60c163758", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1328688204-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "df6727c290724a8ebef5188c77e91399", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2be3fdb5-359e-43bd-8c20-2ff00e81db55", "external-id": "nsx-vlan-transportzone-986", "segmentation_id": 986, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb3cdcca9-94", "ovs_interfaceid": "b3cdcca9-9468-453b-894e-326f90b3cb34", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1596.741063] env[62519]: DEBUG oslo_vmware.api [None req-040512d1-e180-4bd4-99a9-22e550a80706 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Task: {'id': task-1802472, 'name': ReconfigVM_Task, 'duration_secs': 0.804849} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1596.741352] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-040512d1-e180-4bd4-99a9-22e550a80706 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] [instance: 80ef3fd4-b9ef-4fd2-a991-feec78a0c81d] Reconfigured VM instance instance-00000031 to attach disk [datastore1] 80ef3fd4-b9ef-4fd2-a991-feec78a0c81d/80ef3fd4-b9ef-4fd2-a991-feec78a0c81d.vmdk or device None with type sparse {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1596.742572] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-d1bc4441-7385-456c-96a2-3599a7c9cf1d {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1596.751668] env[62519]: DEBUG oslo_vmware.api [None req-040512d1-e180-4bd4-99a9-22e550a80706 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Waiting for the task: (returnval){ [ 1596.751668] env[62519]: value = "task-1802473" [ 1596.751668] env[62519]: _type = "Task" [ 1596.751668] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1596.762213] env[62519]: DEBUG oslo_vmware.api [None req-040512d1-e180-4bd4-99a9-22e550a80706 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Task: {'id': task-1802473, 'name': Rename_Task} progress is 5%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1596.822118] env[62519]: DEBUG nova.network.neutron [None req-a05ef864-7b2b-4646-a83f-596c8b3440a9 tempest-ServersWithSpecificFlavorTestJSON-2102225660 tempest-ServersWithSpecificFlavorTestJSON-2102225660-project-member] [instance: f0925a44-c15b-4415-99bc-1b2366292fe4] Instance cache missing network info. {{(pid=62519) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1596.915225] env[62519]: DEBUG oslo_concurrency.lockutils [None req-77454b93-3f15-4824-8029-76272dd44c6c tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.708s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1596.915508] env[62519]: DEBUG nova.compute.manager [None req-77454b93-3f15-4824-8029-76272dd44c6c tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] [instance: 8b20b91d-d4e7-4ec2-88ec-76b444a8d8a8] Start building networks asynchronously for instance. {{(pid=62519) _build_resources /opt/stack/nova/nova/compute/manager.py:2838}} [ 1596.918609] env[62519]: DEBUG oslo_concurrency.lockutils [None req-b3ee3a4d-b7fb-4e51-adc1-0f63f32d64f4 tempest-FloatingIPsAssociationTestJSON-131832216 tempest-FloatingIPsAssociationTestJSON-131832216-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 29.385s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1596.920105] env[62519]: INFO nova.compute.claims [None req-b3ee3a4d-b7fb-4e51-adc1-0f63f32d64f4 tempest-FloatingIPsAssociationTestJSON-131832216 tempest-FloatingIPsAssociationTestJSON-131832216-project-member] [instance: 9ac3344d-219a-487f-b83f-96c17cd86dad] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1597.104600] env[62519]: DEBUG oslo_vmware.api [None req-7f6d51ef-3fb2-4405-8ddb-ee0d7b872e58 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52a7b5e7-34c6-6f37-8443-900917a25ecc, 'name': SearchDatastore_Task, 'duration_secs': 0.011316} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1597.104894] env[62519]: DEBUG oslo_concurrency.lockutils [None req-7f6d51ef-3fb2-4405-8ddb-ee0d7b872e58 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Releasing lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1597.105214] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-7f6d51ef-3fb2-4405-8ddb-ee0d7b872e58 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk to [datastore1] 302edcd3-bd6e-41da-b731-4d4c1bb5c3c1/302edcd3-bd6e-41da-b731-4d4c1bb5c3c1.vmdk {{(pid=62519) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1597.105612] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-2d418d88-5410-4abe-bcfd-d73f35b32c9f {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1597.113550] env[62519]: DEBUG oslo_vmware.api [None req-7f6d51ef-3fb2-4405-8ddb-ee0d7b872e58 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Waiting for the task: (returnval){ [ 1597.113550] env[62519]: value = "task-1802474" [ 1597.113550] env[62519]: _type = "Task" [ 1597.113550] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1597.124045] env[62519]: DEBUG oslo_vmware.api [None req-7f6d51ef-3fb2-4405-8ddb-ee0d7b872e58 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Task: {'id': task-1802474, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1597.214236] env[62519]: DEBUG nova.network.neutron [None req-a05ef864-7b2b-4646-a83f-596c8b3440a9 tempest-ServersWithSpecificFlavorTestJSON-2102225660 tempest-ServersWithSpecificFlavorTestJSON-2102225660-project-member] [instance: f0925a44-c15b-4415-99bc-1b2366292fe4] Updating instance_info_cache with network_info: [{"id": "4406b619-4b6f-453e-a6e6-7f9f83d1b8ff", "address": "fa:16:3e:e2:5e:d4", "network": {"id": "87dc972b-a043-4db2-b67a-2efe6d40d1aa", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-1993987139-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8929cfd4892c4e399062483665500dd9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6b8137fc-f23d-49b1-b19c-3123a5588f34", "external-id": "nsx-vlan-transportzone-709", "segmentation_id": 709, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4406b619-4b", "ovs_interfaceid": "4406b619-4b6f-453e-a6e6-7f9f83d1b8ff", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1597.236679] env[62519]: DEBUG oslo_concurrency.lockutils [req-1456c686-ceb6-4dc1-a17f-24f7ba6d0dc1 req-036f1fa4-1432-407a-b72e-c7f44e0bb6d0 service nova] Releasing lock "refresh_cache-302edcd3-bd6e-41da-b731-4d4c1bb5c3c1" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1597.236679] env[62519]: DEBUG nova.compute.manager [req-1456c686-ceb6-4dc1-a17f-24f7ba6d0dc1 req-036f1fa4-1432-407a-b72e-c7f44e0bb6d0 service nova] [instance: 24cb49c8-b2ef-4ede-aea6-6e34081beca1] Received event network-vif-deleted-0ac63864-3cdd-498e-b28b-054b97ccd1db {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1597.266368] env[62519]: DEBUG oslo_vmware.api [None req-040512d1-e180-4bd4-99a9-22e550a80706 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Task: {'id': task-1802473, 'name': Rename_Task, 'duration_secs': 0.139154} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1597.267353] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-040512d1-e180-4bd4-99a9-22e550a80706 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] [instance: 80ef3fd4-b9ef-4fd2-a991-feec78a0c81d] Powering on the VM {{(pid=62519) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1597.267353] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e491fc04-bdec-4618-9c07-eb421e82d7b5 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1597.273949] env[62519]: DEBUG oslo_vmware.api [None req-040512d1-e180-4bd4-99a9-22e550a80706 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Waiting for the task: (returnval){ [ 1597.273949] env[62519]: value = "task-1802475" [ 1597.273949] env[62519]: _type = "Task" [ 1597.273949] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1597.283768] env[62519]: DEBUG oslo_vmware.api [None req-040512d1-e180-4bd4-99a9-22e550a80706 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Task: {'id': task-1802475, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1597.429731] env[62519]: DEBUG nova.compute.utils [None req-77454b93-3f15-4824-8029-76272dd44c6c tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Using /dev/sd instead of None {{(pid=62519) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1597.434762] env[62519]: DEBUG nova.compute.manager [None req-77454b93-3f15-4824-8029-76272dd44c6c tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] [instance: 8b20b91d-d4e7-4ec2-88ec-76b444a8d8a8] Allocating IP information in the background. 
{{(pid=62519) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1989}} [ 1597.434762] env[62519]: DEBUG nova.network.neutron [None req-77454b93-3f15-4824-8029-76272dd44c6c tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] [instance: 8b20b91d-d4e7-4ec2-88ec-76b444a8d8a8] allocate_for_instance() {{(pid=62519) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1597.542143] env[62519]: DEBUG nova.policy [None req-77454b93-3f15-4824-8029-76272dd44c6c tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c586ba4158b947b0a25d1614c17ebb51', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '12977ed65a1b410a987b049e9d1dce3e', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62519) authorize /opt/stack/nova/nova/policy.py:192}} [ 1597.628995] env[62519]: DEBUG oslo_vmware.api [None req-7f6d51ef-3fb2-4405-8ddb-ee0d7b872e58 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Task: {'id': task-1802474, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1597.716127] env[62519]: DEBUG oslo_concurrency.lockutils [None req-a05ef864-7b2b-4646-a83f-596c8b3440a9 tempest-ServersWithSpecificFlavorTestJSON-2102225660 tempest-ServersWithSpecificFlavorTestJSON-2102225660-project-member] Releasing lock "refresh_cache-f0925a44-c15b-4415-99bc-1b2366292fe4" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1597.716477] env[62519]: DEBUG nova.compute.manager [None req-a05ef864-7b2b-4646-a83f-596c8b3440a9 tempest-ServersWithSpecificFlavorTestJSON-2102225660 tempest-ServersWithSpecificFlavorTestJSON-2102225660-project-member] [instance: f0925a44-c15b-4415-99bc-1b2366292fe4] Instance network_info: |[{"id": "4406b619-4b6f-453e-a6e6-7f9f83d1b8ff", "address": "fa:16:3e:e2:5e:d4", "network": {"id": "87dc972b-a043-4db2-b67a-2efe6d40d1aa", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-1993987139-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8929cfd4892c4e399062483665500dd9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6b8137fc-f23d-49b1-b19c-3123a5588f34", "external-id": "nsx-vlan-transportzone-709", "segmentation_id": 709, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4406b619-4b", "ovs_interfaceid": "4406b619-4b6f-453e-a6e6-7f9f83d1b8ff", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62519) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2004}} [ 1597.716949] 
env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-a05ef864-7b2b-4646-a83f-596c8b3440a9 tempest-ServersWithSpecificFlavorTestJSON-2102225660 tempest-ServersWithSpecificFlavorTestJSON-2102225660-project-member] [instance: f0925a44-c15b-4415-99bc-1b2366292fe4] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:e2:5e:d4', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '6b8137fc-f23d-49b1-b19c-3123a5588f34', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '4406b619-4b6f-453e-a6e6-7f9f83d1b8ff', 'vif_model': 'vmxnet3'}] {{(pid=62519) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1597.724967] env[62519]: DEBUG oslo.service.loopingcall [None req-a05ef864-7b2b-4646-a83f-596c8b3440a9 tempest-ServersWithSpecificFlavorTestJSON-2102225660 tempest-ServersWithSpecificFlavorTestJSON-2102225660-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62519) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1597.725218] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f0925a44-c15b-4415-99bc-1b2366292fe4] Creating VM on the ESX host {{(pid=62519) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1597.725467] env[62519]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-0fc0c9f2-64ad-4797-9b98-ca48bbd59163 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1597.748687] env[62519]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1597.748687] env[62519]: value = "task-1802476" [ 1597.748687] env[62519]: _type = "Task" [ 1597.748687] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1597.758217] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1802476, 'name': CreateVM_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1597.784302] env[62519]: DEBUG oslo_vmware.api [None req-040512d1-e180-4bd4-99a9-22e550a80706 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Task: {'id': task-1802475, 'name': PowerOnVM_Task} progress is 88%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1597.939912] env[62519]: DEBUG nova.compute.manager [None req-77454b93-3f15-4824-8029-76272dd44c6c tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] [instance: 8b20b91d-d4e7-4ec2-88ec-76b444a8d8a8] Start building block device mappings for instance. {{(pid=62519) _build_resources /opt/stack/nova/nova/compute/manager.py:2873}} [ 1598.073815] env[62519]: DEBUG nova.network.neutron [None req-77454b93-3f15-4824-8029-76272dd44c6c tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] [instance: 8b20b91d-d4e7-4ec2-88ec-76b444a8d8a8] Successfully created port: c3e2a054-4826-4bd6-8c9e-74005e7912e4 {{(pid=62519) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1598.131719] env[62519]: DEBUG oslo_vmware.api [None req-7f6d51ef-3fb2-4405-8ddb-ee0d7b872e58 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Task: {'id': task-1802474, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.602534} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1598.132048] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-7f6d51ef-3fb2-4405-8ddb-ee0d7b872e58 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk to [datastore1] 302edcd3-bd6e-41da-b731-4d4c1bb5c3c1/302edcd3-bd6e-41da-b731-4d4c1bb5c3c1.vmdk {{(pid=62519) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1598.132246] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-7f6d51ef-3fb2-4405-8ddb-ee0d7b872e58 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] [instance: 302edcd3-bd6e-41da-b731-4d4c1bb5c3c1] Extending root virtual disk to 1048576 {{(pid=62519) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1598.132499] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-1a3a2494-3345-4149-8196-a6d2f737c4a0 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1598.143181] env[62519]: DEBUG oslo_vmware.api [None req-7f6d51ef-3fb2-4405-8ddb-ee0d7b872e58 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Waiting for the task: (returnval){ [ 1598.143181] env[62519]: value = "task-1802477" [ 1598.143181] env[62519]: _type = "Task" [ 1598.143181] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1598.151650] env[62519]: DEBUG oslo_vmware.api [None req-7f6d51ef-3fb2-4405-8ddb-ee0d7b872e58 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Task: {'id': task-1802477, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1598.179172] env[62519]: DEBUG oslo_concurrency.lockutils [None req-eb756757-24ba-440f-8631-f23661662462 tempest-ImagesOneServerTestJSON-1959164190 tempest-ImagesOneServerTestJSON-1959164190-project-member] Acquiring lock "4e3dee19-b99a-4257-88da-1b0531e2c0f9" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1598.179441] env[62519]: DEBUG oslo_concurrency.lockutils [None req-eb756757-24ba-440f-8631-f23661662462 tempest-ImagesOneServerTestJSON-1959164190 tempest-ImagesOneServerTestJSON-1959164190-project-member] Lock "4e3dee19-b99a-4257-88da-1b0531e2c0f9" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1598.179627] env[62519]: DEBUG oslo_concurrency.lockutils [None req-eb756757-24ba-440f-8631-f23661662462 tempest-ImagesOneServerTestJSON-1959164190 tempest-ImagesOneServerTestJSON-1959164190-project-member] Acquiring lock "4e3dee19-b99a-4257-88da-1b0531e2c0f9-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1598.179803] env[62519]: DEBUG oslo_concurrency.lockutils [None req-eb756757-24ba-440f-8631-f23661662462 tempest-ImagesOneServerTestJSON-1959164190 tempest-ImagesOneServerTestJSON-1959164190-project-member] Lock "4e3dee19-b99a-4257-88da-1b0531e2c0f9-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1598.179956] env[62519]: DEBUG oslo_concurrency.lockutils [None req-eb756757-24ba-440f-8631-f23661662462 tempest-ImagesOneServerTestJSON-1959164190 tempest-ImagesOneServerTestJSON-1959164190-project-member] Lock "4e3dee19-b99a-4257-88da-1b0531e2c0f9-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1598.188014] env[62519]: INFO nova.compute.manager [None req-eb756757-24ba-440f-8631-f23661662462 tempest-ImagesOneServerTestJSON-1959164190 tempest-ImagesOneServerTestJSON-1959164190-project-member] [instance: 4e3dee19-b99a-4257-88da-1b0531e2c0f9] Terminating instance [ 1598.265062] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1802476, 'name': CreateVM_Task, 'duration_secs': 0.432163} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1598.266389] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f0925a44-c15b-4415-99bc-1b2366292fe4] Created VM on the ESX host {{(pid=62519) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1598.266389] env[62519]: DEBUG oslo_concurrency.lockutils [None req-a05ef864-7b2b-4646-a83f-596c8b3440a9 tempest-ServersWithSpecificFlavorTestJSON-2102225660 tempest-ServersWithSpecificFlavorTestJSON-2102225660-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1598.266916] env[62519]: DEBUG oslo_concurrency.lockutils [None req-a05ef864-7b2b-4646-a83f-596c8b3440a9 tempest-ServersWithSpecificFlavorTestJSON-2102225660 tempest-ServersWithSpecificFlavorTestJSON-2102225660-project-member] Acquired lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1598.267303] env[62519]: DEBUG oslo_concurrency.lockutils [None req-a05ef864-7b2b-4646-a83f-596c8b3440a9 tempest-ServersWithSpecificFlavorTestJSON-2102225660 tempest-ServersWithSpecificFlavorTestJSON-2102225660-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1598.267597] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9bb98ff3-567f-4e1f-bb55-85f106626694 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1598.276368] env[62519]: DEBUG oslo_vmware.api [None req-a05ef864-7b2b-4646-a83f-596c8b3440a9 tempest-ServersWithSpecificFlavorTestJSON-2102225660 tempest-ServersWithSpecificFlavorTestJSON-2102225660-project-member] Waiting for the task: (returnval){ [ 1598.276368] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]522f198b-a38a-4f55-565b-eb30f7a76b46" [ 1598.276368] env[62519]: _type = "Task" [ 1598.276368] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1598.292820] env[62519]: DEBUG oslo_vmware.api [None req-040512d1-e180-4bd4-99a9-22e550a80706 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Task: {'id': task-1802475, 'name': PowerOnVM_Task, 'duration_secs': 0.706687} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1598.295986] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-040512d1-e180-4bd4-99a9-22e550a80706 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] [instance: 80ef3fd4-b9ef-4fd2-a991-feec78a0c81d] Powered on the VM {{(pid=62519) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1598.297610] env[62519]: INFO nova.compute.manager [None req-040512d1-e180-4bd4-99a9-22e550a80706 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] [instance: 80ef3fd4-b9ef-4fd2-a991-feec78a0c81d] Took 9.99 seconds to spawn the instance on the hypervisor. 
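Every vCenter operation in this trace (CreateVM_Task, ReconfigVM_Task, Rename_Task, PowerOnVM_Task, the SearchDatastore/CopyVirtualDisk tasks) follows the same submit-then-poll shape: the driver logs "Waiting for the task", then "progress is N%" repeatedly, and finally "completed successfully". The loop below is a toy illustration of that polling pattern only; it is not oslo.vmware's wait_for_task, and the shape of the task-info object (state/progress/error fields) is an assumption made for the sketch.

import time

class TaskTimeout(Exception):
    pass

def wait_for_task_sketch(get_task_info, poll_interval=0.5, timeout=300):
    """Poll a vCenter-style task until it reaches a terminal state (illustrative stand-in).

    get_task_info: callable returning an object with .state ('queued', 'running',
    'success', 'error'), .progress, and .error -- an assumed shape for this sketch.
    """
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        info = get_task_info()
        if info.state == 'success':
            return info                      # the "completed successfully" case in the log
        if info.state == 'error':
            raise RuntimeError(info.error)   # surfaced as a task failure
        # Mirrors the periodic "Task: {...} progress is N%" lines above.
        print(f"task progress is {info.progress}%")
        time.sleep(poll_interval)
    raise TaskTimeout("task did not complete in time")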
[ 1598.297610] env[62519]: DEBUG nova.compute.manager [None req-040512d1-e180-4bd4-99a9-22e550a80706 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] [instance: 80ef3fd4-b9ef-4fd2-a991-feec78a0c81d] Checking state {{(pid=62519) _get_power_state /opt/stack/nova/nova/compute/manager.py:1799}} [ 1598.297610] env[62519]: DEBUG oslo_vmware.api [None req-a05ef864-7b2b-4646-a83f-596c8b3440a9 tempest-ServersWithSpecificFlavorTestJSON-2102225660 tempest-ServersWithSpecificFlavorTestJSON-2102225660-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]522f198b-a38a-4f55-565b-eb30f7a76b46, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1598.297610] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45069c3a-94ad-4713-ab88-1050565d4182 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1598.578018] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f4acd7a-33d6-4863-a6e2-b68a41b8e36c {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1598.583299] env[62519]: DEBUG nova.compute.manager [req-fd61d954-5ca8-48fe-9303-2aebd28d9595 req-7ac967e0-e4ad-4c17-a4eb-1abd057e7603 service nova] [instance: f0925a44-c15b-4415-99bc-1b2366292fe4] Received event network-changed-4406b619-4b6f-453e-a6e6-7f9f83d1b8ff {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1598.583525] env[62519]: DEBUG nova.compute.manager [req-fd61d954-5ca8-48fe-9303-2aebd28d9595 req-7ac967e0-e4ad-4c17-a4eb-1abd057e7603 service nova] [instance: f0925a44-c15b-4415-99bc-1b2366292fe4] Refreshing instance network info cache due to event network-changed-4406b619-4b6f-453e-a6e6-7f9f83d1b8ff. 
{{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11628}} [ 1598.583743] env[62519]: DEBUG oslo_concurrency.lockutils [req-fd61d954-5ca8-48fe-9303-2aebd28d9595 req-7ac967e0-e4ad-4c17-a4eb-1abd057e7603 service nova] Acquiring lock "refresh_cache-f0925a44-c15b-4415-99bc-1b2366292fe4" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1598.584150] env[62519]: DEBUG oslo_concurrency.lockutils [req-fd61d954-5ca8-48fe-9303-2aebd28d9595 req-7ac967e0-e4ad-4c17-a4eb-1abd057e7603 service nova] Acquired lock "refresh_cache-f0925a44-c15b-4415-99bc-1b2366292fe4" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1598.584361] env[62519]: DEBUG nova.network.neutron [req-fd61d954-5ca8-48fe-9303-2aebd28d9595 req-7ac967e0-e4ad-4c17-a4eb-1abd057e7603 service nova] [instance: f0925a44-c15b-4415-99bc-1b2366292fe4] Refreshing network info cache for port 4406b619-4b6f-453e-a6e6-7f9f83d1b8ff {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1598.591462] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9400dad-aaba-468f-8808-449133c91107 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1598.626506] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1888c0f-686d-4054-b229-67121917a46b {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1598.633973] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c78857e9-72b0-457c-8b5b-4601bc8081c1 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1598.648794] env[62519]: DEBUG nova.compute.provider_tree [None req-b3ee3a4d-b7fb-4e51-adc1-0f63f32d64f4 tempest-FloatingIPsAssociationTestJSON-131832216 tempest-FloatingIPsAssociationTestJSON-131832216-project-member] Inventory has not changed in ProviderTree for provider: f8ca0d98-9158-4b85-ae0e-b106f966dd44 {{(pid=62519) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1598.657948] env[62519]: DEBUG oslo_vmware.api [None req-7f6d51ef-3fb2-4405-8ddb-ee0d7b872e58 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Task: {'id': task-1802477, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.075528} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1598.660735] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-7f6d51ef-3fb2-4405-8ddb-ee0d7b872e58 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] [instance: 302edcd3-bd6e-41da-b731-4d4c1bb5c3c1] Extended root virtual disk {{(pid=62519) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1598.660735] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d14f1db9-102b-4b79-9308-2f78ac9ce83b {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1598.681524] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-7f6d51ef-3fb2-4405-8ddb-ee0d7b872e58 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] [instance: 302edcd3-bd6e-41da-b731-4d4c1bb5c3c1] Reconfiguring VM instance instance-00000033 to attach disk [datastore1] 302edcd3-bd6e-41da-b731-4d4c1bb5c3c1/302edcd3-bd6e-41da-b731-4d4c1bb5c3c1.vmdk or device None with type sparse {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1598.683854] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-dc437a47-6e0d-46e1-a8e0-2eaa89498a00 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1598.700319] env[62519]: DEBUG nova.compute.manager [None req-eb756757-24ba-440f-8631-f23661662462 tempest-ImagesOneServerTestJSON-1959164190 tempest-ImagesOneServerTestJSON-1959164190-project-member] [instance: 4e3dee19-b99a-4257-88da-1b0531e2c0f9] Start destroying the instance on the hypervisor. {{(pid=62519) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3166}} [ 1598.700529] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-eb756757-24ba-440f-8631-f23661662462 tempest-ImagesOneServerTestJSON-1959164190 tempest-ImagesOneServerTestJSON-1959164190-project-member] [instance: 4e3dee19-b99a-4257-88da-1b0531e2c0f9] Destroying instance {{(pid=62519) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1598.701348] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-abac5bc1-357a-4922-ae42-f8f18438b1b3 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1598.711777] env[62519]: DEBUG oslo_vmware.api [None req-7f6d51ef-3fb2-4405-8ddb-ee0d7b872e58 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Waiting for the task: (returnval){ [ 1598.711777] env[62519]: value = "task-1802478" [ 1598.711777] env[62519]: _type = "Task" [ 1598.711777] env[62519]: } to complete. 
{{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1598.717330] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-eb756757-24ba-440f-8631-f23661662462 tempest-ImagesOneServerTestJSON-1959164190 tempest-ImagesOneServerTestJSON-1959164190-project-member] [instance: 4e3dee19-b99a-4257-88da-1b0531e2c0f9] Powering off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1598.717901] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d0217a94-2827-49fb-b6e7-cdc4363278f3 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1598.723304] env[62519]: DEBUG oslo_vmware.api [None req-7f6d51ef-3fb2-4405-8ddb-ee0d7b872e58 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Task: {'id': task-1802478, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1598.725525] env[62519]: DEBUG oslo_vmware.api [None req-eb756757-24ba-440f-8631-f23661662462 tempest-ImagesOneServerTestJSON-1959164190 tempest-ImagesOneServerTestJSON-1959164190-project-member] Waiting for the task: (returnval){ [ 1598.725525] env[62519]: value = "task-1802479" [ 1598.725525] env[62519]: _type = "Task" [ 1598.725525] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1598.735928] env[62519]: DEBUG oslo_vmware.api [None req-eb756757-24ba-440f-8631-f23661662462 tempest-ImagesOneServerTestJSON-1959164190 tempest-ImagesOneServerTestJSON-1959164190-project-member] Task: {'id': task-1802479, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1598.791955] env[62519]: DEBUG oslo_vmware.api [None req-a05ef864-7b2b-4646-a83f-596c8b3440a9 tempest-ServersWithSpecificFlavorTestJSON-2102225660 tempest-ServersWithSpecificFlavorTestJSON-2102225660-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]522f198b-a38a-4f55-565b-eb30f7a76b46, 'name': SearchDatastore_Task, 'duration_secs': 0.017794} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1598.792307] env[62519]: DEBUG oslo_concurrency.lockutils [None req-a05ef864-7b2b-4646-a83f-596c8b3440a9 tempest-ServersWithSpecificFlavorTestJSON-2102225660 tempest-ServersWithSpecificFlavorTestJSON-2102225660-project-member] Releasing lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1598.792535] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-a05ef864-7b2b-4646-a83f-596c8b3440a9 tempest-ServersWithSpecificFlavorTestJSON-2102225660 tempest-ServersWithSpecificFlavorTestJSON-2102225660-project-member] [instance: f0925a44-c15b-4415-99bc-1b2366292fe4] Processing image 15793716-f1d9-4a86-9030-717adf498693 {{(pid=62519) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1598.792764] env[62519]: DEBUG oslo_concurrency.lockutils [None req-a05ef864-7b2b-4646-a83f-596c8b3440a9 tempest-ServersWithSpecificFlavorTestJSON-2102225660 tempest-ServersWithSpecificFlavorTestJSON-2102225660-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1598.792909] env[62519]: DEBUG oslo_concurrency.lockutils [None req-a05ef864-7b2b-4646-a83f-596c8b3440a9 tempest-ServersWithSpecificFlavorTestJSON-2102225660 tempest-ServersWithSpecificFlavorTestJSON-2102225660-project-member] Acquired lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1598.793154] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-a05ef864-7b2b-4646-a83f-596c8b3440a9 tempest-ServersWithSpecificFlavorTestJSON-2102225660 tempest-ServersWithSpecificFlavorTestJSON-2102225660-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62519) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1598.793428] env[62519]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f9bc0c1e-2b28-4e11-b0ee-8cf5382b5ce5 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1598.802115] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-a05ef864-7b2b-4646-a83f-596c8b3440a9 tempest-ServersWithSpecificFlavorTestJSON-2102225660 tempest-ServersWithSpecificFlavorTestJSON-2102225660-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62519) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1598.802348] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-a05ef864-7b2b-4646-a83f-596c8b3440a9 tempest-ServersWithSpecificFlavorTestJSON-2102225660 tempest-ServersWithSpecificFlavorTestJSON-2102225660-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62519) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1598.803046] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b36690bf-852d-4ddd-a1c1-66ae378ea4a4 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1598.809209] env[62519]: DEBUG oslo_vmware.api [None req-a05ef864-7b2b-4646-a83f-596c8b3440a9 tempest-ServersWithSpecificFlavorTestJSON-2102225660 tempest-ServersWithSpecificFlavorTestJSON-2102225660-project-member] Waiting for the task: (returnval){ [ 1598.809209] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]52a245c2-a980-6905-39d7-10bf5f40167d" [ 1598.809209] env[62519]: _type = "Task" [ 1598.809209] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1598.826425] env[62519]: DEBUG oslo_vmware.api [None req-a05ef864-7b2b-4646-a83f-596c8b3440a9 tempest-ServersWithSpecificFlavorTestJSON-2102225660 tempest-ServersWithSpecificFlavorTestJSON-2102225660-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52a245c2-a980-6905-39d7-10bf5f40167d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1598.826970] env[62519]: INFO nova.compute.manager [None req-040512d1-e180-4bd4-99a9-22e550a80706 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] [instance: 80ef3fd4-b9ef-4fd2-a991-feec78a0c81d] Took 42.96 seconds to build instance. [ 1598.963180] env[62519]: DEBUG nova.compute.manager [None req-77454b93-3f15-4824-8029-76272dd44c6c tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] [instance: 8b20b91d-d4e7-4ec2-88ec-76b444a8d8a8] Start spawning the instance on the hypervisor. 
{{(pid=62519) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2647}} [ 1599.001651] env[62519]: DEBUG nova.virt.hardware [None req-77454b93-3f15-4824-8029-76272dd44c6c tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T07:56:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T07:55:55Z,direct_url=,disk_format='vmdk',id=15793716-f1d9-4a86-9030-717adf498693,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4069337684d348e0a1ab4eb6a1a2b14d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T07:55:56Z,virtual_size=,visibility=), allow threads: False {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1599.001841] env[62519]: DEBUG nova.virt.hardware [None req-77454b93-3f15-4824-8029-76272dd44c6c tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Flavor limits 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1599.001946] env[62519]: DEBUG nova.virt.hardware [None req-77454b93-3f15-4824-8029-76272dd44c6c tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Image limits 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1599.002149] env[62519]: DEBUG nova.virt.hardware [None req-77454b93-3f15-4824-8029-76272dd44c6c tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Flavor pref 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1599.002299] env[62519]: DEBUG nova.virt.hardware [None req-77454b93-3f15-4824-8029-76272dd44c6c tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Image pref 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1599.002447] env[62519]: DEBUG nova.virt.hardware [None req-77454b93-3f15-4824-8029-76272dd44c6c tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1599.002652] env[62519]: DEBUG nova.virt.hardware [None req-77454b93-3f15-4824-8029-76272dd44c6c tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1599.002805] env[62519]: DEBUG nova.virt.hardware [None req-77454b93-3f15-4824-8029-76272dd44c6c tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62519) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1599.003315] env[62519]: DEBUG nova.virt.hardware [None 
req-77454b93-3f15-4824-8029-76272dd44c6c tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Got 1 possible topologies {{(pid=62519) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1599.003315] env[62519]: DEBUG nova.virt.hardware [None req-77454b93-3f15-4824-8029-76272dd44c6c tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1599.003420] env[62519]: DEBUG nova.virt.hardware [None req-77454b93-3f15-4824-8029-76272dd44c6c tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1599.004304] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3344c363-c553-4af5-904a-c0973c1c5f9f {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1599.013403] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c27da14-efcf-48b1-944f-ec90a4d45472 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1599.155884] env[62519]: DEBUG nova.scheduler.client.report [None req-b3ee3a4d-b7fb-4e51-adc1-0f63f32d64f4 tempest-FloatingIPsAssociationTestJSON-131832216 tempest-FloatingIPsAssociationTestJSON-131832216-project-member] Inventory has not changed for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 157, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1599.211730] env[62519]: DEBUG oslo_concurrency.lockutils [None req-7769f008-9fdc-4c87-9eab-a05d54387360 tempest-SecurityGroupsTestJSON-38868017 tempest-SecurityGroupsTestJSON-38868017-project-member] Acquiring lock "ad374dd9-a92d-4b76-9609-7562346e05a8" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1599.212322] env[62519]: DEBUG oslo_concurrency.lockutils [None req-7769f008-9fdc-4c87-9eab-a05d54387360 tempest-SecurityGroupsTestJSON-38868017 tempest-SecurityGroupsTestJSON-38868017-project-member] Lock "ad374dd9-a92d-4b76-9609-7562346e05a8" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.001s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1599.212651] env[62519]: DEBUG oslo_concurrency.lockutils [None req-7769f008-9fdc-4c87-9eab-a05d54387360 tempest-SecurityGroupsTestJSON-38868017 tempest-SecurityGroupsTestJSON-38868017-project-member] Acquiring lock "ad374dd9-a92d-4b76-9609-7562346e05a8-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=62519) inner
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1599.212814] env[62519]: DEBUG oslo_concurrency.lockutils [None req-7769f008-9fdc-4c87-9eab-a05d54387360 tempest-SecurityGroupsTestJSON-38868017 tempest-SecurityGroupsTestJSON-38868017-project-member] Lock "ad374dd9-a92d-4b76-9609-7562346e05a8-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1599.213302] env[62519]: DEBUG oslo_concurrency.lockutils [None req-7769f008-9fdc-4c87-9eab-a05d54387360 tempest-SecurityGroupsTestJSON-38868017 tempest-SecurityGroupsTestJSON-38868017-project-member] Lock "ad374dd9-a92d-4b76-9609-7562346e05a8-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1599.215204] env[62519]: INFO nova.compute.manager [None req-7769f008-9fdc-4c87-9eab-a05d54387360 tempest-SecurityGroupsTestJSON-38868017 tempest-SecurityGroupsTestJSON-38868017-project-member] [instance: ad374dd9-a92d-4b76-9609-7562346e05a8] Terminating instance [ 1599.228426] env[62519]: DEBUG oslo_vmware.api [None req-7f6d51ef-3fb2-4405-8ddb-ee0d7b872e58 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Task: {'id': task-1802478, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1599.239416] env[62519]: DEBUG oslo_vmware.api [None req-eb756757-24ba-440f-8631-f23661662462 tempest-ImagesOneServerTestJSON-1959164190 tempest-ImagesOneServerTestJSON-1959164190-project-member] Task: {'id': task-1802479, 'name': PowerOffVM_Task, 'duration_secs': 0.346536} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1599.239695] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-eb756757-24ba-440f-8631-f23661662462 tempest-ImagesOneServerTestJSON-1959164190 tempest-ImagesOneServerTestJSON-1959164190-project-member] [instance: 4e3dee19-b99a-4257-88da-1b0531e2c0f9] Powered off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1599.239862] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-eb756757-24ba-440f-8631-f23661662462 tempest-ImagesOneServerTestJSON-1959164190 tempest-ImagesOneServerTestJSON-1959164190-project-member] [instance: 4e3dee19-b99a-4257-88da-1b0531e2c0f9] Unregistering the VM {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1599.240148] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-abcd961d-0313-43b7-a9c3-f7b5b93a10e1 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1599.323828] env[62519]: DEBUG oslo_vmware.api [None req-a05ef864-7b2b-4646-a83f-596c8b3440a9 tempest-ServersWithSpecificFlavorTestJSON-2102225660 tempest-ServersWithSpecificFlavorTestJSON-2102225660-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52a245c2-a980-6905-39d7-10bf5f40167d, 'name': SearchDatastore_Task, 'duration_secs': 0.012801} completed successfully.
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1599.324694] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-206868be-7b87-4632-8351-dfbf16bf85e7 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1599.333386] env[62519]: DEBUG oslo_vmware.api [None req-a05ef864-7b2b-4646-a83f-596c8b3440a9 tempest-ServersWithSpecificFlavorTestJSON-2102225660 tempest-ServersWithSpecificFlavorTestJSON-2102225660-project-member] Waiting for the task: (returnval){ [ 1599.333386] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]5268096c-c557-181c-77a0-2a5847de66bf" [ 1599.333386] env[62519]: _type = "Task" [ 1599.333386] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1599.345039] env[62519]: DEBUG oslo_vmware.api [None req-a05ef864-7b2b-4646-a83f-596c8b3440a9 tempest-ServersWithSpecificFlavorTestJSON-2102225660 tempest-ServersWithSpecificFlavorTestJSON-2102225660-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]5268096c-c557-181c-77a0-2a5847de66bf, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1599.367744] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-eb756757-24ba-440f-8631-f23661662462 tempest-ImagesOneServerTestJSON-1959164190 tempest-ImagesOneServerTestJSON-1959164190-project-member] [instance: 4e3dee19-b99a-4257-88da-1b0531e2c0f9] Unregistered the VM {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1599.368093] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-eb756757-24ba-440f-8631-f23661662462 tempest-ImagesOneServerTestJSON-1959164190 tempest-ImagesOneServerTestJSON-1959164190-project-member] [instance: 4e3dee19-b99a-4257-88da-1b0531e2c0f9] Deleting contents of the VM from datastore datastore1 {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1599.368310] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-eb756757-24ba-440f-8631-f23661662462 tempest-ImagesOneServerTestJSON-1959164190 tempest-ImagesOneServerTestJSON-1959164190-project-member] Deleting the datastore file [datastore1] 4e3dee19-b99a-4257-88da-1b0531e2c0f9 {{(pid=62519) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1599.368619] env[62519]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a6ec3812-38c3-4e84-83f1-f9d3d3abe199 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1599.377575] env[62519]: DEBUG oslo_vmware.api [None req-eb756757-24ba-440f-8631-f23661662462 tempest-ImagesOneServerTestJSON-1959164190 tempest-ImagesOneServerTestJSON-1959164190-project-member] Waiting for the task: (returnval){ [ 1599.377575] env[62519]: value = "task-1802481" [ 1599.377575] env[62519]: _type = "Task" [ 1599.377575] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1599.397745] env[62519]: DEBUG oslo_vmware.api [None req-eb756757-24ba-440f-8631-f23661662462 tempest-ImagesOneServerTestJSON-1959164190 tempest-ImagesOneServerTestJSON-1959164190-project-member] Task: {'id': task-1802481, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1599.508622] env[62519]: DEBUG nova.network.neutron [req-fd61d954-5ca8-48fe-9303-2aebd28d9595 req-7ac967e0-e4ad-4c17-a4eb-1abd057e7603 service nova] [instance: f0925a44-c15b-4415-99bc-1b2366292fe4] Updated VIF entry in instance network info cache for port 4406b619-4b6f-453e-a6e6-7f9f83d1b8ff. {{(pid=62519) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1599.508622] env[62519]: DEBUG nova.network.neutron [req-fd61d954-5ca8-48fe-9303-2aebd28d9595 req-7ac967e0-e4ad-4c17-a4eb-1abd057e7603 service nova] [instance: f0925a44-c15b-4415-99bc-1b2366292fe4] Updating instance_info_cache with network_info: [{"id": "4406b619-4b6f-453e-a6e6-7f9f83d1b8ff", "address": "fa:16:3e:e2:5e:d4", "network": {"id": "87dc972b-a043-4db2-b67a-2efe6d40d1aa", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-1993987139-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8929cfd4892c4e399062483665500dd9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6b8137fc-f23d-49b1-b19c-3123a5588f34", "external-id": "nsx-vlan-transportzone-709", "segmentation_id": 709, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4406b619-4b", "ovs_interfaceid": "4406b619-4b6f-453e-a6e6-7f9f83d1b8ff", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1599.663012] env[62519]: DEBUG oslo_concurrency.lockutils [None req-b3ee3a4d-b7fb-4e51-adc1-0f63f32d64f4 tempest-FloatingIPsAssociationTestJSON-131832216 tempest-FloatingIPsAssociationTestJSON-131832216-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.744s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1599.663579] env[62519]: DEBUG nova.compute.manager [None req-b3ee3a4d-b7fb-4e51-adc1-0f63f32d64f4 tempest-FloatingIPsAssociationTestJSON-131832216 tempest-FloatingIPsAssociationTestJSON-131832216-project-member] [instance: 9ac3344d-219a-487f-b83f-96c17cd86dad] Start building networks asynchronously for instance. {{(pid=62519) _build_resources /opt/stack/nova/nova/compute/manager.py:2838}} [ 1599.670540] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 31.044s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1599.723312] env[62519]: DEBUG nova.compute.manager [None req-7769f008-9fdc-4c87-9eab-a05d54387360 tempest-SecurityGroupsTestJSON-38868017 tempest-SecurityGroupsTestJSON-38868017-project-member] [instance: ad374dd9-a92d-4b76-9609-7562346e05a8] Start destroying the instance on the hypervisor. 
{{(pid=62519) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3166}} [ 1599.723859] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-7769f008-9fdc-4c87-9eab-a05d54387360 tempest-SecurityGroupsTestJSON-38868017 tempest-SecurityGroupsTestJSON-38868017-project-member] [instance: ad374dd9-a92d-4b76-9609-7562346e05a8] Destroying instance {{(pid=62519) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1599.724619] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3a15b1f-f678-44f3-a5c6-236a69c074a2 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1599.739019] env[62519]: DEBUG oslo_vmware.api [None req-7f6d51ef-3fb2-4405-8ddb-ee0d7b872e58 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Task: {'id': task-1802478, 'name': ReconfigVM_Task, 'duration_secs': 0.704143} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1599.739019] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-7769f008-9fdc-4c87-9eab-a05d54387360 tempest-SecurityGroupsTestJSON-38868017 tempest-SecurityGroupsTestJSON-38868017-project-member] [instance: ad374dd9-a92d-4b76-9609-7562346e05a8] Powering off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1599.739019] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-7f6d51ef-3fb2-4405-8ddb-ee0d7b872e58 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] [instance: 302edcd3-bd6e-41da-b731-4d4c1bb5c3c1] Reconfigured VM instance instance-00000033 to attach disk [datastore1] 302edcd3-bd6e-41da-b731-4d4c1bb5c3c1/302edcd3-bd6e-41da-b731-4d4c1bb5c3c1.vmdk or device None with type sparse {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1599.739019] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-0066f362-e382-4b31-aae8-980c45fbc6cc {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1599.739375] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-c4afc6c8-105b-43b0-953a-439e5f16ed95 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1599.747888] env[62519]: DEBUG oslo_vmware.api [None req-7769f008-9fdc-4c87-9eab-a05d54387360 tempest-SecurityGroupsTestJSON-38868017 tempest-SecurityGroupsTestJSON-38868017-project-member] Waiting for the task: (returnval){ [ 1599.747888] env[62519]: value = "task-1802482" [ 1599.747888] env[62519]: _type = "Task" [ 1599.747888] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1599.749347] env[62519]: DEBUG oslo_vmware.api [None req-7f6d51ef-3fb2-4405-8ddb-ee0d7b872e58 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Waiting for the task: (returnval){ [ 1599.749347] env[62519]: value = "task-1802483" [ 1599.749347] env[62519]: _type = "Task" [ 1599.749347] env[62519]: } to complete. 
{{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1599.765459] env[62519]: DEBUG oslo_vmware.api [None req-7f6d51ef-3fb2-4405-8ddb-ee0d7b872e58 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Task: {'id': task-1802483, 'name': Rename_Task} progress is 5%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1599.769274] env[62519]: DEBUG oslo_vmware.api [None req-7769f008-9fdc-4c87-9eab-a05d54387360 tempest-SecurityGroupsTestJSON-38868017 tempest-SecurityGroupsTestJSON-38868017-project-member] Task: {'id': task-1802482, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1599.852636] env[62519]: DEBUG oslo_vmware.api [None req-a05ef864-7b2b-4646-a83f-596c8b3440a9 tempest-ServersWithSpecificFlavorTestJSON-2102225660 tempest-ServersWithSpecificFlavorTestJSON-2102225660-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]5268096c-c557-181c-77a0-2a5847de66bf, 'name': SearchDatastore_Task, 'duration_secs': 0.01297} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1599.852636] env[62519]: DEBUG oslo_concurrency.lockutils [None req-a05ef864-7b2b-4646-a83f-596c8b3440a9 tempest-ServersWithSpecificFlavorTestJSON-2102225660 tempest-ServersWithSpecificFlavorTestJSON-2102225660-project-member] Releasing lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1599.852896] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-a05ef864-7b2b-4646-a83f-596c8b3440a9 tempest-ServersWithSpecificFlavorTestJSON-2102225660 tempest-ServersWithSpecificFlavorTestJSON-2102225660-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk to [datastore1] f0925a44-c15b-4415-99bc-1b2366292fe4/f0925a44-c15b-4415-99bc-1b2366292fe4.vmdk {{(pid=62519) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1599.853232] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a9a4e110-b38f-413b-b8c8-9bb27971853c {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1599.863172] env[62519]: DEBUG oslo_vmware.api [None req-a05ef864-7b2b-4646-a83f-596c8b3440a9 tempest-ServersWithSpecificFlavorTestJSON-2102225660 tempest-ServersWithSpecificFlavorTestJSON-2102225660-project-member] Waiting for the task: (returnval){ [ 1599.863172] env[62519]: value = "task-1802484" [ 1599.863172] env[62519]: _type = "Task" [ 1599.863172] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1599.876940] env[62519]: DEBUG oslo_vmware.api [None req-a05ef864-7b2b-4646-a83f-596c8b3440a9 tempest-ServersWithSpecificFlavorTestJSON-2102225660 tempest-ServersWithSpecificFlavorTestJSON-2102225660-project-member] Task: {'id': task-1802484, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1599.890793] env[62519]: DEBUG oslo_vmware.api [None req-eb756757-24ba-440f-8631-f23661662462 tempest-ImagesOneServerTestJSON-1959164190 tempest-ImagesOneServerTestJSON-1959164190-project-member] Task: {'id': task-1802481, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.460132} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1599.891123] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-eb756757-24ba-440f-8631-f23661662462 tempest-ImagesOneServerTestJSON-1959164190 tempest-ImagesOneServerTestJSON-1959164190-project-member] Deleted the datastore file {{(pid=62519) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1599.891357] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-eb756757-24ba-440f-8631-f23661662462 tempest-ImagesOneServerTestJSON-1959164190 tempest-ImagesOneServerTestJSON-1959164190-project-member] [instance: 4e3dee19-b99a-4257-88da-1b0531e2c0f9] Deleted contents of the VM from datastore datastore1 {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1599.891576] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-eb756757-24ba-440f-8631-f23661662462 tempest-ImagesOneServerTestJSON-1959164190 tempest-ImagesOneServerTestJSON-1959164190-project-member] [instance: 4e3dee19-b99a-4257-88da-1b0531e2c0f9] Instance destroyed {{(pid=62519) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1599.891785] env[62519]: INFO nova.compute.manager [None req-eb756757-24ba-440f-8631-f23661662462 tempest-ImagesOneServerTestJSON-1959164190 tempest-ImagesOneServerTestJSON-1959164190-project-member] [instance: 4e3dee19-b99a-4257-88da-1b0531e2c0f9] Took 1.19 seconds to destroy the instance on the hypervisor. [ 1599.892087] env[62519]: DEBUG oslo.service.loopingcall [None req-eb756757-24ba-440f-8631-f23661662462 tempest-ImagesOneServerTestJSON-1959164190 tempest-ImagesOneServerTestJSON-1959164190-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return.
{{(pid=62519) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1599.892317] env[62519]: DEBUG nova.compute.manager [-] [instance: 4e3dee19-b99a-4257-88da-1b0531e2c0f9] Deallocating network for instance {{(pid=62519) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2297}} [ 1599.892515] env[62519]: DEBUG nova.network.neutron [-] [instance: 4e3dee19-b99a-4257-88da-1b0531e2c0f9] deallocate_for_instance() {{(pid=62519) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1600.010965] env[62519]: DEBUG oslo_concurrency.lockutils [req-fd61d954-5ca8-48fe-9303-2aebd28d9595 req-7ac967e0-e4ad-4c17-a4eb-1abd057e7603 service nova] Releasing lock "refresh_cache-f0925a44-c15b-4415-99bc-1b2366292fe4" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1600.011264] env[62519]: DEBUG nova.compute.manager [req-fd61d954-5ca8-48fe-9303-2aebd28d9595 req-7ac967e0-e4ad-4c17-a4eb-1abd057e7603 service nova] [instance: ad374dd9-a92d-4b76-9609-7562346e05a8] Received event network-changed-8d20f83b-f706-40ea-bba7-461aba73113f {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1600.011438] env[62519]: DEBUG nova.compute.manager [req-fd61d954-5ca8-48fe-9303-2aebd28d9595 req-7ac967e0-e4ad-4c17-a4eb-1abd057e7603 service nova] [instance: ad374dd9-a92d-4b76-9609-7562346e05a8] Refreshing instance network info cache due to event network-changed-8d20f83b-f706-40ea-bba7-461aba73113f. {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11628}} [ 1600.011651] env[62519]: DEBUG oslo_concurrency.lockutils [req-fd61d954-5ca8-48fe-9303-2aebd28d9595 req-7ac967e0-e4ad-4c17-a4eb-1abd057e7603 service nova] Acquiring lock "refresh_cache-ad374dd9-a92d-4b76-9609-7562346e05a8" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1600.011872] env[62519]: DEBUG oslo_concurrency.lockutils [req-fd61d954-5ca8-48fe-9303-2aebd28d9595 req-7ac967e0-e4ad-4c17-a4eb-1abd057e7603 service nova] Acquired lock "refresh_cache-ad374dd9-a92d-4b76-9609-7562346e05a8" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1600.011933] env[62519]: DEBUG nova.network.neutron [req-fd61d954-5ca8-48fe-9303-2aebd28d9595 req-7ac967e0-e4ad-4c17-a4eb-1abd057e7603 service nova] [instance: ad374dd9-a92d-4b76-9609-7562346e05a8] Refreshing network info cache for port 8d20f83b-f706-40ea-bba7-461aba73113f {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1600.163757] env[62519]: DEBUG nova.network.neutron [None req-77454b93-3f15-4824-8029-76272dd44c6c tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] [instance: 8b20b91d-d4e7-4ec2-88ec-76b444a8d8a8] Successfully updated port: c3e2a054-4826-4bd6-8c9e-74005e7912e4 {{(pid=62519) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1600.179410] env[62519]: DEBUG nova.compute.utils [None req-b3ee3a4d-b7fb-4e51-adc1-0f63f32d64f4 tempest-FloatingIPsAssociationTestJSON-131832216 tempest-FloatingIPsAssociationTestJSON-131832216-project-member] Using /dev/sd instead of None {{(pid=62519) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1600.194357] env[62519]: DEBUG nova.compute.manager [None req-b3ee3a4d-b7fb-4e51-adc1-0f63f32d64f4 tempest-FloatingIPsAssociationTestJSON-131832216 
tempest-FloatingIPsAssociationTestJSON-131832216-project-member] [instance: 9ac3344d-219a-487f-b83f-96c17cd86dad] Start building block device mappings for instance. {{(pid=62519) _build_resources /opt/stack/nova/nova/compute/manager.py:2873}} [ 1600.201023] env[62519]: DEBUG nova.compute.manager [None req-b3ee3a4d-b7fb-4e51-adc1-0f63f32d64f4 tempest-FloatingIPsAssociationTestJSON-131832216 tempest-FloatingIPsAssociationTestJSON-131832216-project-member] [instance: 9ac3344d-219a-487f-b83f-96c17cd86dad] Allocating IP information in the background. {{(pid=62519) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1989}} [ 1600.201023] env[62519]: DEBUG nova.network.neutron [None req-b3ee3a4d-b7fb-4e51-adc1-0f63f32d64f4 tempest-FloatingIPsAssociationTestJSON-131832216 tempest-FloatingIPsAssociationTestJSON-131832216-project-member] [instance: 9ac3344d-219a-487f-b83f-96c17cd86dad] allocate_for_instance() {{(pid=62519) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1600.261695] env[62519]: DEBUG oslo_vmware.api [None req-7769f008-9fdc-4c87-9eab-a05d54387360 tempest-SecurityGroupsTestJSON-38868017 tempest-SecurityGroupsTestJSON-38868017-project-member] Task: {'id': task-1802482, 'name': PowerOffVM_Task, 'duration_secs': 0.226493} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1600.267028] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-7769f008-9fdc-4c87-9eab-a05d54387360 tempest-SecurityGroupsTestJSON-38868017 tempest-SecurityGroupsTestJSON-38868017-project-member] [instance: ad374dd9-a92d-4b76-9609-7562346e05a8] Powered off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1600.267028] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-7769f008-9fdc-4c87-9eab-a05d54387360 tempest-SecurityGroupsTestJSON-38868017 tempest-SecurityGroupsTestJSON-38868017-project-member] [instance: ad374dd9-a92d-4b76-9609-7562346e05a8] Unregistering the VM {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1600.267028] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-279b872c-b1c0-4413-95a6-b368f546390b {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1600.270886] env[62519]: DEBUG oslo_vmware.api [None req-7f6d51ef-3fb2-4405-8ddb-ee0d7b872e58 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Task: {'id': task-1802483, 'name': Rename_Task, 'duration_secs': 0.202632} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1600.273022] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-7f6d51ef-3fb2-4405-8ddb-ee0d7b872e58 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] [instance: 302edcd3-bd6e-41da-b731-4d4c1bb5c3c1] Powering on the VM {{(pid=62519) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1600.273022] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-607a08b0-1079-4c27-8006-a2c914903546 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1600.281878] env[62519]: DEBUG oslo_vmware.api [None req-7f6d51ef-3fb2-4405-8ddb-ee0d7b872e58 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Waiting for the task: (returnval){ [ 1600.281878] env[62519]: value = "task-1802486" [ 1600.281878] env[62519]: _type = "Task" [ 1600.281878] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1600.292169] env[62519]: DEBUG nova.policy [None req-b3ee3a4d-b7fb-4e51-adc1-0f63f32d64f4 tempest-FloatingIPsAssociationTestJSON-131832216 tempest-FloatingIPsAssociationTestJSON-131832216-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '95893859ec5841cf9d739a73be88f3fd', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '08ccb6256cb446e1837e04580892a31a', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62519) authorize /opt/stack/nova/nova/policy.py:192}} [ 1600.301909] env[62519]: DEBUG oslo_vmware.api [None req-7f6d51ef-3fb2-4405-8ddb-ee0d7b872e58 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Task: {'id': task-1802486, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1600.341522] env[62519]: DEBUG oslo_concurrency.lockutils [None req-040512d1-e180-4bd4-99a9-22e550a80706 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Lock "80ef3fd4-b9ef-4fd2-a991-feec78a0c81d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 87.213s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1600.380757] env[62519]: DEBUG oslo_vmware.api [None req-a05ef864-7b2b-4646-a83f-596c8b3440a9 tempest-ServersWithSpecificFlavorTestJSON-2102225660 tempest-ServersWithSpecificFlavorTestJSON-2102225660-project-member] Task: {'id': task-1802484, 'name': CopyVirtualDisk_Task} progress is 77%.
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1600.383158] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-7769f008-9fdc-4c87-9eab-a05d54387360 tempest-SecurityGroupsTestJSON-38868017 tempest-SecurityGroupsTestJSON-38868017-project-member] [instance: ad374dd9-a92d-4b76-9609-7562346e05a8] Unregistered the VM {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1600.384481] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-7769f008-9fdc-4c87-9eab-a05d54387360 tempest-SecurityGroupsTestJSON-38868017 tempest-SecurityGroupsTestJSON-38868017-project-member] [instance: ad374dd9-a92d-4b76-9609-7562346e05a8] Deleting contents of the VM from datastore datastore1 {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1600.384481] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-7769f008-9fdc-4c87-9eab-a05d54387360 tempest-SecurityGroupsTestJSON-38868017 tempest-SecurityGroupsTestJSON-38868017-project-member] Deleting the datastore file [datastore1] ad374dd9-a92d-4b76-9609-7562346e05a8 {{(pid=62519) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1600.385241] env[62519]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b3088d55-1606-4357-a30b-222941fd59eb {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1600.395039] env[62519]: DEBUG oslo_vmware.api [None req-7769f008-9fdc-4c87-9eab-a05d54387360 tempest-SecurityGroupsTestJSON-38868017 tempest-SecurityGroupsTestJSON-38868017-project-member] Waiting for the task: (returnval){ [ 1600.395039] env[62519]: value = "task-1802487" [ 1600.395039] env[62519]: _type = "Task" [ 1600.395039] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1600.406406] env[62519]: DEBUG oslo_vmware.api [None req-7769f008-9fdc-4c87-9eab-a05d54387360 tempest-SecurityGroupsTestJSON-38868017 tempest-SecurityGroupsTestJSON-38868017-project-member] Task: {'id': task-1802487, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1600.667197] env[62519]: DEBUG oslo_concurrency.lockutils [None req-77454b93-3f15-4824-8029-76272dd44c6c tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Acquiring lock "refresh_cache-8b20b91d-d4e7-4ec2-88ec-76b444a8d8a8" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1600.667197] env[62519]: DEBUG oslo_concurrency.lockutils [None req-77454b93-3f15-4824-8029-76272dd44c6c tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Acquired lock "refresh_cache-8b20b91d-d4e7-4ec2-88ec-76b444a8d8a8" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1600.667197] env[62519]: DEBUG nova.network.neutron [None req-77454b93-3f15-4824-8029-76272dd44c6c tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] [instance: 8b20b91d-d4e7-4ec2-88ec-76b444a8d8a8] Building network info cache for instance {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1600.702836] env[62519]: INFO nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] [instance: a1551278-a306-4534-8d8d-3b3a003dde04] Updating resource usage from migration 33e7914a-348b-43c3-a3c3-7f65f496ad5a [ 1600.741413] env[62519]: DEBUG nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Instance 11d4a010-959f-4f53-94dc-7499007612ad actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62519) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1600.741499] env[62519]: DEBUG nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Instance 34d2991e-b6df-473d-8994-e45ff57ef131 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62519) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1600.742597] env[62519]: DEBUG nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Instance 099112ae-569b-4853-bc47-b0b8b97d2525 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62519) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1600.742597] env[62519]: DEBUG nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Instance c61c893f-826b-4874-b253-de6fbffa9e5a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62519) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1600.742597] env[62519]: DEBUG nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Instance fe350d30-6fbd-4813-9634-ed05984fecfd actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. 
{{(pid=62519) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1600.742597] env[62519]: DEBUG nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Instance 4a29bff8-050a-4ad5-9d06-3a59c40b97ee actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62519) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1600.742597] env[62519]: DEBUG nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Instance f59a31e4-7fb9-4de7-b35f-da811a305f85 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62519) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1600.742597] env[62519]: DEBUG nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Instance 1d4b14d3-8832-457e-aaed-462236555f57 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62519) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1600.742880] env[62519]: WARNING nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Instance 2b68e95a-1d93-43ee-b0a6-996c9042f5c7 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1600.742880] env[62519]: DEBUG nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Instance a59be5e6-2316-4766-933a-4d01dfe4fec1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62519) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1600.742953] env[62519]: DEBUG nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Instance 09eefc1a-011b-4d2c-ab75-a1fcee740907 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62519) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1600.746379] env[62519]: DEBUG nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Instance 8070aa59-3547-460a-b914-0e84620023d0 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62519) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1600.746379] env[62519]: DEBUG nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Instance 27f9e890-4733-43aa-9bf1-351d42d75418 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62519) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1600.746379] env[62519]: DEBUG nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Instance 4e3dee19-b99a-4257-88da-1b0531e2c0f9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62519) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1600.746379] env[62519]: WARNING nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Instance 24cb49c8-b2ef-4ede-aea6-6e34081beca1 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1600.746379] env[62519]: WARNING nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Instance d8780c40-0099-4ccc-84ae-72fbb14fa1ee is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1600.746379] env[62519]: WARNING nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Instance 1b4efc0f-474f-4a32-b199-c14f27b183e2 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1600.746379] env[62519]: DEBUG nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Instance 765cf18e-53a0-4cc6-ad0e-337a6f68915c actively managed on this compute host and has allocations in placement: {'resources': {'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62519) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1600.746379] env[62519]: DEBUG nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Instance ad374dd9-a92d-4b76-9609-7562346e05a8 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62519) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1600.746379] env[62519]: DEBUG nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Instance dac173ff-1807-405f-a59c-bb2efef62a0c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62519) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1600.746379] env[62519]: DEBUG nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Instance 80ef3fd4-b9ef-4fd2-a991-feec78a0c81d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62519) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1600.746379] env[62519]: DEBUG nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Instance f0925a44-c15b-4415-99bc-1b2366292fe4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 2, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62519) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1600.747030] env[62519]: DEBUG nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Instance 302edcd3-bd6e-41da-b731-4d4c1bb5c3c1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62519) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1600.747030] env[62519]: DEBUG nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Instance 8b20b91d-d4e7-4ec2-88ec-76b444a8d8a8 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62519) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1600.747030] env[62519]: DEBUG nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Instance 9ac3344d-219a-487f-b83f-96c17cd86dad actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62519) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1600.793745] env[62519]: DEBUG oslo_vmware.api [None req-7f6d51ef-3fb2-4405-8ddb-ee0d7b872e58 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Task: {'id': task-1802486, 'name': PowerOnVM_Task} progress is 100%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1600.839721] env[62519]: DEBUG nova.network.neutron [None req-b3ee3a4d-b7fb-4e51-adc1-0f63f32d64f4 tempest-FloatingIPsAssociationTestJSON-131832216 tempest-FloatingIPsAssociationTestJSON-131832216-project-member] [instance: 9ac3344d-219a-487f-b83f-96c17cd86dad] Successfully created port: 883790fe-b8ee-4a72-99ca-e7d80c7468f3 {{(pid=62519) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1600.846652] env[62519]: DEBUG nova.compute.manager [None req-6e183849-567e-4bbd-988c-d4ca57b40836 tempest-VolumesAdminNegativeTest-694306887 tempest-VolumesAdminNegativeTest-694306887-project-member] [instance: ed716912-752e-4c6d-b6c6-fb349668fa93] Starting instance... {{(pid=62519) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2441}} [ 1600.867721] env[62519]: DEBUG nova.network.neutron [-] [instance: 4e3dee19-b99a-4257-88da-1b0531e2c0f9] Updating instance_info_cache with network_info: [] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1600.881035] env[62519]: DEBUG oslo_vmware.api [None req-a05ef864-7b2b-4646-a83f-596c8b3440a9 tempest-ServersWithSpecificFlavorTestJSON-2102225660 tempest-ServersWithSpecificFlavorTestJSON-2102225660-project-member] Task: {'id': task-1802484, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.552456} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1600.881035] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-a05ef864-7b2b-4646-a83f-596c8b3440a9 tempest-ServersWithSpecificFlavorTestJSON-2102225660 tempest-ServersWithSpecificFlavorTestJSON-2102225660-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk to [datastore1] f0925a44-c15b-4415-99bc-1b2366292fe4/f0925a44-c15b-4415-99bc-1b2366292fe4.vmdk {{(pid=62519) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1600.881035] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-a05ef864-7b2b-4646-a83f-596c8b3440a9 tempest-ServersWithSpecificFlavorTestJSON-2102225660 tempest-ServersWithSpecificFlavorTestJSON-2102225660-project-member] [instance: f0925a44-c15b-4415-99bc-1b2366292fe4] Extending root virtual disk to 1048576 {{(pid=62519) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1600.881035] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-382e8e71-80cb-4a2e-831b-4241ad7c1cab {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1600.888941] env[62519]: DEBUG oslo_vmware.api [None req-a05ef864-7b2b-4646-a83f-596c8b3440a9 tempest-ServersWithSpecificFlavorTestJSON-2102225660 tempest-ServersWithSpecificFlavorTestJSON-2102225660-project-member] Waiting for the task: (returnval){ [ 1600.888941] env[62519]: value = "task-1802488" [ 1600.888941] env[62519]: _type = "Task" [ 1600.888941] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1600.908884] env[62519]: DEBUG oslo_vmware.api [None req-a05ef864-7b2b-4646-a83f-596c8b3440a9 tempest-ServersWithSpecificFlavorTestJSON-2102225660 tempest-ServersWithSpecificFlavorTestJSON-2102225660-project-member] Task: {'id': task-1802488, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1600.915106] env[62519]: DEBUG oslo_vmware.api [None req-7769f008-9fdc-4c87-9eab-a05d54387360 tempest-SecurityGroupsTestJSON-38868017 tempest-SecurityGroupsTestJSON-38868017-project-member] Task: {'id': task-1802487, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.209204} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1600.915106] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-7769f008-9fdc-4c87-9eab-a05d54387360 tempest-SecurityGroupsTestJSON-38868017 tempest-SecurityGroupsTestJSON-38868017-project-member] Deleted the datastore file {{(pid=62519) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1600.915106] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-7769f008-9fdc-4c87-9eab-a05d54387360 tempest-SecurityGroupsTestJSON-38868017 tempest-SecurityGroupsTestJSON-38868017-project-member] [instance: ad374dd9-a92d-4b76-9609-7562346e05a8] Deleted contents of the VM from datastore datastore1 {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1600.915106] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-7769f008-9fdc-4c87-9eab-a05d54387360 tempest-SecurityGroupsTestJSON-38868017 tempest-SecurityGroupsTestJSON-38868017-project-member] [instance: ad374dd9-a92d-4b76-9609-7562346e05a8] Instance destroyed {{(pid=62519) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1600.915106] env[62519]: INFO nova.compute.manager [None req-7769f008-9fdc-4c87-9eab-a05d54387360 tempest-SecurityGroupsTestJSON-38868017 tempest-SecurityGroupsTestJSON-38868017-project-member] [instance: ad374dd9-a92d-4b76-9609-7562346e05a8] Took 1.19 seconds to destroy the instance on the hypervisor. [ 1600.915490] env[62519]: DEBUG oslo.service.loopingcall [None req-7769f008-9fdc-4c87-9eab-a05d54387360 tempest-SecurityGroupsTestJSON-38868017 tempest-SecurityGroupsTestJSON-38868017-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=62519) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1600.915490] env[62519]: DEBUG nova.compute.manager [-] [instance: ad374dd9-a92d-4b76-9609-7562346e05a8] Deallocating network for instance {{(pid=62519) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2297}} [ 1600.915573] env[62519]: DEBUG nova.network.neutron [-] [instance: ad374dd9-a92d-4b76-9609-7562346e05a8] deallocate_for_instance() {{(pid=62519) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1600.938127] env[62519]: DEBUG oslo_concurrency.lockutils [None req-eec27fb3-e6e1-4967-a642-45d429e05ad6 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Acquiring lock "80ef3fd4-b9ef-4fd2-a991-feec78a0c81d" by "nova.compute.manager.ComputeManager.stop_instance.<locals>.do_stop_instance" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1600.938437] env[62519]: DEBUG oslo_concurrency.lockutils [None req-eec27fb3-e6e1-4967-a642-45d429e05ad6 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Lock "80ef3fd4-b9ef-4fd2-a991-feec78a0c81d" acquired by "nova.compute.manager.ComputeManager.stop_instance.<locals>.do_stop_instance" :: waited 0.001s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1600.938659] env[62519]: DEBUG nova.compute.manager [None req-eec27fb3-e6e1-4967-a642-45d429e05ad6 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] [instance: 80ef3fd4-b9ef-4fd2-a991-feec78a0c81d] Checking state {{(pid=62519) _get_power_state
/opt/stack/nova/nova/compute/manager.py:1799}} [ 1600.939600] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1bbc970f-3e40-4452-8ed0-b9c4dbccdd59 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1600.951055] env[62519]: DEBUG nova.compute.manager [None req-eec27fb3-e6e1-4967-a642-45d429e05ad6 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] [instance: 80ef3fd4-b9ef-4fd2-a991-feec78a0c81d] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=62519) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3408}} [ 1600.951679] env[62519]: DEBUG nova.objects.instance [None req-eec27fb3-e6e1-4967-a642-45d429e05ad6 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Lazy-loading 'flavor' on Instance uuid 80ef3fd4-b9ef-4fd2-a991-feec78a0c81d {{(pid=62519) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1601.159738] env[62519]: DEBUG nova.network.neutron [req-fd61d954-5ca8-48fe-9303-2aebd28d9595 req-7ac967e0-e4ad-4c17-a4eb-1abd057e7603 service nova] [instance: ad374dd9-a92d-4b76-9609-7562346e05a8] Updated VIF entry in instance network info cache for port 8d20f83b-f706-40ea-bba7-461aba73113f. {{(pid=62519) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1601.160158] env[62519]: DEBUG nova.network.neutron [req-fd61d954-5ca8-48fe-9303-2aebd28d9595 req-7ac967e0-e4ad-4c17-a4eb-1abd057e7603 service nova] [instance: ad374dd9-a92d-4b76-9609-7562346e05a8] Updating instance_info_cache with network_info: [{"id": "8d20f83b-f706-40ea-bba7-461aba73113f", "address": "fa:16:3e:c3:bc:6c", "network": {"id": "9dc148fb-a4d7-47bd-b004-56a2552812eb", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-1607343584-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d044d408a8674580b0f5cd52ca6e756d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1f762954-6ca5-4da5-bf0a-5d31c51ec570", "external-id": "nsx-vlan-transportzone-930", "segmentation_id": 930, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8d20f83b-f7", "ovs_interfaceid": "8d20f83b-f706-40ea-bba7-461aba73113f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1601.215539] env[62519]: DEBUG nova.compute.manager [None req-b3ee3a4d-b7fb-4e51-adc1-0f63f32d64f4 tempest-FloatingIPsAssociationTestJSON-131832216 tempest-FloatingIPsAssociationTestJSON-131832216-project-member] [instance: 9ac3344d-219a-487f-b83f-96c17cd86dad] Start spawning the instance on the hypervisor. 
{{(pid=62519) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2647}} [ 1601.241105] env[62519]: DEBUG nova.network.neutron [None req-77454b93-3f15-4824-8029-76272dd44c6c tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] [instance: 8b20b91d-d4e7-4ec2-88ec-76b444a8d8a8] Instance cache missing network info. {{(pid=62519) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1601.252990] env[62519]: DEBUG nova.virt.hardware [None req-b3ee3a4d-b7fb-4e51-adc1-0f63f32d64f4 tempest-FloatingIPsAssociationTestJSON-131832216 tempest-FloatingIPsAssociationTestJSON-131832216-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T07:56:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T07:55:55Z,direct_url=,disk_format='vmdk',id=15793716-f1d9-4a86-9030-717adf498693,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4069337684d348e0a1ab4eb6a1a2b14d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T07:55:56Z,virtual_size=,visibility=), allow threads: False {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1601.253839] env[62519]: DEBUG nova.virt.hardware [None req-b3ee3a4d-b7fb-4e51-adc1-0f63f32d64f4 tempest-FloatingIPsAssociationTestJSON-131832216 tempest-FloatingIPsAssociationTestJSON-131832216-project-member] Flavor limits 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1601.253839] env[62519]: DEBUG nova.virt.hardware [None req-b3ee3a4d-b7fb-4e51-adc1-0f63f32d64f4 tempest-FloatingIPsAssociationTestJSON-131832216 tempest-FloatingIPsAssociationTestJSON-131832216-project-member] Image limits 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1601.253839] env[62519]: DEBUG nova.virt.hardware [None req-b3ee3a4d-b7fb-4e51-adc1-0f63f32d64f4 tempest-FloatingIPsAssociationTestJSON-131832216 tempest-FloatingIPsAssociationTestJSON-131832216-project-member] Flavor pref 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1601.253995] env[62519]: DEBUG nova.virt.hardware [None req-b3ee3a4d-b7fb-4e51-adc1-0f63f32d64f4 tempest-FloatingIPsAssociationTestJSON-131832216 tempest-FloatingIPsAssociationTestJSON-131832216-project-member] Image pref 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1601.254049] env[62519]: DEBUG nova.virt.hardware [None req-b3ee3a4d-b7fb-4e51-adc1-0f63f32d64f4 tempest-FloatingIPsAssociationTestJSON-131832216 tempest-FloatingIPsAssociationTestJSON-131832216-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1601.254279] env[62519]: DEBUG nova.virt.hardware [None req-b3ee3a4d-b7fb-4e51-adc1-0f63f32d64f4 tempest-FloatingIPsAssociationTestJSON-131832216 tempest-FloatingIPsAssociationTestJSON-131832216-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum 
VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1601.254510] env[62519]: DEBUG nova.virt.hardware [None req-b3ee3a4d-b7fb-4e51-adc1-0f63f32d64f4 tempest-FloatingIPsAssociationTestJSON-131832216 tempest-FloatingIPsAssociationTestJSON-131832216-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62519) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1601.254774] env[62519]: DEBUG nova.virt.hardware [None req-b3ee3a4d-b7fb-4e51-adc1-0f63f32d64f4 tempest-FloatingIPsAssociationTestJSON-131832216 tempest-FloatingIPsAssociationTestJSON-131832216-project-member] Got 1 possible topologies {{(pid=62519) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1601.254941] env[62519]: DEBUG nova.virt.hardware [None req-b3ee3a4d-b7fb-4e51-adc1-0f63f32d64f4 tempest-FloatingIPsAssociationTestJSON-131832216 tempest-FloatingIPsAssociationTestJSON-131832216-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1601.255165] env[62519]: DEBUG nova.virt.hardware [None req-b3ee3a4d-b7fb-4e51-adc1-0f63f32d64f4 tempest-FloatingIPsAssociationTestJSON-131832216 tempest-FloatingIPsAssociationTestJSON-131832216-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1601.256337] env[62519]: DEBUG nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Instance c60f5d73-9d6d-4b5f-b71b-00b6b787d482 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62519) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1601.260619] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0780b6e3-194d-4725-af47-a12d86adb88d {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1601.269070] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e740390b-338a-4a85-a51f-bd1f55d4a922 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1601.309898] env[62519]: DEBUG oslo_vmware.api [None req-7f6d51ef-3fb2-4405-8ddb-ee0d7b872e58 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Task: {'id': task-1802486, 'name': PowerOnVM_Task, 'duration_secs': 0.603332} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1601.310276] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-7f6d51ef-3fb2-4405-8ddb-ee0d7b872e58 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] [instance: 302edcd3-bd6e-41da-b731-4d4c1bb5c3c1] Powered on the VM {{(pid=62519) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1601.310489] env[62519]: INFO nova.compute.manager [None req-7f6d51ef-3fb2-4405-8ddb-ee0d7b872e58 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] [instance: 302edcd3-bd6e-41da-b731-4d4c1bb5c3c1] Took 10.21 seconds to spawn the instance on the hypervisor. [ 1601.310665] env[62519]: DEBUG nova.compute.manager [None req-7f6d51ef-3fb2-4405-8ddb-ee0d7b872e58 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] [instance: 302edcd3-bd6e-41da-b731-4d4c1bb5c3c1] Checking state {{(pid=62519) _get_power_state /opt/stack/nova/nova/compute/manager.py:1799}} [ 1601.311526] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f98df43b-d882-4399-b8e7-8fa86ca3a5a1 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1601.375926] env[62519]: INFO nova.compute.manager [-] [instance: 4e3dee19-b99a-4257-88da-1b0531e2c0f9] Took 1.48 seconds to deallocate network for instance. [ 1601.396535] env[62519]: DEBUG oslo_concurrency.lockutils [None req-6e183849-567e-4bbd-988c-d4ca57b40836 tempest-VolumesAdminNegativeTest-694306887 tempest-VolumesAdminNegativeTest-694306887-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1601.401821] env[62519]: DEBUG oslo_vmware.api [None req-a05ef864-7b2b-4646-a83f-596c8b3440a9 tempest-ServersWithSpecificFlavorTestJSON-2102225660 tempest-ServersWithSpecificFlavorTestJSON-2102225660-project-member] Task: {'id': task-1802488, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.077858} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1601.403653] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-a05ef864-7b2b-4646-a83f-596c8b3440a9 tempest-ServersWithSpecificFlavorTestJSON-2102225660 tempest-ServersWithSpecificFlavorTestJSON-2102225660-project-member] [instance: f0925a44-c15b-4415-99bc-1b2366292fe4] Extended root virtual disk {{(pid=62519) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1601.403865] env[62519]: DEBUG nova.compute.manager [req-9644319e-6409-429a-8c5e-d9df004d0961 req-777f46ff-7fd2-42b5-abb7-ca232d694c2c service nova] [instance: 4e3dee19-b99a-4257-88da-1b0531e2c0f9] Received event network-vif-deleted-d23f0b24-455b-4112-8518-1eaca05eb428 {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1601.408120] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70dfa643-60be-46c6-8e58-5a2c904f16ce {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1601.437373] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-a05ef864-7b2b-4646-a83f-596c8b3440a9 tempest-ServersWithSpecificFlavorTestJSON-2102225660 tempest-ServersWithSpecificFlavorTestJSON-2102225660-project-member] [instance: f0925a44-c15b-4415-99bc-1b2366292fe4] Reconfiguring VM instance instance-00000032 to attach disk [datastore1] f0925a44-c15b-4415-99bc-1b2366292fe4/f0925a44-c15b-4415-99bc-1b2366292fe4.vmdk or device None with type sparse {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1601.438167] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0f6bd821-8567-41c8-b7f8-366fd7fe18a3 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1601.460277] env[62519]: DEBUG oslo_vmware.api [None req-a05ef864-7b2b-4646-a83f-596c8b3440a9 tempest-ServersWithSpecificFlavorTestJSON-2102225660 tempest-ServersWithSpecificFlavorTestJSON-2102225660-project-member] Waiting for the task: (returnval){ [ 1601.460277] env[62519]: value = "task-1802489" [ 1601.460277] env[62519]: _type = "Task" [ 1601.460277] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1601.473371] env[62519]: DEBUG oslo_vmware.api [None req-a05ef864-7b2b-4646-a83f-596c8b3440a9 tempest-ServersWithSpecificFlavorTestJSON-2102225660 tempest-ServersWithSpecificFlavorTestJSON-2102225660-project-member] Task: {'id': task-1802489, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1601.538125] env[62519]: DEBUG nova.network.neutron [None req-77454b93-3f15-4824-8029-76272dd44c6c tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] [instance: 8b20b91d-d4e7-4ec2-88ec-76b444a8d8a8] Updating instance_info_cache with network_info: [{"id": "c3e2a054-4826-4bd6-8c9e-74005e7912e4", "address": "fa:16:3e:5b:29:03", "network": {"id": "40fc0e4f-fe1a-4b8c-ace4-4612cfc1a8fc", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-364463489-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "12977ed65a1b410a987b049e9d1dce3e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8e028024-a9c1-4cae-8849-ea770a7ae0e4", "external-id": "nsx-vlan-transportzone-919", "segmentation_id": 919, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc3e2a054-48", "ovs_interfaceid": "c3e2a054-4826-4bd6-8c9e-74005e7912e4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1601.668026] env[62519]: DEBUG oslo_concurrency.lockutils [req-fd61d954-5ca8-48fe-9303-2aebd28d9595 req-7ac967e0-e4ad-4c17-a4eb-1abd057e7603 service nova] Releasing lock "refresh_cache-ad374dd9-a92d-4b76-9609-7562346e05a8" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1601.719395] env[62519]: DEBUG nova.compute.manager [req-9e9e848c-a7ba-4f82-baf6-a994081db8dc req-bc025aea-2939-469e-a26d-c37166af8a4b service nova] [instance: 8b20b91d-d4e7-4ec2-88ec-76b444a8d8a8] Received event network-vif-plugged-c3e2a054-4826-4bd6-8c9e-74005e7912e4 {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1601.719395] env[62519]: DEBUG oslo_concurrency.lockutils [req-9e9e848c-a7ba-4f82-baf6-a994081db8dc req-bc025aea-2939-469e-a26d-c37166af8a4b service nova] Acquiring lock "8b20b91d-d4e7-4ec2-88ec-76b444a8d8a8-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1601.719395] env[62519]: DEBUG oslo_concurrency.lockutils [req-9e9e848c-a7ba-4f82-baf6-a994081db8dc req-bc025aea-2939-469e-a26d-c37166af8a4b service nova] Lock "8b20b91d-d4e7-4ec2-88ec-76b444a8d8a8-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1601.719395] env[62519]: DEBUG oslo_concurrency.lockutils [req-9e9e848c-a7ba-4f82-baf6-a994081db8dc req-bc025aea-2939-469e-a26d-c37166af8a4b service nova] Lock "8b20b91d-d4e7-4ec2-88ec-76b444a8d8a8-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62519) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1601.719395] env[62519]: DEBUG nova.compute.manager [req-9e9e848c-a7ba-4f82-baf6-a994081db8dc req-bc025aea-2939-469e-a26d-c37166af8a4b service nova] [instance: 8b20b91d-d4e7-4ec2-88ec-76b444a8d8a8] No waiting events found dispatching network-vif-plugged-c3e2a054-4826-4bd6-8c9e-74005e7912e4 {{(pid=62519) pop_instance_event /opt/stack/nova/nova/compute/manager.py:323}} [ 1601.719395] env[62519]: WARNING nova.compute.manager [req-9e9e848c-a7ba-4f82-baf6-a994081db8dc req-bc025aea-2939-469e-a26d-c37166af8a4b service nova] [instance: 8b20b91d-d4e7-4ec2-88ec-76b444a8d8a8] Received unexpected event network-vif-plugged-c3e2a054-4826-4bd6-8c9e-74005e7912e4 for instance with vm_state building and task_state spawning. [ 1601.719395] env[62519]: DEBUG nova.compute.manager [req-9e9e848c-a7ba-4f82-baf6-a994081db8dc req-bc025aea-2939-469e-a26d-c37166af8a4b service nova] [instance: 8b20b91d-d4e7-4ec2-88ec-76b444a8d8a8] Received event network-changed-c3e2a054-4826-4bd6-8c9e-74005e7912e4 {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1601.719395] env[62519]: DEBUG nova.compute.manager [req-9e9e848c-a7ba-4f82-baf6-a994081db8dc req-bc025aea-2939-469e-a26d-c37166af8a4b service nova] [instance: 8b20b91d-d4e7-4ec2-88ec-76b444a8d8a8] Refreshing instance network info cache due to event network-changed-c3e2a054-4826-4bd6-8c9e-74005e7912e4. {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11628}} [ 1601.719395] env[62519]: DEBUG oslo_concurrency.lockutils [req-9e9e848c-a7ba-4f82-baf6-a994081db8dc req-bc025aea-2939-469e-a26d-c37166af8a4b service nova] Acquiring lock "refresh_cache-8b20b91d-d4e7-4ec2-88ec-76b444a8d8a8" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1601.762733] env[62519]: DEBUG nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Instance d5b6b7b4-ebc4-4d3d-8e16-bf0a4e974d67 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62519) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1601.830950] env[62519]: INFO nova.compute.manager [None req-7f6d51ef-3fb2-4405-8ddb-ee0d7b872e58 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] [instance: 302edcd3-bd6e-41da-b731-4d4c1bb5c3c1] Took 45.18 seconds to build instance. 
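Editor's note on the lock entries above: the repeated "Acquiring lock ... by ...", "acquired by ... :: waited", and '"released" by ... :: held' records are emitted by oslo.concurrency's lockutils wrappers, which Nova uses to serialize work on shared state such as "compute_resources" and per-instance UUID locks. The snippet below is a minimal illustrative sketch of that pattern only, not Nova's code; the function names and lock usage are hypothetical stand-ins.

    # Sketch of the oslo.concurrency locking pattern behind the
    # "Acquiring lock" / "acquired by" / '"released" by' log entries above.
    from oslo_concurrency import lockutils

    # Decorator form: callers are serialized on a named in-process lock,
    # similar to how the resource tracker guards "compute_resources".
    @lockutils.synchronized('compute_resources')
    def claim_resources(instance_uuid):
        # ... update usage for the instance while holding the lock ...
        return instance_uuid

    # Context-manager form: the per-instance UUID locks seen in the log
    # (stop/rebuild serialization) follow the same shape.
    def stop_instance(instance_uuid):
        with lockutils.lock(instance_uuid):
            # power off the VM, update the instance record, etc.
            pass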
[ 1601.884448] env[62519]: DEBUG oslo_concurrency.lockutils [None req-eb756757-24ba-440f-8631-f23661662462 tempest-ImagesOneServerTestJSON-1959164190 tempest-ImagesOneServerTestJSON-1959164190-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1601.961988] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-eec27fb3-e6e1-4967-a642-45d429e05ad6 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] [instance: 80ef3fd4-b9ef-4fd2-a991-feec78a0c81d] Powering off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1601.962379] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c11eb278-334a-412b-9503-f1aa7c80c533 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1601.976536] env[62519]: DEBUG oslo_vmware.api [None req-a05ef864-7b2b-4646-a83f-596c8b3440a9 tempest-ServersWithSpecificFlavorTestJSON-2102225660 tempest-ServersWithSpecificFlavorTestJSON-2102225660-project-member] Task: {'id': task-1802489, 'name': ReconfigVM_Task, 'duration_secs': 0.304212} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1601.978088] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-a05ef864-7b2b-4646-a83f-596c8b3440a9 tempest-ServersWithSpecificFlavorTestJSON-2102225660 tempest-ServersWithSpecificFlavorTestJSON-2102225660-project-member] [instance: f0925a44-c15b-4415-99bc-1b2366292fe4] Reconfigured VM instance instance-00000032 to attach disk [datastore1] f0925a44-c15b-4415-99bc-1b2366292fe4/f0925a44-c15b-4415-99bc-1b2366292fe4.vmdk or device None with type sparse {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1601.978440] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-a05ef864-7b2b-4646-a83f-596c8b3440a9 tempest-ServersWithSpecificFlavorTestJSON-2102225660 tempest-ServersWithSpecificFlavorTestJSON-2102225660-project-member] Creating Virtual Disk of size 1048576 KB and adapter type paraVirtual on the data store {{(pid=62519) create_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1383}} [ 1601.979203] env[62519]: DEBUG oslo_vmware.api [None req-eec27fb3-e6e1-4967-a642-45d429e05ad6 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Waiting for the task: (returnval){ [ 1601.979203] env[62519]: value = "task-1802490" [ 1601.979203] env[62519]: _type = "Task" [ 1601.979203] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1601.979387] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CreateVirtualDisk_Task with opID=oslo.vmware-9dca1519-a76d-4d3f-9c70-4f227c21ce2a {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1601.991349] env[62519]: DEBUG oslo_vmware.api [None req-eec27fb3-e6e1-4967-a642-45d429e05ad6 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Task: {'id': task-1802490, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1601.992875] env[62519]: DEBUG oslo_vmware.api [None req-a05ef864-7b2b-4646-a83f-596c8b3440a9 tempest-ServersWithSpecificFlavorTestJSON-2102225660 tempest-ServersWithSpecificFlavorTestJSON-2102225660-project-member] Waiting for the task: (returnval){ [ 1601.992875] env[62519]: value = "task-1802491" [ 1601.992875] env[62519]: _type = "Task" [ 1601.992875] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1602.003042] env[62519]: DEBUG oslo_vmware.api [None req-a05ef864-7b2b-4646-a83f-596c8b3440a9 tempest-ServersWithSpecificFlavorTestJSON-2102225660 tempest-ServersWithSpecificFlavorTestJSON-2102225660-project-member] Task: {'id': task-1802491, 'name': CreateVirtualDisk_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1602.043689] env[62519]: DEBUG oslo_concurrency.lockutils [None req-77454b93-3f15-4824-8029-76272dd44c6c tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Releasing lock "refresh_cache-8b20b91d-d4e7-4ec2-88ec-76b444a8d8a8" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1602.044275] env[62519]: DEBUG nova.compute.manager [None req-77454b93-3f15-4824-8029-76272dd44c6c tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] [instance: 8b20b91d-d4e7-4ec2-88ec-76b444a8d8a8] Instance network_info: |[{"id": "c3e2a054-4826-4bd6-8c9e-74005e7912e4", "address": "fa:16:3e:5b:29:03", "network": {"id": "40fc0e4f-fe1a-4b8c-ace4-4612cfc1a8fc", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-364463489-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "12977ed65a1b410a987b049e9d1dce3e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8e028024-a9c1-4cae-8849-ea770a7ae0e4", "external-id": "nsx-vlan-transportzone-919", "segmentation_id": 919, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc3e2a054-48", "ovs_interfaceid": "c3e2a054-4826-4bd6-8c9e-74005e7912e4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62519) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2004}} [ 1602.044541] env[62519]: DEBUG oslo_concurrency.lockutils [req-9e9e848c-a7ba-4f82-baf6-a994081db8dc req-bc025aea-2939-469e-a26d-c37166af8a4b service nova] Acquired lock "refresh_cache-8b20b91d-d4e7-4ec2-88ec-76b444a8d8a8" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1602.046144] env[62519]: DEBUG nova.network.neutron [req-9e9e848c-a7ba-4f82-baf6-a994081db8dc req-bc025aea-2939-469e-a26d-c37166af8a4b service nova] [instance: 8b20b91d-d4e7-4ec2-88ec-76b444a8d8a8] Refreshing network info cache for port c3e2a054-4826-4bd6-8c9e-74005e7912e4 {{(pid=62519) _get_instance_nw_info 
/opt/stack/nova/nova/network/neutron.py:2064}} [ 1602.046330] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-77454b93-3f15-4824-8029-76272dd44c6c tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] [instance: 8b20b91d-d4e7-4ec2-88ec-76b444a8d8a8] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:5b:29:03', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '8e028024-a9c1-4cae-8849-ea770a7ae0e4', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'c3e2a054-4826-4bd6-8c9e-74005e7912e4', 'vif_model': 'vmxnet3'}] {{(pid=62519) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1602.060398] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-77454b93-3f15-4824-8029-76272dd44c6c tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Creating folder: Project (12977ed65a1b410a987b049e9d1dce3e). Parent ref: group-v373567. {{(pid=62519) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1602.061668] env[62519]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-9815b486-22cd-4665-a67b-7aafc3b7dadf {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1602.073835] env[62519]: INFO nova.virt.vmwareapi.vm_util [None req-77454b93-3f15-4824-8029-76272dd44c6c tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Created folder: Project (12977ed65a1b410a987b049e9d1dce3e) in parent group-v373567. [ 1602.074074] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-77454b93-3f15-4824-8029-76272dd44c6c tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Creating folder: Instances. Parent ref: group-v373718. {{(pid=62519) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1602.074321] env[62519]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f8dadccd-d6c3-4584-8239-f4ca3b0d28b8 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1602.084962] env[62519]: INFO nova.virt.vmwareapi.vm_util [None req-77454b93-3f15-4824-8029-76272dd44c6c tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Created folder: Instances in parent group-v373718. [ 1602.085271] env[62519]: DEBUG oslo.service.loopingcall [None req-77454b93-3f15-4824-8029-76272dd44c6c tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62519) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1602.085468] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8b20b91d-d4e7-4ec2-88ec-76b444a8d8a8] Creating VM on the ESX host {{(pid=62519) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1602.085670] env[62519]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c552047a-fc6f-4ed3-9e1b-909fb1627a70 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1602.106768] env[62519]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1602.106768] env[62519]: value = "task-1802494" [ 1602.106768] env[62519]: _type = "Task" [ 1602.106768] env[62519]: } to complete. 
{{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1602.110396] env[62519]: DEBUG nova.network.neutron [-] [instance: ad374dd9-a92d-4b76-9609-7562346e05a8] Updating instance_info_cache with network_info: [] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1602.117153] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1802494, 'name': CreateVM_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1602.268252] env[62519]: DEBUG nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Instance 540167be-5295-4e28-9b25-16317746dd0e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62519) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1602.494455] env[62519]: DEBUG oslo_vmware.api [None req-eec27fb3-e6e1-4967-a642-45d429e05ad6 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Task: {'id': task-1802490, 'name': PowerOffVM_Task, 'duration_secs': 0.251067} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1602.497835] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-eec27fb3-e6e1-4967-a642-45d429e05ad6 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] [instance: 80ef3fd4-b9ef-4fd2-a991-feec78a0c81d] Powered off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1602.498249] env[62519]: DEBUG nova.compute.manager [None req-eec27fb3-e6e1-4967-a642-45d429e05ad6 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] [instance: 80ef3fd4-b9ef-4fd2-a991-feec78a0c81d] Checking state {{(pid=62519) _get_power_state /opt/stack/nova/nova/compute/manager.py:1799}} [ 1602.499383] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06643579-cc1e-4256-9018-9efac28b2b2f {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1602.512021] env[62519]: DEBUG oslo_vmware.api [None req-a05ef864-7b2b-4646-a83f-596c8b3440a9 tempest-ServersWithSpecificFlavorTestJSON-2102225660 tempest-ServersWithSpecificFlavorTestJSON-2102225660-project-member] Task: {'id': task-1802491, 'name': CreateVirtualDisk_Task, 'duration_secs': 0.056846} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1602.512021] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-a05ef864-7b2b-4646-a83f-596c8b3440a9 tempest-ServersWithSpecificFlavorTestJSON-2102225660 tempest-ServersWithSpecificFlavorTestJSON-2102225660-project-member] Created Virtual Disk of size 1048576 KB and type thin {{(pid=62519) create_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1404}} [ 1602.513466] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68ab8c9d-87c8-4391-8153-a2baffc84d69 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1602.543504] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-a05ef864-7b2b-4646-a83f-596c8b3440a9 tempest-ServersWithSpecificFlavorTestJSON-2102225660 tempest-ServersWithSpecificFlavorTestJSON-2102225660-project-member] [instance: f0925a44-c15b-4415-99bc-1b2366292fe4] Reconfiguring VM instance instance-00000032 to attach disk [datastore1] f0925a44-c15b-4415-99bc-1b2366292fe4/ephemeral_0.vmdk or device None with type thin {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1602.544317] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b8c626c3-0748-4c2a-867b-f10d363cc099 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1602.566366] env[62519]: DEBUG oslo_vmware.api [None req-a05ef864-7b2b-4646-a83f-596c8b3440a9 tempest-ServersWithSpecificFlavorTestJSON-2102225660 tempest-ServersWithSpecificFlavorTestJSON-2102225660-project-member] Waiting for the task: (returnval){ [ 1602.566366] env[62519]: value = "task-1802495" [ 1602.566366] env[62519]: _type = "Task" [ 1602.566366] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1602.576667] env[62519]: DEBUG oslo_vmware.api [None req-a05ef864-7b2b-4646-a83f-596c8b3440a9 tempest-ServersWithSpecificFlavorTestJSON-2102225660 tempest-ServersWithSpecificFlavorTestJSON-2102225660-project-member] Task: {'id': task-1802495, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1602.613452] env[62519]: INFO nova.compute.manager [-] [instance: ad374dd9-a92d-4b76-9609-7562346e05a8] Took 1.70 seconds to deallocate network for instance. [ 1602.623129] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1802494, 'name': CreateVM_Task, 'duration_secs': 0.37906} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1602.623652] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8b20b91d-d4e7-4ec2-88ec-76b444a8d8a8] Created VM on the ESX host {{(pid=62519) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1602.624429] env[62519]: DEBUG oslo_concurrency.lockutils [None req-77454b93-3f15-4824-8029-76272dd44c6c tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1602.624858] env[62519]: DEBUG oslo_concurrency.lockutils [None req-77454b93-3f15-4824-8029-76272dd44c6c tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Acquired lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1602.625394] env[62519]: DEBUG oslo_concurrency.lockutils [None req-77454b93-3f15-4824-8029-76272dd44c6c tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1602.625745] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-01925573-d67c-40c3-9f7b-a21dda3b4a5f {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1602.632016] env[62519]: DEBUG oslo_vmware.api [None req-77454b93-3f15-4824-8029-76272dd44c6c tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Waiting for the task: (returnval){ [ 1602.632016] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]5272b728-9349-c69a-7eb9-c442b0989641" [ 1602.632016] env[62519]: _type = "Task" [ 1602.632016] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1602.641311] env[62519]: DEBUG oslo_vmware.api [None req-77454b93-3f15-4824-8029-76272dd44c6c tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]5272b728-9349-c69a-7eb9-c442b0989641, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1602.772413] env[62519]: DEBUG nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Instance 46b3a0fb-29f6-4b66-a091-2d125b69d109 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62519) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1602.902647] env[62519]: DEBUG nova.network.neutron [req-9e9e848c-a7ba-4f82-baf6-a994081db8dc req-bc025aea-2939-469e-a26d-c37166af8a4b service nova] [instance: 8b20b91d-d4e7-4ec2-88ec-76b444a8d8a8] Updated VIF entry in instance network info cache for port c3e2a054-4826-4bd6-8c9e-74005e7912e4. {{(pid=62519) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1602.903067] env[62519]: DEBUG nova.network.neutron [req-9e9e848c-a7ba-4f82-baf6-a994081db8dc req-bc025aea-2939-469e-a26d-c37166af8a4b service nova] [instance: 8b20b91d-d4e7-4ec2-88ec-76b444a8d8a8] Updating instance_info_cache with network_info: [{"id": "c3e2a054-4826-4bd6-8c9e-74005e7912e4", "address": "fa:16:3e:5b:29:03", "network": {"id": "40fc0e4f-fe1a-4b8c-ace4-4612cfc1a8fc", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-364463489-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "12977ed65a1b410a987b049e9d1dce3e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8e028024-a9c1-4cae-8849-ea770a7ae0e4", "external-id": "nsx-vlan-transportzone-919", "segmentation_id": 919, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc3e2a054-48", "ovs_interfaceid": "c3e2a054-4826-4bd6-8c9e-74005e7912e4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1602.922459] env[62519]: INFO nova.compute.manager [None req-902bf869-76cc-4e6e-a6eb-33e80a9d9579 tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] [instance: 34d2991e-b6df-473d-8994-e45ff57ef131] Rebuilding instance [ 1602.971831] env[62519]: DEBUG nova.compute.manager [None req-902bf869-76cc-4e6e-a6eb-33e80a9d9579 tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] [instance: 34d2991e-b6df-473d-8994-e45ff57ef131] Checking state {{(pid=62519) _get_power_state /opt/stack/nova/nova/compute/manager.py:1799}} [ 1602.972774] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c21f14eb-07fd-4c0a-b7f4-f2af03b7c4e9 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1603.020367] env[62519]: DEBUG oslo_concurrency.lockutils [None req-eec27fb3-e6e1-4967-a642-45d429e05ad6 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Lock "80ef3fd4-b9ef-4fd2-a991-feec78a0c81d" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 2.082s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1603.079171] env[62519]: DEBUG oslo_vmware.api [None req-a05ef864-7b2b-4646-a83f-596c8b3440a9 tempest-ServersWithSpecificFlavorTestJSON-2102225660 
tempest-ServersWithSpecificFlavorTestJSON-2102225660-project-member] Task: {'id': task-1802495, 'name': ReconfigVM_Task, 'duration_secs': 0.337525} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1603.079492] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-a05ef864-7b2b-4646-a83f-596c8b3440a9 tempest-ServersWithSpecificFlavorTestJSON-2102225660 tempest-ServersWithSpecificFlavorTestJSON-2102225660-project-member] [instance: f0925a44-c15b-4415-99bc-1b2366292fe4] Reconfigured VM instance instance-00000032 to attach disk [datastore1] f0925a44-c15b-4415-99bc-1b2366292fe4/ephemeral_0.vmdk or device None with type thin {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1603.080091] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-c801d3f6-6131-4486-8702-32394473f6f6 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1603.087065] env[62519]: DEBUG oslo_vmware.api [None req-a05ef864-7b2b-4646-a83f-596c8b3440a9 tempest-ServersWithSpecificFlavorTestJSON-2102225660 tempest-ServersWithSpecificFlavorTestJSON-2102225660-project-member] Waiting for the task: (returnval){ [ 1603.087065] env[62519]: value = "task-1802496" [ 1603.087065] env[62519]: _type = "Task" [ 1603.087065] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1603.096967] env[62519]: DEBUG oslo_vmware.api [None req-a05ef864-7b2b-4646-a83f-596c8b3440a9 tempest-ServersWithSpecificFlavorTestJSON-2102225660 tempest-ServersWithSpecificFlavorTestJSON-2102225660-project-member] Task: {'id': task-1802496, 'name': Rename_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1603.123193] env[62519]: DEBUG nova.network.neutron [None req-b3ee3a4d-b7fb-4e51-adc1-0f63f32d64f4 tempest-FloatingIPsAssociationTestJSON-131832216 tempest-FloatingIPsAssociationTestJSON-131832216-project-member] [instance: 9ac3344d-219a-487f-b83f-96c17cd86dad] Successfully updated port: 883790fe-b8ee-4a72-99ca-e7d80c7468f3 {{(pid=62519) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1603.125415] env[62519]: DEBUG oslo_concurrency.lockutils [None req-7769f008-9fdc-4c87-9eab-a05d54387360 tempest-SecurityGroupsTestJSON-38868017 tempest-SecurityGroupsTestJSON-38868017-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1603.144335] env[62519]: DEBUG oslo_vmware.api [None req-77454b93-3f15-4824-8029-76272dd44c6c tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]5272b728-9349-c69a-7eb9-c442b0989641, 'name': SearchDatastore_Task, 'duration_secs': 0.012084} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1603.144832] env[62519]: DEBUG oslo_concurrency.lockutils [None req-77454b93-3f15-4824-8029-76272dd44c6c tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Releasing lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1603.145227] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-77454b93-3f15-4824-8029-76272dd44c6c tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] [instance: 8b20b91d-d4e7-4ec2-88ec-76b444a8d8a8] Processing image 15793716-f1d9-4a86-9030-717adf498693 {{(pid=62519) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1603.145599] env[62519]: DEBUG oslo_concurrency.lockutils [None req-77454b93-3f15-4824-8029-76272dd44c6c tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1603.145877] env[62519]: DEBUG oslo_concurrency.lockutils [None req-77454b93-3f15-4824-8029-76272dd44c6c tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Acquired lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1603.146186] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-77454b93-3f15-4824-8029-76272dd44c6c tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62519) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1603.147032] env[62519]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-9f32664e-1e0e-4551-af3c-506070579610 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1603.158807] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-77454b93-3f15-4824-8029-76272dd44c6c tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62519) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1603.158807] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-77454b93-3f15-4824-8029-76272dd44c6c tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62519) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1603.159794] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-daad89d7-6bbe-458b-afb2-8e8f85b73c4f {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1603.165827] env[62519]: DEBUG oslo_vmware.api [None req-77454b93-3f15-4824-8029-76272dd44c6c tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Waiting for the task: (returnval){ [ 1603.165827] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]528ec5cd-5e62-e23b-a126-08e995c72f08" [ 1603.165827] env[62519]: _type = "Task" [ 1603.165827] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1603.175216] env[62519]: DEBUG oslo_vmware.api [None req-77454b93-3f15-4824-8029-76272dd44c6c tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]528ec5cd-5e62-e23b-a126-08e995c72f08, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1603.275518] env[62519]: DEBUG nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Instance bace23b3-b7f4-4f3b-8986-0076440d096d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62519) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1603.341341] env[62519]: DEBUG oslo_concurrency.lockutils [None req-7f6d51ef-3fb2-4405-8ddb-ee0d7b872e58 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Lock "302edcd3-bd6e-41da-b731-4d4c1bb5c3c1" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 77.753s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1603.406181] env[62519]: DEBUG oslo_concurrency.lockutils [req-9e9e848c-a7ba-4f82-baf6-a994081db8dc req-bc025aea-2939-469e-a26d-c37166af8a4b service nova] Releasing lock "refresh_cache-8b20b91d-d4e7-4ec2-88ec-76b444a8d8a8" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1603.598905] env[62519]: DEBUG oslo_vmware.api [None req-a05ef864-7b2b-4646-a83f-596c8b3440a9 tempest-ServersWithSpecificFlavorTestJSON-2102225660 tempest-ServersWithSpecificFlavorTestJSON-2102225660-project-member] Task: {'id': task-1802496, 'name': Rename_Task, 'duration_secs': 0.176936} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1603.598905] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-a05ef864-7b2b-4646-a83f-596c8b3440a9 tempest-ServersWithSpecificFlavorTestJSON-2102225660 tempest-ServersWithSpecificFlavorTestJSON-2102225660-project-member] [instance: f0925a44-c15b-4415-99bc-1b2366292fe4] Powering on the VM {{(pid=62519) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1603.599098] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-3b00170e-23e9-4df4-a900-86c1ac8321a3 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1603.607914] env[62519]: DEBUG oslo_vmware.api [None req-a05ef864-7b2b-4646-a83f-596c8b3440a9 tempest-ServersWithSpecificFlavorTestJSON-2102225660 tempest-ServersWithSpecificFlavorTestJSON-2102225660-project-member] Waiting for the task: (returnval){ [ 1603.607914] env[62519]: value = "task-1802497" [ 1603.607914] env[62519]: _type = "Task" [ 1603.607914] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1603.615621] env[62519]: DEBUG oslo_vmware.api [None req-a05ef864-7b2b-4646-a83f-596c8b3440a9 tempest-ServersWithSpecificFlavorTestJSON-2102225660 tempest-ServersWithSpecificFlavorTestJSON-2102225660-project-member] Task: {'id': task-1802497, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1603.626364] env[62519]: DEBUG oslo_concurrency.lockutils [None req-b3ee3a4d-b7fb-4e51-adc1-0f63f32d64f4 tempest-FloatingIPsAssociationTestJSON-131832216 tempest-FloatingIPsAssociationTestJSON-131832216-project-member] Acquiring lock "refresh_cache-9ac3344d-219a-487f-b83f-96c17cd86dad" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1603.626589] env[62519]: DEBUG oslo_concurrency.lockutils [None req-b3ee3a4d-b7fb-4e51-adc1-0f63f32d64f4 tempest-FloatingIPsAssociationTestJSON-131832216 tempest-FloatingIPsAssociationTestJSON-131832216-project-member] Acquired lock "refresh_cache-9ac3344d-219a-487f-b83f-96c17cd86dad" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1603.626704] env[62519]: DEBUG nova.network.neutron [None req-b3ee3a4d-b7fb-4e51-adc1-0f63f32d64f4 tempest-FloatingIPsAssociationTestJSON-131832216 tempest-FloatingIPsAssociationTestJSON-131832216-project-member] [instance: 9ac3344d-219a-487f-b83f-96c17cd86dad] Building network info cache for instance {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1603.680967] env[62519]: DEBUG oslo_vmware.api [None req-77454b93-3f15-4824-8029-76272dd44c6c tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]528ec5cd-5e62-e23b-a126-08e995c72f08, 'name': SearchDatastore_Task, 'duration_secs': 0.017614} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1603.685194] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0a229878-a20c-4d8f-aa42-45fae554fd0d {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1603.692318] env[62519]: DEBUG oslo_vmware.api [None req-77454b93-3f15-4824-8029-76272dd44c6c tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Waiting for the task: (returnval){ [ 1603.692318] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]52413982-fb68-0fa5-7307-812a57c8c145" [ 1603.692318] env[62519]: _type = "Task" [ 1603.692318] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1603.701498] env[62519]: DEBUG oslo_vmware.api [None req-77454b93-3f15-4824-8029-76272dd44c6c tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52413982-fb68-0fa5-7307-812a57c8c145, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1603.737214] env[62519]: DEBUG nova.compute.manager [req-7437750e-00d8-4036-9406-5a6a8b6b5685 req-09e4a753-f87e-4bb8-8031-48f4d77ececc service nova] [instance: ad374dd9-a92d-4b76-9609-7562346e05a8] Received event network-vif-deleted-8d20f83b-f706-40ea-bba7-461aba73113f {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1603.778740] env[62519]: DEBUG nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Instance ed716912-752e-4c6d-b6c6-fb349668fa93 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62519) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1603.778740] env[62519]: DEBUG nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Migration 33e7914a-348b-43c3-a3c3-7f65f496ad5a is active on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62519) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1712}} [ 1603.778740] env[62519]: DEBUG nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Instance a1551278-a306-4534-8d8d-3b3a003dde04 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=62519) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1603.843690] env[62519]: DEBUG nova.compute.manager [None req-9f4c1900-996e-42d1-9657-410816fb70d2 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] [instance: 417588f8-6288-4ecd-9764-dbc923549c5d] Starting instance... 
{{(pid=62519) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2441}} [ 1603.987224] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-902bf869-76cc-4e6e-a6eb-33e80a9d9579 tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] [instance: 34d2991e-b6df-473d-8994-e45ff57ef131] Powering off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1603.987551] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-4eeedca7-cbfb-4e1d-a089-eb7ff46f8a88 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1603.998551] env[62519]: DEBUG oslo_vmware.api [None req-902bf869-76cc-4e6e-a6eb-33e80a9d9579 tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Waiting for the task: (returnval){ [ 1603.998551] env[62519]: value = "task-1802498" [ 1603.998551] env[62519]: _type = "Task" [ 1603.998551] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1604.008554] env[62519]: DEBUG oslo_vmware.api [None req-902bf869-76cc-4e6e-a6eb-33e80a9d9579 tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Task: {'id': task-1802498, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1604.113875] env[62519]: DEBUG nova.compute.manager [req-dfc34390-07ee-4317-91d1-dc974cb276ea req-2f95dcc4-5393-4d2d-98a9-23f0671322c3 service nova] [instance: 9ac3344d-219a-487f-b83f-96c17cd86dad] Received event network-vif-plugged-883790fe-b8ee-4a72-99ca-e7d80c7468f3 {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1604.114100] env[62519]: DEBUG oslo_concurrency.lockutils [req-dfc34390-07ee-4317-91d1-dc974cb276ea req-2f95dcc4-5393-4d2d-98a9-23f0671322c3 service nova] Acquiring lock "9ac3344d-219a-487f-b83f-96c17cd86dad-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1604.114341] env[62519]: DEBUG oslo_concurrency.lockutils [req-dfc34390-07ee-4317-91d1-dc974cb276ea req-2f95dcc4-5393-4d2d-98a9-23f0671322c3 service nova] Lock "9ac3344d-219a-487f-b83f-96c17cd86dad-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1604.114521] env[62519]: DEBUG oslo_concurrency.lockutils [req-dfc34390-07ee-4317-91d1-dc974cb276ea req-2f95dcc4-5393-4d2d-98a9-23f0671322c3 service nova] Lock "9ac3344d-219a-487f-b83f-96c17cd86dad-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1604.114686] env[62519]: DEBUG nova.compute.manager [req-dfc34390-07ee-4317-91d1-dc974cb276ea req-2f95dcc4-5393-4d2d-98a9-23f0671322c3 service nova] [instance: 9ac3344d-219a-487f-b83f-96c17cd86dad] No waiting events found dispatching network-vif-plugged-883790fe-b8ee-4a72-99ca-e7d80c7468f3 {{(pid=62519) pop_instance_event /opt/stack/nova/nova/compute/manager.py:323}} [ 1604.114850] env[62519]: WARNING nova.compute.manager 
[req-dfc34390-07ee-4317-91d1-dc974cb276ea req-2f95dcc4-5393-4d2d-98a9-23f0671322c3 service nova] [instance: 9ac3344d-219a-487f-b83f-96c17cd86dad] Received unexpected event network-vif-plugged-883790fe-b8ee-4a72-99ca-e7d80c7468f3 for instance with vm_state building and task_state spawning. [ 1604.115211] env[62519]: DEBUG nova.compute.manager [req-dfc34390-07ee-4317-91d1-dc974cb276ea req-2f95dcc4-5393-4d2d-98a9-23f0671322c3 service nova] [instance: 9ac3344d-219a-487f-b83f-96c17cd86dad] Received event network-changed-883790fe-b8ee-4a72-99ca-e7d80c7468f3 {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1604.115428] env[62519]: DEBUG nova.compute.manager [req-dfc34390-07ee-4317-91d1-dc974cb276ea req-2f95dcc4-5393-4d2d-98a9-23f0671322c3 service nova] [instance: 9ac3344d-219a-487f-b83f-96c17cd86dad] Refreshing instance network info cache due to event network-changed-883790fe-b8ee-4a72-99ca-e7d80c7468f3. {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11628}} [ 1604.115602] env[62519]: DEBUG oslo_concurrency.lockutils [req-dfc34390-07ee-4317-91d1-dc974cb276ea req-2f95dcc4-5393-4d2d-98a9-23f0671322c3 service nova] Acquiring lock "refresh_cache-9ac3344d-219a-487f-b83f-96c17cd86dad" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1604.122164] env[62519]: DEBUG oslo_vmware.api [None req-a05ef864-7b2b-4646-a83f-596c8b3440a9 tempest-ServersWithSpecificFlavorTestJSON-2102225660 tempest-ServersWithSpecificFlavorTestJSON-2102225660-project-member] Task: {'id': task-1802497, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1604.173956] env[62519]: DEBUG nova.network.neutron [None req-b3ee3a4d-b7fb-4e51-adc1-0f63f32d64f4 tempest-FloatingIPsAssociationTestJSON-131832216 tempest-FloatingIPsAssociationTestJSON-131832216-project-member] [instance: 9ac3344d-219a-487f-b83f-96c17cd86dad] Instance cache missing network info. {{(pid=62519) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1604.189082] env[62519]: INFO nova.compute.manager [None req-887fdd97-c9a0-4663-ab52-0f67cebf30b9 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] [instance: 80ef3fd4-b9ef-4fd2-a991-feec78a0c81d] Rebuilding instance [ 1604.203292] env[62519]: DEBUG oslo_vmware.api [None req-77454b93-3f15-4824-8029-76272dd44c6c tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52413982-fb68-0fa5-7307-812a57c8c145, 'name': SearchDatastore_Task, 'duration_secs': 0.041171} completed successfully. 
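The SearchDatastore_Task records above illustrate the task pattern that recurs throughout this log: a vSphere SOAP method that returns a Task managed object is invoked through oslo.vmware, and the caller then polls that task (the repeated "_poll_task ... progress is N%" lines) until it reaches a terminal state. Below is a minimal sketch of that pattern using oslo.vmware's session API; `session`, `browser_ref`, and `ds_path` are hypothetical stand-ins for the objects Nova resolves internally, not values taken from this log.

    def search_datastore(session, browser_ref, ds_path):
        # SearchDatastore_Task returns a Task moref immediately; the actual
        # search runs asynchronously on the vCenter side.
        task = session.invoke_api(session.vim, 'SearchDatastore_Task',
                                  browser_ref, datastorePath=ds_path)
        # wait_for_task() polls the task (producing DEBUG lines like the
        # "progress is 0%" entries above) and returns its TaskInfo on success.
        task_info = session.wait_for_task(task)
        return task_info.result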
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1604.203565] env[62519]: DEBUG oslo_concurrency.lockutils [None req-77454b93-3f15-4824-8029-76272dd44c6c tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Releasing lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1604.203818] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-77454b93-3f15-4824-8029-76272dd44c6c tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk to [datastore1] 8b20b91d-d4e7-4ec2-88ec-76b444a8d8a8/8b20b91d-d4e7-4ec2-88ec-76b444a8d8a8.vmdk {{(pid=62519) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1604.204170] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-3db788a5-13f2-4aba-8931-ce79108b9990 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1604.216267] env[62519]: DEBUG oslo_vmware.api [None req-77454b93-3f15-4824-8029-76272dd44c6c tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Waiting for the task: (returnval){ [ 1604.216267] env[62519]: value = "task-1802499" [ 1604.216267] env[62519]: _type = "Task" [ 1604.216267] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1604.227874] env[62519]: DEBUG oslo_vmware.api [None req-77454b93-3f15-4824-8029-76272dd44c6c tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Task: {'id': task-1802499, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1604.233481] env[62519]: DEBUG nova.compute.manager [None req-887fdd97-c9a0-4663-ab52-0f67cebf30b9 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] [instance: 80ef3fd4-b9ef-4fd2-a991-feec78a0c81d] Checking state {{(pid=62519) _get_power_state /opt/stack/nova/nova/compute/manager.py:1799}} [ 1604.234357] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1629f6f-2a85-4982-99af-44ced85edc46 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1604.281624] env[62519]: DEBUG nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Instance 417588f8-6288-4ecd-9764-dbc923549c5d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62519) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1604.368974] env[62519]: DEBUG oslo_concurrency.lockutils [None req-9f4c1900-996e-42d1-9657-410816fb70d2 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1604.437703] env[62519]: DEBUG nova.network.neutron [None req-b3ee3a4d-b7fb-4e51-adc1-0f63f32d64f4 tempest-FloatingIPsAssociationTestJSON-131832216 tempest-FloatingIPsAssociationTestJSON-131832216-project-member] [instance: 9ac3344d-219a-487f-b83f-96c17cd86dad] Updating instance_info_cache with network_info: [{"id": "883790fe-b8ee-4a72-99ca-e7d80c7468f3", "address": "fa:16:3e:bb:95:3b", "network": {"id": "ca655458-a420-4494-a897-206c445e8893", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-2085945802-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "08ccb6256cb446e1837e04580892a31a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "98d96b75-ac36-499a-adc2-130c8c1d55ca", "external-id": "nsx-vlan-transportzone-564", "segmentation_id": 564, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap883790fe-b8", "ovs_interfaceid": "883790fe-b8ee-4a72-99ca-e7d80c7468f3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1604.513367] env[62519]: DEBUG oslo_vmware.api [None req-902bf869-76cc-4e6e-a6eb-33e80a9d9579 tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Task: {'id': task-1802498, 'name': PowerOffVM_Task, 'duration_secs': 0.341384} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1604.515018] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-902bf869-76cc-4e6e-a6eb-33e80a9d9579 tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] [instance: 34d2991e-b6df-473d-8994-e45ff57ef131] Powered off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1604.515018] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-902bf869-76cc-4e6e-a6eb-33e80a9d9579 tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] [instance: 34d2991e-b6df-473d-8994-e45ff57ef131] Destroying instance {{(pid=62519) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1604.515018] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be85ca1e-cc38-4387-9c65-8a6809370818 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1604.525177] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-902bf869-76cc-4e6e-a6eb-33e80a9d9579 tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] [instance: 34d2991e-b6df-473d-8994-e45ff57ef131] Unregistering the VM {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1604.525554] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-974faafe-6fbb-437d-9fca-259f16b84553 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1604.622168] env[62519]: DEBUG oslo_vmware.api [None req-a05ef864-7b2b-4646-a83f-596c8b3440a9 tempest-ServersWithSpecificFlavorTestJSON-2102225660 tempest-ServersWithSpecificFlavorTestJSON-2102225660-project-member] Task: {'id': task-1802497, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1604.637857] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-902bf869-76cc-4e6e-a6eb-33e80a9d9579 tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] [instance: 34d2991e-b6df-473d-8994-e45ff57ef131] Unregistered the VM {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1604.638252] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-902bf869-76cc-4e6e-a6eb-33e80a9d9579 tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] [instance: 34d2991e-b6df-473d-8994-e45ff57ef131] Deleting contents of the VM from datastore datastore1 {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1604.638463] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-902bf869-76cc-4e6e-a6eb-33e80a9d9579 tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Deleting the datastore file [datastore1] 34d2991e-b6df-473d-8994-e45ff57ef131 {{(pid=62519) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1604.638786] env[62519]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f246da64-6095-42d1-b8c8-589419f7736c {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1604.648166] env[62519]: DEBUG oslo_vmware.api [None req-902bf869-76cc-4e6e-a6eb-33e80a9d9579 tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Waiting for the task: (returnval){ [ 1604.648166] env[62519]: value = "task-1802501" [ 1604.648166] env[62519]: _type = "Task" [ 1604.648166] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1604.658775] env[62519]: DEBUG oslo_vmware.api [None req-902bf869-76cc-4e6e-a6eb-33e80a9d9579 tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Task: {'id': task-1802501, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1604.732278] env[62519]: DEBUG oslo_vmware.api [None req-77454b93-3f15-4824-8029-76272dd44c6c tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Task: {'id': task-1802499, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1604.786023] env[62519]: DEBUG nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Instance 99f22198-1a65-4d0d-b665-90c7063dbdb9 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
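The destroy sequence for instance 34d2991e-b6df-473d-8994-e45ff57ef131 above (power off, unregister the VM, then delete its datastore directory) maps onto three vSphere calls. A hedged sketch of that call order through an oslo.vmware session follows; `session`, `vm_ref`, `file_manager`, `dc_ref`, and `ds_path` are hypothetical stand-ins supplied by the caller, and this illustrates the sequence seen in the log rather than Nova's vmops implementation.

    def destroy_vm(session, vm_ref, file_manager, dc_ref, ds_path):
        # PowerOffVM_Task and DeleteDatastoreFile_Task return Task morefs that
        # get polled like every other task in this log; UnregisterVM returns
        # synchronously.
        session.wait_for_task(
            session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref))
        session.invoke_api(session.vim, 'UnregisterVM', vm_ref)
        session.wait_for_task(
            session.invoke_api(session.vim, 'DeleteDatastoreFile_Task',
                               file_manager, name=ds_path, datacenter=dc_ref))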
{{(pid=62519) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1604.806058] env[62519]: INFO nova.compute.manager [None req-ba15048a-4a0e-4e38-9130-d2fe9bd29d34 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] [instance: 302edcd3-bd6e-41da-b731-4d4c1bb5c3c1] Rebuilding instance [ 1604.855188] env[62519]: DEBUG nova.compute.manager [None req-ba15048a-4a0e-4e38-9130-d2fe9bd29d34 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] [instance: 302edcd3-bd6e-41da-b731-4d4c1bb5c3c1] Checking state {{(pid=62519) _get_power_state /opt/stack/nova/nova/compute/manager.py:1799}} [ 1604.856027] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ced8d98-28c7-4579-a1c0-5aeef93d0baa {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1604.941264] env[62519]: DEBUG oslo_concurrency.lockutils [None req-b3ee3a4d-b7fb-4e51-adc1-0f63f32d64f4 tempest-FloatingIPsAssociationTestJSON-131832216 tempest-FloatingIPsAssociationTestJSON-131832216-project-member] Releasing lock "refresh_cache-9ac3344d-219a-487f-b83f-96c17cd86dad" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1604.941629] env[62519]: DEBUG nova.compute.manager [None req-b3ee3a4d-b7fb-4e51-adc1-0f63f32d64f4 tempest-FloatingIPsAssociationTestJSON-131832216 tempest-FloatingIPsAssociationTestJSON-131832216-project-member] [instance: 9ac3344d-219a-487f-b83f-96c17cd86dad] Instance network_info: |[{"id": "883790fe-b8ee-4a72-99ca-e7d80c7468f3", "address": "fa:16:3e:bb:95:3b", "network": {"id": "ca655458-a420-4494-a897-206c445e8893", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-2085945802-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "08ccb6256cb446e1837e04580892a31a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "98d96b75-ac36-499a-adc2-130c8c1d55ca", "external-id": "nsx-vlan-transportzone-564", "segmentation_id": 564, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap883790fe-b8", "ovs_interfaceid": "883790fe-b8ee-4a72-99ca-e7d80c7468f3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62519) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2004}} [ 1604.941942] env[62519]: DEBUG oslo_concurrency.lockutils [req-dfc34390-07ee-4317-91d1-dc974cb276ea req-2f95dcc4-5393-4d2d-98a9-23f0671322c3 service nova] Acquired lock "refresh_cache-9ac3344d-219a-487f-b83f-96c17cd86dad" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1604.942126] env[62519]: DEBUG nova.network.neutron [req-dfc34390-07ee-4317-91d1-dc974cb276ea req-2f95dcc4-5393-4d2d-98a9-23f0671322c3 service nova] [instance: 9ac3344d-219a-487f-b83f-96c17cd86dad] Refreshing network info cache for port 
883790fe-b8ee-4a72-99ca-e7d80c7468f3 {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1604.943324] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-b3ee3a4d-b7fb-4e51-adc1-0f63f32d64f4 tempest-FloatingIPsAssociationTestJSON-131832216 tempest-FloatingIPsAssociationTestJSON-131832216-project-member] [instance: 9ac3344d-219a-487f-b83f-96c17cd86dad] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:bb:95:3b', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '98d96b75-ac36-499a-adc2-130c8c1d55ca', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '883790fe-b8ee-4a72-99ca-e7d80c7468f3', 'vif_model': 'vmxnet3'}] {{(pid=62519) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1604.951176] env[62519]: DEBUG oslo.service.loopingcall [None req-b3ee3a4d-b7fb-4e51-adc1-0f63f32d64f4 tempest-FloatingIPsAssociationTestJSON-131832216 tempest-FloatingIPsAssociationTestJSON-131832216-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62519) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1604.952187] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9ac3344d-219a-487f-b83f-96c17cd86dad] Creating VM on the ESX host {{(pid=62519) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1604.952405] env[62519]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-542e3a4d-2b86-4ee0-a8a5-da2d36db6f5a {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1604.975485] env[62519]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1604.975485] env[62519]: value = "task-1802502" [ 1604.975485] env[62519]: _type = "Task" [ 1604.975485] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1604.984431] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1802502, 'name': CreateVM_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1605.119900] env[62519]: DEBUG oslo_vmware.api [None req-a05ef864-7b2b-4646-a83f-596c8b3440a9 tempest-ServersWithSpecificFlavorTestJSON-2102225660 tempest-ServersWithSpecificFlavorTestJSON-2102225660-project-member] Task: {'id': task-1802497, 'name': PowerOnVM_Task, 'duration_secs': 1.194832} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1605.120187] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-a05ef864-7b2b-4646-a83f-596c8b3440a9 tempest-ServersWithSpecificFlavorTestJSON-2102225660 tempest-ServersWithSpecificFlavorTestJSON-2102225660-project-member] [instance: f0925a44-c15b-4415-99bc-1b2366292fe4] Powered on the VM {{(pid=62519) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1605.120386] env[62519]: INFO nova.compute.manager [None req-a05ef864-7b2b-4646-a83f-596c8b3440a9 tempest-ServersWithSpecificFlavorTestJSON-2102225660 tempest-ServersWithSpecificFlavorTestJSON-2102225660-project-member] [instance: f0925a44-c15b-4415-99bc-1b2366292fe4] Took 10.68 seconds to spawn the instance on the hypervisor. 
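The "Acquiring lock" / "Acquired lock" / "Releasing lock" lines around refresh_cache-9ac3344d-219a-487f-b83f-96c17cd86dad come from oslo.concurrency's lockutils, which serializes workers touching the same instance's network info cache. A minimal sketch of that locking pattern, with a placeholder refresh callable, is:

    from oslo_concurrency import lockutils

    def refresh_network_cache(instance_uuid, refresh):
        # Mirrors the "refresh_cache-<uuid>" lock naming seen above; lockutils
        # emits the acquired/released DEBUG lines with wait and hold times.
        with lockutils.lock('refresh_cache-%s' % instance_uuid):
            refresh()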
[ 1605.120560] env[62519]: DEBUG nova.compute.manager [None req-a05ef864-7b2b-4646-a83f-596c8b3440a9 tempest-ServersWithSpecificFlavorTestJSON-2102225660 tempest-ServersWithSpecificFlavorTestJSON-2102225660-project-member] [instance: f0925a44-c15b-4415-99bc-1b2366292fe4] Checking state {{(pid=62519) _get_power_state /opt/stack/nova/nova/compute/manager.py:1799}} [ 1605.121355] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7bb37d2-efc5-460e-9efc-044cc8956e3d {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1605.158063] env[62519]: DEBUG oslo_vmware.api [None req-902bf869-76cc-4e6e-a6eb-33e80a9d9579 tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Task: {'id': task-1802501, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1605.230730] env[62519]: DEBUG oslo_vmware.api [None req-77454b93-3f15-4824-8029-76272dd44c6c tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Task: {'id': task-1802499, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.615556} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1605.231140] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-77454b93-3f15-4824-8029-76272dd44c6c tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk to [datastore1] 8b20b91d-d4e7-4ec2-88ec-76b444a8d8a8/8b20b91d-d4e7-4ec2-88ec-76b444a8d8a8.vmdk {{(pid=62519) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1605.231382] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-77454b93-3f15-4824-8029-76272dd44c6c tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] [instance: 8b20b91d-d4e7-4ec2-88ec-76b444a8d8a8] Extending root virtual disk to 1048576 {{(pid=62519) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1605.231706] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-e9dcb51c-da4d-4947-80b8-b722a6da6a28 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1605.241058] env[62519]: DEBUG oslo_vmware.api [None req-77454b93-3f15-4824-8029-76272dd44c6c tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Waiting for the task: (returnval){ [ 1605.241058] env[62519]: value = "task-1802503" [ 1605.241058] env[62519]: _type = "Task" [ 1605.241058] env[62519]: } to complete. 
{{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1605.253742] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-887fdd97-c9a0-4663-ab52-0f67cebf30b9 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] [instance: 80ef3fd4-b9ef-4fd2-a991-feec78a0c81d] Powering off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1605.254059] env[62519]: DEBUG oslo_vmware.api [None req-77454b93-3f15-4824-8029-76272dd44c6c tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Task: {'id': task-1802503, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1605.254317] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-85edf9e1-2bb8-4a5c-8414-d8e1a53857a9 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1605.262397] env[62519]: DEBUG oslo_vmware.api [None req-887fdd97-c9a0-4663-ab52-0f67cebf30b9 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Waiting for the task: (returnval){ [ 1605.262397] env[62519]: value = "task-1802504" [ 1605.262397] env[62519]: _type = "Task" [ 1605.262397] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1605.271809] env[62519]: DEBUG oslo_vmware.api [None req-887fdd97-c9a0-4663-ab52-0f67cebf30b9 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Task: {'id': task-1802504, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1605.288861] env[62519]: DEBUG nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Instance 8659f63a-5df9-4ff8-84dd-0722026dc820 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62519) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1605.289247] env[62519]: DEBUG nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Total usable vcpus: 48, total allocated vcpus: 22 {{(pid=62519) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1605.289381] env[62519]: DEBUG nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=4800MB phys_disk=200GB used_disk=22GB total_vcpus=48 used_vcpus=22 pci_stats=[] {{(pid=62519) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1605.486733] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1802502, 'name': CreateVM_Task, 'duration_secs': 0.51186} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1605.488923] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9ac3344d-219a-487f-b83f-96c17cd86dad] Created VM on the ESX host {{(pid=62519) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1605.489780] env[62519]: DEBUG oslo_concurrency.lockutils [None req-b3ee3a4d-b7fb-4e51-adc1-0f63f32d64f4 tempest-FloatingIPsAssociationTestJSON-131832216 tempest-FloatingIPsAssociationTestJSON-131832216-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1605.489946] env[62519]: DEBUG oslo_concurrency.lockutils [None req-b3ee3a4d-b7fb-4e51-adc1-0f63f32d64f4 tempest-FloatingIPsAssociationTestJSON-131832216 tempest-FloatingIPsAssociationTestJSON-131832216-project-member] Acquired lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1605.490297] env[62519]: DEBUG oslo_concurrency.lockutils [None req-b3ee3a4d-b7fb-4e51-adc1-0f63f32d64f4 tempest-FloatingIPsAssociationTestJSON-131832216 tempest-FloatingIPsAssociationTestJSON-131832216-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1605.490554] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e2f6b686-55c4-4bbc-a9ae-3cc6266984c5 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1605.495831] env[62519]: DEBUG oslo_vmware.api [None req-b3ee3a4d-b7fb-4e51-adc1-0f63f32d64f4 tempest-FloatingIPsAssociationTestJSON-131832216 tempest-FloatingIPsAssociationTestJSON-131832216-project-member] Waiting for the task: (returnval){ [ 1605.495831] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]52299fcc-01b6-3165-582c-f649a2bbeab4" [ 1605.495831] env[62519]: _type = "Task" [ 1605.495831] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1605.506574] env[62519]: DEBUG oslo_vmware.api [None req-b3ee3a4d-b7fb-4e51-adc1-0f63f32d64f4 tempest-FloatingIPsAssociationTestJSON-131832216 tempest-FloatingIPsAssociationTestJSON-131832216-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52299fcc-01b6-3165-582c-f649a2bbeab4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1605.640947] env[62519]: INFO nova.compute.manager [None req-a05ef864-7b2b-4646-a83f-596c8b3440a9 tempest-ServersWithSpecificFlavorTestJSON-2102225660 tempest-ServersWithSpecificFlavorTestJSON-2102225660-project-member] [instance: f0925a44-c15b-4415-99bc-1b2366292fe4] Took 47.84 seconds to build instance. [ 1605.662645] env[62519]: DEBUG oslo_vmware.api [None req-902bf869-76cc-4e6e-a6eb-33e80a9d9579 tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Task: {'id': task-1802501, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.656572} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1605.663517] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-902bf869-76cc-4e6e-a6eb-33e80a9d9579 tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Deleted the datastore file {{(pid=62519) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1605.663748] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-902bf869-76cc-4e6e-a6eb-33e80a9d9579 tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] [instance: 34d2991e-b6df-473d-8994-e45ff57ef131] Deleted contents of the VM from datastore datastore1 {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1605.663873] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-902bf869-76cc-4e6e-a6eb-33e80a9d9579 tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] [instance: 34d2991e-b6df-473d-8994-e45ff57ef131] Instance destroyed {{(pid=62519) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1605.760237] env[62519]: DEBUG oslo_vmware.api [None req-77454b93-3f15-4824-8029-76272dd44c6c tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Task: {'id': task-1802503, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.07102} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1605.760531] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-77454b93-3f15-4824-8029-76272dd44c6c tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] [instance: 8b20b91d-d4e7-4ec2-88ec-76b444a8d8a8] Extended root virtual disk {{(pid=62519) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1605.761342] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c7fcff1-90bd-4bae-b2e7-0dd94915eb78 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1605.791745] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-887fdd97-c9a0-4663-ab52-0f67cebf30b9 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] [instance: 80ef3fd4-b9ef-4fd2-a991-feec78a0c81d] VM already powered off {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1605.792023] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-887fdd97-c9a0-4663-ab52-0f67cebf30b9 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] [instance: 80ef3fd4-b9ef-4fd2-a991-feec78a0c81d] Destroying instance {{(pid=62519) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1605.801982] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-77454b93-3f15-4824-8029-76272dd44c6c tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] [instance: 8b20b91d-d4e7-4ec2-88ec-76b444a8d8a8] Reconfiguring VM instance instance-00000034 to attach disk [datastore1] 8b20b91d-d4e7-4ec2-88ec-76b444a8d8a8/8b20b91d-d4e7-4ec2-88ec-76b444a8d8a8.vmdk or device None with type sparse {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1605.805949] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-89b6d75b-9429-4f83-8acb-4fab213903d3 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1605.809075] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8ba4c4ca-3c80-4145-a77c-fbaa1b6cd84c {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1605.830788] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-887fdd97-c9a0-4663-ab52-0f67cebf30b9 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] [instance: 80ef3fd4-b9ef-4fd2-a991-feec78a0c81d] Unregistering the VM {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1605.832202] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-885fc706-f4c4-43c5-9cd2-8e95fe9dae9e {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1605.833870] env[62519]: DEBUG oslo_vmware.api [None req-77454b93-3f15-4824-8029-76272dd44c6c tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Waiting for the task: (returnval){ [ 1605.833870] env[62519]: value = "task-1802505" [ 1605.833870] env[62519]: _type = "Task" [ 1605.833870] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1605.834673] env[62519]: DEBUG nova.network.neutron [req-dfc34390-07ee-4317-91d1-dc974cb276ea req-2f95dcc4-5393-4d2d-98a9-23f0671322c3 service nova] [instance: 9ac3344d-219a-487f-b83f-96c17cd86dad] Updated VIF entry in instance network info cache for port 883790fe-b8ee-4a72-99ca-e7d80c7468f3. 
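The "Updated VIF entry" record above shows the shape of the cached network_info: a list of VIF dicts carrying the port ID, MAC, fixed IPs, and the NSX switch binding that later feeds the "Instance VIF info" used to build the VM. A small sketch of reading those fields, with values copied from the entry above and the structure trimmed to what the sketch touches:

    vif = {
        "id": "883790fe-b8ee-4a72-99ca-e7d80c7468f3",
        "address": "fa:16:3e:bb:95:3b",
        "network": {
            "bridge": "br-int",
            "subnets": [{"ips": [{"address": "192.168.128.9", "version": 4}]}],
        },
        "details": {
            "nsx-logical-switch-id": "98d96b75-ac36-499a-adc2-130c8c1d55ca",
            "segmentation_id": 564,
        },
        "devname": "tap883790fe-b8",
    }

    fixed_ips = [ip["address"]
                 for subnet in vif["network"]["subnets"]
                 for ip in subnet["ips"]]
    print(vif["address"], fixed_ips, vif["details"]["nsx-logical-switch-id"])
    # fa:16:3e:bb:95:3b ['192.168.128.9'] 98d96b75-ac36-499a-adc2-130c8c1d55ca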
{{(pid=62519) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1605.834995] env[62519]: DEBUG nova.network.neutron [req-dfc34390-07ee-4317-91d1-dc974cb276ea req-2f95dcc4-5393-4d2d-98a9-23f0671322c3 service nova] [instance: 9ac3344d-219a-487f-b83f-96c17cd86dad] Updating instance_info_cache with network_info: [{"id": "883790fe-b8ee-4a72-99ca-e7d80c7468f3", "address": "fa:16:3e:bb:95:3b", "network": {"id": "ca655458-a420-4494-a897-206c445e8893", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-2085945802-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "08ccb6256cb446e1837e04580892a31a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "98d96b75-ac36-499a-adc2-130c8c1d55ca", "external-id": "nsx-vlan-transportzone-564", "segmentation_id": 564, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap883790fe-b8", "ovs_interfaceid": "883790fe-b8ee-4a72-99ca-e7d80c7468f3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1605.850133] env[62519]: DEBUG oslo_vmware.api [None req-77454b93-3f15-4824-8029-76272dd44c6c tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Task: {'id': task-1802505, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1605.876766] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-ba15048a-4a0e-4e38-9130-d2fe9bd29d34 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] [instance: 302edcd3-bd6e-41da-b731-4d4c1bb5c3c1] Powering off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1605.877338] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-5970d02e-22ca-4a29-96e3-bd320b730a31 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1605.884854] env[62519]: DEBUG oslo_vmware.api [None req-ba15048a-4a0e-4e38-9130-d2fe9bd29d34 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Waiting for the task: (returnval){ [ 1605.884854] env[62519]: value = "task-1802507" [ 1605.884854] env[62519]: _type = "Task" [ 1605.884854] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1605.899502] env[62519]: DEBUG oslo_vmware.api [None req-ba15048a-4a0e-4e38-9130-d2fe9bd29d34 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Task: {'id': task-1802507, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1605.934865] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-887fdd97-c9a0-4663-ab52-0f67cebf30b9 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] [instance: 80ef3fd4-b9ef-4fd2-a991-feec78a0c81d] Unregistered the VM {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1605.935124] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-887fdd97-c9a0-4663-ab52-0f67cebf30b9 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] [instance: 80ef3fd4-b9ef-4fd2-a991-feec78a0c81d] Deleting contents of the VM from datastore datastore1 {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1605.935309] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-887fdd97-c9a0-4663-ab52-0f67cebf30b9 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Deleting the datastore file [datastore1] 80ef3fd4-b9ef-4fd2-a991-feec78a0c81d {{(pid=62519) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1605.935576] env[62519]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-62f7bdca-7c60-43ea-a3cd-a3bc76d781b5 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1605.943679] env[62519]: DEBUG oslo_vmware.api [None req-887fdd97-c9a0-4663-ab52-0f67cebf30b9 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Waiting for the task: (returnval){ [ 1605.943679] env[62519]: value = "task-1802508" [ 1605.943679] env[62519]: _type = "Task" [ 1605.943679] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1605.955606] env[62519]: DEBUG oslo_vmware.api [None req-887fdd97-c9a0-4663-ab52-0f67cebf30b9 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Task: {'id': task-1802508, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1605.975899] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6cc5539e-ad4e-4d32-a1a8-d30244374bd6 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1605.984755] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96b93ec3-9514-4102-9a1f-bb7112cf75b7 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1606.021775] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-129f3944-30ad-46b1-8862-b1d480e17938 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1606.033936] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d9b20c9-fb5e-49bb-a8e9-926e8a8260c1 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1606.037913] env[62519]: DEBUG oslo_vmware.api [None req-b3ee3a4d-b7fb-4e51-adc1-0f63f32d64f4 tempest-FloatingIPsAssociationTestJSON-131832216 tempest-FloatingIPsAssociationTestJSON-131832216-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52299fcc-01b6-3165-582c-f649a2bbeab4, 'name': SearchDatastore_Task, 'duration_secs': 0.068002} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1606.038283] env[62519]: DEBUG oslo_concurrency.lockutils [None req-b3ee3a4d-b7fb-4e51-adc1-0f63f32d64f4 tempest-FloatingIPsAssociationTestJSON-131832216 tempest-FloatingIPsAssociationTestJSON-131832216-project-member] Releasing lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1606.038496] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-b3ee3a4d-b7fb-4e51-adc1-0f63f32d64f4 tempest-FloatingIPsAssociationTestJSON-131832216 tempest-FloatingIPsAssociationTestJSON-131832216-project-member] [instance: 9ac3344d-219a-487f-b83f-96c17cd86dad] Processing image 15793716-f1d9-4a86-9030-717adf498693 {{(pid=62519) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1606.038731] env[62519]: DEBUG oslo_concurrency.lockutils [None req-b3ee3a4d-b7fb-4e51-adc1-0f63f32d64f4 tempest-FloatingIPsAssociationTestJSON-131832216 tempest-FloatingIPsAssociationTestJSON-131832216-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1606.038872] env[62519]: DEBUG oslo_concurrency.lockutils [None req-b3ee3a4d-b7fb-4e51-adc1-0f63f32d64f4 tempest-FloatingIPsAssociationTestJSON-131832216 tempest-FloatingIPsAssociationTestJSON-131832216-project-member] Acquired lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1606.039057] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-b3ee3a4d-b7fb-4e51-adc1-0f63f32d64f4 
tempest-FloatingIPsAssociationTestJSON-131832216 tempest-FloatingIPsAssociationTestJSON-131832216-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62519) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1606.039723] env[62519]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d105e79e-2b64-4527-9437-cc82a1011a74 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1606.050389] env[62519]: DEBUG nova.compute.provider_tree [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Inventory has not changed in ProviderTree for provider: f8ca0d98-9158-4b85-ae0e-b106f966dd44 {{(pid=62519) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1606.063143] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-b3ee3a4d-b7fb-4e51-adc1-0f63f32d64f4 tempest-FloatingIPsAssociationTestJSON-131832216 tempest-FloatingIPsAssociationTestJSON-131832216-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62519) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1606.063333] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-b3ee3a4d-b7fb-4e51-adc1-0f63f32d64f4 tempest-FloatingIPsAssociationTestJSON-131832216 tempest-FloatingIPsAssociationTestJSON-131832216-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62519) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1606.064113] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5607a159-de24-442d-8b87-a7bf9394fa19 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1606.070470] env[62519]: DEBUG oslo_vmware.api [None req-b3ee3a4d-b7fb-4e51-adc1-0f63f32d64f4 tempest-FloatingIPsAssociationTestJSON-131832216 tempest-FloatingIPsAssociationTestJSON-131832216-project-member] Waiting for the task: (returnval){ [ 1606.070470] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]52a36559-5bcf-e60f-b185-1954923637d1" [ 1606.070470] env[62519]: _type = "Task" [ 1606.070470] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1606.079916] env[62519]: DEBUG oslo_vmware.api [None req-b3ee3a4d-b7fb-4e51-adc1-0f63f32d64f4 tempest-FloatingIPsAssociationTestJSON-131832216 tempest-FloatingIPsAssociationTestJSON-131832216-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52a36559-5bcf-e60f-b185-1954923637d1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1606.343625] env[62519]: DEBUG oslo_concurrency.lockutils [req-dfc34390-07ee-4317-91d1-dc974cb276ea req-2f95dcc4-5393-4d2d-98a9-23f0671322c3 service nova] Releasing lock "refresh_cache-9ac3344d-219a-487f-b83f-96c17cd86dad" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1606.348251] env[62519]: DEBUG oslo_vmware.api [None req-77454b93-3f15-4824-8029-76272dd44c6c tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Task: {'id': task-1802505, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1606.396205] env[62519]: DEBUG oslo_vmware.api [None req-ba15048a-4a0e-4e38-9130-d2fe9bd29d34 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Task: {'id': task-1802507, 'name': PowerOffVM_Task, 'duration_secs': 0.258693} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1606.396205] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-ba15048a-4a0e-4e38-9130-d2fe9bd29d34 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] [instance: 302edcd3-bd6e-41da-b731-4d4c1bb5c3c1] Powered off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1606.396458] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-ba15048a-4a0e-4e38-9130-d2fe9bd29d34 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] [instance: 302edcd3-bd6e-41da-b731-4d4c1bb5c3c1] Destroying instance {{(pid=62519) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1606.397434] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c202fa6-8fd0-47e8-a056-933e18582b55 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1606.405781] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-ba15048a-4a0e-4e38-9130-d2fe9bd29d34 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] [instance: 302edcd3-bd6e-41da-b731-4d4c1bb5c3c1] Unregistering the VM {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1606.406046] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-52453045-e5bf-4801-b0bb-246378baae2f {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1606.458135] env[62519]: DEBUG oslo_vmware.api [None req-887fdd97-c9a0-4663-ab52-0f67cebf30b9 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Task: {'id': task-1802508, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.177038} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1606.458313] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-887fdd97-c9a0-4663-ab52-0f67cebf30b9 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Deleted the datastore file {{(pid=62519) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1606.458493] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-887fdd97-c9a0-4663-ab52-0f67cebf30b9 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] [instance: 80ef3fd4-b9ef-4fd2-a991-feec78a0c81d] Deleted contents of the VM from datastore datastore1 {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1606.459029] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-887fdd97-c9a0-4663-ab52-0f67cebf30b9 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] [instance: 80ef3fd4-b9ef-4fd2-a991-feec78a0c81d] Instance destroyed {{(pid=62519) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1606.496236] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-ba15048a-4a0e-4e38-9130-d2fe9bd29d34 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] [instance: 302edcd3-bd6e-41da-b731-4d4c1bb5c3c1] Unregistered the VM {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1606.496478] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-ba15048a-4a0e-4e38-9130-d2fe9bd29d34 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] [instance: 302edcd3-bd6e-41da-b731-4d4c1bb5c3c1] Deleting contents of the VM from datastore datastore1 {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1606.496660] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-ba15048a-4a0e-4e38-9130-d2fe9bd29d34 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Deleting the datastore file [datastore1] 302edcd3-bd6e-41da-b731-4d4c1bb5c3c1 {{(pid=62519) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1606.497425] env[62519]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-7cefbe8a-0138-49b2-86ab-4e1e04c10c25 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1606.504614] env[62519]: DEBUG oslo_vmware.api [None req-ba15048a-4a0e-4e38-9130-d2fe9bd29d34 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Waiting for the task: (returnval){ [ 1606.504614] env[62519]: value = "task-1802510" [ 1606.504614] env[62519]: _type = "Task" [ 1606.504614] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1606.513158] env[62519]: DEBUG oslo_vmware.api [None req-ba15048a-4a0e-4e38-9130-d2fe9bd29d34 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Task: {'id': task-1802510, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1606.553623] env[62519]: DEBUG nova.scheduler.client.report [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Inventory has not changed for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 157, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1606.582296] env[62519]: DEBUG oslo_vmware.api [None req-b3ee3a4d-b7fb-4e51-adc1-0f63f32d64f4 tempest-FloatingIPsAssociationTestJSON-131832216 tempest-FloatingIPsAssociationTestJSON-131832216-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52a36559-5bcf-e60f-b185-1954923637d1, 'name': SearchDatastore_Task, 'duration_secs': 0.010087} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1606.583109] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-14787dd5-47a1-45e1-ac4f-b0d4a6475ec3 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1606.588949] env[62519]: DEBUG oslo_vmware.api [None req-b3ee3a4d-b7fb-4e51-adc1-0f63f32d64f4 tempest-FloatingIPsAssociationTestJSON-131832216 tempest-FloatingIPsAssociationTestJSON-131832216-project-member] Waiting for the task: (returnval){ [ 1606.588949] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]52bbf124-abbc-b384-db1f-53b8530aae0d" [ 1606.588949] env[62519]: _type = "Task" [ 1606.588949] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1606.597199] env[62519]: DEBUG oslo_vmware.api [None req-b3ee3a4d-b7fb-4e51-adc1-0f63f32d64f4 tempest-FloatingIPsAssociationTestJSON-131832216 tempest-FloatingIPsAssociationTestJSON-131832216-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52bbf124-abbc-b384-db1f-53b8530aae0d, 'name': SearchDatastore_Task} progress is 0%. 
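The inventory reported above determines how much of this provider Placement will hand out: schedulable capacity per resource class is (total - reserved) * allocation_ratio. Plugging in the logged values gives 48 * 4.0 = 192 VCPU, (196590 - 512) * 1.0 = 196078 MEMORY_MB, and 400 * 1.0 = 400 DISK_GB, which is why the 22 vCPUs allocated against 48 physical ones earlier in this periodic run are nowhere near the limit. A quick check on the same numbers:

    # Capacity = (total - reserved) * allocation_ratio, using the values logged above.
    inventory = {
        'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
    }
    for rc, inv in inventory.items():
        print(rc, (inv['total'] - inv['reserved']) * inv['allocation_ratio'])
    # VCPU 192.0, MEMORY_MB 196078.0, DISK_GB 400.0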
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1606.702969] env[62519]: DEBUG nova.virt.hardware [None req-902bf869-76cc-4e6e-a6eb-33e80a9d9579 tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T07:56:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T07:55:55Z,direct_url=,disk_format='vmdk',id=15793716-f1d9-4a86-9030-717adf498693,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4069337684d348e0a1ab4eb6a1a2b14d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T07:55:56Z,virtual_size=,visibility=), allow threads: False {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1606.703399] env[62519]: DEBUG nova.virt.hardware [None req-902bf869-76cc-4e6e-a6eb-33e80a9d9579 tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Flavor limits 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1606.703746] env[62519]: DEBUG nova.virt.hardware [None req-902bf869-76cc-4e6e-a6eb-33e80a9d9579 tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Image limits 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1606.704327] env[62519]: DEBUG nova.virt.hardware [None req-902bf869-76cc-4e6e-a6eb-33e80a9d9579 tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Flavor pref 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1606.704763] env[62519]: DEBUG nova.virt.hardware [None req-902bf869-76cc-4e6e-a6eb-33e80a9d9579 tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Image pref 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1606.705094] env[62519]: DEBUG nova.virt.hardware [None req-902bf869-76cc-4e6e-a6eb-33e80a9d9579 tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1606.705635] env[62519]: DEBUG nova.virt.hardware [None req-902bf869-76cc-4e6e-a6eb-33e80a9d9579 tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1606.705952] env[62519]: DEBUG nova.virt.hardware [None req-902bf869-76cc-4e6e-a6eb-33e80a9d9579 tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62519) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1606.706428] env[62519]: DEBUG nova.virt.hardware [None 
req-902bf869-76cc-4e6e-a6eb-33e80a9d9579 tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Got 1 possible topologies {{(pid=62519) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1606.707985] env[62519]: DEBUG nova.virt.hardware [None req-902bf869-76cc-4e6e-a6eb-33e80a9d9579 tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1606.707985] env[62519]: DEBUG nova.virt.hardware [None req-902bf869-76cc-4e6e-a6eb-33e80a9d9579 tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1606.709908] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80505758-3376-428f-836f-e2d77a13808f {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1606.719292] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b38e08e-5de5-4afe-9fd0-b5cc4f6d1395 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1606.735105] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-902bf869-76cc-4e6e-a6eb-33e80a9d9579 tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] [instance: 34d2991e-b6df-473d-8994-e45ff57ef131] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ed:4a:67', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '8bc3fa06-9d5b-4ab1-8113-6ed8942d23b6', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'e6b9aab2-f105-4c06-b204-f0626f41ccbe', 'vif_model': 'vmxnet3'}] {{(pid=62519) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1606.742049] env[62519]: DEBUG oslo.service.loopingcall [None req-902bf869-76cc-4e6e-a6eb-33e80a9d9579 tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62519) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1606.742327] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 34d2991e-b6df-473d-8994-e45ff57ef131] Creating VM on the ESX host {{(pid=62519) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1606.742537] env[62519]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-80f879c3-b125-4106-ab88-804b3fddd5e0 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1606.765967] env[62519]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1606.765967] env[62519]: value = "task-1802511" [ 1606.765967] env[62519]: _type = "Task" [ 1606.765967] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1606.773996] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1802511, 'name': CreateVM_Task} progress is 0%. 
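Annotation: the 'Build topologies for 1 vcpu(s)' through 'Sorted desired topologies' records above come from Nova enumerating sockets/cores/threads combinations whose product equals the flavor's vCPU count, bounded by the logged limits of 65536 each. A simplified sketch of that enumeration, for illustration only and not Nova's actual _get_possible_cpu_topologies:

```python
# Illustrative approximation of the topology enumeration logged above.
import itertools

def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
    for sockets, cores, threads in itertools.product(
            range(1, min(vcpus, max_sockets) + 1),
            range(1, min(vcpus, max_cores) + 1),
            range(1, min(vcpus, max_threads) + 1)):
        if sockets * cores * threads == vcpus:
            yield (sockets, cores, threads)

print(list(possible_topologies(1)))  # [(1, 1, 1)] -- the single topology in the log
print(list(possible_topologies(4)))  # several candidates, e.g. (1, 2, 2), (2, 2, 1), ...
```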
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1606.848204] env[62519]: DEBUG oslo_vmware.api [None req-77454b93-3f15-4824-8029-76272dd44c6c tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Task: {'id': task-1802505, 'name': ReconfigVM_Task, 'duration_secs': 0.568706} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1606.848416] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-77454b93-3f15-4824-8029-76272dd44c6c tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] [instance: 8b20b91d-d4e7-4ec2-88ec-76b444a8d8a8] Reconfigured VM instance instance-00000034 to attach disk [datastore1] 8b20b91d-d4e7-4ec2-88ec-76b444a8d8a8/8b20b91d-d4e7-4ec2-88ec-76b444a8d8a8.vmdk or device None with type sparse {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1606.849112] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-21e73308-d866-4608-bb48-9d20be4a6331 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1606.856172] env[62519]: DEBUG oslo_concurrency.lockutils [None req-f0afbcc8-8045-4111-b3d2-45982468a24d tempest-ServerGroupTestJSON-1731423365 tempest-ServerGroupTestJSON-1731423365-project-member] Acquiring lock "45d1aa86-a5c8-4e75-a6c8-5f55461702f8" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1606.856429] env[62519]: DEBUG oslo_concurrency.lockutils [None req-f0afbcc8-8045-4111-b3d2-45982468a24d tempest-ServerGroupTestJSON-1731423365 tempest-ServerGroupTestJSON-1731423365-project-member] Lock "45d1aa86-a5c8-4e75-a6c8-5f55461702f8" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1606.860331] env[62519]: DEBUG oslo_vmware.api [None req-77454b93-3f15-4824-8029-76272dd44c6c tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Waiting for the task: (returnval){ [ 1606.860331] env[62519]: value = "task-1802512" [ 1606.860331] env[62519]: _type = "Task" [ 1606.860331] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1606.873995] env[62519]: DEBUG oslo_vmware.api [None req-77454b93-3f15-4824-8029-76272dd44c6c tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Task: {'id': task-1802512, 'name': Rename_Task} progress is 5%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1607.016988] env[62519]: DEBUG oslo_vmware.api [None req-ba15048a-4a0e-4e38-9130-d2fe9bd29d34 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Task: {'id': task-1802510, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.167786} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1607.017314] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-ba15048a-4a0e-4e38-9130-d2fe9bd29d34 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Deleted the datastore file {{(pid=62519) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1607.017529] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-ba15048a-4a0e-4e38-9130-d2fe9bd29d34 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] [instance: 302edcd3-bd6e-41da-b731-4d4c1bb5c3c1] Deleted contents of the VM from datastore datastore1 {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1607.017748] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-ba15048a-4a0e-4e38-9130-d2fe9bd29d34 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] [instance: 302edcd3-bd6e-41da-b731-4d4c1bb5c3c1] Instance destroyed {{(pid=62519) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1607.059961] env[62519]: DEBUG nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62519) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1607.060292] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 7.390s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1607.060827] env[62519]: DEBUG oslo_concurrency.lockutils [None req-94572272-cfe8-4ebd-bbc6-1659ba6f617f tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 34.747s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1607.062366] env[62519]: INFO nova.compute.claims [None req-94572272-cfe8-4ebd-bbc6-1659ba6f617f tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] [instance: c60f5d73-9d6d-4b5f-b71b-00b6b787d482] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1607.104249] env[62519]: DEBUG oslo_vmware.api [None req-b3ee3a4d-b7fb-4e51-adc1-0f63f32d64f4 tempest-FloatingIPsAssociationTestJSON-131832216 tempest-FloatingIPsAssociationTestJSON-131832216-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52bbf124-abbc-b384-db1f-53b8530aae0d, 'name': SearchDatastore_Task, 'duration_secs': 0.011434} completed successfully. 
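Annotation: the 'Acquiring lock ... / acquired ... waited 34.747s / released ... held 7.390s' records around here are emitted by oslo.concurrency's lock helpers; the lockutils.py:402/407/421 lines match the synchronized decorator's wrapper, and the :310/313/331 lines match the lock() context manager. A minimal sketch of both usage patterns, with placeholder bodies (only the lockutils calls are real API):

```python
# Illustrative only: the two oslo.concurrency locking patterns behind the
# acquire/wait/hold/release DEBUG lines in this log. Function bodies are placeholders.
from oslo_concurrency import lockutils

@lockutils.synchronized("compute_resources")
def update_resource_tracker():
    # serialized section; the "waited"/"held" durations are measured around it
    pass

def process_cached_image(cache_path):
    # explicit per-path lock, as with "[datastore1] devstack-image-cache_base/..."
    with lockutils.lock(cache_path):
        pass
```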
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1607.104512] env[62519]: DEBUG oslo_concurrency.lockutils [None req-b3ee3a4d-b7fb-4e51-adc1-0f63f32d64f4 tempest-FloatingIPsAssociationTestJSON-131832216 tempest-FloatingIPsAssociationTestJSON-131832216-project-member] Releasing lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1607.105442] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-b3ee3a4d-b7fb-4e51-adc1-0f63f32d64f4 tempest-FloatingIPsAssociationTestJSON-131832216 tempest-FloatingIPsAssociationTestJSON-131832216-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk to [datastore1] 9ac3344d-219a-487f-b83f-96c17cd86dad/9ac3344d-219a-487f-b83f-96c17cd86dad.vmdk {{(pid=62519) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1607.105442] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e180cf20-6fa4-4a5d-aff7-20d121fa652f {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1607.113024] env[62519]: DEBUG oslo_vmware.api [None req-b3ee3a4d-b7fb-4e51-adc1-0f63f32d64f4 tempest-FloatingIPsAssociationTestJSON-131832216 tempest-FloatingIPsAssociationTestJSON-131832216-project-member] Waiting for the task: (returnval){ [ 1607.113024] env[62519]: value = "task-1802513" [ 1607.113024] env[62519]: _type = "Task" [ 1607.113024] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1607.122671] env[62519]: DEBUG oslo_vmware.api [None req-b3ee3a4d-b7fb-4e51-adc1-0f63f32d64f4 tempest-FloatingIPsAssociationTestJSON-131832216 tempest-FloatingIPsAssociationTestJSON-131832216-project-member] Task: {'id': task-1802513, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1607.148910] env[62519]: DEBUG oslo_concurrency.lockutils [None req-a05ef864-7b2b-4646-a83f-596c8b3440a9 tempest-ServersWithSpecificFlavorTestJSON-2102225660 tempest-ServersWithSpecificFlavorTestJSON-2102225660-project-member] Lock "f0925a44-c15b-4415-99bc-1b2366292fe4" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 81.554s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1607.277724] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1802511, 'name': CreateVM_Task, 'duration_secs': 0.501961} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1607.278014] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 34d2991e-b6df-473d-8994-e45ff57ef131] Created VM on the ESX host {{(pid=62519) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1607.278725] env[62519]: DEBUG oslo_concurrency.lockutils [None req-902bf869-76cc-4e6e-a6eb-33e80a9d9579 tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1607.278999] env[62519]: DEBUG oslo_concurrency.lockutils [None req-902bf869-76cc-4e6e-a6eb-33e80a9d9579 tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Acquired lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1607.279270] env[62519]: DEBUG oslo_concurrency.lockutils [None req-902bf869-76cc-4e6e-a6eb-33e80a9d9579 tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1607.279704] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-befbf16a-b28f-420b-b930-7d901f1d7bf3 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1607.285925] env[62519]: DEBUG oslo_vmware.api [None req-902bf869-76cc-4e6e-a6eb-33e80a9d9579 tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Waiting for the task: (returnval){ [ 1607.285925] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]52eb9162-0018-f23c-3302-247889de6dc3" [ 1607.285925] env[62519]: _type = "Task" [ 1607.285925] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1607.296602] env[62519]: DEBUG oslo_vmware.api [None req-902bf869-76cc-4e6e-a6eb-33e80a9d9579 tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52eb9162-0018-f23c-3302-247889de6dc3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1607.374609] env[62519]: DEBUG oslo_vmware.api [None req-77454b93-3f15-4824-8029-76272dd44c6c tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Task: {'id': task-1802512, 'name': Rename_Task, 'duration_secs': 0.261182} completed successfully. 
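Annotation: the recurring 'Waiting for the task: (returnval){ ... } to complete' and 'progress is N%' records are a fixed-interval poll of each vCenter task until it reports success or failure. A minimal sketch of that pattern with oslo.service's looping call; poll_task is a hypothetical callable standing in for the session call that reads task state, not oslo.vmware's real wait_for_task:

```python
# Illustrative only: poll a task at a fixed interval until it finishes.
# poll_task is hypothetical; FixedIntervalLoopingCall and LoopingCallDone are
# real oslo.service APIs.
from oslo_service import loopingcall

def wait_for_vcenter_task(poll_task, interval=0.5):
    def _poll():
        info = poll_task()  # hypothetical: returns e.g. {'state': 'running', 'progress': 40}
        if info["state"] == "success":
            raise loopingcall.LoopingCallDone(info)   # stops the loop; .wait() returns info
        if info["state"] == "error":
            raise RuntimeError(info.get("error", "task failed"))
        # any other state: keep polling (the log's "progress is N%" lines map here)

    timer = loopingcall.FixedIntervalLoopingCall(_poll)
    return timer.start(interval=interval).wait()
```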
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1607.375024] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-77454b93-3f15-4824-8029-76272dd44c6c tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] [instance: 8b20b91d-d4e7-4ec2-88ec-76b444a8d8a8] Powering on the VM {{(pid=62519) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1607.375405] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f741b031-cb41-4b70-ac70-862e08525859 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1607.384479] env[62519]: DEBUG oslo_vmware.api [None req-77454b93-3f15-4824-8029-76272dd44c6c tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Waiting for the task: (returnval){ [ 1607.384479] env[62519]: value = "task-1802514" [ 1607.384479] env[62519]: _type = "Task" [ 1607.384479] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1607.394595] env[62519]: DEBUG oslo_vmware.api [None req-77454b93-3f15-4824-8029-76272dd44c6c tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Task: {'id': task-1802514, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1607.514009] env[62519]: DEBUG nova.virt.hardware [None req-887fdd97-c9a0-4663-ab52-0f67cebf30b9 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T07:56:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T07:55:55Z,direct_url=,disk_format='vmdk',id=15793716-f1d9-4a86-9030-717adf498693,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4069337684d348e0a1ab4eb6a1a2b14d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T07:55:56Z,virtual_size=,visibility=), allow threads: False {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1607.514269] env[62519]: DEBUG nova.virt.hardware [None req-887fdd97-c9a0-4663-ab52-0f67cebf30b9 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Flavor limits 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1607.514426] env[62519]: DEBUG nova.virt.hardware [None req-887fdd97-c9a0-4663-ab52-0f67cebf30b9 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Image limits 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1607.514605] env[62519]: DEBUG nova.virt.hardware [None req-887fdd97-c9a0-4663-ab52-0f67cebf30b9 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Flavor pref 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 
1607.514749] env[62519]: DEBUG nova.virt.hardware [None req-887fdd97-c9a0-4663-ab52-0f67cebf30b9 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Image pref 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1607.514960] env[62519]: DEBUG nova.virt.hardware [None req-887fdd97-c9a0-4663-ab52-0f67cebf30b9 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1607.515322] env[62519]: DEBUG nova.virt.hardware [None req-887fdd97-c9a0-4663-ab52-0f67cebf30b9 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1607.515490] env[62519]: DEBUG nova.virt.hardware [None req-887fdd97-c9a0-4663-ab52-0f67cebf30b9 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62519) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1607.515657] env[62519]: DEBUG nova.virt.hardware [None req-887fdd97-c9a0-4663-ab52-0f67cebf30b9 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Got 1 possible topologies {{(pid=62519) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1607.515816] env[62519]: DEBUG nova.virt.hardware [None req-887fdd97-c9a0-4663-ab52-0f67cebf30b9 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1607.515987] env[62519]: DEBUG nova.virt.hardware [None req-887fdd97-c9a0-4663-ab52-0f67cebf30b9 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1607.516889] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-653d4963-dea4-4ada-b45a-9fd3c6a3284f {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1607.529841] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3efdc41d-3304-459a-9988-dde8d07c695b {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1607.550624] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-887fdd97-c9a0-4663-ab52-0f67cebf30b9 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] [instance: 80ef3fd4-b9ef-4fd2-a991-feec78a0c81d] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:b4:eb:90', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'bc1e16db-ad3b-4b7f-ab64-4609c87abac0', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '3dd85b46-bf0d-4e78-8975-e0c0cb8b29d8', 'vif_model': 
'vmxnet3'}] {{(pid=62519) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1607.560581] env[62519]: DEBUG oslo.service.loopingcall [None req-887fdd97-c9a0-4663-ab52-0f67cebf30b9 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62519) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1607.560581] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 80ef3fd4-b9ef-4fd2-a991-feec78a0c81d] Creating VM on the ESX host {{(pid=62519) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1607.560581] env[62519]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-18f01a98-2145-4d7f-9115-400a99b6229f {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1607.587197] env[62519]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1607.587197] env[62519]: value = "task-1802515" [ 1607.587197] env[62519]: _type = "Task" [ 1607.587197] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1607.599053] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1802515, 'name': CreateVM_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1607.623524] env[62519]: DEBUG oslo_vmware.api [None req-b3ee3a4d-b7fb-4e51-adc1-0f63f32d64f4 tempest-FloatingIPsAssociationTestJSON-131832216 tempest-FloatingIPsAssociationTestJSON-131832216-project-member] Task: {'id': task-1802513, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.50376} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1607.623792] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-b3ee3a4d-b7fb-4e51-adc1-0f63f32d64f4 tempest-FloatingIPsAssociationTestJSON-131832216 tempest-FloatingIPsAssociationTestJSON-131832216-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk to [datastore1] 9ac3344d-219a-487f-b83f-96c17cd86dad/9ac3344d-219a-487f-b83f-96c17cd86dad.vmdk {{(pid=62519) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1607.624015] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-b3ee3a4d-b7fb-4e51-adc1-0f63f32d64f4 tempest-FloatingIPsAssociationTestJSON-131832216 tempest-FloatingIPsAssociationTestJSON-131832216-project-member] [instance: 9ac3344d-219a-487f-b83f-96c17cd86dad] Extending root virtual disk to 1048576 {{(pid=62519) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1607.624633] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-984ff8b8-83c8-4563-bd8f-fb6fec9ae6cd {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1607.633109] env[62519]: DEBUG oslo_vmware.api [None req-b3ee3a4d-b7fb-4e51-adc1-0f63f32d64f4 tempest-FloatingIPsAssociationTestJSON-131832216 tempest-FloatingIPsAssociationTestJSON-131832216-project-member] Waiting for the task: (returnval){ [ 1607.633109] env[62519]: value = "task-1802516" [ 1607.633109] env[62519]: _type = "Task" [ 1607.633109] env[62519]: } to complete. 
{{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1607.642036] env[62519]: DEBUG oslo_vmware.api [None req-b3ee3a4d-b7fb-4e51-adc1-0f63f32d64f4 tempest-FloatingIPsAssociationTestJSON-131832216 tempest-FloatingIPsAssociationTestJSON-131832216-project-member] Task: {'id': task-1802516, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1607.651825] env[62519]: DEBUG nova.compute.manager [None req-b0f1427a-eff6-4068-bbe3-05ff2c38cc24 tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] [instance: 99f22198-1a65-4d0d-b665-90c7063dbdb9] Starting instance... {{(pid=62519) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2441}} [ 1607.797340] env[62519]: DEBUG oslo_vmware.api [None req-902bf869-76cc-4e6e-a6eb-33e80a9d9579 tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52eb9162-0018-f23c-3302-247889de6dc3, 'name': SearchDatastore_Task, 'duration_secs': 0.067844} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1607.797686] env[62519]: DEBUG oslo_concurrency.lockutils [None req-902bf869-76cc-4e6e-a6eb-33e80a9d9579 tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Releasing lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1607.797919] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-902bf869-76cc-4e6e-a6eb-33e80a9d9579 tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] [instance: 34d2991e-b6df-473d-8994-e45ff57ef131] Processing image 15793716-f1d9-4a86-9030-717adf498693 {{(pid=62519) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1607.798431] env[62519]: DEBUG oslo_concurrency.lockutils [None req-902bf869-76cc-4e6e-a6eb-33e80a9d9579 tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1607.798846] env[62519]: DEBUG oslo_concurrency.lockutils [None req-902bf869-76cc-4e6e-a6eb-33e80a9d9579 tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Acquired lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1607.799329] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-902bf869-76cc-4e6e-a6eb-33e80a9d9579 tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62519) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1607.799713] env[62519]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-2caeb492-48d2-4d18-a5c9-1f054eb59c12 {{(pid=62519) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1607.810526] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-902bf869-76cc-4e6e-a6eb-33e80a9d9579 tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62519) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1607.810816] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-902bf869-76cc-4e6e-a6eb-33e80a9d9579 tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62519) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1607.811499] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f9218d16-c5a4-4abd-bc69-f4f9dd7f418e {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1607.819538] env[62519]: DEBUG oslo_vmware.api [None req-902bf869-76cc-4e6e-a6eb-33e80a9d9579 tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Waiting for the task: (returnval){ [ 1607.819538] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]52fe74e2-19e7-d0ca-ba5e-cc9fcc33506f" [ 1607.819538] env[62519]: _type = "Task" [ 1607.819538] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1607.831243] env[62519]: DEBUG oslo_vmware.api [None req-902bf869-76cc-4e6e-a6eb-33e80a9d9579 tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52fe74e2-19e7-d0ca-ba5e-cc9fcc33506f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1607.897373] env[62519]: DEBUG oslo_vmware.api [None req-77454b93-3f15-4824-8029-76272dd44c6c tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Task: {'id': task-1802514, 'name': PowerOnVM_Task} progress is 88%. 
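Annotation: the image handling above (take the devstack-image-cache_base lock, create the cache folder if missing, SearchDatastore for the cached vmdk, then copy it into the instance directory) is a check-then-reuse cache flow. A rough sketch of the control flow only; the helper callables are hypothetical stand-ins and this is not Nova's _fetch_image_if_missing:

```python
# Illustrative control flow only; the helpers are hypothetical stand-ins for the
# datastore operations seen in the log.
from oslo_concurrency import lockutils

def ensure_image_cached(cache_lock_name, make_cache_dir, cached_vmdk_exists, fetch_into_cache):
    with lockutils.lock(cache_lock_name):
        make_cache_dir()          # "Creating directory ... devstack-image-cache_base" (idempotent)
        if cached_vmdk_exists():  # SearchDatastore_Task hit: reuse the cached vmdk as copy source
            return
        fetch_into_cache()        # cache miss: download the image into the cache first
```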
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1608.061353] env[62519]: DEBUG nova.virt.hardware [None req-ba15048a-4a0e-4e38-9130-d2fe9bd29d34 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T07:56:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T07:55:55Z,direct_url=,disk_format='vmdk',id=15793716-f1d9-4a86-9030-717adf498693,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4069337684d348e0a1ab4eb6a1a2b14d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T07:55:56Z,virtual_size=,visibility=), allow threads: False {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1608.061353] env[62519]: DEBUG nova.virt.hardware [None req-ba15048a-4a0e-4e38-9130-d2fe9bd29d34 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Flavor limits 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1608.061353] env[62519]: DEBUG nova.virt.hardware [None req-ba15048a-4a0e-4e38-9130-d2fe9bd29d34 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Image limits 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1608.061353] env[62519]: DEBUG nova.virt.hardware [None req-ba15048a-4a0e-4e38-9130-d2fe9bd29d34 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Flavor pref 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1608.061559] env[62519]: DEBUG nova.virt.hardware [None req-ba15048a-4a0e-4e38-9130-d2fe9bd29d34 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Image pref 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1608.061603] env[62519]: DEBUG nova.virt.hardware [None req-ba15048a-4a0e-4e38-9130-d2fe9bd29d34 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1608.061812] env[62519]: DEBUG nova.virt.hardware [None req-ba15048a-4a0e-4e38-9130-d2fe9bd29d34 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1608.061967] env[62519]: DEBUG nova.virt.hardware [None req-ba15048a-4a0e-4e38-9130-d2fe9bd29d34 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62519) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 
1608.063148] env[62519]: DEBUG nova.virt.hardware [None req-ba15048a-4a0e-4e38-9130-d2fe9bd29d34 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Got 1 possible topologies {{(pid=62519) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1608.063359] env[62519]: DEBUG nova.virt.hardware [None req-ba15048a-4a0e-4e38-9130-d2fe9bd29d34 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1608.063545] env[62519]: DEBUG nova.virt.hardware [None req-ba15048a-4a0e-4e38-9130-d2fe9bd29d34 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1608.064447] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9db0703d-8577-421b-811a-ebd977ca557e {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1608.073923] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80169e7a-5feb-4fc4-87aa-c047101419e1 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1608.099019] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-ba15048a-4a0e-4e38-9130-d2fe9bd29d34 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] [instance: 302edcd3-bd6e-41da-b731-4d4c1bb5c3c1] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:12:25:b1', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '2be3fdb5-359e-43bd-8c20-2ff00e81db55', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'b3cdcca9-9468-453b-894e-326f90b3cb34', 'vif_model': 'vmxnet3'}] {{(pid=62519) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1608.109542] env[62519]: DEBUG oslo.service.loopingcall [None req-ba15048a-4a0e-4e38-9130-d2fe9bd29d34 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62519) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1608.112887] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 302edcd3-bd6e-41da-b731-4d4c1bb5c3c1] Creating VM on the ESX host {{(pid=62519) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1608.113166] env[62519]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-dc0e7533-215f-4195-8926-eccdb0c0d0ec {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1608.140344] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1802515, 'name': CreateVM_Task} progress is 25%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1608.143115] env[62519]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1608.143115] env[62519]: value = "task-1802517" [ 1608.143115] env[62519]: _type = "Task" [ 1608.143115] env[62519]: } to complete. 
{{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1608.149386] env[62519]: DEBUG oslo_vmware.api [None req-b3ee3a4d-b7fb-4e51-adc1-0f63f32d64f4 tempest-FloatingIPsAssociationTestJSON-131832216 tempest-FloatingIPsAssociationTestJSON-131832216-project-member] Task: {'id': task-1802516, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.087204} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1608.149908] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-b3ee3a4d-b7fb-4e51-adc1-0f63f32d64f4 tempest-FloatingIPsAssociationTestJSON-131832216 tempest-FloatingIPsAssociationTestJSON-131832216-project-member] [instance: 9ac3344d-219a-487f-b83f-96c17cd86dad] Extended root virtual disk {{(pid=62519) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1608.150847] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31ed3bac-b883-4f7d-998e-09cae3a8cf02 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1608.165223] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1802517, 'name': CreateVM_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1608.187221] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-b3ee3a4d-b7fb-4e51-adc1-0f63f32d64f4 tempest-FloatingIPsAssociationTestJSON-131832216 tempest-FloatingIPsAssociationTestJSON-131832216-project-member] [instance: 9ac3344d-219a-487f-b83f-96c17cd86dad] Reconfiguring VM instance instance-00000035 to attach disk [datastore1] 9ac3344d-219a-487f-b83f-96c17cd86dad/9ac3344d-219a-487f-b83f-96c17cd86dad.vmdk or device None with type sparse {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1608.190703] env[62519]: DEBUG oslo_concurrency.lockutils [None req-b0f1427a-eff6-4068-bbe3-05ff2c38cc24 tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1608.190970] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-265e792b-e406-4d3f-b8ac-2b33a1ef9671 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1608.214236] env[62519]: DEBUG oslo_vmware.api [None req-b3ee3a4d-b7fb-4e51-adc1-0f63f32d64f4 tempest-FloatingIPsAssociationTestJSON-131832216 tempest-FloatingIPsAssociationTestJSON-131832216-project-member] Waiting for the task: (returnval){ [ 1608.214236] env[62519]: value = "task-1802518" [ 1608.214236] env[62519]: _type = "Task" [ 1608.214236] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1608.228175] env[62519]: DEBUG oslo_vmware.api [None req-b3ee3a4d-b7fb-4e51-adc1-0f63f32d64f4 tempest-FloatingIPsAssociationTestJSON-131832216 tempest-FloatingIPsAssociationTestJSON-131832216-project-member] Task: {'id': task-1802518, 'name': ReconfigVM_Task} progress is 6%. 
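Annotation on units: the 'Extending root virtual disk to 1048576' request, which completes above as the ExtendVirtualDisk_Task, appears to be the m1.nano flavor's root_gb=1 expressed in KiB. A one-line check of that arithmetic:

```python
# Illustrative arithmetic only (assumption: the extend size is expressed in KiB).
root_gb = 1                   # m1.nano root_gb, from the flavor logged in this run
print(root_gb * 1024 * 1024)  # 1048576, matching the extend request in the log
```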
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1608.287386] env[62519]: DEBUG nova.compute.manager [req-2a1ce73e-a677-42c9-a028-f180911ac3de req-84eb29f4-d114-40c9-aa96-164de372271e service nova] [instance: f0925a44-c15b-4415-99bc-1b2366292fe4] Received event network-changed-4406b619-4b6f-453e-a6e6-7f9f83d1b8ff {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1608.287658] env[62519]: DEBUG nova.compute.manager [req-2a1ce73e-a677-42c9-a028-f180911ac3de req-84eb29f4-d114-40c9-aa96-164de372271e service nova] [instance: f0925a44-c15b-4415-99bc-1b2366292fe4] Refreshing instance network info cache due to event network-changed-4406b619-4b6f-453e-a6e6-7f9f83d1b8ff. {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11628}} [ 1608.287887] env[62519]: DEBUG oslo_concurrency.lockutils [req-2a1ce73e-a677-42c9-a028-f180911ac3de req-84eb29f4-d114-40c9-aa96-164de372271e service nova] Acquiring lock "refresh_cache-f0925a44-c15b-4415-99bc-1b2366292fe4" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1608.290916] env[62519]: DEBUG oslo_concurrency.lockutils [req-2a1ce73e-a677-42c9-a028-f180911ac3de req-84eb29f4-d114-40c9-aa96-164de372271e service nova] Acquired lock "refresh_cache-f0925a44-c15b-4415-99bc-1b2366292fe4" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1608.291160] env[62519]: DEBUG nova.network.neutron [req-2a1ce73e-a677-42c9-a028-f180911ac3de req-84eb29f4-d114-40c9-aa96-164de372271e service nova] [instance: f0925a44-c15b-4415-99bc-1b2366292fe4] Refreshing network info cache for port 4406b619-4b6f-453e-a6e6-7f9f83d1b8ff {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1608.337889] env[62519]: DEBUG oslo_vmware.api [None req-902bf869-76cc-4e6e-a6eb-33e80a9d9579 tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52fe74e2-19e7-d0ca-ba5e-cc9fcc33506f, 'name': SearchDatastore_Task, 'duration_secs': 0.011372} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1608.339612] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1d8f25f1-4ed0-42b6-b14e-50e7821dcdc4 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1608.346247] env[62519]: DEBUG oslo_vmware.api [None req-902bf869-76cc-4e6e-a6eb-33e80a9d9579 tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Waiting for the task: (returnval){ [ 1608.346247] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]526976d1-11b9-8382-5483-189dac56d3b5" [ 1608.346247] env[62519]: _type = "Task" [ 1608.346247] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1608.359121] env[62519]: DEBUG oslo_vmware.api [None req-902bf869-76cc-4e6e-a6eb-33e80a9d9579 tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]526976d1-11b9-8382-5483-189dac56d3b5, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1608.398661] env[62519]: DEBUG oslo_vmware.api [None req-77454b93-3f15-4824-8029-76272dd44c6c tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Task: {'id': task-1802514, 'name': PowerOnVM_Task} progress is 100%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1608.609819] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1802515, 'name': CreateVM_Task, 'duration_secs': 1.007416} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1608.615070] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 80ef3fd4-b9ef-4fd2-a991-feec78a0c81d] Created VM on the ESX host {{(pid=62519) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1608.617653] env[62519]: DEBUG oslo_concurrency.lockutils [None req-887fdd97-c9a0-4663-ab52-0f67cebf30b9 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1608.617653] env[62519]: DEBUG oslo_concurrency.lockutils [None req-887fdd97-c9a0-4663-ab52-0f67cebf30b9 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Acquired lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1608.617653] env[62519]: DEBUG oslo_concurrency.lockutils [None req-887fdd97-c9a0-4663-ab52-0f67cebf30b9 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1608.617653] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-91f0b512-7947-467f-a304-08fd82b28be7 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1608.623471] env[62519]: DEBUG oslo_vmware.api [None req-887fdd97-c9a0-4663-ab52-0f67cebf30b9 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Waiting for the task: (returnval){ [ 1608.623471] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]524b1309-71a9-4d6c-744f-c02454111987" [ 1608.623471] env[62519]: _type = "Task" [ 1608.623471] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1608.635705] env[62519]: DEBUG oslo_vmware.api [None req-887fdd97-c9a0-4663-ab52-0f67cebf30b9 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]524b1309-71a9-4d6c-744f-c02454111987, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1608.657718] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1802517, 'name': CreateVM_Task} progress is 99%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1608.728421] env[62519]: DEBUG oslo_vmware.api [None req-b3ee3a4d-b7fb-4e51-adc1-0f63f32d64f4 tempest-FloatingIPsAssociationTestJSON-131832216 tempest-FloatingIPsAssociationTestJSON-131832216-project-member] Task: {'id': task-1802518, 'name': ReconfigVM_Task, 'duration_secs': 0.431409} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1608.728421] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-b3ee3a4d-b7fb-4e51-adc1-0f63f32d64f4 tempest-FloatingIPsAssociationTestJSON-131832216 tempest-FloatingIPsAssociationTestJSON-131832216-project-member] [instance: 9ac3344d-219a-487f-b83f-96c17cd86dad] Reconfigured VM instance instance-00000035 to attach disk [datastore1] 9ac3344d-219a-487f-b83f-96c17cd86dad/9ac3344d-219a-487f-b83f-96c17cd86dad.vmdk or device None with type sparse {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1608.729187] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-b7955337-f09a-41ee-a85e-289ab2d465ee {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1608.740210] env[62519]: DEBUG oslo_vmware.api [None req-b3ee3a4d-b7fb-4e51-adc1-0f63f32d64f4 tempest-FloatingIPsAssociationTestJSON-131832216 tempest-FloatingIPsAssociationTestJSON-131832216-project-member] Waiting for the task: (returnval){ [ 1608.740210] env[62519]: value = "task-1802519" [ 1608.740210] env[62519]: _type = "Task" [ 1608.740210] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1608.749107] env[62519]: DEBUG oslo_vmware.api [None req-b3ee3a4d-b7fb-4e51-adc1-0f63f32d64f4 tempest-FloatingIPsAssociationTestJSON-131832216 tempest-FloatingIPsAssociationTestJSON-131832216-project-member] Task: {'id': task-1802519, 'name': Rename_Task} progress is 5%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1608.809170] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f497869-8065-463c-87dd-5959345f2362 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1608.819244] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f6c77cb-3581-48fd-812a-96e543999d3c {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1608.873293] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-482c42fd-998b-4748-b31d-c8281d060c26 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1608.888370] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a22a9003-f2b8-4201-9a56-2c7bf5d67e6e {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1608.892392] env[62519]: DEBUG oslo_vmware.api [None req-902bf869-76cc-4e6e-a6eb-33e80a9d9579 tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]526976d1-11b9-8382-5483-189dac56d3b5, 'name': SearchDatastore_Task, 'duration_secs': 0.012439} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1608.892759] env[62519]: DEBUG oslo_concurrency.lockutils [None req-902bf869-76cc-4e6e-a6eb-33e80a9d9579 tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Releasing lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1608.893041] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-902bf869-76cc-4e6e-a6eb-33e80a9d9579 tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk to [datastore1] 34d2991e-b6df-473d-8994-e45ff57ef131/34d2991e-b6df-473d-8994-e45ff57ef131.vmdk {{(pid=62519) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1608.896785] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-5ed15f8e-04ed-4f4a-9d4b-c63d54cd678c {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1608.910095] env[62519]: DEBUG nova.compute.provider_tree [None req-94572272-cfe8-4ebd-bbc6-1659ba6f617f tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Inventory has not changed in ProviderTree for provider: f8ca0d98-9158-4b85-ae0e-b106f966dd44 {{(pid=62519) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1608.915658] env[62519]: DEBUG oslo_vmware.api [None req-77454b93-3f15-4824-8029-76272dd44c6c tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Task: {'id': task-1802514, 'name': PowerOnVM_Task, 'duration_secs': 1.048698} completed 
successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1608.915908] env[62519]: DEBUG oslo_vmware.api [None req-902bf869-76cc-4e6e-a6eb-33e80a9d9579 tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Waiting for the task: (returnval){ [ 1608.915908] env[62519]: value = "task-1802520" [ 1608.915908] env[62519]: _type = "Task" [ 1608.915908] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1608.918579] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-77454b93-3f15-4824-8029-76272dd44c6c tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] [instance: 8b20b91d-d4e7-4ec2-88ec-76b444a8d8a8] Powered on the VM {{(pid=62519) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1608.918579] env[62519]: INFO nova.compute.manager [None req-77454b93-3f15-4824-8029-76272dd44c6c tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] [instance: 8b20b91d-d4e7-4ec2-88ec-76b444a8d8a8] Took 9.95 seconds to spawn the instance on the hypervisor. [ 1608.918579] env[62519]: DEBUG nova.compute.manager [None req-77454b93-3f15-4824-8029-76272dd44c6c tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] [instance: 8b20b91d-d4e7-4ec2-88ec-76b444a8d8a8] Checking state {{(pid=62519) _get_power_state /opt/stack/nova/nova/compute/manager.py:1799}} [ 1608.918579] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-faaf61c5-ceb7-4d5c-90f9-5ac968071cb5 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1608.929465] env[62519]: DEBUG oslo_vmware.api [None req-902bf869-76cc-4e6e-a6eb-33e80a9d9579 tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Task: {'id': task-1802520, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1609.138636] env[62519]: DEBUG oslo_vmware.api [None req-887fdd97-c9a0-4663-ab52-0f67cebf30b9 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]524b1309-71a9-4d6c-744f-c02454111987, 'name': SearchDatastore_Task, 'duration_secs': 0.024445} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1609.139077] env[62519]: DEBUG oslo_concurrency.lockutils [None req-887fdd97-c9a0-4663-ab52-0f67cebf30b9 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Releasing lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1609.139457] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-887fdd97-c9a0-4663-ab52-0f67cebf30b9 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] [instance: 80ef3fd4-b9ef-4fd2-a991-feec78a0c81d] Processing image 15793716-f1d9-4a86-9030-717adf498693 {{(pid=62519) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1609.139785] env[62519]: DEBUG oslo_concurrency.lockutils [None req-887fdd97-c9a0-4663-ab52-0f67cebf30b9 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1609.140085] env[62519]: DEBUG oslo_concurrency.lockutils [None req-887fdd97-c9a0-4663-ab52-0f67cebf30b9 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Acquired lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1609.140402] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-887fdd97-c9a0-4663-ab52-0f67cebf30b9 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62519) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1609.142234] env[62519]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-bac6729e-2ab9-464b-ba7f-fb799d1c9d87 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1609.161121] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1802517, 'name': CreateVM_Task, 'duration_secs': 0.542709} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1609.165032] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 302edcd3-bd6e-41da-b731-4d4c1bb5c3c1] Created VM on the ESX host {{(pid=62519) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1609.165565] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-887fdd97-c9a0-4663-ab52-0f67cebf30b9 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62519) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1609.165742] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-887fdd97-c9a0-4663-ab52-0f67cebf30b9 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62519) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1609.167441] env[62519]: DEBUG oslo_concurrency.lockutils [None req-ba15048a-4a0e-4e38-9130-d2fe9bd29d34 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1609.167668] env[62519]: DEBUG oslo_concurrency.lockutils [None req-ba15048a-4a0e-4e38-9130-d2fe9bd29d34 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Acquired lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1609.168907] env[62519]: DEBUG oslo_concurrency.lockutils [None req-ba15048a-4a0e-4e38-9130-d2fe9bd29d34 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1609.169243] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9b635b81-c80d-4826-9a0d-249a799380cf {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1609.172029] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9fde3174-b9a4-45da-80c3-e138f5ca6a4d {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1609.178820] env[62519]: DEBUG oslo_vmware.api [None req-887fdd97-c9a0-4663-ab52-0f67cebf30b9 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Waiting for the task: (returnval){ [ 1609.178820] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]52a7cfa3-53e2-e22a-93c3-687deb2cae83" [ 1609.178820] env[62519]: _type = "Task" [ 1609.178820] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1609.180440] env[62519]: DEBUG oslo_vmware.api [None req-ba15048a-4a0e-4e38-9130-d2fe9bd29d34 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Waiting for the task: (returnval){ [ 1609.180440] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]5233c4b0-6f98-002c-2511-a88c1a5e4bc0" [ 1609.180440] env[62519]: _type = "Task" [ 1609.180440] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1609.199866] env[62519]: DEBUG oslo_vmware.api [None req-ba15048a-4a0e-4e38-9130-d2fe9bd29d34 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]5233c4b0-6f98-002c-2511-a88c1a5e4bc0, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1609.203362] env[62519]: DEBUG oslo_vmware.api [None req-887fdd97-c9a0-4663-ab52-0f67cebf30b9 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52a7cfa3-53e2-e22a-93c3-687deb2cae83, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1609.254427] env[62519]: DEBUG oslo_vmware.api [None req-b3ee3a4d-b7fb-4e51-adc1-0f63f32d64f4 tempest-FloatingIPsAssociationTestJSON-131832216 tempest-FloatingIPsAssociationTestJSON-131832216-project-member] Task: {'id': task-1802519, 'name': Rename_Task, 'duration_secs': 0.161603} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1609.254872] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-b3ee3a4d-b7fb-4e51-adc1-0f63f32d64f4 tempest-FloatingIPsAssociationTestJSON-131832216 tempest-FloatingIPsAssociationTestJSON-131832216-project-member] [instance: 9ac3344d-219a-487f-b83f-96c17cd86dad] Powering on the VM {{(pid=62519) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1609.255240] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c547e99c-a074-456f-b0c6-b399e046d8cb {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1609.269569] env[62519]: DEBUG oslo_vmware.api [None req-b3ee3a4d-b7fb-4e51-adc1-0f63f32d64f4 tempest-FloatingIPsAssociationTestJSON-131832216 tempest-FloatingIPsAssociationTestJSON-131832216-project-member] Waiting for the task: (returnval){ [ 1609.269569] env[62519]: value = "task-1802521" [ 1609.269569] env[62519]: _type = "Task" [ 1609.269569] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1609.276452] env[62519]: DEBUG nova.network.neutron [req-2a1ce73e-a677-42c9-a028-f180911ac3de req-84eb29f4-d114-40c9-aa96-164de372271e service nova] [instance: f0925a44-c15b-4415-99bc-1b2366292fe4] Updated VIF entry in instance network info cache for port 4406b619-4b6f-453e-a6e6-7f9f83d1b8ff. 
{{(pid=62519) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1609.276920] env[62519]: DEBUG nova.network.neutron [req-2a1ce73e-a677-42c9-a028-f180911ac3de req-84eb29f4-d114-40c9-aa96-164de372271e service nova] [instance: f0925a44-c15b-4415-99bc-1b2366292fe4] Updating instance_info_cache with network_info: [{"id": "4406b619-4b6f-453e-a6e6-7f9f83d1b8ff", "address": "fa:16:3e:e2:5e:d4", "network": {"id": "87dc972b-a043-4db2-b67a-2efe6d40d1aa", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-1993987139-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.162", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8929cfd4892c4e399062483665500dd9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6b8137fc-f23d-49b1-b19c-3123a5588f34", "external-id": "nsx-vlan-transportzone-709", "segmentation_id": 709, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4406b619-4b", "ovs_interfaceid": "4406b619-4b6f-453e-a6e6-7f9f83d1b8ff", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1609.286036] env[62519]: DEBUG oslo_vmware.api [None req-b3ee3a4d-b7fb-4e51-adc1-0f63f32d64f4 tempest-FloatingIPsAssociationTestJSON-131832216 tempest-FloatingIPsAssociationTestJSON-131832216-project-member] Task: {'id': task-1802521, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1609.418166] env[62519]: DEBUG nova.scheduler.client.report [None req-94572272-cfe8-4ebd-bbc6-1659ba6f617f tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Inventory has not changed for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 157, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1609.433418] env[62519]: DEBUG oslo_vmware.api [None req-902bf869-76cc-4e6e-a6eb-33e80a9d9579 tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Task: {'id': task-1802520, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.509955} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1609.433418] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-902bf869-76cc-4e6e-a6eb-33e80a9d9579 tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk to [datastore1] 34d2991e-b6df-473d-8994-e45ff57ef131/34d2991e-b6df-473d-8994-e45ff57ef131.vmdk {{(pid=62519) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1609.434093] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-902bf869-76cc-4e6e-a6eb-33e80a9d9579 tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] [instance: 34d2991e-b6df-473d-8994-e45ff57ef131] Extending root virtual disk to 1048576 {{(pid=62519) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1609.434093] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-8fab6098-020f-464d-870f-037ebdd97727 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1609.445252] env[62519]: DEBUG oslo_vmware.api [None req-902bf869-76cc-4e6e-a6eb-33e80a9d9579 tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Waiting for the task: (returnval){ [ 1609.445252] env[62519]: value = "task-1802522" [ 1609.445252] env[62519]: _type = "Task" [ 1609.445252] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1609.447664] env[62519]: INFO nova.compute.manager [None req-77454b93-3f15-4824-8029-76272dd44c6c tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] [instance: 8b20b91d-d4e7-4ec2-88ec-76b444a8d8a8] Took 45.55 seconds to build instance. [ 1609.460263] env[62519]: DEBUG oslo_vmware.api [None req-902bf869-76cc-4e6e-a6eb-33e80a9d9579 tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Task: {'id': task-1802522, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1609.692419] env[62519]: DEBUG oslo_vmware.api [None req-887fdd97-c9a0-4663-ab52-0f67cebf30b9 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52a7cfa3-53e2-e22a-93c3-687deb2cae83, 'name': SearchDatastore_Task, 'duration_secs': 0.065251} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1609.693595] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f4b7617a-5cc9-4ffd-b355-74603a15fdef {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1609.701017] env[62519]: DEBUG oslo_vmware.api [None req-ba15048a-4a0e-4e38-9130-d2fe9bd29d34 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]5233c4b0-6f98-002c-2511-a88c1a5e4bc0, 'name': SearchDatastore_Task, 'duration_secs': 0.070233} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1609.701017] env[62519]: DEBUG oslo_concurrency.lockutils [None req-ba15048a-4a0e-4e38-9130-d2fe9bd29d34 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Releasing lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1609.701017] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-ba15048a-4a0e-4e38-9130-d2fe9bd29d34 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] [instance: 302edcd3-bd6e-41da-b731-4d4c1bb5c3c1] Processing image 15793716-f1d9-4a86-9030-717adf498693 {{(pid=62519) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1609.701017] env[62519]: DEBUG oslo_concurrency.lockutils [None req-ba15048a-4a0e-4e38-9130-d2fe9bd29d34 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1609.703529] env[62519]: DEBUG oslo_vmware.api [None req-887fdd97-c9a0-4663-ab52-0f67cebf30b9 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Waiting for the task: (returnval){ [ 1609.703529] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]5274b30f-175b-eb5c-056c-2e918f171fc4" [ 1609.703529] env[62519]: _type = "Task" [ 1609.703529] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1609.712775] env[62519]: DEBUG oslo_vmware.api [None req-887fdd97-c9a0-4663-ab52-0f67cebf30b9 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]5274b30f-175b-eb5c-056c-2e918f171fc4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1609.781078] env[62519]: DEBUG oslo_concurrency.lockutils [req-2a1ce73e-a677-42c9-a028-f180911ac3de req-84eb29f4-d114-40c9-aa96-164de372271e service nova] Releasing lock "refresh_cache-f0925a44-c15b-4415-99bc-1b2366292fe4" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1609.781517] env[62519]: DEBUG oslo_vmware.api [None req-b3ee3a4d-b7fb-4e51-adc1-0f63f32d64f4 tempest-FloatingIPsAssociationTestJSON-131832216 tempest-FloatingIPsAssociationTestJSON-131832216-project-member] Task: {'id': task-1802521, 'name': PowerOnVM_Task} progress is 100%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1609.927931] env[62519]: DEBUG oslo_concurrency.lockutils [None req-94572272-cfe8-4ebd-bbc6-1659ba6f617f tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.867s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1609.928530] env[62519]: DEBUG nova.compute.manager [None req-94572272-cfe8-4ebd-bbc6-1659ba6f617f tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] [instance: c60f5d73-9d6d-4b5f-b71b-00b6b787d482] Start building networks asynchronously for instance. {{(pid=62519) _build_resources /opt/stack/nova/nova/compute/manager.py:2838}} [ 1609.932158] env[62519]: DEBUG oslo_concurrency.lockutils [None req-64126044-6a15-4d18-9748-0958bc8df406 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 32.681s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1609.959136] env[62519]: DEBUG oslo_vmware.api [None req-902bf869-76cc-4e6e-a6eb-33e80a9d9579 tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Task: {'id': task-1802522, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.075024} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1609.960109] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-902bf869-76cc-4e6e-a6eb-33e80a9d9579 tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] [instance: 34d2991e-b6df-473d-8994-e45ff57ef131] Extended root virtual disk {{(pid=62519) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1609.960896] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9f42e76-6f13-41da-9198-da70ab1775ab {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1609.987529] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-902bf869-76cc-4e6e-a6eb-33e80a9d9579 tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] [instance: 34d2991e-b6df-473d-8994-e45ff57ef131] Reconfiguring VM instance instance-0000000a to attach disk [datastore1] 34d2991e-b6df-473d-8994-e45ff57ef131/34d2991e-b6df-473d-8994-e45ff57ef131.vmdk or device None with type sparse {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1609.988141] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-07028880-9566-47bc-8a1f-2b6ba14fa63a {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1610.010873] env[62519]: DEBUG oslo_vmware.api [None req-902bf869-76cc-4e6e-a6eb-33e80a9d9579 tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Waiting for the task: (returnval){ [ 1610.010873] env[62519]: value = "task-1802523" [ 1610.010873] env[62519]: _type = "Task" [ 1610.010873] env[62519]: } to complete. 
{{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1610.020599] env[62519]: DEBUG oslo_vmware.api [None req-902bf869-76cc-4e6e-a6eb-33e80a9d9579 tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Task: {'id': task-1802523, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1610.075144] env[62519]: DEBUG nova.compute.manager [req-486aa653-5f8d-4f20-a35c-2602faaab048 req-92e14325-1f9f-4777-826e-3cdd6758ab80 service nova] [instance: 8b20b91d-d4e7-4ec2-88ec-76b444a8d8a8] Received event network-changed-c3e2a054-4826-4bd6-8c9e-74005e7912e4 {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1610.075316] env[62519]: DEBUG nova.compute.manager [req-486aa653-5f8d-4f20-a35c-2602faaab048 req-92e14325-1f9f-4777-826e-3cdd6758ab80 service nova] [instance: 8b20b91d-d4e7-4ec2-88ec-76b444a8d8a8] Refreshing instance network info cache due to event network-changed-c3e2a054-4826-4bd6-8c9e-74005e7912e4. {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11628}} [ 1610.075580] env[62519]: DEBUG oslo_concurrency.lockutils [req-486aa653-5f8d-4f20-a35c-2602faaab048 req-92e14325-1f9f-4777-826e-3cdd6758ab80 service nova] Acquiring lock "refresh_cache-8b20b91d-d4e7-4ec2-88ec-76b444a8d8a8" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1610.075691] env[62519]: DEBUG oslo_concurrency.lockutils [req-486aa653-5f8d-4f20-a35c-2602faaab048 req-92e14325-1f9f-4777-826e-3cdd6758ab80 service nova] Acquired lock "refresh_cache-8b20b91d-d4e7-4ec2-88ec-76b444a8d8a8" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1610.075856] env[62519]: DEBUG nova.network.neutron [req-486aa653-5f8d-4f20-a35c-2602faaab048 req-92e14325-1f9f-4777-826e-3cdd6758ab80 service nova] [instance: 8b20b91d-d4e7-4ec2-88ec-76b444a8d8a8] Refreshing network info cache for port c3e2a054-4826-4bd6-8c9e-74005e7912e4 {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1610.215238] env[62519]: DEBUG oslo_vmware.api [None req-887fdd97-c9a0-4663-ab52-0f67cebf30b9 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]5274b30f-175b-eb5c-056c-2e918f171fc4, 'name': SearchDatastore_Task, 'duration_secs': 0.011} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1610.215527] env[62519]: DEBUG oslo_concurrency.lockutils [None req-887fdd97-c9a0-4663-ab52-0f67cebf30b9 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Releasing lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1610.215956] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-887fdd97-c9a0-4663-ab52-0f67cebf30b9 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk to [datastore1] 80ef3fd4-b9ef-4fd2-a991-feec78a0c81d/80ef3fd4-b9ef-4fd2-a991-feec78a0c81d.vmdk {{(pid=62519) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1610.216240] env[62519]: DEBUG oslo_concurrency.lockutils [None req-ba15048a-4a0e-4e38-9130-d2fe9bd29d34 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Acquired lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1610.216384] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-ba15048a-4a0e-4e38-9130-d2fe9bd29d34 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62519) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1610.216601] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-6cc1961f-c724-4da2-93b4-ab86bf792a56 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1610.218905] env[62519]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b67dd5ba-d74c-4737-a859-a87497e20365 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1610.229850] env[62519]: DEBUG oslo_vmware.api [None req-887fdd97-c9a0-4663-ab52-0f67cebf30b9 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Waiting for the task: (returnval){ [ 1610.229850] env[62519]: value = "task-1802524" [ 1610.229850] env[62519]: _type = "Task" [ 1610.229850] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1610.231630] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-ba15048a-4a0e-4e38-9130-d2fe9bd29d34 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62519) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1610.231630] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-ba15048a-4a0e-4e38-9130-d2fe9bd29d34 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62519) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1610.235010] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3fa80f0a-95ef-43ba-ac61-d590b66fda58 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1610.242147] env[62519]: DEBUG oslo_vmware.api [None req-ba15048a-4a0e-4e38-9130-d2fe9bd29d34 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Waiting for the task: (returnval){ [ 1610.242147] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]52ac0a0b-74cf-a705-e1e7-f1735d2067bb" [ 1610.242147] env[62519]: _type = "Task" [ 1610.242147] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1610.244997] env[62519]: DEBUG oslo_vmware.api [None req-887fdd97-c9a0-4663-ab52-0f67cebf30b9 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Task: {'id': task-1802524, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1610.255314] env[62519]: DEBUG oslo_vmware.api [None req-ba15048a-4a0e-4e38-9130-d2fe9bd29d34 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52ac0a0b-74cf-a705-e1e7-f1735d2067bb, 'name': SearchDatastore_Task, 'duration_secs': 0.01129} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1610.256225] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-380e840b-23eb-4588-9955-dd42917c922d {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1610.262169] env[62519]: DEBUG oslo_vmware.api [None req-ba15048a-4a0e-4e38-9130-d2fe9bd29d34 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Waiting for the task: (returnval){ [ 1610.262169] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]52eb866f-909e-1a7f-586b-4439891302d2" [ 1610.262169] env[62519]: _type = "Task" [ 1610.262169] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1610.271801] env[62519]: DEBUG oslo_vmware.api [None req-ba15048a-4a0e-4e38-9130-d2fe9bd29d34 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52eb866f-909e-1a7f-586b-4439891302d2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1610.280726] env[62519]: DEBUG oslo_vmware.api [None req-b3ee3a4d-b7fb-4e51-adc1-0f63f32d64f4 tempest-FloatingIPsAssociationTestJSON-131832216 tempest-FloatingIPsAssociationTestJSON-131832216-project-member] Task: {'id': task-1802521, 'name': PowerOnVM_Task, 'duration_secs': 0.540188} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1610.281864] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-b3ee3a4d-b7fb-4e51-adc1-0f63f32d64f4 tempest-FloatingIPsAssociationTestJSON-131832216 tempest-FloatingIPsAssociationTestJSON-131832216-project-member] [instance: 9ac3344d-219a-487f-b83f-96c17cd86dad] Powered on the VM {{(pid=62519) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1610.281864] env[62519]: INFO nova.compute.manager [None req-b3ee3a4d-b7fb-4e51-adc1-0f63f32d64f4 tempest-FloatingIPsAssociationTestJSON-131832216 tempest-FloatingIPsAssociationTestJSON-131832216-project-member] [instance: 9ac3344d-219a-487f-b83f-96c17cd86dad] Took 9.07 seconds to spawn the instance on the hypervisor. [ 1610.281864] env[62519]: DEBUG nova.compute.manager [None req-b3ee3a4d-b7fb-4e51-adc1-0f63f32d64f4 tempest-FloatingIPsAssociationTestJSON-131832216 tempest-FloatingIPsAssociationTestJSON-131832216-project-member] [instance: 9ac3344d-219a-487f-b83f-96c17cd86dad] Checking state {{(pid=62519) _get_power_state /opt/stack/nova/nova/compute/manager.py:1799}} [ 1610.282149] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc78a48f-0398-46fb-b4ef-266710642729 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1610.435636] env[62519]: DEBUG nova.compute.utils [None req-94572272-cfe8-4ebd-bbc6-1659ba6f617f tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Using /dev/sd instead of None {{(pid=62519) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1610.437369] env[62519]: DEBUG nova.compute.manager [None req-94572272-cfe8-4ebd-bbc6-1659ba6f617f tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] [instance: c60f5d73-9d6d-4b5f-b71b-00b6b787d482] Allocating IP information in the background. {{(pid=62519) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1989}} [ 1610.437589] env[62519]: DEBUG nova.network.neutron [None req-94572272-cfe8-4ebd-bbc6-1659ba6f617f tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] [instance: c60f5d73-9d6d-4b5f-b71b-00b6b787d482] allocate_for_instance() {{(pid=62519) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1610.442571] env[62519]: INFO nova.compute.claims [None req-64126044-6a15-4d18-9748-0958bc8df406 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] [instance: a1551278-a306-4534-8d8d-3b3a003dde04] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1610.524126] env[62519]: DEBUG oslo_vmware.api [None req-902bf869-76cc-4e6e-a6eb-33e80a9d9579 tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Task: {'id': task-1802523, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1610.532573] env[62519]: DEBUG nova.policy [None req-94572272-cfe8-4ebd-bbc6-1659ba6f617f tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c9b4ac19e10d4abdb7d7e54e5d093a8a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'a0755b34e22d4478817ec4e2d57aac2a', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62519) authorize /opt/stack/nova/nova/policy.py:192}} [ 1610.740932] env[62519]: DEBUG oslo_vmware.api [None req-887fdd97-c9a0-4663-ab52-0f67cebf30b9 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Task: {'id': task-1802524, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1610.772621] env[62519]: DEBUG oslo_vmware.api [None req-ba15048a-4a0e-4e38-9130-d2fe9bd29d34 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52eb866f-909e-1a7f-586b-4439891302d2, 'name': SearchDatastore_Task, 'duration_secs': 0.010398} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1610.772851] env[62519]: DEBUG oslo_concurrency.lockutils [None req-ba15048a-4a0e-4e38-9130-d2fe9bd29d34 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Releasing lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1610.773284] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-ba15048a-4a0e-4e38-9130-d2fe9bd29d34 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk to [datastore1] 302edcd3-bd6e-41da-b731-4d4c1bb5c3c1/302edcd3-bd6e-41da-b731-4d4c1bb5c3c1.vmdk {{(pid=62519) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1610.773284] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-6d85ec93-00a6-4d02-a428-4cb2e528ef0a {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1610.781087] env[62519]: DEBUG oslo_vmware.api [None req-ba15048a-4a0e-4e38-9130-d2fe9bd29d34 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Waiting for the task: (returnval){ [ 1610.781087] env[62519]: value = "task-1802525" [ 1610.781087] env[62519]: _type = "Task" [ 1610.781087] env[62519]: } to complete. 
{{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1610.789815] env[62519]: DEBUG oslo_vmware.api [None req-ba15048a-4a0e-4e38-9130-d2fe9bd29d34 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Task: {'id': task-1802525, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1610.799176] env[62519]: INFO nova.compute.manager [None req-b3ee3a4d-b7fb-4e51-adc1-0f63f32d64f4 tempest-FloatingIPsAssociationTestJSON-131832216 tempest-FloatingIPsAssociationTestJSON-131832216-project-member] [instance: 9ac3344d-219a-487f-b83f-96c17cd86dad] Took 43.31 seconds to build instance. [ 1610.840153] env[62519]: DEBUG nova.network.neutron [req-486aa653-5f8d-4f20-a35c-2602faaab048 req-92e14325-1f9f-4777-826e-3cdd6758ab80 service nova] [instance: 8b20b91d-d4e7-4ec2-88ec-76b444a8d8a8] Updated VIF entry in instance network info cache for port c3e2a054-4826-4bd6-8c9e-74005e7912e4. {{(pid=62519) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1610.840506] env[62519]: DEBUG nova.network.neutron [req-486aa653-5f8d-4f20-a35c-2602faaab048 req-92e14325-1f9f-4777-826e-3cdd6758ab80 service nova] [instance: 8b20b91d-d4e7-4ec2-88ec-76b444a8d8a8] Updating instance_info_cache with network_info: [{"id": "c3e2a054-4826-4bd6-8c9e-74005e7912e4", "address": "fa:16:3e:5b:29:03", "network": {"id": "40fc0e4f-fe1a-4b8c-ace4-4612cfc1a8fc", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-364463489-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.170", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "12977ed65a1b410a987b049e9d1dce3e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8e028024-a9c1-4cae-8849-ea770a7ae0e4", "external-id": "nsx-vlan-transportzone-919", "segmentation_id": 919, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc3e2a054-48", "ovs_interfaceid": "c3e2a054-4826-4bd6-8c9e-74005e7912e4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1610.949135] env[62519]: DEBUG nova.compute.manager [None req-94572272-cfe8-4ebd-bbc6-1659ba6f617f tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] [instance: c60f5d73-9d6d-4b5f-b71b-00b6b787d482] Start building block device mappings for instance. 
{{(pid=62519) _build_resources /opt/stack/nova/nova/compute/manager.py:2873}} [ 1610.963675] env[62519]: INFO nova.compute.resource_tracker [None req-64126044-6a15-4d18-9748-0958bc8df406 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] [instance: a1551278-a306-4534-8d8d-3b3a003dde04] Updating resource usage from migration 33e7914a-348b-43c3-a3c3-7f65f496ad5a [ 1610.968771] env[62519]: DEBUG oslo_concurrency.lockutils [None req-77454b93-3f15-4824-8029-76272dd44c6c tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Lock "8b20b91d-d4e7-4ec2-88ec-76b444a8d8a8" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 70.762s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1611.039815] env[62519]: DEBUG oslo_vmware.api [None req-902bf869-76cc-4e6e-a6eb-33e80a9d9579 tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Task: {'id': task-1802523, 'name': ReconfigVM_Task, 'duration_secs': 0.680459} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1611.039815] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-902bf869-76cc-4e6e-a6eb-33e80a9d9579 tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] [instance: 34d2991e-b6df-473d-8994-e45ff57ef131] Reconfigured VM instance instance-0000000a to attach disk [datastore1] 34d2991e-b6df-473d-8994-e45ff57ef131/34d2991e-b6df-473d-8994-e45ff57ef131.vmdk or device None with type sparse {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1611.039815] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-58b0f9c0-1920-49a2-9052-1d0e4e858790 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1611.052531] env[62519]: DEBUG oslo_vmware.api [None req-902bf869-76cc-4e6e-a6eb-33e80a9d9579 tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Waiting for the task: (returnval){ [ 1611.052531] env[62519]: value = "task-1802526" [ 1611.052531] env[62519]: _type = "Task" [ 1611.052531] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1611.069024] env[62519]: DEBUG oslo_vmware.api [None req-902bf869-76cc-4e6e-a6eb-33e80a9d9579 tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Task: {'id': task-1802526, 'name': Rename_Task} progress is 5%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1611.146032] env[62519]: DEBUG nova.network.neutron [None req-94572272-cfe8-4ebd-bbc6-1659ba6f617f tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] [instance: c60f5d73-9d6d-4b5f-b71b-00b6b787d482] Successfully created port: 76a9613b-a465-4420-ab6e-fd38db7bccac {{(pid=62519) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1611.247347] env[62519]: DEBUG oslo_vmware.api [None req-887fdd97-c9a0-4663-ab52-0f67cebf30b9 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Task: {'id': task-1802524, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.513715} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1611.247696] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-887fdd97-c9a0-4663-ab52-0f67cebf30b9 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk to [datastore1] 80ef3fd4-b9ef-4fd2-a991-feec78a0c81d/80ef3fd4-b9ef-4fd2-a991-feec78a0c81d.vmdk {{(pid=62519) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1611.248022] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-887fdd97-c9a0-4663-ab52-0f67cebf30b9 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] [instance: 80ef3fd4-b9ef-4fd2-a991-feec78a0c81d] Extending root virtual disk to 1048576 {{(pid=62519) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1611.248325] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-2170dfa7-2a34-4b16-a7a7-01e5bf36ce8b {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1611.263018] env[62519]: DEBUG oslo_vmware.api [None req-887fdd97-c9a0-4663-ab52-0f67cebf30b9 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Waiting for the task: (returnval){ [ 1611.263018] env[62519]: value = "task-1802527" [ 1611.263018] env[62519]: _type = "Task" [ 1611.263018] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1611.274647] env[62519]: DEBUG oslo_vmware.api [None req-887fdd97-c9a0-4663-ab52-0f67cebf30b9 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Task: {'id': task-1802527, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1611.293361] env[62519]: DEBUG oslo_vmware.api [None req-ba15048a-4a0e-4e38-9130-d2fe9bd29d34 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Task: {'id': task-1802525, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.481386} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1611.296798] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-ba15048a-4a0e-4e38-9130-d2fe9bd29d34 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk to [datastore1] 302edcd3-bd6e-41da-b731-4d4c1bb5c3c1/302edcd3-bd6e-41da-b731-4d4c1bb5c3c1.vmdk {{(pid=62519) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1611.297217] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-ba15048a-4a0e-4e38-9130-d2fe9bd29d34 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] [instance: 302edcd3-bd6e-41da-b731-4d4c1bb5c3c1] Extending root virtual disk to 1048576 {{(pid=62519) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1611.297714] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-5e726d4c-2026-49e8-93e8-a1bbd6ee8f41 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1611.307204] env[62519]: DEBUG oslo_vmware.api [None req-ba15048a-4a0e-4e38-9130-d2fe9bd29d34 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Waiting for the task: (returnval){ [ 1611.307204] env[62519]: value = "task-1802528" [ 1611.307204] env[62519]: _type = "Task" [ 1611.307204] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1611.321562] env[62519]: DEBUG oslo_vmware.api [None req-ba15048a-4a0e-4e38-9130-d2fe9bd29d34 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Task: {'id': task-1802528, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1611.343332] env[62519]: DEBUG oslo_concurrency.lockutils [req-486aa653-5f8d-4f20-a35c-2602faaab048 req-92e14325-1f9f-4777-826e-3cdd6758ab80 service nova] Releasing lock "refresh_cache-8b20b91d-d4e7-4ec2-88ec-76b444a8d8a8" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1611.473518] env[62519]: DEBUG nova.compute.manager [None req-24267348-edd5-4ceb-9bdc-8a9f43b89725 tempest-ServerTagsTestJSON-1513198723 tempest-ServerTagsTestJSON-1513198723-project-member] [instance: 8659f63a-5df9-4ff8-84dd-0722026dc820] Starting instance... {{(pid=62519) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2441}} [ 1611.564695] env[62519]: DEBUG oslo_vmware.api [None req-902bf869-76cc-4e6e-a6eb-33e80a9d9579 tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Task: {'id': task-1802526, 'name': Rename_Task, 'duration_secs': 0.24409} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1611.567311] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-902bf869-76cc-4e6e-a6eb-33e80a9d9579 tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] [instance: 34d2991e-b6df-473d-8994-e45ff57ef131] Powering on the VM {{(pid=62519) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1611.567752] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-1c401ee6-f1f3-444f-ab89-d099c457f53f {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1611.575286] env[62519]: DEBUG oslo_vmware.api [None req-902bf869-76cc-4e6e-a6eb-33e80a9d9579 tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Waiting for the task: (returnval){ [ 1611.575286] env[62519]: value = "task-1802529" [ 1611.575286] env[62519]: _type = "Task" [ 1611.575286] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1611.580044] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22f89f73-ab83-40da-aae0-bf70a96ae132 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1611.589859] env[62519]: DEBUG oslo_vmware.api [None req-902bf869-76cc-4e6e-a6eb-33e80a9d9579 tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Task: {'id': task-1802529, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1611.590924] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c904a2d4-70ba-4c34-98b3-c3749e083495 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1611.625515] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a58b61e6-e737-4d94-8fd6-9cd7cb7c0468 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1611.641488] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec8a9965-7036-40d8-98bb-a6bdc8097d8b {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1611.652587] env[62519]: DEBUG nova.compute.provider_tree [None req-64126044-6a15-4d18-9748-0958bc8df406 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Inventory has not changed in ProviderTree for provider: f8ca0d98-9158-4b85-ae0e-b106f966dd44 {{(pid=62519) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1611.771861] env[62519]: DEBUG oslo_vmware.api [None req-887fdd97-c9a0-4663-ab52-0f67cebf30b9 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Task: {'id': task-1802527, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.076572} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1611.772157] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-887fdd97-c9a0-4663-ab52-0f67cebf30b9 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] [instance: 80ef3fd4-b9ef-4fd2-a991-feec78a0c81d] Extended root virtual disk {{(pid=62519) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1611.772930] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16af210b-5254-4a18-86f2-fc30d49a7dc4 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1611.796764] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-887fdd97-c9a0-4663-ab52-0f67cebf30b9 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] [instance: 80ef3fd4-b9ef-4fd2-a991-feec78a0c81d] Reconfiguring VM instance instance-00000031 to attach disk [datastore1] 80ef3fd4-b9ef-4fd2-a991-feec78a0c81d/80ef3fd4-b9ef-4fd2-a991-feec78a0c81d.vmdk or device None with type sparse {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1611.797598] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-eecaf678-683d-4747-99b8-fcf4444bbe27 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1611.824311] env[62519]: DEBUG oslo_vmware.api [None req-ba15048a-4a0e-4e38-9130-d2fe9bd29d34 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Task: {'id': task-1802528, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.06332} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1611.825363] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-ba15048a-4a0e-4e38-9130-d2fe9bd29d34 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] [instance: 302edcd3-bd6e-41da-b731-4d4c1bb5c3c1] Extended root virtual disk {{(pid=62519) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1611.826101] env[62519]: DEBUG oslo_vmware.api [None req-887fdd97-c9a0-4663-ab52-0f67cebf30b9 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Waiting for the task: (returnval){ [ 1611.826101] env[62519]: value = "task-1802530" [ 1611.826101] env[62519]: _type = "Task" [ 1611.826101] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1611.826430] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-980536bc-0078-473a-92b2-9a069d06b48b {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1611.837736] env[62519]: DEBUG oslo_vmware.api [None req-887fdd97-c9a0-4663-ab52-0f67cebf30b9 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Task: {'id': task-1802530, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1611.855451] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-ba15048a-4a0e-4e38-9130-d2fe9bd29d34 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] [instance: 302edcd3-bd6e-41da-b731-4d4c1bb5c3c1] Reconfiguring VM instance instance-00000033 to attach disk [datastore1] 302edcd3-bd6e-41da-b731-4d4c1bb5c3c1/302edcd3-bd6e-41da-b731-4d4c1bb5c3c1.vmdk or device None with type sparse {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1611.856048] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4f66a64d-5b1f-4f8a-97b1-f3bf66a02cf9 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1611.876750] env[62519]: DEBUG oslo_vmware.api [None req-ba15048a-4a0e-4e38-9130-d2fe9bd29d34 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Waiting for the task: (returnval){ [ 1611.876750] env[62519]: value = "task-1802531" [ 1611.876750] env[62519]: _type = "Task" [ 1611.876750] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1611.885550] env[62519]: DEBUG oslo_vmware.api [None req-ba15048a-4a0e-4e38-9130-d2fe9bd29d34 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Task: {'id': task-1802531, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1611.977313] env[62519]: DEBUG nova.compute.manager [None req-94572272-cfe8-4ebd-bbc6-1659ba6f617f tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] [instance: c60f5d73-9d6d-4b5f-b71b-00b6b787d482] Start spawning the instance on the hypervisor. 
{{(pid=62519) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2647}} [ 1612.004348] env[62519]: DEBUG oslo_concurrency.lockutils [None req-24267348-edd5-4ceb-9bdc-8a9f43b89725 tempest-ServerTagsTestJSON-1513198723 tempest-ServerTagsTestJSON-1513198723-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1612.015667] env[62519]: DEBUG nova.virt.hardware [None req-94572272-cfe8-4ebd-bbc6-1659ba6f617f tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T07:56:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T07:55:55Z,direct_url=,disk_format='vmdk',id=15793716-f1d9-4a86-9030-717adf498693,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4069337684d348e0a1ab4eb6a1a2b14d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T07:55:56Z,virtual_size=,visibility=), allow threads: False {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1612.015963] env[62519]: DEBUG nova.virt.hardware [None req-94572272-cfe8-4ebd-bbc6-1659ba6f617f tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Flavor limits 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1612.016089] env[62519]: DEBUG nova.virt.hardware [None req-94572272-cfe8-4ebd-bbc6-1659ba6f617f tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Image limits 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1612.016276] env[62519]: DEBUG nova.virt.hardware [None req-94572272-cfe8-4ebd-bbc6-1659ba6f617f tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Flavor pref 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1612.016421] env[62519]: DEBUG nova.virt.hardware [None req-94572272-cfe8-4ebd-bbc6-1659ba6f617f tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Image pref 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1612.016565] env[62519]: DEBUG nova.virt.hardware [None req-94572272-cfe8-4ebd-bbc6-1659ba6f617f tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1612.016770] env[62519]: DEBUG nova.virt.hardware [None req-94572272-cfe8-4ebd-bbc6-1659ba6f617f tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1612.016926] env[62519]: DEBUG nova.virt.hardware [None 
req-94572272-cfe8-4ebd-bbc6-1659ba6f617f tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62519) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1612.017150] env[62519]: DEBUG nova.virt.hardware [None req-94572272-cfe8-4ebd-bbc6-1659ba6f617f tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Got 1 possible topologies {{(pid=62519) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1612.017554] env[62519]: DEBUG nova.virt.hardware [None req-94572272-cfe8-4ebd-bbc6-1659ba6f617f tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1612.017554] env[62519]: DEBUG nova.virt.hardware [None req-94572272-cfe8-4ebd-bbc6-1659ba6f617f tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1612.018526] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-181126bf-5dc8-4bd4-bba8-f438fa288a59 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1612.040186] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-530798a0-d9a5-4734-8fb5-3bd788df7042 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1612.085813] env[62519]: DEBUG oslo_vmware.api [None req-902bf869-76cc-4e6e-a6eb-33e80a9d9579 tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Task: {'id': task-1802529, 'name': PowerOnVM_Task} progress is 66%. 
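The nova.virt.hardware lines above show the CPU-topology narrowing for this m1.nano boot: flavor and image limits of 0:0:0 mean "no preference" (treated as the 65536 ceiling), and for 1 vCPU the only valid topology is 1 socket x 1 core x 1 thread. A minimal, self-contained sketch of that enumeration (not Nova's implementation) makes the logged result easy to reproduce:

from collections import namedtuple

VirtCPUTopology = namedtuple('VirtCPUTopology', 'sockets cores threads')


def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
    # Every (sockets, cores, threads) whose product equals the vCPU count and
    # which fits under the maxima is a candidate topology.
    found = []
    for sockets in range(1, min(vcpus, max_sockets) + 1):
        for cores in range(1, min(vcpus, max_cores) + 1):
            for threads in range(1, min(vcpus, max_threads) + 1):
                if sockets * cores * threads == vcpus:
                    found.append(VirtCPUTopology(sockets, cores, threads))
    return found


print(possible_topologies(1))  # [VirtCPUTopology(sockets=1, cores=1, threads=1)], as logged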
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1612.155656] env[62519]: DEBUG nova.scheduler.client.report [None req-64126044-6a15-4d18-9748-0958bc8df406 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Inventory has not changed for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 157, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1612.321873] env[62519]: DEBUG oslo_concurrency.lockutils [None req-b3ee3a4d-b7fb-4e51-adc1-0f63f32d64f4 tempest-FloatingIPsAssociationTestJSON-131832216 tempest-FloatingIPsAssociationTestJSON-131832216-project-member] Lock "9ac3344d-219a-487f-b83f-96c17cd86dad" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 69.649s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1612.342850] env[62519]: DEBUG oslo_vmware.api [None req-887fdd97-c9a0-4663-ab52-0f67cebf30b9 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Task: {'id': task-1802530, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1612.388362] env[62519]: DEBUG oslo_vmware.api [None req-ba15048a-4a0e-4e38-9130-d2fe9bd29d34 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Task: {'id': task-1802531, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1612.588783] env[62519]: DEBUG oslo_vmware.api [None req-902bf869-76cc-4e6e-a6eb-33e80a9d9579 tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Task: {'id': task-1802529, 'name': PowerOnVM_Task} progress is 100%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1612.591257] env[62519]: DEBUG nova.compute.manager [req-f30c22e8-a4df-40d6-ba0f-8eb4a4ed89a7 req-37dc98fb-531f-4283-87fd-030295822171 service nova] [instance: 09eefc1a-011b-4d2c-ab75-a1fcee740907] Received event network-changed-7948e8a1-83dc-4329-aa44-813b4e25c1c7 {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1612.591504] env[62519]: DEBUG nova.compute.manager [req-f30c22e8-a4df-40d6-ba0f-8eb4a4ed89a7 req-37dc98fb-531f-4283-87fd-030295822171 service nova] [instance: 09eefc1a-011b-4d2c-ab75-a1fcee740907] Refreshing instance network info cache due to event network-changed-7948e8a1-83dc-4329-aa44-813b4e25c1c7. 
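The "Inventory has not changed for provider f8ca0d98-..." record above is the resource-provider inventory the compute node reports to Placement. The schedulable capacity per resource class is (total - reserved) * allocation_ratio, with max_unit capping any single allocation. A small sketch using the exact numbers from that log line:

# Inventory copied from the log line above; the formula is how Placement
# derives usable capacity for each resource class.
inventory = {
    'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0, 'max_unit': 16},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0, 'max_unit': 65530},
    'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0, 'max_unit': 157},
}

for rc, inv in inventory.items():
    capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
    print(f"{rc}: capacity={capacity:g}, max per allocation={inv['max_unit']}")
# VCPU: capacity=192, MEMORY_MB: capacity=196078, DISK_GB: capacity=400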
{{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11628}} [ 1612.591687] env[62519]: DEBUG oslo_concurrency.lockutils [req-f30c22e8-a4df-40d6-ba0f-8eb4a4ed89a7 req-37dc98fb-531f-4283-87fd-030295822171 service nova] Acquiring lock "refresh_cache-09eefc1a-011b-4d2c-ab75-a1fcee740907" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1612.591783] env[62519]: DEBUG oslo_concurrency.lockutils [req-f30c22e8-a4df-40d6-ba0f-8eb4a4ed89a7 req-37dc98fb-531f-4283-87fd-030295822171 service nova] Acquired lock "refresh_cache-09eefc1a-011b-4d2c-ab75-a1fcee740907" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1612.591939] env[62519]: DEBUG nova.network.neutron [req-f30c22e8-a4df-40d6-ba0f-8eb4a4ed89a7 req-37dc98fb-531f-4283-87fd-030295822171 service nova] [instance: 09eefc1a-011b-4d2c-ab75-a1fcee740907] Refreshing network info cache for port 7948e8a1-83dc-4329-aa44-813b4e25c1c7 {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1612.661341] env[62519]: DEBUG oslo_concurrency.lockutils [None req-64126044-6a15-4d18-9748-0958bc8df406 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.729s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1612.661341] env[62519]: INFO nova.compute.manager [None req-64126044-6a15-4d18-9748-0958bc8df406 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] [instance: a1551278-a306-4534-8d8d-3b3a003dde04] Migrating [ 1612.668196] env[62519]: DEBUG oslo_concurrency.lockutils [None req-eb7b956f-b805-4696-91cf-83b33daf0afe tempest-ServersTestMultiNic-797441404 tempest-ServersTestMultiNic-797441404-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 30.362s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1612.669705] env[62519]: INFO nova.compute.claims [None req-eb7b956f-b805-4696-91cf-83b33daf0afe tempest-ServersTestMultiNic-797441404 tempest-ServersTestMultiNic-797441404-project-member] [instance: d5b6b7b4-ebc4-4d3d-8e16-bf0a4e974d67] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1612.824451] env[62519]: DEBUG nova.compute.manager [None req-f0afbcc8-8045-4111-b3d2-45982468a24d tempest-ServerGroupTestJSON-1731423365 tempest-ServerGroupTestJSON-1731423365-project-member] [instance: 45d1aa86-a5c8-4e75-a6c8-5f55461702f8] Starting instance... {{(pid=62519) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2441}} [ 1612.841127] env[62519]: DEBUG oslo_vmware.api [None req-887fdd97-c9a0-4663-ab52-0f67cebf30b9 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Task: {'id': task-1802530, 'name': ReconfigVM_Task, 'duration_secs': 0.73332} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1612.841752] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-887fdd97-c9a0-4663-ab52-0f67cebf30b9 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] [instance: 80ef3fd4-b9ef-4fd2-a991-feec78a0c81d] Reconfigured VM instance instance-00000031 to attach disk [datastore1] 80ef3fd4-b9ef-4fd2-a991-feec78a0c81d/80ef3fd4-b9ef-4fd2-a991-feec78a0c81d.vmdk or device None with type sparse {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1612.842441] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-9330ed48-2de3-40c9-9a20-7bd89e158074 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1612.854894] env[62519]: DEBUG oslo_vmware.api [None req-887fdd97-c9a0-4663-ab52-0f67cebf30b9 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Waiting for the task: (returnval){ [ 1612.854894] env[62519]: value = "task-1802532" [ 1612.854894] env[62519]: _type = "Task" [ 1612.854894] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1612.864100] env[62519]: DEBUG oslo_vmware.api [None req-887fdd97-c9a0-4663-ab52-0f67cebf30b9 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Task: {'id': task-1802532, 'name': Rename_Task} progress is 5%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1612.891462] env[62519]: DEBUG oslo_vmware.api [None req-ba15048a-4a0e-4e38-9130-d2fe9bd29d34 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Task: {'id': task-1802531, 'name': ReconfigVM_Task, 'duration_secs': 0.684576} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1612.891637] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-ba15048a-4a0e-4e38-9130-d2fe9bd29d34 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] [instance: 302edcd3-bd6e-41da-b731-4d4c1bb5c3c1] Reconfigured VM instance instance-00000033 to attach disk [datastore1] 302edcd3-bd6e-41da-b731-4d4c1bb5c3c1/302edcd3-bd6e-41da-b731-4d4c1bb5c3c1.vmdk or device None with type sparse {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1612.892259] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-7171c70d-b852-42dc-8bd9-a9f47859bd2b {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1612.900199] env[62519]: DEBUG oslo_vmware.api [None req-ba15048a-4a0e-4e38-9130-d2fe9bd29d34 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Waiting for the task: (returnval){ [ 1612.900199] env[62519]: value = "task-1802533" [ 1612.900199] env[62519]: _type = "Task" [ 1612.900199] env[62519]: } to complete. 
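The repeated "_poll_task ... progress is N%" / "completed successfully" pairs around task-1802530 through task-1802536 come from oslo.vmware polling each vCenter task on a fixed interval until it reaches a terminal state. The sketch below shows that polling shape using oslo.service's FixedIntervalLoopingCall, which is roughly the mechanism oslo.vmware's wait_for_task relies on; `read_task_state` is a placeholder for the real RetrievePropertiesEx call against the task object, so this is an illustration rather than the library's code.

from oslo_service import loopingcall


def wait_for_task(read_task_state, poll_interval=0.5):
    def _poll():
        state, result = read_task_state()
        if state == 'success':
            # Stops the loop; .wait() below returns `result`.
            raise loopingcall.LoopingCallDone(result)
        if state == 'error':
            raise RuntimeError('task failed')
        # queued/running: log progress and poll again on the next interval

    timer = loopingcall.FixedIntervalLoopingCall(_poll)
    return timer.start(interval=poll_interval).wait()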
{{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1612.909500] env[62519]: DEBUG oslo_vmware.api [None req-ba15048a-4a0e-4e38-9130-d2fe9bd29d34 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Task: {'id': task-1802533, 'name': Rename_Task} progress is 5%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1613.089918] env[62519]: DEBUG oslo_vmware.api [None req-902bf869-76cc-4e6e-a6eb-33e80a9d9579 tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Task: {'id': task-1802529, 'name': PowerOnVM_Task, 'duration_secs': 1.026691} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1613.090528] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-902bf869-76cc-4e6e-a6eb-33e80a9d9579 tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] [instance: 34d2991e-b6df-473d-8994-e45ff57ef131] Powered on the VM {{(pid=62519) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1613.090787] env[62519]: DEBUG nova.compute.manager [None req-902bf869-76cc-4e6e-a6eb-33e80a9d9579 tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] [instance: 34d2991e-b6df-473d-8994-e45ff57ef131] Checking state {{(pid=62519) _get_power_state /opt/stack/nova/nova/compute/manager.py:1799}} [ 1613.091618] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5387a313-c8ac-4ef0-a154-43ee3e8d0afa {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1613.185188] env[62519]: DEBUG oslo_concurrency.lockutils [None req-64126044-6a15-4d18-9748-0958bc8df406 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Acquiring lock "refresh_cache-a1551278-a306-4534-8d8d-3b3a003dde04" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1613.185397] env[62519]: DEBUG oslo_concurrency.lockutils [None req-64126044-6a15-4d18-9748-0958bc8df406 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Acquired lock "refresh_cache-a1551278-a306-4534-8d8d-3b3a003dde04" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1613.185570] env[62519]: DEBUG nova.network.neutron [None req-64126044-6a15-4d18-9748-0958bc8df406 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] [instance: a1551278-a306-4534-8d8d-3b3a003dde04] Building network info cache for instance {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1613.354149] env[62519]: DEBUG oslo_concurrency.lockutils [None req-f0afbcc8-8045-4111-b3d2-45982468a24d tempest-ServerGroupTestJSON-1731423365 tempest-ServerGroupTestJSON-1731423365-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1613.365303] env[62519]: DEBUG oslo_vmware.api [None req-887fdd97-c9a0-4663-ab52-0f67cebf30b9 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Task: 
{'id': task-1802532, 'name': Rename_Task} progress is 99%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1613.368705] env[62519]: DEBUG nova.network.neutron [req-f30c22e8-a4df-40d6-ba0f-8eb4a4ed89a7 req-37dc98fb-531f-4283-87fd-030295822171 service nova] [instance: 09eefc1a-011b-4d2c-ab75-a1fcee740907] Updated VIF entry in instance network info cache for port 7948e8a1-83dc-4329-aa44-813b4e25c1c7. {{(pid=62519) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1613.369060] env[62519]: DEBUG nova.network.neutron [req-f30c22e8-a4df-40d6-ba0f-8eb4a4ed89a7 req-37dc98fb-531f-4283-87fd-030295822171 service nova] [instance: 09eefc1a-011b-4d2c-ab75-a1fcee740907] Updating instance_info_cache with network_info: [{"id": "7948e8a1-83dc-4329-aa44-813b4e25c1c7", "address": "fa:16:3e:dd:49:e8", "network": {"id": "ca655458-a420-4494-a897-206c445e8893", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-2085945802-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.226", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "08ccb6256cb446e1837e04580892a31a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "98d96b75-ac36-499a-adc2-130c8c1d55ca", "external-id": "nsx-vlan-transportzone-564", "segmentation_id": 564, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7948e8a1-83", "ovs_interfaceid": "7948e8a1-83dc-4329-aa44-813b4e25c1c7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1613.411484] env[62519]: DEBUG oslo_vmware.api [None req-ba15048a-4a0e-4e38-9130-d2fe9bd29d34 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Task: {'id': task-1802533, 'name': Rename_Task, 'duration_secs': 0.220844} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1613.411728] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-ba15048a-4a0e-4e38-9130-d2fe9bd29d34 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] [instance: 302edcd3-bd6e-41da-b731-4d4c1bb5c3c1] Powering on the VM {{(pid=62519) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1613.411969] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b1c5fe03-73ba-4c9a-9fd9-ace6de1ea5f9 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1613.420289] env[62519]: DEBUG oslo_vmware.api [None req-ba15048a-4a0e-4e38-9130-d2fe9bd29d34 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Waiting for the task: (returnval){ [ 1613.420289] env[62519]: value = "task-1802534" [ 1613.420289] env[62519]: _type = "Task" [ 1613.420289] env[62519]: } to complete. 
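The instance_info_cache update above carries the full Neutron network_info for port 7948e8a1: MAC, fixed IP, its floating IP, and the OVS/NSX binding details. A pure-Python sketch that walks one entry of that shape (trimmed to the fields it touches) and pulls the addresses out:

vif = {
    "id": "7948e8a1-83dc-4329-aa44-813b4e25c1c7",
    "address": "fa:16:3e:dd:49:e8",
    "devname": "tap7948e8a1-83",
    "network": {
        "subnets": [{
            "cidr": "192.168.128.0/28",
            "ips": [{
                "address": "192.168.128.5",
                "type": "fixed",
                "floating_ips": [{"address": "10.180.180.226", "type": "floating"}],
            }],
        }],
    },
}

fixed, floating = [], []
for subnet in vif["network"]["subnets"]:
    for ip in subnet["ips"]:
        fixed.append(ip["address"])
        floating.extend(f["address"] for f in ip.get("floating_ips", []))

print(vif["address"], fixed, floating)
# fa:16:3e:dd:49:e8 ['192.168.128.5'] ['10.180.180.226']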
{{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1613.428446] env[62519]: DEBUG oslo_vmware.api [None req-ba15048a-4a0e-4e38-9130-d2fe9bd29d34 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Task: {'id': task-1802534, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1613.610157] env[62519]: DEBUG oslo_concurrency.lockutils [None req-902bf869-76cc-4e6e-a6eb-33e80a9d9579 tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1613.677662] env[62519]: DEBUG nova.network.neutron [None req-94572272-cfe8-4ebd-bbc6-1659ba6f617f tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] [instance: c60f5d73-9d6d-4b5f-b71b-00b6b787d482] Successfully updated port: 76a9613b-a465-4420-ab6e-fd38db7bccac {{(pid=62519) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1613.869086] env[62519]: DEBUG oslo_vmware.api [None req-887fdd97-c9a0-4663-ab52-0f67cebf30b9 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Task: {'id': task-1802532, 'name': Rename_Task} progress is 99%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1613.872569] env[62519]: DEBUG oslo_concurrency.lockutils [req-f30c22e8-a4df-40d6-ba0f-8eb4a4ed89a7 req-37dc98fb-531f-4283-87fd-030295822171 service nova] Releasing lock "refresh_cache-09eefc1a-011b-4d2c-ab75-a1fcee740907" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1613.933319] env[62519]: DEBUG oslo_vmware.api [None req-ba15048a-4a0e-4e38-9130-d2fe9bd29d34 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Task: {'id': task-1802534, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1614.182438] env[62519]: DEBUG oslo_concurrency.lockutils [None req-94572272-cfe8-4ebd-bbc6-1659ba6f617f tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Acquiring lock "refresh_cache-c60f5d73-9d6d-4b5f-b71b-00b6b787d482" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1614.182585] env[62519]: DEBUG oslo_concurrency.lockutils [None req-94572272-cfe8-4ebd-bbc6-1659ba6f617f tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Acquired lock "refresh_cache-c60f5d73-9d6d-4b5f-b71b-00b6b787d482" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1614.182732] env[62519]: DEBUG nova.network.neutron [None req-94572272-cfe8-4ebd-bbc6-1659ba6f617f tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] [instance: c60f5d73-9d6d-4b5f-b71b-00b6b787d482] Building network info cache for instance {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1614.185343] env[62519]: DEBUG nova.network.neutron [None req-64126044-6a15-4d18-9748-0958bc8df406 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] [instance: a1551278-a306-4534-8d8d-3b3a003dde04] Updating instance_info_cache with network_info: [{"id": "1cf55e56-b406-4e45-9b63-912d4587f930", "address": "fa:16:3e:57:b8:10", "network": {"id": "b4c20e4e-b4f0-4757-9ab0-b74eef10b678", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.171", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "4069337684d348e0a1ab4eb6a1a2b14d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fe99da4f-5630-4afd-918b-b327193d8489", "external-id": "nsx-vlan-transportzone-688", "segmentation_id": 688, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1cf55e56-b4", "ovs_interfaceid": "1cf55e56-b406-4e45-9b63-912d4587f930", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1614.303499] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81e69078-6a09-4819-8403-1d6220169ec3 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1614.311970] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72acd2d4-6b23-423f-b6ff-9845cae430d6 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1614.347154] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67c0f67b-86dc-41d9-88b2-cd9206936a5a {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1614.356225] 
env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3cfd9de-ac5a-4df9-b7bd-2aaa68cc990b {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1614.370876] env[62519]: DEBUG oslo_vmware.api [None req-887fdd97-c9a0-4663-ab52-0f67cebf30b9 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Task: {'id': task-1802532, 'name': Rename_Task, 'duration_secs': 1.261706} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1614.378495] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-887fdd97-c9a0-4663-ab52-0f67cebf30b9 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] [instance: 80ef3fd4-b9ef-4fd2-a991-feec78a0c81d] Powering on the VM {{(pid=62519) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1614.378996] env[62519]: DEBUG nova.compute.provider_tree [None req-eb7b956f-b805-4696-91cf-83b33daf0afe tempest-ServersTestMultiNic-797441404 tempest-ServersTestMultiNic-797441404-project-member] Inventory has not changed in ProviderTree for provider: f8ca0d98-9158-4b85-ae0e-b106f966dd44 {{(pid=62519) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1614.380381] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-0ae424af-8e4c-4845-9954-ab0c867462ee {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1614.389909] env[62519]: DEBUG oslo_vmware.api [None req-887fdd97-c9a0-4663-ab52-0f67cebf30b9 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Waiting for the task: (returnval){ [ 1614.389909] env[62519]: value = "task-1802535" [ 1614.389909] env[62519]: _type = "Task" [ 1614.389909] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1614.400280] env[62519]: DEBUG oslo_vmware.api [None req-887fdd97-c9a0-4663-ab52-0f67cebf30b9 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Task: {'id': task-1802535, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1614.433303] env[62519]: DEBUG oslo_vmware.api [None req-ba15048a-4a0e-4e38-9130-d2fe9bd29d34 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Task: {'id': task-1802534, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1614.688632] env[62519]: DEBUG oslo_concurrency.lockutils [None req-64126044-6a15-4d18-9748-0958bc8df406 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Releasing lock "refresh_cache-a1551278-a306-4534-8d8d-3b3a003dde04" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1614.741534] env[62519]: DEBUG nova.network.neutron [None req-94572272-cfe8-4ebd-bbc6-1659ba6f617f tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] [instance: c60f5d73-9d6d-4b5f-b71b-00b6b787d482] Instance cache missing network info. 
{{(pid=62519) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1614.886365] env[62519]: DEBUG nova.scheduler.client.report [None req-eb7b956f-b805-4696-91cf-83b33daf0afe tempest-ServersTestMultiNic-797441404 tempest-ServersTestMultiNic-797441404-project-member] Inventory has not changed for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 157, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1614.906467] env[62519]: DEBUG oslo_vmware.api [None req-887fdd97-c9a0-4663-ab52-0f67cebf30b9 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Task: {'id': task-1802535, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1614.934015] env[62519]: DEBUG oslo_vmware.api [None req-ba15048a-4a0e-4e38-9130-d2fe9bd29d34 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Task: {'id': task-1802534, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1615.043690] env[62519]: DEBUG nova.network.neutron [None req-94572272-cfe8-4ebd-bbc6-1659ba6f617f tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] [instance: c60f5d73-9d6d-4b5f-b71b-00b6b787d482] Updating instance_info_cache with network_info: [{"id": "76a9613b-a465-4420-ab6e-fd38db7bccac", "address": "fa:16:3e:50:84:49", "network": {"id": "0ccefefe-8ea4-4ea3-91a2-32d7697da357", "bridge": "br-int", "label": "tempest-ImagesTestJSON-41230602-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a0755b34e22d4478817ec4e2d57aac2a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "594b35bb-a20b-4f0e-bd35-9acf9cc6bf11", "external-id": "nsx-vlan-transportzone-299", "segmentation_id": 299, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap76a9613b-a4", "ovs_interfaceid": "76a9613b-a465-4420-ab6e-fd38db7bccac", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1615.375595] env[62519]: DEBUG nova.compute.manager [req-e02d1ca3-5ce1-42b3-80f5-c2e614d9f554 req-63243093-b132-4d39-841a-f115df62179a service nova] [instance: c60f5d73-9d6d-4b5f-b71b-00b6b787d482] Received event network-vif-plugged-76a9613b-a465-4420-ab6e-fd38db7bccac {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1615.375813] env[62519]: DEBUG oslo_concurrency.lockutils 
[req-e02d1ca3-5ce1-42b3-80f5-c2e614d9f554 req-63243093-b132-4d39-841a-f115df62179a service nova] Acquiring lock "c60f5d73-9d6d-4b5f-b71b-00b6b787d482-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1615.376024] env[62519]: DEBUG oslo_concurrency.lockutils [req-e02d1ca3-5ce1-42b3-80f5-c2e614d9f554 req-63243093-b132-4d39-841a-f115df62179a service nova] Lock "c60f5d73-9d6d-4b5f-b71b-00b6b787d482-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1615.376209] env[62519]: DEBUG oslo_concurrency.lockutils [req-e02d1ca3-5ce1-42b3-80f5-c2e614d9f554 req-63243093-b132-4d39-841a-f115df62179a service nova] Lock "c60f5d73-9d6d-4b5f-b71b-00b6b787d482-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1615.376358] env[62519]: DEBUG nova.compute.manager [req-e02d1ca3-5ce1-42b3-80f5-c2e614d9f554 req-63243093-b132-4d39-841a-f115df62179a service nova] [instance: c60f5d73-9d6d-4b5f-b71b-00b6b787d482] No waiting events found dispatching network-vif-plugged-76a9613b-a465-4420-ab6e-fd38db7bccac {{(pid=62519) pop_instance_event /opt/stack/nova/nova/compute/manager.py:323}} [ 1615.376539] env[62519]: WARNING nova.compute.manager [req-e02d1ca3-5ce1-42b3-80f5-c2e614d9f554 req-63243093-b132-4d39-841a-f115df62179a service nova] [instance: c60f5d73-9d6d-4b5f-b71b-00b6b787d482] Received unexpected event network-vif-plugged-76a9613b-a465-4420-ab6e-fd38db7bccac for instance with vm_state building and task_state spawning. [ 1615.379860] env[62519]: DEBUG nova.compute.manager [req-e02d1ca3-5ce1-42b3-80f5-c2e614d9f554 req-63243093-b132-4d39-841a-f115df62179a service nova] [instance: c60f5d73-9d6d-4b5f-b71b-00b6b787d482] Received event network-changed-76a9613b-a465-4420-ab6e-fd38db7bccac {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1615.379860] env[62519]: DEBUG nova.compute.manager [req-e02d1ca3-5ce1-42b3-80f5-c2e614d9f554 req-63243093-b132-4d39-841a-f115df62179a service nova] [instance: c60f5d73-9d6d-4b5f-b71b-00b6b787d482] Refreshing instance network info cache due to event network-changed-76a9613b-a465-4420-ab6e-fd38db7bccac. 
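The block above shows the external-event path: Neutron sends network-vif-plugged-<port> and network-changed-<port>, the compute manager takes the per-instance "<uuid>-events" lock, pops any waiter registered for that exact "<name>-<tag>" key, and logs a warning when nothing was waiting, which is what happens here while c60f5d73 is still spawning. A toy sketch of that dispatch, with threading standing in for Nova's eventlet plumbing; it is an illustration of the pattern, not the compute manager's code:

import threading

_waiters = {}             # "<instance>/<event-name>-<tag>" -> threading.Event
_lock = threading.Lock()  # plays the role of the "<instance>-events" lock


def prepare_for_event(instance, key):
    ev = threading.Event()
    with _lock:
        _waiters[f"{instance}/{key}"] = ev
    return ev


def external_instance_event(instance, key):
    with _lock:
        ev = _waiters.pop(f"{instance}/{key}", None)
    if ev is None:
        print(f"WARNING: unexpected event {key} for instance {instance}")
    else:
        ev.set()


# The WARNING in the log corresponds to this branch: the event arrives before
# any waiter was registered for it.
external_instance_event("c60f5d73", "network-vif-plugged-76a9613b")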
{{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11628}} [ 1615.379860] env[62519]: DEBUG oslo_concurrency.lockutils [req-e02d1ca3-5ce1-42b3-80f5-c2e614d9f554 req-63243093-b132-4d39-841a-f115df62179a service nova] Acquiring lock "refresh_cache-c60f5d73-9d6d-4b5f-b71b-00b6b787d482" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1615.392671] env[62519]: DEBUG oslo_concurrency.lockutils [None req-eb7b956f-b805-4696-91cf-83b33daf0afe tempest-ServersTestMultiNic-797441404 tempest-ServersTestMultiNic-797441404-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.724s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1615.392909] env[62519]: DEBUG nova.compute.manager [None req-eb7b956f-b805-4696-91cf-83b33daf0afe tempest-ServersTestMultiNic-797441404 tempest-ServersTestMultiNic-797441404-project-member] [instance: d5b6b7b4-ebc4-4d3d-8e16-bf0a4e974d67] Start building networks asynchronously for instance. {{(pid=62519) _build_resources /opt/stack/nova/nova/compute/manager.py:2838}} [ 1615.395749] env[62519]: DEBUG oslo_concurrency.lockutils [None req-9f05e4f9-be1d-429d-8b5d-c079517a8a2e tempest-ImagesNegativeTestJSON-1194326905 tempest-ImagesNegativeTestJSON-1194326905-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 29.955s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1615.395947] env[62519]: DEBUG oslo_concurrency.lockutils [None req-9f05e4f9-be1d-429d-8b5d-c079517a8a2e tempest-ImagesNegativeTestJSON-1194326905 tempest-ImagesNegativeTestJSON-1194326905-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1615.398016] env[62519]: DEBUG oslo_concurrency.lockutils [None req-852acb1c-a919-4af2-9465-5ce6a7ca4001 tempest-TenantUsagesTestJSON-1732852836 tempest-TenantUsagesTestJSON-1732852836-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 25.783s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1615.400243] env[62519]: INFO nova.compute.claims [None req-852acb1c-a919-4af2-9465-5ce6a7ca4001 tempest-TenantUsagesTestJSON-1732852836 tempest-TenantUsagesTestJSON-1732852836-project-member] [instance: 540167be-5295-4e28-9b25-16317746dd0e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1615.414575] env[62519]: DEBUG oslo_vmware.api [None req-887fdd97-c9a0-4663-ab52-0f67cebf30b9 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Task: {'id': task-1802535, 'name': PowerOnVM_Task, 'duration_secs': 0.616955} completed successfully. 
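The "Acquiring / acquired / released lock ... waited 25.783s ... held 2.724s" lines throughout come from oslo.concurrency's lockutils, which names each lock and times how long a caller queued for it and held it. A minimal sketch of guarding resource-tracker operations with the same primitive; the lock name matches the log, while the function bodies are placeholders rather than Nova's claim logic:

from oslo_concurrency import lockutils


@lockutils.synchronized('compute_resources')
def instance_claim(instance, resources):
    # ... check free VCPU/RAM/disk against tracked totals, record the usage ...
    return True


def update_usage(instance, resources):
    # Context-manager form of the same named, in-process lock.
    with lockutils.lock('compute_resources'):
        # ... adjust tracked usage for a resize, migration or delete ...
        pass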
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1615.414710] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-887fdd97-c9a0-4663-ab52-0f67cebf30b9 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] [instance: 80ef3fd4-b9ef-4fd2-a991-feec78a0c81d] Powered on the VM {{(pid=62519) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1615.414930] env[62519]: DEBUG nova.compute.manager [None req-887fdd97-c9a0-4663-ab52-0f67cebf30b9 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] [instance: 80ef3fd4-b9ef-4fd2-a991-feec78a0c81d] Checking state {{(pid=62519) _get_power_state /opt/stack/nova/nova/compute/manager.py:1799}} [ 1615.415699] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-102dd04f-ca27-4a2c-8091-3e9b24fa4825 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1615.436973] env[62519]: DEBUG oslo_vmware.api [None req-ba15048a-4a0e-4e38-9130-d2fe9bd29d34 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Task: {'id': task-1802534, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1615.440436] env[62519]: INFO nova.scheduler.client.report [None req-9f05e4f9-be1d-429d-8b5d-c079517a8a2e tempest-ImagesNegativeTestJSON-1194326905 tempest-ImagesNegativeTestJSON-1194326905-project-member] Deleted allocations for instance 1b4efc0f-474f-4a32-b199-c14f27b183e2 [ 1615.546526] env[62519]: DEBUG oslo_concurrency.lockutils [None req-94572272-cfe8-4ebd-bbc6-1659ba6f617f tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Releasing lock "refresh_cache-c60f5d73-9d6d-4b5f-b71b-00b6b787d482" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1615.546855] env[62519]: DEBUG nova.compute.manager [None req-94572272-cfe8-4ebd-bbc6-1659ba6f617f tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] [instance: c60f5d73-9d6d-4b5f-b71b-00b6b787d482] Instance network_info: |[{"id": "76a9613b-a465-4420-ab6e-fd38db7bccac", "address": "fa:16:3e:50:84:49", "network": {"id": "0ccefefe-8ea4-4ea3-91a2-32d7697da357", "bridge": "br-int", "label": "tempest-ImagesTestJSON-41230602-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a0755b34e22d4478817ec4e2d57aac2a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "594b35bb-a20b-4f0e-bd35-9acf9cc6bf11", "external-id": "nsx-vlan-transportzone-299", "segmentation_id": 299, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap76a9613b-a4", "ovs_interfaceid": "76a9613b-a465-4420-ab6e-fd38db7bccac", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62519) 
_allocate_network_async /opt/stack/nova/nova/compute/manager.py:2004}} [ 1615.547280] env[62519]: DEBUG oslo_concurrency.lockutils [req-e02d1ca3-5ce1-42b3-80f5-c2e614d9f554 req-63243093-b132-4d39-841a-f115df62179a service nova] Acquired lock "refresh_cache-c60f5d73-9d6d-4b5f-b71b-00b6b787d482" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1615.547465] env[62519]: DEBUG nova.network.neutron [req-e02d1ca3-5ce1-42b3-80f5-c2e614d9f554 req-63243093-b132-4d39-841a-f115df62179a service nova] [instance: c60f5d73-9d6d-4b5f-b71b-00b6b787d482] Refreshing network info cache for port 76a9613b-a465-4420-ab6e-fd38db7bccac {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1615.548800] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-94572272-cfe8-4ebd-bbc6-1659ba6f617f tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] [instance: c60f5d73-9d6d-4b5f-b71b-00b6b787d482] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:50:84:49', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '594b35bb-a20b-4f0e-bd35-9acf9cc6bf11', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '76a9613b-a465-4420-ab6e-fd38db7bccac', 'vif_model': 'vmxnet3'}] {{(pid=62519) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1615.557794] env[62519]: DEBUG oslo.service.loopingcall [None req-94572272-cfe8-4ebd-bbc6-1659ba6f617f tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62519) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1615.560635] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c60f5d73-9d6d-4b5f-b71b-00b6b787d482] Creating VM on the ESX host {{(pid=62519) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1615.561595] env[62519]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-93416f54-e3e3-4507-bd46-ce25ca28c63b {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1615.585296] env[62519]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1615.585296] env[62519]: value = "task-1802536" [ 1615.585296] env[62519]: _type = "Task" [ 1615.585296] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1615.597306] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1802536, 'name': CreateVM_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1615.909962] env[62519]: DEBUG nova.compute.utils [None req-eb7b956f-b805-4696-91cf-83b33daf0afe tempest-ServersTestMultiNic-797441404 tempest-ServersTestMultiNic-797441404-project-member] Using /dev/sd instead of None {{(pid=62519) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1615.911312] env[62519]: DEBUG nova.compute.manager [None req-eb7b956f-b805-4696-91cf-83b33daf0afe tempest-ServersTestMultiNic-797441404 tempest-ServersTestMultiNic-797441404-project-member] [instance: d5b6b7b4-ebc4-4d3d-8e16-bf0a4e974d67] Allocating IP information in the background. 
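The "Instance VIF info [...]" record above is the intermediate structure the VMware driver derives from Neutron's network_info before Folder.CreateVM_Task: the opaque NSX logical-switch reference, the MAC, the port id, and the vmxnet3 model. A sketch of that mapping using the field names printed in the log (the dict layout mirrors the log output, not a documented API, and the function is illustrative rather than the driver's own):

def vif_info_from_network_info(network_info):
    vif_infos = []
    for vif in network_info:
        details = vif["details"]
        vif_infos.append({
            "network_name": vif["network"]["bridge"],   # e.g. "br-int"
            "mac_address": vif["address"],
            "network_ref": {
                "type": "OpaqueNetwork",
                "network-id": details["nsx-logical-switch-id"],
                "network-type": "nsx.LogicalSwitch",
                "use-external-id": True,
            },
            "iface_id": vif["id"],
            "vif_model": "vmxnet3",
        })
    return vif_infos

Feeding it the network_info entry shown earlier for port 76a9613b yields a dict matching the "Instance VIF info" line above.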
{{(pid=62519) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1989}} [ 1615.911509] env[62519]: DEBUG nova.network.neutron [None req-eb7b956f-b805-4696-91cf-83b33daf0afe tempest-ServersTestMultiNic-797441404 tempest-ServersTestMultiNic-797441404-project-member] [instance: d5b6b7b4-ebc4-4d3d-8e16-bf0a4e974d67] allocate_for_instance() {{(pid=62519) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1615.933184] env[62519]: INFO nova.compute.manager [None req-887fdd97-c9a0-4663-ab52-0f67cebf30b9 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] [instance: 80ef3fd4-b9ef-4fd2-a991-feec78a0c81d] bringing vm to original state: 'stopped' [ 1615.946501] env[62519]: DEBUG oslo_vmware.api [None req-ba15048a-4a0e-4e38-9130-d2fe9bd29d34 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Task: {'id': task-1802534, 'name': PowerOnVM_Task, 'duration_secs': 2.347978} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1615.946935] env[62519]: DEBUG oslo_concurrency.lockutils [None req-9f05e4f9-be1d-429d-8b5d-c079517a8a2e tempest-ImagesNegativeTestJSON-1194326905 tempest-ImagesNegativeTestJSON-1194326905-project-member] Lock "1b4efc0f-474f-4a32-b199-c14f27b183e2" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 34.715s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1615.948088] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-ba15048a-4a0e-4e38-9130-d2fe9bd29d34 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] [instance: 302edcd3-bd6e-41da-b731-4d4c1bb5c3c1] Powered on the VM {{(pid=62519) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1615.948311] env[62519]: DEBUG nova.compute.manager [None req-ba15048a-4a0e-4e38-9130-d2fe9bd29d34 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] [instance: 302edcd3-bd6e-41da-b731-4d4c1bb5c3c1] Checking state {{(pid=62519) _get_power_state /opt/stack/nova/nova/compute/manager.py:1799}} [ 1615.951051] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d136f8a-bfce-43ef-bff7-2035174ea3b6 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1615.974199] env[62519]: DEBUG nova.policy [None req-eb7b956f-b805-4696-91cf-83b33daf0afe tempest-ServersTestMultiNic-797441404 tempest-ServersTestMultiNic-797441404-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ef1d07fc4c0a400dab4ec451e7c1fcfc', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '08813c8c5d0b45dbab5a05ed08ef9531', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62519) authorize /opt/stack/nova/nova/policy.py:192}} [ 1615.975896] env[62519]: DEBUG nova.network.neutron [req-e02d1ca3-5ce1-42b3-80f5-c2e614d9f554 req-63243093-b132-4d39-841a-f115df62179a service nova] [instance: c60f5d73-9d6d-4b5f-b71b-00b6b787d482] Updated VIF entry in instance network 
info cache for port 76a9613b-a465-4420-ab6e-fd38db7bccac. {{(pid=62519) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1615.976224] env[62519]: DEBUG nova.network.neutron [req-e02d1ca3-5ce1-42b3-80f5-c2e614d9f554 req-63243093-b132-4d39-841a-f115df62179a service nova] [instance: c60f5d73-9d6d-4b5f-b71b-00b6b787d482] Updating instance_info_cache with network_info: [{"id": "76a9613b-a465-4420-ab6e-fd38db7bccac", "address": "fa:16:3e:50:84:49", "network": {"id": "0ccefefe-8ea4-4ea3-91a2-32d7697da357", "bridge": "br-int", "label": "tempest-ImagesTestJSON-41230602-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a0755b34e22d4478817ec4e2d57aac2a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "594b35bb-a20b-4f0e-bd35-9acf9cc6bf11", "external-id": "nsx-vlan-transportzone-299", "segmentation_id": 299, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap76a9613b-a4", "ovs_interfaceid": "76a9613b-a465-4420-ab6e-fd38db7bccac", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1616.096339] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1802536, 'name': CreateVM_Task} progress is 99%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1616.208882] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ecb7a40-70f4-4f7e-a4d7-4ed534fb2012 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1616.231139] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-64126044-6a15-4d18-9748-0958bc8df406 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] [instance: a1551278-a306-4534-8d8d-3b3a003dde04] Updating instance 'a1551278-a306-4534-8d8d-3b3a003dde04' progress to 0 {{(pid=62519) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1616.285903] env[62519]: DEBUG nova.network.neutron [None req-eb7b956f-b805-4696-91cf-83b33daf0afe tempest-ServersTestMultiNic-797441404 tempest-ServersTestMultiNic-797441404-project-member] [instance: d5b6b7b4-ebc4-4d3d-8e16-bf0a4e974d67] Successfully created port: d146cb22-9f7c-47f1-bcdf-fca468f1d2df {{(pid=62519) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1616.418383] env[62519]: DEBUG nova.compute.manager [None req-eb7b956f-b805-4696-91cf-83b33daf0afe tempest-ServersTestMultiNic-797441404 tempest-ServersTestMultiNic-797441404-project-member] [instance: d5b6b7b4-ebc4-4d3d-8e16-bf0a4e974d67] Start building block device mappings for instance. 
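The "Policy check for network:attach_external_network failed with credentials {...}" record a few lines earlier is oslo.policy evaluating a registered rule against the requester's credentials; for a plain member without the required role the check fails and Nova simply does not attach external networks. A hedged sketch of that evaluation using oslo.policy directly; the 'role:admin' check string is an assumption for illustration, since Nova registers its own defaults for this rule:

from oslo_config import cfg
from oslo_policy import policy

cfg.CONF([])  # initialize an empty config so the enforcer can read its options
enforcer = policy.Enforcer(cfg.CONF)
enforcer.register_default(
    policy.RuleDefault('network:attach_external_network', 'role:admin'))

creds = {'roles': ['reader', 'member'],
         'project_id': '08813c8c5d0b45dbab5a05ed08ef9531'}
allowed = enforcer.enforce('network:attach_external_network', {}, creds)
print(allowed)  # False for a plain member, matching the failed check in the log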
{{(pid=62519) _build_resources /opt/stack/nova/nova/compute/manager.py:2873}} [ 1616.478300] env[62519]: DEBUG oslo_concurrency.lockutils [None req-ba15048a-4a0e-4e38-9130-d2fe9bd29d34 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1616.478985] env[62519]: DEBUG oslo_concurrency.lockutils [req-e02d1ca3-5ce1-42b3-80f5-c2e614d9f554 req-63243093-b132-4d39-841a-f115df62179a service nova] Releasing lock "refresh_cache-c60f5d73-9d6d-4b5f-b71b-00b6b787d482" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1616.479280] env[62519]: DEBUG nova.compute.manager [req-e02d1ca3-5ce1-42b3-80f5-c2e614d9f554 req-63243093-b132-4d39-841a-f115df62179a service nova] [instance: 09eefc1a-011b-4d2c-ab75-a1fcee740907] Received event network-changed-7948e8a1-83dc-4329-aa44-813b4e25c1c7 {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1616.479477] env[62519]: DEBUG nova.compute.manager [req-e02d1ca3-5ce1-42b3-80f5-c2e614d9f554 req-63243093-b132-4d39-841a-f115df62179a service nova] [instance: 09eefc1a-011b-4d2c-ab75-a1fcee740907] Refreshing instance network info cache due to event network-changed-7948e8a1-83dc-4329-aa44-813b4e25c1c7. {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11628}} [ 1616.479968] env[62519]: DEBUG oslo_concurrency.lockutils [req-e02d1ca3-5ce1-42b3-80f5-c2e614d9f554 req-63243093-b132-4d39-841a-f115df62179a service nova] Acquiring lock "refresh_cache-09eefc1a-011b-4d2c-ab75-a1fcee740907" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1616.479968] env[62519]: DEBUG oslo_concurrency.lockutils [req-e02d1ca3-5ce1-42b3-80f5-c2e614d9f554 req-63243093-b132-4d39-841a-f115df62179a service nova] Acquired lock "refresh_cache-09eefc1a-011b-4d2c-ab75-a1fcee740907" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1616.479968] env[62519]: DEBUG nova.network.neutron [req-e02d1ca3-5ce1-42b3-80f5-c2e614d9f554 req-63243093-b132-4d39-841a-f115df62179a service nova] [instance: 09eefc1a-011b-4d2c-ab75-a1fcee740907] Refreshing network info cache for port 7948e8a1-83dc-4329-aa44-813b4e25c1c7 {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1616.571146] env[62519]: DEBUG nova.network.neutron [None req-eb7b956f-b805-4696-91cf-83b33daf0afe tempest-ServersTestMultiNic-797441404 tempest-ServersTestMultiNic-797441404-project-member] [instance: d5b6b7b4-ebc4-4d3d-8e16-bf0a4e974d67] Successfully created port: 260a58db-3019-4382-9551-1787dc70912c {{(pid=62519) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1616.598297] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1802536, 'name': CreateVM_Task, 'duration_secs': 0.661117} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1616.600769] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c60f5d73-9d6d-4b5f-b71b-00b6b787d482] Created VM on the ESX host {{(pid=62519) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1616.601689] env[62519]: DEBUG oslo_concurrency.lockutils [None req-94572272-cfe8-4ebd-bbc6-1659ba6f617f tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1616.601781] env[62519]: DEBUG oslo_concurrency.lockutils [None req-94572272-cfe8-4ebd-bbc6-1659ba6f617f tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Acquired lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1616.602110] env[62519]: DEBUG oslo_concurrency.lockutils [None req-94572272-cfe8-4ebd-bbc6-1659ba6f617f tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1616.602400] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-721cc421-57cd-46ab-9b52-d4554aa5b565 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1616.607948] env[62519]: DEBUG oslo_vmware.api [None req-94572272-cfe8-4ebd-bbc6-1659ba6f617f tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Waiting for the task: (returnval){ [ 1616.607948] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]52b5410c-1c02-5cdc-2f66-328ea2a35aa2" [ 1616.607948] env[62519]: _type = "Task" [ 1616.607948] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1616.618476] env[62519]: DEBUG oslo_vmware.api [None req-94572272-cfe8-4ebd-bbc6-1659ba6f617f tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52b5410c-1c02-5cdc-2f66-328ea2a35aa2, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1616.742436] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-64126044-6a15-4d18-9748-0958bc8df406 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] [instance: a1551278-a306-4534-8d8d-3b3a003dde04] Powering off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1616.742769] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-62f337e8-173c-4f49-bb07-fcf4247a77f0 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1616.754118] env[62519]: DEBUG oslo_vmware.api [None req-64126044-6a15-4d18-9748-0958bc8df406 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Waiting for the task: (returnval){ [ 1616.754118] env[62519]: value = "task-1802537" [ 1616.754118] env[62519]: _type = "Task" [ 1616.754118] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1616.765448] env[62519]: DEBUG oslo_vmware.api [None req-64126044-6a15-4d18-9748-0958bc8df406 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Task: {'id': task-1802537, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1616.816833] env[62519]: INFO nova.compute.manager [None req-9d6c69a6-1fea-4ba8-a2c2-a90023b1aa8f tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] [instance: 34d2991e-b6df-473d-8994-e45ff57ef131] Rebuilding instance [ 1616.872589] env[62519]: DEBUG nova.compute.manager [None req-9d6c69a6-1fea-4ba8-a2c2-a90023b1aa8f tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] [instance: 34d2991e-b6df-473d-8994-e45ff57ef131] Checking state {{(pid=62519) _get_power_state /opt/stack/nova/nova/compute/manager.py:1799}} [ 1616.873881] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-416e0f34-bc58-469e-af87-9162ee1561cf {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1616.946153] env[62519]: DEBUG oslo_concurrency.lockutils [None req-887fdd97-c9a0-4663-ab52-0f67cebf30b9 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Acquiring lock "80ef3fd4-b9ef-4fd2-a991-feec78a0c81d" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1616.949030] env[62519]: DEBUG oslo_concurrency.lockutils [None req-887fdd97-c9a0-4663-ab52-0f67cebf30b9 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Lock "80ef3fd4-b9ef-4fd2-a991-feec78a0c81d" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.001s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1616.949030] env[62519]: DEBUG nova.compute.manager [None req-887fdd97-c9a0-4663-ab52-0f67cebf30b9 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] [instance: 
80ef3fd4-b9ef-4fd2-a991-feec78a0c81d] Checking state {{(pid=62519) _get_power_state /opt/stack/nova/nova/compute/manager.py:1799}} [ 1616.949030] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9591d595-bfc2-4ddf-937c-5bef9bcd3a8a {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1616.964173] env[62519]: DEBUG nova.compute.manager [None req-887fdd97-c9a0-4663-ab52-0f67cebf30b9 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] [instance: 80ef3fd4-b9ef-4fd2-a991-feec78a0c81d] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=62519) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3408}} [ 1616.973348] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-383d9dc9-f958-4c37-983c-649cef95475f {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1616.985713] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-85135cf8-6364-434a-8e65-7785a7d3cb75 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1617.019462] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1dfbafc-9e30-4246-bdfa-16d17171c3ec {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1617.028904] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de3560b7-0d22-4193-9763-0ab7296d4ab5 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1617.047932] env[62519]: DEBUG nova.compute.provider_tree [None req-852acb1c-a919-4af2-9465-5ce6a7ca4001 tempest-TenantUsagesTestJSON-1732852836 tempest-TenantUsagesTestJSON-1732852836-project-member] Inventory has not changed in ProviderTree for provider: f8ca0d98-9158-4b85-ae0e-b106f966dd44 {{(pid=62519) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1617.119418] env[62519]: DEBUG oslo_vmware.api [None req-94572272-cfe8-4ebd-bbc6-1659ba6f617f tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52b5410c-1c02-5cdc-2f66-328ea2a35aa2, 'name': SearchDatastore_Task, 'duration_secs': 0.012047} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1617.119801] env[62519]: DEBUG oslo_concurrency.lockutils [None req-94572272-cfe8-4ebd-bbc6-1659ba6f617f tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Releasing lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1617.120095] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-94572272-cfe8-4ebd-bbc6-1659ba6f617f tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] [instance: c60f5d73-9d6d-4b5f-b71b-00b6b787d482] Processing image 15793716-f1d9-4a86-9030-717adf498693 {{(pid=62519) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1617.120378] env[62519]: DEBUG oslo_concurrency.lockutils [None req-94572272-cfe8-4ebd-bbc6-1659ba6f617f tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1617.120563] env[62519]: DEBUG oslo_concurrency.lockutils [None req-94572272-cfe8-4ebd-bbc6-1659ba6f617f tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Acquired lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1617.120779] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-94572272-cfe8-4ebd-bbc6-1659ba6f617f tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62519) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1617.121841] env[62519]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-35c7540f-8fee-4037-9683-7eec24cfdd79 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1617.131805] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-94572272-cfe8-4ebd-bbc6-1659ba6f617f tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62519) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1617.132060] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-94572272-cfe8-4ebd-bbc6-1659ba6f617f tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62519) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1617.133159] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a50398dd-cd1b-45d4-b061-8134c0f685b6 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1617.138586] env[62519]: DEBUG oslo_vmware.api [None req-94572272-cfe8-4ebd-bbc6-1659ba6f617f tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Waiting for the task: (returnval){ [ 1617.138586] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]526090c8-0d2b-bdd4-e6b4-b52b9b9068cc" [ 1617.138586] env[62519]: _type = "Task" [ 1617.138586] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1617.151487] env[62519]: DEBUG oslo_vmware.api [None req-94572272-cfe8-4ebd-bbc6-1659ba6f617f tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]526090c8-0d2b-bdd4-e6b4-b52b9b9068cc, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1617.267199] env[62519]: DEBUG oslo_vmware.api [None req-64126044-6a15-4d18-9748-0958bc8df406 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Task: {'id': task-1802537, 'name': PowerOffVM_Task, 'duration_secs': 0.232732} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1617.267510] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-64126044-6a15-4d18-9748-0958bc8df406 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] [instance: a1551278-a306-4534-8d8d-3b3a003dde04] Powered off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1617.267691] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-64126044-6a15-4d18-9748-0958bc8df406 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] [instance: a1551278-a306-4534-8d8d-3b3a003dde04] Updating instance 'a1551278-a306-4534-8d8d-3b3a003dde04' progress to 17 {{(pid=62519) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1617.400429] env[62519]: DEBUG nova.network.neutron [req-e02d1ca3-5ce1-42b3-80f5-c2e614d9f554 req-63243093-b132-4d39-841a-f115df62179a service nova] [instance: 09eefc1a-011b-4d2c-ab75-a1fcee740907] Updated VIF entry in instance network info cache for port 7948e8a1-83dc-4329-aa44-813b4e25c1c7. 
{{(pid=62519) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1617.400429] env[62519]: DEBUG nova.network.neutron [req-e02d1ca3-5ce1-42b3-80f5-c2e614d9f554 req-63243093-b132-4d39-841a-f115df62179a service nova] [instance: 09eefc1a-011b-4d2c-ab75-a1fcee740907] Updating instance_info_cache with network_info: [{"id": "7948e8a1-83dc-4329-aa44-813b4e25c1c7", "address": "fa:16:3e:dd:49:e8", "network": {"id": "ca655458-a420-4494-a897-206c445e8893", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-2085945802-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "08ccb6256cb446e1837e04580892a31a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "98d96b75-ac36-499a-adc2-130c8c1d55ca", "external-id": "nsx-vlan-transportzone-564", "segmentation_id": 564, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7948e8a1-83", "ovs_interfaceid": "7948e8a1-83dc-4329-aa44-813b4e25c1c7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1617.428881] env[62519]: DEBUG nova.compute.manager [None req-eb7b956f-b805-4696-91cf-83b33daf0afe tempest-ServersTestMultiNic-797441404 tempest-ServersTestMultiNic-797441404-project-member] [instance: d5b6b7b4-ebc4-4d3d-8e16-bf0a4e974d67] Start spawning the instance on the hypervisor. 
{{(pid=62519) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2647}} [ 1617.454581] env[62519]: DEBUG nova.virt.hardware [None req-eb7b956f-b805-4696-91cf-83b33daf0afe tempest-ServersTestMultiNic-797441404 tempest-ServersTestMultiNic-797441404-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T07:56:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T07:55:55Z,direct_url=,disk_format='vmdk',id=15793716-f1d9-4a86-9030-717adf498693,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4069337684d348e0a1ab4eb6a1a2b14d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T07:55:56Z,virtual_size=,visibility=), allow threads: False {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1617.454815] env[62519]: DEBUG nova.virt.hardware [None req-eb7b956f-b805-4696-91cf-83b33daf0afe tempest-ServersTestMultiNic-797441404 tempest-ServersTestMultiNic-797441404-project-member] Flavor limits 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1617.454969] env[62519]: DEBUG nova.virt.hardware [None req-eb7b956f-b805-4696-91cf-83b33daf0afe tempest-ServersTestMultiNic-797441404 tempest-ServersTestMultiNic-797441404-project-member] Image limits 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1617.455378] env[62519]: DEBUG nova.virt.hardware [None req-eb7b956f-b805-4696-91cf-83b33daf0afe tempest-ServersTestMultiNic-797441404 tempest-ServersTestMultiNic-797441404-project-member] Flavor pref 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1617.455538] env[62519]: DEBUG nova.virt.hardware [None req-eb7b956f-b805-4696-91cf-83b33daf0afe tempest-ServersTestMultiNic-797441404 tempest-ServersTestMultiNic-797441404-project-member] Image pref 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1617.455686] env[62519]: DEBUG nova.virt.hardware [None req-eb7b956f-b805-4696-91cf-83b33daf0afe tempest-ServersTestMultiNic-797441404 tempest-ServersTestMultiNic-797441404-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1617.455898] env[62519]: DEBUG nova.virt.hardware [None req-eb7b956f-b805-4696-91cf-83b33daf0afe tempest-ServersTestMultiNic-797441404 tempest-ServersTestMultiNic-797441404-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1617.456064] env[62519]: DEBUG nova.virt.hardware [None req-eb7b956f-b805-4696-91cf-83b33daf0afe tempest-ServersTestMultiNic-797441404 tempest-ServersTestMultiNic-797441404-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62519) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1617.456235] env[62519]: DEBUG nova.virt.hardware [None req-eb7b956f-b805-4696-91cf-83b33daf0afe 
tempest-ServersTestMultiNic-797441404 tempest-ServersTestMultiNic-797441404-project-member] Got 1 possible topologies {{(pid=62519) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1617.456395] env[62519]: DEBUG nova.virt.hardware [None req-eb7b956f-b805-4696-91cf-83b33daf0afe tempest-ServersTestMultiNic-797441404 tempest-ServersTestMultiNic-797441404-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1617.456561] env[62519]: DEBUG nova.virt.hardware [None req-eb7b956f-b805-4696-91cf-83b33daf0afe tempest-ServersTestMultiNic-797441404 tempest-ServersTestMultiNic-797441404-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1617.457478] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-227c9206-8599-49fc-91af-a34c70afcb7c {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1617.466650] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-887fdd97-c9a0-4663-ab52-0f67cebf30b9 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] [instance: 80ef3fd4-b9ef-4fd2-a991-feec78a0c81d] Powering off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1617.466992] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-fa31779e-1163-4869-8ee1-d71e9a2500c7 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1617.470268] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1b5a1ec-339d-4ae3-889b-67c72bbb492d {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1617.489771] env[62519]: DEBUG oslo_vmware.api [None req-887fdd97-c9a0-4663-ab52-0f67cebf30b9 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Waiting for the task: (returnval){ [ 1617.489771] env[62519]: value = "task-1802538" [ 1617.489771] env[62519]: _type = "Task" [ 1617.489771] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1617.498813] env[62519]: DEBUG oslo_vmware.api [None req-887fdd97-c9a0-4663-ab52-0f67cebf30b9 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Task: {'id': task-1802538, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1617.553305] env[62519]: DEBUG nova.scheduler.client.report [None req-852acb1c-a919-4af2-9465-5ce6a7ca4001 tempest-TenantUsagesTestJSON-1732852836 tempest-TenantUsagesTestJSON-1732852836-project-member] Inventory has not changed for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 157, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1617.649903] env[62519]: DEBUG oslo_vmware.api [None req-94572272-cfe8-4ebd-bbc6-1659ba6f617f tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]526090c8-0d2b-bdd4-e6b4-b52b9b9068cc, 'name': SearchDatastore_Task, 'duration_secs': 0.012799} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1617.650813] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-121f1c8f-163d-47d7-af7f-e1fc25b3b213 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1617.658357] env[62519]: DEBUG oslo_vmware.api [None req-94572272-cfe8-4ebd-bbc6-1659ba6f617f tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Waiting for the task: (returnval){ [ 1617.658357] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]52943203-0701-85c9-0dc2-7dfba64dc473" [ 1617.658357] env[62519]: _type = "Task" [ 1617.658357] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1617.669116] env[62519]: DEBUG oslo_vmware.api [None req-94572272-cfe8-4ebd-bbc6-1659ba6f617f tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52943203-0701-85c9-0dc2-7dfba64dc473, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1617.774331] env[62519]: DEBUG nova.virt.hardware [None req-64126044-6a15-4d18-9748-0958bc8df406 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T07:56:15Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=15793716-f1d9-4a86-9030-717adf498693,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1617.774583] env[62519]: DEBUG nova.virt.hardware [None req-64126044-6a15-4d18-9748-0958bc8df406 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Flavor limits 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1617.776698] env[62519]: DEBUG nova.virt.hardware [None req-64126044-6a15-4d18-9748-0958bc8df406 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Image limits 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1617.776698] env[62519]: DEBUG nova.virt.hardware [None req-64126044-6a15-4d18-9748-0958bc8df406 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Flavor pref 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1617.776698] env[62519]: DEBUG nova.virt.hardware [None req-64126044-6a15-4d18-9748-0958bc8df406 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Image pref 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1617.776698] env[62519]: DEBUG nova.virt.hardware [None req-64126044-6a15-4d18-9748-0958bc8df406 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1617.776698] env[62519]: DEBUG nova.virt.hardware [None req-64126044-6a15-4d18-9748-0958bc8df406 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1617.776970] env[62519]: DEBUG nova.virt.hardware [None req-64126044-6a15-4d18-9748-0958bc8df406 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62519) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1617.776970] env[62519]: DEBUG nova.virt.hardware [None req-64126044-6a15-4d18-9748-0958bc8df406 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Got 1 possible topologies {{(pid=62519) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1617.777124] env[62519]: DEBUG nova.virt.hardware [None req-64126044-6a15-4d18-9748-0958bc8df406 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1617.777335] env[62519]: DEBUG nova.virt.hardware [None req-64126044-6a15-4d18-9748-0958bc8df406 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1617.784374] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6df910a4-3a27-48a9-892b-8152e4dc8af3 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1617.802189] env[62519]: DEBUG oslo_vmware.api [None req-64126044-6a15-4d18-9748-0958bc8df406 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Waiting for the task: (returnval){ [ 1617.802189] env[62519]: value = "task-1802539" [ 1617.802189] env[62519]: _type = "Task" [ 1617.802189] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1617.811503] env[62519]: DEBUG oslo_vmware.api [None req-64126044-6a15-4d18-9748-0958bc8df406 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Task: {'id': task-1802539, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1617.898039] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-9d6c69a6-1fea-4ba8-a2c2-a90023b1aa8f tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] [instance: 34d2991e-b6df-473d-8994-e45ff57ef131] Powering off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1617.898500] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-1e497343-119a-40c0-9a9d-ce3d8b59a8c3 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1617.906068] env[62519]: DEBUG oslo_concurrency.lockutils [req-e02d1ca3-5ce1-42b3-80f5-c2e614d9f554 req-63243093-b132-4d39-841a-f115df62179a service nova] Releasing lock "refresh_cache-09eefc1a-011b-4d2c-ab75-a1fcee740907" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1617.906068] env[62519]: DEBUG nova.compute.manager [req-e02d1ca3-5ce1-42b3-80f5-c2e614d9f554 req-63243093-b132-4d39-841a-f115df62179a service nova] [instance: 9ac3344d-219a-487f-b83f-96c17cd86dad] Received event network-changed-883790fe-b8ee-4a72-99ca-e7d80c7468f3 {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1617.906068] env[62519]: DEBUG nova.compute.manager [req-e02d1ca3-5ce1-42b3-80f5-c2e614d9f554 req-63243093-b132-4d39-841a-f115df62179a service nova] [instance: 9ac3344d-219a-487f-b83f-96c17cd86dad] Refreshing instance network info cache due to event network-changed-883790fe-b8ee-4a72-99ca-e7d80c7468f3. 
{{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11628}} [ 1617.906068] env[62519]: DEBUG oslo_concurrency.lockutils [req-e02d1ca3-5ce1-42b3-80f5-c2e614d9f554 req-63243093-b132-4d39-841a-f115df62179a service nova] Acquiring lock "refresh_cache-9ac3344d-219a-487f-b83f-96c17cd86dad" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1617.906068] env[62519]: DEBUG oslo_concurrency.lockutils [req-e02d1ca3-5ce1-42b3-80f5-c2e614d9f554 req-63243093-b132-4d39-841a-f115df62179a service nova] Acquired lock "refresh_cache-9ac3344d-219a-487f-b83f-96c17cd86dad" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1617.906068] env[62519]: DEBUG nova.network.neutron [req-e02d1ca3-5ce1-42b3-80f5-c2e614d9f554 req-63243093-b132-4d39-841a-f115df62179a service nova] [instance: 9ac3344d-219a-487f-b83f-96c17cd86dad] Refreshing network info cache for port 883790fe-b8ee-4a72-99ca-e7d80c7468f3 {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1617.909515] env[62519]: DEBUG oslo_vmware.api [None req-9d6c69a6-1fea-4ba8-a2c2-a90023b1aa8f tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Waiting for the task: (returnval){ [ 1617.909515] env[62519]: value = "task-1802540" [ 1617.909515] env[62519]: _type = "Task" [ 1617.909515] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1617.924252] env[62519]: DEBUG oslo_vmware.api [None req-9d6c69a6-1fea-4ba8-a2c2-a90023b1aa8f tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Task: {'id': task-1802540, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1618.002142] env[62519]: DEBUG oslo_vmware.api [None req-887fdd97-c9a0-4663-ab52-0f67cebf30b9 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Task: {'id': task-1802538, 'name': PowerOffVM_Task, 'duration_secs': 0.305594} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1618.002421] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-887fdd97-c9a0-4663-ab52-0f67cebf30b9 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] [instance: 80ef3fd4-b9ef-4fd2-a991-feec78a0c81d] Powered off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1618.002579] env[62519]: DEBUG nova.compute.manager [None req-887fdd97-c9a0-4663-ab52-0f67cebf30b9 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] [instance: 80ef3fd4-b9ef-4fd2-a991-feec78a0c81d] Checking state {{(pid=62519) _get_power_state /opt/stack/nova/nova/compute/manager.py:1799}} [ 1618.003443] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-acf35eb1-196c-4e67-9ee6-65b816687607 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1618.058460] env[62519]: DEBUG oslo_concurrency.lockutils [None req-852acb1c-a919-4af2-9465-5ce6a7ca4001 tempest-TenantUsagesTestJSON-1732852836 tempest-TenantUsagesTestJSON-1732852836-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.660s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1618.058980] env[62519]: DEBUG nova.compute.manager [None req-852acb1c-a919-4af2-9465-5ce6a7ca4001 tempest-TenantUsagesTestJSON-1732852836 tempest-TenantUsagesTestJSON-1732852836-project-member] [instance: 540167be-5295-4e28-9b25-16317746dd0e] Start building networks asynchronously for instance. {{(pid=62519) _build_resources /opt/stack/nova/nova/compute/manager.py:2838}} [ 1618.062499] env[62519]: DEBUG oslo_concurrency.lockutils [None req-484beabb-56ac-464b-a93a-122561b99fee tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 27.280s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1618.067023] env[62519]: INFO nova.compute.claims [None req-484beabb-56ac-464b-a93a-122561b99fee tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] [instance: 46b3a0fb-29f6-4b66-a091-2d125b69d109] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1618.143752] env[62519]: DEBUG nova.compute.manager [req-90e27a62-9d92-4e83-9c19-031ffef16a34 req-09b4827e-93d8-4ca3-a90d-28c96cd1f8ba service nova] [instance: 9ac3344d-219a-487f-b83f-96c17cd86dad] Received event network-changed-883790fe-b8ee-4a72-99ca-e7d80c7468f3 {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1618.143939] env[62519]: DEBUG nova.compute.manager [req-90e27a62-9d92-4e83-9c19-031ffef16a34 req-09b4827e-93d8-4ca3-a90d-28c96cd1f8ba service nova] [instance: 9ac3344d-219a-487f-b83f-96c17cd86dad] Refreshing instance network info cache due to event network-changed-883790fe-b8ee-4a72-99ca-e7d80c7468f3. 
{{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11628}} [ 1618.144144] env[62519]: DEBUG oslo_concurrency.lockutils [req-90e27a62-9d92-4e83-9c19-031ffef16a34 req-09b4827e-93d8-4ca3-a90d-28c96cd1f8ba service nova] Acquiring lock "refresh_cache-9ac3344d-219a-487f-b83f-96c17cd86dad" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1618.155332] env[62519]: DEBUG nova.network.neutron [None req-eb7b956f-b805-4696-91cf-83b33daf0afe tempest-ServersTestMultiNic-797441404 tempest-ServersTestMultiNic-797441404-project-member] [instance: d5b6b7b4-ebc4-4d3d-8e16-bf0a4e974d67] Successfully updated port: d146cb22-9f7c-47f1-bcdf-fca468f1d2df {{(pid=62519) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1618.176805] env[62519]: DEBUG oslo_vmware.api [None req-94572272-cfe8-4ebd-bbc6-1659ba6f617f tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52943203-0701-85c9-0dc2-7dfba64dc473, 'name': SearchDatastore_Task, 'duration_secs': 0.011668} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1618.177296] env[62519]: DEBUG oslo_concurrency.lockutils [None req-94572272-cfe8-4ebd-bbc6-1659ba6f617f tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Releasing lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1618.178498] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-94572272-cfe8-4ebd-bbc6-1659ba6f617f tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk to [datastore1] c60f5d73-9d6d-4b5f-b71b-00b6b787d482/c60f5d73-9d6d-4b5f-b71b-00b6b787d482.vmdk {{(pid=62519) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1618.178498] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-4d240939-f8a6-4822-82ab-241cb23ad4b1 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1618.188474] env[62519]: DEBUG oslo_vmware.api [None req-94572272-cfe8-4ebd-bbc6-1659ba6f617f tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Waiting for the task: (returnval){ [ 1618.188474] env[62519]: value = "task-1802541" [ 1618.188474] env[62519]: _type = "Task" [ 1618.188474] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1618.200338] env[62519]: DEBUG oslo_vmware.api [None req-94572272-cfe8-4ebd-bbc6-1659ba6f617f tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Task: {'id': task-1802541, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1618.254325] env[62519]: DEBUG nova.compute.manager [req-e7c373f0-cb46-41fd-9f1d-053a64ffe63f req-0a3a6214-57cf-4cb0-9a74-75d889d7ac91 service nova] [instance: d5b6b7b4-ebc4-4d3d-8e16-bf0a4e974d67] Received event network-vif-plugged-d146cb22-9f7c-47f1-bcdf-fca468f1d2df {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1618.255168] env[62519]: DEBUG oslo_concurrency.lockutils [req-e7c373f0-cb46-41fd-9f1d-053a64ffe63f req-0a3a6214-57cf-4cb0-9a74-75d889d7ac91 service nova] Acquiring lock "d5b6b7b4-ebc4-4d3d-8e16-bf0a4e974d67-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1618.255168] env[62519]: DEBUG oslo_concurrency.lockutils [req-e7c373f0-cb46-41fd-9f1d-053a64ffe63f req-0a3a6214-57cf-4cb0-9a74-75d889d7ac91 service nova] Lock "d5b6b7b4-ebc4-4d3d-8e16-bf0a4e974d67-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1618.255332] env[62519]: DEBUG oslo_concurrency.lockutils [req-e7c373f0-cb46-41fd-9f1d-053a64ffe63f req-0a3a6214-57cf-4cb0-9a74-75d889d7ac91 service nova] Lock "d5b6b7b4-ebc4-4d3d-8e16-bf0a4e974d67-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1618.255431] env[62519]: DEBUG nova.compute.manager [req-e7c373f0-cb46-41fd-9f1d-053a64ffe63f req-0a3a6214-57cf-4cb0-9a74-75d889d7ac91 service nova] [instance: d5b6b7b4-ebc4-4d3d-8e16-bf0a4e974d67] No waiting events found dispatching network-vif-plugged-d146cb22-9f7c-47f1-bcdf-fca468f1d2df {{(pid=62519) pop_instance_event /opt/stack/nova/nova/compute/manager.py:323}} [ 1618.256227] env[62519]: WARNING nova.compute.manager [req-e7c373f0-cb46-41fd-9f1d-053a64ffe63f req-0a3a6214-57cf-4cb0-9a74-75d889d7ac91 service nova] [instance: d5b6b7b4-ebc4-4d3d-8e16-bf0a4e974d67] Received unexpected event network-vif-plugged-d146cb22-9f7c-47f1-bcdf-fca468f1d2df for instance with vm_state building and task_state spawning. [ 1618.315264] env[62519]: DEBUG oslo_vmware.api [None req-64126044-6a15-4d18-9748-0958bc8df406 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Task: {'id': task-1802539, 'name': ReconfigVM_Task, 'duration_secs': 0.180933} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1618.315264] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-64126044-6a15-4d18-9748-0958bc8df406 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] [instance: a1551278-a306-4534-8d8d-3b3a003dde04] Updating instance 'a1551278-a306-4534-8d8d-3b3a003dde04' progress to 33 {{(pid=62519) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1618.424228] env[62519]: DEBUG oslo_vmware.api [None req-9d6c69a6-1fea-4ba8-a2c2-a90023b1aa8f tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Task: {'id': task-1802540, 'name': PowerOffVM_Task, 'duration_secs': 0.236029} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1618.424948] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-9d6c69a6-1fea-4ba8-a2c2-a90023b1aa8f tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] [instance: 34d2991e-b6df-473d-8994-e45ff57ef131] Powered off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1618.425251] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-9d6c69a6-1fea-4ba8-a2c2-a90023b1aa8f tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] [instance: 34d2991e-b6df-473d-8994-e45ff57ef131] Destroying instance {{(pid=62519) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1618.426279] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a782a52d-a5b4-4637-806f-a65a8e94493f {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1618.436660] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-9d6c69a6-1fea-4ba8-a2c2-a90023b1aa8f tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] [instance: 34d2991e-b6df-473d-8994-e45ff57ef131] Unregistering the VM {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1618.437096] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d5a56fff-5d56-4189-88e8-356a8f0a48f3 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1618.519209] env[62519]: DEBUG oslo_concurrency.lockutils [None req-887fdd97-c9a0-4663-ab52-0f67cebf30b9 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Lock "80ef3fd4-b9ef-4fd2-a991-feec78a0c81d" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 1.572s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1618.570193] env[62519]: DEBUG nova.compute.utils [None req-852acb1c-a919-4af2-9465-5ce6a7ca4001 tempest-TenantUsagesTestJSON-1732852836 tempest-TenantUsagesTestJSON-1732852836-project-member] Using /dev/sd instead of None {{(pid=62519) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1618.576484] env[62519]: DEBUG nova.compute.manager [None req-852acb1c-a919-4af2-9465-5ce6a7ca4001 tempest-TenantUsagesTestJSON-1732852836 tempest-TenantUsagesTestJSON-1732852836-project-member] [instance: 540167be-5295-4e28-9b25-16317746dd0e] Allocating IP information in the background. 
{{(pid=62519) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1989}} [ 1618.576484] env[62519]: DEBUG nova.network.neutron [None req-852acb1c-a919-4af2-9465-5ce6a7ca4001 tempest-TenantUsagesTestJSON-1732852836 tempest-TenantUsagesTestJSON-1732852836-project-member] [instance: 540167be-5295-4e28-9b25-16317746dd0e] allocate_for_instance() {{(pid=62519) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1618.601748] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-9d6c69a6-1fea-4ba8-a2c2-a90023b1aa8f tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] [instance: 34d2991e-b6df-473d-8994-e45ff57ef131] Unregistered the VM {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1618.601943] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-9d6c69a6-1fea-4ba8-a2c2-a90023b1aa8f tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] [instance: 34d2991e-b6df-473d-8994-e45ff57ef131] Deleting contents of the VM from datastore datastore1 {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1618.602199] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-9d6c69a6-1fea-4ba8-a2c2-a90023b1aa8f tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Deleting the datastore file [datastore1] 34d2991e-b6df-473d-8994-e45ff57ef131 {{(pid=62519) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1618.602496] env[62519]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f03fc9d0-a34a-4e17-9399-3053acf86879 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1618.611539] env[62519]: DEBUG oslo_vmware.api [None req-9d6c69a6-1fea-4ba8-a2c2-a90023b1aa8f tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Waiting for the task: (returnval){ [ 1618.611539] env[62519]: value = "task-1802543" [ 1618.611539] env[62519]: _type = "Task" [ 1618.611539] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1618.626793] env[62519]: DEBUG oslo_vmware.api [None req-9d6c69a6-1fea-4ba8-a2c2-a90023b1aa8f tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Task: {'id': task-1802543, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1618.670563] env[62519]: DEBUG oslo_concurrency.lockutils [None req-d667a168-5c01-47dd-a85b-f4ea91c23881 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Acquiring lock "302edcd3-bd6e-41da-b731-4d4c1bb5c3c1" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1618.670563] env[62519]: DEBUG oslo_concurrency.lockutils [None req-d667a168-5c01-47dd-a85b-f4ea91c23881 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Lock "302edcd3-bd6e-41da-b731-4d4c1bb5c3c1" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1618.670827] env[62519]: DEBUG oslo_concurrency.lockutils [None req-d667a168-5c01-47dd-a85b-f4ea91c23881 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Acquiring lock "302edcd3-bd6e-41da-b731-4d4c1bb5c3c1-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1618.670890] env[62519]: DEBUG oslo_concurrency.lockutils [None req-d667a168-5c01-47dd-a85b-f4ea91c23881 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Lock "302edcd3-bd6e-41da-b731-4d4c1bb5c3c1-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1618.671055] env[62519]: DEBUG oslo_concurrency.lockutils [None req-d667a168-5c01-47dd-a85b-f4ea91c23881 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Lock "302edcd3-bd6e-41da-b731-4d4c1bb5c3c1-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1618.673653] env[62519]: INFO nova.compute.manager [None req-d667a168-5c01-47dd-a85b-f4ea91c23881 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] [instance: 302edcd3-bd6e-41da-b731-4d4c1bb5c3c1] Terminating instance [ 1618.684649] env[62519]: DEBUG nova.network.neutron [req-e02d1ca3-5ce1-42b3-80f5-c2e614d9f554 req-63243093-b132-4d39-841a-f115df62179a service nova] [instance: 9ac3344d-219a-487f-b83f-96c17cd86dad] Updated VIF entry in instance network info cache for port 883790fe-b8ee-4a72-99ca-e7d80c7468f3. 
{{(pid=62519) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1618.685161] env[62519]: DEBUG nova.network.neutron [req-e02d1ca3-5ce1-42b3-80f5-c2e614d9f554 req-63243093-b132-4d39-841a-f115df62179a service nova] [instance: 9ac3344d-219a-487f-b83f-96c17cd86dad] Updating instance_info_cache with network_info: [{"id": "883790fe-b8ee-4a72-99ca-e7d80c7468f3", "address": "fa:16:3e:bb:95:3b", "network": {"id": "ca655458-a420-4494-a897-206c445e8893", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-2085945802-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "08ccb6256cb446e1837e04580892a31a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "98d96b75-ac36-499a-adc2-130c8c1d55ca", "external-id": "nsx-vlan-transportzone-564", "segmentation_id": 564, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap883790fe-b8", "ovs_interfaceid": "883790fe-b8ee-4a72-99ca-e7d80c7468f3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1618.704237] env[62519]: DEBUG oslo_vmware.api [None req-94572272-cfe8-4ebd-bbc6-1659ba6f617f tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Task: {'id': task-1802541, 'name': CopyVirtualDisk_Task} progress is 89%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1618.717528] env[62519]: DEBUG oslo_concurrency.lockutils [None req-096fc0a9-25e3-439e-a204-847b80d0d183 tempest-FloatingIPsAssociationTestJSON-131832216 tempest-FloatingIPsAssociationTestJSON-131832216-project-member] Acquiring lock "9ac3344d-219a-487f-b83f-96c17cd86dad" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1618.717812] env[62519]: DEBUG oslo_concurrency.lockutils [None req-096fc0a9-25e3-439e-a204-847b80d0d183 tempest-FloatingIPsAssociationTestJSON-131832216 tempest-FloatingIPsAssociationTestJSON-131832216-project-member] Lock "9ac3344d-219a-487f-b83f-96c17cd86dad" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1618.718049] env[62519]: DEBUG oslo_concurrency.lockutils [None req-096fc0a9-25e3-439e-a204-847b80d0d183 tempest-FloatingIPsAssociationTestJSON-131832216 tempest-FloatingIPsAssociationTestJSON-131832216-project-member] Acquiring lock "9ac3344d-219a-487f-b83f-96c17cd86dad-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1618.718264] env[62519]: DEBUG oslo_concurrency.lockutils [None req-096fc0a9-25e3-439e-a204-847b80d0d183 tempest-FloatingIPsAssociationTestJSON-131832216 tempest-FloatingIPsAssociationTestJSON-131832216-project-member] Lock "9ac3344d-219a-487f-b83f-96c17cd86dad-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1618.718454] env[62519]: DEBUG oslo_concurrency.lockutils [None req-096fc0a9-25e3-439e-a204-847b80d0d183 tempest-FloatingIPsAssociationTestJSON-131832216 tempest-FloatingIPsAssociationTestJSON-131832216-project-member] Lock "9ac3344d-219a-487f-b83f-96c17cd86dad-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1618.720704] env[62519]: INFO nova.compute.manager [None req-096fc0a9-25e3-439e-a204-847b80d0d183 tempest-FloatingIPsAssociationTestJSON-131832216 tempest-FloatingIPsAssociationTestJSON-131832216-project-member] [instance: 9ac3344d-219a-487f-b83f-96c17cd86dad] Terminating instance [ 1618.743452] env[62519]: DEBUG nova.policy [None req-852acb1c-a919-4af2-9465-5ce6a7ca4001 tempest-TenantUsagesTestJSON-1732852836 tempest-TenantUsagesTestJSON-1732852836-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '747dad76101f45a180a63bf8885b6a16', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'cf42ff6ca553460fa3ead0cc71ed1186', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62519) authorize /opt/stack/nova/nova/policy.py:192}} [ 1618.823252] env[62519]: DEBUG 
nova.virt.hardware [None req-64126044-6a15-4d18-9748-0958bc8df406 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T08:12:36Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='95834530-4e39-49f6-a893-5ac841902368',id=37,is_public=True,memory_mb=192,name='tempest-test_resize_flavor_-2145697929',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=15793716-f1d9-4a86-9030-717adf498693,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1618.823354] env[62519]: DEBUG nova.virt.hardware [None req-64126044-6a15-4d18-9748-0958bc8df406 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Flavor limits 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1618.823630] env[62519]: DEBUG nova.virt.hardware [None req-64126044-6a15-4d18-9748-0958bc8df406 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Image limits 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1618.823717] env[62519]: DEBUG nova.virt.hardware [None req-64126044-6a15-4d18-9748-0958bc8df406 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Flavor pref 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1618.823930] env[62519]: DEBUG nova.virt.hardware [None req-64126044-6a15-4d18-9748-0958bc8df406 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Image pref 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1618.824024] env[62519]: DEBUG nova.virt.hardware [None req-64126044-6a15-4d18-9748-0958bc8df406 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1618.824312] env[62519]: DEBUG nova.virt.hardware [None req-64126044-6a15-4d18-9748-0958bc8df406 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1618.824382] env[62519]: DEBUG nova.virt.hardware [None req-64126044-6a15-4d18-9748-0958bc8df406 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62519) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1618.824514] env[62519]: DEBUG nova.virt.hardware [None req-64126044-6a15-4d18-9748-0958bc8df406 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Got 1 possible topologies {{(pid=62519) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 
1618.824668] env[62519]: DEBUG nova.virt.hardware [None req-64126044-6a15-4d18-9748-0958bc8df406 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1618.824828] env[62519]: DEBUG nova.virt.hardware [None req-64126044-6a15-4d18-9748-0958bc8df406 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1618.830599] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-64126044-6a15-4d18-9748-0958bc8df406 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] [instance: a1551278-a306-4534-8d8d-3b3a003dde04] Reconfiguring VM instance instance-0000002c to detach disk 2000 {{(pid=62519) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1618.830981] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a515d304-cf34-495b-abf6-d26d9861d573 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1618.852524] env[62519]: DEBUG oslo_vmware.api [None req-64126044-6a15-4d18-9748-0958bc8df406 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Waiting for the task: (returnval){ [ 1618.852524] env[62519]: value = "task-1802544" [ 1618.852524] env[62519]: _type = "Task" [ 1618.852524] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1618.862687] env[62519]: DEBUG oslo_vmware.api [None req-64126044-6a15-4d18-9748-0958bc8df406 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Task: {'id': task-1802544, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1619.075972] env[62519]: DEBUG nova.compute.manager [None req-852acb1c-a919-4af2-9465-5ce6a7ca4001 tempest-TenantUsagesTestJSON-1732852836 tempest-TenantUsagesTestJSON-1732852836-project-member] [instance: 540167be-5295-4e28-9b25-16317746dd0e] Start building block device mappings for instance. {{(pid=62519) _build_resources /opt/stack/nova/nova/compute/manager.py:2873}} [ 1619.122861] env[62519]: DEBUG oslo_vmware.api [None req-9d6c69a6-1fea-4ba8-a2c2-a90023b1aa8f tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Task: {'id': task-1802543, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.263023} completed successfully. 
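The nova.virt.hardware lines above walk from "Build topologies for 1 vcpu(s) 1:1:1" to a single VirtCPUTopology(cores=1,sockets=1,threads=1). The sketch below is an illustrative re-derivation of that enumeration, not Nova's actual code: it lists the (sockets, cores, threads) factorizations of a vCPU count that fit under the logged limits of 65536 each.

    from collections import namedtuple

    Topology = namedtuple("Topology", "sockets cores threads")

    def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
        """Enumerate (sockets, cores, threads) triples whose product equals vcpus.

        Illustrative only; nova.virt.hardware additionally applies flavor/image
        preferences and sorts the candidates before choosing one.
        """
        topologies = []
        for sockets in range(1, min(vcpus, max_sockets) + 1):
            if vcpus % sockets:
                continue
            for cores in range(1, min(vcpus // sockets, max_cores) + 1):
                if (vcpus // sockets) % cores:
                    continue
                threads = vcpus // (sockets * cores)
                if threads <= max_threads:
                    topologies.append(Topology(sockets, cores, threads))
        return topologies

    print(possible_topologies(1))  # [Topology(sockets=1, cores=1, threads=1)], as logged
    print(possible_topologies(4))  # includes 4x1x1, 2x2x1, 1x4x1, ...

For the 1-vCPU flavors in this run there is only one candidate, which is why every request above settles on 1:1:1.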
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1619.126099] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-9d6c69a6-1fea-4ba8-a2c2-a90023b1aa8f tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Deleted the datastore file {{(pid=62519) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1619.127952] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-9d6c69a6-1fea-4ba8-a2c2-a90023b1aa8f tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] [instance: 34d2991e-b6df-473d-8994-e45ff57ef131] Deleted contents of the VM from datastore datastore1 {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1619.127952] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-9d6c69a6-1fea-4ba8-a2c2-a90023b1aa8f tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] [instance: 34d2991e-b6df-473d-8994-e45ff57ef131] Instance destroyed {{(pid=62519) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1619.178798] env[62519]: DEBUG nova.compute.manager [None req-d667a168-5c01-47dd-a85b-f4ea91c23881 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] [instance: 302edcd3-bd6e-41da-b731-4d4c1bb5c3c1] Start destroying the instance on the hypervisor. {{(pid=62519) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3166}} [ 1619.179112] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-d667a168-5c01-47dd-a85b-f4ea91c23881 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] [instance: 302edcd3-bd6e-41da-b731-4d4c1bb5c3c1] Destroying instance {{(pid=62519) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1619.180306] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ad03b71-a5f5-4b65-9566-11694c5d9b1f {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1619.190353] env[62519]: DEBUG oslo_concurrency.lockutils [req-e02d1ca3-5ce1-42b3-80f5-c2e614d9f554 req-63243093-b132-4d39-841a-f115df62179a service nova] Releasing lock "refresh_cache-9ac3344d-219a-487f-b83f-96c17cd86dad" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1619.195213] env[62519]: DEBUG oslo_concurrency.lockutils [req-90e27a62-9d92-4e83-9c19-031ffef16a34 req-09b4827e-93d8-4ca3-a90d-28c96cd1f8ba service nova] Acquired lock "refresh_cache-9ac3344d-219a-487f-b83f-96c17cd86dad" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1619.195425] env[62519]: DEBUG nova.network.neutron [req-90e27a62-9d92-4e83-9c19-031ffef16a34 req-09b4827e-93d8-4ca3-a90d-28c96cd1f8ba service nova] [instance: 9ac3344d-219a-487f-b83f-96c17cd86dad] Refreshing network info cache for port 883790fe-b8ee-4a72-99ca-e7d80c7468f3 {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1619.200729] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-d667a168-5c01-47dd-a85b-f4ea91c23881 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] [instance: 302edcd3-bd6e-41da-b731-4d4c1bb5c3c1] Powering off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1619.205749] 
env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-5acf0e25-4f48-4819-a4b5-6e8f7dea5742 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1619.214451] env[62519]: DEBUG oslo_vmware.api [None req-94572272-cfe8-4ebd-bbc6-1659ba6f617f tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Task: {'id': task-1802541, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.52729} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1619.215757] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-94572272-cfe8-4ebd-bbc6-1659ba6f617f tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk to [datastore1] c60f5d73-9d6d-4b5f-b71b-00b6b787d482/c60f5d73-9d6d-4b5f-b71b-00b6b787d482.vmdk {{(pid=62519) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1619.215983] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-94572272-cfe8-4ebd-bbc6-1659ba6f617f tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] [instance: c60f5d73-9d6d-4b5f-b71b-00b6b787d482] Extending root virtual disk to 1048576 {{(pid=62519) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1619.216620] env[62519]: DEBUG oslo_vmware.api [None req-d667a168-5c01-47dd-a85b-f4ea91c23881 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Waiting for the task: (returnval){ [ 1619.216620] env[62519]: value = "task-1802545" [ 1619.216620] env[62519]: _type = "Task" [ 1619.216620] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1619.216892] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-f7b0c880-e45c-440b-8588-97ee9bb26485 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1619.226358] env[62519]: DEBUG nova.compute.manager [None req-096fc0a9-25e3-439e-a204-847b80d0d183 tempest-FloatingIPsAssociationTestJSON-131832216 tempest-FloatingIPsAssociationTestJSON-131832216-project-member] [instance: 9ac3344d-219a-487f-b83f-96c17cd86dad] Start destroying the instance on the hypervisor. 
{{(pid=62519) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3166}} [ 1619.226721] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-096fc0a9-25e3-439e-a204-847b80d0d183 tempest-FloatingIPsAssociationTestJSON-131832216 tempest-FloatingIPsAssociationTestJSON-131832216-project-member] [instance: 9ac3344d-219a-487f-b83f-96c17cd86dad] Destroying instance {{(pid=62519) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1619.228614] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc324a36-c6fd-4db7-87ec-444f717700da {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1619.238813] env[62519]: DEBUG oslo_vmware.api [None req-d667a168-5c01-47dd-a85b-f4ea91c23881 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Task: {'id': task-1802545, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1619.239191] env[62519]: DEBUG oslo_vmware.api [None req-94572272-cfe8-4ebd-bbc6-1659ba6f617f tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Waiting for the task: (returnval){ [ 1619.239191] env[62519]: value = "task-1802546" [ 1619.239191] env[62519]: _type = "Task" [ 1619.239191] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1619.250370] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-096fc0a9-25e3-439e-a204-847b80d0d183 tempest-FloatingIPsAssociationTestJSON-131832216 tempest-FloatingIPsAssociationTestJSON-131832216-project-member] [instance: 9ac3344d-219a-487f-b83f-96c17cd86dad] Powering off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1619.251135] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-68d9adc5-5f63-47bc-af5e-deceadb29331 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1619.257991] env[62519]: DEBUG oslo_vmware.api [None req-94572272-cfe8-4ebd-bbc6-1659ba6f617f tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Task: {'id': task-1802546, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1619.260689] env[62519]: DEBUG oslo_vmware.api [None req-096fc0a9-25e3-439e-a204-847b80d0d183 tempest-FloatingIPsAssociationTestJSON-131832216 tempest-FloatingIPsAssociationTestJSON-131832216-project-member] Waiting for the task: (returnval){ [ 1619.260689] env[62519]: value = "task-1802547" [ 1619.260689] env[62519]: _type = "Task" [ 1619.260689] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1619.271334] env[62519]: DEBUG oslo_vmware.api [None req-096fc0a9-25e3-439e-a204-847b80d0d183 tempest-FloatingIPsAssociationTestJSON-131832216 tempest-FloatingIPsAssociationTestJSON-131832216-project-member] Task: {'id': task-1802547, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1619.367341] env[62519]: DEBUG oslo_vmware.api [None req-64126044-6a15-4d18-9748-0958bc8df406 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Task: {'id': task-1802544, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1619.595749] env[62519]: DEBUG nova.network.neutron [None req-852acb1c-a919-4af2-9465-5ce6a7ca4001 tempest-TenantUsagesTestJSON-1732852836 tempest-TenantUsagesTestJSON-1732852836-project-member] [instance: 540167be-5295-4e28-9b25-16317746dd0e] Successfully created port: 028b3466-6a8c-46ec-81a2-43206a5852ec {{(pid=62519) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1619.685710] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-987a38e2-501c-423c-9bd1-034d7ac5a28a {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1619.692831] env[62519]: DEBUG oslo_concurrency.lockutils [None req-aeecf1a6-198a-4c5f-8645-99f7ca1d1fc0 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Acquiring lock "ad0af10d-5063-4344-b12f-1d3ee9ea1090" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1619.692967] env[62519]: DEBUG oslo_concurrency.lockutils [None req-aeecf1a6-198a-4c5f-8645-99f7ca1d1fc0 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Lock "ad0af10d-5063-4344-b12f-1d3ee9ea1090" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1619.701462] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42c3b06f-31c3-48e8-aae9-3a01cdf9a387 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1619.737155] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a81aca4-3e27-4a95-b632-ab7176226e3b {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1619.748785] env[62519]: DEBUG oslo_vmware.api [None req-d667a168-5c01-47dd-a85b-f4ea91c23881 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Task: {'id': task-1802545, 'name': PowerOffVM_Task, 'duration_secs': 0.257975} completed successfully. 
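Most entries in this stretch follow the oslo_vmware task pattern: a vCenter *_Task method is invoked, _poll_task logs "progress is N%" (api.py:434) while the task runs, and "completed successfully" (api.py:444) once it finishes, at which point wait_for_task returns. The loop below is a simplified stand-in for that behaviour using a toy task object; it is not oslo.vmware's implementation, where the state and progress come from vCenter itself.

    import time

    class SimulatedTask:
        """Toy stand-in for a vCenter task handle, for illustration only."""
        def __init__(self, steps=3):
            self._progress = 0
            self._step = 100 // steps

        def poll(self):
            # The real code reads task.info.state/progress from vCenter; here we
            # just advance a counter until it reaches 100%.
            self._progress = min(100, self._progress + self._step)
            state = "success" if self._progress >= 100 else "running"
            return state, self._progress

    def wait_for_task(task, poll_interval=0.01):
        """Simplified poll loop mirroring the log output above."""
        while True:
            state, progress = task.poll()
            if state == "running":
                print("progress is %d%%." % progress)      # cf. api.py:434
                time.sleep(poll_interval)
            elif state == "success":
                print("completed successfully.")           # cf. api.py:444
                return
            else:
                raise RuntimeError("task failed in state %s" % state)

    wait_for_task(SimulatedTask())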
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1619.753558] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-d667a168-5c01-47dd-a85b-f4ea91c23881 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] [instance: 302edcd3-bd6e-41da-b731-4d4c1bb5c3c1] Powered off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1619.753558] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-d667a168-5c01-47dd-a85b-f4ea91c23881 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] [instance: 302edcd3-bd6e-41da-b731-4d4c1bb5c3c1] Unregistering the VM {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1619.753558] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-9911b0ab-d9dd-49a3-8ffb-32431766ebc9 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1619.754589] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31390c7a-14d3-475a-9d35-a3d5ea81b069 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1619.760317] env[62519]: DEBUG oslo_vmware.api [None req-94572272-cfe8-4ebd-bbc6-1659ba6f617f tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Task: {'id': task-1802546, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.078286} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1619.761213] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-94572272-cfe8-4ebd-bbc6-1659ba6f617f tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] [instance: c60f5d73-9d6d-4b5f-b71b-00b6b787d482] Extended root virtual disk {{(pid=62519) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1619.762034] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6467d82-e3f7-4d78-820b-33e9b34b4871 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1619.775242] env[62519]: DEBUG nova.compute.provider_tree [None req-484beabb-56ac-464b-a93a-122561b99fee tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Inventory has not changed in ProviderTree for provider: f8ca0d98-9158-4b85-ae0e-b106f966dd44 {{(pid=62519) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1619.783203] env[62519]: DEBUG oslo_vmware.api [None req-096fc0a9-25e3-439e-a204-847b80d0d183 tempest-FloatingIPsAssociationTestJSON-131832216 tempest-FloatingIPsAssociationTestJSON-131832216-project-member] Task: {'id': task-1802547, 'name': PowerOffVM_Task, 'duration_secs': 0.224762} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1619.793420] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-096fc0a9-25e3-439e-a204-847b80d0d183 tempest-FloatingIPsAssociationTestJSON-131832216 tempest-FloatingIPsAssociationTestJSON-131832216-project-member] [instance: 9ac3344d-219a-487f-b83f-96c17cd86dad] Powered off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1619.793613] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-096fc0a9-25e3-439e-a204-847b80d0d183 tempest-FloatingIPsAssociationTestJSON-131832216 tempest-FloatingIPsAssociationTestJSON-131832216-project-member] [instance: 9ac3344d-219a-487f-b83f-96c17cd86dad] Unregistering the VM {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1619.802349] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-94572272-cfe8-4ebd-bbc6-1659ba6f617f tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] [instance: c60f5d73-9d6d-4b5f-b71b-00b6b787d482] Reconfiguring VM instance instance-00000036 to attach disk [datastore1] c60f5d73-9d6d-4b5f-b71b-00b6b787d482/c60f5d73-9d6d-4b5f-b71b-00b6b787d482.vmdk or device None with type sparse {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1619.805242] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c400772b-8ff5-42ef-a71e-82c6bb71a622 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1619.806588] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3b5c94b7-31be-4440-a1c7-3023cced81fe {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1619.829379] env[62519]: DEBUG oslo_vmware.api [None req-94572272-cfe8-4ebd-bbc6-1659ba6f617f tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Waiting for the task: (returnval){ [ 1619.829379] env[62519]: value = "task-1802550" [ 1619.829379] env[62519]: _type = "Task" [ 1619.829379] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1619.838971] env[62519]: DEBUG oslo_vmware.api [None req-94572272-cfe8-4ebd-bbc6-1659ba6f617f tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Task: {'id': task-1802550, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1619.863762] env[62519]: DEBUG oslo_vmware.api [None req-64126044-6a15-4d18-9748-0958bc8df406 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Task: {'id': task-1802544, 'name': ReconfigVM_Task} progress is 99%. 
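Taken together, the ImagesTestJSON entries trace the disk side of spawning instance c60f5d73-9d6d-4b5f-b71b-00b6b787d482: copy the cached image VMDK into the instance directory (CopyVirtualDisk_Task), extend it to the flavor's root disk size (ExtendVirtualDisk_Task), then attach it to the VM with a reconfigure (ReconfigVM_Task). The outline below is only a sketch of that sequence against a hypothetical vmdk_ops helper; the helper and its method names are invented for illustration and are not Nova's vm_util/volumeops API.

    def prepare_root_disk(vmdk_ops, image_id, instance_uuid, root_gb,
                          datastore="datastore1"):
        """Illustrative copy -> extend -> attach flow, as seen in the log above."""
        cache_path = "[%s] devstack-image-cache_base/%s/%s.vmdk" % (
            datastore, image_id, image_id)
        inst_path = "[%s] %s/%s.vmdk" % (datastore, instance_uuid, instance_uuid)

        vmdk_ops.copy_virtual_disk(cache_path, inst_path)     # CopyVirtualDisk_Task
        # Size in KB: root_gb=1 gives 1048576, matching "Extending root virtual
        # disk to 1048576" above.
        vmdk_ops.extend_virtual_disk(inst_path, root_gb * 1024 * 1024)
        vmdk_ops.attach_disk_to_vm(instance_uuid, inst_path)  # ReconfigVM_Task
        return inst_path

    class RecordingOps:
        """Minimal stub standing in for the hypothetical vmdk_ops helper."""
        def __getattr__(self, name):
            return lambda *args: print(name, args)

    prepare_root_disk(RecordingOps(),
                      "15793716-f1d9-4a86-9030-717adf498693",
                      "c60f5d73-9d6d-4b5f-b71b-00b6b787d482", root_gb=1)

Each of those vCenter tasks is polled in the same way as the tasks above until it reports completion.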
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1619.883343] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-d667a168-5c01-47dd-a85b-f4ea91c23881 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] [instance: 302edcd3-bd6e-41da-b731-4d4c1bb5c3c1] Unregistered the VM {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1619.883571] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-d667a168-5c01-47dd-a85b-f4ea91c23881 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] [instance: 302edcd3-bd6e-41da-b731-4d4c1bb5c3c1] Deleting contents of the VM from datastore datastore1 {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1619.883796] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-d667a168-5c01-47dd-a85b-f4ea91c23881 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Deleting the datastore file [datastore1] 302edcd3-bd6e-41da-b731-4d4c1bb5c3c1 {{(pid=62519) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1619.884020] env[62519]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-7240ec5f-1a6e-4a54-b4a3-09388f1763a9 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1619.891900] env[62519]: DEBUG oslo_vmware.api [None req-d667a168-5c01-47dd-a85b-f4ea91c23881 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Waiting for the task: (returnval){ [ 1619.891900] env[62519]: value = "task-1802551" [ 1619.891900] env[62519]: _type = "Task" [ 1619.891900] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1619.903943] env[62519]: DEBUG oslo_vmware.api [None req-d667a168-5c01-47dd-a85b-f4ea91c23881 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Task: {'id': task-1802551, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1619.906150] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-096fc0a9-25e3-439e-a204-847b80d0d183 tempest-FloatingIPsAssociationTestJSON-131832216 tempest-FloatingIPsAssociationTestJSON-131832216-project-member] [instance: 9ac3344d-219a-487f-b83f-96c17cd86dad] Unregistered the VM {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1619.906372] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-096fc0a9-25e3-439e-a204-847b80d0d183 tempest-FloatingIPsAssociationTestJSON-131832216 tempest-FloatingIPsAssociationTestJSON-131832216-project-member] [instance: 9ac3344d-219a-487f-b83f-96c17cd86dad] Deleting contents of the VM from datastore datastore1 {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1619.909557] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-096fc0a9-25e3-439e-a204-847b80d0d183 tempest-FloatingIPsAssociationTestJSON-131832216 tempest-FloatingIPsAssociationTestJSON-131832216-project-member] Deleting the datastore file [datastore1] 9ac3344d-219a-487f-b83f-96c17cd86dad {{(pid=62519) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1619.909557] env[62519]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-cf65c030-c9e9-470c-8a55-8575841e27d2 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1619.916019] env[62519]: DEBUG oslo_vmware.api [None req-096fc0a9-25e3-439e-a204-847b80d0d183 tempest-FloatingIPsAssociationTestJSON-131832216 tempest-FloatingIPsAssociationTestJSON-131832216-project-member] Waiting for the task: (returnval){ [ 1619.916019] env[62519]: value = "task-1802552" [ 1619.916019] env[62519]: _type = "Task" [ 1619.916019] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1619.922631] env[62519]: DEBUG oslo_vmware.api [None req-096fc0a9-25e3-439e-a204-847b80d0d183 tempest-FloatingIPsAssociationTestJSON-131832216 tempest-FloatingIPsAssociationTestJSON-131832216-project-member] Task: {'id': task-1802552, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1620.033387] env[62519]: DEBUG oslo_concurrency.lockutils [None req-887fdd97-c9a0-4663-ab52-0f67cebf30b9 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1620.086677] env[62519]: DEBUG nova.compute.manager [None req-852acb1c-a919-4af2-9465-5ce6a7ca4001 tempest-TenantUsagesTestJSON-1732852836 tempest-TenantUsagesTestJSON-1732852836-project-member] [instance: 540167be-5295-4e28-9b25-16317746dd0e] Start spawning the instance on the hypervisor. 
{{(pid=62519) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2647}} [ 1620.131564] env[62519]: DEBUG nova.virt.hardware [None req-852acb1c-a919-4af2-9465-5ce6a7ca4001 tempest-TenantUsagesTestJSON-1732852836 tempest-TenantUsagesTestJSON-1732852836-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T07:56:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T07:55:55Z,direct_url=,disk_format='vmdk',id=15793716-f1d9-4a86-9030-717adf498693,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4069337684d348e0a1ab4eb6a1a2b14d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T07:55:56Z,virtual_size=,visibility=), allow threads: False {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1620.131793] env[62519]: DEBUG nova.virt.hardware [None req-852acb1c-a919-4af2-9465-5ce6a7ca4001 tempest-TenantUsagesTestJSON-1732852836 tempest-TenantUsagesTestJSON-1732852836-project-member] Flavor limits 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1620.132036] env[62519]: DEBUG nova.virt.hardware [None req-852acb1c-a919-4af2-9465-5ce6a7ca4001 tempest-TenantUsagesTestJSON-1732852836 tempest-TenantUsagesTestJSON-1732852836-project-member] Image limits 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1620.132630] env[62519]: DEBUG nova.virt.hardware [None req-852acb1c-a919-4af2-9465-5ce6a7ca4001 tempest-TenantUsagesTestJSON-1732852836 tempest-TenantUsagesTestJSON-1732852836-project-member] Flavor pref 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1620.132630] env[62519]: DEBUG nova.virt.hardware [None req-852acb1c-a919-4af2-9465-5ce6a7ca4001 tempest-TenantUsagesTestJSON-1732852836 tempest-TenantUsagesTestJSON-1732852836-project-member] Image pref 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1620.132630] env[62519]: DEBUG nova.virt.hardware [None req-852acb1c-a919-4af2-9465-5ce6a7ca4001 tempest-TenantUsagesTestJSON-1732852836 tempest-TenantUsagesTestJSON-1732852836-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1620.132630] env[62519]: DEBUG nova.virt.hardware [None req-852acb1c-a919-4af2-9465-5ce6a7ca4001 tempest-TenantUsagesTestJSON-1732852836 tempest-TenantUsagesTestJSON-1732852836-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1620.133085] env[62519]: DEBUG nova.virt.hardware [None req-852acb1c-a919-4af2-9465-5ce6a7ca4001 tempest-TenantUsagesTestJSON-1732852836 tempest-TenantUsagesTestJSON-1732852836-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62519) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1620.133085] env[62519]: DEBUG nova.virt.hardware [None 
req-852acb1c-a919-4af2-9465-5ce6a7ca4001 tempest-TenantUsagesTestJSON-1732852836 tempest-TenantUsagesTestJSON-1732852836-project-member] Got 1 possible topologies {{(pid=62519) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1620.133085] env[62519]: DEBUG nova.virt.hardware [None req-852acb1c-a919-4af2-9465-5ce6a7ca4001 tempest-TenantUsagesTestJSON-1732852836 tempest-TenantUsagesTestJSON-1732852836-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1620.133296] env[62519]: DEBUG nova.virt.hardware [None req-852acb1c-a919-4af2-9465-5ce6a7ca4001 tempest-TenantUsagesTestJSON-1732852836 tempest-TenantUsagesTestJSON-1732852836-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1620.134139] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3303663e-e194-4ef4-9f0a-54be8e5c9a22 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1620.146751] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95f9a5fb-b0cd-4a39-8f84-0f253586042b {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1620.173110] env[62519]: DEBUG nova.virt.hardware [None req-9d6c69a6-1fea-4ba8-a2c2-a90023b1aa8f tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T07:56:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T07:55:55Z,direct_url=,disk_format='vmdk',id=15793716-f1d9-4a86-9030-717adf498693,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4069337684d348e0a1ab4eb6a1a2b14d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T07:55:56Z,virtual_size=,visibility=), allow threads: False {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1620.173420] env[62519]: DEBUG nova.virt.hardware [None req-9d6c69a6-1fea-4ba8-a2c2-a90023b1aa8f tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Flavor limits 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1620.173583] env[62519]: DEBUG nova.virt.hardware [None req-9d6c69a6-1fea-4ba8-a2c2-a90023b1aa8f tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Image limits 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1620.173763] env[62519]: DEBUG nova.virt.hardware [None req-9d6c69a6-1fea-4ba8-a2c2-a90023b1aa8f tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Flavor pref 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1620.173905] env[62519]: DEBUG nova.virt.hardware [None 
req-9d6c69a6-1fea-4ba8-a2c2-a90023b1aa8f tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Image pref 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1620.174355] env[62519]: DEBUG nova.virt.hardware [None req-9d6c69a6-1fea-4ba8-a2c2-a90023b1aa8f tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1620.174713] env[62519]: DEBUG nova.virt.hardware [None req-9d6c69a6-1fea-4ba8-a2c2-a90023b1aa8f tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1620.175016] env[62519]: DEBUG nova.virt.hardware [None req-9d6c69a6-1fea-4ba8-a2c2-a90023b1aa8f tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62519) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1620.175312] env[62519]: DEBUG nova.virt.hardware [None req-9d6c69a6-1fea-4ba8-a2c2-a90023b1aa8f tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Got 1 possible topologies {{(pid=62519) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1620.175576] env[62519]: DEBUG nova.virt.hardware [None req-9d6c69a6-1fea-4ba8-a2c2-a90023b1aa8f tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1620.179510] env[62519]: DEBUG nova.virt.hardware [None req-9d6c69a6-1fea-4ba8-a2c2-a90023b1aa8f tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1620.179510] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3eac3011-a160-4865-945d-45e715f45002 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1620.187218] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94dbb059-c6c5-4d94-a395-de197099e2e4 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1620.203298] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-9d6c69a6-1fea-4ba8-a2c2-a90023b1aa8f tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] [instance: 34d2991e-b6df-473d-8994-e45ff57ef131] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ed:4a:67', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '8bc3fa06-9d5b-4ab1-8113-6ed8942d23b6', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'e6b9aab2-f105-4c06-b204-f0626f41ccbe', 'vif_model': 'vmxnet3'}] {{(pid=62519) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 
1620.212707] env[62519]: DEBUG oslo.service.loopingcall [None req-9d6c69a6-1fea-4ba8-a2c2-a90023b1aa8f tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62519) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1620.213295] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 34d2991e-b6df-473d-8994-e45ff57ef131] Creating VM on the ESX host {{(pid=62519) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1620.213820] env[62519]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-76a226e3-4767-4819-8d72-8ef16c722f45 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1620.229079] env[62519]: DEBUG nova.network.neutron [req-90e27a62-9d92-4e83-9c19-031ffef16a34 req-09b4827e-93d8-4ca3-a90d-28c96cd1f8ba service nova] [instance: 9ac3344d-219a-487f-b83f-96c17cd86dad] Updated VIF entry in instance network info cache for port 883790fe-b8ee-4a72-99ca-e7d80c7468f3. {{(pid=62519) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1620.229467] env[62519]: DEBUG nova.network.neutron [req-90e27a62-9d92-4e83-9c19-031ffef16a34 req-09b4827e-93d8-4ca3-a90d-28c96cd1f8ba service nova] [instance: 9ac3344d-219a-487f-b83f-96c17cd86dad] Updating instance_info_cache with network_info: [{"id": "883790fe-b8ee-4a72-99ca-e7d80c7468f3", "address": "fa:16:3e:bb:95:3b", "network": {"id": "ca655458-a420-4494-a897-206c445e8893", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-2085945802-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "08ccb6256cb446e1837e04580892a31a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "98d96b75-ac36-499a-adc2-130c8c1d55ca", "external-id": "nsx-vlan-transportzone-564", "segmentation_id": 564, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap883790fe-b8", "ovs_interfaceid": "883790fe-b8ee-4a72-99ca-e7d80c7468f3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1620.237651] env[62519]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1620.237651] env[62519]: value = "task-1802553" [ 1620.237651] env[62519]: _type = "Task" [ 1620.237651] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1620.249416] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1802553, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1620.279278] env[62519]: DEBUG nova.scheduler.client.report [None req-484beabb-56ac-464b-a93a-122561b99fee tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Inventory has not changed for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 157, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1620.342110] env[62519]: DEBUG oslo_vmware.api [None req-94572272-cfe8-4ebd-bbc6-1659ba6f617f tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Task: {'id': task-1802550, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1620.363820] env[62519]: DEBUG oslo_vmware.api [None req-64126044-6a15-4d18-9748-0958bc8df406 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Task: {'id': task-1802544, 'name': ReconfigVM_Task, 'duration_secs': 1.206755} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1620.364040] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-64126044-6a15-4d18-9748-0958bc8df406 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] [instance: a1551278-a306-4534-8d8d-3b3a003dde04] Reconfigured VM instance instance-0000002c to detach disk 2000 {{(pid=62519) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1620.364803] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-034141c1-d914-48ab-afe2-e0a89d65f38f {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1620.388039] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-64126044-6a15-4d18-9748-0958bc8df406 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] [instance: a1551278-a306-4534-8d8d-3b3a003dde04] Reconfiguring VM instance instance-0000002c to attach disk [datastore1] a1551278-a306-4534-8d8d-3b3a003dde04/a1551278-a306-4534-8d8d-3b3a003dde04.vmdk or device None with type thin {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1620.388256] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-336fb8ab-b9c0-4c28-9ff4-141284436d71 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1620.411679] env[62519]: DEBUG oslo_vmware.api [None req-d667a168-5c01-47dd-a85b-f4ea91c23881 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Task: {'id': task-1802551, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.146045} completed successfully. 
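The "Inventory has not changed for provider f8ca0d98-..." entry above dumps the inventory that the scheduler report client compares against Placement. As a small worked example, schedulable capacity per resource class is (total - reserved) * allocation_ratio; that is the standard Placement capacity formula, quoted here from memory rather than from this log.

    # Inventory copied from the report-client entry above.
    inventory = {
        "VCPU":      {"total": 48,     "reserved": 0,   "allocation_ratio": 4.0},
        "MEMORY_MB": {"total": 196590, "reserved": 512, "allocation_ratio": 1.0},
        "DISK_GB":   {"total": 400,    "reserved": 0,   "allocation_ratio": 1.0},
    }

    for rc, inv in inventory.items():
        capacity = int((inv["total"] - inv["reserved"]) * inv["allocation_ratio"])
        print("%s: schedulable capacity %d" % (rc, capacity))
    # VCPU: 192, MEMORY_MB: 196078, DISK_GB: 400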
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1620.413077] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-d667a168-5c01-47dd-a85b-f4ea91c23881 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Deleted the datastore file {{(pid=62519) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1620.413281] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-d667a168-5c01-47dd-a85b-f4ea91c23881 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] [instance: 302edcd3-bd6e-41da-b731-4d4c1bb5c3c1] Deleted contents of the VM from datastore datastore1 {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1620.413668] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-d667a168-5c01-47dd-a85b-f4ea91c23881 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] [instance: 302edcd3-bd6e-41da-b731-4d4c1bb5c3c1] Instance destroyed {{(pid=62519) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1620.413668] env[62519]: INFO nova.compute.manager [None req-d667a168-5c01-47dd-a85b-f4ea91c23881 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] [instance: 302edcd3-bd6e-41da-b731-4d4c1bb5c3c1] Took 1.23 seconds to destroy the instance on the hypervisor. [ 1620.413858] env[62519]: DEBUG oslo.service.loopingcall [None req-d667a168-5c01-47dd-a85b-f4ea91c23881 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62519) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1620.414141] env[62519]: DEBUG oslo_vmware.api [None req-64126044-6a15-4d18-9748-0958bc8df406 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Waiting for the task: (returnval){ [ 1620.414141] env[62519]: value = "task-1802554" [ 1620.414141] env[62519]: _type = "Task" [ 1620.414141] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1620.414345] env[62519]: DEBUG nova.compute.manager [-] [instance: 302edcd3-bd6e-41da-b731-4d4c1bb5c3c1] Deallocating network for instance {{(pid=62519) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2297}} [ 1620.414426] env[62519]: DEBUG nova.network.neutron [-] [instance: 302edcd3-bd6e-41da-b731-4d4c1bb5c3c1] deallocate_for_instance() {{(pid=62519) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1620.429171] env[62519]: DEBUG oslo_vmware.api [None req-096fc0a9-25e3-439e-a204-847b80d0d183 tempest-FloatingIPsAssociationTestJSON-131832216 tempest-FloatingIPsAssociationTestJSON-131832216-project-member] Task: {'id': task-1802552, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.147591} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1620.432057] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-096fc0a9-25e3-439e-a204-847b80d0d183 tempest-FloatingIPsAssociationTestJSON-131832216 tempest-FloatingIPsAssociationTestJSON-131832216-project-member] Deleted the datastore file {{(pid=62519) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1620.432278] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-096fc0a9-25e3-439e-a204-847b80d0d183 tempest-FloatingIPsAssociationTestJSON-131832216 tempest-FloatingIPsAssociationTestJSON-131832216-project-member] [instance: 9ac3344d-219a-487f-b83f-96c17cd86dad] Deleted contents of the VM from datastore datastore1 {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1620.432504] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-096fc0a9-25e3-439e-a204-847b80d0d183 tempest-FloatingIPsAssociationTestJSON-131832216 tempest-FloatingIPsAssociationTestJSON-131832216-project-member] [instance: 9ac3344d-219a-487f-b83f-96c17cd86dad] Instance destroyed {{(pid=62519) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1620.432736] env[62519]: INFO nova.compute.manager [None req-096fc0a9-25e3-439e-a204-847b80d0d183 tempest-FloatingIPsAssociationTestJSON-131832216 tempest-FloatingIPsAssociationTestJSON-131832216-project-member] [instance: 9ac3344d-219a-487f-b83f-96c17cd86dad] Took 1.21 seconds to destroy the instance on the hypervisor. [ 1620.432986] env[62519]: DEBUG oslo.service.loopingcall [None req-096fc0a9-25e3-439e-a204-847b80d0d183 tempest-FloatingIPsAssociationTestJSON-131832216 tempest-FloatingIPsAssociationTestJSON-131832216-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62519) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1620.433204] env[62519]: DEBUG oslo_vmware.api [None req-64126044-6a15-4d18-9748-0958bc8df406 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Task: {'id': task-1802554, 'name': ReconfigVM_Task} progress is 10%. 
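The oslo.service.loopingcall entries ("Waiting for function ... _deallocate_network_with_retries to return") show network deallocation being driven through a looping call so it can be retried until it succeeds. The snippet below shows the general oslo.service pattern with FixedIntervalLoopingCall and LoopingCallDone; whether Nova uses exactly this looping-call variant here is an assumption, so treat it as a generic illustration rather than the code behind these log lines.

    from oslo_service import loopingcall

    attempts = {"count": 0}

    def _retried_operation():
        """Toy retried operation standing in for the network deallocation above."""
        attempts["count"] += 1
        if attempts["count"] < 3:
            return  # not done yet; the looping call invokes us again after `interval`
        raise loopingcall.LoopingCallDone(retvalue=attempts["count"])

    timer = loopingcall.FixedIntervalLoopingCall(_retried_operation)
    result = timer.start(interval=0.1).wait()
    print("returned after %d attempts" % result)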
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1620.433437] env[62519]: DEBUG nova.compute.manager [-] [instance: 9ac3344d-219a-487f-b83f-96c17cd86dad] Deallocating network for instance {{(pid=62519) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2297}} [ 1620.433540] env[62519]: DEBUG nova.network.neutron [-] [instance: 9ac3344d-219a-487f-b83f-96c17cd86dad] deallocate_for_instance() {{(pid=62519) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1620.596388] env[62519]: DEBUG nova.network.neutron [None req-eb7b956f-b805-4696-91cf-83b33daf0afe tempest-ServersTestMultiNic-797441404 tempest-ServersTestMultiNic-797441404-project-member] [instance: d5b6b7b4-ebc4-4d3d-8e16-bf0a4e974d67] Successfully updated port: 260a58db-3019-4382-9551-1787dc70912c {{(pid=62519) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1620.732542] env[62519]: DEBUG oslo_concurrency.lockutils [req-90e27a62-9d92-4e83-9c19-031ffef16a34 req-09b4827e-93d8-4ca3-a90d-28c96cd1f8ba service nova] Releasing lock "refresh_cache-9ac3344d-219a-487f-b83f-96c17cd86dad" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1620.748779] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1802553, 'name': CreateVM_Task, 'duration_secs': 0.421761} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1620.748779] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 34d2991e-b6df-473d-8994-e45ff57ef131] Created VM on the ESX host {{(pid=62519) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1620.749077] env[62519]: DEBUG oslo_concurrency.lockutils [None req-9d6c69a6-1fea-4ba8-a2c2-a90023b1aa8f tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1620.749151] env[62519]: DEBUG oslo_concurrency.lockutils [None req-9d6c69a6-1fea-4ba8-a2c2-a90023b1aa8f tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Acquired lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1620.749463] env[62519]: DEBUG oslo_concurrency.lockutils [None req-9d6c69a6-1fea-4ba8-a2c2-a90023b1aa8f tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1620.749720] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6ebc49f1-115d-40f1-8989-5bfba47cf79e {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1620.755339] env[62519]: DEBUG oslo_vmware.api [None req-9d6c69a6-1fea-4ba8-a2c2-a90023b1aa8f tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Waiting for the task: (returnval){ [ 1620.755339] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]52716b21-8871-9097-b16d-bcd6f0498578" [ 
1620.755339] env[62519]: _type = "Task" [ 1620.755339] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1620.764293] env[62519]: DEBUG oslo_vmware.api [None req-9d6c69a6-1fea-4ba8-a2c2-a90023b1aa8f tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52716b21-8871-9097-b16d-bcd6f0498578, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1620.785397] env[62519]: DEBUG oslo_concurrency.lockutils [None req-484beabb-56ac-464b-a93a-122561b99fee tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.723s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1620.785972] env[62519]: DEBUG nova.compute.manager [None req-484beabb-56ac-464b-a93a-122561b99fee tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] [instance: 46b3a0fb-29f6-4b66-a091-2d125b69d109] Start building networks asynchronously for instance. {{(pid=62519) _build_resources /opt/stack/nova/nova/compute/manager.py:2838}} [ 1620.789521] env[62519]: DEBUG oslo_concurrency.lockutils [None req-c02b397a-4feb-4365-8cb0-fba53cb05cb2 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 29.381s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1620.789731] env[62519]: DEBUG oslo_concurrency.lockutils [None req-c02b397a-4feb-4365-8cb0-fba53cb05cb2 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.001s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1620.792255] env[62519]: DEBUG oslo_concurrency.lockutils [None req-4e8faa11-5a84-4b80-9100-d3b897a22912 tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 27.023s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1620.792633] env[62519]: DEBUG oslo_concurrency.lockutils [None req-4e8faa11-5a84-4b80-9100-d3b897a22912 tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1620.794534] env[62519]: DEBUG oslo_concurrency.lockutils [None req-4b5f69a0-fec4-435b-810a-e826d7caf87e tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 26.250s {{(pid=62519) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1620.794714] env[62519]: DEBUG oslo_concurrency.lockutils [None req-4b5f69a0-fec4-435b-810a-e826d7caf87e tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1620.797344] env[62519]: DEBUG oslo_concurrency.lockutils [None req-efec4a27-e1ca-4ade-8ea9-071f6e4c700f tempest-ServerActionsV293TestJSON-760697799 tempest-ServerActionsV293TestJSON-760697799-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 25.457s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1620.798243] env[62519]: INFO nova.compute.claims [None req-efec4a27-e1ca-4ade-8ea9-071f6e4c700f tempest-ServerActionsV293TestJSON-760697799 tempest-ServerActionsV293TestJSON-760697799-project-member] [instance: bace23b3-b7f4-4f3b-8986-0076440d096d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1620.829174] env[62519]: INFO nova.scheduler.client.report [None req-c02b397a-4feb-4365-8cb0-fba53cb05cb2 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Deleted allocations for instance d8780c40-0099-4ccc-84ae-72fbb14fa1ee [ 1620.830406] env[62519]: INFO nova.scheduler.client.report [None req-4e8faa11-5a84-4b80-9100-d3b897a22912 tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] Deleted allocations for instance 2b68e95a-1d93-43ee-b0a6-996c9042f5c7 [ 1620.851968] env[62519]: INFO nova.scheduler.client.report [None req-4b5f69a0-fec4-435b-810a-e826d7caf87e tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] Deleted allocations for instance 24cb49c8-b2ef-4ede-aea6-6e34081beca1 [ 1620.856903] env[62519]: DEBUG oslo_vmware.api [None req-94572272-cfe8-4ebd-bbc6-1659ba6f617f tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Task: {'id': task-1802550, 'name': ReconfigVM_Task, 'duration_secs': 0.523583} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1620.860039] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-94572272-cfe8-4ebd-bbc6-1659ba6f617f tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] [instance: c60f5d73-9d6d-4b5f-b71b-00b6b787d482] Reconfigured VM instance instance-00000036 to attach disk [datastore1] c60f5d73-9d6d-4b5f-b71b-00b6b787d482/c60f5d73-9d6d-4b5f-b71b-00b6b787d482.vmdk or device None with type sparse {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1620.862171] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-e27d43c5-537a-496a-8474-1a0de755e1a2 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1620.868548] env[62519]: DEBUG oslo_vmware.api [None req-94572272-cfe8-4ebd-bbc6-1659ba6f617f tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Waiting for the task: (returnval){ [ 1620.868548] env[62519]: value = "task-1802555" [ 1620.868548] env[62519]: _type = "Task" [ 1620.868548] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1620.881571] env[62519]: DEBUG oslo_vmware.api [None req-94572272-cfe8-4ebd-bbc6-1659ba6f617f tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Task: {'id': task-1802555, 'name': Rename_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1620.929803] env[62519]: DEBUG oslo_vmware.api [None req-64126044-6a15-4d18-9748-0958bc8df406 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Task: {'id': task-1802554, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1621.100763] env[62519]: DEBUG oslo_concurrency.lockutils [None req-eb7b956f-b805-4696-91cf-83b33daf0afe tempest-ServersTestMultiNic-797441404 tempest-ServersTestMultiNic-797441404-project-member] Acquiring lock "refresh_cache-d5b6b7b4-ebc4-4d3d-8e16-bf0a4e974d67" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1621.101115] env[62519]: DEBUG oslo_concurrency.lockutils [None req-eb7b956f-b805-4696-91cf-83b33daf0afe tempest-ServersTestMultiNic-797441404 tempest-ServersTestMultiNic-797441404-project-member] Acquired lock "refresh_cache-d5b6b7b4-ebc4-4d3d-8e16-bf0a4e974d67" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1621.101168] env[62519]: DEBUG nova.network.neutron [None req-eb7b956f-b805-4696-91cf-83b33daf0afe tempest-ServersTestMultiNic-797441404 tempest-ServersTestMultiNic-797441404-project-member] [instance: d5b6b7b4-ebc4-4d3d-8e16-bf0a4e974d67] Building network info cache for instance {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1621.118790] env[62519]: DEBUG nova.compute.manager [req-f6e5a01b-e806-4d21-bdc1-d673da8d4d4a req-b70c7fd1-fde2-4b62-8a32-c5e0efc1c938 service nova] [instance: 09eefc1a-011b-4d2c-ab75-a1fcee740907] Received event network-changed-7948e8a1-83dc-4329-aa44-813b4e25c1c7 {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1621.118989] env[62519]: DEBUG nova.compute.manager [req-f6e5a01b-e806-4d21-bdc1-d673da8d4d4a req-b70c7fd1-fde2-4b62-8a32-c5e0efc1c938 service nova] [instance: 09eefc1a-011b-4d2c-ab75-a1fcee740907] Refreshing instance network info cache due to event network-changed-7948e8a1-83dc-4329-aa44-813b4e25c1c7. 
{{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11628}} [ 1621.119212] env[62519]: DEBUG oslo_concurrency.lockutils [req-f6e5a01b-e806-4d21-bdc1-d673da8d4d4a req-b70c7fd1-fde2-4b62-8a32-c5e0efc1c938 service nova] Acquiring lock "refresh_cache-09eefc1a-011b-4d2c-ab75-a1fcee740907" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1621.119351] env[62519]: DEBUG oslo_concurrency.lockutils [req-f6e5a01b-e806-4d21-bdc1-d673da8d4d4a req-b70c7fd1-fde2-4b62-8a32-c5e0efc1c938 service nova] Acquired lock "refresh_cache-09eefc1a-011b-4d2c-ab75-a1fcee740907" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1621.119537] env[62519]: DEBUG nova.network.neutron [req-f6e5a01b-e806-4d21-bdc1-d673da8d4d4a req-b70c7fd1-fde2-4b62-8a32-c5e0efc1c938 service nova] [instance: 09eefc1a-011b-4d2c-ab75-a1fcee740907] Refreshing network info cache for port 7948e8a1-83dc-4329-aa44-813b4e25c1c7 {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1621.135276] env[62519]: DEBUG nova.compute.manager [req-95a50061-ead3-4039-8f6b-946a3b6488ac req-70308a15-fbd6-494b-b1c5-10946e55beca service nova] [instance: d5b6b7b4-ebc4-4d3d-8e16-bf0a4e974d67] Received event network-changed-d146cb22-9f7c-47f1-bcdf-fca468f1d2df {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1621.135499] env[62519]: DEBUG nova.compute.manager [req-95a50061-ead3-4039-8f6b-946a3b6488ac req-70308a15-fbd6-494b-b1c5-10946e55beca service nova] [instance: d5b6b7b4-ebc4-4d3d-8e16-bf0a4e974d67] Refreshing instance network info cache due to event network-changed-d146cb22-9f7c-47f1-bcdf-fca468f1d2df. {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11628}} [ 1621.135642] env[62519]: DEBUG oslo_concurrency.lockutils [req-95a50061-ead3-4039-8f6b-946a3b6488ac req-70308a15-fbd6-494b-b1c5-10946e55beca service nova] Acquiring lock "refresh_cache-d5b6b7b4-ebc4-4d3d-8e16-bf0a4e974d67" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1621.269099] env[62519]: DEBUG oslo_vmware.api [None req-9d6c69a6-1fea-4ba8-a2c2-a90023b1aa8f tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52716b21-8871-9097-b16d-bcd6f0498578, 'name': SearchDatastore_Task, 'duration_secs': 0.010942} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1621.269431] env[62519]: DEBUG oslo_concurrency.lockutils [None req-9d6c69a6-1fea-4ba8-a2c2-a90023b1aa8f tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Releasing lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1621.269663] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-9d6c69a6-1fea-4ba8-a2c2-a90023b1aa8f tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] [instance: 34d2991e-b6df-473d-8994-e45ff57ef131] Processing image 15793716-f1d9-4a86-9030-717adf498693 {{(pid=62519) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1621.269902] env[62519]: DEBUG oslo_concurrency.lockutils [None req-9d6c69a6-1fea-4ba8-a2c2-a90023b1aa8f tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1621.270020] env[62519]: DEBUG oslo_concurrency.lockutils [None req-9d6c69a6-1fea-4ba8-a2c2-a90023b1aa8f tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Acquired lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1621.270202] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-9d6c69a6-1fea-4ba8-a2c2-a90023b1aa8f tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62519) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1621.270485] env[62519]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d9738d77-1821-4725-a84a-322c34e2dc67 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1621.281699] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-9d6c69a6-1fea-4ba8-a2c2-a90023b1aa8f tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62519) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1621.282018] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-9d6c69a6-1fea-4ba8-a2c2-a90023b1aa8f tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62519) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1621.282662] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-253345db-7b3b-4b1c-8e7c-df0010be8845 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1621.288635] env[62519]: DEBUG oslo_vmware.api [None req-9d6c69a6-1fea-4ba8-a2c2-a90023b1aa8f tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Waiting for the task: (returnval){ [ 1621.288635] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]52987998-6aff-ad34-1447-80bcde0232e4" [ 1621.288635] env[62519]: _type = "Task" [ 1621.288635] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1621.302130] env[62519]: DEBUG oslo_vmware.api [None req-9d6c69a6-1fea-4ba8-a2c2-a90023b1aa8f tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52987998-6aff-ad34-1447-80bcde0232e4, 'name': SearchDatastore_Task, 'duration_secs': 0.009683} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1621.302130] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a79196ab-b4ea-475d-84c4-49742ef41062 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1621.305253] env[62519]: DEBUG nova.compute.utils [None req-484beabb-56ac-464b-a93a-122561b99fee tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Using /dev/sd instead of None {{(pid=62519) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1621.308838] env[62519]: DEBUG nova.compute.manager [None req-484beabb-56ac-464b-a93a-122561b99fee tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] [instance: 46b3a0fb-29f6-4b66-a091-2d125b69d109] Allocating IP information in the background. {{(pid=62519) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1989}} [ 1621.308986] env[62519]: DEBUG nova.network.neutron [None req-484beabb-56ac-464b-a93a-122561b99fee tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] [instance: 46b3a0fb-29f6-4b66-a091-2d125b69d109] allocate_for_instance() {{(pid=62519) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1621.315315] env[62519]: DEBUG oslo_vmware.api [None req-9d6c69a6-1fea-4ba8-a2c2-a90023b1aa8f tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Waiting for the task: (returnval){ [ 1621.315315] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]52fdac3e-9df3-90c6-4820-1b6d75594485" [ 1621.315315] env[62519]: _type = "Task" [ 1621.315315] env[62519]: } to complete. 
{{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1621.326645] env[62519]: DEBUG oslo_vmware.api [None req-9d6c69a6-1fea-4ba8-a2c2-a90023b1aa8f tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52fdac3e-9df3-90c6-4820-1b6d75594485, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1621.352294] env[62519]: DEBUG oslo_concurrency.lockutils [None req-c02b397a-4feb-4365-8cb0-fba53cb05cb2 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Lock "d8780c40-0099-4ccc-84ae-72fbb14fa1ee" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 35.529s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1621.353527] env[62519]: DEBUG oslo_concurrency.lockutils [None req-4e8faa11-5a84-4b80-9100-d3b897a22912 tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] Lock "2b68e95a-1d93-43ee-b0a6-996c9042f5c7" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 31.694s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1621.364525] env[62519]: DEBUG oslo_concurrency.lockutils [None req-4b5f69a0-fec4-435b-810a-e826d7caf87e tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] Lock "24cb49c8-b2ef-4ede-aea6-6e34081beca1" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 30.880s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1621.368085] env[62519]: DEBUG nova.policy [None req-484beabb-56ac-464b-a93a-122561b99fee tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b2fdec6fcda84ddeaaa1ee4ba6a58258', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '17cd969b1e7d4bd795748560caf80077', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62519) authorize /opt/stack/nova/nova/policy.py:192}} [ 1621.387699] env[62519]: DEBUG oslo_vmware.api [None req-94572272-cfe8-4ebd-bbc6-1659ba6f617f tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Task: {'id': task-1802555, 'name': Rename_Task} progress is 14%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1621.400897] env[62519]: DEBUG nova.network.neutron [-] [instance: 302edcd3-bd6e-41da-b731-4d4c1bb5c3c1] Updating instance_info_cache with network_info: [] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1621.406266] env[62519]: DEBUG nova.network.neutron [-] [instance: 9ac3344d-219a-487f-b83f-96c17cd86dad] Updating instance_info_cache with network_info: [] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1621.431306] env[62519]: DEBUG oslo_vmware.api [None req-64126044-6a15-4d18-9748-0958bc8df406 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Task: {'id': task-1802554, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1621.663166] env[62519]: DEBUG nova.network.neutron [None req-eb7b956f-b805-4696-91cf-83b33daf0afe tempest-ServersTestMultiNic-797441404 tempest-ServersTestMultiNic-797441404-project-member] [instance: d5b6b7b4-ebc4-4d3d-8e16-bf0a4e974d67] Instance cache missing network info. {{(pid=62519) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1621.810433] env[62519]: DEBUG nova.compute.manager [None req-484beabb-56ac-464b-a93a-122561b99fee tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] [instance: 46b3a0fb-29f6-4b66-a091-2d125b69d109] Start building block device mappings for instance. {{(pid=62519) _build_resources /opt/stack/nova/nova/compute/manager.py:2873}} [ 1621.828679] env[62519]: DEBUG nova.network.neutron [None req-484beabb-56ac-464b-a93a-122561b99fee tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] [instance: 46b3a0fb-29f6-4b66-a091-2d125b69d109] Successfully created port: bcc785a2-3385-4fe4-85fc-7540000eb36b {{(pid=62519) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1621.838531] env[62519]: DEBUG oslo_vmware.api [None req-9d6c69a6-1fea-4ba8-a2c2-a90023b1aa8f tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52fdac3e-9df3-90c6-4820-1b6d75594485, 'name': SearchDatastore_Task, 'duration_secs': 0.010591} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1621.838831] env[62519]: DEBUG oslo_concurrency.lockutils [None req-9d6c69a6-1fea-4ba8-a2c2-a90023b1aa8f tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Releasing lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1621.839278] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-9d6c69a6-1fea-4ba8-a2c2-a90023b1aa8f tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk to [datastore1] 34d2991e-b6df-473d-8994-e45ff57ef131/34d2991e-b6df-473d-8994-e45ff57ef131.vmdk {{(pid=62519) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1621.839926] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-2ac84ea3-85a0-42e4-8a78-bd7e1cf8ab28 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1621.850843] env[62519]: DEBUG oslo_vmware.api [None req-9d6c69a6-1fea-4ba8-a2c2-a90023b1aa8f tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Waiting for the task: (returnval){ [ 1621.850843] env[62519]: value = "task-1802556" [ 1621.850843] env[62519]: _type = "Task" [ 1621.850843] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1621.876494] env[62519]: DEBUG oslo_vmware.api [None req-9d6c69a6-1fea-4ba8-a2c2-a90023b1aa8f tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Task: {'id': task-1802556, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1621.892769] env[62519]: DEBUG oslo_vmware.api [None req-94572272-cfe8-4ebd-bbc6-1659ba6f617f tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Task: {'id': task-1802555, 'name': Rename_Task} progress is 99%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1621.903220] env[62519]: INFO nova.compute.manager [-] [instance: 302edcd3-bd6e-41da-b731-4d4c1bb5c3c1] Took 1.49 seconds to deallocate network for instance. [ 1621.907778] env[62519]: INFO nova.compute.manager [-] [instance: 9ac3344d-219a-487f-b83f-96c17cd86dad] Took 1.47 seconds to deallocate network for instance. [ 1621.942840] env[62519]: DEBUG oslo_vmware.api [None req-64126044-6a15-4d18-9748-0958bc8df406 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Task: {'id': task-1802554, 'name': ReconfigVM_Task, 'duration_secs': 1.390135} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1621.944032] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-64126044-6a15-4d18-9748-0958bc8df406 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] [instance: a1551278-a306-4534-8d8d-3b3a003dde04] Reconfigured VM instance instance-0000002c to attach disk [datastore1] a1551278-a306-4534-8d8d-3b3a003dde04/a1551278-a306-4534-8d8d-3b3a003dde04.vmdk or device None with type thin {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1621.944032] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-64126044-6a15-4d18-9748-0958bc8df406 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] [instance: a1551278-a306-4534-8d8d-3b3a003dde04] Updating instance 'a1551278-a306-4534-8d8d-3b3a003dde04' progress to 50 {{(pid=62519) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1621.986180] env[62519]: DEBUG nova.network.neutron [None req-852acb1c-a919-4af2-9465-5ce6a7ca4001 tempest-TenantUsagesTestJSON-1732852836 tempest-TenantUsagesTestJSON-1732852836-project-member] [instance: 540167be-5295-4e28-9b25-16317746dd0e] Successfully updated port: 028b3466-6a8c-46ec-81a2-43206a5852ec {{(pid=62519) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1622.230202] env[62519]: DEBUG nova.network.neutron [req-f6e5a01b-e806-4d21-bdc1-d673da8d4d4a req-b70c7fd1-fde2-4b62-8a32-c5e0efc1c938 service nova] [instance: 09eefc1a-011b-4d2c-ab75-a1fcee740907] Updated VIF entry in instance network info cache for port 7948e8a1-83dc-4329-aa44-813b4e25c1c7. {{(pid=62519) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1622.230559] env[62519]: DEBUG nova.network.neutron [req-f6e5a01b-e806-4d21-bdc1-d673da8d4d4a req-b70c7fd1-fde2-4b62-8a32-c5e0efc1c938 service nova] [instance: 09eefc1a-011b-4d2c-ab75-a1fcee740907] Updating instance_info_cache with network_info: [{"id": "7948e8a1-83dc-4329-aa44-813b4e25c1c7", "address": "fa:16:3e:dd:49:e8", "network": {"id": "ca655458-a420-4494-a897-206c445e8893", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-2085945802-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.226", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "08ccb6256cb446e1837e04580892a31a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "98d96b75-ac36-499a-adc2-130c8c1d55ca", "external-id": "nsx-vlan-transportzone-564", "segmentation_id": 564, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7948e8a1-83", "ovs_interfaceid": "7948e8a1-83dc-4329-aa44-813b4e25c1c7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1622.352977] env[62519]: DEBUG nova.network.neutron [None req-eb7b956f-b805-4696-91cf-83b33daf0afe tempest-ServersTestMultiNic-797441404 
tempest-ServersTestMultiNic-797441404-project-member] [instance: d5b6b7b4-ebc4-4d3d-8e16-bf0a4e974d67] Updating instance_info_cache with network_info: [{"id": "d146cb22-9f7c-47f1-bcdf-fca468f1d2df", "address": "fa:16:3e:28:5a:e0", "network": {"id": "8baaf922-141e-435f-b06b-b412fb43897f", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1913709114", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.24", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "08813c8c5d0b45dbab5a05ed08ef9531", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5ba07329-1d3e-4ba8-8774-d029262318c4", "external-id": "nsx-vlan-transportzone-534", "segmentation_id": 534, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd146cb22-9f", "ovs_interfaceid": "d146cb22-9f7c-47f1-bcdf-fca468f1d2df", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "260a58db-3019-4382-9551-1787dc70912c", "address": "fa:16:3e:7f:88:2f", "network": {"id": "fad682b4-46f3-4c2d-a1f1-b5eb2bf04900", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1253928678", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.133", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "08813c8c5d0b45dbab5a05ed08ef9531", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "209639b9-c313-4b35-86dc-dccd744d174a", "external-id": "nsx-vlan-transportzone-868", "segmentation_id": 868, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap260a58db-30", "ovs_interfaceid": "260a58db-3019-4382-9551-1787dc70912c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1622.353885] env[62519]: WARNING oslo_messaging._drivers.amqpdriver [None req-eb7b956f-b805-4696-91cf-83b33daf0afe tempest-ServersTestMultiNic-797441404 tempest-ServersTestMultiNic-797441404-project-member] Number of call queues is 11, greater than warning threshold: 10. There could be a leak. Increasing threshold to: 20 [ 1622.369412] env[62519]: DEBUG oslo_vmware.api [None req-9d6c69a6-1fea-4ba8-a2c2-a90023b1aa8f tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Task: {'id': task-1802556, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.50397} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1622.370897] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-9d6c69a6-1fea-4ba8-a2c2-a90023b1aa8f tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk to [datastore1] 34d2991e-b6df-473d-8994-e45ff57ef131/34d2991e-b6df-473d-8994-e45ff57ef131.vmdk {{(pid=62519) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1622.370897] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-9d6c69a6-1fea-4ba8-a2c2-a90023b1aa8f tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] [instance: 34d2991e-b6df-473d-8994-e45ff57ef131] Extending root virtual disk to 1048576 {{(pid=62519) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1622.370897] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-d32672e1-4122-4aee-93d2-6fab5a25ad7a {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1622.383290] env[62519]: DEBUG oslo_vmware.api [None req-9d6c69a6-1fea-4ba8-a2c2-a90023b1aa8f tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Waiting for the task: (returnval){ [ 1622.383290] env[62519]: value = "task-1802557" [ 1622.383290] env[62519]: _type = "Task" [ 1622.383290] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1622.395140] env[62519]: DEBUG oslo_vmware.api [None req-94572272-cfe8-4ebd-bbc6-1659ba6f617f tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Task: {'id': task-1802555, 'name': Rename_Task, 'duration_secs': 1.024167} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1622.396058] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-94572272-cfe8-4ebd-bbc6-1659ba6f617f tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] [instance: c60f5d73-9d6d-4b5f-b71b-00b6b787d482] Powering on the VM {{(pid=62519) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1622.397141] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-3d5a6511-7862-4521-a1be-06e1c2bdd3e3 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1622.404109] env[62519]: DEBUG oslo_vmware.api [None req-9d6c69a6-1fea-4ba8-a2c2-a90023b1aa8f tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Task: {'id': task-1802557, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1622.408995] env[62519]: DEBUG oslo_concurrency.lockutils [None req-d667a168-5c01-47dd-a85b-f4ea91c23881 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1622.410941] env[62519]: DEBUG oslo_vmware.api [None req-94572272-cfe8-4ebd-bbc6-1659ba6f617f tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Waiting for the task: (returnval){ [ 1622.410941] env[62519]: value = "task-1802558" [ 1622.410941] env[62519]: _type = "Task" [ 1622.410941] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1622.418230] env[62519]: DEBUG oslo_concurrency.lockutils [None req-096fc0a9-25e3-439e-a204-847b80d0d183 tempest-FloatingIPsAssociationTestJSON-131832216 tempest-FloatingIPsAssociationTestJSON-131832216-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1622.424998] env[62519]: DEBUG oslo_vmware.api [None req-94572272-cfe8-4ebd-bbc6-1659ba6f617f tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Task: {'id': task-1802558, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1622.445342] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44606fc7-484f-4844-8cbb-736b3086e549 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1622.450552] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6b265a4-b222-4519-91af-60da59ff3c59 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1622.476017] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bcc7d5e5-1dba-47fd-aa88-924eefb4fd18 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1622.480622] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9677998e-946d-4e82-a9d1-d5807e5d33ed {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1622.500176] env[62519]: DEBUG oslo_concurrency.lockutils [None req-852acb1c-a919-4af2-9465-5ce6a7ca4001 tempest-TenantUsagesTestJSON-1732852836 tempest-TenantUsagesTestJSON-1732852836-project-member] Acquiring lock "refresh_cache-540167be-5295-4e28-9b25-16317746dd0e" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1622.500329] env[62519]: DEBUG oslo_concurrency.lockutils [None req-852acb1c-a919-4af2-9465-5ce6a7ca4001 tempest-TenantUsagesTestJSON-1732852836 tempest-TenantUsagesTestJSON-1732852836-project-member] Acquired lock "refresh_cache-540167be-5295-4e28-9b25-16317746dd0e" {{(pid=62519) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1622.500535] env[62519]: DEBUG nova.network.neutron [None req-852acb1c-a919-4af2-9465-5ce6a7ca4001 tempest-TenantUsagesTestJSON-1732852836 tempest-TenantUsagesTestJSON-1732852836-project-member] [instance: 540167be-5295-4e28-9b25-16317746dd0e] Building network info cache for instance {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1622.501738] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-64126044-6a15-4d18-9748-0958bc8df406 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] [instance: a1551278-a306-4534-8d8d-3b3a003dde04] Updating instance 'a1551278-a306-4534-8d8d-3b3a003dde04' progress to 67 {{(pid=62519) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1622.531904] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f0e754a-9cb7-4783-96aa-3cab343cd1e5 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1622.540244] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-630f76e1-f71c-4a38-a6ab-ff74875e2441 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1622.556249] env[62519]: DEBUG nova.compute.provider_tree [None req-efec4a27-e1ca-4ade-8ea9-071f6e4c700f tempest-ServerActionsV293TestJSON-760697799 tempest-ServerActionsV293TestJSON-760697799-project-member] Inventory has not changed in ProviderTree for provider: f8ca0d98-9158-4b85-ae0e-b106f966dd44 {{(pid=62519) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1622.684020] env[62519]: DEBUG oslo_concurrency.lockutils [None req-8c697949-3a2a-4b38-83ce-543b154f5eb6 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Acquiring lock "80ef3fd4-b9ef-4fd2-a991-feec78a0c81d" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1622.684303] env[62519]: DEBUG oslo_concurrency.lockutils [None req-8c697949-3a2a-4b38-83ce-543b154f5eb6 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Lock "80ef3fd4-b9ef-4fd2-a991-feec78a0c81d" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1622.684534] env[62519]: DEBUG oslo_concurrency.lockutils [None req-8c697949-3a2a-4b38-83ce-543b154f5eb6 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Acquiring lock "80ef3fd4-b9ef-4fd2-a991-feec78a0c81d-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1622.684719] env[62519]: DEBUG oslo_concurrency.lockutils [None req-8c697949-3a2a-4b38-83ce-543b154f5eb6 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Lock "80ef3fd4-b9ef-4fd2-a991-feec78a0c81d-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62519) 
inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1622.684884] env[62519]: DEBUG oslo_concurrency.lockutils [None req-8c697949-3a2a-4b38-83ce-543b154f5eb6 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Lock "80ef3fd4-b9ef-4fd2-a991-feec78a0c81d-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1622.688667] env[62519]: INFO nova.compute.manager [None req-8c697949-3a2a-4b38-83ce-543b154f5eb6 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] [instance: 80ef3fd4-b9ef-4fd2-a991-feec78a0c81d] Terminating instance [ 1622.738483] env[62519]: DEBUG oslo_concurrency.lockutils [req-f6e5a01b-e806-4d21-bdc1-d673da8d4d4a req-b70c7fd1-fde2-4b62-8a32-c5e0efc1c938 service nova] Releasing lock "refresh_cache-09eefc1a-011b-4d2c-ab75-a1fcee740907" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1622.829426] env[62519]: DEBUG nova.compute.manager [None req-484beabb-56ac-464b-a93a-122561b99fee tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] [instance: 46b3a0fb-29f6-4b66-a091-2d125b69d109] Start spawning the instance on the hypervisor. {{(pid=62519) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2647}} [ 1622.859543] env[62519]: DEBUG nova.virt.hardware [None req-484beabb-56ac-464b-a93a-122561b99fee tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T07:56:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T07:55:55Z,direct_url=,disk_format='vmdk',id=15793716-f1d9-4a86-9030-717adf498693,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4069337684d348e0a1ab4eb6a1a2b14d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T07:55:56Z,virtual_size=,visibility=), allow threads: False {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1622.859774] env[62519]: DEBUG nova.virt.hardware [None req-484beabb-56ac-464b-a93a-122561b99fee tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Flavor limits 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1622.859932] env[62519]: DEBUG nova.virt.hardware [None req-484beabb-56ac-464b-a93a-122561b99fee tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Image limits 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1622.860130] env[62519]: DEBUG nova.virt.hardware [None req-484beabb-56ac-464b-a93a-122561b99fee tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Flavor pref 0:0:0 {{(pid=62519) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:392}} [ 1622.860338] env[62519]: DEBUG nova.virt.hardware [None req-484beabb-56ac-464b-a93a-122561b99fee tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Image pref 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1622.860418] env[62519]: DEBUG nova.virt.hardware [None req-484beabb-56ac-464b-a93a-122561b99fee tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1622.860621] env[62519]: DEBUG nova.virt.hardware [None req-484beabb-56ac-464b-a93a-122561b99fee tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1622.860775] env[62519]: DEBUG nova.virt.hardware [None req-484beabb-56ac-464b-a93a-122561b99fee tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62519) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1622.860937] env[62519]: DEBUG nova.virt.hardware [None req-484beabb-56ac-464b-a93a-122561b99fee tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Got 1 possible topologies {{(pid=62519) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1622.861130] env[62519]: DEBUG nova.virt.hardware [None req-484beabb-56ac-464b-a93a-122561b99fee tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1622.861273] env[62519]: DEBUG nova.virt.hardware [None req-484beabb-56ac-464b-a93a-122561b99fee tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1622.861768] env[62519]: DEBUG oslo_concurrency.lockutils [None req-eb7b956f-b805-4696-91cf-83b33daf0afe tempest-ServersTestMultiNic-797441404 tempest-ServersTestMultiNic-797441404-project-member] Releasing lock "refresh_cache-d5b6b7b4-ebc4-4d3d-8e16-bf0a4e974d67" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1622.862045] env[62519]: DEBUG nova.compute.manager [None req-eb7b956f-b805-4696-91cf-83b33daf0afe tempest-ServersTestMultiNic-797441404 tempest-ServersTestMultiNic-797441404-project-member] [instance: d5b6b7b4-ebc4-4d3d-8e16-bf0a4e974d67] Instance network_info: |[{"id": "d146cb22-9f7c-47f1-bcdf-fca468f1d2df", "address": "fa:16:3e:28:5a:e0", "network": {"id": "8baaf922-141e-435f-b06b-b412fb43897f", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1913709114", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.24", "type": "fixed", "version": 
4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "08813c8c5d0b45dbab5a05ed08ef9531", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5ba07329-1d3e-4ba8-8774-d029262318c4", "external-id": "nsx-vlan-transportzone-534", "segmentation_id": 534, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd146cb22-9f", "ovs_interfaceid": "d146cb22-9f7c-47f1-bcdf-fca468f1d2df", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "260a58db-3019-4382-9551-1787dc70912c", "address": "fa:16:3e:7f:88:2f", "network": {"id": "fad682b4-46f3-4c2d-a1f1-b5eb2bf04900", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1253928678", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.133", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "08813c8c5d0b45dbab5a05ed08ef9531", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "209639b9-c313-4b35-86dc-dccd744d174a", "external-id": "nsx-vlan-transportzone-868", "segmentation_id": 868, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap260a58db-30", "ovs_interfaceid": "260a58db-3019-4382-9551-1787dc70912c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62519) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2004}} [ 1622.862842] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1e70c0d-5b98-4013-a155-300dae184650 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1622.865485] env[62519]: DEBUG oslo_concurrency.lockutils [req-95a50061-ead3-4039-8f6b-946a3b6488ac req-70308a15-fbd6-494b-b1c5-10946e55beca service nova] Acquired lock "refresh_cache-d5b6b7b4-ebc4-4d3d-8e16-bf0a4e974d67" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1622.865665] env[62519]: DEBUG nova.network.neutron [req-95a50061-ead3-4039-8f6b-946a3b6488ac req-70308a15-fbd6-494b-b1c5-10946e55beca service nova] [instance: d5b6b7b4-ebc4-4d3d-8e16-bf0a4e974d67] Refreshing network info cache for port d146cb22-9f7c-47f1-bcdf-fca468f1d2df {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1622.866781] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-eb7b956f-b805-4696-91cf-83b33daf0afe tempest-ServersTestMultiNic-797441404 tempest-ServersTestMultiNic-797441404-project-member] [instance: d5b6b7b4-ebc4-4d3d-8e16-bf0a4e974d67] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:28:5a:e0', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '5ba07329-1d3e-4ba8-8774-d029262318c4', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'd146cb22-9f7c-47f1-bcdf-fca468f1d2df', 'vif_model': 'vmxnet3'}, 
{'network_name': 'br-int', 'mac_address': 'fa:16:3e:7f:88:2f', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '209639b9-c313-4b35-86dc-dccd744d174a', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '260a58db-3019-4382-9551-1787dc70912c', 'vif_model': 'vmxnet3'}] {{(pid=62519) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1622.876251] env[62519]: DEBUG oslo.service.loopingcall [None req-eb7b956f-b805-4696-91cf-83b33daf0afe tempest-ServersTestMultiNic-797441404 tempest-ServersTestMultiNic-797441404-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62519) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1622.880309] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d5b6b7b4-ebc4-4d3d-8e16-bf0a4e974d67] Creating VM on the ESX host {{(pid=62519) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1622.881334] env[62519]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-4748a88b-2c57-4e69-865a-ec6399df5ae1 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1622.905366] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-424fdaa6-82a3-4e50-8845-c065fe857afb {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1622.910677] env[62519]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1622.910677] env[62519]: value = "task-1802559" [ 1622.910677] env[62519]: _type = "Task" [ 1622.910677] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1622.929113] env[62519]: DEBUG oslo_vmware.api [None req-9d6c69a6-1fea-4ba8-a2c2-a90023b1aa8f tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Task: {'id': task-1802557, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.072882} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1622.930411] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-9d6c69a6-1fea-4ba8-a2c2-a90023b1aa8f tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] [instance: 34d2991e-b6df-473d-8994-e45ff57ef131] Extended root virtual disk {{(pid=62519) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1622.931672] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3917139d-610c-444f-ba59-9548c55bd0c4 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1622.942996] env[62519]: DEBUG oslo_vmware.api [None req-94572272-cfe8-4ebd-bbc6-1659ba6f617f tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Task: {'id': task-1802558, 'name': PowerOnVM_Task} progress is 100%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1622.943179] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1802559, 'name': CreateVM_Task} progress is 5%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1622.970624] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-9d6c69a6-1fea-4ba8-a2c2-a90023b1aa8f tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] [instance: 34d2991e-b6df-473d-8994-e45ff57ef131] Reconfiguring VM instance instance-0000000a to attach disk [datastore1] 34d2991e-b6df-473d-8994-e45ff57ef131/34d2991e-b6df-473d-8994-e45ff57ef131.vmdk or device None with type sparse {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1622.970989] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-85b0eff6-bf62-4e30-9bec-6fe1b649756d {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1622.993499] env[62519]: DEBUG oslo_vmware.api [None req-9d6c69a6-1fea-4ba8-a2c2-a90023b1aa8f tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Waiting for the task: (returnval){ [ 1622.993499] env[62519]: value = "task-1802560" [ 1622.993499] env[62519]: _type = "Task" [ 1622.993499] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1623.002226] env[62519]: DEBUG oslo_vmware.api [None req-9d6c69a6-1fea-4ba8-a2c2-a90023b1aa8f tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Task: {'id': task-1802560, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1623.052144] env[62519]: DEBUG nova.network.neutron [None req-852acb1c-a919-4af2-9465-5ce6a7ca4001 tempest-TenantUsagesTestJSON-1732852836 tempest-TenantUsagesTestJSON-1732852836-project-member] [instance: 540167be-5295-4e28-9b25-16317746dd0e] Instance cache missing network info. 
{{(pid=62519) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1623.061685] env[62519]: DEBUG nova.scheduler.client.report [None req-efec4a27-e1ca-4ade-8ea9-071f6e4c700f tempest-ServerActionsV293TestJSON-760697799 tempest-ServerActionsV293TestJSON-760697799-project-member] Inventory has not changed for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 157, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1623.173951] env[62519]: DEBUG nova.network.neutron [None req-64126044-6a15-4d18-9748-0958bc8df406 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] [instance: a1551278-a306-4534-8d8d-3b3a003dde04] Port 1cf55e56-b406-4e45-9b63-912d4587f930 binding to destination host cpu-1 is already ACTIVE {{(pid=62519) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 1623.195543] env[62519]: DEBUG nova.compute.manager [None req-8c697949-3a2a-4b38-83ce-543b154f5eb6 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] [instance: 80ef3fd4-b9ef-4fd2-a991-feec78a0c81d] Start destroying the instance on the hypervisor. {{(pid=62519) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3166}} [ 1623.195693] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-8c697949-3a2a-4b38-83ce-543b154f5eb6 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] [instance: 80ef3fd4-b9ef-4fd2-a991-feec78a0c81d] Destroying instance {{(pid=62519) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1623.196721] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a71332b-4e59-4681-9674-f0b2ddf60b8e {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1623.205407] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-8c697949-3a2a-4b38-83ce-543b154f5eb6 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] [instance: 80ef3fd4-b9ef-4fd2-a991-feec78a0c81d] Unregistering the VM {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1623.207777] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-4600074c-948a-4774-8f48-f8b935ba8900 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1623.295514] env[62519]: DEBUG nova.network.neutron [req-95a50061-ead3-4039-8f6b-946a3b6488ac req-70308a15-fbd6-494b-b1c5-10946e55beca service nova] [instance: d5b6b7b4-ebc4-4d3d-8e16-bf0a4e974d67] Updated VIF entry in instance network info cache for port d146cb22-9f7c-47f1-bcdf-fca468f1d2df. 
{{(pid=62519) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1623.295514] env[62519]: DEBUG nova.network.neutron [req-95a50061-ead3-4039-8f6b-946a3b6488ac req-70308a15-fbd6-494b-b1c5-10946e55beca service nova] [instance: d5b6b7b4-ebc4-4d3d-8e16-bf0a4e974d67] Updating instance_info_cache with network_info: [{"id": "d146cb22-9f7c-47f1-bcdf-fca468f1d2df", "address": "fa:16:3e:28:5a:e0", "network": {"id": "8baaf922-141e-435f-b06b-b412fb43897f", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1913709114", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.24", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "08813c8c5d0b45dbab5a05ed08ef9531", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5ba07329-1d3e-4ba8-8774-d029262318c4", "external-id": "nsx-vlan-transportzone-534", "segmentation_id": 534, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd146cb22-9f", "ovs_interfaceid": "d146cb22-9f7c-47f1-bcdf-fca468f1d2df", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "260a58db-3019-4382-9551-1787dc70912c", "address": "fa:16:3e:7f:88:2f", "network": {"id": "fad682b4-46f3-4c2d-a1f1-b5eb2bf04900", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1253928678", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.133", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "08813c8c5d0b45dbab5a05ed08ef9531", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "209639b9-c313-4b35-86dc-dccd744d174a", "external-id": "nsx-vlan-transportzone-868", "segmentation_id": 868, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap260a58db-30", "ovs_interfaceid": "260a58db-3019-4382-9551-1787dc70912c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1623.324959] env[62519]: DEBUG nova.network.neutron [None req-852acb1c-a919-4af2-9465-5ce6a7ca4001 tempest-TenantUsagesTestJSON-1732852836 tempest-TenantUsagesTestJSON-1732852836-project-member] [instance: 540167be-5295-4e28-9b25-16317746dd0e] Updating instance_info_cache with network_info: [{"id": "028b3466-6a8c-46ec-81a2-43206a5852ec", "address": "fa:16:3e:98:3e:21", "network": {"id": "b4c20e4e-b4f0-4757-9ab0-b74eef10b678", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.85", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, 
"dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "4069337684d348e0a1ab4eb6a1a2b14d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fe99da4f-5630-4afd-918b-b327193d8489", "external-id": "nsx-vlan-transportzone-688", "segmentation_id": 688, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap028b3466-6a", "ovs_interfaceid": "028b3466-6a8c-46ec-81a2-43206a5852ec", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1623.402643] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-8c697949-3a2a-4b38-83ce-543b154f5eb6 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] [instance: 80ef3fd4-b9ef-4fd2-a991-feec78a0c81d] Unregistered the VM {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1623.402867] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-8c697949-3a2a-4b38-83ce-543b154f5eb6 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] [instance: 80ef3fd4-b9ef-4fd2-a991-feec78a0c81d] Deleting contents of the VM from datastore datastore1 {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1623.403072] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-8c697949-3a2a-4b38-83ce-543b154f5eb6 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Deleting the datastore file [datastore1] 80ef3fd4-b9ef-4fd2-a991-feec78a0c81d {{(pid=62519) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1623.403368] env[62519]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e05e987c-b2e6-414a-a2e7-f23773b85d0e {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1623.414139] env[62519]: DEBUG oslo_vmware.api [None req-8c697949-3a2a-4b38-83ce-543b154f5eb6 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Waiting for the task: (returnval){ [ 1623.414139] env[62519]: value = "task-1802562" [ 1623.414139] env[62519]: _type = "Task" [ 1623.414139] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1623.430338] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1802559, 'name': CreateVM_Task} progress is 25%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1623.436783] env[62519]: DEBUG oslo_vmware.api [None req-8c697949-3a2a-4b38-83ce-543b154f5eb6 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Task: {'id': task-1802562, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1623.437409] env[62519]: DEBUG oslo_vmware.api [None req-94572272-cfe8-4ebd-bbc6-1659ba6f617f tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Task: {'id': task-1802558, 'name': PowerOnVM_Task, 'duration_secs': 0.551077} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1623.437970] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-94572272-cfe8-4ebd-bbc6-1659ba6f617f tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] [instance: c60f5d73-9d6d-4b5f-b71b-00b6b787d482] Powered on the VM {{(pid=62519) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1623.438352] env[62519]: INFO nova.compute.manager [None req-94572272-cfe8-4ebd-bbc6-1659ba6f617f tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] [instance: c60f5d73-9d6d-4b5f-b71b-00b6b787d482] Took 11.46 seconds to spawn the instance on the hypervisor. [ 1623.438352] env[62519]: DEBUG nova.compute.manager [None req-94572272-cfe8-4ebd-bbc6-1659ba6f617f tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] [instance: c60f5d73-9d6d-4b5f-b71b-00b6b787d482] Checking state {{(pid=62519) _get_power_state /opt/stack/nova/nova/compute/manager.py:1799}} [ 1623.439162] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04aadab5-e8e9-4849-8cba-5b3327967aa9 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1623.505232] env[62519]: DEBUG oslo_vmware.api [None req-9d6c69a6-1fea-4ba8-a2c2-a90023b1aa8f tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Task: {'id': task-1802560, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1623.547138] env[62519]: DEBUG oslo_concurrency.lockutils [None req-ee14a42e-677f-45a8-a61d-6bf7de76787d tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] Acquiring lock "27f9e890-4733-43aa-9bf1-351d42d75418" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1623.547437] env[62519]: DEBUG oslo_concurrency.lockutils [None req-ee14a42e-677f-45a8-a61d-6bf7de76787d tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] Lock "27f9e890-4733-43aa-9bf1-351d42d75418" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1623.547660] env[62519]: DEBUG oslo_concurrency.lockutils [None req-ee14a42e-677f-45a8-a61d-6bf7de76787d tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] Acquiring lock "27f9e890-4733-43aa-9bf1-351d42d75418-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1623.547825] env[62519]: DEBUG oslo_concurrency.lockutils [None req-ee14a42e-677f-45a8-a61d-6bf7de76787d tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] Lock "27f9e890-4733-43aa-9bf1-351d42d75418-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62519) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1623.547991] env[62519]: DEBUG oslo_concurrency.lockutils [None req-ee14a42e-677f-45a8-a61d-6bf7de76787d tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] Lock "27f9e890-4733-43aa-9bf1-351d42d75418-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1623.555424] env[62519]: INFO nova.compute.manager [None req-ee14a42e-677f-45a8-a61d-6bf7de76787d tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] [instance: 27f9e890-4733-43aa-9bf1-351d42d75418] Terminating instance [ 1623.567183] env[62519]: DEBUG oslo_concurrency.lockutils [None req-efec4a27-e1ca-4ade-8ea9-071f6e4c700f tempest-ServerActionsV293TestJSON-760697799 tempest-ServerActionsV293TestJSON-760697799-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.770s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1623.567306] env[62519]: DEBUG nova.compute.manager [None req-efec4a27-e1ca-4ade-8ea9-071f6e4c700f tempest-ServerActionsV293TestJSON-760697799 tempest-ServerActionsV293TestJSON-760697799-project-member] [instance: bace23b3-b7f4-4f3b-8986-0076440d096d] Start building networks asynchronously for instance. {{(pid=62519) _build_resources /opt/stack/nova/nova/compute/manager.py:2838}} [ 1623.571480] env[62519]: DEBUG oslo_concurrency.lockutils [None req-6e183849-567e-4bbd-988c-d4ca57b40836 tempest-VolumesAdminNegativeTest-694306887 tempest-VolumesAdminNegativeTest-694306887-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 22.175s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1623.573500] env[62519]: INFO nova.compute.claims [None req-6e183849-567e-4bbd-988c-d4ca57b40836 tempest-VolumesAdminNegativeTest-694306887 tempest-VolumesAdminNegativeTest-694306887-project-member] [instance: ed716912-752e-4c6d-b6c6-fb349668fa93] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1623.804442] env[62519]: DEBUG oslo_concurrency.lockutils [req-95a50061-ead3-4039-8f6b-946a3b6488ac req-70308a15-fbd6-494b-b1c5-10946e55beca service nova] Releasing lock "refresh_cache-d5b6b7b4-ebc4-4d3d-8e16-bf0a4e974d67" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1623.828045] env[62519]: DEBUG oslo_concurrency.lockutils [None req-852acb1c-a919-4af2-9465-5ce6a7ca4001 tempest-TenantUsagesTestJSON-1732852836 tempest-TenantUsagesTestJSON-1732852836-project-member] Releasing lock "refresh_cache-540167be-5295-4e28-9b25-16317746dd0e" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1623.828415] env[62519]: DEBUG nova.compute.manager [None req-852acb1c-a919-4af2-9465-5ce6a7ca4001 tempest-TenantUsagesTestJSON-1732852836 tempest-TenantUsagesTestJSON-1732852836-project-member] [instance: 540167be-5295-4e28-9b25-16317746dd0e] Instance network_info: |[{"id": "028b3466-6a8c-46ec-81a2-43206a5852ec", "address": "fa:16:3e:98:3e:21", "network": {"id": "b4c20e4e-b4f0-4757-9ab0-b74eef10b678", "bridge": "br-int", 
"label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.85", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "4069337684d348e0a1ab4eb6a1a2b14d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fe99da4f-5630-4afd-918b-b327193d8489", "external-id": "nsx-vlan-transportzone-688", "segmentation_id": 688, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap028b3466-6a", "ovs_interfaceid": "028b3466-6a8c-46ec-81a2-43206a5852ec", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62519) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2004}} [ 1623.829323] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-852acb1c-a919-4af2-9465-5ce6a7ca4001 tempest-TenantUsagesTestJSON-1732852836 tempest-TenantUsagesTestJSON-1732852836-project-member] [instance: 540167be-5295-4e28-9b25-16317746dd0e] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:98:3e:21', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'fe99da4f-5630-4afd-918b-b327193d8489', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '028b3466-6a8c-46ec-81a2-43206a5852ec', 'vif_model': 'vmxnet3'}] {{(pid=62519) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1623.846129] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-852acb1c-a919-4af2-9465-5ce6a7ca4001 tempest-TenantUsagesTestJSON-1732852836 tempest-TenantUsagesTestJSON-1732852836-project-member] Creating folder: Project (cf42ff6ca553460fa3ead0cc71ed1186). Parent ref: group-v373567. {{(pid=62519) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1623.846129] env[62519]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-58cc608d-3abd-4755-bfd9-d5535a6b3e68 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1623.859132] env[62519]: INFO nova.virt.vmwareapi.vm_util [None req-852acb1c-a919-4af2-9465-5ce6a7ca4001 tempest-TenantUsagesTestJSON-1732852836 tempest-TenantUsagesTestJSON-1732852836-project-member] Created folder: Project (cf42ff6ca553460fa3ead0cc71ed1186) in parent group-v373567. [ 1623.859132] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-852acb1c-a919-4af2-9465-5ce6a7ca4001 tempest-TenantUsagesTestJSON-1732852836 tempest-TenantUsagesTestJSON-1732852836-project-member] Creating folder: Instances. Parent ref: group-v373728. 
{{(pid=62519) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1623.863357] env[62519]: DEBUG nova.network.neutron [None req-484beabb-56ac-464b-a93a-122561b99fee tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] [instance: 46b3a0fb-29f6-4b66-a091-2d125b69d109] Successfully updated port: bcc785a2-3385-4fe4-85fc-7540000eb36b {{(pid=62519) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1623.863357] env[62519]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-4dd2b35f-d9e1-4e17-a2fb-b2ec58406227 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1623.880296] env[62519]: INFO nova.virt.vmwareapi.vm_util [None req-852acb1c-a919-4af2-9465-5ce6a7ca4001 tempest-TenantUsagesTestJSON-1732852836 tempest-TenantUsagesTestJSON-1732852836-project-member] Created folder: Instances in parent group-v373728. [ 1623.880296] env[62519]: DEBUG oslo.service.loopingcall [None req-852acb1c-a919-4af2-9465-5ce6a7ca4001 tempest-TenantUsagesTestJSON-1732852836 tempest-TenantUsagesTestJSON-1732852836-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62519) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1623.880296] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 540167be-5295-4e28-9b25-16317746dd0e] Creating VM on the ESX host {{(pid=62519) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1623.880296] env[62519]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-7d60697b-6dab-4e0d-b0ff-03345732fe76 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1623.900444] env[62519]: DEBUG oslo_concurrency.lockutils [None req-ad7bb8ca-d7d9-41c0-86cb-2bc2a1a52dc5 tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] Acquiring lock "1d4b14d3-8832-457e-aaed-462236555f57" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1623.900961] env[62519]: DEBUG oslo_concurrency.lockutils [None req-ad7bb8ca-d7d9-41c0-86cb-2bc2a1a52dc5 tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] Lock "1d4b14d3-8832-457e-aaed-462236555f57" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1623.901235] env[62519]: DEBUG oslo_concurrency.lockutils [None req-ad7bb8ca-d7d9-41c0-86cb-2bc2a1a52dc5 tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] Acquiring lock "1d4b14d3-8832-457e-aaed-462236555f57-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1623.901584] env[62519]: DEBUG oslo_concurrency.lockutils [None req-ad7bb8ca-d7d9-41c0-86cb-2bc2a1a52dc5 tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] Lock "1d4b14d3-8832-457e-aaed-462236555f57-events" acquired by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1623.901761] env[62519]: DEBUG oslo_concurrency.lockutils [None req-ad7bb8ca-d7d9-41c0-86cb-2bc2a1a52dc5 tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] Lock "1d4b14d3-8832-457e-aaed-462236555f57-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1623.903788] env[62519]: INFO nova.compute.manager [None req-ad7bb8ca-d7d9-41c0-86cb-2bc2a1a52dc5 tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] [instance: 1d4b14d3-8832-457e-aaed-462236555f57] Terminating instance [ 1623.914376] env[62519]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1623.914376] env[62519]: value = "task-1802565" [ 1623.914376] env[62519]: _type = "Task" [ 1623.914376] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1623.931332] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1802559, 'name': CreateVM_Task, 'duration_secs': 0.851329} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1623.931528] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d5b6b7b4-ebc4-4d3d-8e16-bf0a4e974d67] Created VM on the ESX host {{(pid=62519) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1623.932749] env[62519]: DEBUG oslo_concurrency.lockutils [None req-eb7b956f-b805-4696-91cf-83b33daf0afe tempest-ServersTestMultiNic-797441404 tempest-ServersTestMultiNic-797441404-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1623.932937] env[62519]: DEBUG oslo_concurrency.lockutils [None req-eb7b956f-b805-4696-91cf-83b33daf0afe tempest-ServersTestMultiNic-797441404 tempest-ServersTestMultiNic-797441404-project-member] Acquired lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1623.933352] env[62519]: DEBUG oslo_concurrency.lockutils [None req-eb7b956f-b805-4696-91cf-83b33daf0afe tempest-ServersTestMultiNic-797441404 tempest-ServersTestMultiNic-797441404-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1623.933608] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-edad98e7-7201-41dc-b3e0-0dc16db03b8a {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1623.944366] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1802565, 'name': CreateVM_Task} progress is 6%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1623.944366] env[62519]: DEBUG oslo_vmware.api [None req-8c697949-3a2a-4b38-83ce-543b154f5eb6 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Task: {'id': task-1802562, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.134272} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1623.944366] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-8c697949-3a2a-4b38-83ce-543b154f5eb6 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Deleted the datastore file {{(pid=62519) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1623.944366] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-8c697949-3a2a-4b38-83ce-543b154f5eb6 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] [instance: 80ef3fd4-b9ef-4fd2-a991-feec78a0c81d] Deleted contents of the VM from datastore datastore1 {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1623.944366] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-8c697949-3a2a-4b38-83ce-543b154f5eb6 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] [instance: 80ef3fd4-b9ef-4fd2-a991-feec78a0c81d] Instance destroyed {{(pid=62519) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1623.944366] env[62519]: INFO nova.compute.manager [None req-8c697949-3a2a-4b38-83ce-543b154f5eb6 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] [instance: 80ef3fd4-b9ef-4fd2-a991-feec78a0c81d] Took 0.75 seconds to destroy the instance on the hypervisor. [ 1623.944833] env[62519]: DEBUG oslo.service.loopingcall [None req-8c697949-3a2a-4b38-83ce-543b154f5eb6 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62519) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1623.944833] env[62519]: DEBUG nova.compute.manager [-] [instance: 80ef3fd4-b9ef-4fd2-a991-feec78a0c81d] Deallocating network for instance {{(pid=62519) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2297}} [ 1623.944833] env[62519]: DEBUG nova.network.neutron [-] [instance: 80ef3fd4-b9ef-4fd2-a991-feec78a0c81d] deallocate_for_instance() {{(pid=62519) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1623.947723] env[62519]: DEBUG oslo_vmware.api [None req-eb7b956f-b805-4696-91cf-83b33daf0afe tempest-ServersTestMultiNic-797441404 tempest-ServersTestMultiNic-797441404-project-member] Waiting for the task: (returnval){ [ 1623.947723] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]52150036-7c62-af2b-a67c-ceadf0acb085" [ 1623.947723] env[62519]: _type = "Task" [ 1623.947723] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1623.960116] env[62519]: INFO nova.compute.manager [None req-94572272-cfe8-4ebd-bbc6-1659ba6f617f tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] [instance: c60f5d73-9d6d-4b5f-b71b-00b6b787d482] Took 51.67 seconds to build instance. 
[ 1623.967021] env[62519]: DEBUG oslo_vmware.api [None req-eb7b956f-b805-4696-91cf-83b33daf0afe tempest-ServersTestMultiNic-797441404 tempest-ServersTestMultiNic-797441404-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52150036-7c62-af2b-a67c-ceadf0acb085, 'name': SearchDatastore_Task, 'duration_secs': 0.013172} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1623.967021] env[62519]: DEBUG oslo_concurrency.lockutils [None req-eb7b956f-b805-4696-91cf-83b33daf0afe tempest-ServersTestMultiNic-797441404 tempest-ServersTestMultiNic-797441404-project-member] Releasing lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1623.967021] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-eb7b956f-b805-4696-91cf-83b33daf0afe tempest-ServersTestMultiNic-797441404 tempest-ServersTestMultiNic-797441404-project-member] [instance: d5b6b7b4-ebc4-4d3d-8e16-bf0a4e974d67] Processing image 15793716-f1d9-4a86-9030-717adf498693 {{(pid=62519) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1623.967396] env[62519]: DEBUG oslo_concurrency.lockutils [None req-eb7b956f-b805-4696-91cf-83b33daf0afe tempest-ServersTestMultiNic-797441404 tempest-ServersTestMultiNic-797441404-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1623.967396] env[62519]: DEBUG oslo_concurrency.lockutils [None req-eb7b956f-b805-4696-91cf-83b33daf0afe tempest-ServersTestMultiNic-797441404 tempest-ServersTestMultiNic-797441404-project-member] Acquired lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1623.967490] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-eb7b956f-b805-4696-91cf-83b33daf0afe tempest-ServersTestMultiNic-797441404 tempest-ServersTestMultiNic-797441404-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62519) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1623.967705] env[62519]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-313d876d-bbd5-4c8f-9271-f2dc88811f66 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1623.977694] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-eb7b956f-b805-4696-91cf-83b33daf0afe tempest-ServersTestMultiNic-797441404 tempest-ServersTestMultiNic-797441404-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62519) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1623.977894] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-eb7b956f-b805-4696-91cf-83b33daf0afe tempest-ServersTestMultiNic-797441404 tempest-ServersTestMultiNic-797441404-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62519) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1623.978659] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2cf069bb-3dac-4a9e-829e-ede7b7bece38 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1623.987306] env[62519]: DEBUG oslo_vmware.api [None req-eb7b956f-b805-4696-91cf-83b33daf0afe tempest-ServersTestMultiNic-797441404 tempest-ServersTestMultiNic-797441404-project-member] Waiting for the task: (returnval){ [ 1623.987306] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]522f4fb0-2281-988b-b105-e067a786b043" [ 1623.987306] env[62519]: _type = "Task" [ 1623.987306] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1624.000642] env[62519]: DEBUG oslo_vmware.api [None req-eb7b956f-b805-4696-91cf-83b33daf0afe tempest-ServersTestMultiNic-797441404 tempest-ServersTestMultiNic-797441404-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]522f4fb0-2281-988b-b105-e067a786b043, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1624.012228] env[62519]: DEBUG oslo_vmware.api [None req-9d6c69a6-1fea-4ba8-a2c2-a90023b1aa8f tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Task: {'id': task-1802560, 'name': ReconfigVM_Task, 'duration_secs': 0.774713} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1624.012304] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-9d6c69a6-1fea-4ba8-a2c2-a90023b1aa8f tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] [instance: 34d2991e-b6df-473d-8994-e45ff57ef131] Reconfigured VM instance instance-0000000a to attach disk [datastore1] 34d2991e-b6df-473d-8994-e45ff57ef131/34d2991e-b6df-473d-8994-e45ff57ef131.vmdk or device None with type sparse {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1624.012972] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-15c31e10-225d-439d-8cbf-df854158c38f {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1624.020726] env[62519]: DEBUG oslo_vmware.api [None req-9d6c69a6-1fea-4ba8-a2c2-a90023b1aa8f tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Waiting for the task: (returnval){ [ 1624.020726] env[62519]: value = "task-1802566" [ 1624.020726] env[62519]: _type = "Task" [ 1624.020726] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1624.032212] env[62519]: DEBUG oslo_vmware.api [None req-9d6c69a6-1fea-4ba8-a2c2-a90023b1aa8f tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Task: {'id': task-1802566, 'name': Rename_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1624.059164] env[62519]: DEBUG nova.compute.manager [None req-ee14a42e-677f-45a8-a61d-6bf7de76787d tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] [instance: 27f9e890-4733-43aa-9bf1-351d42d75418] Start destroying the instance on the hypervisor. {{(pid=62519) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3166}} [ 1624.059411] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-ee14a42e-677f-45a8-a61d-6bf7de76787d tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] [instance: 27f9e890-4733-43aa-9bf1-351d42d75418] Destroying instance {{(pid=62519) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1624.060396] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ea80b06-9403-404f-9161-918977caece9 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1624.069037] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-ee14a42e-677f-45a8-a61d-6bf7de76787d tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] [instance: 27f9e890-4733-43aa-9bf1-351d42d75418] Powering off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1624.069315] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c8fa2164-14e8-426b-92a2-0afc036c130b {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1624.080424] env[62519]: DEBUG nova.compute.utils [None req-efec4a27-e1ca-4ade-8ea9-071f6e4c700f tempest-ServerActionsV293TestJSON-760697799 tempest-ServerActionsV293TestJSON-760697799-project-member] Using /dev/sd instead of None {{(pid=62519) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1624.081969] env[62519]: DEBUG oslo_vmware.api [None req-ee14a42e-677f-45a8-a61d-6bf7de76787d tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] Waiting for the task: (returnval){ [ 1624.081969] env[62519]: value = "task-1802567" [ 1624.081969] env[62519]: _type = "Task" [ 1624.081969] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1624.082697] env[62519]: DEBUG nova.compute.manager [None req-efec4a27-e1ca-4ade-8ea9-071f6e4c700f tempest-ServerActionsV293TestJSON-760697799 tempest-ServerActionsV293TestJSON-760697799-project-member] [instance: bace23b3-b7f4-4f3b-8986-0076440d096d] Allocating IP information in the background. {{(pid=62519) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1989}} [ 1624.082899] env[62519]: DEBUG nova.network.neutron [None req-efec4a27-e1ca-4ade-8ea9-071f6e4c700f tempest-ServerActionsV293TestJSON-760697799 tempest-ServerActionsV293TestJSON-760697799-project-member] [instance: bace23b3-b7f4-4f3b-8986-0076440d096d] allocate_for_instance() {{(pid=62519) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1624.096766] env[62519]: DEBUG oslo_vmware.api [None req-ee14a42e-677f-45a8-a61d-6bf7de76787d tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] Task: {'id': task-1802567, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1624.136399] env[62519]: DEBUG nova.policy [None req-efec4a27-e1ca-4ade-8ea9-071f6e4c700f tempest-ServerActionsV293TestJSON-760697799 tempest-ServerActionsV293TestJSON-760697799-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f75f52c1c84e4ccda97b26834f89c0b1', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '3038814dc07448fca423fb8c8dbe42c3', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62519) authorize /opt/stack/nova/nova/policy.py:192}} [ 1624.203764] env[62519]: DEBUG oslo_concurrency.lockutils [None req-64126044-6a15-4d18-9748-0958bc8df406 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Acquiring lock "a1551278-a306-4534-8d8d-3b3a003dde04-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1624.205741] env[62519]: DEBUG oslo_concurrency.lockutils [None req-64126044-6a15-4d18-9748-0958bc8df406 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Lock "a1551278-a306-4534-8d8d-3b3a003dde04-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.004s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1624.205741] env[62519]: DEBUG oslo_concurrency.lockutils [None req-64126044-6a15-4d18-9748-0958bc8df406 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Lock "a1551278-a306-4534-8d8d-3b3a003dde04-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1624.361202] env[62519]: DEBUG nova.compute.manager [req-bdeb81e0-3ca5-4be4-9490-12b31debab0e req-728ecff7-ed41-4564-83cb-8d6461e20c52 service nova] [instance: 540167be-5295-4e28-9b25-16317746dd0e] Received event network-vif-plugged-028b3466-6a8c-46ec-81a2-43206a5852ec {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1624.361692] env[62519]: DEBUG oslo_concurrency.lockutils [req-bdeb81e0-3ca5-4be4-9490-12b31debab0e req-728ecff7-ed41-4564-83cb-8d6461e20c52 service nova] Acquiring lock "540167be-5295-4e28-9b25-16317746dd0e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1624.362038] env[62519]: DEBUG oslo_concurrency.lockutils [req-bdeb81e0-3ca5-4be4-9490-12b31debab0e req-728ecff7-ed41-4564-83cb-8d6461e20c52 service nova] Lock "540167be-5295-4e28-9b25-16317746dd0e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1624.362376] env[62519]: DEBUG oslo_concurrency.lockutils [req-bdeb81e0-3ca5-4be4-9490-12b31debab0e req-728ecff7-ed41-4564-83cb-8d6461e20c52 service nova] 
Lock "540167be-5295-4e28-9b25-16317746dd0e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1624.362481] env[62519]: DEBUG nova.compute.manager [req-bdeb81e0-3ca5-4be4-9490-12b31debab0e req-728ecff7-ed41-4564-83cb-8d6461e20c52 service nova] [instance: 540167be-5295-4e28-9b25-16317746dd0e] No waiting events found dispatching network-vif-plugged-028b3466-6a8c-46ec-81a2-43206a5852ec {{(pid=62519) pop_instance_event /opt/stack/nova/nova/compute/manager.py:323}} [ 1624.362720] env[62519]: WARNING nova.compute.manager [req-bdeb81e0-3ca5-4be4-9490-12b31debab0e req-728ecff7-ed41-4564-83cb-8d6461e20c52 service nova] [instance: 540167be-5295-4e28-9b25-16317746dd0e] Received unexpected event network-vif-plugged-028b3466-6a8c-46ec-81a2-43206a5852ec for instance with vm_state building and task_state spawning. [ 1624.362793] env[62519]: DEBUG nova.compute.manager [req-bdeb81e0-3ca5-4be4-9490-12b31debab0e req-728ecff7-ed41-4564-83cb-8d6461e20c52 service nova] [instance: 540167be-5295-4e28-9b25-16317746dd0e] Received event network-changed-028b3466-6a8c-46ec-81a2-43206a5852ec {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1624.363037] env[62519]: DEBUG nova.compute.manager [req-bdeb81e0-3ca5-4be4-9490-12b31debab0e req-728ecff7-ed41-4564-83cb-8d6461e20c52 service nova] [instance: 540167be-5295-4e28-9b25-16317746dd0e] Refreshing instance network info cache due to event network-changed-028b3466-6a8c-46ec-81a2-43206a5852ec. {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11628}} [ 1624.363381] env[62519]: DEBUG oslo_concurrency.lockutils [req-bdeb81e0-3ca5-4be4-9490-12b31debab0e req-728ecff7-ed41-4564-83cb-8d6461e20c52 service nova] Acquiring lock "refresh_cache-540167be-5295-4e28-9b25-16317746dd0e" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1624.363381] env[62519]: DEBUG oslo_concurrency.lockutils [req-bdeb81e0-3ca5-4be4-9490-12b31debab0e req-728ecff7-ed41-4564-83cb-8d6461e20c52 service nova] Acquired lock "refresh_cache-540167be-5295-4e28-9b25-16317746dd0e" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1624.364109] env[62519]: DEBUG nova.network.neutron [req-bdeb81e0-3ca5-4be4-9490-12b31debab0e req-728ecff7-ed41-4564-83cb-8d6461e20c52 service nova] [instance: 540167be-5295-4e28-9b25-16317746dd0e] Refreshing network info cache for port 028b3466-6a8c-46ec-81a2-43206a5852ec {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1624.369410] env[62519]: DEBUG oslo_concurrency.lockutils [None req-484beabb-56ac-464b-a93a-122561b99fee tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Acquiring lock "refresh_cache-46b3a0fb-29f6-4b66-a091-2d125b69d109" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1624.369410] env[62519]: DEBUG oslo_concurrency.lockutils [None req-484beabb-56ac-464b-a93a-122561b99fee tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Acquired lock "refresh_cache-46b3a0fb-29f6-4b66-a091-2d125b69d109" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1624.369623] env[62519]: DEBUG nova.network.neutron 
[None req-484beabb-56ac-464b-a93a-122561b99fee tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] [instance: 46b3a0fb-29f6-4b66-a091-2d125b69d109] Building network info cache for instance {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1624.391137] env[62519]: DEBUG nova.compute.manager [req-c6d73a67-5ab3-4c21-8e85-b342641015cd req-3ee154ab-2f5b-4b5d-9018-2b4997e27656 service nova] [instance: d5b6b7b4-ebc4-4d3d-8e16-bf0a4e974d67] Received event network-vif-plugged-260a58db-3019-4382-9551-1787dc70912c {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1624.392050] env[62519]: DEBUG oslo_concurrency.lockutils [req-c6d73a67-5ab3-4c21-8e85-b342641015cd req-3ee154ab-2f5b-4b5d-9018-2b4997e27656 service nova] Acquiring lock "d5b6b7b4-ebc4-4d3d-8e16-bf0a4e974d67-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1624.392050] env[62519]: DEBUG oslo_concurrency.lockutils [req-c6d73a67-5ab3-4c21-8e85-b342641015cd req-3ee154ab-2f5b-4b5d-9018-2b4997e27656 service nova] Lock "d5b6b7b4-ebc4-4d3d-8e16-bf0a4e974d67-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1624.392050] env[62519]: DEBUG oslo_concurrency.lockutils [req-c6d73a67-5ab3-4c21-8e85-b342641015cd req-3ee154ab-2f5b-4b5d-9018-2b4997e27656 service nova] Lock "d5b6b7b4-ebc4-4d3d-8e16-bf0a4e974d67-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1624.392050] env[62519]: DEBUG nova.compute.manager [req-c6d73a67-5ab3-4c21-8e85-b342641015cd req-3ee154ab-2f5b-4b5d-9018-2b4997e27656 service nova] [instance: d5b6b7b4-ebc4-4d3d-8e16-bf0a4e974d67] No waiting events found dispatching network-vif-plugged-260a58db-3019-4382-9551-1787dc70912c {{(pid=62519) pop_instance_event /opt/stack/nova/nova/compute/manager.py:323}} [ 1624.392050] env[62519]: WARNING nova.compute.manager [req-c6d73a67-5ab3-4c21-8e85-b342641015cd req-3ee154ab-2f5b-4b5d-9018-2b4997e27656 service nova] [instance: d5b6b7b4-ebc4-4d3d-8e16-bf0a4e974d67] Received unexpected event network-vif-plugged-260a58db-3019-4382-9551-1787dc70912c for instance with vm_state building and task_state spawning. [ 1624.392393] env[62519]: DEBUG nova.compute.manager [req-c6d73a67-5ab3-4c21-8e85-b342641015cd req-3ee154ab-2f5b-4b5d-9018-2b4997e27656 service nova] [instance: d5b6b7b4-ebc4-4d3d-8e16-bf0a4e974d67] Received event network-changed-260a58db-3019-4382-9551-1787dc70912c {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1624.392393] env[62519]: DEBUG nova.compute.manager [req-c6d73a67-5ab3-4c21-8e85-b342641015cd req-3ee154ab-2f5b-4b5d-9018-2b4997e27656 service nova] [instance: d5b6b7b4-ebc4-4d3d-8e16-bf0a4e974d67] Refreshing instance network info cache due to event network-changed-260a58db-3019-4382-9551-1787dc70912c. 
{{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11628}} [ 1624.392444] env[62519]: DEBUG oslo_concurrency.lockutils [req-c6d73a67-5ab3-4c21-8e85-b342641015cd req-3ee154ab-2f5b-4b5d-9018-2b4997e27656 service nova] Acquiring lock "refresh_cache-d5b6b7b4-ebc4-4d3d-8e16-bf0a4e974d67" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1624.392571] env[62519]: DEBUG oslo_concurrency.lockutils [req-c6d73a67-5ab3-4c21-8e85-b342641015cd req-3ee154ab-2f5b-4b5d-9018-2b4997e27656 service nova] Acquired lock "refresh_cache-d5b6b7b4-ebc4-4d3d-8e16-bf0a4e974d67" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1624.392762] env[62519]: DEBUG nova.network.neutron [req-c6d73a67-5ab3-4c21-8e85-b342641015cd req-3ee154ab-2f5b-4b5d-9018-2b4997e27656 service nova] [instance: d5b6b7b4-ebc4-4d3d-8e16-bf0a4e974d67] Refreshing network info cache for port 260a58db-3019-4382-9551-1787dc70912c {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1624.407289] env[62519]: DEBUG nova.compute.manager [None req-ad7bb8ca-d7d9-41c0-86cb-2bc2a1a52dc5 tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] [instance: 1d4b14d3-8832-457e-aaed-462236555f57] Start destroying the instance on the hypervisor. {{(pid=62519) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3166}} [ 1624.407557] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-ad7bb8ca-d7d9-41c0-86cb-2bc2a1a52dc5 tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] [instance: 1d4b14d3-8832-457e-aaed-462236555f57] Destroying instance {{(pid=62519) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1624.408742] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0331573b-b814-4710-82a4-eeb3ac6425df {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1624.418250] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-ad7bb8ca-d7d9-41c0-86cb-2bc2a1a52dc5 tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] [instance: 1d4b14d3-8832-457e-aaed-462236555f57] Powering off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1624.418551] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d1634888-28fe-41b3-aaf4-1255957c337f {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1624.430212] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1802565, 'name': CreateVM_Task, 'duration_secs': 0.424442} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1624.431718] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 540167be-5295-4e28-9b25-16317746dd0e] Created VM on the ESX host {{(pid=62519) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1624.432138] env[62519]: DEBUG oslo_vmware.api [None req-ad7bb8ca-d7d9-41c0-86cb-2bc2a1a52dc5 tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] Waiting for the task: (returnval){ [ 1624.432138] env[62519]: value = "task-1802568" [ 1624.432138] env[62519]: _type = "Task" [ 1624.432138] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1624.432784] env[62519]: DEBUG oslo_concurrency.lockutils [None req-852acb1c-a919-4af2-9465-5ce6a7ca4001 tempest-TenantUsagesTestJSON-1732852836 tempest-TenantUsagesTestJSON-1732852836-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1624.432935] env[62519]: DEBUG oslo_concurrency.lockutils [None req-852acb1c-a919-4af2-9465-5ce6a7ca4001 tempest-TenantUsagesTestJSON-1732852836 tempest-TenantUsagesTestJSON-1732852836-project-member] Acquired lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1624.433273] env[62519]: DEBUG oslo_concurrency.lockutils [None req-852acb1c-a919-4af2-9465-5ce6a7ca4001 tempest-TenantUsagesTestJSON-1732852836 tempest-TenantUsagesTestJSON-1732852836-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1624.433597] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-925c96e8-e8f2-4d1e-b5d9-c78e069454f0 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1624.447027] env[62519]: DEBUG oslo_vmware.api [None req-852acb1c-a919-4af2-9465-5ce6a7ca4001 tempest-TenantUsagesTestJSON-1732852836 tempest-TenantUsagesTestJSON-1732852836-project-member] Waiting for the task: (returnval){ [ 1624.447027] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]524e35d8-e8a5-98eb-ddf9-2bf4df5b0a81" [ 1624.447027] env[62519]: _type = "Task" [ 1624.447027] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1624.450377] env[62519]: DEBUG oslo_vmware.api [None req-ad7bb8ca-d7d9-41c0-86cb-2bc2a1a52dc5 tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] Task: {'id': task-1802568, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1624.460529] env[62519]: DEBUG oslo_vmware.api [None req-852acb1c-a919-4af2-9465-5ce6a7ca4001 tempest-TenantUsagesTestJSON-1732852836 tempest-TenantUsagesTestJSON-1732852836-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]524e35d8-e8a5-98eb-ddf9-2bf4df5b0a81, 'name': SearchDatastore_Task, 'duration_secs': 0.013235} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1624.460832] env[62519]: DEBUG oslo_concurrency.lockutils [None req-852acb1c-a919-4af2-9465-5ce6a7ca4001 tempest-TenantUsagesTestJSON-1732852836 tempest-TenantUsagesTestJSON-1732852836-project-member] Releasing lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1624.461084] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-852acb1c-a919-4af2-9465-5ce6a7ca4001 tempest-TenantUsagesTestJSON-1732852836 tempest-TenantUsagesTestJSON-1732852836-project-member] [instance: 540167be-5295-4e28-9b25-16317746dd0e] Processing image 15793716-f1d9-4a86-9030-717adf498693 {{(pid=62519) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1624.461296] env[62519]: DEBUG oslo_concurrency.lockutils [None req-852acb1c-a919-4af2-9465-5ce6a7ca4001 tempest-TenantUsagesTestJSON-1732852836 tempest-TenantUsagesTestJSON-1732852836-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1624.498876] env[62519]: DEBUG oslo_vmware.api [None req-eb7b956f-b805-4696-91cf-83b33daf0afe tempest-ServersTestMultiNic-797441404 tempest-ServersTestMultiNic-797441404-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]522f4fb0-2281-988b-b105-e067a786b043, 'name': SearchDatastore_Task, 'duration_secs': 0.015204} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1624.501056] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-76a99d2b-f3f0-46dd-9672-4cb758b593f9 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1624.509288] env[62519]: DEBUG oslo_vmware.api [None req-eb7b956f-b805-4696-91cf-83b33daf0afe tempest-ServersTestMultiNic-797441404 tempest-ServersTestMultiNic-797441404-project-member] Waiting for the task: (returnval){ [ 1624.509288] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]52b0f45a-9974-462b-411e-d17f12edb885" [ 1624.509288] env[62519]: _type = "Task" [ 1624.509288] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1624.518992] env[62519]: DEBUG oslo_vmware.api [None req-eb7b956f-b805-4696-91cf-83b33daf0afe tempest-ServersTestMultiNic-797441404 tempest-ServersTestMultiNic-797441404-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52b0f45a-9974-462b-411e-d17f12edb885, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1624.531633] env[62519]: DEBUG oslo_vmware.api [None req-9d6c69a6-1fea-4ba8-a2c2-a90023b1aa8f tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Task: {'id': task-1802566, 'name': Rename_Task, 'duration_secs': 0.229997} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1624.531927] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-9d6c69a6-1fea-4ba8-a2c2-a90023b1aa8f tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] [instance: 34d2991e-b6df-473d-8994-e45ff57ef131] Powering on the VM {{(pid=62519) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1624.532204] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-13faa25b-255f-4855-8406-748d81c75e22 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1624.540452] env[62519]: DEBUG oslo_vmware.api [None req-9d6c69a6-1fea-4ba8-a2c2-a90023b1aa8f tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Waiting for the task: (returnval){ [ 1624.540452] env[62519]: value = "task-1802569" [ 1624.540452] env[62519]: _type = "Task" [ 1624.540452] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1624.550103] env[62519]: DEBUG oslo_vmware.api [None req-9d6c69a6-1fea-4ba8-a2c2-a90023b1aa8f tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Task: {'id': task-1802569, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1624.586351] env[62519]: DEBUG nova.compute.manager [None req-efec4a27-e1ca-4ade-8ea9-071f6e4c700f tempest-ServerActionsV293TestJSON-760697799 tempest-ServerActionsV293TestJSON-760697799-project-member] [instance: bace23b3-b7f4-4f3b-8986-0076440d096d] Start building block device mappings for instance. {{(pid=62519) _build_resources /opt/stack/nova/nova/compute/manager.py:2873}} [ 1624.600813] env[62519]: DEBUG oslo_vmware.api [None req-ee14a42e-677f-45a8-a61d-6bf7de76787d tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] Task: {'id': task-1802567, 'name': PowerOffVM_Task, 'duration_secs': 0.240189} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1624.603103] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-ee14a42e-677f-45a8-a61d-6bf7de76787d tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] [instance: 27f9e890-4733-43aa-9bf1-351d42d75418] Powered off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1624.603103] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-ee14a42e-677f-45a8-a61d-6bf7de76787d tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] [instance: 27f9e890-4733-43aa-9bf1-351d42d75418] Unregistering the VM {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1624.603103] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-83177243-73be-4a01-8114-1b3e47c80b4b {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1624.650348] env[62519]: DEBUG nova.network.neutron [None req-efec4a27-e1ca-4ade-8ea9-071f6e4c700f tempest-ServerActionsV293TestJSON-760697799 tempest-ServerActionsV293TestJSON-760697799-project-member] [instance: bace23b3-b7f4-4f3b-8986-0076440d096d] Successfully created port: e252f987-8815-4905-a206-237207d5ac4b {{(pid=62519) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1624.673725] env[62519]: DEBUG oslo_concurrency.lockutils [None req-5e107b91-7fe0-4ba5-92d7-9609ca3a3d2d tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Acquiring lock "5786ea51-6499-4460-a123-a038ddcde8b5" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1624.677345] env[62519]: DEBUG oslo_concurrency.lockutils [None req-5e107b91-7fe0-4ba5-92d7-9609ca3a3d2d tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Lock "5786ea51-6499-4460-a123-a038ddcde8b5" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1624.804683] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-ee14a42e-677f-45a8-a61d-6bf7de76787d tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] [instance: 27f9e890-4733-43aa-9bf1-351d42d75418] Unregistered the VM {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1624.804935] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-ee14a42e-677f-45a8-a61d-6bf7de76787d tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] [instance: 27f9e890-4733-43aa-9bf1-351d42d75418] Deleting contents of the VM from datastore datastore1 {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1624.805186] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-ee14a42e-677f-45a8-a61d-6bf7de76787d tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] Deleting the datastore file [datastore1] 27f9e890-4733-43aa-9bf1-351d42d75418 {{(pid=62519) file_delete 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1624.805545] env[62519]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-9e01ddd5-078d-460f-a6be-823f9460c4b7 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1624.812924] env[62519]: DEBUG oslo_vmware.api [None req-ee14a42e-677f-45a8-a61d-6bf7de76787d tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] Waiting for the task: (returnval){ [ 1624.812924] env[62519]: value = "task-1802571" [ 1624.812924] env[62519]: _type = "Task" [ 1624.812924] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1624.825527] env[62519]: DEBUG oslo_vmware.api [None req-ee14a42e-677f-45a8-a61d-6bf7de76787d tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] Task: {'id': task-1802571, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1624.945118] env[62519]: DEBUG oslo_vmware.api [None req-ad7bb8ca-d7d9-41c0-86cb-2bc2a1a52dc5 tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] Task: {'id': task-1802568, 'name': PowerOffVM_Task, 'duration_secs': 0.238818} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1624.948614] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-ad7bb8ca-d7d9-41c0-86cb-2bc2a1a52dc5 tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] [instance: 1d4b14d3-8832-457e-aaed-462236555f57] Powered off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1624.948778] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-ad7bb8ca-d7d9-41c0-86cb-2bc2a1a52dc5 tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] [instance: 1d4b14d3-8832-457e-aaed-462236555f57] Unregistering the VM {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1624.951812] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b4cae44b-0e7e-4ccc-b000-9330c7c8a95f {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1624.953910] env[62519]: DEBUG nova.network.neutron [None req-484beabb-56ac-464b-a93a-122561b99fee tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] [instance: 46b3a0fb-29f6-4b66-a091-2d125b69d109] Instance cache missing network info. 
{{(pid=62519) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1624.986231] env[62519]: DEBUG oslo_concurrency.lockutils [None req-388da41b-8675-4c7e-9421-8a1f0be3acc0 tempest-FloatingIPsAssociationTestJSON-131832216 tempest-FloatingIPsAssociationTestJSON-131832216-project-member] Acquiring lock "9ac3344d-219a-487f-b83f-96c17cd86dad" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1624.987512] env[62519]: DEBUG nova.network.neutron [-] [instance: 80ef3fd4-b9ef-4fd2-a991-feec78a0c81d] Updating instance_info_cache with network_info: [] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1625.031918] env[62519]: DEBUG oslo_vmware.api [None req-eb7b956f-b805-4696-91cf-83b33daf0afe tempest-ServersTestMultiNic-797441404 tempest-ServersTestMultiNic-797441404-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52b0f45a-9974-462b-411e-d17f12edb885, 'name': SearchDatastore_Task, 'duration_secs': 0.011864} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1625.032638] env[62519]: DEBUG oslo_concurrency.lockutils [None req-eb7b956f-b805-4696-91cf-83b33daf0afe tempest-ServersTestMultiNic-797441404 tempest-ServersTestMultiNic-797441404-project-member] Releasing lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1625.032916] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-eb7b956f-b805-4696-91cf-83b33daf0afe tempest-ServersTestMultiNic-797441404 tempest-ServersTestMultiNic-797441404-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk to [datastore1] d5b6b7b4-ebc4-4d3d-8e16-bf0a4e974d67/d5b6b7b4-ebc4-4d3d-8e16-bf0a4e974d67.vmdk {{(pid=62519) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1625.033569] env[62519]: DEBUG oslo_concurrency.lockutils [None req-852acb1c-a919-4af2-9465-5ce6a7ca4001 tempest-TenantUsagesTestJSON-1732852836 tempest-TenantUsagesTestJSON-1732852836-project-member] Acquired lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1625.033759] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-852acb1c-a919-4af2-9465-5ce6a7ca4001 tempest-TenantUsagesTestJSON-1732852836 tempest-TenantUsagesTestJSON-1732852836-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62519) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1625.034011] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-031afe73-0f12-41d5-a952-367820aab71d {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1625.039148] env[62519]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-55822d93-7a1d-49cf-a355-3c3fe9e5bba0 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 
1625.052704] env[62519]: DEBUG oslo_vmware.api [None req-eb7b956f-b805-4696-91cf-83b33daf0afe tempest-ServersTestMultiNic-797441404 tempest-ServersTestMultiNic-797441404-project-member] Waiting for the task: (returnval){ [ 1625.052704] env[62519]: value = "task-1802573" [ 1625.052704] env[62519]: _type = "Task" [ 1625.052704] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1625.053260] env[62519]: DEBUG oslo_vmware.api [None req-9d6c69a6-1fea-4ba8-a2c2-a90023b1aa8f tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Task: {'id': task-1802569, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1625.054647] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-852acb1c-a919-4af2-9465-5ce6a7ca4001 tempest-TenantUsagesTestJSON-1732852836 tempest-TenantUsagesTestJSON-1732852836-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62519) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1625.054820] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-852acb1c-a919-4af2-9465-5ce6a7ca4001 tempest-TenantUsagesTestJSON-1732852836 tempest-TenantUsagesTestJSON-1732852836-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62519) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1625.067314] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-92e123b0-cfbc-4f1f-9dbf-4fcc3c85c43b {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1625.076313] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-ad7bb8ca-d7d9-41c0-86cb-2bc2a1a52dc5 tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] [instance: 1d4b14d3-8832-457e-aaed-462236555f57] Unregistered the VM {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1625.076617] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-ad7bb8ca-d7d9-41c0-86cb-2bc2a1a52dc5 tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] [instance: 1d4b14d3-8832-457e-aaed-462236555f57] Deleting contents of the VM from datastore datastore1 {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1625.076932] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-ad7bb8ca-d7d9-41c0-86cb-2bc2a1a52dc5 tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] Deleting the datastore file [datastore1] 1d4b14d3-8832-457e-aaed-462236555f57 {{(pid=62519) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1625.081870] env[62519]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-9d0ac351-fbfb-47ce-96e6-17a75ee41554 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1625.084460] env[62519]: DEBUG oslo_vmware.api [None req-eb7b956f-b805-4696-91cf-83b33daf0afe tempest-ServersTestMultiNic-797441404 tempest-ServersTestMultiNic-797441404-project-member] Task: {'id': task-1802573, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1625.092662] env[62519]: DEBUG oslo_vmware.api [None req-852acb1c-a919-4af2-9465-5ce6a7ca4001 tempest-TenantUsagesTestJSON-1732852836 tempest-TenantUsagesTestJSON-1732852836-project-member] Waiting for the task: (returnval){ [ 1625.092662] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]520c6dc7-6aea-b947-629d-eb50e757b94e" [ 1625.092662] env[62519]: _type = "Task" [ 1625.092662] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1625.094443] env[62519]: INFO nova.virt.block_device [None req-efec4a27-e1ca-4ade-8ea9-071f6e4c700f tempest-ServerActionsV293TestJSON-760697799 tempest-ServerActionsV293TestJSON-760697799-project-member] [instance: bace23b3-b7f4-4f3b-8986-0076440d096d] Booting with volume f0e225ed-ae23-4580-9ecb-e74214791c63 at /dev/sda [ 1625.110026] env[62519]: DEBUG oslo_vmware.api [None req-ad7bb8ca-d7d9-41c0-86cb-2bc2a1a52dc5 tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] Waiting for the task: (returnval){ [ 1625.110026] env[62519]: value = "task-1802574" [ 1625.110026] env[62519]: _type = "Task" [ 1625.110026] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1625.117852] env[62519]: DEBUG oslo_vmware.api [None req-852acb1c-a919-4af2-9465-5ce6a7ca4001 tempest-TenantUsagesTestJSON-1732852836 tempest-TenantUsagesTestJSON-1732852836-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]520c6dc7-6aea-b947-629d-eb50e757b94e, 'name': SearchDatastore_Task, 'duration_secs': 0.010233} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1625.123125] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-94ac7054-9ed0-4156-b6dd-f4d4b8f1255b {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1625.131410] env[62519]: DEBUG oslo_vmware.api [None req-ad7bb8ca-d7d9-41c0-86cb-2bc2a1a52dc5 tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] Task: {'id': task-1802574, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1625.143741] env[62519]: DEBUG oslo_vmware.api [None req-852acb1c-a919-4af2-9465-5ce6a7ca4001 tempest-TenantUsagesTestJSON-1732852836 tempest-TenantUsagesTestJSON-1732852836-project-member] Waiting for the task: (returnval){ [ 1625.143741] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]52acecba-756d-a42b-dff1-48f3712b6e30" [ 1625.143741] env[62519]: _type = "Task" [ 1625.143741] env[62519]: } to complete. 
{{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1625.153928] env[62519]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-b479c0df-5902-48d4-8f9a-0762073f6a9f {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1625.159970] env[62519]: DEBUG oslo_vmware.api [None req-852acb1c-a919-4af2-9465-5ce6a7ca4001 tempest-TenantUsagesTestJSON-1732852836 tempest-TenantUsagesTestJSON-1732852836-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52acecba-756d-a42b-dff1-48f3712b6e30, 'name': SearchDatastore_Task, 'duration_secs': 0.012654} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1625.160353] env[62519]: DEBUG oslo_concurrency.lockutils [None req-852acb1c-a919-4af2-9465-5ce6a7ca4001 tempest-TenantUsagesTestJSON-1732852836 tempest-TenantUsagesTestJSON-1732852836-project-member] Releasing lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1625.160601] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-852acb1c-a919-4af2-9465-5ce6a7ca4001 tempest-TenantUsagesTestJSON-1732852836 tempest-TenantUsagesTestJSON-1732852836-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk to [datastore1] 540167be-5295-4e28-9b25-16317746dd0e/540167be-5295-4e28-9b25-16317746dd0e.vmdk {{(pid=62519) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1625.163318] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a63cd961-2e4f-4dd9-a267-af33c2e620d6 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1625.171194] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10ea23f5-bca7-4fd0-8285-0723636f3a68 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1625.190195] env[62519]: DEBUG oslo_vmware.api [None req-852acb1c-a919-4af2-9465-5ce6a7ca4001 tempest-TenantUsagesTestJSON-1732852836 tempest-TenantUsagesTestJSON-1732852836-project-member] Waiting for the task: (returnval){ [ 1625.190195] env[62519]: value = "task-1802575" [ 1625.190195] env[62519]: _type = "Task" [ 1625.190195] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1625.229126] env[62519]: DEBUG oslo_vmware.api [None req-852acb1c-a919-4af2-9465-5ce6a7ca4001 tempest-TenantUsagesTestJSON-1732852836 tempest-TenantUsagesTestJSON-1732852836-project-member] Task: {'id': task-1802575, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1625.237321] env[62519]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-0b107cb6-2367-4d95-93bc-32fcfb65b25a {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1625.249698] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7b8c8e0-d489-44b5-8b26-d025cc08d80e {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1625.306150] env[62519]: DEBUG oslo_concurrency.lockutils [None req-64126044-6a15-4d18-9748-0958bc8df406 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Acquiring lock "refresh_cache-a1551278-a306-4534-8d8d-3b3a003dde04" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1625.306508] env[62519]: DEBUG oslo_concurrency.lockutils [None req-64126044-6a15-4d18-9748-0958bc8df406 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Acquired lock "refresh_cache-a1551278-a306-4534-8d8d-3b3a003dde04" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1625.306714] env[62519]: DEBUG nova.network.neutron [None req-64126044-6a15-4d18-9748-0958bc8df406 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] [instance: a1551278-a306-4534-8d8d-3b3a003dde04] Building network info cache for instance {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1625.311995] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7c7ae0f-8cbd-477b-ae43-9f560ebdac45 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1625.329230] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ba1a569-7734-41da-a96e-518683622992 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1625.332141] env[62519]: DEBUG oslo_vmware.api [None req-ee14a42e-677f-45a8-a61d-6bf7de76787d tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] Task: {'id': task-1802571, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.178591} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1625.333806] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-ee14a42e-677f-45a8-a61d-6bf7de76787d tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] Deleted the datastore file {{(pid=62519) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1625.333970] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-ee14a42e-677f-45a8-a61d-6bf7de76787d tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] [instance: 27f9e890-4733-43aa-9bf1-351d42d75418] Deleted contents of the VM from datastore datastore1 {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1625.334176] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-ee14a42e-677f-45a8-a61d-6bf7de76787d tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] [instance: 27f9e890-4733-43aa-9bf1-351d42d75418] Instance destroyed {{(pid=62519) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1625.334345] env[62519]: INFO nova.compute.manager [None req-ee14a42e-677f-45a8-a61d-6bf7de76787d tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] [instance: 27f9e890-4733-43aa-9bf1-351d42d75418] Took 1.27 seconds to destroy the instance on the hypervisor. [ 1625.334692] env[62519]: DEBUG oslo.service.loopingcall [None req-ee14a42e-677f-45a8-a61d-6bf7de76787d tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62519) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1625.335378] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c46b6eaf-569d-47ce-90e6-c791502c2b5a {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1625.341547] env[62519]: DEBUG nova.network.neutron [None req-484beabb-56ac-464b-a93a-122561b99fee tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] [instance: 46b3a0fb-29f6-4b66-a091-2d125b69d109] Updating instance_info_cache with network_info: [{"id": "bcc785a2-3385-4fe4-85fc-7540000eb36b", "address": "fa:16:3e:9d:ab:f3", "network": {"id": "06bc21fc-5712-4650-9d06-18e9cc6afd29", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1062109402-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "17cd969b1e7d4bd795748560caf80077", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "89f807d9-140f-4a6f-8bce-96795f9482ee", "external-id": "nsx-vlan-transportzone-762", "segmentation_id": 762, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbcc785a2-33", "ovs_interfaceid": "bcc785a2-3385-4fe4-85fc-7540000eb36b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1625.343414] env[62519]: DEBUG nova.compute.manager [-] [instance: 27f9e890-4733-43aa-9bf1-351d42d75418] Deallocating network for instance {{(pid=62519) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2297}} [ 1625.343532] env[62519]: DEBUG nova.network.neutron [-] [instance: 27f9e890-4733-43aa-9bf1-351d42d75418] deallocate_for_instance() {{(pid=62519) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1625.346981] env[62519]: DEBUG nova.network.neutron [req-c6d73a67-5ab3-4c21-8e85-b342641015cd req-3ee154ab-2f5b-4b5d-9018-2b4997e27656 service nova] [instance: d5b6b7b4-ebc4-4d3d-8e16-bf0a4e974d67] Updated VIF entry in instance network info cache for port 260a58db-3019-4382-9551-1787dc70912c. 
{{(pid=62519) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1625.347415] env[62519]: DEBUG nova.network.neutron [req-c6d73a67-5ab3-4c21-8e85-b342641015cd req-3ee154ab-2f5b-4b5d-9018-2b4997e27656 service nova] [instance: d5b6b7b4-ebc4-4d3d-8e16-bf0a4e974d67] Updating instance_info_cache with network_info: [{"id": "d146cb22-9f7c-47f1-bcdf-fca468f1d2df", "address": "fa:16:3e:28:5a:e0", "network": {"id": "8baaf922-141e-435f-b06b-b412fb43897f", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1913709114", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.24", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "08813c8c5d0b45dbab5a05ed08ef9531", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5ba07329-1d3e-4ba8-8774-d029262318c4", "external-id": "nsx-vlan-transportzone-534", "segmentation_id": 534, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd146cb22-9f", "ovs_interfaceid": "d146cb22-9f7c-47f1-bcdf-fca468f1d2df", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "260a58db-3019-4382-9551-1787dc70912c", "address": "fa:16:3e:7f:88:2f", "network": {"id": "fad682b4-46f3-4c2d-a1f1-b5eb2bf04900", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1253928678", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.133", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "08813c8c5d0b45dbab5a05ed08ef9531", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "209639b9-c313-4b35-86dc-dccd744d174a", "external-id": "nsx-vlan-transportzone-868", "segmentation_id": 868, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap260a58db-30", "ovs_interfaceid": "260a58db-3019-4382-9551-1787dc70912c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1625.359146] env[62519]: DEBUG nova.network.neutron [req-bdeb81e0-3ca5-4be4-9490-12b31debab0e req-728ecff7-ed41-4564-83cb-8d6461e20c52 service nova] [instance: 540167be-5295-4e28-9b25-16317746dd0e] Updated VIF entry in instance network info cache for port 028b3466-6a8c-46ec-81a2-43206a5852ec. 
{{(pid=62519) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1625.359543] env[62519]: DEBUG nova.network.neutron [req-bdeb81e0-3ca5-4be4-9490-12b31debab0e req-728ecff7-ed41-4564-83cb-8d6461e20c52 service nova] [instance: 540167be-5295-4e28-9b25-16317746dd0e] Updating instance_info_cache with network_info: [{"id": "028b3466-6a8c-46ec-81a2-43206a5852ec", "address": "fa:16:3e:98:3e:21", "network": {"id": "b4c20e4e-b4f0-4757-9ab0-b74eef10b678", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.85", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "4069337684d348e0a1ab4eb6a1a2b14d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fe99da4f-5630-4afd-918b-b327193d8489", "external-id": "nsx-vlan-transportzone-688", "segmentation_id": 688, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap028b3466-6a", "ovs_interfaceid": "028b3466-6a8c-46ec-81a2-43206a5852ec", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1625.361597] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b2567d0-853c-491e-baf5-41bd94a6433e {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1625.365207] env[62519]: DEBUG nova.virt.block_device [None req-efec4a27-e1ca-4ade-8ea9-071f6e4c700f tempest-ServerActionsV293TestJSON-760697799 tempest-ServerActionsV293TestJSON-760697799-project-member] [instance: bace23b3-b7f4-4f3b-8986-0076440d096d] Updating existing volume attachment record: 515380eb-e65d-4c4f-8faa-4d0c4bb7fca5 {{(pid=62519) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1625.407476] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8b2c246-cf1b-4509-b57a-5d6e5d037782 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1625.422026] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fcedaccc-3c33-405b-9156-f7d279834693 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1625.438669] env[62519]: DEBUG nova.compute.provider_tree [None req-6e183849-567e-4bbd-988c-d4ca57b40836 tempest-VolumesAdminNegativeTest-694306887 tempest-VolumesAdminNegativeTest-694306887-project-member] Inventory has not changed in ProviderTree for provider: f8ca0d98-9158-4b85-ae0e-b106f966dd44 {{(pid=62519) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1625.475885] env[62519]: DEBUG oslo_concurrency.lockutils [None req-94572272-cfe8-4ebd-bbc6-1659ba6f617f tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Lock "c60f5d73-9d6d-4b5f-b71b-00b6b787d482" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 74.746s 
{{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1625.488862] env[62519]: INFO nova.compute.manager [-] [instance: 80ef3fd4-b9ef-4fd2-a991-feec78a0c81d] Took 1.54 seconds to deallocate network for instance. [ 1625.554661] env[62519]: DEBUG oslo_vmware.api [None req-9d6c69a6-1fea-4ba8-a2c2-a90023b1aa8f tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Task: {'id': task-1802569, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1625.565588] env[62519]: DEBUG oslo_vmware.api [None req-eb7b956f-b805-4696-91cf-83b33daf0afe tempest-ServersTestMultiNic-797441404 tempest-ServersTestMultiNic-797441404-project-member] Task: {'id': task-1802573, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1625.618548] env[62519]: DEBUG oslo_vmware.api [None req-ad7bb8ca-d7d9-41c0-86cb-2bc2a1a52dc5 tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] Task: {'id': task-1802574, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.414074} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1625.619326] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-ad7bb8ca-d7d9-41c0-86cb-2bc2a1a52dc5 tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] Deleted the datastore file {{(pid=62519) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1625.619326] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-ad7bb8ca-d7d9-41c0-86cb-2bc2a1a52dc5 tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] [instance: 1d4b14d3-8832-457e-aaed-462236555f57] Deleted contents of the VM from datastore datastore1 {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1625.619510] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-ad7bb8ca-d7d9-41c0-86cb-2bc2a1a52dc5 tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] [instance: 1d4b14d3-8832-457e-aaed-462236555f57] Instance destroyed {{(pid=62519) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1625.619663] env[62519]: INFO nova.compute.manager [None req-ad7bb8ca-d7d9-41c0-86cb-2bc2a1a52dc5 tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] [instance: 1d4b14d3-8832-457e-aaed-462236555f57] Took 1.21 seconds to destroy the instance on the hypervisor. [ 1625.619933] env[62519]: DEBUG oslo.service.loopingcall [None req-ad7bb8ca-d7d9-41c0-86cb-2bc2a1a52dc5 tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62519) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1625.620151] env[62519]: DEBUG nova.compute.manager [-] [instance: 1d4b14d3-8832-457e-aaed-462236555f57] Deallocating network for instance {{(pid=62519) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2297}} [ 1625.620249] env[62519]: DEBUG nova.network.neutron [-] [instance: 1d4b14d3-8832-457e-aaed-462236555f57] deallocate_for_instance() {{(pid=62519) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1625.715495] env[62519]: DEBUG oslo_vmware.api [None req-852acb1c-a919-4af2-9465-5ce6a7ca4001 tempest-TenantUsagesTestJSON-1732852836 tempest-TenantUsagesTestJSON-1732852836-project-member] Task: {'id': task-1802575, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1625.846860] env[62519]: DEBUG oslo_concurrency.lockutils [None req-484beabb-56ac-464b-a93a-122561b99fee tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Releasing lock "refresh_cache-46b3a0fb-29f6-4b66-a091-2d125b69d109" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1625.847257] env[62519]: DEBUG nova.compute.manager [None req-484beabb-56ac-464b-a93a-122561b99fee tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] [instance: 46b3a0fb-29f6-4b66-a091-2d125b69d109] Instance network_info: |[{"id": "bcc785a2-3385-4fe4-85fc-7540000eb36b", "address": "fa:16:3e:9d:ab:f3", "network": {"id": "06bc21fc-5712-4650-9d06-18e9cc6afd29", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1062109402-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "17cd969b1e7d4bd795748560caf80077", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "89f807d9-140f-4a6f-8bce-96795f9482ee", "external-id": "nsx-vlan-transportzone-762", "segmentation_id": 762, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbcc785a2-33", "ovs_interfaceid": "bcc785a2-3385-4fe4-85fc-7540000eb36b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62519) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2004}} [ 1625.847872] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-484beabb-56ac-464b-a93a-122561b99fee tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] [instance: 46b3a0fb-29f6-4b66-a091-2d125b69d109] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:9d:ab:f3', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '89f807d9-140f-4a6f-8bce-96795f9482ee', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'bcc785a2-3385-4fe4-85fc-7540000eb36b', 'vif_model': 'vmxnet3'}] {{(pid=62519) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1625.865038] env[62519]: DEBUG 
nova.virt.vmwareapi.vm_util [None req-484beabb-56ac-464b-a93a-122561b99fee tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Creating folder: Project (17cd969b1e7d4bd795748560caf80077). Parent ref: group-v373567. {{(pid=62519) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1625.865038] env[62519]: DEBUG oslo_concurrency.lockutils [req-c6d73a67-5ab3-4c21-8e85-b342641015cd req-3ee154ab-2f5b-4b5d-9018-2b4997e27656 service nova] Releasing lock "refresh_cache-d5b6b7b4-ebc4-4d3d-8e16-bf0a4e974d67" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1625.865038] env[62519]: DEBUG nova.compute.manager [req-c6d73a67-5ab3-4c21-8e85-b342641015cd req-3ee154ab-2f5b-4b5d-9018-2b4997e27656 service nova] [instance: 302edcd3-bd6e-41da-b731-4d4c1bb5c3c1] Received event network-vif-deleted-b3cdcca9-9468-453b-894e-326f90b3cb34 {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1625.865038] env[62519]: DEBUG nova.compute.manager [req-c6d73a67-5ab3-4c21-8e85-b342641015cd req-3ee154ab-2f5b-4b5d-9018-2b4997e27656 service nova] [instance: 9ac3344d-219a-487f-b83f-96c17cd86dad] Received event network-vif-deleted-883790fe-b8ee-4a72-99ca-e7d80c7468f3 {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1625.865038] env[62519]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-77309157-1055-4d94-b0ea-eec19e200f6d {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1625.871561] env[62519]: DEBUG oslo_concurrency.lockutils [req-bdeb81e0-3ca5-4be4-9490-12b31debab0e req-728ecff7-ed41-4564-83cb-8d6461e20c52 service nova] Releasing lock "refresh_cache-540167be-5295-4e28-9b25-16317746dd0e" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1625.871561] env[62519]: DEBUG nova.compute.manager [req-bdeb81e0-3ca5-4be4-9490-12b31debab0e req-728ecff7-ed41-4564-83cb-8d6461e20c52 service nova] [instance: 09eefc1a-011b-4d2c-ab75-a1fcee740907] Received event network-changed-7948e8a1-83dc-4329-aa44-813b4e25c1c7 {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1625.871561] env[62519]: DEBUG nova.compute.manager [req-bdeb81e0-3ca5-4be4-9490-12b31debab0e req-728ecff7-ed41-4564-83cb-8d6461e20c52 service nova] [instance: 09eefc1a-011b-4d2c-ab75-a1fcee740907] Refreshing instance network info cache due to event network-changed-7948e8a1-83dc-4329-aa44-813b4e25c1c7. 
{{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11628}} [ 1625.871561] env[62519]: DEBUG oslo_concurrency.lockutils [req-bdeb81e0-3ca5-4be4-9490-12b31debab0e req-728ecff7-ed41-4564-83cb-8d6461e20c52 service nova] Acquiring lock "refresh_cache-09eefc1a-011b-4d2c-ab75-a1fcee740907" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1625.871561] env[62519]: DEBUG oslo_concurrency.lockutils [req-bdeb81e0-3ca5-4be4-9490-12b31debab0e req-728ecff7-ed41-4564-83cb-8d6461e20c52 service nova] Acquired lock "refresh_cache-09eefc1a-011b-4d2c-ab75-a1fcee740907" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1625.871561] env[62519]: DEBUG nova.network.neutron [req-bdeb81e0-3ca5-4be4-9490-12b31debab0e req-728ecff7-ed41-4564-83cb-8d6461e20c52 service nova] [instance: 09eefc1a-011b-4d2c-ab75-a1fcee740907] Refreshing network info cache for port 7948e8a1-83dc-4329-aa44-813b4e25c1c7 {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1625.880724] env[62519]: INFO nova.virt.vmwareapi.vm_util [None req-484beabb-56ac-464b-a93a-122561b99fee tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Created folder: Project (17cd969b1e7d4bd795748560caf80077) in parent group-v373567. [ 1625.880724] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-484beabb-56ac-464b-a93a-122561b99fee tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Creating folder: Instances. Parent ref: group-v373731. {{(pid=62519) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1625.883616] env[62519]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-573b97a2-7758-4cff-8251-b0cb2f4f8f6b {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1625.901540] env[62519]: INFO nova.virt.vmwareapi.vm_util [None req-484beabb-56ac-464b-a93a-122561b99fee tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Created folder: Instances in parent group-v373731. [ 1625.901833] env[62519]: DEBUG oslo.service.loopingcall [None req-484beabb-56ac-464b-a93a-122561b99fee tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62519) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1625.902060] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 46b3a0fb-29f6-4b66-a091-2d125b69d109] Creating VM on the ESX host {{(pid=62519) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1625.902364] env[62519]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-8be9649b-e821-40fb-87eb-123733871e82 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1625.930608] env[62519]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1625.930608] env[62519]: value = "task-1802578" [ 1625.930608] env[62519]: _type = "Task" [ 1625.930608] env[62519]: } to complete. 
{{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1625.946512] env[62519]: DEBUG nova.scheduler.client.report [None req-6e183849-567e-4bbd-988c-d4ca57b40836 tempest-VolumesAdminNegativeTest-694306887 tempest-VolumesAdminNegativeTest-694306887-project-member] Inventory has not changed for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 157, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1625.950576] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1802578, 'name': CreateVM_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1625.977969] env[62519]: DEBUG nova.compute.manager [None req-aeecf1a6-198a-4c5f-8645-99f7ca1d1fc0 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] [instance: ad0af10d-5063-4344-b12f-1d3ee9ea1090] Starting instance... {{(pid=62519) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2441}} [ 1625.996316] env[62519]: DEBUG oslo_concurrency.lockutils [None req-8c697949-3a2a-4b38-83ce-543b154f5eb6 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1626.057842] env[62519]: DEBUG oslo_vmware.api [None req-9d6c69a6-1fea-4ba8-a2c2-a90023b1aa8f tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Task: {'id': task-1802569, 'name': PowerOnVM_Task, 'duration_secs': 1.267512} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1626.063893] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-9d6c69a6-1fea-4ba8-a2c2-a90023b1aa8f tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] [instance: 34d2991e-b6df-473d-8994-e45ff57ef131] Powered on the VM {{(pid=62519) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1626.064299] env[62519]: DEBUG nova.compute.manager [None req-9d6c69a6-1fea-4ba8-a2c2-a90023b1aa8f tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] [instance: 34d2991e-b6df-473d-8994-e45ff57ef131] Checking state {{(pid=62519) _get_power_state /opt/stack/nova/nova/compute/manager.py:1799}} [ 1626.065327] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0cfdaab0-b0a9-4c4e-b12e-df131624a187 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1626.078916] env[62519]: DEBUG oslo_vmware.api [None req-eb7b956f-b805-4696-91cf-83b33daf0afe tempest-ServersTestMultiNic-797441404 tempest-ServersTestMultiNic-797441404-project-member] Task: {'id': task-1802573, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.570501} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1626.084190] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-eb7b956f-b805-4696-91cf-83b33daf0afe tempest-ServersTestMultiNic-797441404 tempest-ServersTestMultiNic-797441404-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk to [datastore1] d5b6b7b4-ebc4-4d3d-8e16-bf0a4e974d67/d5b6b7b4-ebc4-4d3d-8e16-bf0a4e974d67.vmdk {{(pid=62519) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1626.084190] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-eb7b956f-b805-4696-91cf-83b33daf0afe tempest-ServersTestMultiNic-797441404 tempest-ServersTestMultiNic-797441404-project-member] [instance: d5b6b7b4-ebc4-4d3d-8e16-bf0a4e974d67] Extending root virtual disk to 1048576 {{(pid=62519) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1626.084540] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-c9a866db-4fb8-4cfe-929e-7d75e04e6e1f {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1626.096262] env[62519]: DEBUG oslo_vmware.api [None req-eb7b956f-b805-4696-91cf-83b33daf0afe tempest-ServersTestMultiNic-797441404 tempest-ServersTestMultiNic-797441404-project-member] Waiting for the task: (returnval){ [ 1626.096262] env[62519]: value = "task-1802579" [ 1626.096262] env[62519]: _type = "Task" [ 1626.096262] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1626.109011] env[62519]: DEBUG oslo_vmware.api [None req-eb7b956f-b805-4696-91cf-83b33daf0afe tempest-ServersTestMultiNic-797441404 tempest-ServersTestMultiNic-797441404-project-member] Task: {'id': task-1802579, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1626.202501] env[62519]: DEBUG nova.compute.manager [None req-1b7ebb10-9719-4c2b-be48-1e9d9d400c00 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] [instance: c60f5d73-9d6d-4b5f-b71b-00b6b787d482] Checking state {{(pid=62519) _get_power_state /opt/stack/nova/nova/compute/manager.py:1799}} [ 1626.203505] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87279ddc-7b34-441b-9aeb-5c5cccaf1175 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1626.210020] env[62519]: DEBUG nova.network.neutron [None req-64126044-6a15-4d18-9748-0958bc8df406 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] [instance: a1551278-a306-4534-8d8d-3b3a003dde04] Updating instance_info_cache with network_info: [{"id": "1cf55e56-b406-4e45-9b63-912d4587f930", "address": "fa:16:3e:57:b8:10", "network": {"id": "b4c20e4e-b4f0-4757-9ab0-b74eef10b678", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.171", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "4069337684d348e0a1ab4eb6a1a2b14d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fe99da4f-5630-4afd-918b-b327193d8489", "external-id": "nsx-vlan-transportzone-688", "segmentation_id": 688, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1cf55e56-b4", "ovs_interfaceid": "1cf55e56-b406-4e45-9b63-912d4587f930", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1626.214040] env[62519]: DEBUG oslo_vmware.api [None req-852acb1c-a919-4af2-9465-5ce6a7ca4001 tempest-TenantUsagesTestJSON-1732852836 tempest-TenantUsagesTestJSON-1732852836-project-member] Task: {'id': task-1802575, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.915562} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1626.215859] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-852acb1c-a919-4af2-9465-5ce6a7ca4001 tempest-TenantUsagesTestJSON-1732852836 tempest-TenantUsagesTestJSON-1732852836-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk to [datastore1] 540167be-5295-4e28-9b25-16317746dd0e/540167be-5295-4e28-9b25-16317746dd0e.vmdk {{(pid=62519) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1626.216092] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-852acb1c-a919-4af2-9465-5ce6a7ca4001 tempest-TenantUsagesTestJSON-1732852836 tempest-TenantUsagesTestJSON-1732852836-project-member] [instance: 540167be-5295-4e28-9b25-16317746dd0e] Extending root virtual disk to 1048576 {{(pid=62519) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1626.219075] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-3d0639d8-bea2-428b-bae7-67f6f4b1bdae {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1626.228301] env[62519]: DEBUG oslo_vmware.api [None req-852acb1c-a919-4af2-9465-5ce6a7ca4001 tempest-TenantUsagesTestJSON-1732852836 tempest-TenantUsagesTestJSON-1732852836-project-member] Waiting for the task: (returnval){ [ 1626.228301] env[62519]: value = "task-1802580" [ 1626.228301] env[62519]: _type = "Task" [ 1626.228301] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1626.238661] env[62519]: DEBUG oslo_vmware.api [None req-852acb1c-a919-4af2-9465-5ce6a7ca4001 tempest-TenantUsagesTestJSON-1732852836 tempest-TenantUsagesTestJSON-1732852836-project-member] Task: {'id': task-1802580, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1626.243498] env[62519]: DEBUG nova.network.neutron [-] [instance: 27f9e890-4733-43aa-9bf1-351d42d75418] Updating instance_info_cache with network_info: [] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1626.443300] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1802578, 'name': CreateVM_Task} progress is 99%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1626.452467] env[62519]: DEBUG oslo_concurrency.lockutils [None req-6e183849-567e-4bbd-988c-d4ca57b40836 tempest-VolumesAdminNegativeTest-694306887 tempest-VolumesAdminNegativeTest-694306887-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.881s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1626.453060] env[62519]: DEBUG nova.compute.manager [None req-6e183849-567e-4bbd-988c-d4ca57b40836 tempest-VolumesAdminNegativeTest-694306887 tempest-VolumesAdminNegativeTest-694306887-project-member] [instance: ed716912-752e-4c6d-b6c6-fb349668fa93] Start building networks asynchronously for instance. 
{{(pid=62519) _build_resources /opt/stack/nova/nova/compute/manager.py:2838}} [ 1626.456196] env[62519]: DEBUG oslo_concurrency.lockutils [None req-eb756757-24ba-440f-8631-f23661662462 tempest-ImagesOneServerTestJSON-1959164190 tempest-ImagesOneServerTestJSON-1959164190-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 24.571s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1626.456196] env[62519]: DEBUG nova.objects.instance [None req-eb756757-24ba-440f-8631-f23661662462 tempest-ImagesOneServerTestJSON-1959164190 tempest-ImagesOneServerTestJSON-1959164190-project-member] Lazy-loading 'resources' on Instance uuid 4e3dee19-b99a-4257-88da-1b0531e2c0f9 {{(pid=62519) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1626.508956] env[62519]: DEBUG nova.network.neutron [None req-efec4a27-e1ca-4ade-8ea9-071f6e4c700f tempest-ServerActionsV293TestJSON-760697799 tempest-ServerActionsV293TestJSON-760697799-project-member] [instance: bace23b3-b7f4-4f3b-8986-0076440d096d] Successfully updated port: e252f987-8815-4905-a206-237207d5ac4b {{(pid=62519) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1626.525189] env[62519]: DEBUG oslo_concurrency.lockutils [None req-aeecf1a6-198a-4c5f-8645-99f7ca1d1fc0 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1626.579801] env[62519]: DEBUG nova.network.neutron [-] [instance: 1d4b14d3-8832-457e-aaed-462236555f57] Updating instance_info_cache with network_info: [] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1626.592799] env[62519]: DEBUG nova.compute.manager [req-55e7dcc9-d91c-4ff0-b3c4-1553927f7262 req-9862323a-87b8-40ae-a685-7f1394e10304 service nova] [instance: 46b3a0fb-29f6-4b66-a091-2d125b69d109] Received event network-vif-plugged-bcc785a2-3385-4fe4-85fc-7540000eb36b {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1626.593467] env[62519]: DEBUG oslo_concurrency.lockutils [req-55e7dcc9-d91c-4ff0-b3c4-1553927f7262 req-9862323a-87b8-40ae-a685-7f1394e10304 service nova] Acquiring lock "46b3a0fb-29f6-4b66-a091-2d125b69d109-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1626.593821] env[62519]: DEBUG oslo_concurrency.lockutils [req-55e7dcc9-d91c-4ff0-b3c4-1553927f7262 req-9862323a-87b8-40ae-a685-7f1394e10304 service nova] Lock "46b3a0fb-29f6-4b66-a091-2d125b69d109-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1626.594107] env[62519]: DEBUG oslo_concurrency.lockutils [req-55e7dcc9-d91c-4ff0-b3c4-1553927f7262 req-9862323a-87b8-40ae-a685-7f1394e10304 service nova] Lock "46b3a0fb-29f6-4b66-a091-2d125b69d109-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1626.594444] env[62519]: DEBUG 
nova.compute.manager [req-55e7dcc9-d91c-4ff0-b3c4-1553927f7262 req-9862323a-87b8-40ae-a685-7f1394e10304 service nova] [instance: 46b3a0fb-29f6-4b66-a091-2d125b69d109] No waiting events found dispatching network-vif-plugged-bcc785a2-3385-4fe4-85fc-7540000eb36b {{(pid=62519) pop_instance_event /opt/stack/nova/nova/compute/manager.py:323}} [ 1626.594699] env[62519]: WARNING nova.compute.manager [req-55e7dcc9-d91c-4ff0-b3c4-1553927f7262 req-9862323a-87b8-40ae-a685-7f1394e10304 service nova] [instance: 46b3a0fb-29f6-4b66-a091-2d125b69d109] Received unexpected event network-vif-plugged-bcc785a2-3385-4fe4-85fc-7540000eb36b for instance with vm_state building and task_state spawning. [ 1626.595039] env[62519]: DEBUG nova.compute.manager [req-55e7dcc9-d91c-4ff0-b3c4-1553927f7262 req-9862323a-87b8-40ae-a685-7f1394e10304 service nova] [instance: 46b3a0fb-29f6-4b66-a091-2d125b69d109] Received event network-changed-bcc785a2-3385-4fe4-85fc-7540000eb36b {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1626.596178] env[62519]: DEBUG nova.compute.manager [req-55e7dcc9-d91c-4ff0-b3c4-1553927f7262 req-9862323a-87b8-40ae-a685-7f1394e10304 service nova] [instance: 46b3a0fb-29f6-4b66-a091-2d125b69d109] Refreshing instance network info cache due to event network-changed-bcc785a2-3385-4fe4-85fc-7540000eb36b. {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11628}} [ 1626.596178] env[62519]: DEBUG oslo_concurrency.lockutils [req-55e7dcc9-d91c-4ff0-b3c4-1553927f7262 req-9862323a-87b8-40ae-a685-7f1394e10304 service nova] Acquiring lock "refresh_cache-46b3a0fb-29f6-4b66-a091-2d125b69d109" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1626.596178] env[62519]: DEBUG oslo_concurrency.lockutils [req-55e7dcc9-d91c-4ff0-b3c4-1553927f7262 req-9862323a-87b8-40ae-a685-7f1394e10304 service nova] Acquired lock "refresh_cache-46b3a0fb-29f6-4b66-a091-2d125b69d109" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1626.596178] env[62519]: DEBUG nova.network.neutron [req-55e7dcc9-d91c-4ff0-b3c4-1553927f7262 req-9862323a-87b8-40ae-a685-7f1394e10304 service nova] [instance: 46b3a0fb-29f6-4b66-a091-2d125b69d109] Refreshing network info cache for port bcc785a2-3385-4fe4-85fc-7540000eb36b {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1626.604194] env[62519]: DEBUG oslo_concurrency.lockutils [None req-9d6c69a6-1fea-4ba8-a2c2-a90023b1aa8f tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1626.615947] env[62519]: DEBUG oslo_vmware.api [None req-eb7b956f-b805-4696-91cf-83b33daf0afe tempest-ServersTestMultiNic-797441404 tempest-ServersTestMultiNic-797441404-project-member] Task: {'id': task-1802579, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.07488} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1626.616479] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-eb7b956f-b805-4696-91cf-83b33daf0afe tempest-ServersTestMultiNic-797441404 tempest-ServersTestMultiNic-797441404-project-member] [instance: d5b6b7b4-ebc4-4d3d-8e16-bf0a4e974d67] Extended root virtual disk {{(pid=62519) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1626.617582] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ddd6eeaf-2ffd-4e82-837b-3cc66ceabf35 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1626.644855] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-eb7b956f-b805-4696-91cf-83b33daf0afe tempest-ServersTestMultiNic-797441404 tempest-ServersTestMultiNic-797441404-project-member] [instance: d5b6b7b4-ebc4-4d3d-8e16-bf0a4e974d67] Reconfiguring VM instance instance-00000037 to attach disk [datastore1] d5b6b7b4-ebc4-4d3d-8e16-bf0a4e974d67/d5b6b7b4-ebc4-4d3d-8e16-bf0a4e974d67.vmdk or device None with type sparse {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1626.645739] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0319e293-13b2-453e-91a4-7bf642ae2603 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1626.672331] env[62519]: DEBUG oslo_vmware.api [None req-eb7b956f-b805-4696-91cf-83b33daf0afe tempest-ServersTestMultiNic-797441404 tempest-ServersTestMultiNic-797441404-project-member] Waiting for the task: (returnval){ [ 1626.672331] env[62519]: value = "task-1802581" [ 1626.672331] env[62519]: _type = "Task" [ 1626.672331] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1626.684605] env[62519]: DEBUG oslo_vmware.api [None req-eb7b956f-b805-4696-91cf-83b33daf0afe tempest-ServersTestMultiNic-797441404 tempest-ServersTestMultiNic-797441404-project-member] Task: {'id': task-1802581, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1626.714584] env[62519]: DEBUG oslo_concurrency.lockutils [None req-64126044-6a15-4d18-9748-0958bc8df406 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Releasing lock "refresh_cache-a1551278-a306-4534-8d8d-3b3a003dde04" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1626.722454] env[62519]: INFO nova.compute.manager [None req-1b7ebb10-9719-4c2b-be48-1e9d9d400c00 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] [instance: c60f5d73-9d6d-4b5f-b71b-00b6b787d482] instance snapshotting [ 1626.727560] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d56f2690-a9ab-4f84-acc3-c8fdc22b9c41 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1626.739591] env[62519]: DEBUG oslo_vmware.api [None req-852acb1c-a919-4af2-9465-5ce6a7ca4001 tempest-TenantUsagesTestJSON-1732852836 tempest-TenantUsagesTestJSON-1732852836-project-member] Task: {'id': task-1802580, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1626.752660] env[62519]: INFO nova.compute.manager [-] [instance: 27f9e890-4733-43aa-9bf1-351d42d75418] Took 1.41 seconds to deallocate network for instance. [ 1626.755415] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af6d7a14-6112-41eb-99d2-f1cfaeee174e {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1626.891073] env[62519]: DEBUG nova.network.neutron [req-bdeb81e0-3ca5-4be4-9490-12b31debab0e req-728ecff7-ed41-4564-83cb-8d6461e20c52 service nova] [instance: 09eefc1a-011b-4d2c-ab75-a1fcee740907] Updated VIF entry in instance network info cache for port 7948e8a1-83dc-4329-aa44-813b4e25c1c7. {{(pid=62519) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1626.891500] env[62519]: DEBUG nova.network.neutron [req-bdeb81e0-3ca5-4be4-9490-12b31debab0e req-728ecff7-ed41-4564-83cb-8d6461e20c52 service nova] [instance: 09eefc1a-011b-4d2c-ab75-a1fcee740907] Updating instance_info_cache with network_info: [{"id": "7948e8a1-83dc-4329-aa44-813b4e25c1c7", "address": "fa:16:3e:dd:49:e8", "network": {"id": "ca655458-a420-4494-a897-206c445e8893", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-2085945802-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "08ccb6256cb446e1837e04580892a31a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "98d96b75-ac36-499a-adc2-130c8c1d55ca", "external-id": "nsx-vlan-transportzone-564", "segmentation_id": 564, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7948e8a1-83", "ovs_interfaceid": "7948e8a1-83dc-4329-aa44-813b4e25c1c7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1626.956747] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1802578, 'name': CreateVM_Task, 'duration_secs': 0.558683} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1626.957200] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 46b3a0fb-29f6-4b66-a091-2d125b69d109] Created VM on the ESX host {{(pid=62519) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1626.958124] env[62519]: DEBUG oslo_concurrency.lockutils [None req-484beabb-56ac-464b-a93a-122561b99fee tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1626.958178] env[62519]: DEBUG oslo_concurrency.lockutils [None req-484beabb-56ac-464b-a93a-122561b99fee tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Acquired lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1626.958487] env[62519]: DEBUG oslo_concurrency.lockutils [None req-484beabb-56ac-464b-a93a-122561b99fee tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1626.960108] env[62519]: DEBUG nova.compute.utils [None req-6e183849-567e-4bbd-988c-d4ca57b40836 tempest-VolumesAdminNegativeTest-694306887 tempest-VolumesAdminNegativeTest-694306887-project-member] Using /dev/sd instead of None {{(pid=62519) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1626.963514] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f58c1b5c-80a0-4986-bd37-722ea9532b21 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1626.965801] env[62519]: DEBUG nova.compute.manager [None req-6e183849-567e-4bbd-988c-d4ca57b40836 tempest-VolumesAdminNegativeTest-694306887 tempest-VolumesAdminNegativeTest-694306887-project-member] [instance: ed716912-752e-4c6d-b6c6-fb349668fa93] Allocating IP information in the background. {{(pid=62519) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1989}} [ 1626.965977] env[62519]: DEBUG nova.network.neutron [None req-6e183849-567e-4bbd-988c-d4ca57b40836 tempest-VolumesAdminNegativeTest-694306887 tempest-VolumesAdminNegativeTest-694306887-project-member] [instance: ed716912-752e-4c6d-b6c6-fb349668fa93] allocate_for_instance() {{(pid=62519) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1626.972466] env[62519]: DEBUG oslo_vmware.api [None req-484beabb-56ac-464b-a93a-122561b99fee tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Waiting for the task: (returnval){ [ 1626.972466] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]5262a8b8-59b4-0c3c-b506-6bad1bcf8352" [ 1626.972466] env[62519]: _type = "Task" [ 1626.972466] env[62519]: } to complete. 
{{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1626.983919] env[62519]: DEBUG oslo_vmware.api [None req-484beabb-56ac-464b-a93a-122561b99fee tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]5262a8b8-59b4-0c3c-b506-6bad1bcf8352, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1627.009266] env[62519]: DEBUG nova.policy [None req-6e183849-567e-4bbd-988c-d4ca57b40836 tempest-VolumesAdminNegativeTest-694306887 tempest-VolumesAdminNegativeTest-694306887-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '8dc0b2589af74b5f9602732295e8e228', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '92b737895c7c42f78fbc5d0fff165dc8', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62519) authorize /opt/stack/nova/nova/policy.py:192}} [ 1627.014048] env[62519]: DEBUG oslo_concurrency.lockutils [None req-efec4a27-e1ca-4ade-8ea9-071f6e4c700f tempest-ServerActionsV293TestJSON-760697799 tempest-ServerActionsV293TestJSON-760697799-project-member] Acquiring lock "refresh_cache-bace23b3-b7f4-4f3b-8986-0076440d096d" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1627.014205] env[62519]: DEBUG oslo_concurrency.lockutils [None req-efec4a27-e1ca-4ade-8ea9-071f6e4c700f tempest-ServerActionsV293TestJSON-760697799 tempest-ServerActionsV293TestJSON-760697799-project-member] Acquired lock "refresh_cache-bace23b3-b7f4-4f3b-8986-0076440d096d" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1627.014375] env[62519]: DEBUG nova.network.neutron [None req-efec4a27-e1ca-4ade-8ea9-071f6e4c700f tempest-ServerActionsV293TestJSON-760697799 tempest-ServerActionsV293TestJSON-760697799-project-member] [instance: bace23b3-b7f4-4f3b-8986-0076440d096d] Building network info cache for instance {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1627.084602] env[62519]: INFO nova.compute.manager [-] [instance: 1d4b14d3-8832-457e-aaed-462236555f57] Took 1.46 seconds to deallocate network for instance. 
[ 1627.168350] env[62519]: DEBUG oslo_concurrency.lockutils [None req-41ebc291-9b01-4875-8d9f-5c4226f038f6 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Acquiring lock "40507d8c-8f30-45d4-9c65-03f8b1271afb" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1627.168733] env[62519]: DEBUG oslo_concurrency.lockutils [None req-41ebc291-9b01-4875-8d9f-5c4226f038f6 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Lock "40507d8c-8f30-45d4-9c65-03f8b1271afb" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1627.183820] env[62519]: DEBUG oslo_vmware.api [None req-eb7b956f-b805-4696-91cf-83b33daf0afe tempest-ServersTestMultiNic-797441404 tempest-ServersTestMultiNic-797441404-project-member] Task: {'id': task-1802581, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1627.242917] env[62519]: DEBUG oslo_vmware.api [None req-852acb1c-a919-4af2-9465-5ce6a7ca4001 tempest-TenantUsagesTestJSON-1732852836 tempest-TenantUsagesTestJSON-1732852836-project-member] Task: {'id': task-1802580, 'name': ExtendVirtualDisk_Task} progress is 100%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1627.257066] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7f31a58-ab36-465c-a6ff-872a7ab88998 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1627.265644] env[62519]: DEBUG oslo_concurrency.lockutils [None req-ee14a42e-677f-45a8-a61d-6bf7de76787d tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1627.288828] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-1b7ebb10-9719-4c2b-be48-1e9d9d400c00 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] [instance: c60f5d73-9d6d-4b5f-b71b-00b6b787d482] Creating Snapshot of the VM instance {{(pid=62519) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1627.289569] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-c26a6ca6-f3f5-4d2a-bdf3-cd06ca96481f {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1627.291708] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13ee4f47-f27b-4836-a371-fb66481cc1f1 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1627.300714] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-64126044-6a15-4d18-9748-0958bc8df406 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] [instance: a1551278-a306-4534-8d8d-3b3a003dde04] Updating instance 
'a1551278-a306-4534-8d8d-3b3a003dde04' progress to 83 {{(pid=62519) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1627.309748] env[62519]: DEBUG oslo_vmware.api [None req-1b7ebb10-9719-4c2b-be48-1e9d9d400c00 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Waiting for the task: (returnval){ [ 1627.309748] env[62519]: value = "task-1802582" [ 1627.309748] env[62519]: _type = "Task" [ 1627.309748] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1627.325642] env[62519]: DEBUG oslo_vmware.api [None req-1b7ebb10-9719-4c2b-be48-1e9d9d400c00 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Task: {'id': task-1802582, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1627.393979] env[62519]: DEBUG oslo_concurrency.lockutils [req-bdeb81e0-3ca5-4be4-9490-12b31debab0e req-728ecff7-ed41-4564-83cb-8d6461e20c52 service nova] Releasing lock "refresh_cache-09eefc1a-011b-4d2c-ab75-a1fcee740907" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1627.452584] env[62519]: DEBUG nova.network.neutron [None req-6e183849-567e-4bbd-988c-d4ca57b40836 tempest-VolumesAdminNegativeTest-694306887 tempest-VolumesAdminNegativeTest-694306887-project-member] [instance: ed716912-752e-4c6d-b6c6-fb349668fa93] Successfully created port: ef550f91-d2ec-415e-8729-4b311d76b126 {{(pid=62519) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1627.468675] env[62519]: DEBUG nova.compute.manager [None req-6e183849-567e-4bbd-988c-d4ca57b40836 tempest-VolumesAdminNegativeTest-694306887 tempest-VolumesAdminNegativeTest-694306887-project-member] [instance: ed716912-752e-4c6d-b6c6-fb349668fa93] Start building block device mappings for instance. {{(pid=62519) _build_resources /opt/stack/nova/nova/compute/manager.py:2873}} [ 1627.485057] env[62519]: DEBUG oslo_vmware.api [None req-484beabb-56ac-464b-a93a-122561b99fee tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]5262a8b8-59b4-0c3c-b506-6bad1bcf8352, 'name': SearchDatastore_Task, 'duration_secs': 0.012915} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1627.486171] env[62519]: DEBUG oslo_concurrency.lockutils [None req-484beabb-56ac-464b-a93a-122561b99fee tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Releasing lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1627.486427] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-484beabb-56ac-464b-a93a-122561b99fee tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] [instance: 46b3a0fb-29f6-4b66-a091-2d125b69d109] Processing image 15793716-f1d9-4a86-9030-717adf498693 {{(pid=62519) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1627.486658] env[62519]: DEBUG oslo_concurrency.lockutils [None req-484beabb-56ac-464b-a93a-122561b99fee tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1627.486797] env[62519]: DEBUG oslo_concurrency.lockutils [None req-484beabb-56ac-464b-a93a-122561b99fee tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Acquired lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1627.486966] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-484beabb-56ac-464b-a93a-122561b99fee tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62519) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1627.487446] env[62519]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-973dce05-1d94-4acc-ab1d-602f4197e1e3 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1627.498488] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-484beabb-56ac-464b-a93a-122561b99fee tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62519) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1627.498488] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-484beabb-56ac-464b-a93a-122561b99fee tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62519) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1627.501424] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a76953ff-7dfd-4dc0-9c1e-1000ed64398a {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1627.508083] env[62519]: DEBUG nova.compute.manager [None req-efec4a27-e1ca-4ade-8ea9-071f6e4c700f tempest-ServerActionsV293TestJSON-760697799 tempest-ServerActionsV293TestJSON-760697799-project-member] [instance: bace23b3-b7f4-4f3b-8986-0076440d096d] Start spawning the instance on the hypervisor. {{(pid=62519) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2647}} [ 1627.508083] env[62519]: DEBUG nova.virt.hardware [None req-efec4a27-e1ca-4ade-8ea9-071f6e4c700f tempest-ServerActionsV293TestJSON-760697799 tempest-ServerActionsV293TestJSON-760697799-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T07:56:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=0,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=1073741824,status='active',tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1627.508083] env[62519]: DEBUG nova.virt.hardware [None req-efec4a27-e1ca-4ade-8ea9-071f6e4c700f tempest-ServerActionsV293TestJSON-760697799 tempest-ServerActionsV293TestJSON-760697799-project-member] Flavor limits 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1627.508083] env[62519]: DEBUG nova.virt.hardware [None req-efec4a27-e1ca-4ade-8ea9-071f6e4c700f tempest-ServerActionsV293TestJSON-760697799 tempest-ServerActionsV293TestJSON-760697799-project-member] Image limits 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1627.508083] env[62519]: DEBUG nova.virt.hardware [None req-efec4a27-e1ca-4ade-8ea9-071f6e4c700f tempest-ServerActionsV293TestJSON-760697799 tempest-ServerActionsV293TestJSON-760697799-project-member] Flavor pref 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1627.508083] env[62519]: DEBUG nova.virt.hardware [None req-efec4a27-e1ca-4ade-8ea9-071f6e4c700f tempest-ServerActionsV293TestJSON-760697799 tempest-ServerActionsV293TestJSON-760697799-project-member] Image pref 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1627.508083] env[62519]: DEBUG nova.virt.hardware [None req-efec4a27-e1ca-4ade-8ea9-071f6e4c700f tempest-ServerActionsV293TestJSON-760697799 tempest-ServerActionsV293TestJSON-760697799-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1627.508395] env[62519]: DEBUG nova.virt.hardware [None req-efec4a27-e1ca-4ade-8ea9-071f6e4c700f tempest-ServerActionsV293TestJSON-760697799 tempest-ServerActionsV293TestJSON-760697799-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum 
VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1627.508509] env[62519]: DEBUG nova.virt.hardware [None req-efec4a27-e1ca-4ade-8ea9-071f6e4c700f tempest-ServerActionsV293TestJSON-760697799 tempest-ServerActionsV293TestJSON-760697799-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62519) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1627.508684] env[62519]: DEBUG nova.virt.hardware [None req-efec4a27-e1ca-4ade-8ea9-071f6e4c700f tempest-ServerActionsV293TestJSON-760697799 tempest-ServerActionsV293TestJSON-760697799-project-member] Got 1 possible topologies {{(pid=62519) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1627.508842] env[62519]: DEBUG nova.virt.hardware [None req-efec4a27-e1ca-4ade-8ea9-071f6e4c700f tempest-ServerActionsV293TestJSON-760697799 tempest-ServerActionsV293TestJSON-760697799-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1627.509013] env[62519]: DEBUG nova.virt.hardware [None req-efec4a27-e1ca-4ade-8ea9-071f6e4c700f tempest-ServerActionsV293TestJSON-760697799 tempest-ServerActionsV293TestJSON-760697799-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1627.511166] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb2777b4-fe18-4837-aea3-586e0405c2bb {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1627.516307] env[62519]: DEBUG oslo_vmware.api [None req-484beabb-56ac-464b-a93a-122561b99fee tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Waiting for the task: (returnval){ [ 1627.516307] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]521c833a-20f3-cd5b-c564-4d71df846507" [ 1627.516307] env[62519]: _type = "Task" [ 1627.516307] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1627.525890] env[62519]: DEBUG nova.network.neutron [req-55e7dcc9-d91c-4ff0-b3c4-1553927f7262 req-9862323a-87b8-40ae-a685-7f1394e10304 service nova] [instance: 46b3a0fb-29f6-4b66-a091-2d125b69d109] Updated VIF entry in instance network info cache for port bcc785a2-3385-4fe4-85fc-7540000eb36b. 
{{(pid=62519) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1627.526357] env[62519]: DEBUG nova.network.neutron [req-55e7dcc9-d91c-4ff0-b3c4-1553927f7262 req-9862323a-87b8-40ae-a685-7f1394e10304 service nova] [instance: 46b3a0fb-29f6-4b66-a091-2d125b69d109] Updating instance_info_cache with network_info: [{"id": "bcc785a2-3385-4fe4-85fc-7540000eb36b", "address": "fa:16:3e:9d:ab:f3", "network": {"id": "06bc21fc-5712-4650-9d06-18e9cc6afd29", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1062109402-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "17cd969b1e7d4bd795748560caf80077", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "89f807d9-140f-4a6f-8bce-96795f9482ee", "external-id": "nsx-vlan-transportzone-762", "segmentation_id": 762, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbcc785a2-33", "ovs_interfaceid": "bcc785a2-3385-4fe4-85fc-7540000eb36b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1627.530398] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1905acce-5db5-41d5-93c9-faa6471d7b2b {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1627.541253] env[62519]: DEBUG oslo_vmware.api [None req-484beabb-56ac-464b-a93a-122561b99fee tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]521c833a-20f3-cd5b-c564-4d71df846507, 'name': SearchDatastore_Task, 'duration_secs': 0.011561} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1627.546320] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6478c8b9-5533-4fad-83a0-55e8a8c512b6 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1627.561627] env[62519]: DEBUG oslo_vmware.api [None req-484beabb-56ac-464b-a93a-122561b99fee tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Waiting for the task: (returnval){ [ 1627.561627] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]529b5137-4078-20a1-d6e7-3446f6e1871a" [ 1627.561627] env[62519]: _type = "Task" [ 1627.561627] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1627.572278] env[62519]: DEBUG oslo_vmware.api [None req-484beabb-56ac-464b-a93a-122561b99fee tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]529b5137-4078-20a1-d6e7-3446f6e1871a, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1627.572975] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b476d3c-5095-4dd8-bdb5-51b443822cb9 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1627.580503] env[62519]: DEBUG nova.network.neutron [None req-efec4a27-e1ca-4ade-8ea9-071f6e4c700f tempest-ServerActionsV293TestJSON-760697799 tempest-ServerActionsV293TestJSON-760697799-project-member] [instance: bace23b3-b7f4-4f3b-8986-0076440d096d] Instance cache missing network info. {{(pid=62519) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1627.582997] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b9bad76-fafd-4a8a-bde4-c6dd6224f724 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1627.617337] env[62519]: DEBUG oslo_concurrency.lockutils [None req-ad7bb8ca-d7d9-41c0-86cb-2bc2a1a52dc5 tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1627.620479] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b4877f1-1c4f-42e7-bc39-96fe553be6fb {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1627.629471] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b6ef092-ac9b-49e4-9a61-5a42dabc0811 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1627.643492] env[62519]: DEBUG nova.compute.provider_tree [None req-eb756757-24ba-440f-8631-f23661662462 tempest-ImagesOneServerTestJSON-1959164190 tempest-ImagesOneServerTestJSON-1959164190-project-member] Inventory has not changed in ProviderTree for provider: f8ca0d98-9158-4b85-ae0e-b106f966dd44 {{(pid=62519) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1627.683506] env[62519]: DEBUG oslo_vmware.api [None req-eb7b956f-b805-4696-91cf-83b33daf0afe tempest-ServersTestMultiNic-797441404 tempest-ServersTestMultiNic-797441404-project-member] Task: {'id': task-1802581, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1627.742963] env[62519]: DEBUG oslo_vmware.api [None req-852acb1c-a919-4af2-9465-5ce6a7ca4001 tempest-TenantUsagesTestJSON-1732852836 tempest-TenantUsagesTestJSON-1732852836-project-member] Task: {'id': task-1802580, 'name': ExtendVirtualDisk_Task, 'duration_secs': 1.016632} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1627.743316] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-852acb1c-a919-4af2-9465-5ce6a7ca4001 tempest-TenantUsagesTestJSON-1732852836 tempest-TenantUsagesTestJSON-1732852836-project-member] [instance: 540167be-5295-4e28-9b25-16317746dd0e] Extended root virtual disk {{(pid=62519) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1627.744226] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19b45c08-b1d4-4124-a512-c9faff43005d {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1627.771021] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-852acb1c-a919-4af2-9465-5ce6a7ca4001 tempest-TenantUsagesTestJSON-1732852836 tempest-TenantUsagesTestJSON-1732852836-project-member] [instance: 540167be-5295-4e28-9b25-16317746dd0e] Reconfiguring VM instance instance-00000039 to attach disk [datastore1] 540167be-5295-4e28-9b25-16317746dd0e/540167be-5295-4e28-9b25-16317746dd0e.vmdk or device None with type sparse {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1627.772736] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4a9ded3d-8306-484d-b533-ca3bbc2390e0 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1627.798328] env[62519]: DEBUG oslo_vmware.api [None req-852acb1c-a919-4af2-9465-5ce6a7ca4001 tempest-TenantUsagesTestJSON-1732852836 tempest-TenantUsagesTestJSON-1732852836-project-member] Waiting for the task: (returnval){ [ 1627.798328] env[62519]: value = "task-1802583" [ 1627.798328] env[62519]: _type = "Task" [ 1627.798328] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1627.807644] env[62519]: DEBUG oslo_vmware.api [None req-852acb1c-a919-4af2-9465-5ce6a7ca4001 tempest-TenantUsagesTestJSON-1732852836 tempest-TenantUsagesTestJSON-1732852836-project-member] Task: {'id': task-1802583, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1627.811912] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-64126044-6a15-4d18-9748-0958bc8df406 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] [instance: a1551278-a306-4534-8d8d-3b3a003dde04] Powering on the VM {{(pid=62519) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1627.812282] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-502d21ed-d703-4f71-b09f-02d2abe184f3 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1627.827248] env[62519]: DEBUG oslo_vmware.api [None req-1b7ebb10-9719-4c2b-be48-1e9d9d400c00 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Task: {'id': task-1802582, 'name': CreateSnapshot_Task} progress is 100%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1627.830358] env[62519]: DEBUG oslo_vmware.api [None req-64126044-6a15-4d18-9748-0958bc8df406 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Waiting for the task: (returnval){ [ 1627.830358] env[62519]: value = "task-1802584" [ 1627.830358] env[62519]: _type = "Task" [ 1627.830358] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1627.845151] env[62519]: DEBUG oslo_vmware.api [None req-64126044-6a15-4d18-9748-0958bc8df406 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Task: {'id': task-1802584, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1627.850601] env[62519]: DEBUG nova.network.neutron [None req-efec4a27-e1ca-4ade-8ea9-071f6e4c700f tempest-ServerActionsV293TestJSON-760697799 tempest-ServerActionsV293TestJSON-760697799-project-member] [instance: bace23b3-b7f4-4f3b-8986-0076440d096d] Updating instance_info_cache with network_info: [{"id": "e252f987-8815-4905-a206-237207d5ac4b", "address": "fa:16:3e:52:cb:aa", "network": {"id": "7ee257b5-e74c-4a05-bbf8-b502402717ed", "bridge": "br-int", "label": "tempest-ServerActionsV293TestJSON-630407603-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3038814dc07448fca423fb8c8dbe42c3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cbe1725d-6711-4e92-9a4e-d4802651e7d0", "external-id": "nsx-vlan-transportzone-679", "segmentation_id": 679, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape252f987-88", "ovs_interfaceid": "e252f987-8815-4905-a206-237207d5ac4b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1628.029222] env[62519]: DEBUG oslo_concurrency.lockutils [req-55e7dcc9-d91c-4ff0-b3c4-1553927f7262 req-9862323a-87b8-40ae-a685-7f1394e10304 service nova] Releasing lock "refresh_cache-46b3a0fb-29f6-4b66-a091-2d125b69d109" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1628.029558] env[62519]: DEBUG nova.compute.manager [req-55e7dcc9-d91c-4ff0-b3c4-1553927f7262 req-9862323a-87b8-40ae-a685-7f1394e10304 service nova] [instance: 80ef3fd4-b9ef-4fd2-a991-feec78a0c81d] Received event network-vif-deleted-3dd85b46-bf0d-4e78-8975-e0c0cb8b29d8 {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1628.030132] env[62519]: DEBUG nova.compute.manager [req-55e7dcc9-d91c-4ff0-b3c4-1553927f7262 req-9862323a-87b8-40ae-a685-7f1394e10304 service nova] [instance: 27f9e890-4733-43aa-9bf1-351d42d75418] Received event network-vif-deleted-85d5a3eb-9d3d-46ac-b538-3e317a13e6fc {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 
1628.030132] env[62519]: DEBUG nova.compute.manager [req-55e7dcc9-d91c-4ff0-b3c4-1553927f7262 req-9862323a-87b8-40ae-a685-7f1394e10304 service nova] [instance: 1d4b14d3-8832-457e-aaed-462236555f57] Received event network-vif-deleted-54d71145-cb5f-4e36-b3dd-2905575f9742 {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1628.030132] env[62519]: INFO nova.compute.manager [req-55e7dcc9-d91c-4ff0-b3c4-1553927f7262 req-9862323a-87b8-40ae-a685-7f1394e10304 service nova] [instance: 1d4b14d3-8832-457e-aaed-462236555f57] Neutron deleted interface 54d71145-cb5f-4e36-b3dd-2905575f9742; detaching it from the instance and deleting it from the info cache [ 1628.030293] env[62519]: DEBUG nova.network.neutron [req-55e7dcc9-d91c-4ff0-b3c4-1553927f7262 req-9862323a-87b8-40ae-a685-7f1394e10304 service nova] [instance: 1d4b14d3-8832-457e-aaed-462236555f57] Updating instance_info_cache with network_info: [] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1628.073272] env[62519]: DEBUG oslo_vmware.api [None req-484beabb-56ac-464b-a93a-122561b99fee tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]529b5137-4078-20a1-d6e7-3446f6e1871a, 'name': SearchDatastore_Task, 'duration_secs': 0.0111} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1628.073681] env[62519]: DEBUG oslo_concurrency.lockutils [None req-484beabb-56ac-464b-a93a-122561b99fee tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Releasing lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1628.074137] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-484beabb-56ac-464b-a93a-122561b99fee tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk to [datastore1] 46b3a0fb-29f6-4b66-a091-2d125b69d109/46b3a0fb-29f6-4b66-a091-2d125b69d109.vmdk {{(pid=62519) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1628.074452] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-fc31c16d-155b-42a1-af26-0c929bfc5ff3 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1628.082259] env[62519]: DEBUG oslo_vmware.api [None req-484beabb-56ac-464b-a93a-122561b99fee tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Waiting for the task: (returnval){ [ 1628.082259] env[62519]: value = "task-1802585" [ 1628.082259] env[62519]: _type = "Task" [ 1628.082259] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1628.091938] env[62519]: DEBUG oslo_vmware.api [None req-484beabb-56ac-464b-a93a-122561b99fee tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Task: {'id': task-1802585, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1628.147939] env[62519]: DEBUG nova.scheduler.client.report [None req-eb756757-24ba-440f-8631-f23661662462 tempest-ImagesOneServerTestJSON-1959164190 tempest-ImagesOneServerTestJSON-1959164190-project-member] Inventory has not changed for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 157, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1628.185833] env[62519]: DEBUG oslo_vmware.api [None req-eb7b956f-b805-4696-91cf-83b33daf0afe tempest-ServersTestMultiNic-797441404 tempest-ServersTestMultiNic-797441404-project-member] Task: {'id': task-1802581, 'name': ReconfigVM_Task, 'duration_secs': 1.080685} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1628.186261] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-eb7b956f-b805-4696-91cf-83b33daf0afe tempest-ServersTestMultiNic-797441404 tempest-ServersTestMultiNic-797441404-project-member] [instance: d5b6b7b4-ebc4-4d3d-8e16-bf0a4e974d67] Reconfigured VM instance instance-00000037 to attach disk [datastore1] d5b6b7b4-ebc4-4d3d-8e16-bf0a4e974d67/d5b6b7b4-ebc4-4d3d-8e16-bf0a4e974d67.vmdk or device None with type sparse {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1628.187552] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-c63a6b2e-2b72-43fb-b073-a11f9ae7eb3d {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1628.196902] env[62519]: DEBUG oslo_vmware.api [None req-eb7b956f-b805-4696-91cf-83b33daf0afe tempest-ServersTestMultiNic-797441404 tempest-ServersTestMultiNic-797441404-project-member] Waiting for the task: (returnval){ [ 1628.196902] env[62519]: value = "task-1802586" [ 1628.196902] env[62519]: _type = "Task" [ 1628.196902] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1628.208210] env[62519]: DEBUG oslo_vmware.api [None req-eb7b956f-b805-4696-91cf-83b33daf0afe tempest-ServersTestMultiNic-797441404 tempest-ServersTestMultiNic-797441404-project-member] Task: {'id': task-1802586, 'name': Rename_Task} progress is 5%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1628.310856] env[62519]: DEBUG oslo_vmware.api [None req-852acb1c-a919-4af2-9465-5ce6a7ca4001 tempest-TenantUsagesTestJSON-1732852836 tempest-TenantUsagesTestJSON-1732852836-project-member] Task: {'id': task-1802583, 'name': ReconfigVM_Task, 'duration_secs': 0.288336} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1628.310985] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-852acb1c-a919-4af2-9465-5ce6a7ca4001 tempest-TenantUsagesTestJSON-1732852836 tempest-TenantUsagesTestJSON-1732852836-project-member] [instance: 540167be-5295-4e28-9b25-16317746dd0e] Reconfigured VM instance instance-00000039 to attach disk [datastore1] 540167be-5295-4e28-9b25-16317746dd0e/540167be-5295-4e28-9b25-16317746dd0e.vmdk or device None with type sparse {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1628.311674] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-c3515f29-160f-4728-ae5e-ebf9c9652e04 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1628.325283] env[62519]: DEBUG oslo_vmware.api [None req-1b7ebb10-9719-4c2b-be48-1e9d9d400c00 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Task: {'id': task-1802582, 'name': CreateSnapshot_Task, 'duration_secs': 0.924606} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1628.326900] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-1b7ebb10-9719-4c2b-be48-1e9d9d400c00 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] [instance: c60f5d73-9d6d-4b5f-b71b-00b6b787d482] Created Snapshot of the VM instance {{(pid=62519) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1628.327388] env[62519]: DEBUG oslo_vmware.api [None req-852acb1c-a919-4af2-9465-5ce6a7ca4001 tempest-TenantUsagesTestJSON-1732852836 tempest-TenantUsagesTestJSON-1732852836-project-member] Waiting for the task: (returnval){ [ 1628.327388] env[62519]: value = "task-1802587" [ 1628.327388] env[62519]: _type = "Task" [ 1628.327388] env[62519]: } to complete. 
{{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1628.328118] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce664327-7fc4-4649-b230-9b8cb3782bca {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1628.336945] env[62519]: DEBUG oslo_concurrency.lockutils [None req-72bc2465-a50c-495c-a43f-faf26f866c6b tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Acquiring lock "dac173ff-1807-405f-a59c-bb2efef62a0c" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1628.337722] env[62519]: DEBUG oslo_concurrency.lockutils [None req-72bc2465-a50c-495c-a43f-faf26f866c6b tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Lock "dac173ff-1807-405f-a59c-bb2efef62a0c" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1628.337722] env[62519]: DEBUG oslo_concurrency.lockutils [None req-72bc2465-a50c-495c-a43f-faf26f866c6b tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Acquiring lock "dac173ff-1807-405f-a59c-bb2efef62a0c-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1628.337881] env[62519]: DEBUG oslo_concurrency.lockutils [None req-72bc2465-a50c-495c-a43f-faf26f866c6b tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Lock "dac173ff-1807-405f-a59c-bb2efef62a0c-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1628.338116] env[62519]: DEBUG oslo_concurrency.lockutils [None req-72bc2465-a50c-495c-a43f-faf26f866c6b tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Lock "dac173ff-1807-405f-a59c-bb2efef62a0c-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1628.351087] env[62519]: INFO nova.compute.manager [None req-72bc2465-a50c-495c-a43f-faf26f866c6b tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] [instance: dac173ff-1807-405f-a59c-bb2efef62a0c] Terminating instance [ 1628.353039] env[62519]: DEBUG oslo_concurrency.lockutils [None req-efec4a27-e1ca-4ade-8ea9-071f6e4c700f tempest-ServerActionsV293TestJSON-760697799 tempest-ServerActionsV293TestJSON-760697799-project-member] Releasing lock "refresh_cache-bace23b3-b7f4-4f3b-8986-0076440d096d" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1628.353386] env[62519]: DEBUG nova.compute.manager [None req-efec4a27-e1ca-4ade-8ea9-071f6e4c700f tempest-ServerActionsV293TestJSON-760697799 tempest-ServerActionsV293TestJSON-760697799-project-member] [instance: 
bace23b3-b7f4-4f3b-8986-0076440d096d] Instance network_info: |[{"id": "e252f987-8815-4905-a206-237207d5ac4b", "address": "fa:16:3e:52:cb:aa", "network": {"id": "7ee257b5-e74c-4a05-bbf8-b502402717ed", "bridge": "br-int", "label": "tempest-ServerActionsV293TestJSON-630407603-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3038814dc07448fca423fb8c8dbe42c3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cbe1725d-6711-4e92-9a4e-d4802651e7d0", "external-id": "nsx-vlan-transportzone-679", "segmentation_id": 679, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape252f987-88", "ovs_interfaceid": "e252f987-8815-4905-a206-237207d5ac4b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62519) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2004}} [ 1628.355600] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-efec4a27-e1ca-4ade-8ea9-071f6e4c700f tempest-ServerActionsV293TestJSON-760697799 tempest-ServerActionsV293TestJSON-760697799-project-member] [instance: bace23b3-b7f4-4f3b-8986-0076440d096d] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:52:cb:aa', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'cbe1725d-6711-4e92-9a4e-d4802651e7d0', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'e252f987-8815-4905-a206-237207d5ac4b', 'vif_model': 'vmxnet3'}] {{(pid=62519) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1628.365173] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-efec4a27-e1ca-4ade-8ea9-071f6e4c700f tempest-ServerActionsV293TestJSON-760697799 tempest-ServerActionsV293TestJSON-760697799-project-member] Creating folder: Project (3038814dc07448fca423fb8c8dbe42c3). Parent ref: group-v373567. {{(pid=62519) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1628.374730] env[62519]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-eab340ec-0449-4534-a1df-7c45958e0f47 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1628.376692] env[62519]: DEBUG oslo_vmware.api [None req-64126044-6a15-4d18-9748-0958bc8df406 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Task: {'id': task-1802584, 'name': PowerOnVM_Task, 'duration_secs': 0.429657} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1628.377356] env[62519]: DEBUG oslo_vmware.api [None req-852acb1c-a919-4af2-9465-5ce6a7ca4001 tempest-TenantUsagesTestJSON-1732852836 tempest-TenantUsagesTestJSON-1732852836-project-member] Task: {'id': task-1802587, 'name': Rename_Task} progress is 14%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1628.380024] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-64126044-6a15-4d18-9748-0958bc8df406 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] [instance: a1551278-a306-4534-8d8d-3b3a003dde04] Powered on the VM {{(pid=62519) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1628.380024] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-64126044-6a15-4d18-9748-0958bc8df406 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] [instance: a1551278-a306-4534-8d8d-3b3a003dde04] Updating instance 'a1551278-a306-4534-8d8d-3b3a003dde04' progress to 100 {{(pid=62519) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1628.394522] env[62519]: WARNING suds.client [-] Web service reported a SOAP processing fault using an unexpected HTTP status code 200. Reporting as an internal server error. [ 1628.394709] env[62519]: DEBUG oslo_vmware.api [-] Fault list: [DuplicateName] {{(pid=62519) _invoke_api /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:337}} [ 1628.395106] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-efec4a27-e1ca-4ade-8ea9-071f6e4c700f tempest-ServerActionsV293TestJSON-760697799 tempest-ServerActionsV293TestJSON-760697799-project-member] Folder already exists: Project (3038814dc07448fca423fb8c8dbe42c3). Parent ref: group-v373567. {{(pid=62519) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1609}} [ 1628.395376] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-efec4a27-e1ca-4ade-8ea9-071f6e4c700f tempest-ServerActionsV293TestJSON-760697799 tempest-ServerActionsV293TestJSON-760697799-project-member] Creating folder: Instances. Parent ref: group-v373691. {{(pid=62519) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1628.395976] env[62519]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-4460f138-fff9-4071-84f3-130c501f73f7 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1628.410115] env[62519]: INFO nova.virt.vmwareapi.vm_util [None req-efec4a27-e1ca-4ade-8ea9-071f6e4c700f tempest-ServerActionsV293TestJSON-760697799 tempest-ServerActionsV293TestJSON-760697799-project-member] Created folder: Instances in parent group-v373691. [ 1628.410451] env[62519]: DEBUG oslo.service.loopingcall [None req-efec4a27-e1ca-4ade-8ea9-071f6e4c700f tempest-ServerActionsV293TestJSON-760697799 tempest-ServerActionsV293TestJSON-760697799-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62519) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1628.410684] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: bace23b3-b7f4-4f3b-8986-0076440d096d] Creating VM on the ESX host {{(pid=62519) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1628.410915] env[62519]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-2f2acd63-3e4a-4829-9c16-4dd11c74bceb {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1628.433713] env[62519]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1628.433713] env[62519]: value = "task-1802590" [ 1628.433713] env[62519]: _type = "Task" [ 1628.433713] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1628.445447] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1802590, 'name': CreateVM_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1628.481985] env[62519]: DEBUG nova.compute.manager [None req-6e183849-567e-4bbd-988c-d4ca57b40836 tempest-VolumesAdminNegativeTest-694306887 tempest-VolumesAdminNegativeTest-694306887-project-member] [instance: ed716912-752e-4c6d-b6c6-fb349668fa93] Start spawning the instance on the hypervisor. {{(pid=62519) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2647}} [ 1628.516020] env[62519]: DEBUG nova.virt.hardware [None req-6e183849-567e-4bbd-988c-d4ca57b40836 tempest-VolumesAdminNegativeTest-694306887 tempest-VolumesAdminNegativeTest-694306887-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T07:56:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T07:55:55Z,direct_url=,disk_format='vmdk',id=15793716-f1d9-4a86-9030-717adf498693,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4069337684d348e0a1ab4eb6a1a2b14d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T07:55:56Z,virtual_size=,visibility=), allow threads: False {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1628.516266] env[62519]: DEBUG nova.virt.hardware [None req-6e183849-567e-4bbd-988c-d4ca57b40836 tempest-VolumesAdminNegativeTest-694306887 tempest-VolumesAdminNegativeTest-694306887-project-member] Flavor limits 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1628.516458] env[62519]: DEBUG nova.virt.hardware [None req-6e183849-567e-4bbd-988c-d4ca57b40836 tempest-VolumesAdminNegativeTest-694306887 tempest-VolumesAdminNegativeTest-694306887-project-member] Image limits 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1628.516669] env[62519]: DEBUG nova.virt.hardware [None req-6e183849-567e-4bbd-988c-d4ca57b40836 tempest-VolumesAdminNegativeTest-694306887 tempest-VolumesAdminNegativeTest-694306887-project-member] Flavor pref 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1628.516821] env[62519]: DEBUG 
nova.virt.hardware [None req-6e183849-567e-4bbd-988c-d4ca57b40836 tempest-VolumesAdminNegativeTest-694306887 tempest-VolumesAdminNegativeTest-694306887-project-member] Image pref 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1628.517948] env[62519]: DEBUG nova.virt.hardware [None req-6e183849-567e-4bbd-988c-d4ca57b40836 tempest-VolumesAdminNegativeTest-694306887 tempest-VolumesAdminNegativeTest-694306887-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1628.517948] env[62519]: DEBUG nova.virt.hardware [None req-6e183849-567e-4bbd-988c-d4ca57b40836 tempest-VolumesAdminNegativeTest-694306887 tempest-VolumesAdminNegativeTest-694306887-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1628.517948] env[62519]: DEBUG nova.virt.hardware [None req-6e183849-567e-4bbd-988c-d4ca57b40836 tempest-VolumesAdminNegativeTest-694306887 tempest-VolumesAdminNegativeTest-694306887-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62519) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1628.517948] env[62519]: DEBUG nova.virt.hardware [None req-6e183849-567e-4bbd-988c-d4ca57b40836 tempest-VolumesAdminNegativeTest-694306887 tempest-VolumesAdminNegativeTest-694306887-project-member] Got 1 possible topologies {{(pid=62519) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1628.517948] env[62519]: DEBUG nova.virt.hardware [None req-6e183849-567e-4bbd-988c-d4ca57b40836 tempest-VolumesAdminNegativeTest-694306887 tempest-VolumesAdminNegativeTest-694306887-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1628.517948] env[62519]: DEBUG nova.virt.hardware [None req-6e183849-567e-4bbd-988c-d4ca57b40836 tempest-VolumesAdminNegativeTest-694306887 tempest-VolumesAdminNegativeTest-694306887-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1628.519402] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc939f1e-5dee-497e-9d00-3062930a7821 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1628.529603] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0f73ce3-76d2-423a-8727-b95292379daa {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1628.535490] env[62519]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-17effb1c-fe97-48e1-a913-a38b519586ea {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1628.554643] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7dd520a0-b3a3-4c15-b708-727cacbca83c {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1628.601248] env[62519]: DEBUG 
nova.compute.manager [req-55e7dcc9-d91c-4ff0-b3c4-1553927f7262 req-9862323a-87b8-40ae-a685-7f1394e10304 service nova] [instance: 1d4b14d3-8832-457e-aaed-462236555f57] Detach interface failed, port_id=54d71145-cb5f-4e36-b3dd-2905575f9742, reason: Instance 1d4b14d3-8832-457e-aaed-462236555f57 could not be found. {{(pid=62519) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11455}} [ 1628.601248] env[62519]: DEBUG nova.compute.manager [req-55e7dcc9-d91c-4ff0-b3c4-1553927f7262 req-9862323a-87b8-40ae-a685-7f1394e10304 service nova] [instance: bace23b3-b7f4-4f3b-8986-0076440d096d] Received event network-vif-plugged-e252f987-8815-4905-a206-237207d5ac4b {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1628.601248] env[62519]: DEBUG oslo_concurrency.lockutils [req-55e7dcc9-d91c-4ff0-b3c4-1553927f7262 req-9862323a-87b8-40ae-a685-7f1394e10304 service nova] Acquiring lock "bace23b3-b7f4-4f3b-8986-0076440d096d-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1628.601248] env[62519]: DEBUG oslo_concurrency.lockutils [req-55e7dcc9-d91c-4ff0-b3c4-1553927f7262 req-9862323a-87b8-40ae-a685-7f1394e10304 service nova] Lock "bace23b3-b7f4-4f3b-8986-0076440d096d-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1628.601248] env[62519]: DEBUG oslo_concurrency.lockutils [req-55e7dcc9-d91c-4ff0-b3c4-1553927f7262 req-9862323a-87b8-40ae-a685-7f1394e10304 service nova] Lock "bace23b3-b7f4-4f3b-8986-0076440d096d-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1628.601248] env[62519]: DEBUG nova.compute.manager [req-55e7dcc9-d91c-4ff0-b3c4-1553927f7262 req-9862323a-87b8-40ae-a685-7f1394e10304 service nova] [instance: bace23b3-b7f4-4f3b-8986-0076440d096d] No waiting events found dispatching network-vif-plugged-e252f987-8815-4905-a206-237207d5ac4b {{(pid=62519) pop_instance_event /opt/stack/nova/nova/compute/manager.py:323}} [ 1628.601248] env[62519]: WARNING nova.compute.manager [req-55e7dcc9-d91c-4ff0-b3c4-1553927f7262 req-9862323a-87b8-40ae-a685-7f1394e10304 service nova] [instance: bace23b3-b7f4-4f3b-8986-0076440d096d] Received unexpected event network-vif-plugged-e252f987-8815-4905-a206-237207d5ac4b for instance with vm_state building and task_state block_device_mapping. [ 1628.611670] env[62519]: DEBUG oslo_vmware.api [None req-484beabb-56ac-464b-a93a-122561b99fee tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Task: {'id': task-1802585, 'name': CopyVirtualDisk_Task} progress is 77%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1628.625958] env[62519]: DEBUG nova.compute.manager [req-9fe378df-d44e-46f5-876d-8c1355971095 req-fab1db92-8efc-4799-98c3-d108af8c3899 service nova] [instance: bace23b3-b7f4-4f3b-8986-0076440d096d] Received event network-changed-e252f987-8815-4905-a206-237207d5ac4b {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1628.626060] env[62519]: DEBUG nova.compute.manager [req-9fe378df-d44e-46f5-876d-8c1355971095 req-fab1db92-8efc-4799-98c3-d108af8c3899 service nova] [instance: bace23b3-b7f4-4f3b-8986-0076440d096d] Refreshing instance network info cache due to event network-changed-e252f987-8815-4905-a206-237207d5ac4b. {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11628}} [ 1628.626284] env[62519]: DEBUG oslo_concurrency.lockutils [req-9fe378df-d44e-46f5-876d-8c1355971095 req-fab1db92-8efc-4799-98c3-d108af8c3899 service nova] Acquiring lock "refresh_cache-bace23b3-b7f4-4f3b-8986-0076440d096d" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1628.626412] env[62519]: DEBUG oslo_concurrency.lockutils [req-9fe378df-d44e-46f5-876d-8c1355971095 req-fab1db92-8efc-4799-98c3-d108af8c3899 service nova] Acquired lock "refresh_cache-bace23b3-b7f4-4f3b-8986-0076440d096d" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1628.626578] env[62519]: DEBUG nova.network.neutron [req-9fe378df-d44e-46f5-876d-8c1355971095 req-fab1db92-8efc-4799-98c3-d108af8c3899 service nova] [instance: bace23b3-b7f4-4f3b-8986-0076440d096d] Refreshing network info cache for port e252f987-8815-4905-a206-237207d5ac4b {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1628.652942] env[62519]: DEBUG oslo_concurrency.lockutils [None req-eb756757-24ba-440f-8631-f23661662462 tempest-ImagesOneServerTestJSON-1959164190 tempest-ImagesOneServerTestJSON-1959164190-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.197s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1628.655368] env[62519]: DEBUG oslo_concurrency.lockutils [None req-7769f008-9fdc-4c87-9eab-a05d54387360 tempest-SecurityGroupsTestJSON-38868017 tempest-SecurityGroupsTestJSON-38868017-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 25.530s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1628.655624] env[62519]: DEBUG nova.objects.instance [None req-7769f008-9fdc-4c87-9eab-a05d54387360 tempest-SecurityGroupsTestJSON-38868017 tempest-SecurityGroupsTestJSON-38868017-project-member] Lazy-loading 'resources' on Instance uuid ad374dd9-a92d-4b76-9609-7562346e05a8 {{(pid=62519) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1628.683484] env[62519]: INFO nova.scheduler.client.report [None req-eb756757-24ba-440f-8631-f23661662462 tempest-ImagesOneServerTestJSON-1959164190 tempest-ImagesOneServerTestJSON-1959164190-project-member] Deleted allocations for instance 4e3dee19-b99a-4257-88da-1b0531e2c0f9 [ 1628.708791] env[62519]: DEBUG oslo_vmware.api [None req-eb7b956f-b805-4696-91cf-83b33daf0afe tempest-ServersTestMultiNic-797441404 tempest-ServersTestMultiNic-797441404-project-member] Task: {'id': 
task-1802586, 'name': Rename_Task, 'duration_secs': 0.182862} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1628.708791] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-eb7b956f-b805-4696-91cf-83b33daf0afe tempest-ServersTestMultiNic-797441404 tempest-ServersTestMultiNic-797441404-project-member] [instance: d5b6b7b4-ebc4-4d3d-8e16-bf0a4e974d67] Powering on the VM {{(pid=62519) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1628.708791] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c1a4feb8-dec3-4984-a7f7-4df96d7ab7fc {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1628.718085] env[62519]: DEBUG oslo_vmware.api [None req-eb7b956f-b805-4696-91cf-83b33daf0afe tempest-ServersTestMultiNic-797441404 tempest-ServersTestMultiNic-797441404-project-member] Waiting for the task: (returnval){ [ 1628.718085] env[62519]: value = "task-1802591" [ 1628.718085] env[62519]: _type = "Task" [ 1628.718085] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1628.727430] env[62519]: DEBUG oslo_vmware.api [None req-eb7b956f-b805-4696-91cf-83b33daf0afe tempest-ServersTestMultiNic-797441404 tempest-ServersTestMultiNic-797441404-project-member] Task: {'id': task-1802591, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1628.843379] env[62519]: DEBUG oslo_vmware.api [None req-852acb1c-a919-4af2-9465-5ce6a7ca4001 tempest-TenantUsagesTestJSON-1732852836 tempest-TenantUsagesTestJSON-1732852836-project-member] Task: {'id': task-1802587, 'name': Rename_Task, 'duration_secs': 0.407022} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1628.843987] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-852acb1c-a919-4af2-9465-5ce6a7ca4001 tempest-TenantUsagesTestJSON-1732852836 tempest-TenantUsagesTestJSON-1732852836-project-member] [instance: 540167be-5295-4e28-9b25-16317746dd0e] Powering on the VM {{(pid=62519) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1628.843987] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-833edc0d-eaad-4f48-a08d-af203b4f9231 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1628.851125] env[62519]: DEBUG oslo_vmware.api [None req-852acb1c-a919-4af2-9465-5ce6a7ca4001 tempest-TenantUsagesTestJSON-1732852836 tempest-TenantUsagesTestJSON-1732852836-project-member] Waiting for the task: (returnval){ [ 1628.851125] env[62519]: value = "task-1802592" [ 1628.851125] env[62519]: _type = "Task" [ 1628.851125] env[62519]: } to complete. 
{{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1628.861196] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-1b7ebb10-9719-4c2b-be48-1e9d9d400c00 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] [instance: c60f5d73-9d6d-4b5f-b71b-00b6b787d482] Creating linked-clone VM from snapshot {{(pid=62519) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1628.861524] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-408f3f97-58e1-4c09-82ce-fee4806abe29 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1628.866579] env[62519]: DEBUG nova.compute.manager [None req-72bc2465-a50c-495c-a43f-faf26f866c6b tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] [instance: dac173ff-1807-405f-a59c-bb2efef62a0c] Start destroying the instance on the hypervisor. {{(pid=62519) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3166}} [ 1628.866785] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-72bc2465-a50c-495c-a43f-faf26f866c6b tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] [instance: dac173ff-1807-405f-a59c-bb2efef62a0c] Destroying instance {{(pid=62519) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1628.870898] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75297dd9-aec4-493e-b419-7758d983bad6 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1628.873885] env[62519]: DEBUG oslo_vmware.api [None req-852acb1c-a919-4af2-9465-5ce6a7ca4001 tempest-TenantUsagesTestJSON-1732852836 tempest-TenantUsagesTestJSON-1732852836-project-member] Task: {'id': task-1802592, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1628.875415] env[62519]: DEBUG oslo_vmware.api [None req-1b7ebb10-9719-4c2b-be48-1e9d9d400c00 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Waiting for the task: (returnval){ [ 1628.875415] env[62519]: value = "task-1802593" [ 1628.875415] env[62519]: _type = "Task" [ 1628.875415] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1628.880930] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-72bc2465-a50c-495c-a43f-faf26f866c6b tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] [instance: dac173ff-1807-405f-a59c-bb2efef62a0c] Powering off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1628.881543] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-02693086-48b0-487a-82e6-e3eb676e0ed4 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1628.889946] env[62519]: DEBUG oslo_vmware.api [None req-1b7ebb10-9719-4c2b-be48-1e9d9d400c00 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Task: {'id': task-1802593, 'name': CloneVM_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1628.895636] env[62519]: DEBUG oslo_vmware.api [None req-72bc2465-a50c-495c-a43f-faf26f866c6b tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Waiting for the task: (returnval){ [ 1628.895636] env[62519]: value = "task-1802594" [ 1628.895636] env[62519]: _type = "Task" [ 1628.895636] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1628.906192] env[62519]: DEBUG oslo_vmware.api [None req-72bc2465-a50c-495c-a43f-faf26f866c6b tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Task: {'id': task-1802594, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1628.944741] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1802590, 'name': CreateVM_Task, 'duration_secs': 0.490555} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1628.944906] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: bace23b3-b7f4-4f3b-8986-0076440d096d] Created VM on the ESX host {{(pid=62519) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1628.945657] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-efec4a27-e1ca-4ade-8ea9-071f6e4c700f tempest-ServerActionsV293TestJSON-760697799 tempest-ServerActionsV293TestJSON-760697799-project-member] [instance: bace23b3-b7f4-4f3b-8986-0076440d096d] Block device information present: {'root_device_name': '/dev/sda', 'image': [], 'ephemerals': [], 'block_device_mapping': [{'connection_info': {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-373701', 'volume_id': 'f0e225ed-ae23-4580-9ecb-e74214791c63', 'name': 'volume-f0e225ed-ae23-4580-9ecb-e74214791c63', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'bace23b3-b7f4-4f3b-8986-0076440d096d', 'attached_at': '', 'detached_at': '', 'volume_id': 'f0e225ed-ae23-4580-9ecb-e74214791c63', 'serial': 'f0e225ed-ae23-4580-9ecb-e74214791c63'}, 'guest_format': None, 'boot_index': 0, 'delete_on_termination': True, 'device_type': None, 'disk_bus': None, 'mount_device': '/dev/sda', 'attachment_id': '515380eb-e65d-4c4f-8faa-4d0c4bb7fca5', 'volume_type': None}], 'swap': None} {{(pid=62519) spawn /opt/stack/nova/nova/virt/vmwareapi/vmops.py:799}} [ 1628.945867] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-efec4a27-e1ca-4ade-8ea9-071f6e4c700f tempest-ServerActionsV293TestJSON-760697799 tempest-ServerActionsV293TestJSON-760697799-project-member] [instance: bace23b3-b7f4-4f3b-8986-0076440d096d] Root volume attach. 
Driver type: vmdk {{(pid=62519) attach_root_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:661}} [ 1628.946684] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1afc4446-5172-48de-905a-4e26fb38114e {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1628.956324] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-950d2edf-76ef-4331-9f2a-c57f8e7e972e {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1628.963845] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-960c1574-8ec1-44b3-befc-517dcf0bb503 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1628.971614] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.RelocateVM_Task with opID=oslo.vmware-c1794b4f-8030-4861-92e1-1166104e4a18 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1628.980533] env[62519]: DEBUG oslo_vmware.api [None req-efec4a27-e1ca-4ade-8ea9-071f6e4c700f tempest-ServerActionsV293TestJSON-760697799 tempest-ServerActionsV293TestJSON-760697799-project-member] Waiting for the task: (returnval){ [ 1628.980533] env[62519]: value = "task-1802595" [ 1628.980533] env[62519]: _type = "Task" [ 1628.980533] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1628.989996] env[62519]: DEBUG oslo_vmware.api [None req-efec4a27-e1ca-4ade-8ea9-071f6e4c700f tempest-ServerActionsV293TestJSON-760697799 tempest-ServerActionsV293TestJSON-760697799-project-member] Task: {'id': task-1802595, 'name': RelocateVM_Task} progress is 5%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1629.112108] env[62519]: DEBUG oslo_vmware.api [None req-484beabb-56ac-464b-a93a-122561b99fee tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Task: {'id': task-1802585, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.547546} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1629.112108] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-484beabb-56ac-464b-a93a-122561b99fee tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk to [datastore1] 46b3a0fb-29f6-4b66-a091-2d125b69d109/46b3a0fb-29f6-4b66-a091-2d125b69d109.vmdk {{(pid=62519) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1629.112108] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-484beabb-56ac-464b-a93a-122561b99fee tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] [instance: 46b3a0fb-29f6-4b66-a091-2d125b69d109] Extending root virtual disk to 1048576 {{(pid=62519) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1629.112108] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-1965e7af-7905-41ee-8fcd-ad5c027879c1 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1629.122352] env[62519]: DEBUG oslo_vmware.api [None req-484beabb-56ac-464b-a93a-122561b99fee tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Waiting for the task: (returnval){ [ 1629.122352] env[62519]: value = "task-1802596" [ 1629.122352] env[62519]: _type = "Task" [ 1629.122352] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1629.141451] env[62519]: DEBUG oslo_vmware.api [None req-484beabb-56ac-464b-a93a-122561b99fee tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Task: {'id': task-1802596, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1629.195155] env[62519]: DEBUG oslo_concurrency.lockutils [None req-eb756757-24ba-440f-8631-f23661662462 tempest-ImagesOneServerTestJSON-1959164190 tempest-ImagesOneServerTestJSON-1959164190-project-member] Lock "4e3dee19-b99a-4257-88da-1b0531e2c0f9" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 31.016s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1629.200155] env[62519]: DEBUG nova.network.neutron [None req-6e183849-567e-4bbd-988c-d4ca57b40836 tempest-VolumesAdminNegativeTest-694306887 tempest-VolumesAdminNegativeTest-694306887-project-member] [instance: ed716912-752e-4c6d-b6c6-fb349668fa93] Successfully updated port: ef550f91-d2ec-415e-8729-4b311d76b126 {{(pid=62519) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1629.234928] env[62519]: DEBUG oslo_vmware.api [None req-eb7b956f-b805-4696-91cf-83b33daf0afe tempest-ServersTestMultiNic-797441404 tempest-ServersTestMultiNic-797441404-project-member] Task: {'id': task-1802591, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1629.374764] env[62519]: DEBUG oslo_vmware.api [None req-852acb1c-a919-4af2-9465-5ce6a7ca4001 tempest-TenantUsagesTestJSON-1732852836 tempest-TenantUsagesTestJSON-1732852836-project-member] Task: {'id': task-1802592, 'name': PowerOnVM_Task} progress is 100%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1629.396688] env[62519]: DEBUG oslo_vmware.api [None req-1b7ebb10-9719-4c2b-be48-1e9d9d400c00 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Task: {'id': task-1802593, 'name': CloneVM_Task} progress is 94%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1629.413963] env[62519]: DEBUG oslo_vmware.api [None req-72bc2465-a50c-495c-a43f-faf26f866c6b tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Task: {'id': task-1802594, 'name': PowerOffVM_Task, 'duration_secs': 0.24343} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1629.418428] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-72bc2465-a50c-495c-a43f-faf26f866c6b tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] [instance: dac173ff-1807-405f-a59c-bb2efef62a0c] Powered off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1629.418850] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-72bc2465-a50c-495c-a43f-faf26f866c6b tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] [instance: dac173ff-1807-405f-a59c-bb2efef62a0c] Unregistering the VM {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1629.423019] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-00a4e99d-a261-4013-b15a-e2f956c5a360 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1629.494596] env[62519]: DEBUG oslo_vmware.api [None req-efec4a27-e1ca-4ade-8ea9-071f6e4c700f tempest-ServerActionsV293TestJSON-760697799 tempest-ServerActionsV293TestJSON-760697799-project-member] Task: {'id': task-1802595, 'name': RelocateVM_Task} progress is 20%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1629.519288] env[62519]: DEBUG nova.network.neutron [req-9fe378df-d44e-46f5-876d-8c1355971095 req-fab1db92-8efc-4799-98c3-d108af8c3899 service nova] [instance: bace23b3-b7f4-4f3b-8986-0076440d096d] Updated VIF entry in instance network info cache for port e252f987-8815-4905-a206-237207d5ac4b. 
{{(pid=62519) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1629.519288] env[62519]: DEBUG nova.network.neutron [req-9fe378df-d44e-46f5-876d-8c1355971095 req-fab1db92-8efc-4799-98c3-d108af8c3899 service nova] [instance: bace23b3-b7f4-4f3b-8986-0076440d096d] Updating instance_info_cache with network_info: [{"id": "e252f987-8815-4905-a206-237207d5ac4b", "address": "fa:16:3e:52:cb:aa", "network": {"id": "7ee257b5-e74c-4a05-bbf8-b502402717ed", "bridge": "br-int", "label": "tempest-ServerActionsV293TestJSON-630407603-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3038814dc07448fca423fb8c8dbe42c3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cbe1725d-6711-4e92-9a4e-d4802651e7d0", "external-id": "nsx-vlan-transportzone-679", "segmentation_id": 679, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape252f987-88", "ovs_interfaceid": "e252f987-8815-4905-a206-237207d5ac4b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1629.561258] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-72bc2465-a50c-495c-a43f-faf26f866c6b tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] [instance: dac173ff-1807-405f-a59c-bb2efef62a0c] Unregistered the VM {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1629.561258] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-72bc2465-a50c-495c-a43f-faf26f866c6b tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] [instance: dac173ff-1807-405f-a59c-bb2efef62a0c] Deleting contents of the VM from datastore datastore1 {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1629.561258] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-72bc2465-a50c-495c-a43f-faf26f866c6b tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Deleting the datastore file [datastore1] dac173ff-1807-405f-a59c-bb2efef62a0c {{(pid=62519) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1629.561258] env[62519]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-6fb01cbb-d04e-4f40-85e1-d283963adb35 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1629.574554] env[62519]: DEBUG oslo_vmware.api [None req-72bc2465-a50c-495c-a43f-faf26f866c6b tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Waiting for the task: (returnval){ [ 1629.574554] env[62519]: value = "task-1802598" [ 1629.574554] env[62519]: _type = "Task" [ 1629.574554] env[62519]: } to complete. 
{{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1629.583723] env[62519]: DEBUG oslo_vmware.api [None req-72bc2465-a50c-495c-a43f-faf26f866c6b tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Task: {'id': task-1802598, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1629.634149] env[62519]: DEBUG oslo_vmware.api [None req-484beabb-56ac-464b-a93a-122561b99fee tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Task: {'id': task-1802596, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.118493} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1629.634439] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-484beabb-56ac-464b-a93a-122561b99fee tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] [instance: 46b3a0fb-29f6-4b66-a091-2d125b69d109] Extended root virtual disk {{(pid=62519) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1629.635966] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f998440-b809-4589-b18f-9d80a7b143ca {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1629.666380] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-484beabb-56ac-464b-a93a-122561b99fee tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] [instance: 46b3a0fb-29f6-4b66-a091-2d125b69d109] Reconfiguring VM instance instance-0000003a to attach disk [datastore1] 46b3a0fb-29f6-4b66-a091-2d125b69d109/46b3a0fb-29f6-4b66-a091-2d125b69d109.vmdk or device None with type sparse {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1629.671152] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-abf08182-b9c6-4eda-8869-7e591fbc3e5c {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1629.694682] env[62519]: DEBUG oslo_vmware.api [None req-484beabb-56ac-464b-a93a-122561b99fee tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Waiting for the task: (returnval){ [ 1629.694682] env[62519]: value = "task-1802599" [ 1629.694682] env[62519]: _type = "Task" [ 1629.694682] env[62519]: } to complete. 
{{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1629.703682] env[62519]: DEBUG oslo_concurrency.lockutils [None req-6e183849-567e-4bbd-988c-d4ca57b40836 tempest-VolumesAdminNegativeTest-694306887 tempest-VolumesAdminNegativeTest-694306887-project-member] Acquiring lock "refresh_cache-ed716912-752e-4c6d-b6c6-fb349668fa93" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1629.705248] env[62519]: DEBUG oslo_concurrency.lockutils [None req-6e183849-567e-4bbd-988c-d4ca57b40836 tempest-VolumesAdminNegativeTest-694306887 tempest-VolumesAdminNegativeTest-694306887-project-member] Acquired lock "refresh_cache-ed716912-752e-4c6d-b6c6-fb349668fa93" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1629.705248] env[62519]: DEBUG nova.network.neutron [None req-6e183849-567e-4bbd-988c-d4ca57b40836 tempest-VolumesAdminNegativeTest-694306887 tempest-VolumesAdminNegativeTest-694306887-project-member] [instance: ed716912-752e-4c6d-b6c6-fb349668fa93] Building network info cache for instance {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1629.708726] env[62519]: DEBUG oslo_vmware.api [None req-484beabb-56ac-464b-a93a-122561b99fee tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Task: {'id': task-1802599, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1629.732958] env[62519]: DEBUG oslo_vmware.api [None req-eb7b956f-b805-4696-91cf-83b33daf0afe tempest-ServersTestMultiNic-797441404 tempest-ServersTestMultiNic-797441404-project-member] Task: {'id': task-1802591, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1629.814847] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37c790dc-1ce1-4e76-8ff1-9ac2463d203e {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1629.826743] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3c8a550-d2ef-46c5-84d3-ec8b47951c5e {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1629.869511] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-644d4ef2-200d-4062-b023-3448623a39c8 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1629.879483] env[62519]: DEBUG oslo_vmware.api [None req-852acb1c-a919-4af2-9465-5ce6a7ca4001 tempest-TenantUsagesTestJSON-1732852836 tempest-TenantUsagesTestJSON-1732852836-project-member] Task: {'id': task-1802592, 'name': PowerOnVM_Task, 'duration_secs': 0.551679} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1629.884739] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-852acb1c-a919-4af2-9465-5ce6a7ca4001 tempest-TenantUsagesTestJSON-1732852836 tempest-TenantUsagesTestJSON-1732852836-project-member] [instance: 540167be-5295-4e28-9b25-16317746dd0e] Powered on the VM {{(pid=62519) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1629.884996] env[62519]: INFO nova.compute.manager [None req-852acb1c-a919-4af2-9465-5ce6a7ca4001 tempest-TenantUsagesTestJSON-1732852836 tempest-TenantUsagesTestJSON-1732852836-project-member] [instance: 540167be-5295-4e28-9b25-16317746dd0e] Took 9.80 seconds to spawn the instance on the hypervisor. [ 1629.885226] env[62519]: DEBUG nova.compute.manager [None req-852acb1c-a919-4af2-9465-5ce6a7ca4001 tempest-TenantUsagesTestJSON-1732852836 tempest-TenantUsagesTestJSON-1732852836-project-member] [instance: 540167be-5295-4e28-9b25-16317746dd0e] Checking state {{(pid=62519) _get_power_state /opt/stack/nova/nova/compute/manager.py:1799}} [ 1629.886124] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b49bdb49-9263-43dd-845e-cd8caed2200d {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1629.890020] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de6ce77e-7291-496d-9cf1-e0257df52c13 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1629.906187] env[62519]: DEBUG oslo_vmware.api [None req-1b7ebb10-9719-4c2b-be48-1e9d9d400c00 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Task: {'id': task-1802593, 'name': CloneVM_Task} progress is 94%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1629.920542] env[62519]: DEBUG nova.compute.provider_tree [None req-7769f008-9fdc-4c87-9eab-a05d54387360 tempest-SecurityGroupsTestJSON-38868017 tempest-SecurityGroupsTestJSON-38868017-project-member] Inventory has not changed in ProviderTree for provider: f8ca0d98-9158-4b85-ae0e-b106f966dd44 {{(pid=62519) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1629.994027] env[62519]: DEBUG oslo_vmware.api [None req-efec4a27-e1ca-4ade-8ea9-071f6e4c700f tempest-ServerActionsV293TestJSON-760697799 tempest-ServerActionsV293TestJSON-760697799-project-member] Task: {'id': task-1802595, 'name': RelocateVM_Task, 'duration_secs': 0.67186} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1629.998335] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-efec4a27-e1ca-4ade-8ea9-071f6e4c700f tempest-ServerActionsV293TestJSON-760697799 tempest-ServerActionsV293TestJSON-760697799-project-member] [instance: bace23b3-b7f4-4f3b-8986-0076440d096d] Volume attach. 
Driver type: vmdk {{(pid=62519) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1629.998335] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-efec4a27-e1ca-4ade-8ea9-071f6e4c700f tempest-ServerActionsV293TestJSON-760697799 tempest-ServerActionsV293TestJSON-760697799-project-member] [instance: bace23b3-b7f4-4f3b-8986-0076440d096d] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-373701', 'volume_id': 'f0e225ed-ae23-4580-9ecb-e74214791c63', 'name': 'volume-f0e225ed-ae23-4580-9ecb-e74214791c63', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'bace23b3-b7f4-4f3b-8986-0076440d096d', 'attached_at': '', 'detached_at': '', 'volume_id': 'f0e225ed-ae23-4580-9ecb-e74214791c63', 'serial': 'f0e225ed-ae23-4580-9ecb-e74214791c63'} {{(pid=62519) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1629.998335] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-047c1630-c85f-4ef1-bb97-2a0bb09a4336 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1630.024900] env[62519]: DEBUG oslo_concurrency.lockutils [req-9fe378df-d44e-46f5-876d-8c1355971095 req-fab1db92-8efc-4799-98c3-d108af8c3899 service nova] Releasing lock "refresh_cache-bace23b3-b7f4-4f3b-8986-0076440d096d" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1630.026536] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0504e7bd-a620-4e51-be68-940ffb656f84 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1630.065293] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-efec4a27-e1ca-4ade-8ea9-071f6e4c700f tempest-ServerActionsV293TestJSON-760697799 tempest-ServerActionsV293TestJSON-760697799-project-member] [instance: bace23b3-b7f4-4f3b-8986-0076440d096d] Reconfiguring VM instance instance-0000003b to attach disk [datastore1] volume-f0e225ed-ae23-4580-9ecb-e74214791c63/volume-f0e225ed-ae23-4580-9ecb-e74214791c63.vmdk or device None with type thin {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1630.065786] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-24d0b6ec-be1b-4c2c-aec1-49ef525c2228 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1630.107317] env[62519]: DEBUG oslo_vmware.api [None req-72bc2465-a50c-495c-a43f-faf26f866c6b tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Task: {'id': task-1802598, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.406798} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1630.109143] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-72bc2465-a50c-495c-a43f-faf26f866c6b tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Deleted the datastore file {{(pid=62519) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1630.109407] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-72bc2465-a50c-495c-a43f-faf26f866c6b tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] [instance: dac173ff-1807-405f-a59c-bb2efef62a0c] Deleted contents of the VM from datastore datastore1 {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1630.109621] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-72bc2465-a50c-495c-a43f-faf26f866c6b tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] [instance: dac173ff-1807-405f-a59c-bb2efef62a0c] Instance destroyed {{(pid=62519) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1630.109832] env[62519]: INFO nova.compute.manager [None req-72bc2465-a50c-495c-a43f-faf26f866c6b tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] [instance: dac173ff-1807-405f-a59c-bb2efef62a0c] Took 1.24 seconds to destroy the instance on the hypervisor. [ 1630.110372] env[62519]: DEBUG oslo.service.loopingcall [None req-72bc2465-a50c-495c-a43f-faf26f866c6b tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62519) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1630.110486] env[62519]: DEBUG oslo_vmware.api [None req-efec4a27-e1ca-4ade-8ea9-071f6e4c700f tempest-ServerActionsV293TestJSON-760697799 tempest-ServerActionsV293TestJSON-760697799-project-member] Waiting for the task: (returnval){ [ 1630.110486] env[62519]: value = "task-1802600" [ 1630.110486] env[62519]: _type = "Task" [ 1630.110486] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1630.110670] env[62519]: DEBUG nova.compute.manager [-] [instance: dac173ff-1807-405f-a59c-bb2efef62a0c] Deallocating network for instance {{(pid=62519) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2297}} [ 1630.112328] env[62519]: DEBUG nova.network.neutron [-] [instance: dac173ff-1807-405f-a59c-bb2efef62a0c] deallocate_for_instance() {{(pid=62519) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1630.126429] env[62519]: DEBUG oslo_vmware.api [None req-efec4a27-e1ca-4ade-8ea9-071f6e4c700f tempest-ServerActionsV293TestJSON-760697799 tempest-ServerActionsV293TestJSON-760697799-project-member] Task: {'id': task-1802600, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1630.207077] env[62519]: DEBUG oslo_vmware.api [None req-484beabb-56ac-464b-a93a-122561b99fee tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Task: {'id': task-1802599, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1630.230802] env[62519]: DEBUG oslo_vmware.api [None req-eb7b956f-b805-4696-91cf-83b33daf0afe tempest-ServersTestMultiNic-797441404 tempest-ServersTestMultiNic-797441404-project-member] Task: {'id': task-1802591, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1630.270434] env[62519]: DEBUG nova.network.neutron [None req-6e183849-567e-4bbd-988c-d4ca57b40836 tempest-VolumesAdminNegativeTest-694306887 tempest-VolumesAdminNegativeTest-694306887-project-member] [instance: ed716912-752e-4c6d-b6c6-fb349668fa93] Instance cache missing network info. {{(pid=62519) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1630.398212] env[62519]: DEBUG oslo_vmware.api [None req-1b7ebb10-9719-4c2b-be48-1e9d9d400c00 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Task: {'id': task-1802593, 'name': CloneVM_Task} progress is 94%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1630.428876] env[62519]: DEBUG nova.scheduler.client.report [None req-7769f008-9fdc-4c87-9eab-a05d54387360 tempest-SecurityGroupsTestJSON-38868017 tempest-SecurityGroupsTestJSON-38868017-project-member] Inventory has not changed for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 157, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1630.436304] env[62519]: INFO nova.compute.manager [None req-852acb1c-a919-4af2-9465-5ce6a7ca4001 tempest-TenantUsagesTestJSON-1732852836 tempest-TenantUsagesTestJSON-1732852836-project-member] [instance: 540167be-5295-4e28-9b25-16317746dd0e] Took 40.84 seconds to build instance. 
[ 1630.518597] env[62519]: DEBUG nova.network.neutron [None req-6e183849-567e-4bbd-988c-d4ca57b40836 tempest-VolumesAdminNegativeTest-694306887 tempest-VolumesAdminNegativeTest-694306887-project-member] [instance: ed716912-752e-4c6d-b6c6-fb349668fa93] Updating instance_info_cache with network_info: [{"id": "ef550f91-d2ec-415e-8729-4b311d76b126", "address": "fa:16:3e:27:2e:b4", "network": {"id": "58441f17-708d-4a4c-9e78-8a8d403416ae", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-132258554-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "92b737895c7c42f78fbc5d0fff165dc8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b5c34919-7d52-4a52-bab1-81af4c8182ef", "external-id": "nsx-vlan-transportzone-458", "segmentation_id": 458, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapef550f91-d2", "ovs_interfaceid": "ef550f91-d2ec-415e-8729-4b311d76b126", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1630.625648] env[62519]: DEBUG oslo_vmware.api [None req-efec4a27-e1ca-4ade-8ea9-071f6e4c700f tempest-ServerActionsV293TestJSON-760697799 tempest-ServerActionsV293TestJSON-760697799-project-member] Task: {'id': task-1802600, 'name': ReconfigVM_Task, 'duration_secs': 0.365108} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1630.625887] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-efec4a27-e1ca-4ade-8ea9-071f6e4c700f tempest-ServerActionsV293TestJSON-760697799 tempest-ServerActionsV293TestJSON-760697799-project-member] [instance: bace23b3-b7f4-4f3b-8986-0076440d096d] Reconfigured VM instance instance-0000003b to attach disk [datastore1] volume-f0e225ed-ae23-4580-9ecb-e74214791c63/volume-f0e225ed-ae23-4580-9ecb-e74214791c63.vmdk or device None with type thin {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1630.631449] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c044f9b9-ba22-41d9-9943-d07b891eab9b {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1630.649066] env[62519]: DEBUG oslo_vmware.api [None req-efec4a27-e1ca-4ade-8ea9-071f6e4c700f tempest-ServerActionsV293TestJSON-760697799 tempest-ServerActionsV293TestJSON-760697799-project-member] Waiting for the task: (returnval){ [ 1630.649066] env[62519]: value = "task-1802601" [ 1630.649066] env[62519]: _type = "Task" [ 1630.649066] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1630.659163] env[62519]: DEBUG oslo_vmware.api [None req-efec4a27-e1ca-4ade-8ea9-071f6e4c700f tempest-ServerActionsV293TestJSON-760697799 tempest-ServerActionsV293TestJSON-760697799-project-member] Task: {'id': task-1802601, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1630.712021] env[62519]: DEBUG oslo_vmware.api [None req-484beabb-56ac-464b-a93a-122561b99fee tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Task: {'id': task-1802599, 'name': ReconfigVM_Task, 'duration_secs': 0.776242} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1630.712021] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-484beabb-56ac-464b-a93a-122561b99fee tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] [instance: 46b3a0fb-29f6-4b66-a091-2d125b69d109] Reconfigured VM instance instance-0000003a to attach disk [datastore1] 46b3a0fb-29f6-4b66-a091-2d125b69d109/46b3a0fb-29f6-4b66-a091-2d125b69d109.vmdk or device None with type sparse {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1630.712021] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-895241c7-7fe0-4bfe-b4df-50b2373035e9 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1630.718640] env[62519]: DEBUG oslo_vmware.api [None req-484beabb-56ac-464b-a93a-122561b99fee tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Waiting for the task: (returnval){ [ 1630.718640] env[62519]: value = "task-1802602" [ 1630.718640] env[62519]: _type = "Task" [ 1630.718640] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1630.721706] env[62519]: DEBUG nova.compute.manager [req-183aa239-c82a-498f-a3b9-18a29fed293e req-8181b696-6c9d-4526-a304-9ba605a7b465 service nova] [instance: ed716912-752e-4c6d-b6c6-fb349668fa93] Received event network-vif-plugged-ef550f91-d2ec-415e-8729-4b311d76b126 {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1630.721931] env[62519]: DEBUG oslo_concurrency.lockutils [req-183aa239-c82a-498f-a3b9-18a29fed293e req-8181b696-6c9d-4526-a304-9ba605a7b465 service nova] Acquiring lock "ed716912-752e-4c6d-b6c6-fb349668fa93-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1630.722094] env[62519]: DEBUG oslo_concurrency.lockutils [req-183aa239-c82a-498f-a3b9-18a29fed293e req-8181b696-6c9d-4526-a304-9ba605a7b465 service nova] Lock "ed716912-752e-4c6d-b6c6-fb349668fa93-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1630.722400] env[62519]: DEBUG oslo_concurrency.lockutils [req-183aa239-c82a-498f-a3b9-18a29fed293e req-8181b696-6c9d-4526-a304-9ba605a7b465 service nova] Lock "ed716912-752e-4c6d-b6c6-fb349668fa93-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1630.722597] env[62519]: DEBUG nova.compute.manager [req-183aa239-c82a-498f-a3b9-18a29fed293e req-8181b696-6c9d-4526-a304-9ba605a7b465 service nova] [instance: ed716912-752e-4c6d-b6c6-fb349668fa93] No waiting 
events found dispatching network-vif-plugged-ef550f91-d2ec-415e-8729-4b311d76b126 {{(pid=62519) pop_instance_event /opt/stack/nova/nova/compute/manager.py:323}} [ 1630.722876] env[62519]: WARNING nova.compute.manager [req-183aa239-c82a-498f-a3b9-18a29fed293e req-8181b696-6c9d-4526-a304-9ba605a7b465 service nova] [instance: ed716912-752e-4c6d-b6c6-fb349668fa93] Received unexpected event network-vif-plugged-ef550f91-d2ec-415e-8729-4b311d76b126 for instance with vm_state building and task_state spawning. [ 1630.723160] env[62519]: DEBUG nova.compute.manager [req-183aa239-c82a-498f-a3b9-18a29fed293e req-8181b696-6c9d-4526-a304-9ba605a7b465 service nova] [instance: ed716912-752e-4c6d-b6c6-fb349668fa93] Received event network-changed-ef550f91-d2ec-415e-8729-4b311d76b126 {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1630.723424] env[62519]: DEBUG nova.compute.manager [req-183aa239-c82a-498f-a3b9-18a29fed293e req-8181b696-6c9d-4526-a304-9ba605a7b465 service nova] [instance: ed716912-752e-4c6d-b6c6-fb349668fa93] Refreshing instance network info cache due to event network-changed-ef550f91-d2ec-415e-8729-4b311d76b126. {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11628}} [ 1630.723707] env[62519]: DEBUG oslo_concurrency.lockutils [req-183aa239-c82a-498f-a3b9-18a29fed293e req-8181b696-6c9d-4526-a304-9ba605a7b465 service nova] Acquiring lock "refresh_cache-ed716912-752e-4c6d-b6c6-fb349668fa93" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1630.745703] env[62519]: DEBUG oslo_vmware.api [None req-eb7b956f-b805-4696-91cf-83b33daf0afe tempest-ServersTestMultiNic-797441404 tempest-ServersTestMultiNic-797441404-project-member] Task: {'id': task-1802591, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1630.746665] env[62519]: DEBUG oslo_vmware.api [None req-484beabb-56ac-464b-a93a-122561b99fee tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Task: {'id': task-1802602, 'name': Rename_Task} progress is 6%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1630.893807] env[62519]: DEBUG oslo_vmware.api [None req-1b7ebb10-9719-4c2b-be48-1e9d9d400c00 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Task: {'id': task-1802593, 'name': CloneVM_Task} progress is 100%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1630.934354] env[62519]: DEBUG oslo_concurrency.lockutils [None req-7769f008-9fdc-4c87-9eab-a05d54387360 tempest-SecurityGroupsTestJSON-38868017 tempest-SecurityGroupsTestJSON-38868017-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.279s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1630.940311] env[62519]: DEBUG oslo_concurrency.lockutils [None req-9f4c1900-996e-42d1-9657-410816fb70d2 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 26.571s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1630.942791] env[62519]: INFO nova.compute.claims [None req-9f4c1900-996e-42d1-9657-410816fb70d2 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] [instance: 417588f8-6288-4ecd-9764-dbc923549c5d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1630.967670] env[62519]: INFO nova.scheduler.client.report [None req-7769f008-9fdc-4c87-9eab-a05d54387360 tempest-SecurityGroupsTestJSON-38868017 tempest-SecurityGroupsTestJSON-38868017-project-member] Deleted allocations for instance ad374dd9-a92d-4b76-9609-7562346e05a8 [ 1631.020998] env[62519]: DEBUG oslo_concurrency.lockutils [None req-6e183849-567e-4bbd-988c-d4ca57b40836 tempest-VolumesAdminNegativeTest-694306887 tempest-VolumesAdminNegativeTest-694306887-project-member] Releasing lock "refresh_cache-ed716912-752e-4c6d-b6c6-fb349668fa93" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1631.021353] env[62519]: DEBUG nova.compute.manager [None req-6e183849-567e-4bbd-988c-d4ca57b40836 tempest-VolumesAdminNegativeTest-694306887 tempest-VolumesAdminNegativeTest-694306887-project-member] [instance: ed716912-752e-4c6d-b6c6-fb349668fa93] Instance network_info: |[{"id": "ef550f91-d2ec-415e-8729-4b311d76b126", "address": "fa:16:3e:27:2e:b4", "network": {"id": "58441f17-708d-4a4c-9e78-8a8d403416ae", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-132258554-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "92b737895c7c42f78fbc5d0fff165dc8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b5c34919-7d52-4a52-bab1-81af4c8182ef", "external-id": "nsx-vlan-transportzone-458", "segmentation_id": 458, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapef550f91-d2", "ovs_interfaceid": "ef550f91-d2ec-415e-8729-4b311d76b126", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62519) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2004}} [ 1631.021778] env[62519]: DEBUG oslo_concurrency.lockutils [req-183aa239-c82a-498f-a3b9-18a29fed293e 
req-8181b696-6c9d-4526-a304-9ba605a7b465 service nova] Acquired lock "refresh_cache-ed716912-752e-4c6d-b6c6-fb349668fa93" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1631.021959] env[62519]: DEBUG nova.network.neutron [req-183aa239-c82a-498f-a3b9-18a29fed293e req-8181b696-6c9d-4526-a304-9ba605a7b465 service nova] [instance: ed716912-752e-4c6d-b6c6-fb349668fa93] Refreshing network info cache for port ef550f91-d2ec-415e-8729-4b311d76b126 {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1631.023177] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-6e183849-567e-4bbd-988c-d4ca57b40836 tempest-VolumesAdminNegativeTest-694306887 tempest-VolumesAdminNegativeTest-694306887-project-member] [instance: ed716912-752e-4c6d-b6c6-fb349668fa93] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:27:2e:b4', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'b5c34919-7d52-4a52-bab1-81af4c8182ef', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'ef550f91-d2ec-415e-8729-4b311d76b126', 'vif_model': 'vmxnet3'}] {{(pid=62519) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1631.031303] env[62519]: DEBUG oslo.service.loopingcall [None req-6e183849-567e-4bbd-988c-d4ca57b40836 tempest-VolumesAdminNegativeTest-694306887 tempest-VolumesAdminNegativeTest-694306887-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62519) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1631.032289] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ed716912-752e-4c6d-b6c6-fb349668fa93] Creating VM on the ESX host {{(pid=62519) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1631.033134] env[62519]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-9a3750c1-fdc0-4a54-a5b3-9a22cbc41c9d {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1631.058923] env[62519]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1631.058923] env[62519]: value = "task-1802603" [ 1631.058923] env[62519]: _type = "Task" [ 1631.058923] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1631.070149] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1802603, 'name': CreateVM_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1631.071511] env[62519]: DEBUG nova.network.neutron [-] [instance: dac173ff-1807-405f-a59c-bb2efef62a0c] Updating instance_info_cache with network_info: [] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1631.160922] env[62519]: DEBUG oslo_vmware.api [None req-efec4a27-e1ca-4ade-8ea9-071f6e4c700f tempest-ServerActionsV293TestJSON-760697799 tempest-ServerActionsV293TestJSON-760697799-project-member] Task: {'id': task-1802601, 'name': ReconfigVM_Task, 'duration_secs': 0.18004} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1631.161374] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-efec4a27-e1ca-4ade-8ea9-071f6e4c700f tempest-ServerActionsV293TestJSON-760697799 tempest-ServerActionsV293TestJSON-760697799-project-member] [instance: bace23b3-b7f4-4f3b-8986-0076440d096d] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-373701', 'volume_id': 'f0e225ed-ae23-4580-9ecb-e74214791c63', 'name': 'volume-f0e225ed-ae23-4580-9ecb-e74214791c63', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'bace23b3-b7f4-4f3b-8986-0076440d096d', 'attached_at': '', 'detached_at': '', 'volume_id': 'f0e225ed-ae23-4580-9ecb-e74214791c63', 'serial': 'f0e225ed-ae23-4580-9ecb-e74214791c63'} {{(pid=62519) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1631.161983] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-f6e6a904-19f2-4d92-b671-0510ec5d5a78 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1631.172872] env[62519]: DEBUG oslo_vmware.api [None req-efec4a27-e1ca-4ade-8ea9-071f6e4c700f tempest-ServerActionsV293TestJSON-760697799 tempest-ServerActionsV293TestJSON-760697799-project-member] Waiting for the task: (returnval){ [ 1631.172872] env[62519]: value = "task-1802604" [ 1631.172872] env[62519]: _type = "Task" [ 1631.172872] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1631.181408] env[62519]: DEBUG oslo_vmware.api [None req-efec4a27-e1ca-4ade-8ea9-071f6e4c700f tempest-ServerActionsV293TestJSON-760697799 tempest-ServerActionsV293TestJSON-760697799-project-member] Task: {'id': task-1802604, 'name': Rename_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1631.237774] env[62519]: DEBUG oslo_vmware.api [None req-eb7b956f-b805-4696-91cf-83b33daf0afe tempest-ServersTestMultiNic-797441404 tempest-ServersTestMultiNic-797441404-project-member] Task: {'id': task-1802591, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1631.239869] env[62519]: DEBUG oslo_vmware.api [None req-484beabb-56ac-464b-a93a-122561b99fee tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Task: {'id': task-1802602, 'name': Rename_Task, 'duration_secs': 0.179826} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1631.240439] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-484beabb-56ac-464b-a93a-122561b99fee tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] [instance: 46b3a0fb-29f6-4b66-a091-2d125b69d109] Powering on the VM {{(pid=62519) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1631.240872] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-adbca278-3c61-4e3b-a556-e1c237c57c6e {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1631.250111] env[62519]: DEBUG oslo_vmware.api [None req-484beabb-56ac-464b-a93a-122561b99fee tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Waiting for the task: (returnval){ [ 1631.250111] env[62519]: value = "task-1802605" [ 1631.250111] env[62519]: _type = "Task" [ 1631.250111] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1631.261528] env[62519]: DEBUG oslo_vmware.api [None req-484beabb-56ac-464b-a93a-122561b99fee tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Task: {'id': task-1802605, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1631.394374] env[62519]: DEBUG oslo_vmware.api [None req-1b7ebb10-9719-4c2b-be48-1e9d9d400c00 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Task: {'id': task-1802593, 'name': CloneVM_Task, 'duration_secs': 2.080203} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1631.398141] env[62519]: INFO nova.virt.vmwareapi.vmops [None req-1b7ebb10-9719-4c2b-be48-1e9d9d400c00 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] [instance: c60f5d73-9d6d-4b5f-b71b-00b6b787d482] Created linked-clone VM from snapshot [ 1631.398141] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e988cc8e-0e8f-4fa6-a06a-63d9eea38f0f {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1631.407089] env[62519]: DEBUG nova.virt.vmwareapi.images [None req-1b7ebb10-9719-4c2b-be48-1e9d9d400c00 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] [instance: c60f5d73-9d6d-4b5f-b71b-00b6b787d482] Uploading image b36eba9a-e231-4006-9afd-ecb0aaa21542 {{(pid=62519) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1631.440701] env[62519]: DEBUG oslo_vmware.rw_handles [None req-1b7ebb10-9719-4c2b-be48-1e9d9d400c00 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1631.440701] env[62519]: value = "vm-373737" [ 1631.440701] env[62519]: _type = "VirtualMachine" [ 1631.440701] env[62519]: }. 
{{(pid=62519) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1631.440701] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-f1b3e032-fbbc-4a73-acde-7250c52dcf61 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1631.449583] env[62519]: DEBUG oslo_vmware.rw_handles [None req-1b7ebb10-9719-4c2b-be48-1e9d9d400c00 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Lease: (returnval){ [ 1631.449583] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]52ff6d8f-b878-0792-42e8-0334df34a115" [ 1631.449583] env[62519]: _type = "HttpNfcLease" [ 1631.449583] env[62519]: } obtained for exporting VM: (result){ [ 1631.449583] env[62519]: value = "vm-373737" [ 1631.449583] env[62519]: _type = "VirtualMachine" [ 1631.449583] env[62519]: }. {{(pid=62519) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1631.449963] env[62519]: DEBUG oslo_vmware.api [None req-1b7ebb10-9719-4c2b-be48-1e9d9d400c00 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Waiting for the lease: (returnval){ [ 1631.449963] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]52ff6d8f-b878-0792-42e8-0334df34a115" [ 1631.449963] env[62519]: _type = "HttpNfcLease" [ 1631.449963] env[62519]: } to be ready. {{(pid=62519) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1631.470297] env[62519]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1631.470297] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]52ff6d8f-b878-0792-42e8-0334df34a115" [ 1631.470297] env[62519]: _type = "HttpNfcLease" [ 1631.470297] env[62519]: } is initializing. {{(pid=62519) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1631.481776] env[62519]: DEBUG oslo_concurrency.lockutils [None req-7769f008-9fdc-4c87-9eab-a05d54387360 tempest-SecurityGroupsTestJSON-38868017 tempest-SecurityGroupsTestJSON-38868017-project-member] Lock "ad374dd9-a92d-4b76-9609-7562346e05a8" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 32.269s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1631.573924] env[62519]: INFO nova.compute.manager [-] [instance: dac173ff-1807-405f-a59c-bb2efef62a0c] Took 1.46 seconds to deallocate network for instance. [ 1631.574353] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1802603, 'name': CreateVM_Task, 'duration_secs': 0.451634} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1631.575819] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ed716912-752e-4c6d-b6c6-fb349668fa93] Created VM on the ESX host {{(pid=62519) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1631.576507] env[62519]: DEBUG oslo_concurrency.lockutils [None req-6e183849-567e-4bbd-988c-d4ca57b40836 tempest-VolumesAdminNegativeTest-694306887 tempest-VolumesAdminNegativeTest-694306887-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1631.577674] env[62519]: DEBUG oslo_concurrency.lockutils [None req-6e183849-567e-4bbd-988c-d4ca57b40836 tempest-VolumesAdminNegativeTest-694306887 tempest-VolumesAdminNegativeTest-694306887-project-member] Acquired lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1631.577674] env[62519]: DEBUG oslo_concurrency.lockutils [None req-6e183849-567e-4bbd-988c-d4ca57b40836 tempest-VolumesAdminNegativeTest-694306887 tempest-VolumesAdminNegativeTest-694306887-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1631.583242] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9ff1ea4e-15b6-4154-b83d-7c0043aa88af {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1631.585885] env[62519]: DEBUG nova.network.neutron [None req-64171712-43a0-4e02-85db-f982475f1310 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] [instance: a1551278-a306-4534-8d8d-3b3a003dde04] Port 1cf55e56-b406-4e45-9b63-912d4587f930 binding to destination host cpu-1 is already ACTIVE {{(pid=62519) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 1631.586149] env[62519]: DEBUG oslo_concurrency.lockutils [None req-64171712-43a0-4e02-85db-f982475f1310 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Acquiring lock "refresh_cache-a1551278-a306-4534-8d8d-3b3a003dde04" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1631.586304] env[62519]: DEBUG oslo_concurrency.lockutils [None req-64171712-43a0-4e02-85db-f982475f1310 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Acquired lock "refresh_cache-a1551278-a306-4534-8d8d-3b3a003dde04" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1631.586638] env[62519]: DEBUG nova.network.neutron [None req-64171712-43a0-4e02-85db-f982475f1310 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] [instance: a1551278-a306-4534-8d8d-3b3a003dde04] Building network info cache for instance {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1631.594523] env[62519]: DEBUG oslo_vmware.api [None req-6e183849-567e-4bbd-988c-d4ca57b40836 tempest-VolumesAdminNegativeTest-694306887 
tempest-VolumesAdminNegativeTest-694306887-project-member] Waiting for the task: (returnval){ [ 1631.594523] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]5275bfd3-6f86-3ac6-8a1a-12aae52bd49a" [ 1631.594523] env[62519]: _type = "Task" [ 1631.594523] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1631.608534] env[62519]: DEBUG oslo_vmware.api [None req-6e183849-567e-4bbd-988c-d4ca57b40836 tempest-VolumesAdminNegativeTest-694306887 tempest-VolumesAdminNegativeTest-694306887-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]5275bfd3-6f86-3ac6-8a1a-12aae52bd49a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1631.685834] env[62519]: DEBUG oslo_vmware.api [None req-efec4a27-e1ca-4ade-8ea9-071f6e4c700f tempest-ServerActionsV293TestJSON-760697799 tempest-ServerActionsV293TestJSON-760697799-project-member] Task: {'id': task-1802604, 'name': Rename_Task, 'duration_secs': 0.190139} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1631.686187] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-efec4a27-e1ca-4ade-8ea9-071f6e4c700f tempest-ServerActionsV293TestJSON-760697799 tempest-ServerActionsV293TestJSON-760697799-project-member] [instance: bace23b3-b7f4-4f3b-8986-0076440d096d] Powering on the VM {{(pid=62519) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1631.686473] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-94a969a4-41a5-452c-981d-3b0735b20466 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1631.695872] env[62519]: DEBUG oslo_vmware.api [None req-efec4a27-e1ca-4ade-8ea9-071f6e4c700f tempest-ServerActionsV293TestJSON-760697799 tempest-ServerActionsV293TestJSON-760697799-project-member] Waiting for the task: (returnval){ [ 1631.695872] env[62519]: value = "task-1802607" [ 1631.695872] env[62519]: _type = "Task" [ 1631.695872] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1631.706206] env[62519]: DEBUG oslo_vmware.api [None req-efec4a27-e1ca-4ade-8ea9-071f6e4c700f tempest-ServerActionsV293TestJSON-760697799 tempest-ServerActionsV293TestJSON-760697799-project-member] Task: {'id': task-1802607, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1631.736354] env[62519]: DEBUG oslo_vmware.api [None req-eb7b956f-b805-4696-91cf-83b33daf0afe tempest-ServersTestMultiNic-797441404 tempest-ServersTestMultiNic-797441404-project-member] Task: {'id': task-1802591, 'name': PowerOnVM_Task, 'duration_secs': 3.013025} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1631.736713] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-eb7b956f-b805-4696-91cf-83b33daf0afe tempest-ServersTestMultiNic-797441404 tempest-ServersTestMultiNic-797441404-project-member] [instance: d5b6b7b4-ebc4-4d3d-8e16-bf0a4e974d67] Powered on the VM {{(pid=62519) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1631.737034] env[62519]: INFO nova.compute.manager [None req-eb7b956f-b805-4696-91cf-83b33daf0afe tempest-ServersTestMultiNic-797441404 tempest-ServersTestMultiNic-797441404-project-member] [instance: d5b6b7b4-ebc4-4d3d-8e16-bf0a4e974d67] Took 14.31 seconds to spawn the instance on the hypervisor. [ 1631.737589] env[62519]: DEBUG nova.compute.manager [None req-eb7b956f-b805-4696-91cf-83b33daf0afe tempest-ServersTestMultiNic-797441404 tempest-ServersTestMultiNic-797441404-project-member] [instance: d5b6b7b4-ebc4-4d3d-8e16-bf0a4e974d67] Checking state {{(pid=62519) _get_power_state /opt/stack/nova/nova/compute/manager.py:1799}} [ 1631.738490] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd6226a1-d232-4dd2-a372-f345d40e2ddd {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1631.762362] env[62519]: DEBUG oslo_vmware.api [None req-484beabb-56ac-464b-a93a-122561b99fee tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Task: {'id': task-1802605, 'name': PowerOnVM_Task} progress is 100%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1631.884017] env[62519]: DEBUG nova.network.neutron [req-183aa239-c82a-498f-a3b9-18a29fed293e req-8181b696-6c9d-4526-a304-9ba605a7b465 service nova] [instance: ed716912-752e-4c6d-b6c6-fb349668fa93] Updated VIF entry in instance network info cache for port ef550f91-d2ec-415e-8729-4b311d76b126. 
{{(pid=62519) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1631.884429] env[62519]: DEBUG nova.network.neutron [req-183aa239-c82a-498f-a3b9-18a29fed293e req-8181b696-6c9d-4526-a304-9ba605a7b465 service nova] [instance: ed716912-752e-4c6d-b6c6-fb349668fa93] Updating instance_info_cache with network_info: [{"id": "ef550f91-d2ec-415e-8729-4b311d76b126", "address": "fa:16:3e:27:2e:b4", "network": {"id": "58441f17-708d-4a4c-9e78-8a8d403416ae", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-132258554-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "92b737895c7c42f78fbc5d0fff165dc8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b5c34919-7d52-4a52-bab1-81af4c8182ef", "external-id": "nsx-vlan-transportzone-458", "segmentation_id": 458, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapef550f91-d2", "ovs_interfaceid": "ef550f91-d2ec-415e-8729-4b311d76b126", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1631.963459] env[62519]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1631.963459] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]52ff6d8f-b878-0792-42e8-0334df34a115" [ 1631.963459] env[62519]: _type = "HttpNfcLease" [ 1631.963459] env[62519]: } is ready. {{(pid=62519) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1631.965261] env[62519]: DEBUG oslo_concurrency.lockutils [None req-852acb1c-a919-4af2-9465-5ce6a7ca4001 tempest-TenantUsagesTestJSON-1732852836 tempest-TenantUsagesTestJSON-1732852836-project-member] Lock "540167be-5295-4e28-9b25-16317746dd0e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 76.824s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1631.965261] env[62519]: DEBUG oslo_vmware.rw_handles [None req-1b7ebb10-9719-4c2b-be48-1e9d9d400c00 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1631.965261] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]52ff6d8f-b878-0792-42e8-0334df34a115" [ 1631.965261] env[62519]: _type = "HttpNfcLease" [ 1631.965261] env[62519]: }. 
{{(pid=62519) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1631.966673] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29be11c8-5f74-4ec8-ac12-245a29348e34 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1631.977359] env[62519]: DEBUG oslo_vmware.rw_handles [None req-1b7ebb10-9719-4c2b-be48-1e9d9d400c00 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/522156cd-e48a-fffd-21fd-7097b3bed776/disk-0.vmdk from lease info. {{(pid=62519) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1631.977601] env[62519]: DEBUG oslo_vmware.rw_handles [None req-1b7ebb10-9719-4c2b-be48-1e9d9d400c00 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Opening URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/522156cd-e48a-fffd-21fd-7097b3bed776/disk-0.vmdk for reading. {{(pid=62519) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1632.087211] env[62519]: DEBUG oslo_concurrency.lockutils [None req-72bc2465-a50c-495c-a43f-faf26f866c6b tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1632.106980] env[62519]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-14e751ae-4c90-4b5f-936d-b809787411ae {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1632.113335] env[62519]: DEBUG oslo_vmware.api [None req-6e183849-567e-4bbd-988c-d4ca57b40836 tempest-VolumesAdminNegativeTest-694306887 tempest-VolumesAdminNegativeTest-694306887-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]5275bfd3-6f86-3ac6-8a1a-12aae52bd49a, 'name': SearchDatastore_Task, 'duration_secs': 0.014379} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1632.114653] env[62519]: DEBUG oslo_concurrency.lockutils [None req-6e183849-567e-4bbd-988c-d4ca57b40836 tempest-VolumesAdminNegativeTest-694306887 tempest-VolumesAdminNegativeTest-694306887-project-member] Releasing lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1632.114893] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-6e183849-567e-4bbd-988c-d4ca57b40836 tempest-VolumesAdminNegativeTest-694306887 tempest-VolumesAdminNegativeTest-694306887-project-member] [instance: ed716912-752e-4c6d-b6c6-fb349668fa93] Processing image 15793716-f1d9-4a86-9030-717adf498693 {{(pid=62519) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1632.115224] env[62519]: DEBUG oslo_concurrency.lockutils [None req-6e183849-567e-4bbd-988c-d4ca57b40836 tempest-VolumesAdminNegativeTest-694306887 tempest-VolumesAdminNegativeTest-694306887-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1632.115335] env[62519]: DEBUG oslo_concurrency.lockutils [None req-6e183849-567e-4bbd-988c-d4ca57b40836 tempest-VolumesAdminNegativeTest-694306887 tempest-VolumesAdminNegativeTest-694306887-project-member] Acquired lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1632.115592] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-6e183849-567e-4bbd-988c-d4ca57b40836 tempest-VolumesAdminNegativeTest-694306887 tempest-VolumesAdminNegativeTest-694306887-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62519) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1632.115857] env[62519]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-41cb9801-781d-4c73-9732-95af58c24c46 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1632.128296] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-6e183849-567e-4bbd-988c-d4ca57b40836 tempest-VolumesAdminNegativeTest-694306887 tempest-VolumesAdminNegativeTest-694306887-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62519) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1632.128507] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-6e183849-567e-4bbd-988c-d4ca57b40836 tempest-VolumesAdminNegativeTest-694306887 tempest-VolumesAdminNegativeTest-694306887-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62519) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1632.131858] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d9263cae-d2cc-4464-b93e-c654df86b8d2 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1632.144514] env[62519]: DEBUG oslo_vmware.api [None req-6e183849-567e-4bbd-988c-d4ca57b40836 tempest-VolumesAdminNegativeTest-694306887 tempest-VolumesAdminNegativeTest-694306887-project-member] Waiting for the task: (returnval){ [ 1632.144514] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]52f0143a-03e1-22d1-b24f-e0f81d78d9f2" [ 1632.144514] env[62519]: _type = "Task" [ 1632.144514] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1632.165243] env[62519]: DEBUG oslo_vmware.api [None req-6e183849-567e-4bbd-988c-d4ca57b40836 tempest-VolumesAdminNegativeTest-694306887 tempest-VolumesAdminNegativeTest-694306887-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52f0143a-03e1-22d1-b24f-e0f81d78d9f2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1632.214573] env[62519]: DEBUG oslo_vmware.api [None req-efec4a27-e1ca-4ade-8ea9-071f6e4c700f tempest-ServerActionsV293TestJSON-760697799 tempest-ServerActionsV293TestJSON-760697799-project-member] Task: {'id': task-1802607, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1632.267046] env[62519]: INFO nova.compute.manager [None req-eb7b956f-b805-4696-91cf-83b33daf0afe tempest-ServersTestMultiNic-797441404 tempest-ServersTestMultiNic-797441404-project-member] [instance: d5b6b7b4-ebc4-4d3d-8e16-bf0a4e974d67] Took 49.98 seconds to build instance. [ 1632.277515] env[62519]: DEBUG oslo_vmware.api [None req-484beabb-56ac-464b-a93a-122561b99fee tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Task: {'id': task-1802605, 'name': PowerOnVM_Task, 'duration_secs': 0.555848} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1632.278316] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-484beabb-56ac-464b-a93a-122561b99fee tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] [instance: 46b3a0fb-29f6-4b66-a091-2d125b69d109] Powered on the VM {{(pid=62519) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1632.278568] env[62519]: INFO nova.compute.manager [None req-484beabb-56ac-464b-a93a-122561b99fee tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] [instance: 46b3a0fb-29f6-4b66-a091-2d125b69d109] Took 9.45 seconds to spawn the instance on the hypervisor. 
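[editor's note] The entries above show the image-cache path: a named lock on "[datastore1] devstack-image-cache_base/<image_id>" is taken, a SearchDatastore_Task checks whether the cached VMDK already exists, and a MakeDirectory call creates the cache folder before the disk is copied. The sketch below illustrates the same serialize-then-cache-or-create pattern using oslo.concurrency's named-lock context manager; the local paths and the fetch_image() helper are assumptions for illustration, not Nova code.

    # Minimal sketch of the cache-or-create pattern visible in the log:
    # serialize on a per-image named lock, reuse the cached file if present,
    # otherwise create it. fetch_image() and the local paths are hypothetical.
    import os

    from oslo_concurrency import lockutils


    def ensure_cached_image(cache_dir, image_id, fetch_image):
        """Return the path of a cached image, fetching it at most once."""
        cached_path = os.path.join(cache_dir, image_id, image_id + '.vmdk')
        # Named lock, analogous to the
        # "[datastore1] devstack-image-cache_base/<image_id>" locks above.
        with lockutils.lock('image-cache-%s' % image_id):
            if not os.path.exists(cached_path):       # SearchDatastore_Task analogue
                os.makedirs(os.path.dirname(cached_path), exist_ok=True)  # MakeDirectory analogue
                fetch_image(image_id, cached_path)    # copy/download into the cache
        return cached_path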
[ 1632.278781] env[62519]: DEBUG nova.compute.manager [None req-484beabb-56ac-464b-a93a-122561b99fee tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] [instance: 46b3a0fb-29f6-4b66-a091-2d125b69d109] Checking state {{(pid=62519) _get_power_state /opt/stack/nova/nova/compute/manager.py:1799}} [ 1632.283142] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e67d235-01ae-4510-8da6-0042d91e4e13 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1632.381246] env[62519]: DEBUG nova.network.neutron [None req-64171712-43a0-4e02-85db-f982475f1310 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] [instance: a1551278-a306-4534-8d8d-3b3a003dde04] Updating instance_info_cache with network_info: [{"id": "1cf55e56-b406-4e45-9b63-912d4587f930", "address": "fa:16:3e:57:b8:10", "network": {"id": "b4c20e4e-b4f0-4757-9ab0-b74eef10b678", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.171", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "4069337684d348e0a1ab4eb6a1a2b14d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fe99da4f-5630-4afd-918b-b327193d8489", "external-id": "nsx-vlan-transportzone-688", "segmentation_id": 688, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1cf55e56-b4", "ovs_interfaceid": "1cf55e56-b406-4e45-9b63-912d4587f930", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1632.394754] env[62519]: DEBUG oslo_concurrency.lockutils [req-183aa239-c82a-498f-a3b9-18a29fed293e req-8181b696-6c9d-4526-a304-9ba605a7b465 service nova] Releasing lock "refresh_cache-ed716912-752e-4c6d-b6c6-fb349668fa93" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1632.395485] env[62519]: DEBUG nova.compute.manager [req-183aa239-c82a-498f-a3b9-18a29fed293e req-8181b696-6c9d-4526-a304-9ba605a7b465 service nova] [instance: dac173ff-1807-405f-a59c-bb2efef62a0c] Received event network-vif-deleted-bb52070a-ffaf-478e-b8c5-9273d58a0b34 {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1632.395485] env[62519]: INFO nova.compute.manager [req-183aa239-c82a-498f-a3b9-18a29fed293e req-8181b696-6c9d-4526-a304-9ba605a7b465 service nova] [instance: dac173ff-1807-405f-a59c-bb2efef62a0c] Neutron deleted interface bb52070a-ffaf-478e-b8c5-9273d58a0b34; detaching it from the instance and deleting it from the info cache [ 1632.395485] env[62519]: DEBUG nova.network.neutron [req-183aa239-c82a-498f-a3b9-18a29fed293e req-8181b696-6c9d-4526-a304-9ba605a7b465 service nova] [instance: dac173ff-1807-405f-a59c-bb2efef62a0c] Updating instance_info_cache with network_info: [] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1632.471436] env[62519]: DEBUG 
nova.compute.manager [None req-5e107b91-7fe0-4ba5-92d7-9609ca3a3d2d tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] [instance: 5786ea51-6499-4460-a123-a038ddcde8b5] Starting instance... {{(pid=62519) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2441}} [ 1632.607672] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bcf7eab9-649a-4bf0-9bc7-d732d9c3e039 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1632.617133] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c68f9251-7d9c-4261-8219-52338043ef82 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1632.653969] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-edfb9b93-b738-4150-b176-dba8ce6ff5b8 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1632.666466] env[62519]: DEBUG oslo_vmware.api [None req-6e183849-567e-4bbd-988c-d4ca57b40836 tempest-VolumesAdminNegativeTest-694306887 tempest-VolumesAdminNegativeTest-694306887-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52f0143a-03e1-22d1-b24f-e0f81d78d9f2, 'name': SearchDatastore_Task, 'duration_secs': 0.013873} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1632.668388] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-649a3643-6b8f-4745-a5c6-509d8536223f {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1632.674168] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5bdb75e3-d85a-4d68-80e1-485fb3a40a10 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1632.681125] env[62519]: DEBUG oslo_vmware.api [None req-6e183849-567e-4bbd-988c-d4ca57b40836 tempest-VolumesAdminNegativeTest-694306887 tempest-VolumesAdminNegativeTest-694306887-project-member] Waiting for the task: (returnval){ [ 1632.681125] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]524b1a7f-862c-082c-8d5d-edf7928137bf" [ 1632.681125] env[62519]: _type = "Task" [ 1632.681125] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1632.690202] env[62519]: DEBUG nova.compute.provider_tree [None req-9f4c1900-996e-42d1-9657-410816fb70d2 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Inventory has not changed in ProviderTree for provider: f8ca0d98-9158-4b85-ae0e-b106f966dd44 {{(pid=62519) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1632.705768] env[62519]: DEBUG oslo_vmware.api [None req-6e183849-567e-4bbd-988c-d4ca57b40836 tempest-VolumesAdminNegativeTest-694306887 tempest-VolumesAdminNegativeTest-694306887-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]524b1a7f-862c-082c-8d5d-edf7928137bf, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1632.714939] env[62519]: DEBUG oslo_vmware.api [None req-efec4a27-e1ca-4ade-8ea9-071f6e4c700f tempest-ServerActionsV293TestJSON-760697799 tempest-ServerActionsV293TestJSON-760697799-project-member] Task: {'id': task-1802607, 'name': PowerOnVM_Task, 'duration_secs': 0.674009} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1632.715394] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-efec4a27-e1ca-4ade-8ea9-071f6e4c700f tempest-ServerActionsV293TestJSON-760697799 tempest-ServerActionsV293TestJSON-760697799-project-member] [instance: bace23b3-b7f4-4f3b-8986-0076440d096d] Powered on the VM {{(pid=62519) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1632.715674] env[62519]: INFO nova.compute.manager [None req-efec4a27-e1ca-4ade-8ea9-071f6e4c700f tempest-ServerActionsV293TestJSON-760697799 tempest-ServerActionsV293TestJSON-760697799-project-member] [instance: bace23b3-b7f4-4f3b-8986-0076440d096d] Took 5.21 seconds to spawn the instance on the hypervisor. [ 1632.716264] env[62519]: DEBUG nova.compute.manager [None req-efec4a27-e1ca-4ade-8ea9-071f6e4c700f tempest-ServerActionsV293TestJSON-760697799 tempest-ServerActionsV293TestJSON-760697799-project-member] [instance: bace23b3-b7f4-4f3b-8986-0076440d096d] Checking state {{(pid=62519) _get_power_state /opt/stack/nova/nova/compute/manager.py:1799}} [ 1632.717230] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e97cb87-5c84-4ba4-9805-9023edf8980e {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1632.803990] env[62519]: INFO nova.compute.manager [None req-484beabb-56ac-464b-a93a-122561b99fee tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] [instance: 46b3a0fb-29f6-4b66-a091-2d125b69d109] Took 42.05 seconds to build instance. 
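[editor's note] The "Updating instance_info_cache with network_info" entries earlier in this section carry a list of VIF dictionaries (id, address, network.subnets[].ips, devname, ovs_interfaceid, and so on). The short sketch below walks a structure with those field names, taken from the log, to pull out the values an operator usually wants; summarize_vifs() is an illustrative helper, not part of Nova.

    # Illustrative only: walk a network_info structure shaped like the cached
    # VIF entries above and summarize each port. Not Nova code.
    def summarize_vifs(network_info):
        """Yield (port_id, mac, [fixed IPs], devname) for each VIF entry."""
        for vif in network_info:
            ips = [ip['address']
                   for subnet in vif['network']['subnets']
                   for ip in subnet['ips']]
            yield vif['id'], vif['address'], ips, vif.get('devname')


    # Example data copied from the instance_info_cache entry for port ef550f91-d2ec.
    network_info = [{
        "id": "ef550f91-d2ec-415e-8729-4b311d76b126",
        "address": "fa:16:3e:27:2e:b4",
        "devname": "tapef550f91-d2",
        "network": {"id": "58441f17-708d-4a4c-9e78-8a8d403416ae",
                    "subnets": [{"cidr": "192.168.128.0/28",
                                 "ips": [{"address": "192.168.128.6",
                                          "type": "fixed"}]}]},
    }]

    for port_id, mac, ips, devname in summarize_vifs(network_info):
        print(port_id, mac, ips, devname)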
[ 1632.886322] env[62519]: DEBUG oslo_concurrency.lockutils [None req-64171712-43a0-4e02-85db-f982475f1310 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Releasing lock "refresh_cache-a1551278-a306-4534-8d8d-3b3a003dde04" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1632.898410] env[62519]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-4ed6704d-1aa2-4238-aafc-05fcbf8969b0 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1632.910251] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9350cfe9-7281-4ca5-b771-c6467f37d1b0 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1632.921363] env[62519]: DEBUG oslo_concurrency.lockutils [None req-c88119e7-b14e-4315-8bdf-c1e98eca84f2 tempest-TenantUsagesTestJSON-1732852836 tempest-TenantUsagesTestJSON-1732852836-project-member] Acquiring lock "540167be-5295-4e28-9b25-16317746dd0e" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1632.921850] env[62519]: DEBUG oslo_concurrency.lockutils [None req-c88119e7-b14e-4315-8bdf-c1e98eca84f2 tempest-TenantUsagesTestJSON-1732852836 tempest-TenantUsagesTestJSON-1732852836-project-member] Lock "540167be-5295-4e28-9b25-16317746dd0e" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1632.922211] env[62519]: DEBUG oslo_concurrency.lockutils [None req-c88119e7-b14e-4315-8bdf-c1e98eca84f2 tempest-TenantUsagesTestJSON-1732852836 tempest-TenantUsagesTestJSON-1732852836-project-member] Acquiring lock "540167be-5295-4e28-9b25-16317746dd0e-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1632.922546] env[62519]: DEBUG oslo_concurrency.lockutils [None req-c88119e7-b14e-4315-8bdf-c1e98eca84f2 tempest-TenantUsagesTestJSON-1732852836 tempest-TenantUsagesTestJSON-1732852836-project-member] Lock "540167be-5295-4e28-9b25-16317746dd0e-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1632.922865] env[62519]: DEBUG oslo_concurrency.lockutils [None req-c88119e7-b14e-4315-8bdf-c1e98eca84f2 tempest-TenantUsagesTestJSON-1732852836 tempest-TenantUsagesTestJSON-1732852836-project-member] Lock "540167be-5295-4e28-9b25-16317746dd0e-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1632.925026] env[62519]: INFO nova.compute.manager [None req-c88119e7-b14e-4315-8bdf-c1e98eca84f2 tempest-TenantUsagesTestJSON-1732852836 tempest-TenantUsagesTestJSON-1732852836-project-member] [instance: 540167be-5295-4e28-9b25-16317746dd0e] Terminating instance [ 1632.961232] env[62519]: DEBUG nova.compute.manager 
[req-183aa239-c82a-498f-a3b9-18a29fed293e req-8181b696-6c9d-4526-a304-9ba605a7b465 service nova] [instance: dac173ff-1807-405f-a59c-bb2efef62a0c] Detach interface failed, port_id=bb52070a-ffaf-478e-b8c5-9273d58a0b34, reason: Instance dac173ff-1807-405f-a59c-bb2efef62a0c could not be found. {{(pid=62519) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11455}} [ 1632.983553] env[62519]: DEBUG nova.compute.manager [None req-5e107b91-7fe0-4ba5-92d7-9609ca3a3d2d tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] [instance: 5786ea51-6499-4460-a123-a038ddcde8b5] Instance disappeared before build. {{(pid=62519) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2448}} [ 1633.961466] env[62519]: DEBUG nova.scheduler.client.report [None req-9f4c1900-996e-42d1-9657-410816fb70d2 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Inventory has not changed for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 157, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1633.969106] env[62519]: DEBUG oslo_concurrency.lockutils [None req-f8b1449c-fa5c-4ec6-8d24-81a01f4c8031 tempest-ServersTestMultiNic-797441404 tempest-ServersTestMultiNic-797441404-project-member] Acquiring lock "d5b6b7b4-ebc4-4d3d-8e16-bf0a4e974d67" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1633.972108] env[62519]: DEBUG nova.compute.manager [None req-64171712-43a0-4e02-85db-f982475f1310 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] [instance: a1551278-a306-4534-8d8d-3b3a003dde04] Hypervisor driver does not support instance shared storage check, assuming it's not on shared storage {{(pid=62519) _is_instance_storage_shared /opt/stack/nova/nova/compute/manager.py:902}} [ 1633.972390] env[62519]: DEBUG oslo_concurrency.lockutils [None req-64171712-43a0-4e02-85db-f982475f1310 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1633.972885] env[62519]: DEBUG nova.compute.manager [None req-c88119e7-b14e-4315-8bdf-c1e98eca84f2 tempest-TenantUsagesTestJSON-1732852836 tempest-TenantUsagesTestJSON-1732852836-project-member] [instance: 540167be-5295-4e28-9b25-16317746dd0e] Start destroying the instance on the hypervisor. 
{{(pid=62519) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3166}} [ 1633.973619] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-c88119e7-b14e-4315-8bdf-c1e98eca84f2 tempest-TenantUsagesTestJSON-1732852836 tempest-TenantUsagesTestJSON-1732852836-project-member] [instance: 540167be-5295-4e28-9b25-16317746dd0e] Destroying instance {{(pid=62519) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1633.977259] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6229882d-4988-4919-9f4d-3783a70683c4 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1633.981650] env[62519]: INFO nova.compute.manager [None req-efec4a27-e1ca-4ade-8ea9-071f6e4c700f tempest-ServerActionsV293TestJSON-760697799 tempest-ServerActionsV293TestJSON-760697799-project-member] [instance: bace23b3-b7f4-4f3b-8986-0076440d096d] Took 38.67 seconds to build instance. [ 1633.989285] env[62519]: DEBUG oslo_vmware.api [None req-6e183849-567e-4bbd-988c-d4ca57b40836 tempest-VolumesAdminNegativeTest-694306887 tempest-VolumesAdminNegativeTest-694306887-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]524b1a7f-862c-082c-8d5d-edf7928137bf, 'name': SearchDatastore_Task, 'duration_secs': 0.021088} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1633.993070] env[62519]: DEBUG oslo_concurrency.lockutils [None req-6e183849-567e-4bbd-988c-d4ca57b40836 tempest-VolumesAdminNegativeTest-694306887 tempest-VolumesAdminNegativeTest-694306887-project-member] Releasing lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1633.993428] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-6e183849-567e-4bbd-988c-d4ca57b40836 tempest-VolumesAdminNegativeTest-694306887 tempest-VolumesAdminNegativeTest-694306887-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk to [datastore1] ed716912-752e-4c6d-b6c6-fb349668fa93/ed716912-752e-4c6d-b6c6-fb349668fa93.vmdk {{(pid=62519) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1633.993876] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-c88119e7-b14e-4315-8bdf-c1e98eca84f2 tempest-TenantUsagesTestJSON-1732852836 tempest-TenantUsagesTestJSON-1732852836-project-member] [instance: 540167be-5295-4e28-9b25-16317746dd0e] Powering off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1633.994623] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d4dd3a58-6e56-4234-8795-f11961307a58 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1633.997835] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a60f82c1-2075-4ef7-b51d-781605a29369 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1634.008036] env[62519]: DEBUG oslo_vmware.api [None req-c88119e7-b14e-4315-8bdf-c1e98eca84f2 tempest-TenantUsagesTestJSON-1732852836 tempest-TenantUsagesTestJSON-1732852836-project-member] Waiting for the 
task: (returnval){ [ 1634.008036] env[62519]: value = "task-1802608" [ 1634.008036] env[62519]: _type = "Task" [ 1634.008036] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1634.009207] env[62519]: DEBUG oslo_vmware.api [None req-6e183849-567e-4bbd-988c-d4ca57b40836 tempest-VolumesAdminNegativeTest-694306887 tempest-VolumesAdminNegativeTest-694306887-project-member] Waiting for the task: (returnval){ [ 1634.009207] env[62519]: value = "task-1802609" [ 1634.009207] env[62519]: _type = "Task" [ 1634.009207] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1634.024409] env[62519]: DEBUG oslo_vmware.api [None req-c88119e7-b14e-4315-8bdf-c1e98eca84f2 tempest-TenantUsagesTestJSON-1732852836 tempest-TenantUsagesTestJSON-1732852836-project-member] Task: {'id': task-1802608, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1634.028102] env[62519]: DEBUG oslo_vmware.api [None req-6e183849-567e-4bbd-988c-d4ca57b40836 tempest-VolumesAdminNegativeTest-694306887 tempest-VolumesAdminNegativeTest-694306887-project-member] Task: {'id': task-1802609, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1634.059014] env[62519]: DEBUG nova.compute.manager [req-4d4a276f-e77d-45e7-8908-302c2be5b1ef req-8b7576c2-0636-4446-89e1-5ef34879cc34 service nova] [instance: 46b3a0fb-29f6-4b66-a091-2d125b69d109] Received event network-changed-bcc785a2-3385-4fe4-85fc-7540000eb36b {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1634.059221] env[62519]: DEBUG nova.compute.manager [req-4d4a276f-e77d-45e7-8908-302c2be5b1ef req-8b7576c2-0636-4446-89e1-5ef34879cc34 service nova] [instance: 46b3a0fb-29f6-4b66-a091-2d125b69d109] Refreshing instance network info cache due to event network-changed-bcc785a2-3385-4fe4-85fc-7540000eb36b. 
{{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11628}} [ 1634.059643] env[62519]: DEBUG oslo_concurrency.lockutils [req-4d4a276f-e77d-45e7-8908-302c2be5b1ef req-8b7576c2-0636-4446-89e1-5ef34879cc34 service nova] Acquiring lock "refresh_cache-46b3a0fb-29f6-4b66-a091-2d125b69d109" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1634.059643] env[62519]: DEBUG oslo_concurrency.lockutils [req-4d4a276f-e77d-45e7-8908-302c2be5b1ef req-8b7576c2-0636-4446-89e1-5ef34879cc34 service nova] Acquired lock "refresh_cache-46b3a0fb-29f6-4b66-a091-2d125b69d109" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1634.059739] env[62519]: DEBUG nova.network.neutron [req-4d4a276f-e77d-45e7-8908-302c2be5b1ef req-8b7576c2-0636-4446-89e1-5ef34879cc34 service nova] [instance: 46b3a0fb-29f6-4b66-a091-2d125b69d109] Refreshing network info cache for port bcc785a2-3385-4fe4-85fc-7540000eb36b {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1634.477750] env[62519]: DEBUG oslo_concurrency.lockutils [None req-eb7b956f-b805-4696-91cf-83b33daf0afe tempest-ServersTestMultiNic-797441404 tempest-ServersTestMultiNic-797441404-project-member] Lock "d5b6b7b4-ebc4-4d3d-8e16-bf0a4e974d67" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 82.089s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1634.478844] env[62519]: DEBUG oslo_concurrency.lockutils [None req-9f4c1900-996e-42d1-9657-410816fb70d2 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.538s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1634.479763] env[62519]: DEBUG nova.compute.manager [None req-9f4c1900-996e-42d1-9657-410816fb70d2 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] [instance: 417588f8-6288-4ecd-9764-dbc923549c5d] Start building networks asynchronously for instance. 
{{(pid=62519) _build_resources /opt/stack/nova/nova/compute/manager.py:2838}} [ 1634.488795] env[62519]: DEBUG oslo_concurrency.lockutils [None req-f8b1449c-fa5c-4ec6-8d24-81a01f4c8031 tempest-ServersTestMultiNic-797441404 tempest-ServersTestMultiNic-797441404-project-member] Lock "d5b6b7b4-ebc4-4d3d-8e16-bf0a4e974d67" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.520s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1634.489347] env[62519]: DEBUG oslo_concurrency.lockutils [None req-f8b1449c-fa5c-4ec6-8d24-81a01f4c8031 tempest-ServersTestMultiNic-797441404 tempest-ServersTestMultiNic-797441404-project-member] Acquiring lock "d5b6b7b4-ebc4-4d3d-8e16-bf0a4e974d67-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1634.489899] env[62519]: DEBUG oslo_concurrency.lockutils [None req-f8b1449c-fa5c-4ec6-8d24-81a01f4c8031 tempest-ServersTestMultiNic-797441404 tempest-ServersTestMultiNic-797441404-project-member] Lock "d5b6b7b4-ebc4-4d3d-8e16-bf0a4e974d67-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.001s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1634.491833] env[62519]: DEBUG oslo_concurrency.lockutils [None req-f8b1449c-fa5c-4ec6-8d24-81a01f4c8031 tempest-ServersTestMultiNic-797441404 tempest-ServersTestMultiNic-797441404-project-member] Lock "d5b6b7b4-ebc4-4d3d-8e16-bf0a4e974d67-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1634.495722] env[62519]: DEBUG oslo_concurrency.lockutils [None req-b0f1427a-eff6-4068-bbe3-05ff2c38cc24 tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 26.305s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1634.505943] env[62519]: INFO nova.compute.claims [None req-b0f1427a-eff6-4068-bbe3-05ff2c38cc24 tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] [instance: 99f22198-1a65-4d0d-b665-90c7063dbdb9] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1634.510105] env[62519]: INFO nova.compute.manager [None req-f8b1449c-fa5c-4ec6-8d24-81a01f4c8031 tempest-ServersTestMultiNic-797441404 tempest-ServersTestMultiNic-797441404-project-member] [instance: d5b6b7b4-ebc4-4d3d-8e16-bf0a4e974d67] Terminating instance [ 1634.539193] env[62519]: DEBUG oslo_vmware.api [None req-6e183849-567e-4bbd-988c-d4ca57b40836 tempest-VolumesAdminNegativeTest-694306887 tempest-VolumesAdminNegativeTest-694306887-project-member] Task: {'id': task-1802609, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1634.543844] env[62519]: DEBUG oslo_vmware.api [None req-c88119e7-b14e-4315-8bdf-c1e98eca84f2 tempest-TenantUsagesTestJSON-1732852836 tempest-TenantUsagesTestJSON-1732852836-project-member] Task: {'id': task-1802608, 'name': PowerOffVM_Task, 'duration_secs': 0.372469} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1634.544335] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-c88119e7-b14e-4315-8bdf-c1e98eca84f2 tempest-TenantUsagesTestJSON-1732852836 tempest-TenantUsagesTestJSON-1732852836-project-member] [instance: 540167be-5295-4e28-9b25-16317746dd0e] Powered off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1634.544486] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-c88119e7-b14e-4315-8bdf-c1e98eca84f2 tempest-TenantUsagesTestJSON-1732852836 tempest-TenantUsagesTestJSON-1732852836-project-member] [instance: 540167be-5295-4e28-9b25-16317746dd0e] Unregistering the VM {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1634.544783] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-78d4d8ac-738b-4440-bc1d-ddfc191cd799 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1634.743025] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-c88119e7-b14e-4315-8bdf-c1e98eca84f2 tempest-TenantUsagesTestJSON-1732852836 tempest-TenantUsagesTestJSON-1732852836-project-member] [instance: 540167be-5295-4e28-9b25-16317746dd0e] Unregistered the VM {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1634.743025] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-c88119e7-b14e-4315-8bdf-c1e98eca84f2 tempest-TenantUsagesTestJSON-1732852836 tempest-TenantUsagesTestJSON-1732852836-project-member] [instance: 540167be-5295-4e28-9b25-16317746dd0e] Deleting contents of the VM from datastore datastore1 {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1634.743025] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-c88119e7-b14e-4315-8bdf-c1e98eca84f2 tempest-TenantUsagesTestJSON-1732852836 tempest-TenantUsagesTestJSON-1732852836-project-member] Deleting the datastore file [datastore1] 540167be-5295-4e28-9b25-16317746dd0e {{(pid=62519) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1634.743025] env[62519]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-28fc0eeb-af05-4d46-9ddd-6d4991758161 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1634.758021] env[62519]: DEBUG oslo_vmware.api [None req-c88119e7-b14e-4315-8bdf-c1e98eca84f2 tempest-TenantUsagesTestJSON-1732852836 tempest-TenantUsagesTestJSON-1732852836-project-member] Waiting for the task: (returnval){ [ 1634.758021] env[62519]: value = "task-1802611" [ 1634.758021] env[62519]: _type = "Task" [ 1634.758021] env[62519]: } to complete. 
{{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1634.772305] env[62519]: DEBUG oslo_vmware.api [None req-c88119e7-b14e-4315-8bdf-c1e98eca84f2 tempest-TenantUsagesTestJSON-1732852836 tempest-TenantUsagesTestJSON-1732852836-project-member] Task: {'id': task-1802611, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1635.012129] env[62519]: DEBUG nova.compute.utils [None req-9f4c1900-996e-42d1-9657-410816fb70d2 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Using /dev/sd instead of None {{(pid=62519) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1635.013614] env[62519]: DEBUG nova.compute.manager [None req-41ebc291-9b01-4875-8d9f-5c4226f038f6 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] [instance: 40507d8c-8f30-45d4-9c65-03f8b1271afb] Starting instance... {{(pid=62519) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2441}} [ 1635.016559] env[62519]: DEBUG nova.compute.manager [None req-9f4c1900-996e-42d1-9657-410816fb70d2 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] [instance: 417588f8-6288-4ecd-9764-dbc923549c5d] Allocating IP information in the background. {{(pid=62519) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1989}} [ 1635.017015] env[62519]: DEBUG nova.network.neutron [None req-9f4c1900-996e-42d1-9657-410816fb70d2 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] [instance: 417588f8-6288-4ecd-9764-dbc923549c5d] allocate_for_instance() {{(pid=62519) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1635.030594] env[62519]: DEBUG nova.compute.manager [None req-f8b1449c-fa5c-4ec6-8d24-81a01f4c8031 tempest-ServersTestMultiNic-797441404 tempest-ServersTestMultiNic-797441404-project-member] [instance: d5b6b7b4-ebc4-4d3d-8e16-bf0a4e974d67] Start destroying the instance on the hypervisor. 
{{(pid=62519) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3166}} [ 1635.030811] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-f8b1449c-fa5c-4ec6-8d24-81a01f4c8031 tempest-ServersTestMultiNic-797441404 tempest-ServersTestMultiNic-797441404-project-member] [instance: d5b6b7b4-ebc4-4d3d-8e16-bf0a4e974d67] Destroying instance {{(pid=62519) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1635.032258] env[62519]: DEBUG oslo_concurrency.lockutils [None req-484beabb-56ac-464b-a93a-122561b99fee tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Lock "46b3a0fb-29f6-4b66-a091-2d125b69d109" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 73.194s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1635.041258] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5097f51d-bf87-4d9c-8767-42d4899c5d6f {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1635.052365] env[62519]: DEBUG oslo_concurrency.lockutils [None req-5e107b91-7fe0-4ba5-92d7-9609ca3a3d2d tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Lock "5786ea51-6499-4460-a123-a038ddcde8b5" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 10.377s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1635.061069] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-f8b1449c-fa5c-4ec6-8d24-81a01f4c8031 tempest-ServersTestMultiNic-797441404 tempest-ServersTestMultiNic-797441404-project-member] [instance: d5b6b7b4-ebc4-4d3d-8e16-bf0a4e974d67] Powering off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1635.065143] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e7a95749-963b-4f52-9a3d-714693e2c94f {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1635.066649] env[62519]: DEBUG oslo_vmware.api [None req-6e183849-567e-4bbd-988c-d4ca57b40836 tempest-VolumesAdminNegativeTest-694306887 tempest-VolumesAdminNegativeTest-694306887-project-member] Task: {'id': task-1802609, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.632794} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1635.067596] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-6e183849-567e-4bbd-988c-d4ca57b40836 tempest-VolumesAdminNegativeTest-694306887 tempest-VolumesAdminNegativeTest-694306887-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk to [datastore1] ed716912-752e-4c6d-b6c6-fb349668fa93/ed716912-752e-4c6d-b6c6-fb349668fa93.vmdk {{(pid=62519) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1635.067847] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-6e183849-567e-4bbd-988c-d4ca57b40836 tempest-VolumesAdminNegativeTest-694306887 tempest-VolumesAdminNegativeTest-694306887-project-member] [instance: ed716912-752e-4c6d-b6c6-fb349668fa93] Extending root virtual disk to 1048576 {{(pid=62519) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1635.068568] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-5f969e31-3b7c-4899-a50f-645d61ff02e5 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1635.074577] env[62519]: DEBUG oslo_vmware.api [None req-f8b1449c-fa5c-4ec6-8d24-81a01f4c8031 tempest-ServersTestMultiNic-797441404 tempest-ServersTestMultiNic-797441404-project-member] Waiting for the task: (returnval){ [ 1635.074577] env[62519]: value = "task-1802612" [ 1635.074577] env[62519]: _type = "Task" [ 1635.074577] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1635.081236] env[62519]: DEBUG oslo_vmware.api [None req-6e183849-567e-4bbd-988c-d4ca57b40836 tempest-VolumesAdminNegativeTest-694306887 tempest-VolumesAdminNegativeTest-694306887-project-member] Waiting for the task: (returnval){ [ 1635.081236] env[62519]: value = "task-1802613" [ 1635.081236] env[62519]: _type = "Task" [ 1635.081236] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1635.094033] env[62519]: DEBUG oslo_vmware.api [None req-f8b1449c-fa5c-4ec6-8d24-81a01f4c8031 tempest-ServersTestMultiNic-797441404 tempest-ServersTestMultiNic-797441404-project-member] Task: {'id': task-1802612, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1635.102466] env[62519]: DEBUG oslo_vmware.api [None req-6e183849-567e-4bbd-988c-d4ca57b40836 tempest-VolumesAdminNegativeTest-694306887 tempest-VolumesAdminNegativeTest-694306887-project-member] Task: {'id': task-1802613, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1635.162766] env[62519]: DEBUG nova.policy [None req-9f4c1900-996e-42d1-9657-410816fb70d2 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '053a30aaf81b4cbd8ced7018ebfe1f40', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '0e755fb5a6e94068b6c99b1638081f5f', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62519) authorize /opt/stack/nova/nova/policy.py:192}} [ 1635.270238] env[62519]: DEBUG oslo_vmware.api [None req-c88119e7-b14e-4315-8bdf-c1e98eca84f2 tempest-TenantUsagesTestJSON-1732852836 tempest-TenantUsagesTestJSON-1732852836-project-member] Task: {'id': task-1802611, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.307492} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1635.270916] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-c88119e7-b14e-4315-8bdf-c1e98eca84f2 tempest-TenantUsagesTestJSON-1732852836 tempest-TenantUsagesTestJSON-1732852836-project-member] Deleted the datastore file {{(pid=62519) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1635.271505] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-c88119e7-b14e-4315-8bdf-c1e98eca84f2 tempest-TenantUsagesTestJSON-1732852836 tempest-TenantUsagesTestJSON-1732852836-project-member] [instance: 540167be-5295-4e28-9b25-16317746dd0e] Deleted contents of the VM from datastore datastore1 {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1635.272129] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-c88119e7-b14e-4315-8bdf-c1e98eca84f2 tempest-TenantUsagesTestJSON-1732852836 tempest-TenantUsagesTestJSON-1732852836-project-member] [instance: 540167be-5295-4e28-9b25-16317746dd0e] Instance destroyed {{(pid=62519) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1635.272516] env[62519]: INFO nova.compute.manager [None req-c88119e7-b14e-4315-8bdf-c1e98eca84f2 tempest-TenantUsagesTestJSON-1732852836 tempest-TenantUsagesTestJSON-1732852836-project-member] [instance: 540167be-5295-4e28-9b25-16317746dd0e] Took 1.30 seconds to destroy the instance on the hypervisor. [ 1635.273123] env[62519]: DEBUG oslo.service.loopingcall [None req-c88119e7-b14e-4315-8bdf-c1e98eca84f2 tempest-TenantUsagesTestJSON-1732852836 tempest-TenantUsagesTestJSON-1732852836-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62519) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1635.273532] env[62519]: DEBUG nova.compute.manager [-] [instance: 540167be-5295-4e28-9b25-16317746dd0e] Deallocating network for instance {{(pid=62519) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2297}} [ 1635.273816] env[62519]: DEBUG nova.network.neutron [-] [instance: 540167be-5295-4e28-9b25-16317746dd0e] deallocate_for_instance() {{(pid=62519) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1635.460292] env[62519]: DEBUG nova.network.neutron [req-4d4a276f-e77d-45e7-8908-302c2be5b1ef req-8b7576c2-0636-4446-89e1-5ef34879cc34 service nova] [instance: 46b3a0fb-29f6-4b66-a091-2d125b69d109] Updated VIF entry in instance network info cache for port bcc785a2-3385-4fe4-85fc-7540000eb36b. {{(pid=62519) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1635.460292] env[62519]: DEBUG nova.network.neutron [req-4d4a276f-e77d-45e7-8908-302c2be5b1ef req-8b7576c2-0636-4446-89e1-5ef34879cc34 service nova] [instance: 46b3a0fb-29f6-4b66-a091-2d125b69d109] Updating instance_info_cache with network_info: [{"id": "bcc785a2-3385-4fe4-85fc-7540000eb36b", "address": "fa:16:3e:9d:ab:f3", "network": {"id": "06bc21fc-5712-4650-9d06-18e9cc6afd29", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1062109402-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.161", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "17cd969b1e7d4bd795748560caf80077", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "89f807d9-140f-4a6f-8bce-96795f9482ee", "external-id": "nsx-vlan-transportzone-762", "segmentation_id": 762, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbcc785a2-33", "ovs_interfaceid": "bcc785a2-3385-4fe4-85fc-7540000eb36b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1635.519883] env[62519]: DEBUG nova.compute.manager [None req-9f4c1900-996e-42d1-9657-410816fb70d2 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] [instance: 417588f8-6288-4ecd-9764-dbc923549c5d] Start building block device mappings for instance. 
{{(pid=62519) _build_resources /opt/stack/nova/nova/compute/manager.py:2873}} [ 1635.544215] env[62519]: DEBUG oslo_concurrency.lockutils [None req-efec4a27-e1ca-4ade-8ea9-071f6e4c700f tempest-ServerActionsV293TestJSON-760697799 tempest-ServerActionsV293TestJSON-760697799-project-member] Lock "bace23b3-b7f4-4f3b-8986-0076440d096d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 61.859s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1635.554901] env[62519]: DEBUG oslo_concurrency.lockutils [None req-41ebc291-9b01-4875-8d9f-5c4226f038f6 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1635.592596] env[62519]: DEBUG oslo_vmware.api [None req-f8b1449c-fa5c-4ec6-8d24-81a01f4c8031 tempest-ServersTestMultiNic-797441404 tempest-ServersTestMultiNic-797441404-project-member] Task: {'id': task-1802612, 'name': PowerOffVM_Task} progress is 100%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1635.602588] env[62519]: DEBUG oslo_vmware.api [None req-6e183849-567e-4bbd-988c-d4ca57b40836 tempest-VolumesAdminNegativeTest-694306887 tempest-VolumesAdminNegativeTest-694306887-project-member] Task: {'id': task-1802613, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.095395} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1635.602880] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-6e183849-567e-4bbd-988c-d4ca57b40836 tempest-VolumesAdminNegativeTest-694306887 tempest-VolumesAdminNegativeTest-694306887-project-member] [instance: ed716912-752e-4c6d-b6c6-fb349668fa93] Extended root virtual disk {{(pid=62519) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1635.603994] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ebdc9eb-3037-44f3-92e3-765684fc70d1 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1635.631126] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-6e183849-567e-4bbd-988c-d4ca57b40836 tempest-VolumesAdminNegativeTest-694306887 tempest-VolumesAdminNegativeTest-694306887-project-member] [instance: ed716912-752e-4c6d-b6c6-fb349668fa93] Reconfiguring VM instance instance-0000003c to attach disk [datastore1] ed716912-752e-4c6d-b6c6-fb349668fa93/ed716912-752e-4c6d-b6c6-fb349668fa93.vmdk or device None with type sparse {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1635.636654] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5e7e511e-7cec-4818-876d-069de9e7410a {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1635.662782] env[62519]: DEBUG oslo_vmware.api [None req-6e183849-567e-4bbd-988c-d4ca57b40836 tempest-VolumesAdminNegativeTest-694306887 tempest-VolumesAdminNegativeTest-694306887-project-member] Waiting for the task: (returnval){ [ 1635.662782] env[62519]: value = "task-1802614" [ 1635.662782] env[62519]: _type = "Task" [ 1635.662782] env[62519]: } to complete. 
{{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1635.682446] env[62519]: DEBUG oslo_vmware.api [None req-6e183849-567e-4bbd-988c-d4ca57b40836 tempest-VolumesAdminNegativeTest-694306887 tempest-VolumesAdminNegativeTest-694306887-project-member] Task: {'id': task-1802614, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1635.915268] env[62519]: DEBUG oslo_concurrency.lockutils [None req-cd6a10f9-f40f-4798-8fe6-7c97937bac88 tempest-SecurityGroupsTestJSON-38868017 tempest-SecurityGroupsTestJSON-38868017-project-member] Acquiring lock "4a29bff8-050a-4ad5-9d06-3a59c40b97ee" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1635.915595] env[62519]: DEBUG oslo_concurrency.lockutils [None req-cd6a10f9-f40f-4798-8fe6-7c97937bac88 tempest-SecurityGroupsTestJSON-38868017 tempest-SecurityGroupsTestJSON-38868017-project-member] Lock "4a29bff8-050a-4ad5-9d06-3a59c40b97ee" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1635.915865] env[62519]: DEBUG oslo_concurrency.lockutils [None req-cd6a10f9-f40f-4798-8fe6-7c97937bac88 tempest-SecurityGroupsTestJSON-38868017 tempest-SecurityGroupsTestJSON-38868017-project-member] Acquiring lock "4a29bff8-050a-4ad5-9d06-3a59c40b97ee-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1635.916102] env[62519]: DEBUG oslo_concurrency.lockutils [None req-cd6a10f9-f40f-4798-8fe6-7c97937bac88 tempest-SecurityGroupsTestJSON-38868017 tempest-SecurityGroupsTestJSON-38868017-project-member] Lock "4a29bff8-050a-4ad5-9d06-3a59c40b97ee-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1635.916234] env[62519]: DEBUG oslo_concurrency.lockutils [None req-cd6a10f9-f40f-4798-8fe6-7c97937bac88 tempest-SecurityGroupsTestJSON-38868017 tempest-SecurityGroupsTestJSON-38868017-project-member] Lock "4a29bff8-050a-4ad5-9d06-3a59c40b97ee-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1635.922393] env[62519]: INFO nova.compute.manager [None req-cd6a10f9-f40f-4798-8fe6-7c97937bac88 tempest-SecurityGroupsTestJSON-38868017 tempest-SecurityGroupsTestJSON-38868017-project-member] [instance: 4a29bff8-050a-4ad5-9d06-3a59c40b97ee] Terminating instance [ 1635.967327] env[62519]: DEBUG oslo_concurrency.lockutils [req-4d4a276f-e77d-45e7-8908-302c2be5b1ef req-8b7576c2-0636-4446-89e1-5ef34879cc34 service nova] Releasing lock "refresh_cache-46b3a0fb-29f6-4b66-a091-2d125b69d109" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1636.000172] env[62519]: DEBUG nova.network.neutron [None req-9f4c1900-996e-42d1-9657-410816fb70d2 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] 
[instance: 417588f8-6288-4ecd-9764-dbc923549c5d] Successfully created port: f0e6d0f1-55f9-41b4-83ca-fca156a01c8c {{(pid=62519) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1636.092786] env[62519]: DEBUG oslo_vmware.api [None req-f8b1449c-fa5c-4ec6-8d24-81a01f4c8031 tempest-ServersTestMultiNic-797441404 tempest-ServersTestMultiNic-797441404-project-member] Task: {'id': task-1802612, 'name': PowerOffVM_Task, 'duration_secs': 0.915893} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1636.092786] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-f8b1449c-fa5c-4ec6-8d24-81a01f4c8031 tempest-ServersTestMultiNic-797441404 tempest-ServersTestMultiNic-797441404-project-member] [instance: d5b6b7b4-ebc4-4d3d-8e16-bf0a4e974d67] Powered off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1636.092786] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-f8b1449c-fa5c-4ec6-8d24-81a01f4c8031 tempest-ServersTestMultiNic-797441404 tempest-ServersTestMultiNic-797441404-project-member] [instance: d5b6b7b4-ebc4-4d3d-8e16-bf0a4e974d67] Unregistering the VM {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1636.093187] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-4fa22efb-26e1-4b51-914a-bb9b7c912855 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1636.155659] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03cbdb81-cef2-48b8-a1db-03b1499431eb {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1636.169903] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e1f869b-e75a-4ea1-81c5-b139e4952b71 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1636.178618] env[62519]: DEBUG nova.compute.manager [req-46393b5b-f420-4596-b07b-0f3dc059a089 req-e367dc0a-1397-4d07-bfc6-f8ff216be474 service nova] [instance: bace23b3-b7f4-4f3b-8986-0076440d096d] Received event network-changed-e252f987-8815-4905-a206-237207d5ac4b {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1636.178618] env[62519]: DEBUG nova.compute.manager [req-46393b5b-f420-4596-b07b-0f3dc059a089 req-e367dc0a-1397-4d07-bfc6-f8ff216be474 service nova] [instance: bace23b3-b7f4-4f3b-8986-0076440d096d] Refreshing instance network info cache due to event network-changed-e252f987-8815-4905-a206-237207d5ac4b. 
{{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11628}} [ 1636.178618] env[62519]: DEBUG oslo_concurrency.lockutils [req-46393b5b-f420-4596-b07b-0f3dc059a089 req-e367dc0a-1397-4d07-bfc6-f8ff216be474 service nova] Acquiring lock "refresh_cache-bace23b3-b7f4-4f3b-8986-0076440d096d" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1636.178618] env[62519]: DEBUG oslo_concurrency.lockutils [req-46393b5b-f420-4596-b07b-0f3dc059a089 req-e367dc0a-1397-4d07-bfc6-f8ff216be474 service nova] Acquired lock "refresh_cache-bace23b3-b7f4-4f3b-8986-0076440d096d" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1636.178823] env[62519]: DEBUG nova.network.neutron [req-46393b5b-f420-4596-b07b-0f3dc059a089 req-e367dc0a-1397-4d07-bfc6-f8ff216be474 service nova] [instance: bace23b3-b7f4-4f3b-8986-0076440d096d] Refreshing network info cache for port e252f987-8815-4905-a206-237207d5ac4b {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1636.212812] env[62519]: DEBUG oslo_vmware.api [None req-6e183849-567e-4bbd-988c-d4ca57b40836 tempest-VolumesAdminNegativeTest-694306887 tempest-VolumesAdminNegativeTest-694306887-project-member] Task: {'id': task-1802614, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1636.217301] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6728cc05-ef94-4758-8a4b-b5dba7186618 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1636.227014] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf4d73a8-6b49-4c13-a4d4-0d53c025394f {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1636.245008] env[62519]: DEBUG nova.compute.provider_tree [None req-b0f1427a-eff6-4068-bbe3-05ff2c38cc24 tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] Inventory has not changed in ProviderTree for provider: f8ca0d98-9158-4b85-ae0e-b106f966dd44 {{(pid=62519) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1636.396459] env[62519]: DEBUG nova.network.neutron [-] [instance: 540167be-5295-4e28-9b25-16317746dd0e] Updating instance_info_cache with network_info: [] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1636.422366] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-f8b1449c-fa5c-4ec6-8d24-81a01f4c8031 tempest-ServersTestMultiNic-797441404 tempest-ServersTestMultiNic-797441404-project-member] [instance: d5b6b7b4-ebc4-4d3d-8e16-bf0a4e974d67] Unregistered the VM {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1636.422366] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-f8b1449c-fa5c-4ec6-8d24-81a01f4c8031 tempest-ServersTestMultiNic-797441404 tempest-ServersTestMultiNic-797441404-project-member] [instance: d5b6b7b4-ebc4-4d3d-8e16-bf0a4e974d67] Deleting contents of the VM from datastore datastore1 {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1636.422366] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-f8b1449c-fa5c-4ec6-8d24-81a01f4c8031 tempest-ServersTestMultiNic-797441404 
tempest-ServersTestMultiNic-797441404-project-member] Deleting the datastore file [datastore1] d5b6b7b4-ebc4-4d3d-8e16-bf0a4e974d67 {{(pid=62519) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1636.426035] env[62519]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-6adcc90c-ca22-43d2-b910-2a06043e3fbe {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1636.429311] env[62519]: DEBUG nova.compute.manager [None req-cd6a10f9-f40f-4798-8fe6-7c97937bac88 tempest-SecurityGroupsTestJSON-38868017 tempest-SecurityGroupsTestJSON-38868017-project-member] [instance: 4a29bff8-050a-4ad5-9d06-3a59c40b97ee] Start destroying the instance on the hypervisor. {{(pid=62519) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3166}} [ 1636.429514] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-cd6a10f9-f40f-4798-8fe6-7c97937bac88 tempest-SecurityGroupsTestJSON-38868017 tempest-SecurityGroupsTestJSON-38868017-project-member] [instance: 4a29bff8-050a-4ad5-9d06-3a59c40b97ee] Destroying instance {{(pid=62519) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1636.430414] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6deb8a2-c11f-424b-89a2-4f7895e21925 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1636.439024] env[62519]: DEBUG oslo_vmware.api [None req-f8b1449c-fa5c-4ec6-8d24-81a01f4c8031 tempest-ServersTestMultiNic-797441404 tempest-ServersTestMultiNic-797441404-project-member] Waiting for the task: (returnval){ [ 1636.439024] env[62519]: value = "task-1802616" [ 1636.439024] env[62519]: _type = "Task" [ 1636.439024] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1636.441867] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-cd6a10f9-f40f-4798-8fe6-7c97937bac88 tempest-SecurityGroupsTestJSON-38868017 tempest-SecurityGroupsTestJSON-38868017-project-member] [instance: 4a29bff8-050a-4ad5-9d06-3a59c40b97ee] Powering off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1636.442522] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-82d45181-9aae-451d-9b05-5350167df92b {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1636.448274] env[62519]: DEBUG oslo_vmware.api [None req-f8b1449c-fa5c-4ec6-8d24-81a01f4c8031 tempest-ServersTestMultiNic-797441404 tempest-ServersTestMultiNic-797441404-project-member] Task: {'id': task-1802616, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1636.459818] env[62519]: DEBUG oslo_vmware.api [None req-cd6a10f9-f40f-4798-8fe6-7c97937bac88 tempest-SecurityGroupsTestJSON-38868017 tempest-SecurityGroupsTestJSON-38868017-project-member] Waiting for the task: (returnval){ [ 1636.459818] env[62519]: value = "task-1802617" [ 1636.459818] env[62519]: _type = "Task" [ 1636.459818] env[62519]: } to complete. 
{{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1636.471483] env[62519]: DEBUG oslo_vmware.api [None req-cd6a10f9-f40f-4798-8fe6-7c97937bac88 tempest-SecurityGroupsTestJSON-38868017 tempest-SecurityGroupsTestJSON-38868017-project-member] Task: {'id': task-1802617, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1636.526670] env[62519]: DEBUG nova.compute.manager [None req-9f4c1900-996e-42d1-9657-410816fb70d2 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] [instance: 417588f8-6288-4ecd-9764-dbc923549c5d] Start spawning the instance on the hypervisor. {{(pid=62519) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2647}} [ 1636.576280] env[62519]: DEBUG nova.virt.hardware [None req-9f4c1900-996e-42d1-9657-410816fb70d2 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T07:56:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T07:55:55Z,direct_url=,disk_format='vmdk',id=15793716-f1d9-4a86-9030-717adf498693,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4069337684d348e0a1ab4eb6a1a2b14d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T07:55:56Z,virtual_size=,visibility=), allow threads: False {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1636.576280] env[62519]: DEBUG nova.virt.hardware [None req-9f4c1900-996e-42d1-9657-410816fb70d2 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Flavor limits 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1636.576280] env[62519]: DEBUG nova.virt.hardware [None req-9f4c1900-996e-42d1-9657-410816fb70d2 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Image limits 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1636.576280] env[62519]: DEBUG nova.virt.hardware [None req-9f4c1900-996e-42d1-9657-410816fb70d2 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Flavor pref 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1636.576513] env[62519]: DEBUG nova.virt.hardware [None req-9f4c1900-996e-42d1-9657-410816fb70d2 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Image pref 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1636.576513] env[62519]: DEBUG nova.virt.hardware [None req-9f4c1900-996e-42d1-9657-410816fb70d2 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1636.576731] env[62519]: DEBUG nova.virt.hardware [None req-9f4c1900-996e-42d1-9657-410816fb70d2 tempest-ServersTestJSON-810763036 
tempest-ServersTestJSON-810763036-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1636.576852] env[62519]: DEBUG nova.virt.hardware [None req-9f4c1900-996e-42d1-9657-410816fb70d2 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62519) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1636.580133] env[62519]: DEBUG nova.virt.hardware [None req-9f4c1900-996e-42d1-9657-410816fb70d2 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Got 1 possible topologies {{(pid=62519) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1636.580350] env[62519]: DEBUG nova.virt.hardware [None req-9f4c1900-996e-42d1-9657-410816fb70d2 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1636.581286] env[62519]: DEBUG nova.virt.hardware [None req-9f4c1900-996e-42d1-9657-410816fb70d2 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1636.581473] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf014e86-c2e8-4ff5-b89d-7d63788dca6c {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1636.592028] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9fc9301c-ac27-45a3-b761-56f9dacc44dd {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1636.678548] env[62519]: DEBUG oslo_vmware.api [None req-6e183849-567e-4bbd-988c-d4ca57b40836 tempest-VolumesAdminNegativeTest-694306887 tempest-VolumesAdminNegativeTest-694306887-project-member] Task: {'id': task-1802614, 'name': ReconfigVM_Task, 'duration_secs': 0.59732} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1636.678921] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-6e183849-567e-4bbd-988c-d4ca57b40836 tempest-VolumesAdminNegativeTest-694306887 tempest-VolumesAdminNegativeTest-694306887-project-member] [instance: ed716912-752e-4c6d-b6c6-fb349668fa93] Reconfigured VM instance instance-0000003c to attach disk [datastore1] ed716912-752e-4c6d-b6c6-fb349668fa93/ed716912-752e-4c6d-b6c6-fb349668fa93.vmdk or device None with type sparse {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1636.679659] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-f14fc8cb-92ff-4a8d-bcf5-b8ef4af1b289 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1636.688340] env[62519]: DEBUG oslo_vmware.api [None req-6e183849-567e-4bbd-988c-d4ca57b40836 tempest-VolumesAdminNegativeTest-694306887 tempest-VolumesAdminNegativeTest-694306887-project-member] Waiting for the task: (returnval){ [ 1636.688340] env[62519]: value = "task-1802618" [ 1636.688340] env[62519]: _type = "Task" [ 1636.688340] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1636.701233] env[62519]: DEBUG oslo_vmware.api [None req-6e183849-567e-4bbd-988c-d4ca57b40836 tempest-VolumesAdminNegativeTest-694306887 tempest-VolumesAdminNegativeTest-694306887-project-member] Task: {'id': task-1802618, 'name': Rename_Task} progress is 5%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1636.751751] env[62519]: DEBUG nova.scheduler.client.report [None req-b0f1427a-eff6-4068-bbe3-05ff2c38cc24 tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] Inventory has not changed for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 157, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1636.753742] env[62519]: DEBUG nova.network.neutron [req-46393b5b-f420-4596-b07b-0f3dc059a089 req-e367dc0a-1397-4d07-bfc6-f8ff216be474 service nova] [instance: bace23b3-b7f4-4f3b-8986-0076440d096d] Updated VIF entry in instance network info cache for port e252f987-8815-4905-a206-237207d5ac4b. 
{{(pid=62519) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1636.753742] env[62519]: DEBUG nova.network.neutron [req-46393b5b-f420-4596-b07b-0f3dc059a089 req-e367dc0a-1397-4d07-bfc6-f8ff216be474 service nova] [instance: bace23b3-b7f4-4f3b-8986-0076440d096d] Updating instance_info_cache with network_info: [{"id": "e252f987-8815-4905-a206-237207d5ac4b", "address": "fa:16:3e:52:cb:aa", "network": {"id": "7ee257b5-e74c-4a05-bbf8-b502402717ed", "bridge": "br-int", "label": "tempest-ServerActionsV293TestJSON-630407603-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.249", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3038814dc07448fca423fb8c8dbe42c3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cbe1725d-6711-4e92-9a4e-d4802651e7d0", "external-id": "nsx-vlan-transportzone-679", "segmentation_id": 679, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape252f987-88", "ovs_interfaceid": "e252f987-8815-4905-a206-237207d5ac4b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1636.899396] env[62519]: INFO nova.compute.manager [-] [instance: 540167be-5295-4e28-9b25-16317746dd0e] Took 1.63 seconds to deallocate network for instance. [ 1636.951854] env[62519]: DEBUG oslo_vmware.api [None req-f8b1449c-fa5c-4ec6-8d24-81a01f4c8031 tempest-ServersTestMultiNic-797441404 tempest-ServersTestMultiNic-797441404-project-member] Task: {'id': task-1802616, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.30653} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1636.954105] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-f8b1449c-fa5c-4ec6-8d24-81a01f4c8031 tempest-ServersTestMultiNic-797441404 tempest-ServersTestMultiNic-797441404-project-member] Deleted the datastore file {{(pid=62519) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1636.954105] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-f8b1449c-fa5c-4ec6-8d24-81a01f4c8031 tempest-ServersTestMultiNic-797441404 tempest-ServersTestMultiNic-797441404-project-member] [instance: d5b6b7b4-ebc4-4d3d-8e16-bf0a4e974d67] Deleted contents of the VM from datastore datastore1 {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1636.954226] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-f8b1449c-fa5c-4ec6-8d24-81a01f4c8031 tempest-ServersTestMultiNic-797441404 tempest-ServersTestMultiNic-797441404-project-member] [instance: d5b6b7b4-ebc4-4d3d-8e16-bf0a4e974d67] Instance destroyed {{(pid=62519) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1636.954689] env[62519]: INFO nova.compute.manager [None req-f8b1449c-fa5c-4ec6-8d24-81a01f4c8031 tempest-ServersTestMultiNic-797441404 tempest-ServersTestMultiNic-797441404-project-member] [instance: d5b6b7b4-ebc4-4d3d-8e16-bf0a4e974d67] Took 1.92 seconds to destroy the instance on the hypervisor. [ 1636.954953] env[62519]: DEBUG oslo.service.loopingcall [None req-f8b1449c-fa5c-4ec6-8d24-81a01f4c8031 tempest-ServersTestMultiNic-797441404 tempest-ServersTestMultiNic-797441404-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62519) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1636.955317] env[62519]: DEBUG nova.compute.manager [-] [instance: d5b6b7b4-ebc4-4d3d-8e16-bf0a4e974d67] Deallocating network for instance {{(pid=62519) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2297}} [ 1636.955423] env[62519]: DEBUG nova.network.neutron [-] [instance: d5b6b7b4-ebc4-4d3d-8e16-bf0a4e974d67] deallocate_for_instance() {{(pid=62519) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1636.972674] env[62519]: DEBUG oslo_vmware.api [None req-cd6a10f9-f40f-4798-8fe6-7c97937bac88 tempest-SecurityGroupsTestJSON-38868017 tempest-SecurityGroupsTestJSON-38868017-project-member] Task: {'id': task-1802617, 'name': PowerOffVM_Task, 'duration_secs': 0.295235} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1636.972986] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-cd6a10f9-f40f-4798-8fe6-7c97937bac88 tempest-SecurityGroupsTestJSON-38868017 tempest-SecurityGroupsTestJSON-38868017-project-member] [instance: 4a29bff8-050a-4ad5-9d06-3a59c40b97ee] Powered off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1636.973175] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-cd6a10f9-f40f-4798-8fe6-7c97937bac88 tempest-SecurityGroupsTestJSON-38868017 tempest-SecurityGroupsTestJSON-38868017-project-member] [instance: 4a29bff8-050a-4ad5-9d06-3a59c40b97ee] Unregistering the VM {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1636.973519] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e8592c4a-d2f0-4e24-b0a9-efed05d06141 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1637.181766] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-cd6a10f9-f40f-4798-8fe6-7c97937bac88 tempest-SecurityGroupsTestJSON-38868017 tempest-SecurityGroupsTestJSON-38868017-project-member] [instance: 4a29bff8-050a-4ad5-9d06-3a59c40b97ee] Unregistered the VM {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1637.183024] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-cd6a10f9-f40f-4798-8fe6-7c97937bac88 tempest-SecurityGroupsTestJSON-38868017 tempest-SecurityGroupsTestJSON-38868017-project-member] [instance: 4a29bff8-050a-4ad5-9d06-3a59c40b97ee] Deleting contents of the VM from datastore datastore1 {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1637.183024] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-cd6a10f9-f40f-4798-8fe6-7c97937bac88 tempest-SecurityGroupsTestJSON-38868017 tempest-SecurityGroupsTestJSON-38868017-project-member] Deleting the datastore file [datastore1] 4a29bff8-050a-4ad5-9d06-3a59c40b97ee {{(pid=62519) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1637.186579] env[62519]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e88b93a3-ce74-43bb-88c5-f91448c34f19 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1637.194868] env[62519]: DEBUG oslo_vmware.api [None req-cd6a10f9-f40f-4798-8fe6-7c97937bac88 tempest-SecurityGroupsTestJSON-38868017 tempest-SecurityGroupsTestJSON-38868017-project-member] Waiting for the task: (returnval){ [ 1637.194868] env[62519]: value = "task-1802620" [ 1637.194868] env[62519]: _type = "Task" [ 1637.194868] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1637.207581] env[62519]: DEBUG oslo_vmware.api [None req-6e183849-567e-4bbd-988c-d4ca57b40836 tempest-VolumesAdminNegativeTest-694306887 tempest-VolumesAdminNegativeTest-694306887-project-member] Task: {'id': task-1802618, 'name': Rename_Task} progress is 99%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1637.214990] env[62519]: DEBUG oslo_vmware.api [None req-cd6a10f9-f40f-4798-8fe6-7c97937bac88 tempest-SecurityGroupsTestJSON-38868017 tempest-SecurityGroupsTestJSON-38868017-project-member] Task: {'id': task-1802620, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1637.259417] env[62519]: DEBUG oslo_concurrency.lockutils [None req-b0f1427a-eff6-4068-bbe3-05ff2c38cc24 tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.761s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1637.259417] env[62519]: DEBUG nova.compute.manager [None req-b0f1427a-eff6-4068-bbe3-05ff2c38cc24 tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] [instance: 99f22198-1a65-4d0d-b665-90c7063dbdb9] Start building networks asynchronously for instance. {{(pid=62519) _build_resources /opt/stack/nova/nova/compute/manager.py:2838}} [ 1637.263378] env[62519]: DEBUG oslo_concurrency.lockutils [req-46393b5b-f420-4596-b07b-0f3dc059a089 req-e367dc0a-1397-4d07-bfc6-f8ff216be474 service nova] Releasing lock "refresh_cache-bace23b3-b7f4-4f3b-8986-0076440d096d" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1637.263803] env[62519]: DEBUG nova.compute.manager [req-46393b5b-f420-4596-b07b-0f3dc059a089 req-e367dc0a-1397-4d07-bfc6-f8ff216be474 service nova] [instance: 540167be-5295-4e28-9b25-16317746dd0e] Received event network-vif-deleted-028b3466-6a8c-46ec-81a2-43206a5852ec {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1637.264188] env[62519]: INFO nova.compute.manager [req-46393b5b-f420-4596-b07b-0f3dc059a089 req-e367dc0a-1397-4d07-bfc6-f8ff216be474 service nova] [instance: 540167be-5295-4e28-9b25-16317746dd0e] Neutron deleted interface 028b3466-6a8c-46ec-81a2-43206a5852ec; detaching it from the instance and deleting it from the info cache [ 1637.264626] env[62519]: DEBUG nova.network.neutron [req-46393b5b-f420-4596-b07b-0f3dc059a089 req-e367dc0a-1397-4d07-bfc6-f8ff216be474 service nova] [instance: 540167be-5295-4e28-9b25-16317746dd0e] Updating instance_info_cache with network_info: [] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1637.269409] env[62519]: DEBUG oslo_concurrency.lockutils [None req-24267348-edd5-4ceb-9bdc-8a9f43b89725 tempest-ServerTagsTestJSON-1513198723 tempest-ServerTagsTestJSON-1513198723-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 25.265s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1637.274819] env[62519]: INFO nova.compute.claims [None req-24267348-edd5-4ceb-9bdc-8a9f43b89725 tempest-ServerTagsTestJSON-1513198723 tempest-ServerTagsTestJSON-1513198723-project-member] [instance: 8659f63a-5df9-4ff8-84dd-0722026dc820] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1637.408111] env[62519]: DEBUG oslo_concurrency.lockutils [None req-c88119e7-b14e-4315-8bdf-c1e98eca84f2 tempest-TenantUsagesTestJSON-1732852836 tempest-TenantUsagesTestJSON-1732852836-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1637.703982] env[62519]: DEBUG oslo_vmware.api [None req-6e183849-567e-4bbd-988c-d4ca57b40836 tempest-VolumesAdminNegativeTest-694306887 
tempest-VolumesAdminNegativeTest-694306887-project-member] Task: {'id': task-1802618, 'name': Rename_Task} progress is 99%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1637.709833] env[62519]: DEBUG oslo_vmware.api [None req-cd6a10f9-f40f-4798-8fe6-7c97937bac88 tempest-SecurityGroupsTestJSON-38868017 tempest-SecurityGroupsTestJSON-38868017-project-member] Task: {'id': task-1802620, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.294014} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1637.710102] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-cd6a10f9-f40f-4798-8fe6-7c97937bac88 tempest-SecurityGroupsTestJSON-38868017 tempest-SecurityGroupsTestJSON-38868017-project-member] Deleted the datastore file {{(pid=62519) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1637.710286] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-cd6a10f9-f40f-4798-8fe6-7c97937bac88 tempest-SecurityGroupsTestJSON-38868017 tempest-SecurityGroupsTestJSON-38868017-project-member] [instance: 4a29bff8-050a-4ad5-9d06-3a59c40b97ee] Deleted contents of the VM from datastore datastore1 {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1637.710456] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-cd6a10f9-f40f-4798-8fe6-7c97937bac88 tempest-SecurityGroupsTestJSON-38868017 tempest-SecurityGroupsTestJSON-38868017-project-member] [instance: 4a29bff8-050a-4ad5-9d06-3a59c40b97ee] Instance destroyed {{(pid=62519) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1637.710624] env[62519]: INFO nova.compute.manager [None req-cd6a10f9-f40f-4798-8fe6-7c97937bac88 tempest-SecurityGroupsTestJSON-38868017 tempest-SecurityGroupsTestJSON-38868017-project-member] [instance: 4a29bff8-050a-4ad5-9d06-3a59c40b97ee] Took 1.28 seconds to destroy the instance on the hypervisor. [ 1637.710866] env[62519]: DEBUG oslo.service.loopingcall [None req-cd6a10f9-f40f-4798-8fe6-7c97937bac88 tempest-SecurityGroupsTestJSON-38868017 tempest-SecurityGroupsTestJSON-38868017-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62519) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1637.711070] env[62519]: DEBUG nova.compute.manager [-] [instance: 4a29bff8-050a-4ad5-9d06-3a59c40b97ee] Deallocating network for instance {{(pid=62519) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2297}} [ 1637.711168] env[62519]: DEBUG nova.network.neutron [-] [instance: 4a29bff8-050a-4ad5-9d06-3a59c40b97ee] deallocate_for_instance() {{(pid=62519) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1637.764418] env[62519]: DEBUG nova.compute.utils [None req-b0f1427a-eff6-4068-bbe3-05ff2c38cc24 tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] Using /dev/sd instead of None {{(pid=62519) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1637.765763] env[62519]: DEBUG nova.compute.manager [None req-b0f1427a-eff6-4068-bbe3-05ff2c38cc24 tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] [instance: 99f22198-1a65-4d0d-b665-90c7063dbdb9] Allocating IP information in the background. 
{{(pid=62519) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1989}} [ 1637.769024] env[62519]: DEBUG nova.network.neutron [None req-b0f1427a-eff6-4068-bbe3-05ff2c38cc24 tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] [instance: 99f22198-1a65-4d0d-b665-90c7063dbdb9] allocate_for_instance() {{(pid=62519) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1637.772256] env[62519]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-3bf68b55-52c3-47a6-88ef-a4280439ef4f {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1637.785079] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64373768-3938-4ab1-8ee9-28cc402e82da {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1637.833371] env[62519]: DEBUG nova.compute.manager [req-46393b5b-f420-4596-b07b-0f3dc059a089 req-e367dc0a-1397-4d07-bfc6-f8ff216be474 service nova] [instance: 540167be-5295-4e28-9b25-16317746dd0e] Detach interface failed, port_id=028b3466-6a8c-46ec-81a2-43206a5852ec, reason: Instance 540167be-5295-4e28-9b25-16317746dd0e could not be found. {{(pid=62519) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11455}} [ 1637.887825] env[62519]: DEBUG nova.policy [None req-b0f1427a-eff6-4068-bbe3-05ff2c38cc24 tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '646dceb574c845f19ca0d9dca4874dc1', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '91c1732890db42f98f538f7a5ac0d542', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62519) authorize /opt/stack/nova/nova/policy.py:192}} [ 1638.028502] env[62519]: DEBUG nova.network.neutron [-] [instance: d5b6b7b4-ebc4-4d3d-8e16-bf0a4e974d67] Updating instance_info_cache with network_info: [] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1638.203154] env[62519]: DEBUG oslo_vmware.api [None req-6e183849-567e-4bbd-988c-d4ca57b40836 tempest-VolumesAdminNegativeTest-694306887 tempest-VolumesAdminNegativeTest-694306887-project-member] Task: {'id': task-1802618, 'name': Rename_Task, 'duration_secs': 1.237211} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1638.204980] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-6e183849-567e-4bbd-988c-d4ca57b40836 tempest-VolumesAdminNegativeTest-694306887 tempest-VolumesAdminNegativeTest-694306887-project-member] [instance: ed716912-752e-4c6d-b6c6-fb349668fa93] Powering on the VM {{(pid=62519) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1638.204980] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-fda7f0df-d70b-4cec-869e-e6951e3dc041 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1638.216097] env[62519]: DEBUG oslo_vmware.api [None req-6e183849-567e-4bbd-988c-d4ca57b40836 tempest-VolumesAdminNegativeTest-694306887 tempest-VolumesAdminNegativeTest-694306887-project-member] Waiting for the task: (returnval){ [ 1638.216097] env[62519]: value = "task-1802621" [ 1638.216097] env[62519]: _type = "Task" [ 1638.216097] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1638.227171] env[62519]: DEBUG oslo_vmware.api [None req-6e183849-567e-4bbd-988c-d4ca57b40836 tempest-VolumesAdminNegativeTest-694306887 tempest-VolumesAdminNegativeTest-694306887-project-member] Task: {'id': task-1802621, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1638.272990] env[62519]: DEBUG nova.compute.manager [None req-b0f1427a-eff6-4068-bbe3-05ff2c38cc24 tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] [instance: 99f22198-1a65-4d0d-b665-90c7063dbdb9] Start building block device mappings for instance. 
{{(pid=62519) _build_resources /opt/stack/nova/nova/compute/manager.py:2873}} [ 1638.310084] env[62519]: DEBUG nova.network.neutron [None req-9f4c1900-996e-42d1-9657-410816fb70d2 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] [instance: 417588f8-6288-4ecd-9764-dbc923549c5d] Successfully updated port: f0e6d0f1-55f9-41b4-83ca-fca156a01c8c {{(pid=62519) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1638.443347] env[62519]: DEBUG nova.compute.manager [req-38c25042-fdb7-403f-959a-d9cc932e3605 req-ffd526e4-1da2-4487-9c75-e96b95bfbdcb service nova] [instance: d5b6b7b4-ebc4-4d3d-8e16-bf0a4e974d67] Received event network-vif-deleted-d146cb22-9f7c-47f1-bcdf-fca468f1d2df {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1638.443347] env[62519]: DEBUG nova.compute.manager [req-38c25042-fdb7-403f-959a-d9cc932e3605 req-ffd526e4-1da2-4487-9c75-e96b95bfbdcb service nova] [instance: d5b6b7b4-ebc4-4d3d-8e16-bf0a4e974d67] Received event network-vif-deleted-260a58db-3019-4382-9551-1787dc70912c {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1638.446111] env[62519]: DEBUG nova.compute.manager [req-38c25042-fdb7-403f-959a-d9cc932e3605 req-ffd526e4-1da2-4487-9c75-e96b95bfbdcb service nova] [instance: 417588f8-6288-4ecd-9764-dbc923549c5d] Received event network-vif-plugged-f0e6d0f1-55f9-41b4-83ca-fca156a01c8c {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1638.446111] env[62519]: DEBUG oslo_concurrency.lockutils [req-38c25042-fdb7-403f-959a-d9cc932e3605 req-ffd526e4-1da2-4487-9c75-e96b95bfbdcb service nova] Acquiring lock "417588f8-6288-4ecd-9764-dbc923549c5d-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1638.446111] env[62519]: DEBUG oslo_concurrency.lockutils [req-38c25042-fdb7-403f-959a-d9cc932e3605 req-ffd526e4-1da2-4487-9c75-e96b95bfbdcb service nova] Lock "417588f8-6288-4ecd-9764-dbc923549c5d-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1638.446111] env[62519]: DEBUG oslo_concurrency.lockutils [req-38c25042-fdb7-403f-959a-d9cc932e3605 req-ffd526e4-1da2-4487-9c75-e96b95bfbdcb service nova] Lock "417588f8-6288-4ecd-9764-dbc923549c5d-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1638.446111] env[62519]: DEBUG nova.compute.manager [req-38c25042-fdb7-403f-959a-d9cc932e3605 req-ffd526e4-1da2-4487-9c75-e96b95bfbdcb service nova] [instance: 417588f8-6288-4ecd-9764-dbc923549c5d] No waiting events found dispatching network-vif-plugged-f0e6d0f1-55f9-41b4-83ca-fca156a01c8c {{(pid=62519) pop_instance_event /opt/stack/nova/nova/compute/manager.py:323}} [ 1638.446111] env[62519]: WARNING nova.compute.manager [req-38c25042-fdb7-403f-959a-d9cc932e3605 req-ffd526e4-1da2-4487-9c75-e96b95bfbdcb service nova] [instance: 417588f8-6288-4ecd-9764-dbc923549c5d] Received unexpected event network-vif-plugged-f0e6d0f1-55f9-41b4-83ca-fca156a01c8c for instance with vm_state building and task_state spawning. 
[ 1638.459374] env[62519]: DEBUG nova.network.neutron [None req-b0f1427a-eff6-4068-bbe3-05ff2c38cc24 tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] [instance: 99f22198-1a65-4d0d-b665-90c7063dbdb9] Successfully created port: 55cb0499-3e6e-42ef-bd75-edafccb32e03 {{(pid=62519) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1638.535976] env[62519]: INFO nova.compute.manager [-] [instance: d5b6b7b4-ebc4-4d3d-8e16-bf0a4e974d67] Took 1.58 seconds to deallocate network for instance. [ 1638.728073] env[62519]: DEBUG oslo_vmware.api [None req-6e183849-567e-4bbd-988c-d4ca57b40836 tempest-VolumesAdminNegativeTest-694306887 tempest-VolumesAdminNegativeTest-694306887-project-member] Task: {'id': task-1802621, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1638.759979] env[62519]: DEBUG nova.network.neutron [-] [instance: 4a29bff8-050a-4ad5-9d06-3a59c40b97ee] Updating instance_info_cache with network_info: [] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1638.813742] env[62519]: DEBUG oslo_concurrency.lockutils [None req-9f4c1900-996e-42d1-9657-410816fb70d2 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Acquiring lock "refresh_cache-417588f8-6288-4ecd-9764-dbc923549c5d" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1638.813742] env[62519]: DEBUG oslo_concurrency.lockutils [None req-9f4c1900-996e-42d1-9657-410816fb70d2 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Acquired lock "refresh_cache-417588f8-6288-4ecd-9764-dbc923549c5d" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1638.813742] env[62519]: DEBUG nova.network.neutron [None req-9f4c1900-996e-42d1-9657-410816fb70d2 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] [instance: 417588f8-6288-4ecd-9764-dbc923549c5d] Building network info cache for instance {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1638.877743] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b4912ec-9b2d-4cbe-93c1-5d5756d3269c {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1638.888555] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc9b8839-15b7-4fb5-b06e-fdd6f5c50530 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1638.928339] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae9eabb8-0986-4324-b6c8-b8ff5e4df08b {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1638.934729] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3d08a7d-9229-4fa4-84dc-827d33be4527 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1638.954024] env[62519]: DEBUG nova.compute.provider_tree [None req-24267348-edd5-4ceb-9bdc-8a9f43b89725 tempest-ServerTagsTestJSON-1513198723 tempest-ServerTagsTestJSON-1513198723-project-member] 
Inventory has not changed in ProviderTree for provider: f8ca0d98-9158-4b85-ae0e-b106f966dd44 {{(pid=62519) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1639.051174] env[62519]: DEBUG oslo_concurrency.lockutils [None req-f8b1449c-fa5c-4ec6-8d24-81a01f4c8031 tempest-ServersTestMultiNic-797441404 tempest-ServersTestMultiNic-797441404-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1639.227976] env[62519]: DEBUG oslo_vmware.api [None req-6e183849-567e-4bbd-988c-d4ca57b40836 tempest-VolumesAdminNegativeTest-694306887 tempest-VolumesAdminNegativeTest-694306887-project-member] Task: {'id': task-1802621, 'name': PowerOnVM_Task, 'duration_secs': 0.789585} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1639.228345] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-6e183849-567e-4bbd-988c-d4ca57b40836 tempest-VolumesAdminNegativeTest-694306887 tempest-VolumesAdminNegativeTest-694306887-project-member] [instance: ed716912-752e-4c6d-b6c6-fb349668fa93] Powered on the VM {{(pid=62519) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1639.228554] env[62519]: INFO nova.compute.manager [None req-6e183849-567e-4bbd-988c-d4ca57b40836 tempest-VolumesAdminNegativeTest-694306887 tempest-VolumesAdminNegativeTest-694306887-project-member] [instance: ed716912-752e-4c6d-b6c6-fb349668fa93] Took 10.75 seconds to spawn the instance on the hypervisor. [ 1639.228729] env[62519]: DEBUG nova.compute.manager [None req-6e183849-567e-4bbd-988c-d4ca57b40836 tempest-VolumesAdminNegativeTest-694306887 tempest-VolumesAdminNegativeTest-694306887-project-member] [instance: ed716912-752e-4c6d-b6c6-fb349668fa93] Checking state {{(pid=62519) _get_power_state /opt/stack/nova/nova/compute/manager.py:1799}} [ 1639.229560] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ee3efeb-c724-4046-93ae-130ebac9e040 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1639.266472] env[62519]: INFO nova.compute.manager [-] [instance: 4a29bff8-050a-4ad5-9d06-3a59c40b97ee] Took 1.55 seconds to deallocate network for instance. [ 1639.287760] env[62519]: DEBUG nova.compute.manager [None req-b0f1427a-eff6-4068-bbe3-05ff2c38cc24 tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] [instance: 99f22198-1a65-4d0d-b665-90c7063dbdb9] Start spawning the instance on the hypervisor. 
{{(pid=62519) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2647}} [ 1639.315440] env[62519]: DEBUG nova.virt.hardware [None req-b0f1427a-eff6-4068-bbe3-05ff2c38cc24 tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T07:56:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T07:55:55Z,direct_url=,disk_format='vmdk',id=15793716-f1d9-4a86-9030-717adf498693,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4069337684d348e0a1ab4eb6a1a2b14d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T07:55:56Z,virtual_size=,visibility=), allow threads: False {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1639.315733] env[62519]: DEBUG nova.virt.hardware [None req-b0f1427a-eff6-4068-bbe3-05ff2c38cc24 tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] Flavor limits 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1639.315963] env[62519]: DEBUG nova.virt.hardware [None req-b0f1427a-eff6-4068-bbe3-05ff2c38cc24 tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] Image limits 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1639.316201] env[62519]: DEBUG nova.virt.hardware [None req-b0f1427a-eff6-4068-bbe3-05ff2c38cc24 tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] Flavor pref 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1639.316361] env[62519]: DEBUG nova.virt.hardware [None req-b0f1427a-eff6-4068-bbe3-05ff2c38cc24 tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] Image pref 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1639.316510] env[62519]: DEBUG nova.virt.hardware [None req-b0f1427a-eff6-4068-bbe3-05ff2c38cc24 tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1639.316723] env[62519]: DEBUG nova.virt.hardware [None req-b0f1427a-eff6-4068-bbe3-05ff2c38cc24 tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1639.316907] env[62519]: DEBUG nova.virt.hardware [None req-b0f1427a-eff6-4068-bbe3-05ff2c38cc24 tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62519) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1639.317167] env[62519]: DEBUG nova.virt.hardware [None 
req-b0f1427a-eff6-4068-bbe3-05ff2c38cc24 tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] Got 1 possible topologies {{(pid=62519) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1639.317413] env[62519]: DEBUG nova.virt.hardware [None req-b0f1427a-eff6-4068-bbe3-05ff2c38cc24 tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1639.317616] env[62519]: DEBUG nova.virt.hardware [None req-b0f1427a-eff6-4068-bbe3-05ff2c38cc24 tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1639.320887] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7e12dd5-bcf3-444a-8459-f5e55f302f70 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1639.330894] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-115d57df-3194-4e02-a203-9ba5a55d1466 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1639.363059] env[62519]: DEBUG nova.network.neutron [None req-9f4c1900-996e-42d1-9657-410816fb70d2 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] [instance: 417588f8-6288-4ecd-9764-dbc923549c5d] Instance cache missing network info. {{(pid=62519) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1639.458125] env[62519]: DEBUG nova.scheduler.client.report [None req-24267348-edd5-4ceb-9bdc-8a9f43b89725 tempest-ServerTagsTestJSON-1513198723 tempest-ServerTagsTestJSON-1513198723-project-member] Inventory has not changed for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 157, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1639.531327] env[62519]: DEBUG nova.network.neutron [None req-9f4c1900-996e-42d1-9657-410816fb70d2 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] [instance: 417588f8-6288-4ecd-9764-dbc923549c5d] Updating instance_info_cache with network_info: [{"id": "f0e6d0f1-55f9-41b4-83ca-fca156a01c8c", "address": "fa:16:3e:c6:df:f2", "network": {"id": "8af40fa9-af04-4837-8508-5d0e5b7f1d9c", "bridge": "br-int", "label": "tempest-ServersTestJSON-1824239670-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0e755fb5a6e94068b6c99b1638081f5f", "mtu": 8950, "physical_network": 
"default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7ab8d568-adb0-4f3b-b6cc-68413e6546ae", "external-id": "nsx-vlan-transportzone-86", "segmentation_id": 86, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf0e6d0f1-55", "ovs_interfaceid": "f0e6d0f1-55f9-41b4-83ca-fca156a01c8c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1639.541827] env[62519]: DEBUG oslo_concurrency.lockutils [None req-ba83c5d0-762b-42f1-8643-2a65dc34a940 tempest-AttachInterfacesUnderV243Test-2090758485 tempest-AttachInterfacesUnderV243Test-2090758485-project-member] Acquiring lock "9f71845a-e80c-4822-b3de-717f1d83bc49" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1639.541948] env[62519]: DEBUG oslo_concurrency.lockutils [None req-ba83c5d0-762b-42f1-8643-2a65dc34a940 tempest-AttachInterfacesUnderV243Test-2090758485 tempest-AttachInterfacesUnderV243Test-2090758485-project-member] Lock "9f71845a-e80c-4822-b3de-717f1d83bc49" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1639.750309] env[62519]: INFO nova.compute.manager [None req-6e183849-567e-4bbd-988c-d4ca57b40836 tempest-VolumesAdminNegativeTest-694306887 tempest-VolumesAdminNegativeTest-694306887-project-member] [instance: ed716912-752e-4c6d-b6c6-fb349668fa93] Took 38.40 seconds to build instance. [ 1639.774669] env[62519]: DEBUG oslo_concurrency.lockutils [None req-cd6a10f9-f40f-4798-8fe6-7c97937bac88 tempest-SecurityGroupsTestJSON-38868017 tempest-SecurityGroupsTestJSON-38868017-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1639.966278] env[62519]: DEBUG oslo_concurrency.lockutils [None req-24267348-edd5-4ceb-9bdc-8a9f43b89725 tempest-ServerTagsTestJSON-1513198723 tempest-ServerTagsTestJSON-1513198723-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.694s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1639.966278] env[62519]: DEBUG nova.compute.manager [None req-24267348-edd5-4ceb-9bdc-8a9f43b89725 tempest-ServerTagsTestJSON-1513198723 tempest-ServerTagsTestJSON-1513198723-project-member] [instance: 8659f63a-5df9-4ff8-84dd-0722026dc820] Start building networks asynchronously for instance. 
{{(pid=62519) _build_resources /opt/stack/nova/nova/compute/manager.py:2838}} [ 1639.966837] env[62519]: DEBUG oslo_concurrency.lockutils [None req-f0afbcc8-8045-4111-b3d2-45982468a24d tempest-ServerGroupTestJSON-1731423365 tempest-ServerGroupTestJSON-1731423365-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 26.613s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1639.969518] env[62519]: INFO nova.compute.claims [None req-f0afbcc8-8045-4111-b3d2-45982468a24d tempest-ServerGroupTestJSON-1731423365 tempest-ServerGroupTestJSON-1731423365-project-member] [instance: 45d1aa86-a5c8-4e75-a6c8-5f55461702f8] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1640.034363] env[62519]: DEBUG oslo_concurrency.lockutils [None req-9f4c1900-996e-42d1-9657-410816fb70d2 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Releasing lock "refresh_cache-417588f8-6288-4ecd-9764-dbc923549c5d" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1640.034750] env[62519]: DEBUG nova.compute.manager [None req-9f4c1900-996e-42d1-9657-410816fb70d2 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] [instance: 417588f8-6288-4ecd-9764-dbc923549c5d] Instance network_info: |[{"id": "f0e6d0f1-55f9-41b4-83ca-fca156a01c8c", "address": "fa:16:3e:c6:df:f2", "network": {"id": "8af40fa9-af04-4837-8508-5d0e5b7f1d9c", "bridge": "br-int", "label": "tempest-ServersTestJSON-1824239670-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0e755fb5a6e94068b6c99b1638081f5f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7ab8d568-adb0-4f3b-b6cc-68413e6546ae", "external-id": "nsx-vlan-transportzone-86", "segmentation_id": 86, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf0e6d0f1-55", "ovs_interfaceid": "f0e6d0f1-55f9-41b4-83ca-fca156a01c8c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62519) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2004}} [ 1640.035165] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-9f4c1900-996e-42d1-9657-410816fb70d2 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] [instance: 417588f8-6288-4ecd-9764-dbc923549c5d] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:c6:df:f2', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '7ab8d568-adb0-4f3b-b6cc-68413e6546ae', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'f0e6d0f1-55f9-41b4-83ca-fca156a01c8c', 'vif_model': 'vmxnet3'}] {{(pid=62519) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1640.044612] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-9f4c1900-996e-42d1-9657-410816fb70d2 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Creating folder: 
Project (0e755fb5a6e94068b6c99b1638081f5f). Parent ref: group-v373567. {{(pid=62519) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1640.045808] env[62519]: DEBUG nova.network.neutron [None req-b0f1427a-eff6-4068-bbe3-05ff2c38cc24 tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] [instance: 99f22198-1a65-4d0d-b665-90c7063dbdb9] Successfully updated port: 55cb0499-3e6e-42ef-bd75-edafccb32e03 {{(pid=62519) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1640.047217] env[62519]: DEBUG nova.compute.manager [None req-ba83c5d0-762b-42f1-8643-2a65dc34a940 tempest-AttachInterfacesUnderV243Test-2090758485 tempest-AttachInterfacesUnderV243Test-2090758485-project-member] [instance: 9f71845a-e80c-4822-b3de-717f1d83bc49] Starting instance... {{(pid=62519) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2441}} [ 1640.055131] env[62519]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-0cdc2cc4-5e9f-4547-81ef-a82a11e8ad7f {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1640.073642] env[62519]: INFO nova.virt.vmwareapi.vm_util [None req-9f4c1900-996e-42d1-9657-410816fb70d2 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Created folder: Project (0e755fb5a6e94068b6c99b1638081f5f) in parent group-v373567. [ 1640.073642] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-9f4c1900-996e-42d1-9657-410816fb70d2 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Creating folder: Instances. Parent ref: group-v373739. {{(pid=62519) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1640.073642] env[62519]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-270d9e0f-e2ef-4e3b-83dd-4ecea4727c5c {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1640.092183] env[62519]: INFO nova.virt.vmwareapi.vm_util [None req-9f4c1900-996e-42d1-9657-410816fb70d2 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Created folder: Instances in parent group-v373739. [ 1640.092556] env[62519]: DEBUG oslo.service.loopingcall [None req-9f4c1900-996e-42d1-9657-410816fb70d2 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62519) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1640.092842] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 417588f8-6288-4ecd-9764-dbc923549c5d] Creating VM on the ESX host {{(pid=62519) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1640.093239] env[62519]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-2689c12a-75e5-4f35-872b-77e2cf409997 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1640.119477] env[62519]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1640.119477] env[62519]: value = "task-1802624" [ 1640.119477] env[62519]: _type = "Task" [ 1640.119477] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1640.129844] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1802624, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1640.474558] env[62519]: DEBUG nova.compute.manager [req-63410876-f56c-4f4e-99aa-04bb12a064ca req-ce063147-e9a0-4764-8b57-cc58b6abbffd service nova] [instance: 4a29bff8-050a-4ad5-9d06-3a59c40b97ee] Received event network-vif-deleted-53db4677-3e45-4941-aa9d-ab7dee4a3da6 {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1640.474996] env[62519]: DEBUG nova.compute.manager [req-63410876-f56c-4f4e-99aa-04bb12a064ca req-ce063147-e9a0-4764-8b57-cc58b6abbffd service nova] [instance: 417588f8-6288-4ecd-9764-dbc923549c5d] Received event network-changed-f0e6d0f1-55f9-41b4-83ca-fca156a01c8c {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1640.475123] env[62519]: DEBUG nova.compute.manager [req-63410876-f56c-4f4e-99aa-04bb12a064ca req-ce063147-e9a0-4764-8b57-cc58b6abbffd service nova] [instance: 417588f8-6288-4ecd-9764-dbc923549c5d] Refreshing instance network info cache due to event network-changed-f0e6d0f1-55f9-41b4-83ca-fca156a01c8c. {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11628}} [ 1640.475399] env[62519]: DEBUG oslo_concurrency.lockutils [req-63410876-f56c-4f4e-99aa-04bb12a064ca req-ce063147-e9a0-4764-8b57-cc58b6abbffd service nova] Acquiring lock "refresh_cache-417588f8-6288-4ecd-9764-dbc923549c5d" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1640.475703] env[62519]: DEBUG oslo_concurrency.lockutils [req-63410876-f56c-4f4e-99aa-04bb12a064ca req-ce063147-e9a0-4764-8b57-cc58b6abbffd service nova] Acquired lock "refresh_cache-417588f8-6288-4ecd-9764-dbc923549c5d" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1640.475883] env[62519]: DEBUG nova.network.neutron [req-63410876-f56c-4f4e-99aa-04bb12a064ca req-ce063147-e9a0-4764-8b57-cc58b6abbffd service nova] [instance: 417588f8-6288-4ecd-9764-dbc923549c5d] Refreshing network info cache for port f0e6d0f1-55f9-41b4-83ca-fca156a01c8c {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1640.480533] env[62519]: DEBUG nova.compute.utils [None req-24267348-edd5-4ceb-9bdc-8a9f43b89725 tempest-ServerTagsTestJSON-1513198723 tempest-ServerTagsTestJSON-1513198723-project-member] Using /dev/sd instead of None {{(pid=62519) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1640.482563] env[62519]: DEBUG nova.compute.manager [None req-24267348-edd5-4ceb-9bdc-8a9f43b89725 tempest-ServerTagsTestJSON-1513198723 tempest-ServerTagsTestJSON-1513198723-project-member] [instance: 8659f63a-5df9-4ff8-84dd-0722026dc820] Allocating IP information in the background. 
{{(pid=62519) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1989}} [ 1640.482748] env[62519]: DEBUG nova.network.neutron [None req-24267348-edd5-4ceb-9bdc-8a9f43b89725 tempest-ServerTagsTestJSON-1513198723 tempest-ServerTagsTestJSON-1513198723-project-member] [instance: 8659f63a-5df9-4ff8-84dd-0722026dc820] allocate_for_instance() {{(pid=62519) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1640.536731] env[62519]: DEBUG nova.policy [None req-24267348-edd5-4ceb-9bdc-8a9f43b89725 tempest-ServerTagsTestJSON-1513198723 tempest-ServerTagsTestJSON-1513198723-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '990a3c2fb42c4ae0ba254909d97c220d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'b316f426b6f64dff9563b62f285cd6b0', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62519) authorize /opt/stack/nova/nova/policy.py:192}} [ 1640.559097] env[62519]: DEBUG oslo_concurrency.lockutils [None req-b0f1427a-eff6-4068-bbe3-05ff2c38cc24 tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] Acquiring lock "refresh_cache-99f22198-1a65-4d0d-b665-90c7063dbdb9" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1640.559097] env[62519]: DEBUG oslo_concurrency.lockutils [None req-b0f1427a-eff6-4068-bbe3-05ff2c38cc24 tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] Acquired lock "refresh_cache-99f22198-1a65-4d0d-b665-90c7063dbdb9" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1640.559306] env[62519]: DEBUG nova.network.neutron [None req-b0f1427a-eff6-4068-bbe3-05ff2c38cc24 tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] [instance: 99f22198-1a65-4d0d-b665-90c7063dbdb9] Building network info cache for instance {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1640.586380] env[62519]: DEBUG oslo_concurrency.lockutils [None req-ba83c5d0-762b-42f1-8643-2a65dc34a940 tempest-AttachInterfacesUnderV243Test-2090758485 tempest-AttachInterfacesUnderV243Test-2090758485-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1640.631438] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1802624, 'name': CreateVM_Task, 'duration_secs': 0.501975} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1640.631622] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 417588f8-6288-4ecd-9764-dbc923549c5d] Created VM on the ESX host {{(pid=62519) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1640.632318] env[62519]: DEBUG oslo_concurrency.lockutils [None req-9f4c1900-996e-42d1-9657-410816fb70d2 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1640.632508] env[62519]: DEBUG oslo_concurrency.lockutils [None req-9f4c1900-996e-42d1-9657-410816fb70d2 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Acquired lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1640.632821] env[62519]: DEBUG oslo_concurrency.lockutils [None req-9f4c1900-996e-42d1-9657-410816fb70d2 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1640.633105] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f19262d9-482a-4b8c-be30-7447a100d65a {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1640.639113] env[62519]: DEBUG oslo_vmware.api [None req-9f4c1900-996e-42d1-9657-410816fb70d2 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Waiting for the task: (returnval){ [ 1640.639113] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]525a8820-2072-68dd-541a-26eb5efed40d" [ 1640.639113] env[62519]: _type = "Task" [ 1640.639113] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1640.648483] env[62519]: DEBUG oslo_vmware.api [None req-9f4c1900-996e-42d1-9657-410816fb70d2 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]525a8820-2072-68dd-541a-26eb5efed40d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1640.676520] env[62519]: DEBUG oslo_vmware.rw_handles [None req-1b7ebb10-9719-4c2b-be48-1e9d9d400c00 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/522156cd-e48a-fffd-21fd-7097b3bed776/disk-0.vmdk. 
{{(pid=62519) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1640.677628] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f971efb-5ddf-4dfe-9d87-c85bf5ebb8ca {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1640.687567] env[62519]: DEBUG oslo_vmware.rw_handles [None req-1b7ebb10-9719-4c2b-be48-1e9d9d400c00 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/522156cd-e48a-fffd-21fd-7097b3bed776/disk-0.vmdk is in state: ready. {{(pid=62519) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1640.687567] env[62519]: ERROR oslo_vmware.rw_handles [None req-1b7ebb10-9719-4c2b-be48-1e9d9d400c00 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Aborting lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/522156cd-e48a-fffd-21fd-7097b3bed776/disk-0.vmdk due to incomplete transfer. [ 1640.687567] env[62519]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-840098a7-e601-47c4-968e-3a5f982ff01b {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1640.699798] env[62519]: DEBUG oslo_vmware.rw_handles [None req-1b7ebb10-9719-4c2b-be48-1e9d9d400c00 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Closed VMDK read handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/522156cd-e48a-fffd-21fd-7097b3bed776/disk-0.vmdk. {{(pid=62519) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1640.700078] env[62519]: DEBUG nova.virt.vmwareapi.images [None req-1b7ebb10-9719-4c2b-be48-1e9d9d400c00 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] [instance: c60f5d73-9d6d-4b5f-b71b-00b6b787d482] Uploaded image b36eba9a-e231-4006-9afd-ecb0aaa21542 to the Glance image server {{(pid=62519) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1640.702734] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-1b7ebb10-9719-4c2b-be48-1e9d9d400c00 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] [instance: c60f5d73-9d6d-4b5f-b71b-00b6b787d482] Destroying the VM {{(pid=62519) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1640.703493] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-83a87f19-c8a3-4c0d-985f-e60e59a2fe67 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1640.715309] env[62519]: DEBUG oslo_vmware.api [None req-1b7ebb10-9719-4c2b-be48-1e9d9d400c00 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Waiting for the task: (returnval){ [ 1640.715309] env[62519]: value = "task-1802625" [ 1640.715309] env[62519]: _type = "Task" [ 1640.715309] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1640.726512] env[62519]: DEBUG oslo_vmware.api [None req-1b7ebb10-9719-4c2b-be48-1e9d9d400c00 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Task: {'id': task-1802625, 'name': Destroy_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1640.857154] env[62519]: DEBUG nova.network.neutron [None req-24267348-edd5-4ceb-9bdc-8a9f43b89725 tempest-ServerTagsTestJSON-1513198723 tempest-ServerTagsTestJSON-1513198723-project-member] [instance: 8659f63a-5df9-4ff8-84dd-0722026dc820] Successfully created port: b025403f-8225-412e-908e-f5078a442ed2 {{(pid=62519) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1640.988414] env[62519]: DEBUG nova.compute.manager [None req-24267348-edd5-4ceb-9bdc-8a9f43b89725 tempest-ServerTagsTestJSON-1513198723 tempest-ServerTagsTestJSON-1513198723-project-member] [instance: 8659f63a-5df9-4ff8-84dd-0722026dc820] Start building block device mappings for instance. {{(pid=62519) _build_resources /opt/stack/nova/nova/compute/manager.py:2873}} [ 1641.120613] env[62519]: DEBUG nova.network.neutron [None req-b0f1427a-eff6-4068-bbe3-05ff2c38cc24 tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] [instance: 99f22198-1a65-4d0d-b665-90c7063dbdb9] Instance cache missing network info. {{(pid=62519) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1641.152551] env[62519]: DEBUG oslo_vmware.api [None req-9f4c1900-996e-42d1-9657-410816fb70d2 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]525a8820-2072-68dd-541a-26eb5efed40d, 'name': SearchDatastore_Task, 'duration_secs': 0.085929} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1641.152879] env[62519]: DEBUG oslo_concurrency.lockutils [None req-9f4c1900-996e-42d1-9657-410816fb70d2 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Releasing lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1641.153132] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-9f4c1900-996e-42d1-9657-410816fb70d2 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] [instance: 417588f8-6288-4ecd-9764-dbc923549c5d] Processing image 15793716-f1d9-4a86-9030-717adf498693 {{(pid=62519) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1641.153790] env[62519]: DEBUG oslo_concurrency.lockutils [None req-9f4c1900-996e-42d1-9657-410816fb70d2 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1641.153950] env[62519]: DEBUG oslo_concurrency.lockutils [None req-9f4c1900-996e-42d1-9657-410816fb70d2 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Acquired lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1641.154187] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-9f4c1900-996e-42d1-9657-410816fb70d2 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Creating directory with path [datastore1] 
devstack-image-cache_base {{(pid=62519) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1641.154435] env[62519]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-4d881d5d-c9f5-4d21-8302-b45b9876b3e2 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1641.176507] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-9f4c1900-996e-42d1-9657-410816fb70d2 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62519) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1641.176710] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-9f4c1900-996e-42d1-9657-410816fb70d2 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62519) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1641.177614] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c6ebe35d-6091-4262-a491-48a743da2108 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1641.184793] env[62519]: DEBUG oslo_vmware.api [None req-9f4c1900-996e-42d1-9657-410816fb70d2 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Waiting for the task: (returnval){ [ 1641.184793] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]523b91fb-d4af-4f05-c294-db9334c064c3" [ 1641.184793] env[62519]: _type = "Task" [ 1641.184793] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1641.196075] env[62519]: DEBUG oslo_vmware.api [None req-9f4c1900-996e-42d1-9657-410816fb70d2 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]523b91fb-d4af-4f05-c294-db9334c064c3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1641.227652] env[62519]: DEBUG oslo_vmware.api [None req-1b7ebb10-9719-4c2b-be48-1e9d9d400c00 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Task: {'id': task-1802625, 'name': Destroy_Task, 'duration_secs': 0.489793} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1641.227966] env[62519]: INFO nova.virt.vmwareapi.vm_util [None req-1b7ebb10-9719-4c2b-be48-1e9d9d400c00 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] [instance: c60f5d73-9d6d-4b5f-b71b-00b6b787d482] Destroyed the VM [ 1641.228254] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-1b7ebb10-9719-4c2b-be48-1e9d9d400c00 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] [instance: c60f5d73-9d6d-4b5f-b71b-00b6b787d482] Deleting Snapshot of the VM instance {{(pid=62519) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1641.228531] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-dfa1a443-d8ba-4955-bea8-907c94b01a5d {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1641.239652] env[62519]: DEBUG oslo_vmware.api [None req-1b7ebb10-9719-4c2b-be48-1e9d9d400c00 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Waiting for the task: (returnval){ [ 1641.239652] env[62519]: value = "task-1802626" [ 1641.239652] env[62519]: _type = "Task" [ 1641.239652] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1641.252507] env[62519]: DEBUG oslo_vmware.api [None req-1b7ebb10-9719-4c2b-be48-1e9d9d400c00 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Task: {'id': task-1802626, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1641.263507] env[62519]: DEBUG oslo_concurrency.lockutils [None req-6e183849-567e-4bbd-988c-d4ca57b40836 tempest-VolumesAdminNegativeTest-694306887 tempest-VolumesAdminNegativeTest-694306887-project-member] Lock "ed716912-752e-4c6d-b6c6-fb349668fa93" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 67.295s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1641.418261] env[62519]: DEBUG nova.network.neutron [req-63410876-f56c-4f4e-99aa-04bb12a064ca req-ce063147-e9a0-4764-8b57-cc58b6abbffd service nova] [instance: 417588f8-6288-4ecd-9764-dbc923549c5d] Updated VIF entry in instance network info cache for port f0e6d0f1-55f9-41b4-83ca-fca156a01c8c. 
{{(pid=62519) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1641.418381] env[62519]: DEBUG nova.network.neutron [req-63410876-f56c-4f4e-99aa-04bb12a064ca req-ce063147-e9a0-4764-8b57-cc58b6abbffd service nova] [instance: 417588f8-6288-4ecd-9764-dbc923549c5d] Updating instance_info_cache with network_info: [{"id": "f0e6d0f1-55f9-41b4-83ca-fca156a01c8c", "address": "fa:16:3e:c6:df:f2", "network": {"id": "8af40fa9-af04-4837-8508-5d0e5b7f1d9c", "bridge": "br-int", "label": "tempest-ServersTestJSON-1824239670-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0e755fb5a6e94068b6c99b1638081f5f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7ab8d568-adb0-4f3b-b6cc-68413e6546ae", "external-id": "nsx-vlan-transportzone-86", "segmentation_id": 86, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf0e6d0f1-55", "ovs_interfaceid": "f0e6d0f1-55f9-41b4-83ca-fca156a01c8c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1641.425485] env[62519]: DEBUG nova.network.neutron [None req-b0f1427a-eff6-4068-bbe3-05ff2c38cc24 tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] [instance: 99f22198-1a65-4d0d-b665-90c7063dbdb9] Updating instance_info_cache with network_info: [{"id": "55cb0499-3e6e-42ef-bd75-edafccb32e03", "address": "fa:16:3e:cb:3e:93", "network": {"id": "227244cd-a495-40a7-b8e1-1fb111ecd3ef", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-482185283-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "91c1732890db42f98f538f7a5ac0d542", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2bf99f85-3a5c-47c6-a603-e215be6ab0bd", "external-id": "nsx-vlan-transportzone-855", "segmentation_id": 855, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap55cb0499-3e", "ovs_interfaceid": "55cb0499-3e6e-42ef-bd75-edafccb32e03", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1641.596840] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-339ffb62-898b-4e70-8fb0-7aa1049306bf {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1641.605726] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-5a26561a-499e-49f0-a53f-891399873eaa {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1641.639472] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a95d616e-a572-4b56-9c14-01b86c475cc3 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1641.648497] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7532ea44-9bfa-48aa-a3bb-303e045d9378 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1641.664423] env[62519]: DEBUG nova.compute.provider_tree [None req-f0afbcc8-8045-4111-b3d2-45982468a24d tempest-ServerGroupTestJSON-1731423365 tempest-ServerGroupTestJSON-1731423365-project-member] Inventory has not changed in ProviderTree for provider: f8ca0d98-9158-4b85-ae0e-b106f966dd44 {{(pid=62519) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1641.699282] env[62519]: DEBUG oslo_vmware.api [None req-9f4c1900-996e-42d1-9657-410816fb70d2 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]523b91fb-d4af-4f05-c294-db9334c064c3, 'name': SearchDatastore_Task, 'duration_secs': 0.046977} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1641.700426] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c0761a21-c12d-4ba1-8d67-6d7d9dc033c0 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1641.706507] env[62519]: DEBUG oslo_vmware.api [None req-9f4c1900-996e-42d1-9657-410816fb70d2 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Waiting for the task: (returnval){ [ 1641.706507] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]52617ce4-d5c2-bebe-e4f0-131e58932dd1" [ 1641.706507] env[62519]: _type = "Task" [ 1641.706507] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1641.715798] env[62519]: DEBUG oslo_vmware.api [None req-9f4c1900-996e-42d1-9657-410816fb70d2 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52617ce4-d5c2-bebe-e4f0-131e58932dd1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1641.750055] env[62519]: DEBUG oslo_vmware.api [None req-1b7ebb10-9719-4c2b-be48-1e9d9d400c00 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Task: {'id': task-1802626, 'name': RemoveSnapshot_Task} progress is 80%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1641.921192] env[62519]: DEBUG oslo_concurrency.lockutils [req-63410876-f56c-4f4e-99aa-04bb12a064ca req-ce063147-e9a0-4764-8b57-cc58b6abbffd service nova] Releasing lock "refresh_cache-417588f8-6288-4ecd-9764-dbc923549c5d" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1641.921547] env[62519]: DEBUG nova.compute.manager [req-63410876-f56c-4f4e-99aa-04bb12a064ca req-ce063147-e9a0-4764-8b57-cc58b6abbffd service nova] [instance: 99f22198-1a65-4d0d-b665-90c7063dbdb9] Received event network-vif-plugged-55cb0499-3e6e-42ef-bd75-edafccb32e03 {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1641.921781] env[62519]: DEBUG oslo_concurrency.lockutils [req-63410876-f56c-4f4e-99aa-04bb12a064ca req-ce063147-e9a0-4764-8b57-cc58b6abbffd service nova] Acquiring lock "99f22198-1a65-4d0d-b665-90c7063dbdb9-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1641.922043] env[62519]: DEBUG oslo_concurrency.lockutils [req-63410876-f56c-4f4e-99aa-04bb12a064ca req-ce063147-e9a0-4764-8b57-cc58b6abbffd service nova] Lock "99f22198-1a65-4d0d-b665-90c7063dbdb9-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1641.922251] env[62519]: DEBUG oslo_concurrency.lockutils [req-63410876-f56c-4f4e-99aa-04bb12a064ca req-ce063147-e9a0-4764-8b57-cc58b6abbffd service nova] Lock "99f22198-1a65-4d0d-b665-90c7063dbdb9-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1641.922431] env[62519]: DEBUG nova.compute.manager [req-63410876-f56c-4f4e-99aa-04bb12a064ca req-ce063147-e9a0-4764-8b57-cc58b6abbffd service nova] [instance: 99f22198-1a65-4d0d-b665-90c7063dbdb9] No waiting events found dispatching network-vif-plugged-55cb0499-3e6e-42ef-bd75-edafccb32e03 {{(pid=62519) pop_instance_event /opt/stack/nova/nova/compute/manager.py:323}} [ 1641.922627] env[62519]: WARNING nova.compute.manager [req-63410876-f56c-4f4e-99aa-04bb12a064ca req-ce063147-e9a0-4764-8b57-cc58b6abbffd service nova] [instance: 99f22198-1a65-4d0d-b665-90c7063dbdb9] Received unexpected event network-vif-plugged-55cb0499-3e6e-42ef-bd75-edafccb32e03 for instance with vm_state building and task_state spawning. [ 1641.922800] env[62519]: DEBUG nova.compute.manager [req-63410876-f56c-4f4e-99aa-04bb12a064ca req-ce063147-e9a0-4764-8b57-cc58b6abbffd service nova] [instance: 99f22198-1a65-4d0d-b665-90c7063dbdb9] Received event network-changed-55cb0499-3e6e-42ef-bd75-edafccb32e03 {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1641.922981] env[62519]: DEBUG nova.compute.manager [req-63410876-f56c-4f4e-99aa-04bb12a064ca req-ce063147-e9a0-4764-8b57-cc58b6abbffd service nova] [instance: 99f22198-1a65-4d0d-b665-90c7063dbdb9] Refreshing instance network info cache due to event network-changed-55cb0499-3e6e-42ef-bd75-edafccb32e03. 
{{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11628}} [ 1641.923170] env[62519]: DEBUG oslo_concurrency.lockutils [req-63410876-f56c-4f4e-99aa-04bb12a064ca req-ce063147-e9a0-4764-8b57-cc58b6abbffd service nova] Acquiring lock "refresh_cache-99f22198-1a65-4d0d-b665-90c7063dbdb9" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1641.927826] env[62519]: DEBUG oslo_concurrency.lockutils [None req-b0f1427a-eff6-4068-bbe3-05ff2c38cc24 tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] Releasing lock "refresh_cache-99f22198-1a65-4d0d-b665-90c7063dbdb9" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1641.928181] env[62519]: DEBUG nova.compute.manager [None req-b0f1427a-eff6-4068-bbe3-05ff2c38cc24 tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] [instance: 99f22198-1a65-4d0d-b665-90c7063dbdb9] Instance network_info: |[{"id": "55cb0499-3e6e-42ef-bd75-edafccb32e03", "address": "fa:16:3e:cb:3e:93", "network": {"id": "227244cd-a495-40a7-b8e1-1fb111ecd3ef", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-482185283-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "91c1732890db42f98f538f7a5ac0d542", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2bf99f85-3a5c-47c6-a603-e215be6ab0bd", "external-id": "nsx-vlan-transportzone-855", "segmentation_id": 855, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap55cb0499-3e", "ovs_interfaceid": "55cb0499-3e6e-42ef-bd75-edafccb32e03", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62519) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2004}} [ 1641.928704] env[62519]: DEBUG oslo_concurrency.lockutils [req-63410876-f56c-4f4e-99aa-04bb12a064ca req-ce063147-e9a0-4764-8b57-cc58b6abbffd service nova] Acquired lock "refresh_cache-99f22198-1a65-4d0d-b665-90c7063dbdb9" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1641.928880] env[62519]: DEBUG nova.network.neutron [req-63410876-f56c-4f4e-99aa-04bb12a064ca req-ce063147-e9a0-4764-8b57-cc58b6abbffd service nova] [instance: 99f22198-1a65-4d0d-b665-90c7063dbdb9] Refreshing network info cache for port 55cb0499-3e6e-42ef-bd75-edafccb32e03 {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1641.930322] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-b0f1427a-eff6-4068-bbe3-05ff2c38cc24 tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] [instance: 99f22198-1a65-4d0d-b665-90c7063dbdb9] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:cb:3e:93', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '2bf99f85-3a5c-47c6-a603-e215be6ab0bd', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 
'55cb0499-3e6e-42ef-bd75-edafccb32e03', 'vif_model': 'vmxnet3'}] {{(pid=62519) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1641.938391] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-b0f1427a-eff6-4068-bbe3-05ff2c38cc24 tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] Creating folder: Project (91c1732890db42f98f538f7a5ac0d542). Parent ref: group-v373567. {{(pid=62519) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1641.939855] env[62519]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-3b27d38b-3f8b-4d82-99ff-bf0408fd864d {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1641.956785] env[62519]: INFO nova.virt.vmwareapi.vm_util [None req-b0f1427a-eff6-4068-bbe3-05ff2c38cc24 tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] Created folder: Project (91c1732890db42f98f538f7a5ac0d542) in parent group-v373567. [ 1641.956981] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-b0f1427a-eff6-4068-bbe3-05ff2c38cc24 tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] Creating folder: Instances. Parent ref: group-v373742. {{(pid=62519) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1641.957566] env[62519]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-879baa86-3b27-4b9d-a664-da62575684a3 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1641.971739] env[62519]: INFO nova.virt.vmwareapi.vm_util [None req-b0f1427a-eff6-4068-bbe3-05ff2c38cc24 tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] Created folder: Instances in parent group-v373742. [ 1641.972376] env[62519]: DEBUG oslo.service.loopingcall [None req-b0f1427a-eff6-4068-bbe3-05ff2c38cc24 tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62519) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1641.972376] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 99f22198-1a65-4d0d-b665-90c7063dbdb9] Creating VM on the ESX host {{(pid=62519) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1641.972534] env[62519]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b8107f14-8053-42f4-871c-baf9a214b5b3 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1641.995924] env[62519]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1641.995924] env[62519]: value = "task-1802629" [ 1641.995924] env[62519]: _type = "Task" [ 1641.995924] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1642.002292] env[62519]: DEBUG nova.compute.manager [None req-24267348-edd5-4ceb-9bdc-8a9f43b89725 tempest-ServerTagsTestJSON-1513198723 tempest-ServerTagsTestJSON-1513198723-project-member] [instance: 8659f63a-5df9-4ff8-84dd-0722026dc820] Start spawning the instance on the hypervisor. 
{{(pid=62519) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2647}} [ 1642.009831] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1802629, 'name': CreateVM_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1642.032732] env[62519]: DEBUG nova.virt.hardware [None req-24267348-edd5-4ceb-9bdc-8a9f43b89725 tempest-ServerTagsTestJSON-1513198723 tempest-ServerTagsTestJSON-1513198723-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T07:56:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T07:55:55Z,direct_url=,disk_format='vmdk',id=15793716-f1d9-4a86-9030-717adf498693,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4069337684d348e0a1ab4eb6a1a2b14d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T07:55:56Z,virtual_size=,visibility=), allow threads: False {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1642.033268] env[62519]: DEBUG nova.virt.hardware [None req-24267348-edd5-4ceb-9bdc-8a9f43b89725 tempest-ServerTagsTestJSON-1513198723 tempest-ServerTagsTestJSON-1513198723-project-member] Flavor limits 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1642.033449] env[62519]: DEBUG nova.virt.hardware [None req-24267348-edd5-4ceb-9bdc-8a9f43b89725 tempest-ServerTagsTestJSON-1513198723 tempest-ServerTagsTestJSON-1513198723-project-member] Image limits 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1642.033765] env[62519]: DEBUG nova.virt.hardware [None req-24267348-edd5-4ceb-9bdc-8a9f43b89725 tempest-ServerTagsTestJSON-1513198723 tempest-ServerTagsTestJSON-1513198723-project-member] Flavor pref 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1642.034027] env[62519]: DEBUG nova.virt.hardware [None req-24267348-edd5-4ceb-9bdc-8a9f43b89725 tempest-ServerTagsTestJSON-1513198723 tempest-ServerTagsTestJSON-1513198723-project-member] Image pref 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1642.034262] env[62519]: DEBUG nova.virt.hardware [None req-24267348-edd5-4ceb-9bdc-8a9f43b89725 tempest-ServerTagsTestJSON-1513198723 tempest-ServerTagsTestJSON-1513198723-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1642.034575] env[62519]: DEBUG nova.virt.hardware [None req-24267348-edd5-4ceb-9bdc-8a9f43b89725 tempest-ServerTagsTestJSON-1513198723 tempest-ServerTagsTestJSON-1513198723-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1642.034834] env[62519]: DEBUG nova.virt.hardware [None req-24267348-edd5-4ceb-9bdc-8a9f43b89725 tempest-ServerTagsTestJSON-1513198723 tempest-ServerTagsTestJSON-1513198723-project-member] Build topologies 
for 1 vcpu(s) 1:1:1 {{(pid=62519) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1642.035284] env[62519]: DEBUG nova.virt.hardware [None req-24267348-edd5-4ceb-9bdc-8a9f43b89725 tempest-ServerTagsTestJSON-1513198723 tempest-ServerTagsTestJSON-1513198723-project-member] Got 1 possible topologies {{(pid=62519) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1642.035458] env[62519]: DEBUG nova.virt.hardware [None req-24267348-edd5-4ceb-9bdc-8a9f43b89725 tempest-ServerTagsTestJSON-1513198723 tempest-ServerTagsTestJSON-1513198723-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1642.035779] env[62519]: DEBUG nova.virt.hardware [None req-24267348-edd5-4ceb-9bdc-8a9f43b89725 tempest-ServerTagsTestJSON-1513198723 tempest-ServerTagsTestJSON-1513198723-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1642.037043] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e070aa19-3dc6-40f5-8f69-7884d72443d6 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1642.046016] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7f6b4ea-cfe6-4cf7-b54a-bd85db02cba4 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1642.167606] env[62519]: DEBUG nova.scheduler.client.report [None req-f0afbcc8-8045-4111-b3d2-45982468a24d tempest-ServerGroupTestJSON-1731423365 tempest-ServerGroupTestJSON-1731423365-project-member] Inventory has not changed for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 157, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1642.218238] env[62519]: DEBUG oslo_vmware.api [None req-9f4c1900-996e-42d1-9657-410816fb70d2 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52617ce4-d5c2-bebe-e4f0-131e58932dd1, 'name': SearchDatastore_Task, 'duration_secs': 0.017722} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1642.218545] env[62519]: DEBUG oslo_concurrency.lockutils [None req-9f4c1900-996e-42d1-9657-410816fb70d2 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Releasing lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1642.218807] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-9f4c1900-996e-42d1-9657-410816fb70d2 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk to [datastore1] 417588f8-6288-4ecd-9764-dbc923549c5d/417588f8-6288-4ecd-9764-dbc923549c5d.vmdk {{(pid=62519) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1642.219086] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-fe57d1d1-db38-4d81-9b4e-c961aeed5060 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1642.226454] env[62519]: DEBUG oslo_vmware.api [None req-9f4c1900-996e-42d1-9657-410816fb70d2 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Waiting for the task: (returnval){ [ 1642.226454] env[62519]: value = "task-1802630" [ 1642.226454] env[62519]: _type = "Task" [ 1642.226454] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1642.236768] env[62519]: DEBUG oslo_vmware.api [None req-9f4c1900-996e-42d1-9657-410816fb70d2 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Task: {'id': task-1802630, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1642.249460] env[62519]: DEBUG oslo_vmware.api [None req-1b7ebb10-9719-4c2b-be48-1e9d9d400c00 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Task: {'id': task-1802626, 'name': RemoveSnapshot_Task, 'duration_secs': 0.880871} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1642.249759] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-1b7ebb10-9719-4c2b-be48-1e9d9d400c00 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] [instance: c60f5d73-9d6d-4b5f-b71b-00b6b787d482] Deleted Snapshot of the VM instance {{(pid=62519) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1642.249979] env[62519]: INFO nova.compute.manager [None req-1b7ebb10-9719-4c2b-be48-1e9d9d400c00 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] [instance: c60f5d73-9d6d-4b5f-b71b-00b6b787d482] Took 15.52 seconds to snapshot the instance on the hypervisor. 
[ 1642.429393] env[62519]: DEBUG oslo_concurrency.lockutils [None req-82cc34c3-57f7-4d42-986e-90dfdcc61974 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Acquiring lock "54a8aa34-1595-4494-ba68-6915611631ce" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1642.429730] env[62519]: DEBUG oslo_concurrency.lockutils [None req-82cc34c3-57f7-4d42-986e-90dfdcc61974 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Lock "54a8aa34-1595-4494-ba68-6915611631ce" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1642.460521] env[62519]: DEBUG oslo_concurrency.lockutils [None req-6a19a777-daec-40e9-be4e-bd92f98777ed tempest-VolumesAdminNegativeTest-694306887 tempest-VolumesAdminNegativeTest-694306887-project-member] Acquiring lock "ed716912-752e-4c6d-b6c6-fb349668fa93" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1642.460777] env[62519]: DEBUG oslo_concurrency.lockutils [None req-6a19a777-daec-40e9-be4e-bd92f98777ed tempest-VolumesAdminNegativeTest-694306887 tempest-VolumesAdminNegativeTest-694306887-project-member] Lock "ed716912-752e-4c6d-b6c6-fb349668fa93" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1642.461078] env[62519]: DEBUG oslo_concurrency.lockutils [None req-6a19a777-daec-40e9-be4e-bd92f98777ed tempest-VolumesAdminNegativeTest-694306887 tempest-VolumesAdminNegativeTest-694306887-project-member] Acquiring lock "ed716912-752e-4c6d-b6c6-fb349668fa93-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1642.461277] env[62519]: DEBUG oslo_concurrency.lockutils [None req-6a19a777-daec-40e9-be4e-bd92f98777ed tempest-VolumesAdminNegativeTest-694306887 tempest-VolumesAdminNegativeTest-694306887-project-member] Lock "ed716912-752e-4c6d-b6c6-fb349668fa93-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1642.461451] env[62519]: DEBUG oslo_concurrency.lockutils [None req-6a19a777-daec-40e9-be4e-bd92f98777ed tempest-VolumesAdminNegativeTest-694306887 tempest-VolumesAdminNegativeTest-694306887-project-member] Lock "ed716912-752e-4c6d-b6c6-fb349668fa93-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1642.464935] env[62519]: INFO nova.compute.manager [None req-6a19a777-daec-40e9-be4e-bd92f98777ed tempest-VolumesAdminNegativeTest-694306887 tempest-VolumesAdminNegativeTest-694306887-project-member] [instance: ed716912-752e-4c6d-b6c6-fb349668fa93] Terminating instance [ 1642.509030] env[62519]: DEBUG 
oslo_vmware.api [-] Task: {'id': task-1802629, 'name': CreateVM_Task} progress is 25%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1642.547637] env[62519]: DEBUG nova.compute.manager [req-e8e99afc-5350-4906-9a9b-f8350affaebe req-9ef9b503-7dc9-43c9-b8f0-eea3049d7536 service nova] [instance: 8659f63a-5df9-4ff8-84dd-0722026dc820] Received event network-vif-plugged-b025403f-8225-412e-908e-f5078a442ed2 {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1642.547912] env[62519]: DEBUG oslo_concurrency.lockutils [req-e8e99afc-5350-4906-9a9b-f8350affaebe req-9ef9b503-7dc9-43c9-b8f0-eea3049d7536 service nova] Acquiring lock "8659f63a-5df9-4ff8-84dd-0722026dc820-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1642.548205] env[62519]: DEBUG oslo_concurrency.lockutils [req-e8e99afc-5350-4906-9a9b-f8350affaebe req-9ef9b503-7dc9-43c9-b8f0-eea3049d7536 service nova] Lock "8659f63a-5df9-4ff8-84dd-0722026dc820-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1642.548307] env[62519]: DEBUG oslo_concurrency.lockutils [req-e8e99afc-5350-4906-9a9b-f8350affaebe req-9ef9b503-7dc9-43c9-b8f0-eea3049d7536 service nova] Lock "8659f63a-5df9-4ff8-84dd-0722026dc820-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1642.548480] env[62519]: DEBUG nova.compute.manager [req-e8e99afc-5350-4906-9a9b-f8350affaebe req-9ef9b503-7dc9-43c9-b8f0-eea3049d7536 service nova] [instance: 8659f63a-5df9-4ff8-84dd-0722026dc820] No waiting events found dispatching network-vif-plugged-b025403f-8225-412e-908e-f5078a442ed2 {{(pid=62519) pop_instance_event /opt/stack/nova/nova/compute/manager.py:323}} [ 1642.548625] env[62519]: WARNING nova.compute.manager [req-e8e99afc-5350-4906-9a9b-f8350affaebe req-9ef9b503-7dc9-43c9-b8f0-eea3049d7536 service nova] [instance: 8659f63a-5df9-4ff8-84dd-0722026dc820] Received unexpected event network-vif-plugged-b025403f-8225-412e-908e-f5078a442ed2 for instance with vm_state building and task_state spawning. 
[ 1642.676337] env[62519]: DEBUG oslo_concurrency.lockutils [None req-f0afbcc8-8045-4111-b3d2-45982468a24d tempest-ServerGroupTestJSON-1731423365 tempest-ServerGroupTestJSON-1731423365-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.706s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1642.676337] env[62519]: DEBUG oslo_concurrency.lockutils [None req-902bf869-76cc-4e6e-a6eb-33e80a9d9579 tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 29.064s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1642.676337] env[62519]: DEBUG nova.objects.instance [None req-902bf869-76cc-4e6e-a6eb-33e80a9d9579 tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] [instance: 34d2991e-b6df-473d-8994-e45ff57ef131] Trying to apply a migration context that does not seem to be set for this instance {{(pid=62519) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1642.680672] env[62519]: DEBUG nova.network.neutron [None req-24267348-edd5-4ceb-9bdc-8a9f43b89725 tempest-ServerTagsTestJSON-1513198723 tempest-ServerTagsTestJSON-1513198723-project-member] [instance: 8659f63a-5df9-4ff8-84dd-0722026dc820] Successfully updated port: b025403f-8225-412e-908e-f5078a442ed2 {{(pid=62519) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1642.736246] env[62519]: DEBUG oslo_vmware.api [None req-9f4c1900-996e-42d1-9657-410816fb70d2 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Task: {'id': task-1802630, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1642.744446] env[62519]: DEBUG nova.network.neutron [req-63410876-f56c-4f4e-99aa-04bb12a064ca req-ce063147-e9a0-4764-8b57-cc58b6abbffd service nova] [instance: 99f22198-1a65-4d0d-b665-90c7063dbdb9] Updated VIF entry in instance network info cache for port 55cb0499-3e6e-42ef-bd75-edafccb32e03. 
{{(pid=62519) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1642.745025] env[62519]: DEBUG nova.network.neutron [req-63410876-f56c-4f4e-99aa-04bb12a064ca req-ce063147-e9a0-4764-8b57-cc58b6abbffd service nova] [instance: 99f22198-1a65-4d0d-b665-90c7063dbdb9] Updating instance_info_cache with network_info: [{"id": "55cb0499-3e6e-42ef-bd75-edafccb32e03", "address": "fa:16:3e:cb:3e:93", "network": {"id": "227244cd-a495-40a7-b8e1-1fb111ecd3ef", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-482185283-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "91c1732890db42f98f538f7a5ac0d542", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2bf99f85-3a5c-47c6-a603-e215be6ab0bd", "external-id": "nsx-vlan-transportzone-855", "segmentation_id": 855, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap55cb0499-3e", "ovs_interfaceid": "55cb0499-3e6e-42ef-bd75-edafccb32e03", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1642.932817] env[62519]: DEBUG nova.compute.manager [None req-82cc34c3-57f7-4d42-986e-90dfdcc61974 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] [instance: 54a8aa34-1595-4494-ba68-6915611631ce] Starting instance... {{(pid=62519) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2441}} [ 1642.969850] env[62519]: DEBUG nova.compute.manager [None req-6a19a777-daec-40e9-be4e-bd92f98777ed tempest-VolumesAdminNegativeTest-694306887 tempest-VolumesAdminNegativeTest-694306887-project-member] [instance: ed716912-752e-4c6d-b6c6-fb349668fa93] Start destroying the instance on the hypervisor. 
{{(pid=62519) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3166}} [ 1642.970177] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-6a19a777-daec-40e9-be4e-bd92f98777ed tempest-VolumesAdminNegativeTest-694306887 tempest-VolumesAdminNegativeTest-694306887-project-member] [instance: ed716912-752e-4c6d-b6c6-fb349668fa93] Destroying instance {{(pid=62519) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1642.971249] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32ef16e3-af4e-4790-8b9b-a2779c4a9cc5 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1642.981245] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-6a19a777-daec-40e9-be4e-bd92f98777ed tempest-VolumesAdminNegativeTest-694306887 tempest-VolumesAdminNegativeTest-694306887-project-member] [instance: ed716912-752e-4c6d-b6c6-fb349668fa93] Powering off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1642.982282] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ebab75f0-b27a-4425-8a71-8f014dad8baa {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1642.990727] env[62519]: DEBUG oslo_vmware.api [None req-6a19a777-daec-40e9-be4e-bd92f98777ed tempest-VolumesAdminNegativeTest-694306887 tempest-VolumesAdminNegativeTest-694306887-project-member] Waiting for the task: (returnval){ [ 1642.990727] env[62519]: value = "task-1802631" [ 1642.990727] env[62519]: _type = "Task" [ 1642.990727] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1643.005210] env[62519]: DEBUG oslo_vmware.api [None req-6a19a777-daec-40e9-be4e-bd92f98777ed tempest-VolumesAdminNegativeTest-694306887 tempest-VolumesAdminNegativeTest-694306887-project-member] Task: {'id': task-1802631, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1643.013789] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1802629, 'name': CreateVM_Task} progress is 99%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1643.181245] env[62519]: DEBUG oslo_concurrency.lockutils [None req-f0afbcc8-8045-4111-b3d2-45982468a24d tempest-ServerGroupTestJSON-1731423365 tempest-ServerGroupTestJSON-1731423365-project-member] Acquiring lock "60874391-fa8c-445d-8379-0bc79e41d864" by "nova.compute.manager.ComputeManager._validate_instance_group_policy.._do_validation" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1643.181513] env[62519]: DEBUG oslo_concurrency.lockutils [None req-f0afbcc8-8045-4111-b3d2-45982468a24d tempest-ServerGroupTestJSON-1731423365 tempest-ServerGroupTestJSON-1731423365-project-member] Lock "60874391-fa8c-445d-8379-0bc79e41d864" acquired by "nova.compute.manager.ComputeManager._validate_instance_group_policy.._do_validation" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1643.185903] env[62519]: DEBUG oslo_concurrency.lockutils [None req-24267348-edd5-4ceb-9bdc-8a9f43b89725 tempest-ServerTagsTestJSON-1513198723 tempest-ServerTagsTestJSON-1513198723-project-member] Acquiring lock "refresh_cache-8659f63a-5df9-4ff8-84dd-0722026dc820" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1643.187157] env[62519]: DEBUG oslo_concurrency.lockutils [None req-24267348-edd5-4ceb-9bdc-8a9f43b89725 tempest-ServerTagsTestJSON-1513198723 tempest-ServerTagsTestJSON-1513198723-project-member] Acquired lock "refresh_cache-8659f63a-5df9-4ff8-84dd-0722026dc820" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1643.187157] env[62519]: DEBUG nova.network.neutron [None req-24267348-edd5-4ceb-9bdc-8a9f43b89725 tempest-ServerTagsTestJSON-1513198723 tempest-ServerTagsTestJSON-1513198723-project-member] [instance: 8659f63a-5df9-4ff8-84dd-0722026dc820] Building network info cache for instance {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1643.237923] env[62519]: DEBUG oslo_vmware.api [None req-9f4c1900-996e-42d1-9657-410816fb70d2 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Task: {'id': task-1802630, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.875935} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1643.238183] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-9f4c1900-996e-42d1-9657-410816fb70d2 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk to [datastore1] 417588f8-6288-4ecd-9764-dbc923549c5d/417588f8-6288-4ecd-9764-dbc923549c5d.vmdk {{(pid=62519) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1643.238416] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-9f4c1900-996e-42d1-9657-410816fb70d2 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] [instance: 417588f8-6288-4ecd-9764-dbc923549c5d] Extending root virtual disk to 1048576 {{(pid=62519) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1643.238681] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-efb4de4d-9fbd-42c6-8f61-b749df70089c {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1643.246296] env[62519]: DEBUG oslo_vmware.api [None req-9f4c1900-996e-42d1-9657-410816fb70d2 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Waiting for the task: (returnval){ [ 1643.246296] env[62519]: value = "task-1802632" [ 1643.246296] env[62519]: _type = "Task" [ 1643.246296] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1643.250528] env[62519]: DEBUG oslo_concurrency.lockutils [req-63410876-f56c-4f4e-99aa-04bb12a064ca req-ce063147-e9a0-4764-8b57-cc58b6abbffd service nova] Releasing lock "refresh_cache-99f22198-1a65-4d0d-b665-90c7063dbdb9" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1643.259232] env[62519]: DEBUG oslo_vmware.api [None req-9f4c1900-996e-42d1-9657-410816fb70d2 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Task: {'id': task-1802632, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1643.456265] env[62519]: DEBUG oslo_concurrency.lockutils [None req-82cc34c3-57f7-4d42-986e-90dfdcc61974 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1643.501337] env[62519]: DEBUG oslo_vmware.api [None req-6a19a777-daec-40e9-be4e-bd92f98777ed tempest-VolumesAdminNegativeTest-694306887 tempest-VolumesAdminNegativeTest-694306887-project-member] Task: {'id': task-1802631, 'name': PowerOffVM_Task, 'duration_secs': 0.280039} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1643.504307] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-6a19a777-daec-40e9-be4e-bd92f98777ed tempest-VolumesAdminNegativeTest-694306887 tempest-VolumesAdminNegativeTest-694306887-project-member] [instance: ed716912-752e-4c6d-b6c6-fb349668fa93] Powered off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1643.504483] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-6a19a777-daec-40e9-be4e-bd92f98777ed tempest-VolumesAdminNegativeTest-694306887 tempest-VolumesAdminNegativeTest-694306887-project-member] [instance: ed716912-752e-4c6d-b6c6-fb349668fa93] Unregistering the VM {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1643.504721] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-a7d3969d-6d3e-4be1-b3d1-bc1406b33c43 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1643.511911] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1802629, 'name': CreateVM_Task, 'duration_secs': 1.139589} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1643.512074] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 99f22198-1a65-4d0d-b665-90c7063dbdb9] Created VM on the ESX host {{(pid=62519) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1643.512723] env[62519]: DEBUG oslo_concurrency.lockutils [None req-b0f1427a-eff6-4068-bbe3-05ff2c38cc24 tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1643.512888] env[62519]: DEBUG oslo_concurrency.lockutils [None req-b0f1427a-eff6-4068-bbe3-05ff2c38cc24 tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] Acquired lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1643.513239] env[62519]: DEBUG oslo_concurrency.lockutils [None req-b0f1427a-eff6-4068-bbe3-05ff2c38cc24 tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1643.513470] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6abab4c1-9024-49ff-bd45-197a7e967dfc {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1643.517821] env[62519]: DEBUG oslo_vmware.api [None req-b0f1427a-eff6-4068-bbe3-05ff2c38cc24 tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] Waiting for the task: (returnval){ [ 1643.517821] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]52f9aacb-4179-fe1e-3598-6c2d33aa1b50" [ 1643.517821] env[62519]: _type = "Task" [ 1643.517821] env[62519]: } to complete. 
{{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1643.527149] env[62519]: DEBUG oslo_vmware.api [None req-b0f1427a-eff6-4068-bbe3-05ff2c38cc24 tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52f9aacb-4179-fe1e-3598-6c2d33aa1b50, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1643.627996] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-6a19a777-daec-40e9-be4e-bd92f98777ed tempest-VolumesAdminNegativeTest-694306887 tempest-VolumesAdminNegativeTest-694306887-project-member] [instance: ed716912-752e-4c6d-b6c6-fb349668fa93] Unregistered the VM {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1643.628249] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-6a19a777-daec-40e9-be4e-bd92f98777ed tempest-VolumesAdminNegativeTest-694306887 tempest-VolumesAdminNegativeTest-694306887-project-member] [instance: ed716912-752e-4c6d-b6c6-fb349668fa93] Deleting contents of the VM from datastore datastore1 {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1643.628437] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-6a19a777-daec-40e9-be4e-bd92f98777ed tempest-VolumesAdminNegativeTest-694306887 tempest-VolumesAdminNegativeTest-694306887-project-member] Deleting the datastore file [datastore1] ed716912-752e-4c6d-b6c6-fb349668fa93 {{(pid=62519) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1643.628721] env[62519]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-3db8cb60-730e-4820-a7f0-8fd21dde1bdd {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1643.639803] env[62519]: DEBUG oslo_vmware.api [None req-6a19a777-daec-40e9-be4e-bd92f98777ed tempest-VolumesAdminNegativeTest-694306887 tempest-VolumesAdminNegativeTest-694306887-project-member] Waiting for the task: (returnval){ [ 1643.639803] env[62519]: value = "task-1802634" [ 1643.639803] env[62519]: _type = "Task" [ 1643.639803] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1643.649022] env[62519]: DEBUG oslo_vmware.api [None req-6a19a777-daec-40e9-be4e-bd92f98777ed tempest-VolumesAdminNegativeTest-694306887 tempest-VolumesAdminNegativeTest-694306887-project-member] Task: {'id': task-1802634, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1643.688632] env[62519]: DEBUG oslo_concurrency.lockutils [None req-f0afbcc8-8045-4111-b3d2-45982468a24d tempest-ServerGroupTestJSON-1731423365 tempest-ServerGroupTestJSON-1731423365-project-member] Lock "60874391-fa8c-445d-8379-0bc79e41d864" "released" by "nova.compute.manager.ComputeManager._validate_instance_group_policy.._do_validation" :: held 0.507s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1643.689063] env[62519]: DEBUG nova.compute.manager [None req-f0afbcc8-8045-4111-b3d2-45982468a24d tempest-ServerGroupTestJSON-1731423365 tempest-ServerGroupTestJSON-1731423365-project-member] [instance: 45d1aa86-a5c8-4e75-a6c8-5f55461702f8] Start building networks asynchronously for instance. 
{{(pid=62519) _build_resources /opt/stack/nova/nova/compute/manager.py:2838}} [ 1643.692949] env[62519]: DEBUG oslo_concurrency.lockutils [None req-902bf869-76cc-4e6e-a6eb-33e80a9d9579 tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.018s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1643.696145] env[62519]: DEBUG oslo_concurrency.lockutils [None req-ba15048a-4a0e-4e38-9130-d2fe9bd29d34 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 27.218s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1643.696358] env[62519]: DEBUG nova.objects.instance [None req-ba15048a-4a0e-4e38-9130-d2fe9bd29d34 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] [instance: 302edcd3-bd6e-41da-b731-4d4c1bb5c3c1] Trying to apply a migration context that does not seem to be set for this instance {{(pid=62519) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1643.731369] env[62519]: DEBUG nova.network.neutron [None req-24267348-edd5-4ceb-9bdc-8a9f43b89725 tempest-ServerTagsTestJSON-1513198723 tempest-ServerTagsTestJSON-1513198723-project-member] [instance: 8659f63a-5df9-4ff8-84dd-0722026dc820] Instance cache missing network info. {{(pid=62519) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1643.759802] env[62519]: DEBUG oslo_vmware.api [None req-9f4c1900-996e-42d1-9657-410816fb70d2 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Task: {'id': task-1802632, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.069045} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1643.759802] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-9f4c1900-996e-42d1-9657-410816fb70d2 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] [instance: 417588f8-6288-4ecd-9764-dbc923549c5d] Extended root virtual disk {{(pid=62519) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1643.762028] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd368d29-fb45-4a28-b9a1-57806e5ffd99 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1643.790842] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-9f4c1900-996e-42d1-9657-410816fb70d2 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] [instance: 417588f8-6288-4ecd-9764-dbc923549c5d] Reconfiguring VM instance instance-0000003d to attach disk [datastore1] 417588f8-6288-4ecd-9764-dbc923549c5d/417588f8-6288-4ecd-9764-dbc923549c5d.vmdk or device None with type sparse {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1643.793940] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f24e9d71-6b4a-45c7-8833-3ac02d5b18a4 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1643.816674] env[62519]: DEBUG oslo_vmware.api [None req-9f4c1900-996e-42d1-9657-410816fb70d2 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Waiting for the task: (returnval){ [ 1643.816674] env[62519]: value = "task-1802635" [ 1643.816674] env[62519]: _type = "Task" [ 1643.816674] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1643.826684] env[62519]: DEBUG oslo_vmware.api [None req-9f4c1900-996e-42d1-9657-410816fb70d2 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Task: {'id': task-1802635, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1643.921720] env[62519]: DEBUG nova.network.neutron [None req-24267348-edd5-4ceb-9bdc-8a9f43b89725 tempest-ServerTagsTestJSON-1513198723 tempest-ServerTagsTestJSON-1513198723-project-member] [instance: 8659f63a-5df9-4ff8-84dd-0722026dc820] Updating instance_info_cache with network_info: [{"id": "b025403f-8225-412e-908e-f5078a442ed2", "address": "fa:16:3e:80:97:a2", "network": {"id": "0481c136-0d30-40ab-b955-26f62cf1ec89", "bridge": "br-int", "label": "tempest-ServerTagsTestJSON-1099872683-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b316f426b6f64dff9563b62f285cd6b0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2624812a-9f9c-461d-8b5f-79bea90c7ad3", "external-id": "nsx-vlan-transportzone-123", "segmentation_id": 123, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb025403f-82", "ovs_interfaceid": "b025403f-8225-412e-908e-f5078a442ed2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1644.028512] env[62519]: DEBUG oslo_vmware.api [None req-b0f1427a-eff6-4068-bbe3-05ff2c38cc24 tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52f9aacb-4179-fe1e-3598-6c2d33aa1b50, 'name': SearchDatastore_Task, 'duration_secs': 0.010797} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1644.028817] env[62519]: DEBUG oslo_concurrency.lockutils [None req-b0f1427a-eff6-4068-bbe3-05ff2c38cc24 tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] Releasing lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1644.029070] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-b0f1427a-eff6-4068-bbe3-05ff2c38cc24 tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] [instance: 99f22198-1a65-4d0d-b665-90c7063dbdb9] Processing image 15793716-f1d9-4a86-9030-717adf498693 {{(pid=62519) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1644.029318] env[62519]: DEBUG oslo_concurrency.lockutils [None req-b0f1427a-eff6-4068-bbe3-05ff2c38cc24 tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1644.029462] env[62519]: DEBUG oslo_concurrency.lockutils [None req-b0f1427a-eff6-4068-bbe3-05ff2c38cc24 tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] Acquired lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1644.029650] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-b0f1427a-eff6-4068-bbe3-05ff2c38cc24 tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62519) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1644.029919] env[62519]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-3b58ec2d-44f2-4d9d-87f6-bf4d643a76bc {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1644.039025] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-b0f1427a-eff6-4068-bbe3-05ff2c38cc24 tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62519) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1644.039202] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-b0f1427a-eff6-4068-bbe3-05ff2c38cc24 tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62519) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1644.039900] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-434c63b3-d26b-4263-8002-e59fb4bd089d {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1644.045428] env[62519]: DEBUG oslo_vmware.api [None req-b0f1427a-eff6-4068-bbe3-05ff2c38cc24 tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] Waiting for the task: (returnval){ [ 1644.045428] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]523cc5ab-2ad6-cb54-eb4e-e5f82bd23c6a" [ 1644.045428] env[62519]: _type = "Task" [ 1644.045428] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1644.053903] env[62519]: DEBUG oslo_vmware.api [None req-b0f1427a-eff6-4068-bbe3-05ff2c38cc24 tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]523cc5ab-2ad6-cb54-eb4e-e5f82bd23c6a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1644.152377] env[62519]: DEBUG oslo_vmware.api [None req-6a19a777-daec-40e9-be4e-bd92f98777ed tempest-VolumesAdminNegativeTest-694306887 tempest-VolumesAdminNegativeTest-694306887-project-member] Task: {'id': task-1802634, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.16166} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1644.152602] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-6a19a777-daec-40e9-be4e-bd92f98777ed tempest-VolumesAdminNegativeTest-694306887 tempest-VolumesAdminNegativeTest-694306887-project-member] Deleted the datastore file {{(pid=62519) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1644.152786] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-6a19a777-daec-40e9-be4e-bd92f98777ed tempest-VolumesAdminNegativeTest-694306887 tempest-VolumesAdminNegativeTest-694306887-project-member] [instance: ed716912-752e-4c6d-b6c6-fb349668fa93] Deleted contents of the VM from datastore datastore1 {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1644.152960] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-6a19a777-daec-40e9-be4e-bd92f98777ed tempest-VolumesAdminNegativeTest-694306887 tempest-VolumesAdminNegativeTest-694306887-project-member] [instance: ed716912-752e-4c6d-b6c6-fb349668fa93] Instance destroyed {{(pid=62519) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1644.153160] env[62519]: INFO nova.compute.manager [None req-6a19a777-daec-40e9-be4e-bd92f98777ed tempest-VolumesAdminNegativeTest-694306887 tempest-VolumesAdminNegativeTest-694306887-project-member] [instance: ed716912-752e-4c6d-b6c6-fb349668fa93] Took 1.18 seconds to destroy the instance on the hypervisor. [ 1644.153403] env[62519]: DEBUG oslo.service.loopingcall [None req-6a19a777-daec-40e9-be4e-bd92f98777ed tempest-VolumesAdminNegativeTest-694306887 tempest-VolumesAdminNegativeTest-694306887-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. 
{{(pid=62519) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1644.153604] env[62519]: DEBUG nova.compute.manager [-] [instance: ed716912-752e-4c6d-b6c6-fb349668fa93] Deallocating network for instance {{(pid=62519) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2297}} [ 1644.153699] env[62519]: DEBUG nova.network.neutron [-] [instance: ed716912-752e-4c6d-b6c6-fb349668fa93] deallocate_for_instance() {{(pid=62519) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1644.199192] env[62519]: DEBUG nova.compute.utils [None req-f0afbcc8-8045-4111-b3d2-45982468a24d tempest-ServerGroupTestJSON-1731423365 tempest-ServerGroupTestJSON-1731423365-project-member] Using /dev/sd instead of None {{(pid=62519) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1644.202048] env[62519]: DEBUG nova.compute.manager [None req-f0afbcc8-8045-4111-b3d2-45982468a24d tempest-ServerGroupTestJSON-1731423365 tempest-ServerGroupTestJSON-1731423365-project-member] [instance: 45d1aa86-a5c8-4e75-a6c8-5f55461702f8] Allocating IP information in the background. {{(pid=62519) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1989}} [ 1644.202145] env[62519]: DEBUG nova.network.neutron [None req-f0afbcc8-8045-4111-b3d2-45982468a24d tempest-ServerGroupTestJSON-1731423365 tempest-ServerGroupTestJSON-1731423365-project-member] [instance: 45d1aa86-a5c8-4e75-a6c8-5f55461702f8] allocate_for_instance() {{(pid=62519) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1644.330145] env[62519]: DEBUG oslo_vmware.api [None req-9f4c1900-996e-42d1-9657-410816fb70d2 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Task: {'id': task-1802635, 'name': ReconfigVM_Task, 'duration_secs': 0.324039} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1644.330145] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-9f4c1900-996e-42d1-9657-410816fb70d2 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] [instance: 417588f8-6288-4ecd-9764-dbc923549c5d] Reconfigured VM instance instance-0000003d to attach disk [datastore1] 417588f8-6288-4ecd-9764-dbc923549c5d/417588f8-6288-4ecd-9764-dbc923549c5d.vmdk or device None with type sparse {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1644.330655] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-336a4b7f-b44d-44bc-8334-22e5ce1bc996 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1644.342311] env[62519]: DEBUG oslo_vmware.api [None req-9f4c1900-996e-42d1-9657-410816fb70d2 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Waiting for the task: (returnval){ [ 1644.342311] env[62519]: value = "task-1802636" [ 1644.342311] env[62519]: _type = "Task" [ 1644.342311] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1644.356016] env[62519]: DEBUG oslo_vmware.api [None req-9f4c1900-996e-42d1-9657-410816fb70d2 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Task: {'id': task-1802636, 'name': Rename_Task} progress is 5%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1644.424736] env[62519]: DEBUG oslo_concurrency.lockutils [None req-24267348-edd5-4ceb-9bdc-8a9f43b89725 tempest-ServerTagsTestJSON-1513198723 tempest-ServerTagsTestJSON-1513198723-project-member] Releasing lock "refresh_cache-8659f63a-5df9-4ff8-84dd-0722026dc820" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1644.425127] env[62519]: DEBUG nova.compute.manager [None req-24267348-edd5-4ceb-9bdc-8a9f43b89725 tempest-ServerTagsTestJSON-1513198723 tempest-ServerTagsTestJSON-1513198723-project-member] [instance: 8659f63a-5df9-4ff8-84dd-0722026dc820] Instance network_info: |[{"id": "b025403f-8225-412e-908e-f5078a442ed2", "address": "fa:16:3e:80:97:a2", "network": {"id": "0481c136-0d30-40ab-b955-26f62cf1ec89", "bridge": "br-int", "label": "tempest-ServerTagsTestJSON-1099872683-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b316f426b6f64dff9563b62f285cd6b0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2624812a-9f9c-461d-8b5f-79bea90c7ad3", "external-id": "nsx-vlan-transportzone-123", "segmentation_id": 123, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb025403f-82", "ovs_interfaceid": "b025403f-8225-412e-908e-f5078a442ed2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62519) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2004}} [ 1644.425585] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-24267348-edd5-4ceb-9bdc-8a9f43b89725 tempest-ServerTagsTestJSON-1513198723 tempest-ServerTagsTestJSON-1513198723-project-member] [instance: 8659f63a-5df9-4ff8-84dd-0722026dc820] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:80:97:a2', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '2624812a-9f9c-461d-8b5f-79bea90c7ad3', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'b025403f-8225-412e-908e-f5078a442ed2', 'vif_model': 'vmxnet3'}] {{(pid=62519) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1644.433276] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-24267348-edd5-4ceb-9bdc-8a9f43b89725 tempest-ServerTagsTestJSON-1513198723 tempest-ServerTagsTestJSON-1513198723-project-member] Creating folder: Project (b316f426b6f64dff9563b62f285cd6b0). Parent ref: group-v373567. 
{{(pid=62519) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1644.433557] env[62519]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e3056c29-0313-402b-8934-4600f556ab53 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1644.446215] env[62519]: INFO nova.virt.vmwareapi.vm_util [None req-24267348-edd5-4ceb-9bdc-8a9f43b89725 tempest-ServerTagsTestJSON-1513198723 tempest-ServerTagsTestJSON-1513198723-project-member] Created folder: Project (b316f426b6f64dff9563b62f285cd6b0) in parent group-v373567. [ 1644.446417] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-24267348-edd5-4ceb-9bdc-8a9f43b89725 tempest-ServerTagsTestJSON-1513198723 tempest-ServerTagsTestJSON-1513198723-project-member] Creating folder: Instances. Parent ref: group-v373745. {{(pid=62519) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1644.446670] env[62519]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-5bbf529b-8911-4d1d-ab7b-0c1ebf66dac0 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1644.457519] env[62519]: INFO nova.virt.vmwareapi.vm_util [None req-24267348-edd5-4ceb-9bdc-8a9f43b89725 tempest-ServerTagsTestJSON-1513198723 tempest-ServerTagsTestJSON-1513198723-project-member] Created folder: Instances in parent group-v373745. [ 1644.457765] env[62519]: DEBUG oslo.service.loopingcall [None req-24267348-edd5-4ceb-9bdc-8a9f43b89725 tempest-ServerTagsTestJSON-1513198723 tempest-ServerTagsTestJSON-1513198723-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62519) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1644.457971] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8659f63a-5df9-4ff8-84dd-0722026dc820] Creating VM on the ESX host {{(pid=62519) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1644.458197] env[62519]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-065116e5-d093-4e34-b3dd-a64022487074 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1644.473977] env[62519]: DEBUG nova.policy [None req-f0afbcc8-8045-4111-b3d2-45982468a24d tempest-ServerGroupTestJSON-1731423365 tempest-ServerGroupTestJSON-1731423365-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c4d119aba99c4e9594da6ce32bcfce7d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '19cd4d296ffa4f24818759beed87b66e', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62519) authorize /opt/stack/nova/nova/policy.py:192}} [ 1644.482813] env[62519]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1644.482813] env[62519]: value = "task-1802639" [ 1644.482813] env[62519]: _type = "Task" [ 1644.482813] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1644.491487] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1802639, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1644.557963] env[62519]: DEBUG oslo_vmware.api [None req-b0f1427a-eff6-4068-bbe3-05ff2c38cc24 tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]523cc5ab-2ad6-cb54-eb4e-e5f82bd23c6a, 'name': SearchDatastore_Task, 'duration_secs': 0.0134} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1644.559171] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c260cac0-f57e-4533-b7e4-32d19ad630b1 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1644.567546] env[62519]: DEBUG oslo_vmware.api [None req-b0f1427a-eff6-4068-bbe3-05ff2c38cc24 tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] Waiting for the task: (returnval){ [ 1644.567546] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]52bfba61-0d3f-6422-7dee-76f24eb45278" [ 1644.567546] env[62519]: _type = "Task" [ 1644.567546] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1644.577088] env[62519]: DEBUG oslo_vmware.api [None req-b0f1427a-eff6-4068-bbe3-05ff2c38cc24 tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52bfba61-0d3f-6422-7dee-76f24eb45278, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1644.693324] env[62519]: DEBUG nova.compute.manager [req-05b3e1a5-fb8f-43ee-957f-18ed8ddb4979 req-b4418fe7-dd88-47f2-bcc0-7e7b24b2e16a service nova] [instance: 8659f63a-5df9-4ff8-84dd-0722026dc820] Received event network-changed-b025403f-8225-412e-908e-f5078a442ed2 {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1644.693598] env[62519]: DEBUG nova.compute.manager [req-05b3e1a5-fb8f-43ee-957f-18ed8ddb4979 req-b4418fe7-dd88-47f2-bcc0-7e7b24b2e16a service nova] [instance: 8659f63a-5df9-4ff8-84dd-0722026dc820] Refreshing instance network info cache due to event network-changed-b025403f-8225-412e-908e-f5078a442ed2. 
{{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11628}} [ 1644.710017] env[62519]: DEBUG oslo_concurrency.lockutils [req-05b3e1a5-fb8f-43ee-957f-18ed8ddb4979 req-b4418fe7-dd88-47f2-bcc0-7e7b24b2e16a service nova] Acquiring lock "refresh_cache-8659f63a-5df9-4ff8-84dd-0722026dc820" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1644.710017] env[62519]: DEBUG oslo_concurrency.lockutils [req-05b3e1a5-fb8f-43ee-957f-18ed8ddb4979 req-b4418fe7-dd88-47f2-bcc0-7e7b24b2e16a service nova] Acquired lock "refresh_cache-8659f63a-5df9-4ff8-84dd-0722026dc820" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1644.710017] env[62519]: DEBUG nova.network.neutron [req-05b3e1a5-fb8f-43ee-957f-18ed8ddb4979 req-b4418fe7-dd88-47f2-bcc0-7e7b24b2e16a service nova] [instance: 8659f63a-5df9-4ff8-84dd-0722026dc820] Refreshing network info cache for port b025403f-8225-412e-908e-f5078a442ed2 {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1644.710017] env[62519]: DEBUG nova.compute.manager [None req-f0afbcc8-8045-4111-b3d2-45982468a24d tempest-ServerGroupTestJSON-1731423365 tempest-ServerGroupTestJSON-1731423365-project-member] [instance: 45d1aa86-a5c8-4e75-a6c8-5f55461702f8] Start building block device mappings for instance. {{(pid=62519) _build_resources /opt/stack/nova/nova/compute/manager.py:2873}} [ 1644.710017] env[62519]: DEBUG oslo_concurrency.lockutils [None req-ba15048a-4a0e-4e38-9130-d2fe9bd29d34 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.012s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1644.723760] env[62519]: DEBUG oslo_concurrency.lockutils [None req-887fdd97-c9a0-4663-ab52-0f67cebf30b9 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 24.677s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1644.723760] env[62519]: DEBUG nova.objects.instance [None req-887fdd97-c9a0-4663-ab52-0f67cebf30b9 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] [instance: 80ef3fd4-b9ef-4fd2-a991-feec78a0c81d] Trying to apply a migration context that does not seem to be set for this instance {{(pid=62519) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1644.856253] env[62519]: DEBUG oslo_vmware.api [None req-9f4c1900-996e-42d1-9657-410816fb70d2 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Task: {'id': task-1802636, 'name': Rename_Task, 'duration_secs': 0.243251} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1644.859943] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-9f4c1900-996e-42d1-9657-410816fb70d2 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] [instance: 417588f8-6288-4ecd-9764-dbc923549c5d] Powering on the VM {{(pid=62519) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1644.859943] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b9d1a2c3-d826-4bf5-b897-f241b8c6374d {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1644.868026] env[62519]: DEBUG oslo_vmware.api [None req-9f4c1900-996e-42d1-9657-410816fb70d2 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Waiting for the task: (returnval){ [ 1644.868026] env[62519]: value = "task-1802640" [ 1644.868026] env[62519]: _type = "Task" [ 1644.868026] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1644.877635] env[62519]: DEBUG oslo_vmware.api [None req-9f4c1900-996e-42d1-9657-410816fb70d2 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Task: {'id': task-1802640, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1644.903434] env[62519]: DEBUG nova.network.neutron [None req-f0afbcc8-8045-4111-b3d2-45982468a24d tempest-ServerGroupTestJSON-1731423365 tempest-ServerGroupTestJSON-1731423365-project-member] [instance: 45d1aa86-a5c8-4e75-a6c8-5f55461702f8] Successfully created port: 431f108a-5529-45cb-8053-17140e36c28d {{(pid=62519) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1644.994549] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1802639, 'name': CreateVM_Task} progress is 99%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1645.082407] env[62519]: DEBUG oslo_vmware.api [None req-b0f1427a-eff6-4068-bbe3-05ff2c38cc24 tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52bfba61-0d3f-6422-7dee-76f24eb45278, 'name': SearchDatastore_Task, 'duration_secs': 0.01158} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1645.082737] env[62519]: DEBUG oslo_concurrency.lockutils [None req-b0f1427a-eff6-4068-bbe3-05ff2c38cc24 tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] Releasing lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1645.083045] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-b0f1427a-eff6-4068-bbe3-05ff2c38cc24 tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk to [datastore1] 99f22198-1a65-4d0d-b665-90c7063dbdb9/99f22198-1a65-4d0d-b665-90c7063dbdb9.vmdk {{(pid=62519) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1645.083348] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-215acbc0-c759-493e-9795-e98e3aa60f51 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1645.090697] env[62519]: DEBUG oslo_vmware.api [None req-b0f1427a-eff6-4068-bbe3-05ff2c38cc24 tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] Waiting for the task: (returnval){ [ 1645.090697] env[62519]: value = "task-1802641" [ 1645.090697] env[62519]: _type = "Task" [ 1645.090697] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1645.101389] env[62519]: DEBUG oslo_vmware.api [None req-b0f1427a-eff6-4068-bbe3-05ff2c38cc24 tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] Task: {'id': task-1802641, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1645.241989] env[62519]: DEBUG nova.network.neutron [-] [instance: ed716912-752e-4c6d-b6c6-fb349668fa93] Updating instance_info_cache with network_info: [] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1645.378323] env[62519]: DEBUG oslo_vmware.api [None req-9f4c1900-996e-42d1-9657-410816fb70d2 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Task: {'id': task-1802640, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1645.451071] env[62519]: DEBUG nova.network.neutron [req-05b3e1a5-fb8f-43ee-957f-18ed8ddb4979 req-b4418fe7-dd88-47f2-bcc0-7e7b24b2e16a service nova] [instance: 8659f63a-5df9-4ff8-84dd-0722026dc820] Updated VIF entry in instance network info cache for port b025403f-8225-412e-908e-f5078a442ed2. 
{{(pid=62519) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1645.451513] env[62519]: DEBUG nova.network.neutron [req-05b3e1a5-fb8f-43ee-957f-18ed8ddb4979 req-b4418fe7-dd88-47f2-bcc0-7e7b24b2e16a service nova] [instance: 8659f63a-5df9-4ff8-84dd-0722026dc820] Updating instance_info_cache with network_info: [{"id": "b025403f-8225-412e-908e-f5078a442ed2", "address": "fa:16:3e:80:97:a2", "network": {"id": "0481c136-0d30-40ab-b955-26f62cf1ec89", "bridge": "br-int", "label": "tempest-ServerTagsTestJSON-1099872683-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b316f426b6f64dff9563b62f285cd6b0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2624812a-9f9c-461d-8b5f-79bea90c7ad3", "external-id": "nsx-vlan-transportzone-123", "segmentation_id": 123, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb025403f-82", "ovs_interfaceid": "b025403f-8225-412e-908e-f5078a442ed2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1645.497351] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1802639, 'name': CreateVM_Task, 'duration_secs': 0.597745} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1645.497579] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8659f63a-5df9-4ff8-84dd-0722026dc820] Created VM on the ESX host {{(pid=62519) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1645.498470] env[62519]: DEBUG oslo_concurrency.lockutils [None req-24267348-edd5-4ceb-9bdc-8a9f43b89725 tempest-ServerTagsTestJSON-1513198723 tempest-ServerTagsTestJSON-1513198723-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1645.498655] env[62519]: DEBUG oslo_concurrency.lockutils [None req-24267348-edd5-4ceb-9bdc-8a9f43b89725 tempest-ServerTagsTestJSON-1513198723 tempest-ServerTagsTestJSON-1513198723-project-member] Acquired lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1645.498981] env[62519]: DEBUG oslo_concurrency.lockutils [None req-24267348-edd5-4ceb-9bdc-8a9f43b89725 tempest-ServerTagsTestJSON-1513198723 tempest-ServerTagsTestJSON-1513198723-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1645.499342] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d8a6a906-2d1c-41d3-be23-ef266700c847 {{(pid=62519) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1645.506313] env[62519]: DEBUG oslo_vmware.api [None req-24267348-edd5-4ceb-9bdc-8a9f43b89725 tempest-ServerTagsTestJSON-1513198723 tempest-ServerTagsTestJSON-1513198723-project-member] Waiting for the task: (returnval){ [ 1645.506313] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]523a333c-f5b8-eae3-dad1-dea3e2bc77be" [ 1645.506313] env[62519]: _type = "Task" [ 1645.506313] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1645.516298] env[62519]: DEBUG oslo_vmware.api [None req-24267348-edd5-4ceb-9bdc-8a9f43b89725 tempest-ServerTagsTestJSON-1513198723 tempest-ServerTagsTestJSON-1513198723-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]523a333c-f5b8-eae3-dad1-dea3e2bc77be, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1645.603380] env[62519]: DEBUG oslo_vmware.api [None req-b0f1427a-eff6-4068-bbe3-05ff2c38cc24 tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] Task: {'id': task-1802641, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.492736} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1645.603722] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-b0f1427a-eff6-4068-bbe3-05ff2c38cc24 tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk to [datastore1] 99f22198-1a65-4d0d-b665-90c7063dbdb9/99f22198-1a65-4d0d-b665-90c7063dbdb9.vmdk {{(pid=62519) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1645.603976] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-b0f1427a-eff6-4068-bbe3-05ff2c38cc24 tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] [instance: 99f22198-1a65-4d0d-b665-90c7063dbdb9] Extending root virtual disk to 1048576 {{(pid=62519) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1645.604274] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-dbf158c7-9553-4774-8dd0-b49ab89902b1 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1645.611953] env[62519]: DEBUG oslo_vmware.api [None req-b0f1427a-eff6-4068-bbe3-05ff2c38cc24 tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] Waiting for the task: (returnval){ [ 1645.611953] env[62519]: value = "task-1802642" [ 1645.611953] env[62519]: _type = "Task" [ 1645.611953] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1645.621353] env[62519]: DEBUG oslo_vmware.api [None req-b0f1427a-eff6-4068-bbe3-05ff2c38cc24 tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] Task: {'id': task-1802642, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1645.715817] env[62519]: DEBUG nova.compute.manager [None req-f0afbcc8-8045-4111-b3d2-45982468a24d tempest-ServerGroupTestJSON-1731423365 tempest-ServerGroupTestJSON-1731423365-project-member] [instance: 45d1aa86-a5c8-4e75-a6c8-5f55461702f8] Start spawning the instance on the hypervisor. {{(pid=62519) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2647}} [ 1645.719875] env[62519]: DEBUG oslo_concurrency.lockutils [None req-887fdd97-c9a0-4663-ab52-0f67cebf30b9 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.010s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1645.720940] env[62519]: DEBUG oslo_concurrency.lockutils [None req-d667a168-5c01-47dd-a85b-f4ea91c23881 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 23.312s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1645.721181] env[62519]: DEBUG nova.objects.instance [None req-d667a168-5c01-47dd-a85b-f4ea91c23881 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Lazy-loading 'resources' on Instance uuid 302edcd3-bd6e-41da-b731-4d4c1bb5c3c1 {{(pid=62519) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1645.744502] env[62519]: INFO nova.compute.manager [-] [instance: ed716912-752e-4c6d-b6c6-fb349668fa93] Took 1.59 seconds to deallocate network for instance. 
[ 1645.750879] env[62519]: DEBUG nova.virt.hardware [None req-f0afbcc8-8045-4111-b3d2-45982468a24d tempest-ServerGroupTestJSON-1731423365 tempest-ServerGroupTestJSON-1731423365-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T07:56:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T07:55:55Z,direct_url=,disk_format='vmdk',id=15793716-f1d9-4a86-9030-717adf498693,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4069337684d348e0a1ab4eb6a1a2b14d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T07:55:56Z,virtual_size=,visibility=), allow threads: False {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1645.751129] env[62519]: DEBUG nova.virt.hardware [None req-f0afbcc8-8045-4111-b3d2-45982468a24d tempest-ServerGroupTestJSON-1731423365 tempest-ServerGroupTestJSON-1731423365-project-member] Flavor limits 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1645.751315] env[62519]: DEBUG nova.virt.hardware [None req-f0afbcc8-8045-4111-b3d2-45982468a24d tempest-ServerGroupTestJSON-1731423365 tempest-ServerGroupTestJSON-1731423365-project-member] Image limits 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1645.751504] env[62519]: DEBUG nova.virt.hardware [None req-f0afbcc8-8045-4111-b3d2-45982468a24d tempest-ServerGroupTestJSON-1731423365 tempest-ServerGroupTestJSON-1731423365-project-member] Flavor pref 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1645.751643] env[62519]: DEBUG nova.virt.hardware [None req-f0afbcc8-8045-4111-b3d2-45982468a24d tempest-ServerGroupTestJSON-1731423365 tempest-ServerGroupTestJSON-1731423365-project-member] Image pref 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1645.751787] env[62519]: DEBUG nova.virt.hardware [None req-f0afbcc8-8045-4111-b3d2-45982468a24d tempest-ServerGroupTestJSON-1731423365 tempest-ServerGroupTestJSON-1731423365-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1645.751987] env[62519]: DEBUG nova.virt.hardware [None req-f0afbcc8-8045-4111-b3d2-45982468a24d tempest-ServerGroupTestJSON-1731423365 tempest-ServerGroupTestJSON-1731423365-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1645.752153] env[62519]: DEBUG nova.virt.hardware [None req-f0afbcc8-8045-4111-b3d2-45982468a24d tempest-ServerGroupTestJSON-1731423365 tempest-ServerGroupTestJSON-1731423365-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62519) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1645.752317] env[62519]: DEBUG nova.virt.hardware [None req-f0afbcc8-8045-4111-b3d2-45982468a24d tempest-ServerGroupTestJSON-1731423365 
tempest-ServerGroupTestJSON-1731423365-project-member] Got 1 possible topologies {{(pid=62519) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1645.752478] env[62519]: DEBUG nova.virt.hardware [None req-f0afbcc8-8045-4111-b3d2-45982468a24d tempest-ServerGroupTestJSON-1731423365 tempest-ServerGroupTestJSON-1731423365-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1645.752643] env[62519]: DEBUG nova.virt.hardware [None req-f0afbcc8-8045-4111-b3d2-45982468a24d tempest-ServerGroupTestJSON-1731423365 tempest-ServerGroupTestJSON-1731423365-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1645.753493] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f5c1a91-dc7c-4d13-9103-4ceb3c7ff0e3 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1645.762204] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11009dad-14b5-491d-96bb-802e22c50c64 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1645.878118] env[62519]: DEBUG oslo_vmware.api [None req-9f4c1900-996e-42d1-9657-410816fb70d2 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Task: {'id': task-1802640, 'name': PowerOnVM_Task, 'duration_secs': 0.666395} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1645.878438] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-9f4c1900-996e-42d1-9657-410816fb70d2 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] [instance: 417588f8-6288-4ecd-9764-dbc923549c5d] Powered on the VM {{(pid=62519) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1645.878646] env[62519]: INFO nova.compute.manager [None req-9f4c1900-996e-42d1-9657-410816fb70d2 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] [instance: 417588f8-6288-4ecd-9764-dbc923549c5d] Took 9.35 seconds to spawn the instance on the hypervisor. 
[ 1645.878823] env[62519]: DEBUG nova.compute.manager [None req-9f4c1900-996e-42d1-9657-410816fb70d2 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] [instance: 417588f8-6288-4ecd-9764-dbc923549c5d] Checking state {{(pid=62519) _get_power_state /opt/stack/nova/nova/compute/manager.py:1799}} [ 1645.879604] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98abbd10-16fb-4b8a-989b-a323d45da2bf {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1645.954032] env[62519]: DEBUG oslo_concurrency.lockutils [req-05b3e1a5-fb8f-43ee-957f-18ed8ddb4979 req-b4418fe7-dd88-47f2-bcc0-7e7b24b2e16a service nova] Releasing lock "refresh_cache-8659f63a-5df9-4ff8-84dd-0722026dc820" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1646.017886] env[62519]: DEBUG oslo_vmware.api [None req-24267348-edd5-4ceb-9bdc-8a9f43b89725 tempest-ServerTagsTestJSON-1513198723 tempest-ServerTagsTestJSON-1513198723-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]523a333c-f5b8-eae3-dad1-dea3e2bc77be, 'name': SearchDatastore_Task, 'duration_secs': 0.059126} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1646.017886] env[62519]: DEBUG oslo_concurrency.lockutils [None req-24267348-edd5-4ceb-9bdc-8a9f43b89725 tempest-ServerTagsTestJSON-1513198723 tempest-ServerTagsTestJSON-1513198723-project-member] Releasing lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1646.017886] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-24267348-edd5-4ceb-9bdc-8a9f43b89725 tempest-ServerTagsTestJSON-1513198723 tempest-ServerTagsTestJSON-1513198723-project-member] [instance: 8659f63a-5df9-4ff8-84dd-0722026dc820] Processing image 15793716-f1d9-4a86-9030-717adf498693 {{(pid=62519) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1646.018075] env[62519]: DEBUG oslo_concurrency.lockutils [None req-24267348-edd5-4ceb-9bdc-8a9f43b89725 tempest-ServerTagsTestJSON-1513198723 tempest-ServerTagsTestJSON-1513198723-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1646.018198] env[62519]: DEBUG oslo_concurrency.lockutils [None req-24267348-edd5-4ceb-9bdc-8a9f43b89725 tempest-ServerTagsTestJSON-1513198723 tempest-ServerTagsTestJSON-1513198723-project-member] Acquired lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1646.019406] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-24267348-edd5-4ceb-9bdc-8a9f43b89725 tempest-ServerTagsTestJSON-1513198723 tempest-ServerTagsTestJSON-1513198723-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62519) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1646.019406] env[62519]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-7369631b-c3f3-4c57-b90d-c32e25dc5430 
{{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1646.028311] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-24267348-edd5-4ceb-9bdc-8a9f43b89725 tempest-ServerTagsTestJSON-1513198723 tempest-ServerTagsTestJSON-1513198723-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62519) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1646.028483] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-24267348-edd5-4ceb-9bdc-8a9f43b89725 tempest-ServerTagsTestJSON-1513198723 tempest-ServerTagsTestJSON-1513198723-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62519) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1646.029228] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f21a0709-8a80-4804-8304-52c360fb5de3 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1646.034935] env[62519]: DEBUG oslo_vmware.api [None req-24267348-edd5-4ceb-9bdc-8a9f43b89725 tempest-ServerTagsTestJSON-1513198723 tempest-ServerTagsTestJSON-1513198723-project-member] Waiting for the task: (returnval){ [ 1646.034935] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]523d48df-3f29-c63b-a708-647a9bff2b5a" [ 1646.034935] env[62519]: _type = "Task" [ 1646.034935] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1646.043474] env[62519]: DEBUG oslo_vmware.api [None req-24267348-edd5-4ceb-9bdc-8a9f43b89725 tempest-ServerTagsTestJSON-1513198723 tempest-ServerTagsTestJSON-1513198723-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]523d48df-3f29-c63b-a708-647a9bff2b5a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1646.122797] env[62519]: DEBUG oslo_vmware.api [None req-b0f1427a-eff6-4068-bbe3-05ff2c38cc24 tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] Task: {'id': task-1802642, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.075129} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1646.123089] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-b0f1427a-eff6-4068-bbe3-05ff2c38cc24 tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] [instance: 99f22198-1a65-4d0d-b665-90c7063dbdb9] Extended root virtual disk {{(pid=62519) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1646.123872] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a4c1f91-b4b3-4572-b7c0-d07680d77fcb {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1646.146461] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-b0f1427a-eff6-4068-bbe3-05ff2c38cc24 tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] [instance: 99f22198-1a65-4d0d-b665-90c7063dbdb9] Reconfiguring VM instance instance-0000003e to attach disk [datastore1] 99f22198-1a65-4d0d-b665-90c7063dbdb9/99f22198-1a65-4d0d-b665-90c7063dbdb9.vmdk or device None with type sparse {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1646.146799] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-bb18f76b-7ef7-4a84-8f1c-2984c272a55f {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1646.170026] env[62519]: DEBUG oslo_vmware.api [None req-b0f1427a-eff6-4068-bbe3-05ff2c38cc24 tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] Waiting for the task: (returnval){ [ 1646.170026] env[62519]: value = "task-1802643" [ 1646.170026] env[62519]: _type = "Task" [ 1646.170026] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1646.178034] env[62519]: DEBUG oslo_vmware.api [None req-b0f1427a-eff6-4068-bbe3-05ff2c38cc24 tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] Task: {'id': task-1802643, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1646.251962] env[62519]: DEBUG oslo_concurrency.lockutils [None req-6a19a777-daec-40e9-be4e-bd92f98777ed tempest-VolumesAdminNegativeTest-694306887 tempest-VolumesAdminNegativeTest-694306887-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1646.398708] env[62519]: INFO nova.compute.manager [None req-9f4c1900-996e-42d1-9657-410816fb70d2 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] [instance: 417588f8-6288-4ecd-9764-dbc923549c5d] Took 42.05 seconds to build instance. 
[ 1646.427095] env[62519]: DEBUG nova.network.neutron [None req-f0afbcc8-8045-4111-b3d2-45982468a24d tempest-ServerGroupTestJSON-1731423365 tempest-ServerGroupTestJSON-1731423365-project-member] [instance: 45d1aa86-a5c8-4e75-a6c8-5f55461702f8] Successfully updated port: 431f108a-5529-45cb-8053-17140e36c28d {{(pid=62519) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1646.548110] env[62519]: DEBUG oslo_vmware.api [None req-24267348-edd5-4ceb-9bdc-8a9f43b89725 tempest-ServerTagsTestJSON-1513198723 tempest-ServerTagsTestJSON-1513198723-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]523d48df-3f29-c63b-a708-647a9bff2b5a, 'name': SearchDatastore_Task, 'duration_secs': 0.010547} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1646.548949] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-268ef101-6537-47af-9cc9-70452faead7b {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1646.556113] env[62519]: DEBUG oslo_vmware.api [None req-24267348-edd5-4ceb-9bdc-8a9f43b89725 tempest-ServerTagsTestJSON-1513198723 tempest-ServerTagsTestJSON-1513198723-project-member] Waiting for the task: (returnval){ [ 1646.556113] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]521a8467-75e8-01eb-3ad4-d21675bd0b7d" [ 1646.556113] env[62519]: _type = "Task" [ 1646.556113] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1646.563772] env[62519]: DEBUG oslo_vmware.api [None req-24267348-edd5-4ceb-9bdc-8a9f43b89725 tempest-ServerTagsTestJSON-1513198723 tempest-ServerTagsTestJSON-1513198723-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]521a8467-75e8-01eb-3ad4-d21675bd0b7d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1646.682616] env[62519]: DEBUG oslo_vmware.api [None req-b0f1427a-eff6-4068-bbe3-05ff2c38cc24 tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] Task: {'id': task-1802643, 'name': ReconfigVM_Task, 'duration_secs': 0.291995} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1646.682961] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-b0f1427a-eff6-4068-bbe3-05ff2c38cc24 tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] [instance: 99f22198-1a65-4d0d-b665-90c7063dbdb9] Reconfigured VM instance instance-0000003e to attach disk [datastore1] 99f22198-1a65-4d0d-b665-90c7063dbdb9/99f22198-1a65-4d0d-b665-90c7063dbdb9.vmdk or device None with type sparse {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1646.683584] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-a14c1561-4760-4c6c-8a1c-549a16cdfc2a {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1646.686532] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45ccd2c1-67f8-416d-85d9-9d2f573dd136 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1646.693944] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14586ba5-fdc3-4129-819c-4e447feb5882 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1646.698454] env[62519]: DEBUG oslo_vmware.api [None req-b0f1427a-eff6-4068-bbe3-05ff2c38cc24 tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] Waiting for the task: (returnval){ [ 1646.698454] env[62519]: value = "task-1802644" [ 1646.698454] env[62519]: _type = "Task" [ 1646.698454] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1646.731116] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e9f22f6-df94-45fc-bccf-5e86dfa21ff6 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1646.735177] env[62519]: DEBUG oslo_vmware.api [None req-b0f1427a-eff6-4068-bbe3-05ff2c38cc24 tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] Task: {'id': task-1802644, 'name': Rename_Task} progress is 5%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1646.737645] env[62519]: DEBUG nova.compute.manager [req-4348276f-7056-42d8-a9fe-b5fc2d0769fa req-ed589dd2-f2b3-4e23-a4d0-1a8e9e1b37b6 service nova] [instance: ed716912-752e-4c6d-b6c6-fb349668fa93] Received event network-vif-deleted-ef550f91-d2ec-415e-8729-4b311d76b126 {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1646.737836] env[62519]: DEBUG nova.compute.manager [req-4348276f-7056-42d8-a9fe-b5fc2d0769fa req-ed589dd2-f2b3-4e23-a4d0-1a8e9e1b37b6 service nova] [instance: 45d1aa86-a5c8-4e75-a6c8-5f55461702f8] Received event network-vif-plugged-431f108a-5529-45cb-8053-17140e36c28d {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1646.737910] env[62519]: DEBUG oslo_concurrency.lockutils [req-4348276f-7056-42d8-a9fe-b5fc2d0769fa req-ed589dd2-f2b3-4e23-a4d0-1a8e9e1b37b6 service nova] Acquiring lock "45d1aa86-a5c8-4e75-a6c8-5f55461702f8-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1646.738151] env[62519]: DEBUG oslo_concurrency.lockutils [req-4348276f-7056-42d8-a9fe-b5fc2d0769fa req-ed589dd2-f2b3-4e23-a4d0-1a8e9e1b37b6 service nova] Lock "45d1aa86-a5c8-4e75-a6c8-5f55461702f8-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1646.738282] env[62519]: DEBUG oslo_concurrency.lockutils [req-4348276f-7056-42d8-a9fe-b5fc2d0769fa req-ed589dd2-f2b3-4e23-a4d0-1a8e9e1b37b6 service nova] Lock "45d1aa86-a5c8-4e75-a6c8-5f55461702f8-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1646.738453] env[62519]: DEBUG nova.compute.manager [req-4348276f-7056-42d8-a9fe-b5fc2d0769fa req-ed589dd2-f2b3-4e23-a4d0-1a8e9e1b37b6 service nova] [instance: 45d1aa86-a5c8-4e75-a6c8-5f55461702f8] No waiting events found dispatching network-vif-plugged-431f108a-5529-45cb-8053-17140e36c28d {{(pid=62519) pop_instance_event /opt/stack/nova/nova/compute/manager.py:323}} [ 1646.738640] env[62519]: WARNING nova.compute.manager [req-4348276f-7056-42d8-a9fe-b5fc2d0769fa req-ed589dd2-f2b3-4e23-a4d0-1a8e9e1b37b6 service nova] [instance: 45d1aa86-a5c8-4e75-a6c8-5f55461702f8] Received unexpected event network-vif-plugged-431f108a-5529-45cb-8053-17140e36c28d for instance with vm_state building and task_state spawning. [ 1646.738878] env[62519]: DEBUG nova.compute.manager [req-4348276f-7056-42d8-a9fe-b5fc2d0769fa req-ed589dd2-f2b3-4e23-a4d0-1a8e9e1b37b6 service nova] [instance: 45d1aa86-a5c8-4e75-a6c8-5f55461702f8] Received event network-changed-431f108a-5529-45cb-8053-17140e36c28d {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1646.738943] env[62519]: DEBUG nova.compute.manager [req-4348276f-7056-42d8-a9fe-b5fc2d0769fa req-ed589dd2-f2b3-4e23-a4d0-1a8e9e1b37b6 service nova] [instance: 45d1aa86-a5c8-4e75-a6c8-5f55461702f8] Refreshing instance network info cache due to event network-changed-431f108a-5529-45cb-8053-17140e36c28d. 
{{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11628}} [ 1646.739368] env[62519]: DEBUG oslo_concurrency.lockutils [req-4348276f-7056-42d8-a9fe-b5fc2d0769fa req-ed589dd2-f2b3-4e23-a4d0-1a8e9e1b37b6 service nova] Acquiring lock "refresh_cache-45d1aa86-a5c8-4e75-a6c8-5f55461702f8" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1646.739368] env[62519]: DEBUG oslo_concurrency.lockutils [req-4348276f-7056-42d8-a9fe-b5fc2d0769fa req-ed589dd2-f2b3-4e23-a4d0-1a8e9e1b37b6 service nova] Acquired lock "refresh_cache-45d1aa86-a5c8-4e75-a6c8-5f55461702f8" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1646.739490] env[62519]: DEBUG nova.network.neutron [req-4348276f-7056-42d8-a9fe-b5fc2d0769fa req-ed589dd2-f2b3-4e23-a4d0-1a8e9e1b37b6 service nova] [instance: 45d1aa86-a5c8-4e75-a6c8-5f55461702f8] Refreshing network info cache for port 431f108a-5529-45cb-8053-17140e36c28d {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1646.747969] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79f48f0d-95ee-484d-934f-a0e1a7f83aca {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1646.766813] env[62519]: DEBUG nova.compute.provider_tree [None req-d667a168-5c01-47dd-a85b-f4ea91c23881 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Inventory has not changed in ProviderTree for provider: f8ca0d98-9158-4b85-ae0e-b106f966dd44 {{(pid=62519) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1646.929866] env[62519]: DEBUG oslo_concurrency.lockutils [None req-f0afbcc8-8045-4111-b3d2-45982468a24d tempest-ServerGroupTestJSON-1731423365 tempest-ServerGroupTestJSON-1731423365-project-member] Acquiring lock "refresh_cache-45d1aa86-a5c8-4e75-a6c8-5f55461702f8" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1647.068046] env[62519]: DEBUG oslo_vmware.api [None req-24267348-edd5-4ceb-9bdc-8a9f43b89725 tempest-ServerTagsTestJSON-1513198723 tempest-ServerTagsTestJSON-1513198723-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]521a8467-75e8-01eb-3ad4-d21675bd0b7d, 'name': SearchDatastore_Task, 'duration_secs': 0.017619} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1647.068555] env[62519]: DEBUG oslo_concurrency.lockutils [None req-24267348-edd5-4ceb-9bdc-8a9f43b89725 tempest-ServerTagsTestJSON-1513198723 tempest-ServerTagsTestJSON-1513198723-project-member] Releasing lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1647.068816] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-24267348-edd5-4ceb-9bdc-8a9f43b89725 tempest-ServerTagsTestJSON-1513198723 tempest-ServerTagsTestJSON-1513198723-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk to [datastore1] 8659f63a-5df9-4ff8-84dd-0722026dc820/8659f63a-5df9-4ff8-84dd-0722026dc820.vmdk {{(pid=62519) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1647.069094] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-9645025f-b5a8-4e70-ac80-2377885b5ec2 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1647.081694] env[62519]: DEBUG oslo_vmware.api [None req-24267348-edd5-4ceb-9bdc-8a9f43b89725 tempest-ServerTagsTestJSON-1513198723 tempest-ServerTagsTestJSON-1513198723-project-member] Waiting for the task: (returnval){ [ 1647.081694] env[62519]: value = "task-1802645" [ 1647.081694] env[62519]: _type = "Task" [ 1647.081694] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1647.090479] env[62519]: DEBUG oslo_vmware.api [None req-24267348-edd5-4ceb-9bdc-8a9f43b89725 tempest-ServerTagsTestJSON-1513198723 tempest-ServerTagsTestJSON-1513198723-project-member] Task: {'id': task-1802645, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1647.210633] env[62519]: DEBUG oslo_vmware.api [None req-b0f1427a-eff6-4068-bbe3-05ff2c38cc24 tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] Task: {'id': task-1802644, 'name': Rename_Task, 'duration_secs': 0.164095} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1647.210633] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-b0f1427a-eff6-4068-bbe3-05ff2c38cc24 tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] [instance: 99f22198-1a65-4d0d-b665-90c7063dbdb9] Powering on the VM {{(pid=62519) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1647.210796] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-5f9536b9-b311-49c7-80f7-3afe68093422 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1647.221233] env[62519]: DEBUG oslo_vmware.api [None req-b0f1427a-eff6-4068-bbe3-05ff2c38cc24 tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] Waiting for the task: (returnval){ [ 1647.221233] env[62519]: value = "task-1802646" [ 1647.221233] env[62519]: _type = "Task" [ 1647.221233] env[62519]: } to complete. 
{{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1647.236453] env[62519]: DEBUG oslo_vmware.api [None req-b0f1427a-eff6-4068-bbe3-05ff2c38cc24 tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] Task: {'id': task-1802646, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1647.272623] env[62519]: DEBUG nova.scheduler.client.report [None req-d667a168-5c01-47dd-a85b-f4ea91c23881 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Inventory has not changed for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 157, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1647.288112] env[62519]: DEBUG nova.network.neutron [req-4348276f-7056-42d8-a9fe-b5fc2d0769fa req-ed589dd2-f2b3-4e23-a4d0-1a8e9e1b37b6 service nova] [instance: 45d1aa86-a5c8-4e75-a6c8-5f55461702f8] Instance cache missing network info. {{(pid=62519) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1647.432333] env[62519]: DEBUG nova.network.neutron [req-4348276f-7056-42d8-a9fe-b5fc2d0769fa req-ed589dd2-f2b3-4e23-a4d0-1a8e9e1b37b6 service nova] [instance: 45d1aa86-a5c8-4e75-a6c8-5f55461702f8] Updating instance_info_cache with network_info: [] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1647.595578] env[62519]: DEBUG oslo_vmware.api [None req-24267348-edd5-4ceb-9bdc-8a9f43b89725 tempest-ServerTagsTestJSON-1513198723 tempest-ServerTagsTestJSON-1513198723-project-member] Task: {'id': task-1802645, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1647.732867] env[62519]: DEBUG oslo_vmware.api [None req-b0f1427a-eff6-4068-bbe3-05ff2c38cc24 tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] Task: {'id': task-1802646, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1647.778059] env[62519]: DEBUG oslo_concurrency.lockutils [None req-d667a168-5c01-47dd-a85b-f4ea91c23881 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.057s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1647.781559] env[62519]: DEBUG oslo_concurrency.lockutils [None req-096fc0a9-25e3-439e-a204-847b80d0d183 tempest-FloatingIPsAssociationTestJSON-131832216 tempest-FloatingIPsAssociationTestJSON-131832216-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 25.363s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1647.781946] env[62519]: DEBUG nova.objects.instance [None req-096fc0a9-25e3-439e-a204-847b80d0d183 tempest-FloatingIPsAssociationTestJSON-131832216 tempest-FloatingIPsAssociationTestJSON-131832216-project-member] Lazy-loading 'resources' on Instance uuid 9ac3344d-219a-487f-b83f-96c17cd86dad {{(pid=62519) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1647.796352] env[62519]: INFO nova.scheduler.client.report [None req-d667a168-5c01-47dd-a85b-f4ea91c23881 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Deleted allocations for instance 302edcd3-bd6e-41da-b731-4d4c1bb5c3c1 [ 1647.907863] env[62519]: DEBUG oslo_concurrency.lockutils [None req-9f4c1900-996e-42d1-9657-410816fb70d2 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Lock "417588f8-6288-4ecd-9764-dbc923549c5d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 63.206s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1647.937026] env[62519]: DEBUG oslo_concurrency.lockutils [req-4348276f-7056-42d8-a9fe-b5fc2d0769fa req-ed589dd2-f2b3-4e23-a4d0-1a8e9e1b37b6 service nova] Releasing lock "refresh_cache-45d1aa86-a5c8-4e75-a6c8-5f55461702f8" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1647.937188] env[62519]: DEBUG oslo_concurrency.lockutils [None req-f0afbcc8-8045-4111-b3d2-45982468a24d tempest-ServerGroupTestJSON-1731423365 tempest-ServerGroupTestJSON-1731423365-project-member] Acquired lock "refresh_cache-45d1aa86-a5c8-4e75-a6c8-5f55461702f8" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1647.937456] env[62519]: DEBUG nova.network.neutron [None req-f0afbcc8-8045-4111-b3d2-45982468a24d tempest-ServerGroupTestJSON-1731423365 tempest-ServerGroupTestJSON-1731423365-project-member] [instance: 45d1aa86-a5c8-4e75-a6c8-5f55461702f8] Building network info cache for instance {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1648.095575] env[62519]: DEBUG oslo_vmware.api [None req-24267348-edd5-4ceb-9bdc-8a9f43b89725 tempest-ServerTagsTestJSON-1513198723 tempest-ServerTagsTestJSON-1513198723-project-member] Task: {'id': task-1802645, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.576466} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1648.095950] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-24267348-edd5-4ceb-9bdc-8a9f43b89725 tempest-ServerTagsTestJSON-1513198723 tempest-ServerTagsTestJSON-1513198723-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk to [datastore1] 8659f63a-5df9-4ff8-84dd-0722026dc820/8659f63a-5df9-4ff8-84dd-0722026dc820.vmdk {{(pid=62519) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1648.096291] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-24267348-edd5-4ceb-9bdc-8a9f43b89725 tempest-ServerTagsTestJSON-1513198723 tempest-ServerTagsTestJSON-1513198723-project-member] [instance: 8659f63a-5df9-4ff8-84dd-0722026dc820] Extending root virtual disk to 1048576 {{(pid=62519) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1648.096841] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-af2afc76-0e28-4cf3-b8dd-0a5eb970b5c7 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1648.106174] env[62519]: DEBUG oslo_vmware.api [None req-24267348-edd5-4ceb-9bdc-8a9f43b89725 tempest-ServerTagsTestJSON-1513198723 tempest-ServerTagsTestJSON-1513198723-project-member] Waiting for the task: (returnval){ [ 1648.106174] env[62519]: value = "task-1802647" [ 1648.106174] env[62519]: _type = "Task" [ 1648.106174] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1648.116316] env[62519]: DEBUG oslo_vmware.api [None req-24267348-edd5-4ceb-9bdc-8a9f43b89725 tempest-ServerTagsTestJSON-1513198723 tempest-ServerTagsTestJSON-1513198723-project-member] Task: {'id': task-1802647, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1648.233451] env[62519]: DEBUG oslo_vmware.api [None req-b0f1427a-eff6-4068-bbe3-05ff2c38cc24 tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] Task: {'id': task-1802646, 'name': PowerOnVM_Task, 'duration_secs': 0.838941} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1648.233747] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-b0f1427a-eff6-4068-bbe3-05ff2c38cc24 tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] [instance: 99f22198-1a65-4d0d-b665-90c7063dbdb9] Powered on the VM {{(pid=62519) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1648.234015] env[62519]: INFO nova.compute.manager [None req-b0f1427a-eff6-4068-bbe3-05ff2c38cc24 tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] [instance: 99f22198-1a65-4d0d-b665-90c7063dbdb9] Took 8.95 seconds to spawn the instance on the hypervisor. 
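The 'Acquiring lock ... / Lock ... acquired by ... / "released" by' entries in this stretch come from oslo.concurrency's lockutils. Below is a short sketch of that locking pattern, assuming the oslo.concurrency library is installed; the "compute_resources" lock name is taken from the log, but the function body is a hypothetical example rather than Nova code.

from oslo_concurrency import lockutils


@lockutils.synchronized('compute_resources')
def update_usage(tracker, instance_uuid):
    # Runs with the in-process "compute_resources" lock held, so concurrent
    # updates are serialized, which is what the waited/held timings in the
    # log lines measure.
    tracker[instance_uuid] = tracker.get(instance_uuid, 0) + 1
    return tracker


# The same lock can also be taken explicitly as a context manager:
with lockutils.lock('compute_resources'):
    pass  # critical section

print(update_usage({}, '99f22198-1a65-4d0d-b665-90c7063dbdb9'))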
[ 1648.234137] env[62519]: DEBUG nova.compute.manager [None req-b0f1427a-eff6-4068-bbe3-05ff2c38cc24 tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] [instance: 99f22198-1a65-4d0d-b665-90c7063dbdb9] Checking state {{(pid=62519) _get_power_state /opt/stack/nova/nova/compute/manager.py:1799}} [ 1648.234963] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f901727-8877-4bce-b693-bff191c266d3 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1648.306826] env[62519]: DEBUG oslo_concurrency.lockutils [None req-d667a168-5c01-47dd-a85b-f4ea91c23881 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Lock "302edcd3-bd6e-41da-b731-4d4c1bb5c3c1" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 29.636s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1648.472454] env[62519]: DEBUG nova.network.neutron [None req-f0afbcc8-8045-4111-b3d2-45982468a24d tempest-ServerGroupTestJSON-1731423365 tempest-ServerGroupTestJSON-1731423365-project-member] [instance: 45d1aa86-a5c8-4e75-a6c8-5f55461702f8] Instance cache missing network info. {{(pid=62519) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1648.620171] env[62519]: DEBUG oslo_vmware.api [None req-24267348-edd5-4ceb-9bdc-8a9f43b89725 tempest-ServerTagsTestJSON-1513198723 tempest-ServerTagsTestJSON-1513198723-project-member] Task: {'id': task-1802647, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.167132} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1648.620625] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-24267348-edd5-4ceb-9bdc-8a9f43b89725 tempest-ServerTagsTestJSON-1513198723 tempest-ServerTagsTestJSON-1513198723-project-member] [instance: 8659f63a-5df9-4ff8-84dd-0722026dc820] Extended root virtual disk {{(pid=62519) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1648.621826] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c17c3aec-1647-4bba-8047-451ab26af248 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1648.652912] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-24267348-edd5-4ceb-9bdc-8a9f43b89725 tempest-ServerTagsTestJSON-1513198723 tempest-ServerTagsTestJSON-1513198723-project-member] [instance: 8659f63a-5df9-4ff8-84dd-0722026dc820] Reconfiguring VM instance instance-0000003f to attach disk [datastore1] 8659f63a-5df9-4ff8-84dd-0722026dc820/8659f63a-5df9-4ff8-84dd-0722026dc820.vmdk or device None with type sparse {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1648.656712] env[62519]: DEBUG nova.network.neutron [None req-f0afbcc8-8045-4111-b3d2-45982468a24d tempest-ServerGroupTestJSON-1731423365 tempest-ServerGroupTestJSON-1731423365-project-member] [instance: 45d1aa86-a5c8-4e75-a6c8-5f55461702f8] Updating instance_info_cache with network_info: [{"id": "431f108a-5529-45cb-8053-17140e36c28d", "address": "fa:16:3e:fb:01:be", "network": {"id": "98750ae7-aaf2-4333-a146-af031852339b", "bridge": "br-int", "label": "tempest-ServerGroupTestJSON-2096386042-network", "subnets": [{"cidr": "192.168.128.0/28", 
"dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "19cd4d296ffa4f24818759beed87b66e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9c621a9c-66f5-426a-8aab-bd8b2e912106", "external-id": "nsx-vlan-transportzone-485", "segmentation_id": 485, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap431f108a-55", "ovs_interfaceid": "431f108a-5529-45cb-8053-17140e36c28d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1648.659189] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9dfdd262-c10d-4d87-be0a-7bf428ddcda0 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1648.686657] env[62519]: DEBUG oslo_vmware.api [None req-24267348-edd5-4ceb-9bdc-8a9f43b89725 tempest-ServerTagsTestJSON-1513198723 tempest-ServerTagsTestJSON-1513198723-project-member] Waiting for the task: (returnval){ [ 1648.686657] env[62519]: value = "task-1802648" [ 1648.686657] env[62519]: _type = "Task" [ 1648.686657] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1648.692322] env[62519]: DEBUG oslo_concurrency.lockutils [None req-b6d3bfb6-d0cd-4359-b03b-5e40dfd88595 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Acquiring lock "1cf285cf-8b4c-4872-b179-72e38c0143e0" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1648.692559] env[62519]: DEBUG oslo_concurrency.lockutils [None req-b6d3bfb6-d0cd-4359-b03b-5e40dfd88595 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Lock "1cf285cf-8b4c-4872-b179-72e38c0143e0" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1648.706773] env[62519]: DEBUG oslo_vmware.api [None req-24267348-edd5-4ceb-9bdc-8a9f43b89725 tempest-ServerTagsTestJSON-1513198723 tempest-ServerTagsTestJSON-1513198723-project-member] Task: {'id': task-1802648, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1648.757701] env[62519]: INFO nova.compute.manager [None req-b0f1427a-eff6-4068-bbe3-05ff2c38cc24 tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] [instance: 99f22198-1a65-4d0d-b665-90c7063dbdb9] Took 40.60 seconds to build instance. 
[ 1648.837510] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-682ee45c-7605-4e07-9be4-62ee0d8962a6 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1648.845848] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-948e7026-48a6-46e3-a34d-85db36562c36 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1648.878714] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7c548a9-a719-44c8-8308-35ddbcffb629 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1648.887516] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-706119be-c01a-4dc7-af3e-a9ad933f1d4c {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1648.902143] env[62519]: DEBUG nova.compute.provider_tree [None req-096fc0a9-25e3-439e-a204-847b80d0d183 tempest-FloatingIPsAssociationTestJSON-131832216 tempest-FloatingIPsAssociationTestJSON-131832216-project-member] Inventory has not changed in ProviderTree for provider: f8ca0d98-9158-4b85-ae0e-b106f966dd44 {{(pid=62519) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1649.159662] env[62519]: DEBUG oslo_concurrency.lockutils [None req-f0afbcc8-8045-4111-b3d2-45982468a24d tempest-ServerGroupTestJSON-1731423365 tempest-ServerGroupTestJSON-1731423365-project-member] Releasing lock "refresh_cache-45d1aa86-a5c8-4e75-a6c8-5f55461702f8" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1649.160077] env[62519]: DEBUG nova.compute.manager [None req-f0afbcc8-8045-4111-b3d2-45982468a24d tempest-ServerGroupTestJSON-1731423365 tempest-ServerGroupTestJSON-1731423365-project-member] [instance: 45d1aa86-a5c8-4e75-a6c8-5f55461702f8] Instance network_info: |[{"id": "431f108a-5529-45cb-8053-17140e36c28d", "address": "fa:16:3e:fb:01:be", "network": {"id": "98750ae7-aaf2-4333-a146-af031852339b", "bridge": "br-int", "label": "tempest-ServerGroupTestJSON-2096386042-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "19cd4d296ffa4f24818759beed87b66e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9c621a9c-66f5-426a-8aab-bd8b2e912106", "external-id": "nsx-vlan-transportzone-485", "segmentation_id": 485, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap431f108a-55", "ovs_interfaceid": "431f108a-5529-45cb-8053-17140e36c28d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62519) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2004}} [ 1649.160530] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-f0afbcc8-8045-4111-b3d2-45982468a24d tempest-ServerGroupTestJSON-1731423365 
tempest-ServerGroupTestJSON-1731423365-project-member] [instance: 45d1aa86-a5c8-4e75-a6c8-5f55461702f8] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:fb:01:be', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '9c621a9c-66f5-426a-8aab-bd8b2e912106', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '431f108a-5529-45cb-8053-17140e36c28d', 'vif_model': 'vmxnet3'}] {{(pid=62519) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1649.168117] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-f0afbcc8-8045-4111-b3d2-45982468a24d tempest-ServerGroupTestJSON-1731423365 tempest-ServerGroupTestJSON-1731423365-project-member] Creating folder: Project (19cd4d296ffa4f24818759beed87b66e). Parent ref: group-v373567. {{(pid=62519) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1649.168442] env[62519]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-60abb34e-e3f6-44cb-914c-a7238be0baad {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1649.183926] env[62519]: INFO nova.virt.vmwareapi.vm_util [None req-f0afbcc8-8045-4111-b3d2-45982468a24d tempest-ServerGroupTestJSON-1731423365 tempest-ServerGroupTestJSON-1731423365-project-member] Created folder: Project (19cd4d296ffa4f24818759beed87b66e) in parent group-v373567. [ 1649.184172] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-f0afbcc8-8045-4111-b3d2-45982468a24d tempest-ServerGroupTestJSON-1731423365 tempest-ServerGroupTestJSON-1731423365-project-member] Creating folder: Instances. Parent ref: group-v373748. {{(pid=62519) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1649.184436] env[62519]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-a43227ae-ee55-4fa5-94fa-a4fb8e642f5d {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1649.198261] env[62519]: INFO nova.virt.vmwareapi.vm_util [None req-f0afbcc8-8045-4111-b3d2-45982468a24d tempest-ServerGroupTestJSON-1731423365 tempest-ServerGroupTestJSON-1731423365-project-member] Created folder: Instances in parent group-v373748. [ 1649.198552] env[62519]: DEBUG oslo.service.loopingcall [None req-f0afbcc8-8045-4111-b3d2-45982468a24d tempest-ServerGroupTestJSON-1731423365 tempest-ServerGroupTestJSON-1731423365-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62519) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1649.199255] env[62519]: DEBUG nova.compute.manager [None req-b6d3bfb6-d0cd-4359-b03b-5e40dfd88595 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] [instance: 1cf285cf-8b4c-4872-b179-72e38c0143e0] Starting instance... 
{{(pid=62519) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2441}} [ 1649.202170] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 45d1aa86-a5c8-4e75-a6c8-5f55461702f8] Creating VM on the ESX host {{(pid=62519) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1649.205870] env[62519]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-92d98045-f679-499b-9e47-5120d759f878 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1649.219966] env[62519]: DEBUG oslo_vmware.api [None req-24267348-edd5-4ceb-9bdc-8a9f43b89725 tempest-ServerTagsTestJSON-1513198723 tempest-ServerTagsTestJSON-1513198723-project-member] Task: {'id': task-1802648, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1649.227069] env[62519]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1649.227069] env[62519]: value = "task-1802651" [ 1649.227069] env[62519]: _type = "Task" [ 1649.227069] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1649.235520] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1802651, 'name': CreateVM_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1649.295111] env[62519]: DEBUG nova.compute.manager [req-4620b616-4482-4e23-b02b-3feb84b89a51 req-607dd111-8f1d-4edb-95ec-1b67c775fe49 service nova] [instance: 99f22198-1a65-4d0d-b665-90c7063dbdb9] Received event network-changed-55cb0499-3e6e-42ef-bd75-edafccb32e03 {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1649.295410] env[62519]: DEBUG nova.compute.manager [req-4620b616-4482-4e23-b02b-3feb84b89a51 req-607dd111-8f1d-4edb-95ec-1b67c775fe49 service nova] [instance: 99f22198-1a65-4d0d-b665-90c7063dbdb9] Refreshing instance network info cache due to event network-changed-55cb0499-3e6e-42ef-bd75-edafccb32e03. 
{{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11628}} [ 1649.295612] env[62519]: DEBUG oslo_concurrency.lockutils [req-4620b616-4482-4e23-b02b-3feb84b89a51 req-607dd111-8f1d-4edb-95ec-1b67c775fe49 service nova] Acquiring lock "refresh_cache-99f22198-1a65-4d0d-b665-90c7063dbdb9" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1649.299777] env[62519]: DEBUG oslo_concurrency.lockutils [req-4620b616-4482-4e23-b02b-3feb84b89a51 req-607dd111-8f1d-4edb-95ec-1b67c775fe49 service nova] Acquired lock "refresh_cache-99f22198-1a65-4d0d-b665-90c7063dbdb9" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1649.299777] env[62519]: DEBUG nova.network.neutron [req-4620b616-4482-4e23-b02b-3feb84b89a51 req-607dd111-8f1d-4edb-95ec-1b67c775fe49 service nova] [instance: 99f22198-1a65-4d0d-b665-90c7063dbdb9] Refreshing network info cache for port 55cb0499-3e6e-42ef-bd75-edafccb32e03 {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1649.405712] env[62519]: DEBUG nova.scheduler.client.report [None req-096fc0a9-25e3-439e-a204-847b80d0d183 tempest-FloatingIPsAssociationTestJSON-131832216 tempest-FloatingIPsAssociationTestJSON-131832216-project-member] Inventory has not changed for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 157, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1649.707309] env[62519]: DEBUG oslo_vmware.api [None req-24267348-edd5-4ceb-9bdc-8a9f43b89725 tempest-ServerTagsTestJSON-1513198723 tempest-ServerTagsTestJSON-1513198723-project-member] Task: {'id': task-1802648, 'name': ReconfigVM_Task, 'duration_secs': 0.89044} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1649.709717] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-24267348-edd5-4ceb-9bdc-8a9f43b89725 tempest-ServerTagsTestJSON-1513198723 tempest-ServerTagsTestJSON-1513198723-project-member] [instance: 8659f63a-5df9-4ff8-84dd-0722026dc820] Reconfigured VM instance instance-0000003f to attach disk [datastore1] 8659f63a-5df9-4ff8-84dd-0722026dc820/8659f63a-5df9-4ff8-84dd-0722026dc820.vmdk or device None with type sparse {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1649.710550] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-a9f8f085-00e2-4f30-a867-49e44bfa7669 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1649.717957] env[62519]: DEBUG oslo_vmware.api [None req-24267348-edd5-4ceb-9bdc-8a9f43b89725 tempest-ServerTagsTestJSON-1513198723 tempest-ServerTagsTestJSON-1513198723-project-member] Waiting for the task: (returnval){ [ 1649.717957] env[62519]: value = "task-1802652" [ 1649.717957] env[62519]: _type = "Task" [ 1649.717957] env[62519]: } to complete. 
{{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1649.725308] env[62519]: DEBUG oslo_concurrency.lockutils [None req-b6d3bfb6-d0cd-4359-b03b-5e40dfd88595 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1649.734242] env[62519]: DEBUG oslo_vmware.api [None req-24267348-edd5-4ceb-9bdc-8a9f43b89725 tempest-ServerTagsTestJSON-1513198723 tempest-ServerTagsTestJSON-1513198723-project-member] Task: {'id': task-1802652, 'name': Rename_Task} progress is 6%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1649.739369] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1802651, 'name': CreateVM_Task, 'duration_secs': 0.434338} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1649.739533] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 45d1aa86-a5c8-4e75-a6c8-5f55461702f8] Created VM on the ESX host {{(pid=62519) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1649.740962] env[62519]: DEBUG oslo_concurrency.lockutils [None req-f0afbcc8-8045-4111-b3d2-45982468a24d tempest-ServerGroupTestJSON-1731423365 tempest-ServerGroupTestJSON-1731423365-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1649.740962] env[62519]: DEBUG oslo_concurrency.lockutils [None req-f0afbcc8-8045-4111-b3d2-45982468a24d tempest-ServerGroupTestJSON-1731423365 tempest-ServerGroupTestJSON-1731423365-project-member] Acquired lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1649.740962] env[62519]: DEBUG oslo_concurrency.lockutils [None req-f0afbcc8-8045-4111-b3d2-45982468a24d tempest-ServerGroupTestJSON-1731423365 tempest-ServerGroupTestJSON-1731423365-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1649.740962] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-698ddf90-12b6-42ed-b1b6-47447be35aac {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1649.745918] env[62519]: DEBUG oslo_vmware.api [None req-f0afbcc8-8045-4111-b3d2-45982468a24d tempest-ServerGroupTestJSON-1731423365 tempest-ServerGroupTestJSON-1731423365-project-member] Waiting for the task: (returnval){ [ 1649.745918] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]52fa318d-614d-f9fd-43e7-4a188ee61610" [ 1649.745918] env[62519]: _type = "Task" [ 1649.745918] env[62519]: } to complete. 
{{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1649.755672] env[62519]: DEBUG oslo_vmware.api [None req-f0afbcc8-8045-4111-b3d2-45982468a24d tempest-ServerGroupTestJSON-1731423365 tempest-ServerGroupTestJSON-1731423365-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52fa318d-614d-f9fd-43e7-4a188ee61610, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1649.913475] env[62519]: DEBUG oslo_concurrency.lockutils [None req-096fc0a9-25e3-439e-a204-847b80d0d183 tempest-FloatingIPsAssociationTestJSON-131832216 tempest-FloatingIPsAssociationTestJSON-131832216-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.132s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1649.916474] env[62519]: DEBUG oslo_concurrency.lockutils [None req-8c697949-3a2a-4b38-83ce-543b154f5eb6 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 23.920s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1649.916721] env[62519]: DEBUG nova.objects.instance [None req-8c697949-3a2a-4b38-83ce-543b154f5eb6 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Lazy-loading 'resources' on Instance uuid 80ef3fd4-b9ef-4fd2-a991-feec78a0c81d {{(pid=62519) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1649.944726] env[62519]: INFO nova.scheduler.client.report [None req-096fc0a9-25e3-439e-a204-847b80d0d183 tempest-FloatingIPsAssociationTestJSON-131832216 tempest-FloatingIPsAssociationTestJSON-131832216-project-member] Deleted allocations for instance 9ac3344d-219a-487f-b83f-96c17cd86dad [ 1650.042006] env[62519]: DEBUG nova.network.neutron [req-4620b616-4482-4e23-b02b-3feb84b89a51 req-607dd111-8f1d-4edb-95ec-1b67c775fe49 service nova] [instance: 99f22198-1a65-4d0d-b665-90c7063dbdb9] Updated VIF entry in instance network info cache for port 55cb0499-3e6e-42ef-bd75-edafccb32e03. 
{{(pid=62519) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1650.042397] env[62519]: DEBUG nova.network.neutron [req-4620b616-4482-4e23-b02b-3feb84b89a51 req-607dd111-8f1d-4edb-95ec-1b67c775fe49 service nova] [instance: 99f22198-1a65-4d0d-b665-90c7063dbdb9] Updating instance_info_cache with network_info: [{"id": "55cb0499-3e6e-42ef-bd75-edafccb32e03", "address": "fa:16:3e:cb:3e:93", "network": {"id": "227244cd-a495-40a7-b8e1-1fb111ecd3ef", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-482185283-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.253", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "91c1732890db42f98f538f7a5ac0d542", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2bf99f85-3a5c-47c6-a603-e215be6ab0bd", "external-id": "nsx-vlan-transportzone-855", "segmentation_id": 855, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap55cb0499-3e", "ovs_interfaceid": "55cb0499-3e6e-42ef-bd75-edafccb32e03", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1650.228925] env[62519]: DEBUG oslo_vmware.api [None req-24267348-edd5-4ceb-9bdc-8a9f43b89725 tempest-ServerTagsTestJSON-1513198723 tempest-ServerTagsTestJSON-1513198723-project-member] Task: {'id': task-1802652, 'name': Rename_Task, 'duration_secs': 0.16922} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1650.229234] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-24267348-edd5-4ceb-9bdc-8a9f43b89725 tempest-ServerTagsTestJSON-1513198723 tempest-ServerTagsTestJSON-1513198723-project-member] [instance: 8659f63a-5df9-4ff8-84dd-0722026dc820] Powering on the VM {{(pid=62519) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1650.229483] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-8292ead9-7fc8-4e4b-8dda-5302ce5fd518 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1650.237305] env[62519]: DEBUG oslo_vmware.api [None req-24267348-edd5-4ceb-9bdc-8a9f43b89725 tempest-ServerTagsTestJSON-1513198723 tempest-ServerTagsTestJSON-1513198723-project-member] Waiting for the task: (returnval){ [ 1650.237305] env[62519]: value = "task-1802653" [ 1650.237305] env[62519]: _type = "Task" [ 1650.237305] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1650.245243] env[62519]: DEBUG oslo_vmware.api [None req-24267348-edd5-4ceb-9bdc-8a9f43b89725 tempest-ServerTagsTestJSON-1513198723 tempest-ServerTagsTestJSON-1513198723-project-member] Task: {'id': task-1802653, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1650.254927] env[62519]: DEBUG oslo_vmware.api [None req-f0afbcc8-8045-4111-b3d2-45982468a24d tempest-ServerGroupTestJSON-1731423365 tempest-ServerGroupTestJSON-1731423365-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52fa318d-614d-f9fd-43e7-4a188ee61610, 'name': SearchDatastore_Task, 'duration_secs': 0.011591} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1650.255260] env[62519]: DEBUG oslo_concurrency.lockutils [None req-f0afbcc8-8045-4111-b3d2-45982468a24d tempest-ServerGroupTestJSON-1731423365 tempest-ServerGroupTestJSON-1731423365-project-member] Releasing lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1650.255503] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-f0afbcc8-8045-4111-b3d2-45982468a24d tempest-ServerGroupTestJSON-1731423365 tempest-ServerGroupTestJSON-1731423365-project-member] [instance: 45d1aa86-a5c8-4e75-a6c8-5f55461702f8] Processing image 15793716-f1d9-4a86-9030-717adf498693 {{(pid=62519) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1650.255737] env[62519]: DEBUG oslo_concurrency.lockutils [None req-f0afbcc8-8045-4111-b3d2-45982468a24d tempest-ServerGroupTestJSON-1731423365 tempest-ServerGroupTestJSON-1731423365-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1650.255883] env[62519]: DEBUG oslo_concurrency.lockutils [None req-f0afbcc8-8045-4111-b3d2-45982468a24d tempest-ServerGroupTestJSON-1731423365 tempest-ServerGroupTestJSON-1731423365-project-member] Acquired lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1650.256073] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-f0afbcc8-8045-4111-b3d2-45982468a24d tempest-ServerGroupTestJSON-1731423365 tempest-ServerGroupTestJSON-1731423365-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62519) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1650.256319] env[62519]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-1f372ab1-eecb-4160-930e-072feb6d2cd7 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1650.265056] env[62519]: DEBUG oslo_concurrency.lockutils [None req-b0f1427a-eff6-4068-bbe3-05ff2c38cc24 tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] Lock "99f22198-1a65-4d0d-b665-90c7063dbdb9" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 63.505s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1650.266409] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-f0afbcc8-8045-4111-b3d2-45982468a24d tempest-ServerGroupTestJSON-1731423365 tempest-ServerGroupTestJSON-1731423365-project-member] Created directory with path 
[datastore1] devstack-image-cache_base {{(pid=62519) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1650.266570] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-f0afbcc8-8045-4111-b3d2-45982468a24d tempest-ServerGroupTestJSON-1731423365 tempest-ServerGroupTestJSON-1731423365-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62519) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1650.267308] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e1094d3d-53c9-477e-bc31-37c47ee8aa55 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1650.273423] env[62519]: DEBUG oslo_vmware.api [None req-f0afbcc8-8045-4111-b3d2-45982468a24d tempest-ServerGroupTestJSON-1731423365 tempest-ServerGroupTestJSON-1731423365-project-member] Waiting for the task: (returnval){ [ 1650.273423] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]52f2fbc6-ce37-db76-015a-5af48dbfd846" [ 1650.273423] env[62519]: _type = "Task" [ 1650.273423] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1650.282665] env[62519]: DEBUG oslo_vmware.api [None req-f0afbcc8-8045-4111-b3d2-45982468a24d tempest-ServerGroupTestJSON-1731423365 tempest-ServerGroupTestJSON-1731423365-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52f2fbc6-ce37-db76-015a-5af48dbfd846, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1650.455143] env[62519]: DEBUG oslo_concurrency.lockutils [None req-096fc0a9-25e3-439e-a204-847b80d0d183 tempest-FloatingIPsAssociationTestJSON-131832216 tempest-FloatingIPsAssociationTestJSON-131832216-project-member] Lock "9ac3344d-219a-487f-b83f-96c17cd86dad" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 31.737s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1650.456333] env[62519]: DEBUG oslo_concurrency.lockutils [None req-388da41b-8675-4c7e-9421-8a1f0be3acc0 tempest-FloatingIPsAssociationTestJSON-131832216 tempest-FloatingIPsAssociationTestJSON-131832216-project-member] Lock "9ac3344d-219a-487f-b83f-96c17cd86dad" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 25.470s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1650.456552] env[62519]: DEBUG oslo_concurrency.lockutils [None req-388da41b-8675-4c7e-9421-8a1f0be3acc0 tempest-FloatingIPsAssociationTestJSON-131832216 tempest-FloatingIPsAssociationTestJSON-131832216-project-member] Acquiring lock "9ac3344d-219a-487f-b83f-96c17cd86dad-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1650.456756] env[62519]: DEBUG oslo_concurrency.lockutils [None req-388da41b-8675-4c7e-9421-8a1f0be3acc0 tempest-FloatingIPsAssociationTestJSON-131832216 tempest-FloatingIPsAssociationTestJSON-131832216-project-member] Lock "9ac3344d-219a-487f-b83f-96c17cd86dad-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62519) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1650.457652] env[62519]: DEBUG oslo_concurrency.lockutils [None req-388da41b-8675-4c7e-9421-8a1f0be3acc0 tempest-FloatingIPsAssociationTestJSON-131832216 tempest-FloatingIPsAssociationTestJSON-131832216-project-member] Lock "9ac3344d-219a-487f-b83f-96c17cd86dad-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1650.459020] env[62519]: INFO nova.compute.manager [None req-388da41b-8675-4c7e-9421-8a1f0be3acc0 tempest-FloatingIPsAssociationTestJSON-131832216 tempest-FloatingIPsAssociationTestJSON-131832216-project-member] [instance: 9ac3344d-219a-487f-b83f-96c17cd86dad] Terminating instance [ 1650.545102] env[62519]: DEBUG oslo_concurrency.lockutils [req-4620b616-4482-4e23-b02b-3feb84b89a51 req-607dd111-8f1d-4edb-95ec-1b67c775fe49 service nova] Releasing lock "refresh_cache-99f22198-1a65-4d0d-b665-90c7063dbdb9" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1650.756933] env[62519]: DEBUG oslo_vmware.api [None req-24267348-edd5-4ceb-9bdc-8a9f43b89725 tempest-ServerTagsTestJSON-1513198723 tempest-ServerTagsTestJSON-1513198723-project-member] Task: {'id': task-1802653, 'name': PowerOnVM_Task} progress is 100%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1650.786301] env[62519]: DEBUG oslo_vmware.api [None req-f0afbcc8-8045-4111-b3d2-45982468a24d tempest-ServerGroupTestJSON-1731423365 tempest-ServerGroupTestJSON-1731423365-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52f2fbc6-ce37-db76-015a-5af48dbfd846, 'name': SearchDatastore_Task, 'duration_secs': 0.012609} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1650.789927] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b917db9b-9210-4da9-ad86-9db60f7d9003 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1650.796022] env[62519]: DEBUG oslo_vmware.api [None req-f0afbcc8-8045-4111-b3d2-45982468a24d tempest-ServerGroupTestJSON-1731423365 tempest-ServerGroupTestJSON-1731423365-project-member] Waiting for the task: (returnval){ [ 1650.796022] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]5222b1d5-201b-7545-4180-1b5db40115bf" [ 1650.796022] env[62519]: _type = "Task" [ 1650.796022] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1650.806892] env[62519]: DEBUG oslo_vmware.api [None req-f0afbcc8-8045-4111-b3d2-45982468a24d tempest-ServerGroupTestJSON-1731423365 tempest-ServerGroupTestJSON-1731423365-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]5222b1d5-201b-7545-4180-1b5db40115bf, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1650.863482] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07c4bd15-597d-4893-aaf4-a226cf3be8bd {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1650.871848] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2508c01d-2529-42bd-b140-ee37fbd69507 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1650.906038] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66e5a14c-be5c-4cf8-a4d4-1fecb18a50e6 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1650.915274] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5aa9c23e-770c-41e1-b65b-e2b7c2f6cff8 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1650.930692] env[62519]: DEBUG nova.compute.provider_tree [None req-8c697949-3a2a-4b38-83ce-543b154f5eb6 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Inventory has not changed in ProviderTree for provider: f8ca0d98-9158-4b85-ae0e-b106f966dd44 {{(pid=62519) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1650.962366] env[62519]: DEBUG oslo_concurrency.lockutils [None req-388da41b-8675-4c7e-9421-8a1f0be3acc0 tempest-FloatingIPsAssociationTestJSON-131832216 tempest-FloatingIPsAssociationTestJSON-131832216-project-member] Acquiring lock "refresh_cache-9ac3344d-219a-487f-b83f-96c17cd86dad" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1650.962540] env[62519]: DEBUG oslo_concurrency.lockutils [None req-388da41b-8675-4c7e-9421-8a1f0be3acc0 tempest-FloatingIPsAssociationTestJSON-131832216 tempest-FloatingIPsAssociationTestJSON-131832216-project-member] Acquired lock "refresh_cache-9ac3344d-219a-487f-b83f-96c17cd86dad" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1650.962717] env[62519]: DEBUG nova.network.neutron [None req-388da41b-8675-4c7e-9421-8a1f0be3acc0 tempest-FloatingIPsAssociationTestJSON-131832216 tempest-FloatingIPsAssociationTestJSON-131832216-project-member] [instance: 9ac3344d-219a-487f-b83f-96c17cd86dad] Building network info cache for instance {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1651.252735] env[62519]: DEBUG oslo_vmware.api [None req-24267348-edd5-4ceb-9bdc-8a9f43b89725 tempest-ServerTagsTestJSON-1513198723 tempest-ServerTagsTestJSON-1513198723-project-member] Task: {'id': task-1802653, 'name': PowerOnVM_Task, 'duration_secs': 0.559225} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1651.253036] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-24267348-edd5-4ceb-9bdc-8a9f43b89725 tempest-ServerTagsTestJSON-1513198723 tempest-ServerTagsTestJSON-1513198723-project-member] [instance: 8659f63a-5df9-4ff8-84dd-0722026dc820] Powered on the VM {{(pid=62519) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1651.253246] env[62519]: INFO nova.compute.manager [None req-24267348-edd5-4ceb-9bdc-8a9f43b89725 tempest-ServerTagsTestJSON-1513198723 tempest-ServerTagsTestJSON-1513198723-project-member] [instance: 8659f63a-5df9-4ff8-84dd-0722026dc820] Took 9.25 seconds to spawn the instance on the hypervisor. [ 1651.253425] env[62519]: DEBUG nova.compute.manager [None req-24267348-edd5-4ceb-9bdc-8a9f43b89725 tempest-ServerTagsTestJSON-1513198723 tempest-ServerTagsTestJSON-1513198723-project-member] [instance: 8659f63a-5df9-4ff8-84dd-0722026dc820] Checking state {{(pid=62519) _get_power_state /opt/stack/nova/nova/compute/manager.py:1799}} [ 1651.254253] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c79ed846-6a39-40f8-b8f3-f5b205b87938 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1651.307644] env[62519]: DEBUG oslo_vmware.api [None req-f0afbcc8-8045-4111-b3d2-45982468a24d tempest-ServerGroupTestJSON-1731423365 tempest-ServerGroupTestJSON-1731423365-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]5222b1d5-201b-7545-4180-1b5db40115bf, 'name': SearchDatastore_Task, 'duration_secs': 0.014366} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1651.308676] env[62519]: DEBUG oslo_concurrency.lockutils [None req-f0afbcc8-8045-4111-b3d2-45982468a24d tempest-ServerGroupTestJSON-1731423365 tempest-ServerGroupTestJSON-1731423365-project-member] Releasing lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1651.308947] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-f0afbcc8-8045-4111-b3d2-45982468a24d tempest-ServerGroupTestJSON-1731423365 tempest-ServerGroupTestJSON-1731423365-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk to [datastore1] 45d1aa86-a5c8-4e75-a6c8-5f55461702f8/45d1aa86-a5c8-4e75-a6c8-5f55461702f8.vmdk {{(pid=62519) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1651.309229] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-07b9e60a-337a-41c5-8924-873ba3038765 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1651.318324] env[62519]: DEBUG oslo_vmware.api [None req-f0afbcc8-8045-4111-b3d2-45982468a24d tempest-ServerGroupTestJSON-1731423365 tempest-ServerGroupTestJSON-1731423365-project-member] Waiting for the task: (returnval){ [ 1651.318324] env[62519]: value = "task-1802654" [ 1651.318324] env[62519]: _type = "Task" [ 1651.318324] env[62519]: } to complete. 
{{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1651.328231] env[62519]: DEBUG oslo_vmware.api [None req-f0afbcc8-8045-4111-b3d2-45982468a24d tempest-ServerGroupTestJSON-1731423365 tempest-ServerGroupTestJSON-1731423365-project-member] Task: {'id': task-1802654, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1651.434126] env[62519]: DEBUG nova.scheduler.client.report [None req-8c697949-3a2a-4b38-83ce-543b154f5eb6 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Inventory has not changed for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 157, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1651.464541] env[62519]: DEBUG nova.compute.utils [None req-388da41b-8675-4c7e-9421-8a1f0be3acc0 tempest-FloatingIPsAssociationTestJSON-131832216 tempest-FloatingIPsAssociationTestJSON-131832216-project-member] [instance: 9ac3344d-219a-487f-b83f-96c17cd86dad] Can not refresh info_cache because instance was not found {{(pid=62519) refresh_info_cache_for_instance /opt/stack/nova/nova/compute/utils.py:1055}} [ 1651.480943] env[62519]: DEBUG nova.network.neutron [None req-388da41b-8675-4c7e-9421-8a1f0be3acc0 tempest-FloatingIPsAssociationTestJSON-131832216 tempest-FloatingIPsAssociationTestJSON-131832216-project-member] [instance: 9ac3344d-219a-487f-b83f-96c17cd86dad] Instance cache missing network info. {{(pid=62519) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1651.559377] env[62519]: DEBUG nova.network.neutron [None req-388da41b-8675-4c7e-9421-8a1f0be3acc0 tempest-FloatingIPsAssociationTestJSON-131832216 tempest-FloatingIPsAssociationTestJSON-131832216-project-member] [instance: 9ac3344d-219a-487f-b83f-96c17cd86dad] Updating instance_info_cache with network_info: [] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1651.772790] env[62519]: INFO nova.compute.manager [None req-24267348-edd5-4ceb-9bdc-8a9f43b89725 tempest-ServerTagsTestJSON-1513198723 tempest-ServerTagsTestJSON-1513198723-project-member] [instance: 8659f63a-5df9-4ff8-84dd-0722026dc820] Took 39.79 seconds to build instance. [ 1651.830373] env[62519]: DEBUG oslo_vmware.api [None req-f0afbcc8-8045-4111-b3d2-45982468a24d tempest-ServerGroupTestJSON-1731423365 tempest-ServerGroupTestJSON-1731423365-project-member] Task: {'id': task-1802654, 'name': CopyVirtualDisk_Task} progress is 4%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1651.863125] env[62519]: DEBUG oslo_concurrency.lockutils [None req-909c1165-bf3e-436f-ab29-cc0666196a96 tempest-FloatingIPsAssociationTestJSON-131832216 tempest-FloatingIPsAssociationTestJSON-131832216-project-member] Acquiring lock "09eefc1a-011b-4d2c-ab75-a1fcee740907" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1651.864173] env[62519]: DEBUG oslo_concurrency.lockutils [None req-909c1165-bf3e-436f-ab29-cc0666196a96 tempest-FloatingIPsAssociationTestJSON-131832216 tempest-FloatingIPsAssociationTestJSON-131832216-project-member] Lock "09eefc1a-011b-4d2c-ab75-a1fcee740907" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1651.864173] env[62519]: DEBUG oslo_concurrency.lockutils [None req-909c1165-bf3e-436f-ab29-cc0666196a96 tempest-FloatingIPsAssociationTestJSON-131832216 tempest-FloatingIPsAssociationTestJSON-131832216-project-member] Acquiring lock "09eefc1a-011b-4d2c-ab75-a1fcee740907-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1651.864173] env[62519]: DEBUG oslo_concurrency.lockutils [None req-909c1165-bf3e-436f-ab29-cc0666196a96 tempest-FloatingIPsAssociationTestJSON-131832216 tempest-FloatingIPsAssociationTestJSON-131832216-project-member] Lock "09eefc1a-011b-4d2c-ab75-a1fcee740907-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1651.864909] env[62519]: DEBUG oslo_concurrency.lockutils [None req-909c1165-bf3e-436f-ab29-cc0666196a96 tempest-FloatingIPsAssociationTestJSON-131832216 tempest-FloatingIPsAssociationTestJSON-131832216-project-member] Lock "09eefc1a-011b-4d2c-ab75-a1fcee740907-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1651.866659] env[62519]: INFO nova.compute.manager [None req-909c1165-bf3e-436f-ab29-cc0666196a96 tempest-FloatingIPsAssociationTestJSON-131832216 tempest-FloatingIPsAssociationTestJSON-131832216-project-member] [instance: 09eefc1a-011b-4d2c-ab75-a1fcee740907] Terminating instance [ 1651.941132] env[62519]: DEBUG oslo_concurrency.lockutils [None req-8c697949-3a2a-4b38-83ce-543b154f5eb6 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.023s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1651.943390] env[62519]: DEBUG oslo_concurrency.lockutils [None req-aeecf1a6-198a-4c5f-8645-99f7ca1d1fc0 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 25.418s {{(pid=62519) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1651.945986] env[62519]: INFO nova.compute.claims [None req-aeecf1a6-198a-4c5f-8645-99f7ca1d1fc0 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] [instance: ad0af10d-5063-4344-b12f-1d3ee9ea1090] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1651.968030] env[62519]: INFO nova.scheduler.client.report [None req-8c697949-3a2a-4b38-83ce-543b154f5eb6 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Deleted allocations for instance 80ef3fd4-b9ef-4fd2-a991-feec78a0c81d [ 1652.061790] env[62519]: DEBUG oslo_concurrency.lockutils [None req-388da41b-8675-4c7e-9421-8a1f0be3acc0 tempest-FloatingIPsAssociationTestJSON-131832216 tempest-FloatingIPsAssociationTestJSON-131832216-project-member] Releasing lock "refresh_cache-9ac3344d-219a-487f-b83f-96c17cd86dad" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1652.062281] env[62519]: DEBUG nova.compute.manager [None req-388da41b-8675-4c7e-9421-8a1f0be3acc0 tempest-FloatingIPsAssociationTestJSON-131832216 tempest-FloatingIPsAssociationTestJSON-131832216-project-member] [instance: 9ac3344d-219a-487f-b83f-96c17cd86dad] Start destroying the instance on the hypervisor. {{(pid=62519) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3166}} [ 1652.062554] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-388da41b-8675-4c7e-9421-8a1f0be3acc0 tempest-FloatingIPsAssociationTestJSON-131832216 tempest-FloatingIPsAssociationTestJSON-131832216-project-member] [instance: 9ac3344d-219a-487f-b83f-96c17cd86dad] Destroying instance {{(pid=62519) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1652.062925] env[62519]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-82e64074-e28c-427b-bed0-13c6c8cb3a33 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1652.073807] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1b77828-02b1-4063-81fb-bd07a4d053cc {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1652.117585] env[62519]: WARNING nova.virt.vmwareapi.vmops [None req-388da41b-8675-4c7e-9421-8a1f0be3acc0 tempest-FloatingIPsAssociationTestJSON-131832216 tempest-FloatingIPsAssociationTestJSON-131832216-project-member] [instance: 9ac3344d-219a-487f-b83f-96c17cd86dad] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 9ac3344d-219a-487f-b83f-96c17cd86dad could not be found. [ 1652.117815] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-388da41b-8675-4c7e-9421-8a1f0be3acc0 tempest-FloatingIPsAssociationTestJSON-131832216 tempest-FloatingIPsAssociationTestJSON-131832216-project-member] [instance: 9ac3344d-219a-487f-b83f-96c17cd86dad] Instance destroyed {{(pid=62519) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1652.117999] env[62519]: INFO nova.compute.manager [None req-388da41b-8675-4c7e-9421-8a1f0be3acc0 tempest-FloatingIPsAssociationTestJSON-131832216 tempest-FloatingIPsAssociationTestJSON-131832216-project-member] [instance: 9ac3344d-219a-487f-b83f-96c17cd86dad] Took 0.06 seconds to destroy the instance on the hypervisor. 
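The entries before and after this point repeat the same oslo.vmware call-and-poll pattern: a vSphere method is issued ("Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-..."), the call returns a Task managed object, and the caller blocks in wait_for_task() while _poll_task emits the "Waiting for the task ...", "progress is N%" and "completed successfully ... duration_secs" lines. The snippet below is only an illustrative sketch of that underlying pattern, not Nova's actual code path (Nova goes through nova.virt.vmwareapi.vm_util and its session wrapper); the vCenter host, credentials, retry/poll values, datacenter reference and datastore paths are placeholders, not values taken from this log.

    # Sketch: issue an asynchronous vSphere call and poll its Task with
    # oslo.vmware, mirroring the "Invoking ... / progress is N% /
    # completed successfully" sequence in the surrounding log entries.
    from oslo_vmware import api as vmware_api


    def copy_vmdk(host, user, password, dc_ref, src_path, dst_path):
        # Create a vSphere API session (placeholder retry/poll settings);
        # the poll interval drives how often progress lines are logged.
        session = vmware_api.VMwareAPISession(
            host, user, password,
            10,    # api_retry_count
            0.5)   # task_poll_interval in seconds
        try:
            vdm = session.vim.service_content.virtualDiskManager
            # The *_Task call returns immediately with a Task moref.
            task = session.invoke_api(
                session.vim, 'CopyVirtualDisk_Task', vdm,
                sourceName=src_path, sourceDatacenter=dc_ref,
                destName=dst_path, destDatacenter=dc_ref)
            # Block until the task completes; raises if the task errors out.
            return session.wait_for_task(task)
        finally:
            session.logout()

Read this way, each task in the log (SearchDatastore_Task, CopyVirtualDisk_Task, ExtendVirtualDisk_Task, ReconfigVM_Task, PowerOnVM_Task, DeleteDatastoreFile_Task) appears as one "Invoking" entry, a run of "progress is N%" polls, and a final "completed successfully" entry carrying duration_secs.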
[ 1652.118282] env[62519]: DEBUG oslo.service.loopingcall [None req-388da41b-8675-4c7e-9421-8a1f0be3acc0 tempest-FloatingIPsAssociationTestJSON-131832216 tempest-FloatingIPsAssociationTestJSON-131832216-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62519) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1652.118644] env[62519]: DEBUG nova.compute.manager [-] [instance: 9ac3344d-219a-487f-b83f-96c17cd86dad] Deallocating network for instance {{(pid=62519) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2297}} [ 1652.118778] env[62519]: DEBUG nova.network.neutron [-] [instance: 9ac3344d-219a-487f-b83f-96c17cd86dad] deallocate_for_instance() {{(pid=62519) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1652.134856] env[62519]: DEBUG nova.network.neutron [-] [instance: 9ac3344d-219a-487f-b83f-96c17cd86dad] Instance cache missing network info. {{(pid=62519) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1652.331729] env[62519]: DEBUG oslo_vmware.api [None req-f0afbcc8-8045-4111-b3d2-45982468a24d tempest-ServerGroupTestJSON-1731423365 tempest-ServerGroupTestJSON-1731423365-project-member] Task: {'id': task-1802654, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.885086} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1652.331729] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-f0afbcc8-8045-4111-b3d2-45982468a24d tempest-ServerGroupTestJSON-1731423365 tempest-ServerGroupTestJSON-1731423365-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk to [datastore1] 45d1aa86-a5c8-4e75-a6c8-5f55461702f8/45d1aa86-a5c8-4e75-a6c8-5f55461702f8.vmdk {{(pid=62519) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1652.331729] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-f0afbcc8-8045-4111-b3d2-45982468a24d tempest-ServerGroupTestJSON-1731423365 tempest-ServerGroupTestJSON-1731423365-project-member] [instance: 45d1aa86-a5c8-4e75-a6c8-5f55461702f8] Extending root virtual disk to 1048576 {{(pid=62519) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1652.331975] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-f458bff7-dbf9-4fc7-9cb2-09820f374e5c {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1652.340059] env[62519]: DEBUG oslo_vmware.api [None req-f0afbcc8-8045-4111-b3d2-45982468a24d tempest-ServerGroupTestJSON-1731423365 tempest-ServerGroupTestJSON-1731423365-project-member] Waiting for the task: (returnval){ [ 1652.340059] env[62519]: value = "task-1802655" [ 1652.340059] env[62519]: _type = "Task" [ 1652.340059] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1652.348697] env[62519]: DEBUG oslo_vmware.api [None req-f0afbcc8-8045-4111-b3d2-45982468a24d tempest-ServerGroupTestJSON-1731423365 tempest-ServerGroupTestJSON-1731423365-project-member] Task: {'id': task-1802655, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1652.373524] env[62519]: DEBUG nova.compute.manager [None req-909c1165-bf3e-436f-ab29-cc0666196a96 tempest-FloatingIPsAssociationTestJSON-131832216 tempest-FloatingIPsAssociationTestJSON-131832216-project-member] [instance: 09eefc1a-011b-4d2c-ab75-a1fcee740907] Start destroying the instance on the hypervisor. {{(pid=62519) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3166}} [ 1652.373803] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-909c1165-bf3e-436f-ab29-cc0666196a96 tempest-FloatingIPsAssociationTestJSON-131832216 tempest-FloatingIPsAssociationTestJSON-131832216-project-member] [instance: 09eefc1a-011b-4d2c-ab75-a1fcee740907] Destroying instance {{(pid=62519) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1652.374816] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3027a5dc-5eb8-45d9-bd78-2bd841f2dd6d {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1652.384045] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-909c1165-bf3e-436f-ab29-cc0666196a96 tempest-FloatingIPsAssociationTestJSON-131832216 tempest-FloatingIPsAssociationTestJSON-131832216-project-member] [instance: 09eefc1a-011b-4d2c-ab75-a1fcee740907] Powering off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1652.384045] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-4abfa430-7ed4-4d1f-93d3-80aac77754c0 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1652.391124] env[62519]: DEBUG oslo_vmware.api [None req-909c1165-bf3e-436f-ab29-cc0666196a96 tempest-FloatingIPsAssociationTestJSON-131832216 tempest-FloatingIPsAssociationTestJSON-131832216-project-member] Waiting for the task: (returnval){ [ 1652.391124] env[62519]: value = "task-1802656" [ 1652.391124] env[62519]: _type = "Task" [ 1652.391124] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1652.402569] env[62519]: DEBUG oslo_vmware.api [None req-909c1165-bf3e-436f-ab29-cc0666196a96 tempest-FloatingIPsAssociationTestJSON-131832216 tempest-FloatingIPsAssociationTestJSON-131832216-project-member] Task: {'id': task-1802656, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1652.478943] env[62519]: DEBUG oslo_concurrency.lockutils [None req-8c697949-3a2a-4b38-83ce-543b154f5eb6 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Lock "80ef3fd4-b9ef-4fd2-a991-feec78a0c81d" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 29.795s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1652.637501] env[62519]: DEBUG nova.network.neutron [-] [instance: 9ac3344d-219a-487f-b83f-96c17cd86dad] Updating instance_info_cache with network_info: [] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1652.851576] env[62519]: DEBUG oslo_vmware.api [None req-f0afbcc8-8045-4111-b3d2-45982468a24d tempest-ServerGroupTestJSON-1731423365 tempest-ServerGroupTestJSON-1731423365-project-member] Task: {'id': task-1802655, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.076376} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1652.851576] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-f0afbcc8-8045-4111-b3d2-45982468a24d tempest-ServerGroupTestJSON-1731423365 tempest-ServerGroupTestJSON-1731423365-project-member] [instance: 45d1aa86-a5c8-4e75-a6c8-5f55461702f8] Extended root virtual disk {{(pid=62519) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1652.852369] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81e7213f-6103-4b4d-9282-75167961a056 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1652.878749] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-f0afbcc8-8045-4111-b3d2-45982468a24d tempest-ServerGroupTestJSON-1731423365 tempest-ServerGroupTestJSON-1731423365-project-member] [instance: 45d1aa86-a5c8-4e75-a6c8-5f55461702f8] Reconfiguring VM instance instance-00000040 to attach disk [datastore1] 45d1aa86-a5c8-4e75-a6c8-5f55461702f8/45d1aa86-a5c8-4e75-a6c8-5f55461702f8.vmdk or device None with type sparse {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1652.879060] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-054fc0ae-7611-4703-8138-ffcc03103512 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1652.904518] env[62519]: DEBUG oslo_vmware.api [None req-909c1165-bf3e-436f-ab29-cc0666196a96 tempest-FloatingIPsAssociationTestJSON-131832216 tempest-FloatingIPsAssociationTestJSON-131832216-project-member] Task: {'id': task-1802656, 'name': PowerOffVM_Task, 'duration_secs': 0.431424} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1652.905862] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-909c1165-bf3e-436f-ab29-cc0666196a96 tempest-FloatingIPsAssociationTestJSON-131832216 tempest-FloatingIPsAssociationTestJSON-131832216-project-member] [instance: 09eefc1a-011b-4d2c-ab75-a1fcee740907] Powered off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1652.906054] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-909c1165-bf3e-436f-ab29-cc0666196a96 tempest-FloatingIPsAssociationTestJSON-131832216 tempest-FloatingIPsAssociationTestJSON-131832216-project-member] [instance: 09eefc1a-011b-4d2c-ab75-a1fcee740907] Unregistering the VM {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1652.906396] env[62519]: DEBUG oslo_vmware.api [None req-f0afbcc8-8045-4111-b3d2-45982468a24d tempest-ServerGroupTestJSON-1731423365 tempest-ServerGroupTestJSON-1731423365-project-member] Waiting for the task: (returnval){ [ 1652.906396] env[62519]: value = "task-1802657" [ 1652.906396] env[62519]: _type = "Task" [ 1652.906396] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1652.906554] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-659f8b9f-c1d6-4072-8ee0-6049fb2db52f {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1652.917411] env[62519]: DEBUG oslo_vmware.api [None req-f0afbcc8-8045-4111-b3d2-45982468a24d tempest-ServerGroupTestJSON-1731423365 tempest-ServerGroupTestJSON-1731423365-project-member] Task: {'id': task-1802657, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1653.054456] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-909c1165-bf3e-436f-ab29-cc0666196a96 tempest-FloatingIPsAssociationTestJSON-131832216 tempest-FloatingIPsAssociationTestJSON-131832216-project-member] [instance: 09eefc1a-011b-4d2c-ab75-a1fcee740907] Unregistered the VM {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1653.054739] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-909c1165-bf3e-436f-ab29-cc0666196a96 tempest-FloatingIPsAssociationTestJSON-131832216 tempest-FloatingIPsAssociationTestJSON-131832216-project-member] [instance: 09eefc1a-011b-4d2c-ab75-a1fcee740907] Deleting contents of the VM from datastore datastore1 {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1653.054824] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-909c1165-bf3e-436f-ab29-cc0666196a96 tempest-FloatingIPsAssociationTestJSON-131832216 tempest-FloatingIPsAssociationTestJSON-131832216-project-member] Deleting the datastore file [datastore1] 09eefc1a-011b-4d2c-ab75-a1fcee740907 {{(pid=62519) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1653.055159] env[62519]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-feff72b0-bc55-4827-927d-2e2b69b2e2d8 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1653.063268] env[62519]: DEBUG oslo_vmware.api [None req-909c1165-bf3e-436f-ab29-cc0666196a96 tempest-FloatingIPsAssociationTestJSON-131832216 tempest-FloatingIPsAssociationTestJSON-131832216-project-member] Waiting for the task: (returnval){ [ 1653.063268] env[62519]: value = "task-1802659" [ 1653.063268] env[62519]: _type = "Task" [ 1653.063268] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1653.078046] env[62519]: DEBUG oslo_vmware.api [None req-909c1165-bf3e-436f-ab29-cc0666196a96 tempest-FloatingIPsAssociationTestJSON-131832216 tempest-FloatingIPsAssociationTestJSON-131832216-project-member] Task: {'id': task-1802659, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1653.141380] env[62519]: INFO nova.compute.manager [-] [instance: 9ac3344d-219a-487f-b83f-96c17cd86dad] Took 1.02 seconds to deallocate network for instance. [ 1653.283681] env[62519]: DEBUG oslo_concurrency.lockutils [None req-24267348-edd5-4ceb-9bdc-8a9f43b89725 tempest-ServerTagsTestJSON-1513198723 tempest-ServerTagsTestJSON-1513198723-project-member] Lock "8659f63a-5df9-4ff8-84dd-0722026dc820" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 59.380s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1653.419935] env[62519]: DEBUG oslo_vmware.api [None req-f0afbcc8-8045-4111-b3d2-45982468a24d tempest-ServerGroupTestJSON-1731423365 tempest-ServerGroupTestJSON-1731423365-project-member] Task: {'id': task-1802657, 'name': ReconfigVM_Task, 'duration_secs': 0.317015} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1653.420292] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-f0afbcc8-8045-4111-b3d2-45982468a24d tempest-ServerGroupTestJSON-1731423365 tempest-ServerGroupTestJSON-1731423365-project-member] [instance: 45d1aa86-a5c8-4e75-a6c8-5f55461702f8] Reconfigured VM instance instance-00000040 to attach disk [datastore1] 45d1aa86-a5c8-4e75-a6c8-5f55461702f8/45d1aa86-a5c8-4e75-a6c8-5f55461702f8.vmdk or device None with type sparse {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1653.420859] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-26dd362b-b30e-48d1-940b-d91dbbe6fa51 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1653.430111] env[62519]: DEBUG oslo_vmware.api [None req-f0afbcc8-8045-4111-b3d2-45982468a24d tempest-ServerGroupTestJSON-1731423365 tempest-ServerGroupTestJSON-1731423365-project-member] Waiting for the task: (returnval){ [ 1653.430111] env[62519]: value = "task-1802660" [ 1653.430111] env[62519]: _type = "Task" [ 1653.430111] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1653.440452] env[62519]: DEBUG oslo_vmware.api [None req-f0afbcc8-8045-4111-b3d2-45982468a24d tempest-ServerGroupTestJSON-1731423365 tempest-ServerGroupTestJSON-1731423365-project-member] Task: {'id': task-1802660, 'name': Rename_Task} progress is 5%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1653.454017] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df4a2da7-9d3a-462f-93b2-21b70308e01a {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1653.462441] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf14cd1f-3eb1-4ce6-8906-4158763ef1d9 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1653.501528] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d500cfa7-6868-4343-9be4-ce0e8e5129af {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1653.511224] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dbc59866-48b3-4b93-9fa3-578c1200c5cd {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1653.528707] env[62519]: DEBUG nova.compute.provider_tree [None req-aeecf1a6-198a-4c5f-8645-99f7ca1d1fc0 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Inventory has not changed in ProviderTree for provider: f8ca0d98-9158-4b85-ae0e-b106f966dd44 {{(pid=62519) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1653.578573] env[62519]: DEBUG oslo_vmware.api [None req-909c1165-bf3e-436f-ab29-cc0666196a96 tempest-FloatingIPsAssociationTestJSON-131832216 tempest-FloatingIPsAssociationTestJSON-131832216-project-member] Task: {'id': task-1802659, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.304717} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1653.578722] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-909c1165-bf3e-436f-ab29-cc0666196a96 tempest-FloatingIPsAssociationTestJSON-131832216 tempest-FloatingIPsAssociationTestJSON-131832216-project-member] Deleted the datastore file {{(pid=62519) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1653.578902] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-909c1165-bf3e-436f-ab29-cc0666196a96 tempest-FloatingIPsAssociationTestJSON-131832216 tempest-FloatingIPsAssociationTestJSON-131832216-project-member] [instance: 09eefc1a-011b-4d2c-ab75-a1fcee740907] Deleted contents of the VM from datastore datastore1 {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1653.579088] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-909c1165-bf3e-436f-ab29-cc0666196a96 tempest-FloatingIPsAssociationTestJSON-131832216 tempest-FloatingIPsAssociationTestJSON-131832216-project-member] [instance: 09eefc1a-011b-4d2c-ab75-a1fcee740907] Instance destroyed {{(pid=62519) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1653.579252] env[62519]: INFO nova.compute.manager [None req-909c1165-bf3e-436f-ab29-cc0666196a96 tempest-FloatingIPsAssociationTestJSON-131832216 tempest-FloatingIPsAssociationTestJSON-131832216-project-member] [instance: 09eefc1a-011b-4d2c-ab75-a1fcee740907] Took 1.21 seconds to destroy the instance on the hypervisor. [ 1653.579501] env[62519]: DEBUG oslo.service.loopingcall [None req-909c1165-bf3e-436f-ab29-cc0666196a96 tempest-FloatingIPsAssociationTestJSON-131832216 tempest-FloatingIPsAssociationTestJSON-131832216-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62519) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1653.579695] env[62519]: DEBUG nova.compute.manager [-] [instance: 09eefc1a-011b-4d2c-ab75-a1fcee740907] Deallocating network for instance {{(pid=62519) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2297}} [ 1653.579790] env[62519]: DEBUG nova.network.neutron [-] [instance: 09eefc1a-011b-4d2c-ab75-a1fcee740907] deallocate_for_instance() {{(pid=62519) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1653.652364] env[62519]: INFO nova.compute.manager [None req-388da41b-8675-4c7e-9421-8a1f0be3acc0 tempest-FloatingIPsAssociationTestJSON-131832216 tempest-FloatingIPsAssociationTestJSON-131832216-project-member] [instance: 9ac3344d-219a-487f-b83f-96c17cd86dad] Instance disappeared during terminate [ 1653.652679] env[62519]: DEBUG oslo_concurrency.lockutils [None req-388da41b-8675-4c7e-9421-8a1f0be3acc0 tempest-FloatingIPsAssociationTestJSON-131832216 tempest-FloatingIPsAssociationTestJSON-131832216-project-member] Lock "9ac3344d-219a-487f-b83f-96c17cd86dad" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 3.196s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1653.654088] env[62519]: DEBUG oslo_concurrency.lockutils [None req-6efd5194-063f-45af-b886-0e88145beea5 tempest-ServerTagsTestJSON-1513198723 tempest-ServerTagsTestJSON-1513198723-project-member] Acquiring lock "8659f63a-5df9-4ff8-84dd-0722026dc820" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1653.654439] env[62519]: DEBUG oslo_concurrency.lockutils [None req-6efd5194-063f-45af-b886-0e88145beea5 tempest-ServerTagsTestJSON-1513198723 tempest-ServerTagsTestJSON-1513198723-project-member] Lock "8659f63a-5df9-4ff8-84dd-0722026dc820" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1653.654772] env[62519]: DEBUG oslo_concurrency.lockutils [None req-6efd5194-063f-45af-b886-0e88145beea5 tempest-ServerTagsTestJSON-1513198723 tempest-ServerTagsTestJSON-1513198723-project-member] Acquiring lock "8659f63a-5df9-4ff8-84dd-0722026dc820-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1653.656021] env[62519]: DEBUG oslo_concurrency.lockutils [None req-6efd5194-063f-45af-b886-0e88145beea5 tempest-ServerTagsTestJSON-1513198723 tempest-ServerTagsTestJSON-1513198723-project-member] Lock "8659f63a-5df9-4ff8-84dd-0722026dc820-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1653.656021] env[62519]: DEBUG oslo_concurrency.lockutils [None req-6efd5194-063f-45af-b886-0e88145beea5 tempest-ServerTagsTestJSON-1513198723 tempest-ServerTagsTestJSON-1513198723-project-member] Lock "8659f63a-5df9-4ff8-84dd-0722026dc820-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62519) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1653.658097] env[62519]: INFO nova.compute.manager [None req-6efd5194-063f-45af-b886-0e88145beea5 tempest-ServerTagsTestJSON-1513198723 tempest-ServerTagsTestJSON-1513198723-project-member] [instance: 8659f63a-5df9-4ff8-84dd-0722026dc820] Terminating instance [ 1653.938086] env[62519]: DEBUG nova.compute.manager [req-9ccd7928-ee98-41ae-b592-e888f5463cef req-15ababbe-4fed-44d3-807e-ccf386446a88 service nova] [instance: 09eefc1a-011b-4d2c-ab75-a1fcee740907] Received event network-vif-deleted-7948e8a1-83dc-4329-aa44-813b4e25c1c7 {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1653.938532] env[62519]: INFO nova.compute.manager [req-9ccd7928-ee98-41ae-b592-e888f5463cef req-15ababbe-4fed-44d3-807e-ccf386446a88 service nova] [instance: 09eefc1a-011b-4d2c-ab75-a1fcee740907] Neutron deleted interface 7948e8a1-83dc-4329-aa44-813b4e25c1c7; detaching it from the instance and deleting it from the info cache [ 1653.938606] env[62519]: DEBUG nova.network.neutron [req-9ccd7928-ee98-41ae-b592-e888f5463cef req-15ababbe-4fed-44d3-807e-ccf386446a88 service nova] [instance: 09eefc1a-011b-4d2c-ab75-a1fcee740907] Updating instance_info_cache with network_info: [] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1653.946167] env[62519]: DEBUG oslo_vmware.api [None req-f0afbcc8-8045-4111-b3d2-45982468a24d tempest-ServerGroupTestJSON-1731423365 tempest-ServerGroupTestJSON-1731423365-project-member] Task: {'id': task-1802660, 'name': Rename_Task, 'duration_secs': 0.158344} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1653.946167] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-f0afbcc8-8045-4111-b3d2-45982468a24d tempest-ServerGroupTestJSON-1731423365 tempest-ServerGroupTestJSON-1731423365-project-member] [instance: 45d1aa86-a5c8-4e75-a6c8-5f55461702f8] Powering on the VM {{(pid=62519) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1653.946167] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a4bfdd08-9903-4526-acb7-b7830ce8f1b4 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1653.956098] env[62519]: DEBUG oslo_vmware.api [None req-f0afbcc8-8045-4111-b3d2-45982468a24d tempest-ServerGroupTestJSON-1731423365 tempest-ServerGroupTestJSON-1731423365-project-member] Waiting for the task: (returnval){ [ 1653.956098] env[62519]: value = "task-1802661" [ 1653.956098] env[62519]: _type = "Task" [ 1653.956098] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1653.966468] env[62519]: DEBUG oslo_vmware.api [None req-f0afbcc8-8045-4111-b3d2-45982468a24d tempest-ServerGroupTestJSON-1731423365 tempest-ServerGroupTestJSON-1731423365-project-member] Task: {'id': task-1802661, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1654.032173] env[62519]: DEBUG nova.scheduler.client.report [None req-aeecf1a6-198a-4c5f-8645-99f7ca1d1fc0 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Inventory has not changed for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 157, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1654.161593] env[62519]: DEBUG nova.compute.manager [None req-6efd5194-063f-45af-b886-0e88145beea5 tempest-ServerTagsTestJSON-1513198723 tempest-ServerTagsTestJSON-1513198723-project-member] [instance: 8659f63a-5df9-4ff8-84dd-0722026dc820] Start destroying the instance on the hypervisor. {{(pid=62519) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3166}} [ 1654.161908] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-6efd5194-063f-45af-b886-0e88145beea5 tempest-ServerTagsTestJSON-1513198723 tempest-ServerTagsTestJSON-1513198723-project-member] [instance: 8659f63a-5df9-4ff8-84dd-0722026dc820] Destroying instance {{(pid=62519) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1654.162764] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0279ed5c-6ae9-438a-abad-77610cbe0b58 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1654.171914] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-6efd5194-063f-45af-b886-0e88145beea5 tempest-ServerTagsTestJSON-1513198723 tempest-ServerTagsTestJSON-1513198723-project-member] [instance: 8659f63a-5df9-4ff8-84dd-0722026dc820] Powering off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1654.172187] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-7210e0d1-427b-4159-a6c8-4d9859a005ea {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1654.179513] env[62519]: DEBUG oslo_vmware.api [None req-6efd5194-063f-45af-b886-0e88145beea5 tempest-ServerTagsTestJSON-1513198723 tempest-ServerTagsTestJSON-1513198723-project-member] Waiting for the task: (returnval){ [ 1654.179513] env[62519]: value = "task-1802662" [ 1654.179513] env[62519]: _type = "Task" [ 1654.179513] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1654.188502] env[62519]: DEBUG oslo_vmware.api [None req-6efd5194-063f-45af-b886-0e88145beea5 tempest-ServerTagsTestJSON-1513198723 tempest-ServerTagsTestJSON-1513198723-project-member] Task: {'id': task-1802662, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1654.341474] env[62519]: DEBUG nova.network.neutron [-] [instance: 09eefc1a-011b-4d2c-ab75-a1fcee740907] Updating instance_info_cache with network_info: [] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1654.442195] env[62519]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-1f28aa6a-9d62-4fc6-b6be-bfe58a61fe05 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1654.454236] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e9c2ae1-8e3c-4c9f-8858-0beb82511518 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1654.479607] env[62519]: DEBUG oslo_vmware.api [None req-f0afbcc8-8045-4111-b3d2-45982468a24d tempest-ServerGroupTestJSON-1731423365 tempest-ServerGroupTestJSON-1731423365-project-member] Task: {'id': task-1802661, 'name': PowerOnVM_Task, 'duration_secs': 0.503171} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1654.479705] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-f0afbcc8-8045-4111-b3d2-45982468a24d tempest-ServerGroupTestJSON-1731423365 tempest-ServerGroupTestJSON-1731423365-project-member] [instance: 45d1aa86-a5c8-4e75-a6c8-5f55461702f8] Powered on the VM {{(pid=62519) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1654.479999] env[62519]: INFO nova.compute.manager [None req-f0afbcc8-8045-4111-b3d2-45982468a24d tempest-ServerGroupTestJSON-1731423365 tempest-ServerGroupTestJSON-1731423365-project-member] [instance: 45d1aa86-a5c8-4e75-a6c8-5f55461702f8] Took 8.76 seconds to spawn the instance on the hypervisor. [ 1654.480219] env[62519]: DEBUG nova.compute.manager [None req-f0afbcc8-8045-4111-b3d2-45982468a24d tempest-ServerGroupTestJSON-1731423365 tempest-ServerGroupTestJSON-1731423365-project-member] [instance: 45d1aa86-a5c8-4e75-a6c8-5f55461702f8] Checking state {{(pid=62519) _get_power_state /opt/stack/nova/nova/compute/manager.py:1799}} [ 1654.481310] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58b1e1a3-d1d6-4462-b26f-63fbdcdeb19a {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1654.507796] env[62519]: DEBUG nova.compute.manager [req-9ccd7928-ee98-41ae-b592-e888f5463cef req-15ababbe-4fed-44d3-807e-ccf386446a88 service nova] [instance: 09eefc1a-011b-4d2c-ab75-a1fcee740907] Detach interface failed, port_id=7948e8a1-83dc-4329-aa44-813b4e25c1c7, reason: Instance 09eefc1a-011b-4d2c-ab75-a1fcee740907 could not be found. 
{{(pid=62519) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11455}} [ 1654.534935] env[62519]: DEBUG oslo_concurrency.lockutils [None req-9fb41100-265a-40a3-9269-3fe1f1b74f74 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Acquiring lock "88f9351c-253b-49dd-a88e-b8585ea742ac" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1654.535887] env[62519]: DEBUG oslo_concurrency.lockutils [None req-9fb41100-265a-40a3-9269-3fe1f1b74f74 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Lock "88f9351c-253b-49dd-a88e-b8585ea742ac" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1654.536898] env[62519]: DEBUG oslo_concurrency.lockutils [None req-aeecf1a6-198a-4c5f-8645-99f7ca1d1fc0 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.594s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1654.537394] env[62519]: DEBUG nova.compute.manager [None req-aeecf1a6-198a-4c5f-8645-99f7ca1d1fc0 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] [instance: ad0af10d-5063-4344-b12f-1d3ee9ea1090] Start building networks asynchronously for instance. {{(pid=62519) _build_resources /opt/stack/nova/nova/compute/manager.py:2838}} [ 1654.540242] env[62519]: DEBUG oslo_concurrency.lockutils [None req-9d6c69a6-1fea-4ba8-a2c2-a90023b1aa8f tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 27.936s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1654.540478] env[62519]: DEBUG nova.objects.instance [None req-9d6c69a6-1fea-4ba8-a2c2-a90023b1aa8f tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] [instance: 34d2991e-b6df-473d-8994-e45ff57ef131] Trying to apply a migration context that does not seem to be set for this instance {{(pid=62519) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1654.689792] env[62519]: DEBUG oslo_vmware.api [None req-6efd5194-063f-45af-b886-0e88145beea5 tempest-ServerTagsTestJSON-1513198723 tempest-ServerTagsTestJSON-1513198723-project-member] Task: {'id': task-1802662, 'name': PowerOffVM_Task, 'duration_secs': 0.199015} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1654.690181] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-6efd5194-063f-45af-b886-0e88145beea5 tempest-ServerTagsTestJSON-1513198723 tempest-ServerTagsTestJSON-1513198723-project-member] [instance: 8659f63a-5df9-4ff8-84dd-0722026dc820] Powered off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1654.690522] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-6efd5194-063f-45af-b886-0e88145beea5 tempest-ServerTagsTestJSON-1513198723 tempest-ServerTagsTestJSON-1513198723-project-member] [instance: 8659f63a-5df9-4ff8-84dd-0722026dc820] Unregistering the VM {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1654.690814] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-6ed28b0c-de10-4cbb-863f-acbe9bfc5189 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1654.844715] env[62519]: INFO nova.compute.manager [-] [instance: 09eefc1a-011b-4d2c-ab75-a1fcee740907] Took 1.26 seconds to deallocate network for instance. [ 1654.910838] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-6efd5194-063f-45af-b886-0e88145beea5 tempest-ServerTagsTestJSON-1513198723 tempest-ServerTagsTestJSON-1513198723-project-member] [instance: 8659f63a-5df9-4ff8-84dd-0722026dc820] Unregistered the VM {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1654.911174] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-6efd5194-063f-45af-b886-0e88145beea5 tempest-ServerTagsTestJSON-1513198723 tempest-ServerTagsTestJSON-1513198723-project-member] [instance: 8659f63a-5df9-4ff8-84dd-0722026dc820] Deleting contents of the VM from datastore datastore1 {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1654.911588] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-6efd5194-063f-45af-b886-0e88145beea5 tempest-ServerTagsTestJSON-1513198723 tempest-ServerTagsTestJSON-1513198723-project-member] Deleting the datastore file [datastore1] 8659f63a-5df9-4ff8-84dd-0722026dc820 {{(pid=62519) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1654.911840] env[62519]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f78be8d6-451e-46d0-8184-a23ab51d7098 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1654.919900] env[62519]: DEBUG oslo_vmware.api [None req-6efd5194-063f-45af-b886-0e88145beea5 tempest-ServerTagsTestJSON-1513198723 tempest-ServerTagsTestJSON-1513198723-project-member] Waiting for the task: (returnval){ [ 1654.919900] env[62519]: value = "task-1802664" [ 1654.919900] env[62519]: _type = "Task" [ 1654.919900] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1654.938891] env[62519]: DEBUG oslo_vmware.api [None req-6efd5194-063f-45af-b886-0e88145beea5 tempest-ServerTagsTestJSON-1513198723 tempest-ServerTagsTestJSON-1513198723-project-member] Task: {'id': task-1802664, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1655.025900] env[62519]: INFO nova.compute.manager [None req-f0afbcc8-8045-4111-b3d2-45982468a24d tempest-ServerGroupTestJSON-1731423365 tempest-ServerGroupTestJSON-1731423365-project-member] [instance: 45d1aa86-a5c8-4e75-a6c8-5f55461702f8] Took 41.69 seconds to build instance. [ 1655.041302] env[62519]: DEBUG nova.compute.manager [None req-9fb41100-265a-40a3-9269-3fe1f1b74f74 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] [instance: 88f9351c-253b-49dd-a88e-b8585ea742ac] Starting instance... {{(pid=62519) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2441}} [ 1655.044700] env[62519]: DEBUG nova.compute.utils [None req-aeecf1a6-198a-4c5f-8645-99f7ca1d1fc0 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Using /dev/sd instead of None {{(pid=62519) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1655.049242] env[62519]: DEBUG nova.compute.manager [None req-aeecf1a6-198a-4c5f-8645-99f7ca1d1fc0 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] [instance: ad0af10d-5063-4344-b12f-1d3ee9ea1090] Allocating IP information in the background. {{(pid=62519) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1989}} [ 1655.049425] env[62519]: DEBUG nova.network.neutron [None req-aeecf1a6-198a-4c5f-8645-99f7ca1d1fc0 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] [instance: ad0af10d-5063-4344-b12f-1d3ee9ea1090] allocate_for_instance() {{(pid=62519) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1655.103725] env[62519]: DEBUG nova.policy [None req-aeecf1a6-198a-4c5f-8645-99f7ca1d1fc0 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '12d88579b24941a0be744afe44126360', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'df6727c290724a8ebef5188c77e91399', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62519) authorize /opt/stack/nova/nova/policy.py:192}} [ 1655.351254] env[62519]: DEBUG oslo_concurrency.lockutils [None req-909c1165-bf3e-436f-ab29-cc0666196a96 tempest-FloatingIPsAssociationTestJSON-131832216 tempest-FloatingIPsAssociationTestJSON-131832216-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1655.373696] env[62519]: DEBUG nova.network.neutron [None req-aeecf1a6-198a-4c5f-8645-99f7ca1d1fc0 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] [instance: ad0af10d-5063-4344-b12f-1d3ee9ea1090] Successfully created port: 925db578-876c-414b-8500-56c73c7cdfe8 {{(pid=62519) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1655.433582] env[62519]: DEBUG oslo_vmware.api [None req-6efd5194-063f-45af-b886-0e88145beea5 tempest-ServerTagsTestJSON-1513198723 
tempest-ServerTagsTestJSON-1513198723-project-member] Task: {'id': task-1802664, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.385259} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1655.433891] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-6efd5194-063f-45af-b886-0e88145beea5 tempest-ServerTagsTestJSON-1513198723 tempest-ServerTagsTestJSON-1513198723-project-member] Deleted the datastore file {{(pid=62519) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1655.434094] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-6efd5194-063f-45af-b886-0e88145beea5 tempest-ServerTagsTestJSON-1513198723 tempest-ServerTagsTestJSON-1513198723-project-member] [instance: 8659f63a-5df9-4ff8-84dd-0722026dc820] Deleted contents of the VM from datastore datastore1 {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1655.434283] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-6efd5194-063f-45af-b886-0e88145beea5 tempest-ServerTagsTestJSON-1513198723 tempest-ServerTagsTestJSON-1513198723-project-member] [instance: 8659f63a-5df9-4ff8-84dd-0722026dc820] Instance destroyed {{(pid=62519) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1655.434498] env[62519]: INFO nova.compute.manager [None req-6efd5194-063f-45af-b886-0e88145beea5 tempest-ServerTagsTestJSON-1513198723 tempest-ServerTagsTestJSON-1513198723-project-member] [instance: 8659f63a-5df9-4ff8-84dd-0722026dc820] Took 1.27 seconds to destroy the instance on the hypervisor. [ 1655.434748] env[62519]: DEBUG oslo.service.loopingcall [None req-6efd5194-063f-45af-b886-0e88145beea5 tempest-ServerTagsTestJSON-1513198723 tempest-ServerTagsTestJSON-1513198723-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62519) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1655.434950] env[62519]: DEBUG nova.compute.manager [-] [instance: 8659f63a-5df9-4ff8-84dd-0722026dc820] Deallocating network for instance {{(pid=62519) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2297}} [ 1655.435090] env[62519]: DEBUG nova.network.neutron [-] [instance: 8659f63a-5df9-4ff8-84dd-0722026dc820] deallocate_for_instance() {{(pid=62519) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1655.487771] env[62519]: DEBUG oslo_concurrency.lockutils [None req-4054b116-0ba8-401b-909e-ea20d3c324af tempest-ServerGroupTestJSON-1731423365 tempest-ServerGroupTestJSON-1731423365-project-member] Acquiring lock "45d1aa86-a5c8-4e75-a6c8-5f55461702f8" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1655.552513] env[62519]: DEBUG nova.compute.manager [None req-aeecf1a6-198a-4c5f-8645-99f7ca1d1fc0 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] [instance: ad0af10d-5063-4344-b12f-1d3ee9ea1090] Start building block device mappings for instance. 
{{(pid=62519) _build_resources /opt/stack/nova/nova/compute/manager.py:2873}} [ 1655.557321] env[62519]: DEBUG oslo_concurrency.lockutils [None req-9d6c69a6-1fea-4ba8-a2c2-a90023b1aa8f tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.016s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1655.560806] env[62519]: DEBUG oslo_concurrency.lockutils [None req-ee14a42e-677f-45a8-a61d-6bf7de76787d tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 28.295s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1655.561047] env[62519]: DEBUG nova.objects.instance [None req-ee14a42e-677f-45a8-a61d-6bf7de76787d tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] Lazy-loading 'resources' on Instance uuid 27f9e890-4733-43aa-9bf1-351d42d75418 {{(pid=62519) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1655.584547] env[62519]: DEBUG oslo_concurrency.lockutils [None req-9fb41100-265a-40a3-9269-3fe1f1b74f74 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1655.966161] env[62519]: DEBUG nova.compute.manager [req-b6d19e88-b5b3-423e-86a4-6545f2bcb8dc req-6c8e9b23-dab3-4eaa-8189-4a6caf39b9cb service nova] [instance: 8659f63a-5df9-4ff8-84dd-0722026dc820] Received event network-vif-deleted-b025403f-8225-412e-908e-f5078a442ed2 {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1655.966457] env[62519]: INFO nova.compute.manager [req-b6d19e88-b5b3-423e-86a4-6545f2bcb8dc req-6c8e9b23-dab3-4eaa-8189-4a6caf39b9cb service nova] [instance: 8659f63a-5df9-4ff8-84dd-0722026dc820] Neutron deleted interface b025403f-8225-412e-908e-f5078a442ed2; detaching it from the instance and deleting it from the info cache [ 1655.966736] env[62519]: DEBUG nova.network.neutron [req-b6d19e88-b5b3-423e-86a4-6545f2bcb8dc req-6c8e9b23-dab3-4eaa-8189-4a6caf39b9cb service nova] [instance: 8659f63a-5df9-4ff8-84dd-0722026dc820] Updating instance_info_cache with network_info: [] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1656.337941] env[62519]: DEBUG nova.network.neutron [-] [instance: 8659f63a-5df9-4ff8-84dd-0722026dc820] Updating instance_info_cache with network_info: [] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1656.467068] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab4c69a5-02fc-4156-a0b8-1cabcf5231fd {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1656.470412] env[62519]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-384f9b50-7c27-4cec-9819-34f7f8ce958c {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1656.477356] 
env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17f51916-57b2-4d6f-a605-3648183a984e {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1656.484443] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-afc2e2b8-2be0-47b9-aab6-a12795a8a01c {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1656.520049] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94f5d2b9-31d7-4339-ae71-b359b5276549 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1656.529412] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-486bd07f-fcc7-4b9c-9667-2fcb3ffba524 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1656.546128] env[62519]: DEBUG oslo_concurrency.lockutils [None req-f0afbcc8-8045-4111-b3d2-45982468a24d tempest-ServerGroupTestJSON-1731423365 tempest-ServerGroupTestJSON-1731423365-project-member] Lock "45d1aa86-a5c8-4e75-a6c8-5f55461702f8" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 49.690s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1656.546483] env[62519]: DEBUG nova.compute.manager [req-b6d19e88-b5b3-423e-86a4-6545f2bcb8dc req-6c8e9b23-dab3-4eaa-8189-4a6caf39b9cb service nova] [instance: 8659f63a-5df9-4ff8-84dd-0722026dc820] Detach interface failed, port_id=b025403f-8225-412e-908e-f5078a442ed2, reason: Instance 8659f63a-5df9-4ff8-84dd-0722026dc820 could not be found. 
{{(pid=62519) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11455}} [ 1656.546889] env[62519]: DEBUG oslo_concurrency.lockutils [None req-4054b116-0ba8-401b-909e-ea20d3c324af tempest-ServerGroupTestJSON-1731423365 tempest-ServerGroupTestJSON-1731423365-project-member] Lock "45d1aa86-a5c8-4e75-a6c8-5f55461702f8" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 1.059s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1656.547117] env[62519]: DEBUG oslo_concurrency.lockutils [None req-4054b116-0ba8-401b-909e-ea20d3c324af tempest-ServerGroupTestJSON-1731423365 tempest-ServerGroupTestJSON-1731423365-project-member] Acquiring lock "45d1aa86-a5c8-4e75-a6c8-5f55461702f8-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1656.547347] env[62519]: DEBUG oslo_concurrency.lockutils [None req-4054b116-0ba8-401b-909e-ea20d3c324af tempest-ServerGroupTestJSON-1731423365 tempest-ServerGroupTestJSON-1731423365-project-member] Lock "45d1aa86-a5c8-4e75-a6c8-5f55461702f8-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1656.547514] env[62519]: DEBUG oslo_concurrency.lockutils [None req-4054b116-0ba8-401b-909e-ea20d3c324af tempest-ServerGroupTestJSON-1731423365 tempest-ServerGroupTestJSON-1731423365-project-member] Lock "45d1aa86-a5c8-4e75-a6c8-5f55461702f8-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1656.549586] env[62519]: INFO nova.compute.manager [None req-4054b116-0ba8-401b-909e-ea20d3c324af tempest-ServerGroupTestJSON-1731423365 tempest-ServerGroupTestJSON-1731423365-project-member] [instance: 45d1aa86-a5c8-4e75-a6c8-5f55461702f8] Terminating instance [ 1656.560476] env[62519]: DEBUG nova.compute.provider_tree [None req-ee14a42e-677f-45a8-a61d-6bf7de76787d tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] Inventory has not changed in ProviderTree for provider: f8ca0d98-9158-4b85-ae0e-b106f966dd44 {{(pid=62519) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1656.562612] env[62519]: DEBUG nova.compute.manager [None req-aeecf1a6-198a-4c5f-8645-99f7ca1d1fc0 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] [instance: ad0af10d-5063-4344-b12f-1d3ee9ea1090] Start spawning the instance on the hypervisor. 
{{(pid=62519) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2647}} [ 1656.591352] env[62519]: DEBUG nova.virt.hardware [None req-aeecf1a6-198a-4c5f-8645-99f7ca1d1fc0 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T07:56:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T07:55:55Z,direct_url=,disk_format='vmdk',id=15793716-f1d9-4a86-9030-717adf498693,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4069337684d348e0a1ab4eb6a1a2b14d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T07:55:56Z,virtual_size=,visibility=), allow threads: False {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1656.591611] env[62519]: DEBUG nova.virt.hardware [None req-aeecf1a6-198a-4c5f-8645-99f7ca1d1fc0 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Flavor limits 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1656.591812] env[62519]: DEBUG nova.virt.hardware [None req-aeecf1a6-198a-4c5f-8645-99f7ca1d1fc0 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Image limits 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1656.591989] env[62519]: DEBUG nova.virt.hardware [None req-aeecf1a6-198a-4c5f-8645-99f7ca1d1fc0 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Flavor pref 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1656.592159] env[62519]: DEBUG nova.virt.hardware [None req-aeecf1a6-198a-4c5f-8645-99f7ca1d1fc0 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Image pref 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1656.592306] env[62519]: DEBUG nova.virt.hardware [None req-aeecf1a6-198a-4c5f-8645-99f7ca1d1fc0 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1656.592539] env[62519]: DEBUG nova.virt.hardware [None req-aeecf1a6-198a-4c5f-8645-99f7ca1d1fc0 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1656.592714] env[62519]: DEBUG nova.virt.hardware [None req-aeecf1a6-198a-4c5f-8645-99f7ca1d1fc0 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62519) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1656.592882] 
env[62519]: DEBUG nova.virt.hardware [None req-aeecf1a6-198a-4c5f-8645-99f7ca1d1fc0 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Got 1 possible topologies {{(pid=62519) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1656.593053] env[62519]: DEBUG nova.virt.hardware [None req-aeecf1a6-198a-4c5f-8645-99f7ca1d1fc0 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1656.593238] env[62519]: DEBUG nova.virt.hardware [None req-aeecf1a6-198a-4c5f-8645-99f7ca1d1fc0 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1656.594314] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84c58397-4e43-4299-b138-124b91e55dca {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1656.603403] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d901f64a-39bc-45bb-96a1-a046674b0921 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1656.840695] env[62519]: INFO nova.compute.manager [-] [instance: 8659f63a-5df9-4ff8-84dd-0722026dc820] Took 1.41 seconds to deallocate network for instance. [ 1657.004398] env[62519]: DEBUG nova.network.neutron [None req-aeecf1a6-198a-4c5f-8645-99f7ca1d1fc0 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] [instance: ad0af10d-5063-4344-b12f-1d3ee9ea1090] Successfully updated port: 925db578-876c-414b-8500-56c73c7cdfe8 {{(pid=62519) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1657.063122] env[62519]: DEBUG nova.compute.manager [None req-4054b116-0ba8-401b-909e-ea20d3c324af tempest-ServerGroupTestJSON-1731423365 tempest-ServerGroupTestJSON-1731423365-project-member] [instance: 45d1aa86-a5c8-4e75-a6c8-5f55461702f8] Start destroying the instance on the hypervisor. 
{{(pid=62519) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3166}} [ 1657.063415] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-4054b116-0ba8-401b-909e-ea20d3c324af tempest-ServerGroupTestJSON-1731423365 tempest-ServerGroupTestJSON-1731423365-project-member] [instance: 45d1aa86-a5c8-4e75-a6c8-5f55461702f8] Destroying instance {{(pid=62519) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1657.066866] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40fec77d-d538-4892-af4a-e48c11e3ac21 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1657.068140] env[62519]: DEBUG nova.scheduler.client.report [None req-ee14a42e-677f-45a8-a61d-6bf7de76787d tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] Inventory has not changed for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 157, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1657.078124] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-4054b116-0ba8-401b-909e-ea20d3c324af tempest-ServerGroupTestJSON-1731423365 tempest-ServerGroupTestJSON-1731423365-project-member] [instance: 45d1aa86-a5c8-4e75-a6c8-5f55461702f8] Powering off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1657.078383] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e05b4ea4-c55d-4bae-bfae-2cc87255208a {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1657.087484] env[62519]: DEBUG oslo_vmware.api [None req-4054b116-0ba8-401b-909e-ea20d3c324af tempest-ServerGroupTestJSON-1731423365 tempest-ServerGroupTestJSON-1731423365-project-member] Waiting for the task: (returnval){ [ 1657.087484] env[62519]: value = "task-1802665" [ 1657.087484] env[62519]: _type = "Task" [ 1657.087484] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1657.097947] env[62519]: DEBUG oslo_vmware.api [None req-4054b116-0ba8-401b-909e-ea20d3c324af tempest-ServerGroupTestJSON-1731423365 tempest-ServerGroupTestJSON-1731423365-project-member] Task: {'id': task-1802665, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1657.347754] env[62519]: DEBUG oslo_concurrency.lockutils [None req-6efd5194-063f-45af-b886-0e88145beea5 tempest-ServerTagsTestJSON-1513198723 tempest-ServerTagsTestJSON-1513198723-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1657.514858] env[62519]: DEBUG oslo_concurrency.lockutils [None req-aeecf1a6-198a-4c5f-8645-99f7ca1d1fc0 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Acquiring lock "refresh_cache-ad0af10d-5063-4344-b12f-1d3ee9ea1090" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1657.514858] env[62519]: DEBUG oslo_concurrency.lockutils [None req-aeecf1a6-198a-4c5f-8645-99f7ca1d1fc0 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Acquired lock "refresh_cache-ad0af10d-5063-4344-b12f-1d3ee9ea1090" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1657.514858] env[62519]: DEBUG nova.network.neutron [None req-aeecf1a6-198a-4c5f-8645-99f7ca1d1fc0 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] [instance: ad0af10d-5063-4344-b12f-1d3ee9ea1090] Building network info cache for instance {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1657.575523] env[62519]: DEBUG oslo_concurrency.lockutils [None req-ee14a42e-677f-45a8-a61d-6bf7de76787d tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.012s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1657.575523] env[62519]: DEBUG oslo_concurrency.lockutils [None req-ad7bb8ca-d7d9-41c0-86cb-2bc2a1a52dc5 tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 29.958s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1657.580692] env[62519]: DEBUG nova.objects.instance [None req-ad7bb8ca-d7d9-41c0-86cb-2bc2a1a52dc5 tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] Lazy-loading 'resources' on Instance uuid 1d4b14d3-8832-457e-aaed-462236555f57 {{(pid=62519) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1657.602080] env[62519]: DEBUG oslo_vmware.api [None req-4054b116-0ba8-401b-909e-ea20d3c324af tempest-ServerGroupTestJSON-1731423365 tempest-ServerGroupTestJSON-1731423365-project-member] Task: {'id': task-1802665, 'name': PowerOffVM_Task, 'duration_secs': 0.230647} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1657.602406] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-4054b116-0ba8-401b-909e-ea20d3c324af tempest-ServerGroupTestJSON-1731423365 tempest-ServerGroupTestJSON-1731423365-project-member] [instance: 45d1aa86-a5c8-4e75-a6c8-5f55461702f8] Powered off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1657.603024] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-4054b116-0ba8-401b-909e-ea20d3c324af tempest-ServerGroupTestJSON-1731423365 tempest-ServerGroupTestJSON-1731423365-project-member] [instance: 45d1aa86-a5c8-4e75-a6c8-5f55461702f8] Unregistering the VM {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1657.603365] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-476830df-7bf4-4bba-9ae0-03acd230b529 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1657.606021] env[62519]: INFO nova.scheduler.client.report [None req-ee14a42e-677f-45a8-a61d-6bf7de76787d tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] Deleted allocations for instance 27f9e890-4733-43aa-9bf1-351d42d75418 [ 1657.789507] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-4054b116-0ba8-401b-909e-ea20d3c324af tempest-ServerGroupTestJSON-1731423365 tempest-ServerGroupTestJSON-1731423365-project-member] [instance: 45d1aa86-a5c8-4e75-a6c8-5f55461702f8] Unregistered the VM {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1657.789809] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-4054b116-0ba8-401b-909e-ea20d3c324af tempest-ServerGroupTestJSON-1731423365 tempest-ServerGroupTestJSON-1731423365-project-member] [instance: 45d1aa86-a5c8-4e75-a6c8-5f55461702f8] Deleting contents of the VM from datastore datastore1 {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1657.789919] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-4054b116-0ba8-401b-909e-ea20d3c324af tempest-ServerGroupTestJSON-1731423365 tempest-ServerGroupTestJSON-1731423365-project-member] Deleting the datastore file [datastore1] 45d1aa86-a5c8-4e75-a6c8-5f55461702f8 {{(pid=62519) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1657.790213] env[62519]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-8b4242ae-2c1f-4553-9fa1-b69af15fd161 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1657.797367] env[62519]: DEBUG oslo_vmware.api [None req-4054b116-0ba8-401b-909e-ea20d3c324af tempest-ServerGroupTestJSON-1731423365 tempest-ServerGroupTestJSON-1731423365-project-member] Waiting for the task: (returnval){ [ 1657.797367] env[62519]: value = "task-1802667" [ 1657.797367] env[62519]: _type = "Task" [ 1657.797367] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1657.806194] env[62519]: DEBUG oslo_vmware.api [None req-4054b116-0ba8-401b-909e-ea20d3c324af tempest-ServerGroupTestJSON-1731423365 tempest-ServerGroupTestJSON-1731423365-project-member] Task: {'id': task-1802667, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1657.992329] env[62519]: DEBUG nova.compute.manager [req-95e81bc0-bc3f-468d-a51b-1823b193b2ce req-58278019-8512-40f6-ae6a-43578f2f03cf service nova] [instance: ad0af10d-5063-4344-b12f-1d3ee9ea1090] Received event network-vif-plugged-925db578-876c-414b-8500-56c73c7cdfe8 {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1657.992329] env[62519]: DEBUG oslo_concurrency.lockutils [req-95e81bc0-bc3f-468d-a51b-1823b193b2ce req-58278019-8512-40f6-ae6a-43578f2f03cf service nova] Acquiring lock "ad0af10d-5063-4344-b12f-1d3ee9ea1090-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1657.992478] env[62519]: DEBUG oslo_concurrency.lockutils [req-95e81bc0-bc3f-468d-a51b-1823b193b2ce req-58278019-8512-40f6-ae6a-43578f2f03cf service nova] Lock "ad0af10d-5063-4344-b12f-1d3ee9ea1090-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1657.992645] env[62519]: DEBUG oslo_concurrency.lockutils [req-95e81bc0-bc3f-468d-a51b-1823b193b2ce req-58278019-8512-40f6-ae6a-43578f2f03cf service nova] Lock "ad0af10d-5063-4344-b12f-1d3ee9ea1090-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1657.992885] env[62519]: DEBUG nova.compute.manager [req-95e81bc0-bc3f-468d-a51b-1823b193b2ce req-58278019-8512-40f6-ae6a-43578f2f03cf service nova] [instance: ad0af10d-5063-4344-b12f-1d3ee9ea1090] No waiting events found dispatching network-vif-plugged-925db578-876c-414b-8500-56c73c7cdfe8 {{(pid=62519) pop_instance_event /opt/stack/nova/nova/compute/manager.py:323}} [ 1657.993078] env[62519]: WARNING nova.compute.manager [req-95e81bc0-bc3f-468d-a51b-1823b193b2ce req-58278019-8512-40f6-ae6a-43578f2f03cf service nova] [instance: ad0af10d-5063-4344-b12f-1d3ee9ea1090] Received unexpected event network-vif-plugged-925db578-876c-414b-8500-56c73c7cdfe8 for instance with vm_state building and task_state spawning. [ 1657.993242] env[62519]: DEBUG nova.compute.manager [req-95e81bc0-bc3f-468d-a51b-1823b193b2ce req-58278019-8512-40f6-ae6a-43578f2f03cf service nova] [instance: ad0af10d-5063-4344-b12f-1d3ee9ea1090] Received event network-changed-925db578-876c-414b-8500-56c73c7cdfe8 {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1657.993393] env[62519]: DEBUG nova.compute.manager [req-95e81bc0-bc3f-468d-a51b-1823b193b2ce req-58278019-8512-40f6-ae6a-43578f2f03cf service nova] [instance: ad0af10d-5063-4344-b12f-1d3ee9ea1090] Refreshing instance network info cache due to event network-changed-925db578-876c-414b-8500-56c73c7cdfe8. 
{{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11628}} [ 1657.993558] env[62519]: DEBUG oslo_concurrency.lockutils [req-95e81bc0-bc3f-468d-a51b-1823b193b2ce req-58278019-8512-40f6-ae6a-43578f2f03cf service nova] Acquiring lock "refresh_cache-ad0af10d-5063-4344-b12f-1d3ee9ea1090" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1658.045236] env[62519]: DEBUG nova.network.neutron [None req-aeecf1a6-198a-4c5f-8645-99f7ca1d1fc0 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] [instance: ad0af10d-5063-4344-b12f-1d3ee9ea1090] Instance cache missing network info. {{(pid=62519) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1658.117028] env[62519]: DEBUG oslo_concurrency.lockutils [None req-ee14a42e-677f-45a8-a61d-6bf7de76787d tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] Lock "27f9e890-4733-43aa-9bf1-351d42d75418" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 34.570s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1658.181786] env[62519]: DEBUG nova.network.neutron [None req-aeecf1a6-198a-4c5f-8645-99f7ca1d1fc0 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] [instance: ad0af10d-5063-4344-b12f-1d3ee9ea1090] Updating instance_info_cache with network_info: [{"id": "925db578-876c-414b-8500-56c73c7cdfe8", "address": "fa:16:3e:d9:6c:87", "network": {"id": "14e63db8-c42b-42e5-bd96-cab60c163758", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1328688204-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "df6727c290724a8ebef5188c77e91399", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2be3fdb5-359e-43bd-8c20-2ff00e81db55", "external-id": "nsx-vlan-transportzone-986", "segmentation_id": 986, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap925db578-87", "ovs_interfaceid": "925db578-876c-414b-8500-56c73c7cdfe8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1658.308764] env[62519]: DEBUG oslo_vmware.api [None req-4054b116-0ba8-401b-909e-ea20d3c324af tempest-ServerGroupTestJSON-1731423365 tempest-ServerGroupTestJSON-1731423365-project-member] Task: {'id': task-1802667, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.133207} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1658.311591] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-4054b116-0ba8-401b-909e-ea20d3c324af tempest-ServerGroupTestJSON-1731423365 tempest-ServerGroupTestJSON-1731423365-project-member] Deleted the datastore file {{(pid=62519) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1658.311821] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-4054b116-0ba8-401b-909e-ea20d3c324af tempest-ServerGroupTestJSON-1731423365 tempest-ServerGroupTestJSON-1731423365-project-member] [instance: 45d1aa86-a5c8-4e75-a6c8-5f55461702f8] Deleted contents of the VM from datastore datastore1 {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1658.312015] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-4054b116-0ba8-401b-909e-ea20d3c324af tempest-ServerGroupTestJSON-1731423365 tempest-ServerGroupTestJSON-1731423365-project-member] [instance: 45d1aa86-a5c8-4e75-a6c8-5f55461702f8] Instance destroyed {{(pid=62519) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1658.312197] env[62519]: INFO nova.compute.manager [None req-4054b116-0ba8-401b-909e-ea20d3c324af tempest-ServerGroupTestJSON-1731423365 tempest-ServerGroupTestJSON-1731423365-project-member] [instance: 45d1aa86-a5c8-4e75-a6c8-5f55461702f8] Took 1.25 seconds to destroy the instance on the hypervisor. [ 1658.312440] env[62519]: DEBUG oslo.service.loopingcall [None req-4054b116-0ba8-401b-909e-ea20d3c324af tempest-ServerGroupTestJSON-1731423365 tempest-ServerGroupTestJSON-1731423365-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62519) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1658.313112] env[62519]: DEBUG nova.compute.manager [-] [instance: 45d1aa86-a5c8-4e75-a6c8-5f55461702f8] Deallocating network for instance {{(pid=62519) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2297}} [ 1658.313200] env[62519]: DEBUG nova.network.neutron [-] [instance: 45d1aa86-a5c8-4e75-a6c8-5f55461702f8] deallocate_for_instance() {{(pid=62519) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1658.501043] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3308e849-54e5-4d46-90db-e665b658bac5 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1658.511135] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0808dc7b-d902-41fe-a94c-3a4d2e5a6b61 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1658.541771] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b9b5f30-680d-4790-95be-ee60656fe227 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1658.550912] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d548176b-86cb-4adf-97f2-9fbb380de2ea {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1658.565374] env[62519]: DEBUG nova.compute.provider_tree [None req-ad7bb8ca-d7d9-41c0-86cb-2bc2a1a52dc5 tempest-ListImageFiltersTestJSON-1303275626 
tempest-ListImageFiltersTestJSON-1303275626-project-member] Inventory has not changed in ProviderTree for provider: f8ca0d98-9158-4b85-ae0e-b106f966dd44 {{(pid=62519) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1658.684857] env[62519]: DEBUG oslo_concurrency.lockutils [None req-aeecf1a6-198a-4c5f-8645-99f7ca1d1fc0 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Releasing lock "refresh_cache-ad0af10d-5063-4344-b12f-1d3ee9ea1090" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1658.685219] env[62519]: DEBUG nova.compute.manager [None req-aeecf1a6-198a-4c5f-8645-99f7ca1d1fc0 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] [instance: ad0af10d-5063-4344-b12f-1d3ee9ea1090] Instance network_info: |[{"id": "925db578-876c-414b-8500-56c73c7cdfe8", "address": "fa:16:3e:d9:6c:87", "network": {"id": "14e63db8-c42b-42e5-bd96-cab60c163758", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1328688204-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "df6727c290724a8ebef5188c77e91399", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2be3fdb5-359e-43bd-8c20-2ff00e81db55", "external-id": "nsx-vlan-transportzone-986", "segmentation_id": 986, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap925db578-87", "ovs_interfaceid": "925db578-876c-414b-8500-56c73c7cdfe8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62519) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2004}} [ 1658.685547] env[62519]: DEBUG oslo_concurrency.lockutils [req-95e81bc0-bc3f-468d-a51b-1823b193b2ce req-58278019-8512-40f6-ae6a-43578f2f03cf service nova] Acquired lock "refresh_cache-ad0af10d-5063-4344-b12f-1d3ee9ea1090" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1658.686535] env[62519]: DEBUG nova.network.neutron [req-95e81bc0-bc3f-468d-a51b-1823b193b2ce req-58278019-8512-40f6-ae6a-43578f2f03cf service nova] [instance: ad0af10d-5063-4344-b12f-1d3ee9ea1090] Refreshing network info cache for port 925db578-876c-414b-8500-56c73c7cdfe8 {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1658.686915] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-aeecf1a6-198a-4c5f-8645-99f7ca1d1fc0 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] [instance: ad0af10d-5063-4344-b12f-1d3ee9ea1090] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:d9:6c:87', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '2be3fdb5-359e-43bd-8c20-2ff00e81db55', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '925db578-876c-414b-8500-56c73c7cdfe8', 'vif_model': 'vmxnet3'}] {{(pid=62519) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1658.695348] env[62519]: DEBUG 
oslo.service.loopingcall [None req-aeecf1a6-198a-4c5f-8645-99f7ca1d1fc0 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62519) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1658.697880] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ad0af10d-5063-4344-b12f-1d3ee9ea1090] Creating VM on the ESX host {{(pid=62519) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1658.698468] env[62519]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-0b3c7a35-49bf-4b20-9b55-b580c51380eb {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1658.721729] env[62519]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1658.721729] env[62519]: value = "task-1802668" [ 1658.721729] env[62519]: _type = "Task" [ 1658.721729] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1658.730660] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1802668, 'name': CreateVM_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1658.785794] env[62519]: DEBUG oslo_concurrency.lockutils [None req-bbbe2e6d-2a93-4fc7-a50a-4b3277528c06 tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] Acquiring lock "8070aa59-3547-460a-b914-0e84620023d0" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1658.786162] env[62519]: DEBUG oslo_concurrency.lockutils [None req-bbbe2e6d-2a93-4fc7-a50a-4b3277528c06 tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] Lock "8070aa59-3547-460a-b914-0e84620023d0" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1658.786352] env[62519]: DEBUG oslo_concurrency.lockutils [None req-bbbe2e6d-2a93-4fc7-a50a-4b3277528c06 tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] Acquiring lock "8070aa59-3547-460a-b914-0e84620023d0-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1658.786545] env[62519]: DEBUG oslo_concurrency.lockutils [None req-bbbe2e6d-2a93-4fc7-a50a-4b3277528c06 tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] Lock "8070aa59-3547-460a-b914-0e84620023d0-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1658.786733] env[62519]: DEBUG oslo_concurrency.lockutils [None req-bbbe2e6d-2a93-4fc7-a50a-4b3277528c06 tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] Lock "8070aa59-3547-460a-b914-0e84620023d0-events" "released" by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1658.793947] env[62519]: INFO nova.compute.manager [None req-bbbe2e6d-2a93-4fc7-a50a-4b3277528c06 tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] [instance: 8070aa59-3547-460a-b914-0e84620023d0] Terminating instance [ 1658.915106] env[62519]: DEBUG nova.network.neutron [req-95e81bc0-bc3f-468d-a51b-1823b193b2ce req-58278019-8512-40f6-ae6a-43578f2f03cf service nova] [instance: ad0af10d-5063-4344-b12f-1d3ee9ea1090] Updated VIF entry in instance network info cache for port 925db578-876c-414b-8500-56c73c7cdfe8. {{(pid=62519) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1658.915572] env[62519]: DEBUG nova.network.neutron [req-95e81bc0-bc3f-468d-a51b-1823b193b2ce req-58278019-8512-40f6-ae6a-43578f2f03cf service nova] [instance: ad0af10d-5063-4344-b12f-1d3ee9ea1090] Updating instance_info_cache with network_info: [{"id": "925db578-876c-414b-8500-56c73c7cdfe8", "address": "fa:16:3e:d9:6c:87", "network": {"id": "14e63db8-c42b-42e5-bd96-cab60c163758", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1328688204-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "df6727c290724a8ebef5188c77e91399", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2be3fdb5-359e-43bd-8c20-2ff00e81db55", "external-id": "nsx-vlan-transportzone-986", "segmentation_id": 986, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap925db578-87", "ovs_interfaceid": "925db578-876c-414b-8500-56c73c7cdfe8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1659.069314] env[62519]: DEBUG nova.scheduler.client.report [None req-ad7bb8ca-d7d9-41c0-86cb-2bc2a1a52dc5 tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] Inventory has not changed for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 157, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1659.090176] env[62519]: DEBUG nova.network.neutron [-] [instance: 45d1aa86-a5c8-4e75-a6c8-5f55461702f8] Updating instance_info_cache with network_info: [] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1659.234147] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1802668, 'name': CreateVM_Task} progress is 99%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1659.297714] env[62519]: DEBUG nova.compute.manager [None req-bbbe2e6d-2a93-4fc7-a50a-4b3277528c06 tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] [instance: 8070aa59-3547-460a-b914-0e84620023d0] Start destroying the instance on the hypervisor. {{(pid=62519) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3166}} [ 1659.297958] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-bbbe2e6d-2a93-4fc7-a50a-4b3277528c06 tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] [instance: 8070aa59-3547-460a-b914-0e84620023d0] Destroying instance {{(pid=62519) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1659.298911] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cbdb6e1c-ecb2-46d7-b985-2674c12bd43b {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1659.306610] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-bbbe2e6d-2a93-4fc7-a50a-4b3277528c06 tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] [instance: 8070aa59-3547-460a-b914-0e84620023d0] Powering off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1659.306852] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-32d48f0f-f743-477b-aba9-aec26e9a8a28 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1659.313763] env[62519]: DEBUG oslo_vmware.api [None req-bbbe2e6d-2a93-4fc7-a50a-4b3277528c06 tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] Waiting for the task: (returnval){ [ 1659.313763] env[62519]: value = "task-1802669" [ 1659.313763] env[62519]: _type = "Task" [ 1659.313763] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1659.321584] env[62519]: DEBUG oslo_vmware.api [None req-bbbe2e6d-2a93-4fc7-a50a-4b3277528c06 tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] Task: {'id': task-1802669, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1659.418170] env[62519]: DEBUG oslo_concurrency.lockutils [req-95e81bc0-bc3f-468d-a51b-1823b193b2ce req-58278019-8512-40f6-ae6a-43578f2f03cf service nova] Releasing lock "refresh_cache-ad0af10d-5063-4344-b12f-1d3ee9ea1090" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1659.576748] env[62519]: DEBUG oslo_concurrency.lockutils [None req-ad7bb8ca-d7d9-41c0-86cb-2bc2a1a52dc5 tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.001s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1659.579234] env[62519]: DEBUG oslo_concurrency.lockutils [None req-72bc2465-a50c-495c-a43f-faf26f866c6b tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 27.493s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1659.579472] env[62519]: DEBUG nova.objects.instance [None req-72bc2465-a50c-495c-a43f-faf26f866c6b tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Lazy-loading 'resources' on Instance uuid dac173ff-1807-405f-a59c-bb2efef62a0c {{(pid=62519) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1659.593024] env[62519]: INFO nova.compute.manager [-] [instance: 45d1aa86-a5c8-4e75-a6c8-5f55461702f8] Took 1.28 seconds to deallocate network for instance. [ 1659.605747] env[62519]: INFO nova.scheduler.client.report [None req-ad7bb8ca-d7d9-41c0-86cb-2bc2a1a52dc5 tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] Deleted allocations for instance 1d4b14d3-8832-457e-aaed-462236555f57 [ 1659.733471] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1802668, 'name': CreateVM_Task} progress is 99%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1659.824338] env[62519]: DEBUG oslo_vmware.api [None req-bbbe2e6d-2a93-4fc7-a50a-4b3277528c06 tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] Task: {'id': task-1802669, 'name': PowerOffVM_Task, 'duration_secs': 0.216299} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1659.824338] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-bbbe2e6d-2a93-4fc7-a50a-4b3277528c06 tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] [instance: 8070aa59-3547-460a-b914-0e84620023d0] Powered off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1659.824338] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-bbbe2e6d-2a93-4fc7-a50a-4b3277528c06 tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] [instance: 8070aa59-3547-460a-b914-0e84620023d0] Unregistering the VM {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1659.824540] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c4e3f551-0f2d-48a5-9640-c1f9077de735 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1659.917665] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-bbbe2e6d-2a93-4fc7-a50a-4b3277528c06 tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] [instance: 8070aa59-3547-460a-b914-0e84620023d0] Unregistered the VM {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1659.917898] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-bbbe2e6d-2a93-4fc7-a50a-4b3277528c06 tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] [instance: 8070aa59-3547-460a-b914-0e84620023d0] Deleting contents of the VM from datastore datastore1 {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1659.918101] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-bbbe2e6d-2a93-4fc7-a50a-4b3277528c06 tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] Deleting the datastore file [datastore1] 8070aa59-3547-460a-b914-0e84620023d0 {{(pid=62519) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1659.918373] env[62519]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f5dde497-d19b-47d7-963b-ad04238ae981 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1659.924822] env[62519]: DEBUG oslo_vmware.api [None req-bbbe2e6d-2a93-4fc7-a50a-4b3277528c06 tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] Waiting for the task: (returnval){ [ 1659.924822] env[62519]: value = "task-1802671" [ 1659.924822] env[62519]: _type = "Task" [ 1659.924822] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1659.933693] env[62519]: DEBUG oslo_vmware.api [None req-bbbe2e6d-2a93-4fc7-a50a-4b3277528c06 tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] Task: {'id': task-1802671, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1660.059901] env[62519]: DEBUG nova.compute.manager [req-10f9bb08-a56e-4fee-b9a7-7070806e1fef req-181c3237-8fa3-4aaa-95b6-b8b5d15f53d7 service nova] [instance: 45d1aa86-a5c8-4e75-a6c8-5f55461702f8] Received event network-vif-deleted-431f108a-5529-45cb-8053-17140e36c28d {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1660.100119] env[62519]: DEBUG oslo_concurrency.lockutils [None req-4054b116-0ba8-401b-909e-ea20d3c324af tempest-ServerGroupTestJSON-1731423365 tempest-ServerGroupTestJSON-1731423365-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1660.115443] env[62519]: DEBUG oslo_concurrency.lockutils [None req-ad7bb8ca-d7d9-41c0-86cb-2bc2a1a52dc5 tempest-ListImageFiltersTestJSON-1303275626 tempest-ListImageFiltersTestJSON-1303275626-project-member] Lock "1d4b14d3-8832-457e-aaed-462236555f57" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 36.215s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1660.234150] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1802668, 'name': CreateVM_Task, 'duration_secs': 1.38352} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1660.234150] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ad0af10d-5063-4344-b12f-1d3ee9ea1090] Created VM on the ESX host {{(pid=62519) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1660.234837] env[62519]: DEBUG oslo_concurrency.lockutils [None req-aeecf1a6-198a-4c5f-8645-99f7ca1d1fc0 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1660.235051] env[62519]: DEBUG oslo_concurrency.lockutils [None req-aeecf1a6-198a-4c5f-8645-99f7ca1d1fc0 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Acquired lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1660.235590] env[62519]: DEBUG oslo_concurrency.lockutils [None req-aeecf1a6-198a-4c5f-8645-99f7ca1d1fc0 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1660.238436] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5f5c0752-3233-4af5-be21-104747ae8eee {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1660.244028] env[62519]: DEBUG oslo_vmware.api [None req-aeecf1a6-198a-4c5f-8645-99f7ca1d1fc0 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Waiting for the task: 
(returnval){ [ 1660.244028] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]523aafd8-532a-ee99-3941-d5b8b54af4f9" [ 1660.244028] env[62519]: _type = "Task" [ 1660.244028] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1660.252169] env[62519]: DEBUG oslo_vmware.api [None req-aeecf1a6-198a-4c5f-8645-99f7ca1d1fc0 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]523aafd8-532a-ee99-3941-d5b8b54af4f9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1660.436889] env[62519]: DEBUG oslo_vmware.api [None req-bbbe2e6d-2a93-4fc7-a50a-4b3277528c06 tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] Task: {'id': task-1802671, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.132612} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1660.439165] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-bbbe2e6d-2a93-4fc7-a50a-4b3277528c06 tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] Deleted the datastore file {{(pid=62519) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1660.439355] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-bbbe2e6d-2a93-4fc7-a50a-4b3277528c06 tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] [instance: 8070aa59-3547-460a-b914-0e84620023d0] Deleted contents of the VM from datastore datastore1 {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1660.439606] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-bbbe2e6d-2a93-4fc7-a50a-4b3277528c06 tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] [instance: 8070aa59-3547-460a-b914-0e84620023d0] Instance destroyed {{(pid=62519) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1660.439802] env[62519]: INFO nova.compute.manager [None req-bbbe2e6d-2a93-4fc7-a50a-4b3277528c06 tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] [instance: 8070aa59-3547-460a-b914-0e84620023d0] Took 1.14 seconds to destroy the instance on the hypervisor. [ 1660.440048] env[62519]: DEBUG oslo.service.loopingcall [None req-bbbe2e6d-2a93-4fc7-a50a-4b3277528c06 tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62519) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1660.440411] env[62519]: DEBUG nova.compute.manager [-] [instance: 8070aa59-3547-460a-b914-0e84620023d0] Deallocating network for instance {{(pid=62519) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2297}} [ 1660.440510] env[62519]: DEBUG nova.network.neutron [-] [instance: 8070aa59-3547-460a-b914-0e84620023d0] deallocate_for_instance() {{(pid=62519) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1660.459684] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cea4a2e5-30ea-435f-846f-9ec81db54556 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1660.467009] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42cdb62b-8f9f-4d3e-9600-4d7f86faf958 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1660.498238] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f01e6ed-5e2a-4f6e-9ebd-3f9080b6af34 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1660.506231] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af5d173b-c835-40b6-a60f-f7e896e1b3c8 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1660.519645] env[62519]: DEBUG nova.compute.provider_tree [None req-72bc2465-a50c-495c-a43f-faf26f866c6b tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Inventory has not changed in ProviderTree for provider: f8ca0d98-9158-4b85-ae0e-b106f966dd44 {{(pid=62519) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1660.754885] env[62519]: DEBUG oslo_vmware.api [None req-aeecf1a6-198a-4c5f-8645-99f7ca1d1fc0 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]523aafd8-532a-ee99-3941-d5b8b54af4f9, 'name': SearchDatastore_Task, 'duration_secs': 0.012948} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1660.755396] env[62519]: DEBUG oslo_concurrency.lockutils [None req-aeecf1a6-198a-4c5f-8645-99f7ca1d1fc0 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Releasing lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1660.756548] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-aeecf1a6-198a-4c5f-8645-99f7ca1d1fc0 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] [instance: ad0af10d-5063-4344-b12f-1d3ee9ea1090] Processing image 15793716-f1d9-4a86-9030-717adf498693 {{(pid=62519) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1660.756548] env[62519]: DEBUG oslo_concurrency.lockutils [None req-aeecf1a6-198a-4c5f-8645-99f7ca1d1fc0 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1660.756548] env[62519]: DEBUG oslo_concurrency.lockutils [None req-aeecf1a6-198a-4c5f-8645-99f7ca1d1fc0 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Acquired lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1660.756548] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-aeecf1a6-198a-4c5f-8645-99f7ca1d1fc0 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62519) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1660.756548] env[62519]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-0eebd082-244e-48a9-8206-7094fa451e40 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1660.766598] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-aeecf1a6-198a-4c5f-8645-99f7ca1d1fc0 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62519) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1660.766815] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-aeecf1a6-198a-4c5f-8645-99f7ca1d1fc0 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62519) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1660.767581] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-38cfa295-41e2-4922-8447-f36517087a87 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1660.774041] env[62519]: DEBUG oslo_vmware.api [None req-aeecf1a6-198a-4c5f-8645-99f7ca1d1fc0 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Waiting for the task: (returnval){ [ 1660.774041] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]52951e4f-9fa7-c3a0-cff1-f18cda51a564" [ 1660.774041] env[62519]: _type = "Task" [ 1660.774041] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1660.782892] env[62519]: DEBUG oslo_vmware.api [None req-aeecf1a6-198a-4c5f-8645-99f7ca1d1fc0 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52951e4f-9fa7-c3a0-cff1-f18cda51a564, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1661.024312] env[62519]: DEBUG nova.scheduler.client.report [None req-72bc2465-a50c-495c-a43f-faf26f866c6b tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Inventory has not changed for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 157, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1661.213903] env[62519]: DEBUG nova.network.neutron [-] [instance: 8070aa59-3547-460a-b914-0e84620023d0] Updating instance_info_cache with network_info: [] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1661.286454] env[62519]: DEBUG oslo_vmware.api [None req-aeecf1a6-198a-4c5f-8645-99f7ca1d1fc0 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52951e4f-9fa7-c3a0-cff1-f18cda51a564, 'name': SearchDatastore_Task, 'duration_secs': 0.00797} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1661.287259] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-20f61fd6-49ab-4741-949f-394552cd8698 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1661.292381] env[62519]: DEBUG oslo_vmware.api [None req-aeecf1a6-198a-4c5f-8645-99f7ca1d1fc0 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Waiting for the task: (returnval){ [ 1661.292381] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]52d378d4-f4c8-e8e6-d366-779e2f245135" [ 1661.292381] env[62519]: _type = "Task" [ 1661.292381] env[62519]: } to complete. 
{{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1661.300580] env[62519]: DEBUG oslo_vmware.api [None req-aeecf1a6-198a-4c5f-8645-99f7ca1d1fc0 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52d378d4-f4c8-e8e6-d366-779e2f245135, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1661.529596] env[62519]: DEBUG oslo_concurrency.lockutils [None req-72bc2465-a50c-495c-a43f-faf26f866c6b tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.950s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1661.531929] env[62519]: DEBUG oslo_concurrency.lockutils [None req-64171712-43a0-4e02-85db-f982475f1310 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" :: waited 27.559s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1661.555292] env[62519]: INFO nova.scheduler.client.report [None req-72bc2465-a50c-495c-a43f-faf26f866c6b tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Deleted allocations for instance dac173ff-1807-405f-a59c-bb2efef62a0c [ 1661.716539] env[62519]: INFO nova.compute.manager [-] [instance: 8070aa59-3547-460a-b914-0e84620023d0] Took 1.28 seconds to deallocate network for instance. [ 1661.802781] env[62519]: DEBUG oslo_vmware.api [None req-aeecf1a6-198a-4c5f-8645-99f7ca1d1fc0 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52d378d4-f4c8-e8e6-d366-779e2f245135, 'name': SearchDatastore_Task, 'duration_secs': 0.009445} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1661.803160] env[62519]: DEBUG oslo_concurrency.lockutils [None req-aeecf1a6-198a-4c5f-8645-99f7ca1d1fc0 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Releasing lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1661.803395] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-aeecf1a6-198a-4c5f-8645-99f7ca1d1fc0 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk to [datastore1] ad0af10d-5063-4344-b12f-1d3ee9ea1090/ad0af10d-5063-4344-b12f-1d3ee9ea1090.vmdk {{(pid=62519) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1661.803650] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c1695b4c-4a3b-43e4-9a18-6901d8bf5336 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1661.811048] env[62519]: DEBUG oslo_vmware.api [None req-aeecf1a6-198a-4c5f-8645-99f7ca1d1fc0 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Waiting for the task: (returnval){ [ 1661.811048] env[62519]: value = "task-1802672" [ 1661.811048] env[62519]: _type = "Task" [ 1661.811048] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1661.819167] env[62519]: DEBUG oslo_vmware.api [None req-aeecf1a6-198a-4c5f-8645-99f7ca1d1fc0 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Task: {'id': task-1802672, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1662.034757] env[62519]: DEBUG nova.objects.instance [None req-64171712-43a0-4e02-85db-f982475f1310 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Lazy-loading 'migration_context' on Instance uuid a1551278-a306-4534-8d8d-3b3a003dde04 {{(pid=62519) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1662.063569] env[62519]: DEBUG oslo_concurrency.lockutils [None req-72bc2465-a50c-495c-a43f-faf26f866c6b tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Lock "dac173ff-1807-405f-a59c-bb2efef62a0c" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 33.726s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1662.155880] env[62519]: DEBUG nova.compute.manager [req-7406bf89-2ff8-4828-9fab-1e97e08c33ed req-92f892d6-b304-4fc2-82f9-85f1407b4e67 service nova] [instance: 8070aa59-3547-460a-b914-0e84620023d0] Received event network-vif-deleted-11187365-8def-40f6-822e-05bb089dd16b {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1662.223034] env[62519]: DEBUG oslo_concurrency.lockutils [None req-bbbe2e6d-2a93-4fc7-a50a-4b3277528c06 tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1662.322431] env[62519]: DEBUG oslo_vmware.api [None req-aeecf1a6-198a-4c5f-8645-99f7ca1d1fc0 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Task: {'id': task-1802672, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.482389} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1662.322701] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-aeecf1a6-198a-4c5f-8645-99f7ca1d1fc0 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk to [datastore1] ad0af10d-5063-4344-b12f-1d3ee9ea1090/ad0af10d-5063-4344-b12f-1d3ee9ea1090.vmdk {{(pid=62519) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1662.322928] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-aeecf1a6-198a-4c5f-8645-99f7ca1d1fc0 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] [instance: ad0af10d-5063-4344-b12f-1d3ee9ea1090] Extending root virtual disk to 1048576 {{(pid=62519) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1662.323203] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-7044415c-ce14-434a-96dc-1f33a03f3462 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1662.330184] env[62519]: DEBUG oslo_vmware.api [None req-aeecf1a6-198a-4c5f-8645-99f7ca1d1fc0 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Waiting for the task: (returnval){ [ 1662.330184] env[62519]: value = "task-1802673" [ 1662.330184] env[62519]: _type = "Task" [ 1662.330184] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1662.338561] env[62519]: DEBUG oslo_vmware.api [None req-aeecf1a6-198a-4c5f-8645-99f7ca1d1fc0 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Task: {'id': task-1802673, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1662.363330] env[62519]: DEBUG oslo_concurrency.lockutils [None req-952776ea-e15a-49a8-9654-d3d8b8b5d6b1 tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Acquiring lock "f59a31e4-7fb9-4de7-b35f-da811a305f85" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1662.363330] env[62519]: DEBUG oslo_concurrency.lockutils [None req-952776ea-e15a-49a8-9654-d3d8b8b5d6b1 tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Lock "f59a31e4-7fb9-4de7-b35f-da811a305f85" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1662.363330] env[62519]: DEBUG oslo_concurrency.lockutils [None req-952776ea-e15a-49a8-9654-d3d8b8b5d6b1 tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Acquiring lock "f59a31e4-7fb9-4de7-b35f-da811a305f85-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1662.363330] env[62519]: DEBUG oslo_concurrency.lockutils [None req-952776ea-e15a-49a8-9654-d3d8b8b5d6b1 tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Lock "f59a31e4-7fb9-4de7-b35f-da811a305f85-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1662.363330] env[62519]: DEBUG oslo_concurrency.lockutils [None req-952776ea-e15a-49a8-9654-d3d8b8b5d6b1 tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Lock "f59a31e4-7fb9-4de7-b35f-da811a305f85-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1662.364680] env[62519]: INFO nova.compute.manager [None req-952776ea-e15a-49a8-9654-d3d8b8b5d6b1 tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] [instance: f59a31e4-7fb9-4de7-b35f-da811a305f85] Terminating instance [ 1662.847986] env[62519]: DEBUG oslo_vmware.api [None req-aeecf1a6-198a-4c5f-8645-99f7ca1d1fc0 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Task: {'id': task-1802673, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.060065} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1662.850842] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-aeecf1a6-198a-4c5f-8645-99f7ca1d1fc0 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] [instance: ad0af10d-5063-4344-b12f-1d3ee9ea1090] Extended root virtual disk {{(pid=62519) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1662.852116] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8983ef31-5ab3-4afa-990a-99ea75868968 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1662.874181] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-aeecf1a6-198a-4c5f-8645-99f7ca1d1fc0 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] [instance: ad0af10d-5063-4344-b12f-1d3ee9ea1090] Reconfiguring VM instance instance-00000041 to attach disk [datastore1] ad0af10d-5063-4344-b12f-1d3ee9ea1090/ad0af10d-5063-4344-b12f-1d3ee9ea1090.vmdk or device None with type sparse {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1662.877160] env[62519]: DEBUG nova.compute.manager [None req-952776ea-e15a-49a8-9654-d3d8b8b5d6b1 tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] [instance: f59a31e4-7fb9-4de7-b35f-da811a305f85] Start destroying the instance on the hypervisor. {{(pid=62519) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3166}} [ 1662.877364] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-952776ea-e15a-49a8-9654-d3d8b8b5d6b1 tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] [instance: f59a31e4-7fb9-4de7-b35f-da811a305f85] Destroying instance {{(pid=62519) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1662.877622] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c10397af-a0d0-477b-b8d2-ac5612a27463 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1662.892896] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6405d177-630d-42bd-8c2d-7a3d33a2f141 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1662.902009] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-952776ea-e15a-49a8-9654-d3d8b8b5d6b1 tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] [instance: f59a31e4-7fb9-4de7-b35f-da811a305f85] Powering off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1662.903742] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c17145a0-6a58-4bb6-9f6a-12fda241c13a {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1662.905551] env[62519]: DEBUG oslo_vmware.api [None req-aeecf1a6-198a-4c5f-8645-99f7ca1d1fc0 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Waiting for the task: (returnval){ [ 1662.905551] env[62519]: value = "task-1802674" [ 1662.905551] env[62519]: _type = "Task" [ 1662.905551] env[62519]: } to complete. 
{{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1662.914440] env[62519]: DEBUG oslo_vmware.api [None req-952776ea-e15a-49a8-9654-d3d8b8b5d6b1 tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Waiting for the task: (returnval){ [ 1662.914440] env[62519]: value = "task-1802675" [ 1662.914440] env[62519]: _type = "Task" [ 1662.914440] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1662.921133] env[62519]: DEBUG oslo_vmware.api [None req-aeecf1a6-198a-4c5f-8645-99f7ca1d1fc0 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Task: {'id': task-1802674, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1662.929118] env[62519]: DEBUG oslo_vmware.api [None req-952776ea-e15a-49a8-9654-d3d8b8b5d6b1 tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Task: {'id': task-1802675, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1663.011166] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba0a1367-2f61-477b-a22e-de08020ddf7a {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1663.020387] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8a05b74-3d6d-4c40-a89f-ffefd573d786 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1663.053246] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-371a3b19-b9d4-4b7d-8ade-35dbdb109663 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1663.062139] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8c2d5f5-2754-47bb-9eca-3ea30b299257 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1663.088438] env[62519]: DEBUG nova.compute.provider_tree [None req-64171712-43a0-4e02-85db-f982475f1310 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Inventory has not changed in ProviderTree for provider: f8ca0d98-9158-4b85-ae0e-b106f966dd44 {{(pid=62519) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1663.420441] env[62519]: DEBUG oslo_vmware.api [None req-aeecf1a6-198a-4c5f-8645-99f7ca1d1fc0 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Task: {'id': task-1802674, 'name': ReconfigVM_Task, 'duration_secs': 0.302886} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1663.424148] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-aeecf1a6-198a-4c5f-8645-99f7ca1d1fc0 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] [instance: ad0af10d-5063-4344-b12f-1d3ee9ea1090] Reconfigured VM instance instance-00000041 to attach disk [datastore1] ad0af10d-5063-4344-b12f-1d3ee9ea1090/ad0af10d-5063-4344-b12f-1d3ee9ea1090.vmdk or device None with type sparse {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1663.424805] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-2f4ece0a-40f6-4f46-b87f-d9b10ce71dac {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1663.432070] env[62519]: DEBUG oslo_vmware.api [None req-952776ea-e15a-49a8-9654-d3d8b8b5d6b1 tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Task: {'id': task-1802675, 'name': PowerOffVM_Task, 'duration_secs': 0.223166} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1663.433339] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-952776ea-e15a-49a8-9654-d3d8b8b5d6b1 tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] [instance: f59a31e4-7fb9-4de7-b35f-da811a305f85] Powered off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1663.433514] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-952776ea-e15a-49a8-9654-d3d8b8b5d6b1 tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] [instance: f59a31e4-7fb9-4de7-b35f-da811a305f85] Unregistering the VM {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1663.433822] env[62519]: DEBUG oslo_vmware.api [None req-aeecf1a6-198a-4c5f-8645-99f7ca1d1fc0 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Waiting for the task: (returnval){ [ 1663.433822] env[62519]: value = "task-1802676" [ 1663.433822] env[62519]: _type = "Task" [ 1663.433822] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1663.434042] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-260cd396-299a-4f78-af7f-a343273e4d50 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1663.445508] env[62519]: DEBUG oslo_vmware.api [None req-aeecf1a6-198a-4c5f-8645-99f7ca1d1fc0 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Task: {'id': task-1802676, 'name': Rename_Task} progress is 6%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1663.524819] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-952776ea-e15a-49a8-9654-d3d8b8b5d6b1 tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] [instance: f59a31e4-7fb9-4de7-b35f-da811a305f85] Unregistered the VM {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1663.525062] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-952776ea-e15a-49a8-9654-d3d8b8b5d6b1 tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] [instance: f59a31e4-7fb9-4de7-b35f-da811a305f85] Deleting contents of the VM from datastore datastore1 {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1663.525250] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-952776ea-e15a-49a8-9654-d3d8b8b5d6b1 tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Deleting the datastore file [datastore1] f59a31e4-7fb9-4de7-b35f-da811a305f85 {{(pid=62519) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1663.525530] env[62519]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-253f45d4-3b4d-4b8e-8796-ebe9b40b9d91 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1663.531701] env[62519]: DEBUG oslo_vmware.api [None req-952776ea-e15a-49a8-9654-d3d8b8b5d6b1 tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Waiting for the task: (returnval){ [ 1663.531701] env[62519]: value = "task-1802678" [ 1663.531701] env[62519]: _type = "Task" [ 1663.531701] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1663.539466] env[62519]: DEBUG oslo_vmware.api [None req-952776ea-e15a-49a8-9654-d3d8b8b5d6b1 tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Task: {'id': task-1802678, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1663.592370] env[62519]: DEBUG nova.scheduler.client.report [None req-64171712-43a0-4e02-85db-f982475f1310 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Inventory has not changed for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 157, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1663.947086] env[62519]: DEBUG oslo_vmware.api [None req-aeecf1a6-198a-4c5f-8645-99f7ca1d1fc0 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Task: {'id': task-1802676, 'name': Rename_Task} progress is 14%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1664.041538] env[62519]: DEBUG oslo_vmware.api [None req-952776ea-e15a-49a8-9654-d3d8b8b5d6b1 tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Task: {'id': task-1802678, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.466906} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1664.041629] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-952776ea-e15a-49a8-9654-d3d8b8b5d6b1 tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Deleted the datastore file {{(pid=62519) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1664.041740] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-952776ea-e15a-49a8-9654-d3d8b8b5d6b1 tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] [instance: f59a31e4-7fb9-4de7-b35f-da811a305f85] Deleted contents of the VM from datastore datastore1 {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1664.041912] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-952776ea-e15a-49a8-9654-d3d8b8b5d6b1 tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] [instance: f59a31e4-7fb9-4de7-b35f-da811a305f85] Instance destroyed {{(pid=62519) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1664.042099] env[62519]: INFO nova.compute.manager [None req-952776ea-e15a-49a8-9654-d3d8b8b5d6b1 tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] [instance: f59a31e4-7fb9-4de7-b35f-da811a305f85] Took 1.16 seconds to destroy the instance on the hypervisor. [ 1664.042345] env[62519]: DEBUG oslo.service.loopingcall [None req-952776ea-e15a-49a8-9654-d3d8b8b5d6b1 tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62519) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1664.042544] env[62519]: DEBUG nova.compute.manager [-] [instance: f59a31e4-7fb9-4de7-b35f-da811a305f85] Deallocating network for instance {{(pid=62519) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2297}} [ 1664.042639] env[62519]: DEBUG nova.network.neutron [-] [instance: f59a31e4-7fb9-4de7-b35f-da811a305f85] deallocate_for_instance() {{(pid=62519) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1664.448652] env[62519]: DEBUG oslo_vmware.api [None req-aeecf1a6-198a-4c5f-8645-99f7ca1d1fc0 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Task: {'id': task-1802676, 'name': Rename_Task, 'duration_secs': 0.858395} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1664.450697] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-aeecf1a6-198a-4c5f-8645-99f7ca1d1fc0 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] [instance: ad0af10d-5063-4344-b12f-1d3ee9ea1090] Powering on the VM {{(pid=62519) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1664.452801] env[62519]: DEBUG nova.compute.manager [req-4c078e14-5f41-41c3-a16b-3d59554df869 req-44b16916-6d0c-450d-b44c-835b6270747a service nova] [instance: f59a31e4-7fb9-4de7-b35f-da811a305f85] Received event network-vif-deleted-d1be096c-c8b0-4dd6-98d9-8289626a2f66 {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1664.453008] env[62519]: INFO nova.compute.manager [req-4c078e14-5f41-41c3-a16b-3d59554df869 req-44b16916-6d0c-450d-b44c-835b6270747a service nova] [instance: f59a31e4-7fb9-4de7-b35f-da811a305f85] Neutron deleted interface d1be096c-c8b0-4dd6-98d9-8289626a2f66; detaching it from the instance and deleting it from the info cache [ 1664.453196] env[62519]: DEBUG nova.network.neutron [req-4c078e14-5f41-41c3-a16b-3d59554df869 req-44b16916-6d0c-450d-b44c-835b6270747a service nova] [instance: f59a31e4-7fb9-4de7-b35f-da811a305f85] Updating instance_info_cache with network_info: [] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1664.457482] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-1685ad51-42ee-4ba7-a480-8b61c80d8a28 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1664.467311] env[62519]: DEBUG oslo_vmware.api [None req-aeecf1a6-198a-4c5f-8645-99f7ca1d1fc0 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Waiting for the task: (returnval){ [ 1664.467311] env[62519]: value = "task-1802679" [ 1664.467311] env[62519]: _type = "Task" [ 1664.467311] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1664.481626] env[62519]: DEBUG oslo_vmware.api [None req-aeecf1a6-198a-4c5f-8645-99f7ca1d1fc0 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Task: {'id': task-1802679, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1664.609222] env[62519]: DEBUG oslo_concurrency.lockutils [None req-64171712-43a0-4e02-85db-f982475f1310 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" :: held 3.075s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1664.613846] env[62519]: DEBUG oslo_concurrency.lockutils [None req-41ebc291-9b01-4875-8d9f-5c4226f038f6 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 29.059s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1664.615397] env[62519]: INFO nova.compute.claims [None req-41ebc291-9b01-4875-8d9f-5c4226f038f6 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] [instance: 40507d8c-8f30-45d4-9c65-03f8b1271afb] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1664.629281] env[62519]: DEBUG oslo_concurrency.lockutils [None req-7c5992d4-cb16-49ff-bdd0-3edf419b5b38 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] Acquiring lock "67424299-f100-49a1-ab73-0407b60a2d9f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1664.629512] env[62519]: DEBUG oslo_concurrency.lockutils [None req-7c5992d4-cb16-49ff-bdd0-3edf419b5b38 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] Lock "67424299-f100-49a1-ab73-0407b60a2d9f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1664.680829] env[62519]: DEBUG oslo_concurrency.lockutils [None req-7c5992d4-cb16-49ff-bdd0-3edf419b5b38 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] Acquiring lock "83a680b9-0c2d-4231-9ddf-9aa90209c620" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1664.681092] env[62519]: DEBUG oslo_concurrency.lockutils [None req-7c5992d4-cb16-49ff-bdd0-3edf419b5b38 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] Lock "83a680b9-0c2d-4231-9ddf-9aa90209c620" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1664.893088] env[62519]: DEBUG nova.network.neutron [-] [instance: f59a31e4-7fb9-4de7-b35f-da811a305f85] Updating instance_info_cache with network_info: [] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1664.958298] env[62519]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with 
opID=oslo.vmware-4a66280c-5ca5-4cac-a6fe-f368d08a6a52 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1664.967871] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0a65493-1c3c-4879-b2b3-5d6afa9044ff {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1664.987972] env[62519]: DEBUG oslo_vmware.api [None req-aeecf1a6-198a-4c5f-8645-99f7ca1d1fc0 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Task: {'id': task-1802679, 'name': PowerOnVM_Task, 'duration_secs': 0.444732} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1664.988263] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-aeecf1a6-198a-4c5f-8645-99f7ca1d1fc0 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] [instance: ad0af10d-5063-4344-b12f-1d3ee9ea1090] Powered on the VM {{(pid=62519) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1664.988463] env[62519]: INFO nova.compute.manager [None req-aeecf1a6-198a-4c5f-8645-99f7ca1d1fc0 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] [instance: ad0af10d-5063-4344-b12f-1d3ee9ea1090] Took 8.43 seconds to spawn the instance on the hypervisor. [ 1664.991833] env[62519]: DEBUG nova.compute.manager [None req-aeecf1a6-198a-4c5f-8645-99f7ca1d1fc0 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] [instance: ad0af10d-5063-4344-b12f-1d3ee9ea1090] Checking state {{(pid=62519) _get_power_state /opt/stack/nova/nova/compute/manager.py:1799}} [ 1664.991833] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a1d6a8a-0535-4b92-85a1-a6537344a924 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1665.009581] env[62519]: DEBUG nova.compute.manager [req-4c078e14-5f41-41c3-a16b-3d59554df869 req-44b16916-6d0c-450d-b44c-835b6270747a service nova] [instance: f59a31e4-7fb9-4de7-b35f-da811a305f85] Detach interface failed, port_id=d1be096c-c8b0-4dd6-98d9-8289626a2f66, reason: Instance f59a31e4-7fb9-4de7-b35f-da811a305f85 could not be found. {{(pid=62519) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11455}} [ 1665.131316] env[62519]: DEBUG nova.compute.manager [None req-7c5992d4-cb16-49ff-bdd0-3edf419b5b38 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] [instance: 67424299-f100-49a1-ab73-0407b60a2d9f] Starting instance... {{(pid=62519) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2441}} [ 1665.184036] env[62519]: DEBUG nova.compute.manager [None req-7c5992d4-cb16-49ff-bdd0-3edf419b5b38 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] [instance: 83a680b9-0c2d-4231-9ddf-9aa90209c620] Starting instance... {{(pid=62519) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2441}} [ 1665.396093] env[62519]: INFO nova.compute.manager [-] [instance: f59a31e4-7fb9-4de7-b35f-da811a305f85] Took 1.35 seconds to deallocate network for instance. 
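[editorial note] The vCenter operations traced above and below all follow the same oslo.vmware pattern: a vSphere *_Task method is invoked through the API session (the "Invoking ... with opID=oslo.vmware-..." entries), a Task managed-object reference comes back ("Waiting for the task: (returnval){ value = ..., _type = "Task" }"), and wait_for_task() blocks while _poll_task logs "progress is N%" until the task is reported "completed successfully". A minimal sketch of that pattern, assuming a reachable vCenter; the host, credentials, and managed-object reference below are placeholders and do not come from this log:

    from oslo_vmware import api, vim_util

    # Establish a vCenter API session; all connection values here are illustrative.
    session = api.VMwareAPISession(
        'vc.example.org', 'user', 'secret',
        api_retry_count=10, task_poll_interval=0.5)

    # Invoke a *_Task method (PowerOffVM_Task, as nova's vm_util.power_off_instance
    # does) and block on the returned Task reference until it completes.
    vm_ref = vim_util.get_moref('vm-12345', 'VirtualMachine')  # hypothetical moref
    task_ref = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)
    session.wait_for_task(task_ref)  # produces the wait_for_task/_poll_task DEBUG entries

wait_for_task() polls the task at task_poll_interval and raises if the task ends in error, which is why each operation in the log appears as an Invoking entry, zero or more progress entries, and a final "completed successfully" entry.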
[ 1665.521062] env[62519]: INFO nova.compute.manager [None req-aeecf1a6-198a-4c5f-8645-99f7ca1d1fc0 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] [instance: ad0af10d-5063-4344-b12f-1d3ee9ea1090] Took 39.04 seconds to build instance. [ 1665.659400] env[62519]: DEBUG oslo_concurrency.lockutils [None req-7c5992d4-cb16-49ff-bdd0-3edf419b5b38 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1665.706108] env[62519]: DEBUG oslo_concurrency.lockutils [None req-7c5992d4-cb16-49ff-bdd0-3edf419b5b38 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1665.903326] env[62519]: DEBUG oslo_concurrency.lockutils [None req-952776ea-e15a-49a8-9654-d3d8b8b5d6b1 tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1666.055429] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a4986f2-3694-4d24-8128-d80d888c75b5 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1666.064046] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e63b26b-e8d2-481d-a515-50464ea211a2 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1666.093162] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bee83fec-38ca-40f1-81de-7829339dd703 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1666.100434] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e34317f8-349f-4c7c-b347-e55d1b74aa4d {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1666.115412] env[62519]: DEBUG nova.compute.provider_tree [None req-41ebc291-9b01-4875-8d9f-5c4226f038f6 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Inventory has not changed in ProviderTree for provider: f8ca0d98-9158-4b85-ae0e-b106f966dd44 {{(pid=62519) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1666.158565] env[62519]: INFO nova.compute.manager [None req-64171712-43a0-4e02-85db-f982475f1310 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] [instance: a1551278-a306-4534-8d8d-3b3a003dde04] Swapping old allocation on dict_keys(['f8ca0d98-9158-4b85-ae0e-b106f966dd44']) held by migration 33e7914a-348b-43c3-a3c3-7f65f496ad5a for instance [ 1666.183747] env[62519]: DEBUG nova.scheduler.client.report [None req-64171712-43a0-4e02-85db-f982475f1310 tempest-MigrationsAdminTest-1925339216 
tempest-MigrationsAdminTest-1925339216-project-member] Overwriting current allocation {'allocations': {'f8ca0d98-9158-4b85-ae0e-b106f966dd44': {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}, 'generation': 95}}, 'project_id': 'ae1756c7a3124e6aa1099cf99d6f6285', 'user_id': 'a45b289ccc56467b85f612a102eebf72', 'consumer_generation': 1} on consumer a1551278-a306-4534-8d8d-3b3a003dde04 {{(pid=62519) move_allocations /opt/stack/nova/nova/scheduler/client/report.py:2036}} [ 1666.262024] env[62519]: DEBUG oslo_concurrency.lockutils [None req-64171712-43a0-4e02-85db-f982475f1310 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Acquiring lock "refresh_cache-a1551278-a306-4534-8d8d-3b3a003dde04" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1666.262241] env[62519]: DEBUG oslo_concurrency.lockutils [None req-64171712-43a0-4e02-85db-f982475f1310 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Acquired lock "refresh_cache-a1551278-a306-4534-8d8d-3b3a003dde04" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1666.262422] env[62519]: DEBUG nova.network.neutron [None req-64171712-43a0-4e02-85db-f982475f1310 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] [instance: a1551278-a306-4534-8d8d-3b3a003dde04] Building network info cache for instance {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1666.620158] env[62519]: DEBUG nova.scheduler.client.report [None req-41ebc291-9b01-4875-8d9f-5c4226f038f6 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Inventory has not changed for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 157, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1666.999278] env[62519]: DEBUG nova.network.neutron [None req-64171712-43a0-4e02-85db-f982475f1310 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] [instance: a1551278-a306-4534-8d8d-3b3a003dde04] Updating instance_info_cache with network_info: [{"id": "1cf55e56-b406-4e45-9b63-912d4587f930", "address": "fa:16:3e:57:b8:10", "network": {"id": "b4c20e4e-b4f0-4757-9ab0-b74eef10b678", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.171", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "4069337684d348e0a1ab4eb6a1a2b14d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fe99da4f-5630-4afd-918b-b327193d8489", "external-id": "nsx-vlan-transportzone-688", "segmentation_id": 688, "bound_drivers": {"0": "nsxv3"}}, "devname": 
"tap1cf55e56-b4", "ovs_interfaceid": "1cf55e56-b406-4e45-9b63-912d4587f930", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1667.029358] env[62519]: DEBUG oslo_concurrency.lockutils [None req-aeecf1a6-198a-4c5f-8645-99f7ca1d1fc0 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Lock "ad0af10d-5063-4344-b12f-1d3ee9ea1090" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 47.336s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1667.066405] env[62519]: DEBUG oslo_service.periodic_task [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62519) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1667.066673] env[62519]: DEBUG oslo_service.periodic_task [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=62519) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1667.127701] env[62519]: DEBUG oslo_concurrency.lockutils [None req-41ebc291-9b01-4875-8d9f-5c4226f038f6 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.514s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1667.128258] env[62519]: DEBUG nova.compute.manager [None req-41ebc291-9b01-4875-8d9f-5c4226f038f6 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] [instance: 40507d8c-8f30-45d4-9c65-03f8b1271afb] Start building networks asynchronously for instance. 
{{(pid=62519) _build_resources /opt/stack/nova/nova/compute/manager.py:2838}} [ 1667.131237] env[62519]: DEBUG oslo_concurrency.lockutils [None req-c88119e7-b14e-4315-8bdf-c1e98eca84f2 tempest-TenantUsagesTestJSON-1732852836 tempest-TenantUsagesTestJSON-1732852836-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 29.725s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1667.131457] env[62519]: DEBUG nova.objects.instance [None req-c88119e7-b14e-4315-8bdf-c1e98eca84f2 tempest-TenantUsagesTestJSON-1732852836 tempest-TenantUsagesTestJSON-1732852836-project-member] Lazy-loading 'resources' on Instance uuid 540167be-5295-4e28-9b25-16317746dd0e {{(pid=62519) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1667.502749] env[62519]: DEBUG oslo_concurrency.lockutils [None req-64171712-43a0-4e02-85db-f982475f1310 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Releasing lock "refresh_cache-a1551278-a306-4534-8d8d-3b3a003dde04" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1667.503272] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-64171712-43a0-4e02-85db-f982475f1310 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] [instance: a1551278-a306-4534-8d8d-3b3a003dde04] Powering off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1667.503563] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-66d3aad3-1c57-4026-a210-2ec9c02be7b1 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1667.511835] env[62519]: DEBUG oslo_vmware.api [None req-64171712-43a0-4e02-85db-f982475f1310 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Waiting for the task: (returnval){ [ 1667.511835] env[62519]: value = "task-1802680" [ 1667.511835] env[62519]: _type = "Task" [ 1667.511835] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1667.522922] env[62519]: DEBUG oslo_vmware.api [None req-64171712-43a0-4e02-85db-f982475f1310 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Task: {'id': task-1802680, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1667.577013] env[62519]: DEBUG oslo_service.periodic_task [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62519) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1667.577223] env[62519]: DEBUG nova.compute.manager [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Starting heal instance info cache {{(pid=62519) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10418}} [ 1667.634616] env[62519]: DEBUG nova.compute.utils [None req-41ebc291-9b01-4875-8d9f-5c4226f038f6 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Using /dev/sd instead of None {{(pid=62519) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1667.640498] env[62519]: DEBUG nova.compute.manager [None req-41ebc291-9b01-4875-8d9f-5c4226f038f6 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] [instance: 40507d8c-8f30-45d4-9c65-03f8b1271afb] Allocating IP information in the background. {{(pid=62519) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1989}} [ 1667.640721] env[62519]: DEBUG nova.network.neutron [None req-41ebc291-9b01-4875-8d9f-5c4226f038f6 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] [instance: 40507d8c-8f30-45d4-9c65-03f8b1271afb] allocate_for_instance() {{(pid=62519) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1667.695007] env[62519]: DEBUG nova.policy [None req-41ebc291-9b01-4875-8d9f-5c4226f038f6 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b04fa80750184b97a16ec1880e0a585c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '683d60927fdf424386ffcfaa344a7af6', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62519) authorize /opt/stack/nova/nova/policy.py:192}} [ 1667.756770] env[62519]: DEBUG nova.compute.manager [None req-1ed07ba8-f069-4974-ad77-6c6cc84b3f48 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] [instance: ad0af10d-5063-4344-b12f-1d3ee9ea1090] Stashing vm_state: active {{(pid=62519) _prep_resize /opt/stack/nova/nova/compute/manager.py:6081}} [ 1668.032018] env[62519]: DEBUG oslo_vmware.api [None req-64171712-43a0-4e02-85db-f982475f1310 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Task: {'id': task-1802680, 'name': PowerOffVM_Task, 'duration_secs': 0.230086} completed successfully. 
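The PowerOffVM_Task exchange above (invoke the vSphere method, receive a Task reference such as task-1802680, then poll it until "completed successfully") is oslo.vmware's standard task-handling pattern. A minimal sketch of that pattern, assuming an already-established VMwareAPISession named `session` and a resolved VM managed-object reference `vm_ref`; the helper name is illustrative, not Nova's actual code path:

    def power_off_vm(session, vm_ref):
        """Invoke PowerOffVM_Task and block until vCenter reports success."""
        # invoke_api() issues the SOAP call and returns the Task moref,
        # the value the "Waiting for the task" records refer to.
        task_ref = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)
        # wait_for_task() polls the task (the "progress is N%" records)
        # and raises an oslo.vmware exception if the task ends in error.
        return session.wait_for_task(task_ref)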
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1668.032018] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-64171712-43a0-4e02-85db-f982475f1310 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] [instance: a1551278-a306-4534-8d8d-3b3a003dde04] Powered off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1668.032018] env[62519]: DEBUG nova.virt.hardware [None req-64171712-43a0-4e02-85db-f982475f1310 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T08:12:36Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='95834530-4e39-49f6-a893-5ac841902368',id=37,is_public=True,memory_mb=192,name='tempest-test_resize_flavor_-2145697929',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=15793716-f1d9-4a86-9030-717adf498693,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1668.032018] env[62519]: DEBUG nova.virt.hardware [None req-64171712-43a0-4e02-85db-f982475f1310 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Flavor limits 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1668.032018] env[62519]: DEBUG nova.virt.hardware [None req-64171712-43a0-4e02-85db-f982475f1310 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Image limits 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1668.032018] env[62519]: DEBUG nova.virt.hardware [None req-64171712-43a0-4e02-85db-f982475f1310 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Flavor pref 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1668.032018] env[62519]: DEBUG nova.virt.hardware [None req-64171712-43a0-4e02-85db-f982475f1310 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Image pref 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1668.032390] env[62519]: DEBUG nova.virt.hardware [None req-64171712-43a0-4e02-85db-f982475f1310 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1668.032594] env[62519]: DEBUG nova.virt.hardware [None req-64171712-43a0-4e02-85db-f982475f1310 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1668.032755] env[62519]: DEBUG nova.virt.hardware [None req-64171712-43a0-4e02-85db-f982475f1310 tempest-MigrationsAdminTest-1925339216 
tempest-MigrationsAdminTest-1925339216-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62519) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1668.033092] env[62519]: DEBUG nova.virt.hardware [None req-64171712-43a0-4e02-85db-f982475f1310 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Got 1 possible topologies {{(pid=62519) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1668.033092] env[62519]: DEBUG nova.virt.hardware [None req-64171712-43a0-4e02-85db-f982475f1310 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1668.033247] env[62519]: DEBUG nova.virt.hardware [None req-64171712-43a0-4e02-85db-f982475f1310 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1668.038277] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-152c2874-c5a9-4939-a06c-7ba037c6e5b1 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1668.058579] env[62519]: DEBUG oslo_vmware.api [None req-64171712-43a0-4e02-85db-f982475f1310 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Waiting for the task: (returnval){ [ 1668.058579] env[62519]: value = "task-1802681" [ 1668.058579] env[62519]: _type = "Task" [ 1668.058579] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1668.070200] env[62519]: DEBUG oslo_vmware.api [None req-64171712-43a0-4e02-85db-f982475f1310 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Task: {'id': task-1802681, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1668.090593] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8d2ab86-da44-47e2-b83d-34f5376ac39a {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1668.098085] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92d6effe-0e9c-43a7-adaf-71704dd8e10d {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1668.131375] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d809127d-8920-48fa-821b-0e742cf7f416 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1668.139417] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34421483-c45c-4e60-b434-188e2ed640b5 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1668.144232] env[62519]: DEBUG nova.compute.manager [None req-41ebc291-9b01-4875-8d9f-5c4226f038f6 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] [instance: 40507d8c-8f30-45d4-9c65-03f8b1271afb] Start building block device mappings for instance. {{(pid=62519) _build_resources /opt/stack/nova/nova/compute/manager.py:2873}} [ 1668.157025] env[62519]: DEBUG nova.compute.provider_tree [None req-c88119e7-b14e-4315-8bdf-c1e98eca84f2 tempest-TenantUsagesTestJSON-1732852836 tempest-TenantUsagesTestJSON-1732852836-project-member] Inventory has not changed in ProviderTree for provider: f8ca0d98-9158-4b85-ae0e-b106f966dd44 {{(pid=62519) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1668.208230] env[62519]: DEBUG nova.network.neutron [None req-41ebc291-9b01-4875-8d9f-5c4226f038f6 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] [instance: 40507d8c-8f30-45d4-9c65-03f8b1271afb] Successfully created port: 3d7bc0c1-8f36-42c7-b25f-89e8c729e3a3 {{(pid=62519) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1668.273296] env[62519]: DEBUG oslo_concurrency.lockutils [None req-1ed07ba8-f069-4974-ad77-6c6cc84b3f48 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1668.569620] env[62519]: DEBUG oslo_vmware.api [None req-64171712-43a0-4e02-85db-f982475f1310 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Task: {'id': task-1802681, 'name': ReconfigVM_Task, 'duration_secs': 0.243406} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1668.570463] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d992adfb-6193-4b59-8252-16e7b98f0311 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1668.591199] env[62519]: DEBUG nova.virt.hardware [None req-64171712-43a0-4e02-85db-f982475f1310 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T08:12:36Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='95834530-4e39-49f6-a893-5ac841902368',id=37,is_public=True,memory_mb=192,name='tempest-test_resize_flavor_-2145697929',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=15793716-f1d9-4a86-9030-717adf498693,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1668.591435] env[62519]: DEBUG nova.virt.hardware [None req-64171712-43a0-4e02-85db-f982475f1310 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Flavor limits 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1668.591587] env[62519]: DEBUG nova.virt.hardware [None req-64171712-43a0-4e02-85db-f982475f1310 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Image limits 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1668.591768] env[62519]: DEBUG nova.virt.hardware [None req-64171712-43a0-4e02-85db-f982475f1310 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Flavor pref 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1668.591914] env[62519]: DEBUG nova.virt.hardware [None req-64171712-43a0-4e02-85db-f982475f1310 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Image pref 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1668.592071] env[62519]: DEBUG nova.virt.hardware [None req-64171712-43a0-4e02-85db-f982475f1310 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1668.592274] env[62519]: DEBUG nova.virt.hardware [None req-64171712-43a0-4e02-85db-f982475f1310 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1668.592431] env[62519]: DEBUG nova.virt.hardware [None req-64171712-43a0-4e02-85db-f982475f1310 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Build topologies for 1 vcpu(s) 
1:1:1 {{(pid=62519) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1668.592587] env[62519]: DEBUG nova.virt.hardware [None req-64171712-43a0-4e02-85db-f982475f1310 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Got 1 possible topologies {{(pid=62519) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1668.592782] env[62519]: DEBUG nova.virt.hardware [None req-64171712-43a0-4e02-85db-f982475f1310 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1668.592952] env[62519]: DEBUG nova.virt.hardware [None req-64171712-43a0-4e02-85db-f982475f1310 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1668.593911] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e413f3ea-d4ba-4c7a-88de-dfbb148c55b4 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1668.600034] env[62519]: DEBUG oslo_vmware.api [None req-64171712-43a0-4e02-85db-f982475f1310 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Waiting for the task: (returnval){ [ 1668.600034] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]521b49b8-33e2-8f21-1f35-d21315d97e7b" [ 1668.600034] env[62519]: _type = "Task" [ 1668.600034] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1668.609167] env[62519]: DEBUG oslo_vmware.api [None req-64171712-43a0-4e02-85db-f982475f1310 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]521b49b8-33e2-8f21-1f35-d21315d97e7b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1668.660175] env[62519]: DEBUG nova.scheduler.client.report [None req-c88119e7-b14e-4315-8bdf-c1e98eca84f2 tempest-TenantUsagesTestJSON-1732852836 tempest-TenantUsagesTestJSON-1732852836-project-member] Inventory has not changed for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 157, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1669.111392] env[62519]: DEBUG oslo_vmware.api [None req-64171712-43a0-4e02-85db-f982475f1310 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]521b49b8-33e2-8f21-1f35-d21315d97e7b, 'name': SearchDatastore_Task, 'duration_secs': 0.008792} completed successfully. 
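The inventory payload repeated in the report-client records here is what Placement uses for capacity math: the schedulable amount of each resource class is (total - reserved) * allocation_ratio, which is why this 48-core node can accept 192 vCPUs of allocations. A small illustrative calculation (not Nova or Placement code) using the values from the log:

    # Inventory as reported for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44,
    # trimmed to the fields that matter for capacity.
    inventory = {
        'VCPU': {'total': 48, 'reserved': 0, 'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB': {'total': 400, 'reserved': 0, 'allocation_ratio': 1.0},
    }

    for rc, inv in inventory.items():
        capacity = int((inv['total'] - inv['reserved']) * inv['allocation_ratio'])
        print(rc, capacity)  # VCPU 192, MEMORY_MB 196078, DISK_GB 400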
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1669.118018] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-64171712-43a0-4e02-85db-f982475f1310 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] [instance: a1551278-a306-4534-8d8d-3b3a003dde04] Reconfiguring VM instance instance-0000002c to detach disk 2000 {{(pid=62519) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1669.118018] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0438fd1c-1961-4ba8-8620-07309a458212 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1669.130291] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Acquiring lock "refresh_cache-11d4a010-959f-4f53-94dc-7499007612ad" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1669.130437] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Acquired lock "refresh_cache-11d4a010-959f-4f53-94dc-7499007612ad" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1669.130579] env[62519]: DEBUG nova.network.neutron [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] [instance: 11d4a010-959f-4f53-94dc-7499007612ad] Forcefully refreshing network info cache for instance {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2061}} [ 1669.137513] env[62519]: DEBUG oslo_vmware.api [None req-64171712-43a0-4e02-85db-f982475f1310 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Waiting for the task: (returnval){ [ 1669.137513] env[62519]: value = "task-1802682" [ 1669.137513] env[62519]: _type = "Task" [ 1669.137513] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1669.148319] env[62519]: DEBUG oslo_vmware.api [None req-64171712-43a0-4e02-85db-f982475f1310 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Task: {'id': task-1802682, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1669.152503] env[62519]: DEBUG nova.compute.manager [None req-41ebc291-9b01-4875-8d9f-5c4226f038f6 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] [instance: 40507d8c-8f30-45d4-9c65-03f8b1271afb] Start spawning the instance on the hypervisor. 
{{(pid=62519) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2647}} [ 1669.165927] env[62519]: DEBUG oslo_concurrency.lockutils [None req-c88119e7-b14e-4315-8bdf-c1e98eca84f2 tempest-TenantUsagesTestJSON-1732852836 tempest-TenantUsagesTestJSON-1732852836-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.035s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1669.169055] env[62519]: DEBUG oslo_concurrency.lockutils [None req-f8b1449c-fa5c-4ec6-8d24-81a01f4c8031 tempest-ServersTestMultiNic-797441404 tempest-ServersTestMultiNic-797441404-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 30.117s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1669.169055] env[62519]: DEBUG nova.objects.instance [None req-f8b1449c-fa5c-4ec6-8d24-81a01f4c8031 tempest-ServersTestMultiNic-797441404 tempest-ServersTestMultiNic-797441404-project-member] Lazy-loading 'resources' on Instance uuid d5b6b7b4-ebc4-4d3d-8e16-bf0a4e974d67 {{(pid=62519) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1669.181895] env[62519]: DEBUG nova.virt.hardware [None req-41ebc291-9b01-4875-8d9f-5c4226f038f6 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T07:56:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T07:55:55Z,direct_url=,disk_format='vmdk',id=15793716-f1d9-4a86-9030-717adf498693,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4069337684d348e0a1ab4eb6a1a2b14d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T07:55:56Z,virtual_size=,visibility=), allow threads: False {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1669.182103] env[62519]: DEBUG nova.virt.hardware [None req-41ebc291-9b01-4875-8d9f-5c4226f038f6 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Flavor limits 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1669.182156] env[62519]: DEBUG nova.virt.hardware [None req-41ebc291-9b01-4875-8d9f-5c4226f038f6 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Image limits 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1669.182323] env[62519]: DEBUG nova.virt.hardware [None req-41ebc291-9b01-4875-8d9f-5c4226f038f6 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Flavor pref 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1669.182478] env[62519]: DEBUG nova.virt.hardware [None req-41ebc291-9b01-4875-8d9f-5c4226f038f6 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Image pref 0:0:0 {{(pid=62519) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:396}} [ 1669.182621] env[62519]: DEBUG nova.virt.hardware [None req-41ebc291-9b01-4875-8d9f-5c4226f038f6 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1669.182873] env[62519]: DEBUG nova.virt.hardware [None req-41ebc291-9b01-4875-8d9f-5c4226f038f6 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1669.183088] env[62519]: DEBUG nova.virt.hardware [None req-41ebc291-9b01-4875-8d9f-5c4226f038f6 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62519) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1669.183264] env[62519]: DEBUG nova.virt.hardware [None req-41ebc291-9b01-4875-8d9f-5c4226f038f6 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Got 1 possible topologies {{(pid=62519) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1669.183451] env[62519]: DEBUG nova.virt.hardware [None req-41ebc291-9b01-4875-8d9f-5c4226f038f6 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1669.183611] env[62519]: DEBUG nova.virt.hardware [None req-41ebc291-9b01-4875-8d9f-5c4226f038f6 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1669.184692] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b721f9c8-0e14-495b-be76-9f41b48b8733 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1669.192446] env[62519]: INFO nova.scheduler.client.report [None req-c88119e7-b14e-4315-8bdf-c1e98eca84f2 tempest-TenantUsagesTestJSON-1732852836 tempest-TenantUsagesTestJSON-1732852836-project-member] Deleted allocations for instance 540167be-5295-4e28-9b25-16317746dd0e [ 1669.194430] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c27ba261-9710-4161-a233-6931ed945b1a {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1669.613892] env[62519]: DEBUG nova.compute.manager [req-fb159e40-9be0-4880-8058-c50e14cf400e req-580872f8-bf08-47bf-b18d-dca2b239ed44 service nova] [instance: 40507d8c-8f30-45d4-9c65-03f8b1271afb] Received event network-vif-plugged-3d7bc0c1-8f36-42c7-b25f-89e8c729e3a3 {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1669.614137] env[62519]: DEBUG oslo_concurrency.lockutils [req-fb159e40-9be0-4880-8058-c50e14cf400e req-580872f8-bf08-47bf-b18d-dca2b239ed44 service nova] Acquiring lock 
"40507d8c-8f30-45d4-9c65-03f8b1271afb-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1669.614351] env[62519]: DEBUG oslo_concurrency.lockutils [req-fb159e40-9be0-4880-8058-c50e14cf400e req-580872f8-bf08-47bf-b18d-dca2b239ed44 service nova] Lock "40507d8c-8f30-45d4-9c65-03f8b1271afb-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1669.614516] env[62519]: DEBUG oslo_concurrency.lockutils [req-fb159e40-9be0-4880-8058-c50e14cf400e req-580872f8-bf08-47bf-b18d-dca2b239ed44 service nova] Lock "40507d8c-8f30-45d4-9c65-03f8b1271afb-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1669.614703] env[62519]: DEBUG nova.compute.manager [req-fb159e40-9be0-4880-8058-c50e14cf400e req-580872f8-bf08-47bf-b18d-dca2b239ed44 service nova] [instance: 40507d8c-8f30-45d4-9c65-03f8b1271afb] No waiting events found dispatching network-vif-plugged-3d7bc0c1-8f36-42c7-b25f-89e8c729e3a3 {{(pid=62519) pop_instance_event /opt/stack/nova/nova/compute/manager.py:323}} [ 1669.614887] env[62519]: WARNING nova.compute.manager [req-fb159e40-9be0-4880-8058-c50e14cf400e req-580872f8-bf08-47bf-b18d-dca2b239ed44 service nova] [instance: 40507d8c-8f30-45d4-9c65-03f8b1271afb] Received unexpected event network-vif-plugged-3d7bc0c1-8f36-42c7-b25f-89e8c729e3a3 for instance with vm_state building and task_state spawning. [ 1669.653518] env[62519]: DEBUG oslo_vmware.api [None req-64171712-43a0-4e02-85db-f982475f1310 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Task: {'id': task-1802682, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1669.703870] env[62519]: DEBUG oslo_concurrency.lockutils [None req-c88119e7-b14e-4315-8bdf-c1e98eca84f2 tempest-TenantUsagesTestJSON-1732852836 tempest-TenantUsagesTestJSON-1732852836-project-member] Lock "540167be-5295-4e28-9b25-16317746dd0e" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 36.782s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1669.732038] env[62519]: DEBUG nova.network.neutron [None req-41ebc291-9b01-4875-8d9f-5c4226f038f6 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] [instance: 40507d8c-8f30-45d4-9c65-03f8b1271afb] Successfully updated port: 3d7bc0c1-8f36-42c7-b25f-89e8c729e3a3 {{(pid=62519) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1670.031355] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69b27b91-cdac-40cd-9897-0f019f45bba0 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1670.039415] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a4bdd09-02f7-4400-bd90-9329b6d68867 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1670.069361] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-123eada1-0555-485a-bbce-0e4daa2bff01 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1670.076861] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9ebced4-b9ca-48c3-b59d-d65ba7a30575 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1670.089900] env[62519]: DEBUG nova.compute.provider_tree [None req-f8b1449c-fa5c-4ec6-8d24-81a01f4c8031 tempest-ServersTestMultiNic-797441404 tempest-ServersTestMultiNic-797441404-project-member] Inventory has not changed in ProviderTree for provider: f8ca0d98-9158-4b85-ae0e-b106f966dd44 {{(pid=62519) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1670.147292] env[62519]: DEBUG oslo_vmware.api [None req-64171712-43a0-4e02-85db-f982475f1310 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Task: {'id': task-1802682, 'name': ReconfigVM_Task} progress is 99%. 
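The "Acquiring lock" / "acquired ... waited" / "released ... held" triples threaded through these records come from oslo.concurrency's lockutils, which Nova uses both as a decorator (the "compute_resources" lock serializing the resource tracker) and as a context manager (the per-instance "refresh_cache-<uuid>" lock taken before rebuilding network info). A minimal sketch of both forms, with illustrative function names:

    from oslo_concurrency import lockutils

    @lockutils.synchronized('compute_resources')
    def claim_resources(instance):
        # Runs serialized against other holders of the same in-process lock;
        # time spent blocked here shows up in the log as ":: waited N.NNNs".
        pass

    def refresh_cache(instance_uuid):
        # Context-manager form, mirroring the per-instance lock name
        # used around instance network-info cache refreshes.
        with lockutils.lock('refresh_cache-%s' % instance_uuid):
            pass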
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1670.234056] env[62519]: DEBUG oslo_concurrency.lockutils [None req-41ebc291-9b01-4875-8d9f-5c4226f038f6 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Acquiring lock "refresh_cache-40507d8c-8f30-45d4-9c65-03f8b1271afb" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1670.234238] env[62519]: DEBUG oslo_concurrency.lockutils [None req-41ebc291-9b01-4875-8d9f-5c4226f038f6 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Acquired lock "refresh_cache-40507d8c-8f30-45d4-9c65-03f8b1271afb" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1670.234403] env[62519]: DEBUG nova.network.neutron [None req-41ebc291-9b01-4875-8d9f-5c4226f038f6 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] [instance: 40507d8c-8f30-45d4-9c65-03f8b1271afb] Building network info cache for instance {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1670.402321] env[62519]: DEBUG nova.network.neutron [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] [instance: 11d4a010-959f-4f53-94dc-7499007612ad] Updating instance_info_cache with network_info: [{"id": "ca1a3bbf-3f10-4a96-a67d-b77464ab25e7", "address": "fa:16:3e:cc:ef:21", "network": {"id": "ddacabe4-9723-4a0b-9642-4c3ca54cd05f", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1355796152-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.215", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "549cc35f5ff249f6bf22c67872883db0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bc1e16db-ad3b-4b7f-ab64-4609c87abac0", "external-id": "nsx-vlan-transportzone-500", "segmentation_id": 500, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapca1a3bbf-3f", "ovs_interfaceid": "ca1a3bbf-3f10-4a96-a67d-b77464ab25e7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1670.593023] env[62519]: DEBUG nova.scheduler.client.report [None req-f8b1449c-fa5c-4ec6-8d24-81a01f4c8031 tempest-ServersTestMultiNic-797441404 tempest-ServersTestMultiNic-797441404-project-member] Inventory has not changed for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 157, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1670.650553] 
env[62519]: DEBUG oslo_vmware.api [None req-64171712-43a0-4e02-85db-f982475f1310 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Task: {'id': task-1802682, 'name': ReconfigVM_Task, 'duration_secs': 1.139963} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1670.650898] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-64171712-43a0-4e02-85db-f982475f1310 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] [instance: a1551278-a306-4534-8d8d-3b3a003dde04] Reconfigured VM instance instance-0000002c to detach disk 2000 {{(pid=62519) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1670.651734] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9f3e46e-1569-459b-9401-38a25f691144 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1670.674189] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-64171712-43a0-4e02-85db-f982475f1310 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] [instance: a1551278-a306-4534-8d8d-3b3a003dde04] Reconfiguring VM instance instance-0000002c to attach disk [datastore1] a1551278-a306-4534-8d8d-3b3a003dde04/a1551278-a306-4534-8d8d-3b3a003dde04.vmdk or device None with type thin {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1670.675075] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1b276c96-1bdd-4a5f-88ff-6171c144db6e {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1670.695347] env[62519]: DEBUG oslo_vmware.api [None req-64171712-43a0-4e02-85db-f982475f1310 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Waiting for the task: (returnval){ [ 1670.695347] env[62519]: value = "task-1802683" [ 1670.695347] env[62519]: _type = "Task" [ 1670.695347] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1670.703498] env[62519]: DEBUG oslo_vmware.api [None req-64171712-43a0-4e02-85db-f982475f1310 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Task: {'id': task-1802683, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1670.769224] env[62519]: DEBUG nova.network.neutron [None req-41ebc291-9b01-4875-8d9f-5c4226f038f6 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] [instance: 40507d8c-8f30-45d4-9c65-03f8b1271afb] Instance cache missing network info. 
{{(pid=62519) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1670.907747] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Releasing lock "refresh_cache-11d4a010-959f-4f53-94dc-7499007612ad" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1670.907890] env[62519]: DEBUG nova.compute.manager [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] [instance: 11d4a010-959f-4f53-94dc-7499007612ad] Updated the network info_cache for instance {{(pid=62519) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10489}} [ 1670.908112] env[62519]: DEBUG oslo_service.periodic_task [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62519) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1670.908313] env[62519]: DEBUG oslo_service.periodic_task [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62519) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1670.908391] env[62519]: DEBUG oslo_service.periodic_task [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62519) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1670.908544] env[62519]: DEBUG oslo_service.periodic_task [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62519) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1670.908694] env[62519]: DEBUG oslo_service.periodic_task [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62519) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1670.908860] env[62519]: DEBUG oslo_service.periodic_task [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62519) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1670.909024] env[62519]: DEBUG nova.compute.manager [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=62519) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11037}} [ 1670.909150] env[62519]: DEBUG oslo_service.periodic_task [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Running periodic task ComputeManager.update_available_resource {{(pid=62519) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1670.914839] env[62519]: DEBUG nova.network.neutron [None req-41ebc291-9b01-4875-8d9f-5c4226f038f6 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] [instance: 40507d8c-8f30-45d4-9c65-03f8b1271afb] Updating instance_info_cache with network_info: [{"id": "3d7bc0c1-8f36-42c7-b25f-89e8c729e3a3", "address": "fa:16:3e:5d:4c:fc", "network": {"id": "f9454e9e-4102-407f-acc0-7afee44ec876", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-684678313-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "683d60927fdf424386ffcfaa344a7af6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "11b669be-fb26-4ef8-bdb6-c77ab9d06daf", "external-id": "nsx-vlan-transportzone-633", "segmentation_id": 633, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3d7bc0c1-8f", "ovs_interfaceid": "3d7bc0c1-8f36-42c7-b25f-89e8c729e3a3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1671.098018] env[62519]: DEBUG oslo_concurrency.lockutils [None req-f8b1449c-fa5c-4ec6-8d24-81a01f4c8031 tempest-ServersTestMultiNic-797441404 tempest-ServersTestMultiNic-797441404-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.930s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1671.100677] env[62519]: DEBUG oslo_concurrency.lockutils [None req-cd6a10f9-f40f-4798-8fe6-7c97937bac88 tempest-SecurityGroupsTestJSON-38868017 tempest-SecurityGroupsTestJSON-38868017-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 31.326s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1671.100946] env[62519]: DEBUG nova.objects.instance [None req-cd6a10f9-f40f-4798-8fe6-7c97937bac88 tempest-SecurityGroupsTestJSON-38868017 tempest-SecurityGroupsTestJSON-38868017-project-member] Lazy-loading 'resources' on Instance uuid 4a29bff8-050a-4ad5-9d06-3a59c40b97ee {{(pid=62519) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1671.121050] env[62519]: INFO nova.scheduler.client.report [None req-f8b1449c-fa5c-4ec6-8d24-81a01f4c8031 tempest-ServersTestMultiNic-797441404 tempest-ServersTestMultiNic-797441404-project-member] Deleted allocations for instance d5b6b7b4-ebc4-4d3d-8e16-bf0a4e974d67 [ 1671.205901] env[62519]: DEBUG oslo_vmware.api [None req-64171712-43a0-4e02-85db-f982475f1310 
tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Task: {'id': task-1802683, 'name': ReconfigVM_Task, 'duration_secs': 0.293974} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1671.206220] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-64171712-43a0-4e02-85db-f982475f1310 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] [instance: a1551278-a306-4534-8d8d-3b3a003dde04] Reconfigured VM instance instance-0000002c to attach disk [datastore1] a1551278-a306-4534-8d8d-3b3a003dde04/a1551278-a306-4534-8d8d-3b3a003dde04.vmdk or device None with type thin {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1671.206933] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a428a6e-3578-4c0b-a641-1f10436403a8 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1671.226085] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-446c4beb-3c35-4d74-88d6-7239a7c9e03c {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1671.247182] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f03f59d-5259-4963-8ebe-d25aea7078e1 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1671.267500] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca857e5f-a172-402e-86b7-95d4b1c7bc77 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1671.275397] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-64171712-43a0-4e02-85db-f982475f1310 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] [instance: a1551278-a306-4534-8d8d-3b3a003dde04] Powering on the VM {{(pid=62519) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1671.275745] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-6df289b6-8976-4812-a110-8e2db4983e8f {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1671.281832] env[62519]: DEBUG oslo_vmware.api [None req-64171712-43a0-4e02-85db-f982475f1310 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Waiting for the task: (returnval){ [ 1671.281832] env[62519]: value = "task-1802684" [ 1671.281832] env[62519]: _type = "Task" [ 1671.281832] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1671.292507] env[62519]: DEBUG oslo_vmware.api [None req-64171712-43a0-4e02-85db-f982475f1310 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Task: {'id': task-1802684, 'name': PowerOnVM_Task} progress is 0%. 
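The periodic-task records a little earlier (ComputeManager._heal_instance_info_cache, _poll_unconfirmed_resizes, update_available_resource, and so on) are dispatched by oslo.service's periodic_task machinery: methods decorated on a PeriodicTasks subclass are collected and run whenever their interval has elapsed. A minimal sketch of how such tasks are declared and driven, assuming a plain oslo.config setup; the class and method names are illustrative, not Nova's manager:

    from oslo_config import cfg
    from oslo_service import periodic_task

    CONF = cfg.CONF

    class ExampleManager(periodic_task.PeriodicTasks):
        def __init__(self):
            super().__init__(CONF)

        @periodic_task.periodic_task(spacing=60)
        def _heal_instance_info_cache(self, context):
            # Invoked roughly every `spacing` seconds once the service loop
            # drives run_periodic_tasks(); each dispatch produces a
            # "Running periodic task ..." record like the ones above.
            pass

    def tick(manager, context):
        # Called from the service's timer; runs every task whose interval
        # has elapsed and returns how long to idle before the next check.
        return manager.run_periodic_tasks(context)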
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1671.411885] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1671.418689] env[62519]: DEBUG oslo_concurrency.lockutils [None req-41ebc291-9b01-4875-8d9f-5c4226f038f6 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Releasing lock "refresh_cache-40507d8c-8f30-45d4-9c65-03f8b1271afb" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1671.419108] env[62519]: DEBUG nova.compute.manager [None req-41ebc291-9b01-4875-8d9f-5c4226f038f6 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] [instance: 40507d8c-8f30-45d4-9c65-03f8b1271afb] Instance network_info: |[{"id": "3d7bc0c1-8f36-42c7-b25f-89e8c729e3a3", "address": "fa:16:3e:5d:4c:fc", "network": {"id": "f9454e9e-4102-407f-acc0-7afee44ec876", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-684678313-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "683d60927fdf424386ffcfaa344a7af6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "11b669be-fb26-4ef8-bdb6-c77ab9d06daf", "external-id": "nsx-vlan-transportzone-633", "segmentation_id": 633, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3d7bc0c1-8f", "ovs_interfaceid": "3d7bc0c1-8f36-42c7-b25f-89e8c729e3a3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62519) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2004}} [ 1671.419555] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-41ebc291-9b01-4875-8d9f-5c4226f038f6 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] [instance: 40507d8c-8f30-45d4-9c65-03f8b1271afb] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:5d:4c:fc', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '11b669be-fb26-4ef8-bdb6-c77ab9d06daf', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '3d7bc0c1-8f36-42c7-b25f-89e8c729e3a3', 'vif_model': 'vmxnet3'}] {{(pid=62519) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1671.427601] env[62519]: DEBUG oslo.service.loopingcall [None req-41ebc291-9b01-4875-8d9f-5c4226f038f6 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62519) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1671.427863] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 40507d8c-8f30-45d4-9c65-03f8b1271afb] Creating VM on the ESX host {{(pid=62519) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1671.428111] env[62519]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-28e111d3-ea6a-4e1f-bf23-2308f350c91e {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1671.450149] env[62519]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1671.450149] env[62519]: value = "task-1802685" [ 1671.450149] env[62519]: _type = "Task" [ 1671.450149] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1671.458818] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1802685, 'name': CreateVM_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1671.630248] env[62519]: DEBUG oslo_concurrency.lockutils [None req-f8b1449c-fa5c-4ec6-8d24-81a01f4c8031 tempest-ServersTestMultiNic-797441404 tempest-ServersTestMultiNic-797441404-project-member] Lock "d5b6b7b4-ebc4-4d3d-8e16-bf0a4e974d67" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 37.141s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1671.641244] env[62519]: DEBUG nova.compute.manager [req-bc237fdc-232e-45dc-9343-e9df852028d3 req-09034e59-99f5-46f5-b8bb-90a423bfa6ee service nova] [instance: 40507d8c-8f30-45d4-9c65-03f8b1271afb] Received event network-changed-3d7bc0c1-8f36-42c7-b25f-89e8c729e3a3 {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1671.641798] env[62519]: DEBUG nova.compute.manager [req-bc237fdc-232e-45dc-9343-e9df852028d3 req-09034e59-99f5-46f5-b8bb-90a423bfa6ee service nova] [instance: 40507d8c-8f30-45d4-9c65-03f8b1271afb] Refreshing instance network info cache due to event network-changed-3d7bc0c1-8f36-42c7-b25f-89e8c729e3a3. 
{{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11628}} [ 1671.642040] env[62519]: DEBUG oslo_concurrency.lockutils [req-bc237fdc-232e-45dc-9343-e9df852028d3 req-09034e59-99f5-46f5-b8bb-90a423bfa6ee service nova] Acquiring lock "refresh_cache-40507d8c-8f30-45d4-9c65-03f8b1271afb" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1671.642196] env[62519]: DEBUG oslo_concurrency.lockutils [req-bc237fdc-232e-45dc-9343-e9df852028d3 req-09034e59-99f5-46f5-b8bb-90a423bfa6ee service nova] Acquired lock "refresh_cache-40507d8c-8f30-45d4-9c65-03f8b1271afb" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1671.642352] env[62519]: DEBUG nova.network.neutron [req-bc237fdc-232e-45dc-9343-e9df852028d3 req-09034e59-99f5-46f5-b8bb-90a423bfa6ee service nova] [instance: 40507d8c-8f30-45d4-9c65-03f8b1271afb] Refreshing network info cache for port 3d7bc0c1-8f36-42c7-b25f-89e8c729e3a3 {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1671.791716] env[62519]: DEBUG oslo_vmware.api [None req-64171712-43a0-4e02-85db-f982475f1310 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Task: {'id': task-1802684, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1671.963760] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1802685, 'name': CreateVM_Task, 'duration_secs': 0.488408} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1671.964618] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 40507d8c-8f30-45d4-9c65-03f8b1271afb] Created VM on the ESX host {{(pid=62519) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1671.965642] env[62519]: DEBUG oslo_concurrency.lockutils [None req-41ebc291-9b01-4875-8d9f-5c4226f038f6 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1671.965912] env[62519]: DEBUG oslo_concurrency.lockutils [None req-41ebc291-9b01-4875-8d9f-5c4226f038f6 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Acquired lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1671.966867] env[62519]: DEBUG oslo_concurrency.lockutils [None req-41ebc291-9b01-4875-8d9f-5c4226f038f6 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1671.966867] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-53c7f6ec-40ed-47e5-b99e-0db81db0b8c2 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1671.974723] env[62519]: DEBUG oslo_vmware.api [None req-41ebc291-9b01-4875-8d9f-5c4226f038f6 
tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Waiting for the task: (returnval){ [ 1671.974723] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]52433955-5b23-83ad-5bff-801c4f2ccc04" [ 1671.974723] env[62519]: _type = "Task" [ 1671.974723] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1671.983668] env[62519]: DEBUG oslo_vmware.api [None req-41ebc291-9b01-4875-8d9f-5c4226f038f6 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52433955-5b23-83ad-5bff-801c4f2ccc04, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1672.049779] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9e7197b-688a-4364-8302-47df90e8a7a8 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1672.057303] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b330f4c7-4a7d-4c11-8641-4dfa510bd339 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1672.090037] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8325f7e2-e4e3-4331-b027-d8e488c16916 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1672.098487] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5612b6e8-2e3c-4590-bf8f-8634f1d471a1 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1672.112506] env[62519]: DEBUG nova.compute.provider_tree [None req-cd6a10f9-f40f-4798-8fe6-7c97937bac88 tempest-SecurityGroupsTestJSON-38868017 tempest-SecurityGroupsTestJSON-38868017-project-member] Inventory has not changed in ProviderTree for provider: f8ca0d98-9158-4b85-ae0e-b106f966dd44 {{(pid=62519) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1672.165445] env[62519]: DEBUG oslo_concurrency.lockutils [None req-9d0f7f31-7c2c-4fa3-bc3d-67c656ee891c tempest-ServersAaction247Test-895786641 tempest-ServersAaction247Test-895786641-project-member] Acquiring lock "37689ec5-4bc8-4361-9e43-00529e2b5538" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1672.165734] env[62519]: DEBUG oslo_concurrency.lockutils [None req-9d0f7f31-7c2c-4fa3-bc3d-67c656ee891c tempest-ServersAaction247Test-895786641 tempest-ServersAaction247Test-895786641-project-member] Lock "37689ec5-4bc8-4361-9e43-00529e2b5538" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1672.296034] env[62519]: DEBUG oslo_vmware.api [None req-64171712-43a0-4e02-85db-f982475f1310 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Task: {'id': task-1802684, 'name': PowerOnVM_Task, 'duration_secs': 0.595383} 
completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1672.298135] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-64171712-43a0-4e02-85db-f982475f1310 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] [instance: a1551278-a306-4534-8d8d-3b3a003dde04] Powered on the VM {{(pid=62519) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1672.468855] env[62519]: DEBUG nova.network.neutron [req-bc237fdc-232e-45dc-9343-e9df852028d3 req-09034e59-99f5-46f5-b8bb-90a423bfa6ee service nova] [instance: 40507d8c-8f30-45d4-9c65-03f8b1271afb] Updated VIF entry in instance network info cache for port 3d7bc0c1-8f36-42c7-b25f-89e8c729e3a3. {{(pid=62519) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1672.469302] env[62519]: DEBUG nova.network.neutron [req-bc237fdc-232e-45dc-9343-e9df852028d3 req-09034e59-99f5-46f5-b8bb-90a423bfa6ee service nova] [instance: 40507d8c-8f30-45d4-9c65-03f8b1271afb] Updating instance_info_cache with network_info: [{"id": "3d7bc0c1-8f36-42c7-b25f-89e8c729e3a3", "address": "fa:16:3e:5d:4c:fc", "network": {"id": "f9454e9e-4102-407f-acc0-7afee44ec876", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-684678313-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "683d60927fdf424386ffcfaa344a7af6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "11b669be-fb26-4ef8-bdb6-c77ab9d06daf", "external-id": "nsx-vlan-transportzone-633", "segmentation_id": 633, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3d7bc0c1-8f", "ovs_interfaceid": "3d7bc0c1-8f36-42c7-b25f-89e8c729e3a3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1672.485757] env[62519]: DEBUG oslo_vmware.api [None req-41ebc291-9b01-4875-8d9f-5c4226f038f6 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52433955-5b23-83ad-5bff-801c4f2ccc04, 'name': SearchDatastore_Task, 'duration_secs': 0.010302} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1672.486678] env[62519]: DEBUG oslo_concurrency.lockutils [None req-41ebc291-9b01-4875-8d9f-5c4226f038f6 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Releasing lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1672.486907] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-41ebc291-9b01-4875-8d9f-5c4226f038f6 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] [instance: 40507d8c-8f30-45d4-9c65-03f8b1271afb] Processing image 15793716-f1d9-4a86-9030-717adf498693 {{(pid=62519) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1672.487153] env[62519]: DEBUG oslo_concurrency.lockutils [None req-41ebc291-9b01-4875-8d9f-5c4226f038f6 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1672.487298] env[62519]: DEBUG oslo_concurrency.lockutils [None req-41ebc291-9b01-4875-8d9f-5c4226f038f6 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Acquired lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1672.487471] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-41ebc291-9b01-4875-8d9f-5c4226f038f6 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62519) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1672.488161] env[62519]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-5f9533d8-611e-43fd-8ee1-c6ee461cd2c6 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1672.499339] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-41ebc291-9b01-4875-8d9f-5c4226f038f6 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62519) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1672.499530] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-41ebc291-9b01-4875-8d9f-5c4226f038f6 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62519) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1672.500303] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-043c07dd-ecf3-4203-b0ce-e75dacb92267 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1672.506688] env[62519]: DEBUG oslo_vmware.api [None req-41ebc291-9b01-4875-8d9f-5c4226f038f6 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Waiting for the task: (returnval){ [ 1672.506688] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]5207419a-e0f0-e7e1-254d-a386edc77d37" [ 1672.506688] env[62519]: _type = "Task" [ 1672.506688] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1672.513908] env[62519]: DEBUG oslo_vmware.api [None req-41ebc291-9b01-4875-8d9f-5c4226f038f6 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]5207419a-e0f0-e7e1-254d-a386edc77d37, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1672.616057] env[62519]: DEBUG nova.scheduler.client.report [None req-cd6a10f9-f40f-4798-8fe6-7c97937bac88 tempest-SecurityGroupsTestJSON-38868017 tempest-SecurityGroupsTestJSON-38868017-project-member] Inventory has not changed for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 157, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1672.669026] env[62519]: DEBUG nova.compute.manager [None req-9d0f7f31-7c2c-4fa3-bc3d-67c656ee891c tempest-ServersAaction247Test-895786641 tempest-ServersAaction247Test-895786641-project-member] [instance: 37689ec5-4bc8-4361-9e43-00529e2b5538] Starting instance... {{(pid=62519) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2441}} [ 1672.974490] env[62519]: DEBUG oslo_concurrency.lockutils [req-bc237fdc-232e-45dc-9343-e9df852028d3 req-09034e59-99f5-46f5-b8bb-90a423bfa6ee service nova] Releasing lock "refresh_cache-40507d8c-8f30-45d4-9c65-03f8b1271afb" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1673.016723] env[62519]: DEBUG oslo_vmware.api [None req-41ebc291-9b01-4875-8d9f-5c4226f038f6 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]5207419a-e0f0-e7e1-254d-a386edc77d37, 'name': SearchDatastore_Task, 'duration_secs': 0.009942} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1673.017516] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7ac3e8ec-6a89-496e-967e-b86af6a6d259 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1673.026393] env[62519]: DEBUG oslo_vmware.api [None req-41ebc291-9b01-4875-8d9f-5c4226f038f6 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Waiting for the task: (returnval){ [ 1673.026393] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]527cbf30-2ce9-031f-a83f-84673c1ecb46" [ 1673.026393] env[62519]: _type = "Task" [ 1673.026393] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1673.034611] env[62519]: DEBUG oslo_vmware.api [None req-41ebc291-9b01-4875-8d9f-5c4226f038f6 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]527cbf30-2ce9-031f-a83f-84673c1ecb46, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1673.122375] env[62519]: DEBUG oslo_concurrency.lockutils [None req-cd6a10f9-f40f-4798-8fe6-7c97937bac88 tempest-SecurityGroupsTestJSON-38868017 tempest-SecurityGroupsTestJSON-38868017-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.022s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1673.124486] env[62519]: DEBUG oslo_concurrency.lockutils [None req-ba83c5d0-762b-42f1-8643-2a65dc34a940 tempest-AttachInterfacesUnderV243Test-2090758485 tempest-AttachInterfacesUnderV243Test-2090758485-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 32.538s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1673.126852] env[62519]: INFO nova.compute.claims [None req-ba83c5d0-762b-42f1-8643-2a65dc34a940 tempest-AttachInterfacesUnderV243Test-2090758485 tempest-AttachInterfacesUnderV243Test-2090758485-project-member] [instance: 9f71845a-e80c-4822-b3de-717f1d83bc49] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1673.160667] env[62519]: INFO nova.scheduler.client.report [None req-cd6a10f9-f40f-4798-8fe6-7c97937bac88 tempest-SecurityGroupsTestJSON-38868017 tempest-SecurityGroupsTestJSON-38868017-project-member] Deleted allocations for instance 4a29bff8-050a-4ad5-9d06-3a59c40b97ee [ 1673.192377] env[62519]: DEBUG oslo_concurrency.lockutils [None req-9d0f7f31-7c2c-4fa3-bc3d-67c656ee891c tempest-ServersAaction247Test-895786641 tempest-ServersAaction247Test-895786641-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1673.310426] env[62519]: INFO nova.compute.manager [None req-64171712-43a0-4e02-85db-f982475f1310 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] [instance: a1551278-a306-4534-8d8d-3b3a003dde04] Updating instance to original state: 'active' [ 1673.537424] env[62519]: 
DEBUG oslo_vmware.api [None req-41ebc291-9b01-4875-8d9f-5c4226f038f6 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]527cbf30-2ce9-031f-a83f-84673c1ecb46, 'name': SearchDatastore_Task, 'duration_secs': 0.009894} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1673.537572] env[62519]: DEBUG oslo_concurrency.lockutils [None req-41ebc291-9b01-4875-8d9f-5c4226f038f6 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Releasing lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1673.537828] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-41ebc291-9b01-4875-8d9f-5c4226f038f6 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk to [datastore1] 40507d8c-8f30-45d4-9c65-03f8b1271afb/40507d8c-8f30-45d4-9c65-03f8b1271afb.vmdk {{(pid=62519) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1673.538334] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c9df9098-0346-4c1e-bed5-9c205b70f61e {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1673.545093] env[62519]: DEBUG oslo_vmware.api [None req-41ebc291-9b01-4875-8d9f-5c4226f038f6 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Waiting for the task: (returnval){ [ 1673.545093] env[62519]: value = "task-1802686" [ 1673.545093] env[62519]: _type = "Task" [ 1673.545093] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1673.552951] env[62519]: DEBUG oslo_vmware.api [None req-41ebc291-9b01-4875-8d9f-5c4226f038f6 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Task: {'id': task-1802686, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1673.668436] env[62519]: DEBUG oslo_concurrency.lockutils [None req-cd6a10f9-f40f-4798-8fe6-7c97937bac88 tempest-SecurityGroupsTestJSON-38868017 tempest-SecurityGroupsTestJSON-38868017-project-member] Lock "4a29bff8-050a-4ad5-9d06-3a59c40b97ee" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 37.753s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1674.056423] env[62519]: DEBUG oslo_vmware.api [None req-41ebc291-9b01-4875-8d9f-5c4226f038f6 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Task: {'id': task-1802686, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1674.558390] env[62519]: DEBUG oslo_vmware.api [None req-41ebc291-9b01-4875-8d9f-5c4226f038f6 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Task: {'id': task-1802686, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.59463} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1674.562182] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-41ebc291-9b01-4875-8d9f-5c4226f038f6 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk to [datastore1] 40507d8c-8f30-45d4-9c65-03f8b1271afb/40507d8c-8f30-45d4-9c65-03f8b1271afb.vmdk {{(pid=62519) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1674.562483] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-41ebc291-9b01-4875-8d9f-5c4226f038f6 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] [instance: 40507d8c-8f30-45d4-9c65-03f8b1271afb] Extending root virtual disk to 1048576 {{(pid=62519) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1674.563712] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-7af20bfb-73a3-4185-a0db-afaf74b4cfe5 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1674.571676] env[62519]: DEBUG oslo_vmware.api [None req-41ebc291-9b01-4875-8d9f-5c4226f038f6 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Waiting for the task: (returnval){ [ 1674.571676] env[62519]: value = "task-1802687" [ 1674.571676] env[62519]: _type = "Task" [ 1674.571676] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1674.583557] env[62519]: DEBUG oslo_vmware.api [None req-41ebc291-9b01-4875-8d9f-5c4226f038f6 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Task: {'id': task-1802687, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1674.615823] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7450aef-930c-4181-ab24-2c2e0a98094c {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1674.623614] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab816e15-dc84-4707-878e-1088ac27d2aa {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1674.653753] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1646819-397c-46cc-88e2-0a3f1fc410b9 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1674.661425] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e7b8f15-849e-435e-8af1-072c408d750a {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1674.674503] env[62519]: DEBUG nova.compute.provider_tree [None req-ba83c5d0-762b-42f1-8643-2a65dc34a940 tempest-AttachInterfacesUnderV243Test-2090758485 tempest-AttachInterfacesUnderV243Test-2090758485-project-member] Inventory has not changed in ProviderTree for provider: f8ca0d98-9158-4b85-ae0e-b106f966dd44 {{(pid=62519) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1674.735634] env[62519]: DEBUG oslo_concurrency.lockutils [None req-3d88a191-b673-4e36-8aa0-9caa075c1094 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Acquiring lock "a1551278-a306-4534-8d8d-3b3a003dde04" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1674.735925] env[62519]: DEBUG oslo_concurrency.lockutils [None req-3d88a191-b673-4e36-8aa0-9caa075c1094 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Lock "a1551278-a306-4534-8d8d-3b3a003dde04" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1674.736154] env[62519]: DEBUG oslo_concurrency.lockutils [None req-3d88a191-b673-4e36-8aa0-9caa075c1094 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Acquiring lock "a1551278-a306-4534-8d8d-3b3a003dde04-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1674.736332] env[62519]: DEBUG oslo_concurrency.lockutils [None req-3d88a191-b673-4e36-8aa0-9caa075c1094 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Lock "a1551278-a306-4534-8d8d-3b3a003dde04-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1674.736495] env[62519]: DEBUG oslo_concurrency.lockutils [None req-3d88a191-b673-4e36-8aa0-9caa075c1094 tempest-MigrationsAdminTest-1925339216 
tempest-MigrationsAdminTest-1925339216-project-member] Lock "a1551278-a306-4534-8d8d-3b3a003dde04-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1674.738861] env[62519]: INFO nova.compute.manager [None req-3d88a191-b673-4e36-8aa0-9caa075c1094 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] [instance: a1551278-a306-4534-8d8d-3b3a003dde04] Terminating instance [ 1675.081246] env[62519]: DEBUG oslo_vmware.api [None req-41ebc291-9b01-4875-8d9f-5c4226f038f6 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Task: {'id': task-1802687, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.079039} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1675.081621] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-41ebc291-9b01-4875-8d9f-5c4226f038f6 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] [instance: 40507d8c-8f30-45d4-9c65-03f8b1271afb] Extended root virtual disk {{(pid=62519) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1675.082442] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f916b021-877e-430b-a491-c696054d9bb0 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1675.104378] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-41ebc291-9b01-4875-8d9f-5c4226f038f6 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] [instance: 40507d8c-8f30-45d4-9c65-03f8b1271afb] Reconfiguring VM instance instance-00000043 to attach disk [datastore1] 40507d8c-8f30-45d4-9c65-03f8b1271afb/40507d8c-8f30-45d4-9c65-03f8b1271afb.vmdk or device None with type sparse {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1675.105071] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-eab308e3-92f5-463e-8603-76501cfd6f94 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1675.125419] env[62519]: DEBUG oslo_vmware.api [None req-41ebc291-9b01-4875-8d9f-5c4226f038f6 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Waiting for the task: (returnval){ [ 1675.125419] env[62519]: value = "task-1802688" [ 1675.125419] env[62519]: _type = "Task" [ 1675.125419] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1675.134064] env[62519]: DEBUG oslo_vmware.api [None req-41ebc291-9b01-4875-8d9f-5c4226f038f6 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Task: {'id': task-1802688, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1675.177254] env[62519]: DEBUG nova.scheduler.client.report [None req-ba83c5d0-762b-42f1-8643-2a65dc34a940 tempest-AttachInterfacesUnderV243Test-2090758485 tempest-AttachInterfacesUnderV243Test-2090758485-project-member] Inventory has not changed for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 157, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1675.242245] env[62519]: DEBUG nova.compute.manager [None req-3d88a191-b673-4e36-8aa0-9caa075c1094 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] [instance: a1551278-a306-4534-8d8d-3b3a003dde04] Start destroying the instance on the hypervisor. {{(pid=62519) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3166}} [ 1675.242490] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-3d88a191-b673-4e36-8aa0-9caa075c1094 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] [instance: a1551278-a306-4534-8d8d-3b3a003dde04] Destroying instance {{(pid=62519) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1675.243510] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-555efa0b-1c8d-4c2d-946c-066c18500d50 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1675.253993] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-3d88a191-b673-4e36-8aa0-9caa075c1094 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] [instance: a1551278-a306-4534-8d8d-3b3a003dde04] Powering off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1675.254449] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-04b584ea-9d7f-4200-a959-608b26eea577 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1675.261916] env[62519]: DEBUG oslo_vmware.api [None req-3d88a191-b673-4e36-8aa0-9caa075c1094 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Waiting for the task: (returnval){ [ 1675.261916] env[62519]: value = "task-1802689" [ 1675.261916] env[62519]: _type = "Task" [ 1675.261916] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1675.271986] env[62519]: DEBUG oslo_vmware.api [None req-3d88a191-b673-4e36-8aa0-9caa075c1094 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Task: {'id': task-1802689, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1675.636341] env[62519]: DEBUG oslo_vmware.api [None req-41ebc291-9b01-4875-8d9f-5c4226f038f6 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Task: {'id': task-1802688, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1675.682466] env[62519]: DEBUG oslo_concurrency.lockutils [None req-ba83c5d0-762b-42f1-8643-2a65dc34a940 tempest-AttachInterfacesUnderV243Test-2090758485 tempest-AttachInterfacesUnderV243Test-2090758485-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.558s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1675.683018] env[62519]: DEBUG nova.compute.manager [None req-ba83c5d0-762b-42f1-8643-2a65dc34a940 tempest-AttachInterfacesUnderV243Test-2090758485 tempest-AttachInterfacesUnderV243Test-2090758485-project-member] [instance: 9f71845a-e80c-4822-b3de-717f1d83bc49] Start building networks asynchronously for instance. {{(pid=62519) _build_resources /opt/stack/nova/nova/compute/manager.py:2838}} [ 1675.685942] env[62519]: DEBUG oslo_concurrency.lockutils [None req-82cc34c3-57f7-4d42-986e-90dfdcc61974 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 32.230s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1675.687435] env[62519]: INFO nova.compute.claims [None req-82cc34c3-57f7-4d42-986e-90dfdcc61974 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] [instance: 54a8aa34-1595-4494-ba68-6915611631ce] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1675.773817] env[62519]: DEBUG oslo_vmware.api [None req-3d88a191-b673-4e36-8aa0-9caa075c1094 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Task: {'id': task-1802689, 'name': PowerOffVM_Task, 'duration_secs': 0.27902} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1675.774129] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-3d88a191-b673-4e36-8aa0-9caa075c1094 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] [instance: a1551278-a306-4534-8d8d-3b3a003dde04] Powered off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1675.774258] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-3d88a191-b673-4e36-8aa0-9caa075c1094 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] [instance: a1551278-a306-4534-8d8d-3b3a003dde04] Unregistering the VM {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1675.774511] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-cc4ae6aa-4c4c-4aec-85ee-74e2b4e9e301 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1675.855875] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-3d88a191-b673-4e36-8aa0-9caa075c1094 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] [instance: a1551278-a306-4534-8d8d-3b3a003dde04] Unregistered the VM {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1675.856138] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-3d88a191-b673-4e36-8aa0-9caa075c1094 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] [instance: a1551278-a306-4534-8d8d-3b3a003dde04] Deleting contents of the VM from datastore datastore1 {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1675.856321] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-3d88a191-b673-4e36-8aa0-9caa075c1094 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Deleting the datastore file [datastore1] a1551278-a306-4534-8d8d-3b3a003dde04 {{(pid=62519) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1675.856585] env[62519]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c263c134-4529-4778-ab77-c6a295f8a632 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1675.863185] env[62519]: DEBUG oslo_vmware.api [None req-3d88a191-b673-4e36-8aa0-9caa075c1094 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Waiting for the task: (returnval){ [ 1675.863185] env[62519]: value = "task-1802691" [ 1675.863185] env[62519]: _type = "Task" [ 1675.863185] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1675.872221] env[62519]: DEBUG oslo_vmware.api [None req-3d88a191-b673-4e36-8aa0-9caa075c1094 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Task: {'id': task-1802691, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1676.142888] env[62519]: DEBUG oslo_vmware.api [None req-41ebc291-9b01-4875-8d9f-5c4226f038f6 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Task: {'id': task-1802688, 'name': ReconfigVM_Task, 'duration_secs': 0.542645} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1676.143721] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-41ebc291-9b01-4875-8d9f-5c4226f038f6 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] [instance: 40507d8c-8f30-45d4-9c65-03f8b1271afb] Reconfigured VM instance instance-00000043 to attach disk [datastore1] 40507d8c-8f30-45d4-9c65-03f8b1271afb/40507d8c-8f30-45d4-9c65-03f8b1271afb.vmdk or device None with type sparse {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1676.143968] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-1068fdb4-d4ff-41a6-95a4-a647e405a35f {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1676.151422] env[62519]: DEBUG oslo_vmware.api [None req-41ebc291-9b01-4875-8d9f-5c4226f038f6 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Waiting for the task: (returnval){ [ 1676.151422] env[62519]: value = "task-1802692" [ 1676.151422] env[62519]: _type = "Task" [ 1676.151422] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1676.162700] env[62519]: DEBUG oslo_vmware.api [None req-41ebc291-9b01-4875-8d9f-5c4226f038f6 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Task: {'id': task-1802692, 'name': Rename_Task} progress is 5%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1676.197061] env[62519]: DEBUG nova.compute.utils [None req-ba83c5d0-762b-42f1-8643-2a65dc34a940 tempest-AttachInterfacesUnderV243Test-2090758485 tempest-AttachInterfacesUnderV243Test-2090758485-project-member] Using /dev/sd instead of None {{(pid=62519) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1676.200261] env[62519]: DEBUG nova.compute.manager [None req-ba83c5d0-762b-42f1-8643-2a65dc34a940 tempest-AttachInterfacesUnderV243Test-2090758485 tempest-AttachInterfacesUnderV243Test-2090758485-project-member] [instance: 9f71845a-e80c-4822-b3de-717f1d83bc49] Allocating IP information in the background. 
{{(pid=62519) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1989}} [ 1676.200344] env[62519]: DEBUG nova.network.neutron [None req-ba83c5d0-762b-42f1-8643-2a65dc34a940 tempest-AttachInterfacesUnderV243Test-2090758485 tempest-AttachInterfacesUnderV243Test-2090758485-project-member] [instance: 9f71845a-e80c-4822-b3de-717f1d83bc49] allocate_for_instance() {{(pid=62519) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1676.240124] env[62519]: DEBUG nova.policy [None req-ba83c5d0-762b-42f1-8643-2a65dc34a940 tempest-AttachInterfacesUnderV243Test-2090758485 tempest-AttachInterfacesUnderV243Test-2090758485-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'bc4f07a745f54bfc858b733160093586', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '17c3617401cc4be0bbb6851dba631e98', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62519) authorize /opt/stack/nova/nova/policy.py:192}} [ 1676.373923] env[62519]: DEBUG oslo_vmware.api [None req-3d88a191-b673-4e36-8aa0-9caa075c1094 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Task: {'id': task-1802691, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.171984} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1676.374266] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-3d88a191-b673-4e36-8aa0-9caa075c1094 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Deleted the datastore file {{(pid=62519) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1676.374533] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-3d88a191-b673-4e36-8aa0-9caa075c1094 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] [instance: a1551278-a306-4534-8d8d-3b3a003dde04] Deleted contents of the VM from datastore datastore1 {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1676.374773] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-3d88a191-b673-4e36-8aa0-9caa075c1094 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] [instance: a1551278-a306-4534-8d8d-3b3a003dde04] Instance destroyed {{(pid=62519) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1676.375252] env[62519]: INFO nova.compute.manager [None req-3d88a191-b673-4e36-8aa0-9caa075c1094 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] [instance: a1551278-a306-4534-8d8d-3b3a003dde04] Took 1.13 seconds to destroy the instance on the hypervisor. [ 1676.375661] env[62519]: DEBUG oslo.service.loopingcall [None req-3d88a191-b673-4e36-8aa0-9caa075c1094 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62519) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1676.375936] env[62519]: DEBUG nova.compute.manager [-] [instance: a1551278-a306-4534-8d8d-3b3a003dde04] Deallocating network for instance {{(pid=62519) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2297}} [ 1676.376104] env[62519]: DEBUG nova.network.neutron [-] [instance: a1551278-a306-4534-8d8d-3b3a003dde04] deallocate_for_instance() {{(pid=62519) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1676.661487] env[62519]: DEBUG oslo_vmware.api [None req-41ebc291-9b01-4875-8d9f-5c4226f038f6 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Task: {'id': task-1802692, 'name': Rename_Task} progress is 99%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1676.662287] env[62519]: DEBUG nova.network.neutron [None req-ba83c5d0-762b-42f1-8643-2a65dc34a940 tempest-AttachInterfacesUnderV243Test-2090758485 tempest-AttachInterfacesUnderV243Test-2090758485-project-member] [instance: 9f71845a-e80c-4822-b3de-717f1d83bc49] Successfully created port: 89b7045e-9b9a-45f2-b8e7-e5d063bd1605 {{(pid=62519) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1676.700885] env[62519]: DEBUG nova.compute.manager [None req-ba83c5d0-762b-42f1-8643-2a65dc34a940 tempest-AttachInterfacesUnderV243Test-2090758485 tempest-AttachInterfacesUnderV243Test-2090758485-project-member] [instance: 9f71845a-e80c-4822-b3de-717f1d83bc49] Start building block device mappings for instance. {{(pid=62519) _build_resources /opt/stack/nova/nova/compute/manager.py:2873}} [ 1676.785458] env[62519]: DEBUG nova.compute.manager [req-b7e55ffa-ae73-4567-bf6c-569eb9ab03b6 req-c59019e2-26c7-4438-a0f9-4213ef27cebc service nova] [instance: a1551278-a306-4534-8d8d-3b3a003dde04] Received event network-vif-deleted-1cf55e56-b406-4e45-9b63-912d4587f930 {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1676.785942] env[62519]: INFO nova.compute.manager [req-b7e55ffa-ae73-4567-bf6c-569eb9ab03b6 req-c59019e2-26c7-4438-a0f9-4213ef27cebc service nova] [instance: a1551278-a306-4534-8d8d-3b3a003dde04] Neutron deleted interface 1cf55e56-b406-4e45-9b63-912d4587f930; detaching it from the instance and deleting it from the info cache [ 1676.785942] env[62519]: DEBUG nova.network.neutron [req-b7e55ffa-ae73-4567-bf6c-569eb9ab03b6 req-c59019e2-26c7-4438-a0f9-4213ef27cebc service nova] [instance: a1551278-a306-4534-8d8d-3b3a003dde04] Updating instance_info_cache with network_info: [] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1677.140109] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae11273d-8f86-4698-8133-1df99342db6b {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1677.147921] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6542f9b-aeb0-4aa1-ace8-a343ac16a579 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1677.180771] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2229832-d84c-48ab-918e-962e3ec94a05 {{(pid=62519) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1677.186660] env[62519]: DEBUG oslo_vmware.api [None req-41ebc291-9b01-4875-8d9f-5c4226f038f6 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Task: {'id': task-1802692, 'name': Rename_Task} progress is 99%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1677.191509] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9357814-f51c-4b40-bb35-798651089602 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1677.205911] env[62519]: DEBUG nova.compute.provider_tree [None req-82cc34c3-57f7-4d42-986e-90dfdcc61974 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Inventory has not changed in ProviderTree for provider: f8ca0d98-9158-4b85-ae0e-b106f966dd44 {{(pid=62519) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1677.252179] env[62519]: DEBUG nova.network.neutron [-] [instance: a1551278-a306-4534-8d8d-3b3a003dde04] Updating instance_info_cache with network_info: [] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1677.288673] env[62519]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-46e8b7f9-ae9e-4cf2-91ae-ae7ba8d1e1ac {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1677.299606] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8dda4129-77d8-4845-9e68-35503b81a6e8 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1677.339319] env[62519]: DEBUG nova.compute.manager [req-b7e55ffa-ae73-4567-bf6c-569eb9ab03b6 req-c59019e2-26c7-4438-a0f9-4213ef27cebc service nova] [instance: a1551278-a306-4534-8d8d-3b3a003dde04] Detach interface failed, port_id=1cf55e56-b406-4e45-9b63-912d4587f930, reason: Instance a1551278-a306-4534-8d8d-3b3a003dde04 could not be found. {{(pid=62519) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11455}} [ 1677.661379] env[62519]: DEBUG oslo_vmware.api [None req-41ebc291-9b01-4875-8d9f-5c4226f038f6 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Task: {'id': task-1802692, 'name': Rename_Task, 'duration_secs': 1.154658} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1677.661653] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-41ebc291-9b01-4875-8d9f-5c4226f038f6 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] [instance: 40507d8c-8f30-45d4-9c65-03f8b1271afb] Powering on the VM {{(pid=62519) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1677.661895] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-1aeba501-4244-4f75-8574-5ecf93abb782 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1677.667927] env[62519]: DEBUG oslo_vmware.api [None req-41ebc291-9b01-4875-8d9f-5c4226f038f6 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Waiting for the task: (returnval){ [ 1677.667927] env[62519]: value = "task-1802693" [ 1677.667927] env[62519]: _type = "Task" [ 1677.667927] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1677.674950] env[62519]: DEBUG oslo_vmware.api [None req-41ebc291-9b01-4875-8d9f-5c4226f038f6 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Task: {'id': task-1802693, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1677.712143] env[62519]: DEBUG nova.scheduler.client.report [None req-82cc34c3-57f7-4d42-986e-90dfdcc61974 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Inventory has not changed for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 157, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1677.716407] env[62519]: DEBUG nova.compute.manager [None req-ba83c5d0-762b-42f1-8643-2a65dc34a940 tempest-AttachInterfacesUnderV243Test-2090758485 tempest-AttachInterfacesUnderV243Test-2090758485-project-member] [instance: 9f71845a-e80c-4822-b3de-717f1d83bc49] Start spawning the instance on the hypervisor. {{(pid=62519) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2647}} [ 1677.754639] env[62519]: INFO nova.compute.manager [-] [instance: a1551278-a306-4534-8d8d-3b3a003dde04] Took 1.38 seconds to deallocate network for instance. 
[ 1677.831223] env[62519]: DEBUG nova.virt.hardware [None req-ba83c5d0-762b-42f1-8643-2a65dc34a940 tempest-AttachInterfacesUnderV243Test-2090758485 tempest-AttachInterfacesUnderV243Test-2090758485-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T07:56:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T07:55:55Z,direct_url=,disk_format='vmdk',id=15793716-f1d9-4a86-9030-717adf498693,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4069337684d348e0a1ab4eb6a1a2b14d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T07:55:56Z,virtual_size=,visibility=), allow threads: False {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1677.831497] env[62519]: DEBUG nova.virt.hardware [None req-ba83c5d0-762b-42f1-8643-2a65dc34a940 tempest-AttachInterfacesUnderV243Test-2090758485 tempest-AttachInterfacesUnderV243Test-2090758485-project-member] Flavor limits 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1677.831702] env[62519]: DEBUG nova.virt.hardware [None req-ba83c5d0-762b-42f1-8643-2a65dc34a940 tempest-AttachInterfacesUnderV243Test-2090758485 tempest-AttachInterfacesUnderV243Test-2090758485-project-member] Image limits 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1677.831941] env[62519]: DEBUG nova.virt.hardware [None req-ba83c5d0-762b-42f1-8643-2a65dc34a940 tempest-AttachInterfacesUnderV243Test-2090758485 tempest-AttachInterfacesUnderV243Test-2090758485-project-member] Flavor pref 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1677.832175] env[62519]: DEBUG nova.virt.hardware [None req-ba83c5d0-762b-42f1-8643-2a65dc34a940 tempest-AttachInterfacesUnderV243Test-2090758485 tempest-AttachInterfacesUnderV243Test-2090758485-project-member] Image pref 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1677.832368] env[62519]: DEBUG nova.virt.hardware [None req-ba83c5d0-762b-42f1-8643-2a65dc34a940 tempest-AttachInterfacesUnderV243Test-2090758485 tempest-AttachInterfacesUnderV243Test-2090758485-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1677.832622] env[62519]: DEBUG nova.virt.hardware [None req-ba83c5d0-762b-42f1-8643-2a65dc34a940 tempest-AttachInterfacesUnderV243Test-2090758485 tempest-AttachInterfacesUnderV243Test-2090758485-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1677.832826] env[62519]: DEBUG nova.virt.hardware [None req-ba83c5d0-762b-42f1-8643-2a65dc34a940 tempest-AttachInterfacesUnderV243Test-2090758485 tempest-AttachInterfacesUnderV243Test-2090758485-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62519) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1677.833043] 
env[62519]: DEBUG nova.virt.hardware [None req-ba83c5d0-762b-42f1-8643-2a65dc34a940 tempest-AttachInterfacesUnderV243Test-2090758485 tempest-AttachInterfacesUnderV243Test-2090758485-project-member] Got 1 possible topologies {{(pid=62519) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1677.833258] env[62519]: DEBUG nova.virt.hardware [None req-ba83c5d0-762b-42f1-8643-2a65dc34a940 tempest-AttachInterfacesUnderV243Test-2090758485 tempest-AttachInterfacesUnderV243Test-2090758485-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1677.833469] env[62519]: DEBUG nova.virt.hardware [None req-ba83c5d0-762b-42f1-8643-2a65dc34a940 tempest-AttachInterfacesUnderV243Test-2090758485 tempest-AttachInterfacesUnderV243Test-2090758485-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1677.834658] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3fd9de86-62af-42fa-8ec1-acce51a6289b {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1677.843377] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-562ef3a0-9d82-48f6-8a94-92e6e78257a3 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1678.178124] env[62519]: DEBUG oslo_vmware.api [None req-41ebc291-9b01-4875-8d9f-5c4226f038f6 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Task: {'id': task-1802693, 'name': PowerOnVM_Task, 'duration_secs': 0.471965} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1678.178674] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-41ebc291-9b01-4875-8d9f-5c4226f038f6 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] [instance: 40507d8c-8f30-45d4-9c65-03f8b1271afb] Powered on the VM {{(pid=62519) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1678.179265] env[62519]: INFO nova.compute.manager [None req-41ebc291-9b01-4875-8d9f-5c4226f038f6 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] [instance: 40507d8c-8f30-45d4-9c65-03f8b1271afb] Took 9.03 seconds to spawn the instance on the hypervisor. 
[ 1678.179696] env[62519]: DEBUG nova.compute.manager [None req-41ebc291-9b01-4875-8d9f-5c4226f038f6 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] [instance: 40507d8c-8f30-45d4-9c65-03f8b1271afb] Checking state {{(pid=62519) _get_power_state /opt/stack/nova/nova/compute/manager.py:1799}} [ 1678.180574] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10325391-3556-44b7-97d7-23b1ac3d3235 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1678.219634] env[62519]: DEBUG oslo_concurrency.lockutils [None req-82cc34c3-57f7-4d42-986e-90dfdcc61974 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.534s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1678.225370] env[62519]: DEBUG nova.compute.manager [None req-82cc34c3-57f7-4d42-986e-90dfdcc61974 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] [instance: 54a8aa34-1595-4494-ba68-6915611631ce] Start building networks asynchronously for instance. {{(pid=62519) _build_resources /opt/stack/nova/nova/compute/manager.py:2838}} [ 1678.226468] env[62519]: DEBUG oslo_concurrency.lockutils [None req-6a19a777-daec-40e9-be4e-bd92f98777ed tempest-VolumesAdminNegativeTest-694306887 tempest-VolumesAdminNegativeTest-694306887-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 31.973s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1678.226801] env[62519]: DEBUG nova.objects.instance [None req-6a19a777-daec-40e9-be4e-bd92f98777ed tempest-VolumesAdminNegativeTest-694306887 tempest-VolumesAdminNegativeTest-694306887-project-member] Lazy-loading 'resources' on Instance uuid ed716912-752e-4c6d-b6c6-fb349668fa93 {{(pid=62519) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1678.254088] env[62519]: DEBUG nova.network.neutron [None req-ba83c5d0-762b-42f1-8643-2a65dc34a940 tempest-AttachInterfacesUnderV243Test-2090758485 tempest-AttachInterfacesUnderV243Test-2090758485-project-member] [instance: 9f71845a-e80c-4822-b3de-717f1d83bc49] Successfully updated port: 89b7045e-9b9a-45f2-b8e7-e5d063bd1605 {{(pid=62519) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1678.261338] env[62519]: DEBUG oslo_concurrency.lockutils [None req-3d88a191-b673-4e36-8aa0-9caa075c1094 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1678.697488] env[62519]: INFO nova.compute.manager [None req-41ebc291-9b01-4875-8d9f-5c4226f038f6 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] [instance: 40507d8c-8f30-45d4-9c65-03f8b1271afb] Took 43.18 seconds to build instance. 
[ 1678.733990] env[62519]: DEBUG nova.compute.utils [None req-82cc34c3-57f7-4d42-986e-90dfdcc61974 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Using /dev/sd instead of None {{(pid=62519) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1678.736756] env[62519]: DEBUG nova.compute.manager [None req-82cc34c3-57f7-4d42-986e-90dfdcc61974 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] [instance: 54a8aa34-1595-4494-ba68-6915611631ce] Allocating IP information in the background. {{(pid=62519) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1989}} [ 1678.737264] env[62519]: DEBUG nova.network.neutron [None req-82cc34c3-57f7-4d42-986e-90dfdcc61974 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] [instance: 54a8aa34-1595-4494-ba68-6915611631ce] allocate_for_instance() {{(pid=62519) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1678.761179] env[62519]: DEBUG oslo_concurrency.lockutils [None req-ba83c5d0-762b-42f1-8643-2a65dc34a940 tempest-AttachInterfacesUnderV243Test-2090758485 tempest-AttachInterfacesUnderV243Test-2090758485-project-member] Acquiring lock "refresh_cache-9f71845a-e80c-4822-b3de-717f1d83bc49" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1678.761179] env[62519]: DEBUG oslo_concurrency.lockutils [None req-ba83c5d0-762b-42f1-8643-2a65dc34a940 tempest-AttachInterfacesUnderV243Test-2090758485 tempest-AttachInterfacesUnderV243Test-2090758485-project-member] Acquired lock "refresh_cache-9f71845a-e80c-4822-b3de-717f1d83bc49" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1678.761179] env[62519]: DEBUG nova.network.neutron [None req-ba83c5d0-762b-42f1-8643-2a65dc34a940 tempest-AttachInterfacesUnderV243Test-2090758485 tempest-AttachInterfacesUnderV243Test-2090758485-project-member] [instance: 9f71845a-e80c-4822-b3de-717f1d83bc49] Building network info cache for instance {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1678.808543] env[62519]: DEBUG nova.policy [None req-82cc34c3-57f7-4d42-986e-90dfdcc61974 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c9b4ac19e10d4abdb7d7e54e5d093a8a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'a0755b34e22d4478817ec4e2d57aac2a', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62519) authorize /opt/stack/nova/nova/policy.py:192}} [ 1678.824968] env[62519]: DEBUG nova.compute.manager [req-499309be-7231-44c8-80dd-dd14d3be8b3f req-17783302-e389-4b7c-a02d-8549ce849714 service nova] [instance: 9f71845a-e80c-4822-b3de-717f1d83bc49] Received event network-vif-plugged-89b7045e-9b9a-45f2-b8e7-e5d063bd1605 {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1678.824968] env[62519]: DEBUG oslo_concurrency.lockutils [req-499309be-7231-44c8-80dd-dd14d3be8b3f req-17783302-e389-4b7c-a02d-8549ce849714 service nova] Acquiring lock "9f71845a-e80c-4822-b3de-717f1d83bc49-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" 
{{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1678.824968] env[62519]: DEBUG oslo_concurrency.lockutils [req-499309be-7231-44c8-80dd-dd14d3be8b3f req-17783302-e389-4b7c-a02d-8549ce849714 service nova] Lock "9f71845a-e80c-4822-b3de-717f1d83bc49-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1678.824968] env[62519]: DEBUG oslo_concurrency.lockutils [req-499309be-7231-44c8-80dd-dd14d3be8b3f req-17783302-e389-4b7c-a02d-8549ce849714 service nova] Lock "9f71845a-e80c-4822-b3de-717f1d83bc49-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1678.824968] env[62519]: DEBUG nova.compute.manager [req-499309be-7231-44c8-80dd-dd14d3be8b3f req-17783302-e389-4b7c-a02d-8549ce849714 service nova] [instance: 9f71845a-e80c-4822-b3de-717f1d83bc49] No waiting events found dispatching network-vif-plugged-89b7045e-9b9a-45f2-b8e7-e5d063bd1605 {{(pid=62519) pop_instance_event /opt/stack/nova/nova/compute/manager.py:323}} [ 1678.825134] env[62519]: WARNING nova.compute.manager [req-499309be-7231-44c8-80dd-dd14d3be8b3f req-17783302-e389-4b7c-a02d-8549ce849714 service nova] [instance: 9f71845a-e80c-4822-b3de-717f1d83bc49] Received unexpected event network-vif-plugged-89b7045e-9b9a-45f2-b8e7-e5d063bd1605 for instance with vm_state building and task_state spawning. [ 1678.825655] env[62519]: DEBUG nova.compute.manager [req-499309be-7231-44c8-80dd-dd14d3be8b3f req-17783302-e389-4b7c-a02d-8549ce849714 service nova] [instance: 9f71845a-e80c-4822-b3de-717f1d83bc49] Received event network-changed-89b7045e-9b9a-45f2-b8e7-e5d063bd1605 {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1678.825655] env[62519]: DEBUG nova.compute.manager [req-499309be-7231-44c8-80dd-dd14d3be8b3f req-17783302-e389-4b7c-a02d-8549ce849714 service nova] [instance: 9f71845a-e80c-4822-b3de-717f1d83bc49] Refreshing instance network info cache due to event network-changed-89b7045e-9b9a-45f2-b8e7-e5d063bd1605. {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11628}} [ 1678.825655] env[62519]: DEBUG oslo_concurrency.lockutils [req-499309be-7231-44c8-80dd-dd14d3be8b3f req-17783302-e389-4b7c-a02d-8549ce849714 service nova] Acquiring lock "refresh_cache-9f71845a-e80c-4822-b3de-717f1d83bc49" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1679.240033] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30ca7f87-e37a-44c2-a17b-77987d56461c {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1679.241136] env[62519]: DEBUG nova.compute.manager [None req-82cc34c3-57f7-4d42-986e-90dfdcc61974 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] [instance: 54a8aa34-1595-4494-ba68-6915611631ce] Start building block device mappings for instance. 
{{(pid=62519) _build_resources /opt/stack/nova/nova/compute/manager.py:2873}} [ 1679.249266] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f494d751-dac1-4a02-aa09-b6e91cb403e8 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1679.252894] env[62519]: DEBUG nova.network.neutron [None req-82cc34c3-57f7-4d42-986e-90dfdcc61974 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] [instance: 54a8aa34-1595-4494-ba68-6915611631ce] Successfully created port: eeb6f2c0-a95e-455a-8e67-ffa5b2c13e53 {{(pid=62519) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1679.291178] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ba8c5d5-9e86-4f13-a99c-05f89b30baf7 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1679.300507] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab27a149-165e-44f1-ab8b-d95ea77ea549 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1679.315607] env[62519]: DEBUG nova.compute.provider_tree [None req-6a19a777-daec-40e9-be4e-bd92f98777ed tempest-VolumesAdminNegativeTest-694306887 tempest-VolumesAdminNegativeTest-694306887-project-member] Inventory has not changed in ProviderTree for provider: f8ca0d98-9158-4b85-ae0e-b106f966dd44 {{(pid=62519) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1679.380772] env[62519]: DEBUG nova.network.neutron [None req-ba83c5d0-762b-42f1-8643-2a65dc34a940 tempest-AttachInterfacesUnderV243Test-2090758485 tempest-AttachInterfacesUnderV243Test-2090758485-project-member] [instance: 9f71845a-e80c-4822-b3de-717f1d83bc49] Instance cache missing network info. 
{{(pid=62519) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1679.733494] env[62519]: DEBUG oslo_concurrency.lockutils [None req-da318ded-1134-4cd5-93b1-e00afd02b3d1 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Acquiring lock "40507d8c-8f30-45d4-9c65-03f8b1271afb" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1679.818742] env[62519]: DEBUG nova.scheduler.client.report [None req-6a19a777-daec-40e9-be4e-bd92f98777ed tempest-VolumesAdminNegativeTest-694306887 tempest-VolumesAdminNegativeTest-694306887-project-member] Inventory has not changed for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 157, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1679.885963] env[62519]: DEBUG nova.network.neutron [None req-ba83c5d0-762b-42f1-8643-2a65dc34a940 tempest-AttachInterfacesUnderV243Test-2090758485 tempest-AttachInterfacesUnderV243Test-2090758485-project-member] [instance: 9f71845a-e80c-4822-b3de-717f1d83bc49] Updating instance_info_cache with network_info: [{"id": "89b7045e-9b9a-45f2-b8e7-e5d063bd1605", "address": "fa:16:3e:6d:32:a1", "network": {"id": "65b31600-7594-4ce0-8103-b44b57a6db75", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-145323623-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "17c3617401cc4be0bbb6851dba631e98", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a407774d-9c2a-411d-9d6f-9ca733b97f3f", "external-id": "nsx-vlan-transportzone-710", "segmentation_id": 710, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap89b7045e-9b", "ovs_interfaceid": "89b7045e-9b9a-45f2-b8e7-e5d063bd1605", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1680.206302] env[62519]: DEBUG oslo_concurrency.lockutils [None req-41ebc291-9b01-4875-8d9f-5c4226f038f6 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Lock "40507d8c-8f30-45d4-9c65-03f8b1271afb" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 53.037s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1680.206607] env[62519]: DEBUG oslo_concurrency.lockutils [None req-da318ded-1134-4cd5-93b1-e00afd02b3d1 tempest-DeleteServersTestJSON-1737011426 
tempest-DeleteServersTestJSON-1737011426-project-member] Lock "40507d8c-8f30-45d4-9c65-03f8b1271afb" acquired by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: waited 0.473s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1680.206804] env[62519]: INFO nova.compute.manager [None req-da318ded-1134-4cd5-93b1-e00afd02b3d1 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] [instance: 40507d8c-8f30-45d4-9c65-03f8b1271afb] Shelving [ 1680.255673] env[62519]: DEBUG nova.compute.manager [None req-82cc34c3-57f7-4d42-986e-90dfdcc61974 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] [instance: 54a8aa34-1595-4494-ba68-6915611631ce] Start spawning the instance on the hypervisor. {{(pid=62519) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2647}} [ 1680.282100] env[62519]: DEBUG nova.virt.hardware [None req-82cc34c3-57f7-4d42-986e-90dfdcc61974 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T07:56:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='a2dbdbd111f2da416a04cca0bd8b3807',container_format='bare',created_at=2024-12-12T08:15:08Z,direct_url=,disk_format='vmdk',id=b36eba9a-e231-4006-9afd-ecb0aaa21542,min_disk=1,min_ram=0,name='tempest-test-snap-714656513',owner='a0755b34e22d4478817ec4e2d57aac2a',properties=ImageMetaProps,protected=,size=21334016,status='active',tags=,updated_at=2024-12-12T08:15:24Z,virtual_size=,visibility=), allow threads: False {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1680.282350] env[62519]: DEBUG nova.virt.hardware [None req-82cc34c3-57f7-4d42-986e-90dfdcc61974 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Flavor limits 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1680.282502] env[62519]: DEBUG nova.virt.hardware [None req-82cc34c3-57f7-4d42-986e-90dfdcc61974 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Image limits 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1680.282689] env[62519]: DEBUG nova.virt.hardware [None req-82cc34c3-57f7-4d42-986e-90dfdcc61974 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Flavor pref 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1680.282830] env[62519]: DEBUG nova.virt.hardware [None req-82cc34c3-57f7-4d42-986e-90dfdcc61974 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Image pref 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1680.283352] env[62519]: DEBUG nova.virt.hardware [None req-82cc34c3-57f7-4d42-986e-90dfdcc61974 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1680.283641] env[62519]: DEBUG nova.virt.hardware 
[None req-82cc34c3-57f7-4d42-986e-90dfdcc61974 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1680.283936] env[62519]: DEBUG nova.virt.hardware [None req-82cc34c3-57f7-4d42-986e-90dfdcc61974 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62519) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1680.284391] env[62519]: DEBUG nova.virt.hardware [None req-82cc34c3-57f7-4d42-986e-90dfdcc61974 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Got 1 possible topologies {{(pid=62519) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1680.284839] env[62519]: DEBUG nova.virt.hardware [None req-82cc34c3-57f7-4d42-986e-90dfdcc61974 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1680.285077] env[62519]: DEBUG nova.virt.hardware [None req-82cc34c3-57f7-4d42-986e-90dfdcc61974 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1680.286258] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49e6f717-990d-49a6-971a-dda01266e4a9 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1680.298506] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef4e1ed9-dfe8-4feb-8bbc-2aa763ed51da {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1680.324227] env[62519]: DEBUG oslo_concurrency.lockutils [None req-6a19a777-daec-40e9-be4e-bd92f98777ed tempest-VolumesAdminNegativeTest-694306887 tempest-VolumesAdminNegativeTest-694306887-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.099s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1680.326669] env[62519]: DEBUG oslo_concurrency.lockutils [None req-b6d3bfb6-d0cd-4359-b03b-5e40dfd88595 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 30.601s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1680.328347] env[62519]: INFO nova.compute.claims [None req-b6d3bfb6-d0cd-4359-b03b-5e40dfd88595 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] [instance: 1cf285cf-8b4c-4872-b179-72e38c0143e0] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1680.352844] env[62519]: INFO nova.scheduler.client.report [None req-6a19a777-daec-40e9-be4e-bd92f98777ed tempest-VolumesAdminNegativeTest-694306887 tempest-VolumesAdminNegativeTest-694306887-project-member] Deleted 
allocations for instance ed716912-752e-4c6d-b6c6-fb349668fa93 [ 1680.393018] env[62519]: DEBUG oslo_concurrency.lockutils [None req-ba83c5d0-762b-42f1-8643-2a65dc34a940 tempest-AttachInterfacesUnderV243Test-2090758485 tempest-AttachInterfacesUnderV243Test-2090758485-project-member] Releasing lock "refresh_cache-9f71845a-e80c-4822-b3de-717f1d83bc49" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1680.393376] env[62519]: DEBUG nova.compute.manager [None req-ba83c5d0-762b-42f1-8643-2a65dc34a940 tempest-AttachInterfacesUnderV243Test-2090758485 tempest-AttachInterfacesUnderV243Test-2090758485-project-member] [instance: 9f71845a-e80c-4822-b3de-717f1d83bc49] Instance network_info: |[{"id": "89b7045e-9b9a-45f2-b8e7-e5d063bd1605", "address": "fa:16:3e:6d:32:a1", "network": {"id": "65b31600-7594-4ce0-8103-b44b57a6db75", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-145323623-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "17c3617401cc4be0bbb6851dba631e98", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a407774d-9c2a-411d-9d6f-9ca733b97f3f", "external-id": "nsx-vlan-transportzone-710", "segmentation_id": 710, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap89b7045e-9b", "ovs_interfaceid": "89b7045e-9b9a-45f2-b8e7-e5d063bd1605", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62519) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2004}} [ 1680.393906] env[62519]: DEBUG oslo_concurrency.lockutils [req-499309be-7231-44c8-80dd-dd14d3be8b3f req-17783302-e389-4b7c-a02d-8549ce849714 service nova] Acquired lock "refresh_cache-9f71845a-e80c-4822-b3de-717f1d83bc49" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1680.394134] env[62519]: DEBUG nova.network.neutron [req-499309be-7231-44c8-80dd-dd14d3be8b3f req-17783302-e389-4b7c-a02d-8549ce849714 service nova] [instance: 9f71845a-e80c-4822-b3de-717f1d83bc49] Refreshing network info cache for port 89b7045e-9b9a-45f2-b8e7-e5d063bd1605 {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1680.396233] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-ba83c5d0-762b-42f1-8643-2a65dc34a940 tempest-AttachInterfacesUnderV243Test-2090758485 tempest-AttachInterfacesUnderV243Test-2090758485-project-member] [instance: 9f71845a-e80c-4822-b3de-717f1d83bc49] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:6d:32:a1', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a407774d-9c2a-411d-9d6f-9ca733b97f3f', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '89b7045e-9b9a-45f2-b8e7-e5d063bd1605', 'vif_model': 'vmxnet3'}] {{(pid=62519) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1680.405077] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-ba83c5d0-762b-42f1-8643-2a65dc34a940 tempest-AttachInterfacesUnderV243Test-2090758485 
tempest-AttachInterfacesUnderV243Test-2090758485-project-member] Creating folder: Project (17c3617401cc4be0bbb6851dba631e98). Parent ref: group-v373567. {{(pid=62519) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1680.408757] env[62519]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-a738325e-fbcd-44a6-b7cc-f2adb74dc9c5 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1680.425095] env[62519]: INFO nova.virt.vmwareapi.vm_util [None req-ba83c5d0-762b-42f1-8643-2a65dc34a940 tempest-AttachInterfacesUnderV243Test-2090758485 tempest-AttachInterfacesUnderV243Test-2090758485-project-member] Created folder: Project (17c3617401cc4be0bbb6851dba631e98) in parent group-v373567. [ 1680.425095] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-ba83c5d0-762b-42f1-8643-2a65dc34a940 tempest-AttachInterfacesUnderV243Test-2090758485 tempest-AttachInterfacesUnderV243Test-2090758485-project-member] Creating folder: Instances. Parent ref: group-v373753. {{(pid=62519) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1680.425095] env[62519]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-38ff23ab-f8a5-4bc2-9ec6-0cef2e1bc7fa {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1680.432729] env[62519]: INFO nova.virt.vmwareapi.vm_util [None req-ba83c5d0-762b-42f1-8643-2a65dc34a940 tempest-AttachInterfacesUnderV243Test-2090758485 tempest-AttachInterfacesUnderV243Test-2090758485-project-member] Created folder: Instances in parent group-v373753. [ 1680.432833] env[62519]: DEBUG oslo.service.loopingcall [None req-ba83c5d0-762b-42f1-8643-2a65dc34a940 tempest-AttachInterfacesUnderV243Test-2090758485 tempest-AttachInterfacesUnderV243Test-2090758485-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62519) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1680.433047] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9f71845a-e80c-4822-b3de-717f1d83bc49] Creating VM on the ESX host {{(pid=62519) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1680.433475] env[62519]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f9af21a1-9c37-4940-a440-04f7e9ba0095 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1680.455499] env[62519]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1680.455499] env[62519]: value = "task-1802696" [ 1680.455499] env[62519]: _type = "Task" [ 1680.455499] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1680.463535] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1802696, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1680.861452] env[62519]: DEBUG oslo_concurrency.lockutils [None req-6a19a777-daec-40e9-be4e-bd92f98777ed tempest-VolumesAdminNegativeTest-694306887 tempest-VolumesAdminNegativeTest-694306887-project-member] Lock "ed716912-752e-4c6d-b6c6-fb349668fa93" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 38.401s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1680.965461] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1802696, 'name': CreateVM_Task, 'duration_secs': 0.452943} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1680.965670] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9f71845a-e80c-4822-b3de-717f1d83bc49] Created VM on the ESX host {{(pid=62519) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1680.966387] env[62519]: DEBUG oslo_concurrency.lockutils [None req-ba83c5d0-762b-42f1-8643-2a65dc34a940 tempest-AttachInterfacesUnderV243Test-2090758485 tempest-AttachInterfacesUnderV243Test-2090758485-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1680.966548] env[62519]: DEBUG oslo_concurrency.lockutils [None req-ba83c5d0-762b-42f1-8643-2a65dc34a940 tempest-AttachInterfacesUnderV243Test-2090758485 tempest-AttachInterfacesUnderV243Test-2090758485-project-member] Acquired lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1680.966903] env[62519]: DEBUG oslo_concurrency.lockutils [None req-ba83c5d0-762b-42f1-8643-2a65dc34a940 tempest-AttachInterfacesUnderV243Test-2090758485 tempest-AttachInterfacesUnderV243Test-2090758485-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1680.967169] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c464433f-bc9d-471d-996c-df5c00a07fbd {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1680.972075] env[62519]: DEBUG oslo_vmware.api [None req-ba83c5d0-762b-42f1-8643-2a65dc34a940 tempest-AttachInterfacesUnderV243Test-2090758485 tempest-AttachInterfacesUnderV243Test-2090758485-project-member] Waiting for the task: (returnval){ [ 1680.972075] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]52be3805-3a74-b99e-4425-d238091abef0" [ 1680.972075] env[62519]: _type = "Task" [ 1680.972075] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1680.980309] env[62519]: DEBUG oslo_vmware.api [None req-ba83c5d0-762b-42f1-8643-2a65dc34a940 tempest-AttachInterfacesUnderV243Test-2090758485 tempest-AttachInterfacesUnderV243Test-2090758485-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52be3805-3a74-b99e-4425-d238091abef0, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1681.149145] env[62519]: DEBUG oslo_concurrency.lockutils [None req-29e9d70a-91ad-45d4-8dc5-675fe322e52d tempest-AttachInterfacesV270Test-1088965964 tempest-AttachInterfacesV270Test-1088965964-project-member] Acquiring lock "0789b142-4712-4b7a-9197-c3689f24df7c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1681.152016] env[62519]: DEBUG oslo_concurrency.lockutils [None req-29e9d70a-91ad-45d4-8dc5-675fe322e52d tempest-AttachInterfacesV270Test-1088965964 tempest-AttachInterfacesV270Test-1088965964-project-member] Lock "0789b142-4712-4b7a-9197-c3689f24df7c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1681.216985] env[62519]: DEBUG nova.network.neutron [req-499309be-7231-44c8-80dd-dd14d3be8b3f req-17783302-e389-4b7c-a02d-8549ce849714 service nova] [instance: 9f71845a-e80c-4822-b3de-717f1d83bc49] Updated VIF entry in instance network info cache for port 89b7045e-9b9a-45f2-b8e7-e5d063bd1605. {{(pid=62519) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1681.217685] env[62519]: DEBUG nova.network.neutron [req-499309be-7231-44c8-80dd-dd14d3be8b3f req-17783302-e389-4b7c-a02d-8549ce849714 service nova] [instance: 9f71845a-e80c-4822-b3de-717f1d83bc49] Updating instance_info_cache with network_info: [{"id": "89b7045e-9b9a-45f2-b8e7-e5d063bd1605", "address": "fa:16:3e:6d:32:a1", "network": {"id": "65b31600-7594-4ce0-8103-b44b57a6db75", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-145323623-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "17c3617401cc4be0bbb6851dba631e98", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a407774d-9c2a-411d-9d6f-9ca733b97f3f", "external-id": "nsx-vlan-transportzone-710", "segmentation_id": 710, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap89b7045e-9b", "ovs_interfaceid": "89b7045e-9b9a-45f2-b8e7-e5d063bd1605", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1681.223225] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-da318ded-1134-4cd5-93b1-e00afd02b3d1 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] [instance: 40507d8c-8f30-45d4-9c65-03f8b1271afb] Powering off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1681.223509] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-72c41fe1-ef51-4302-abad-8fe652bddb00 {{(pid=62519) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1681.231429] env[62519]: DEBUG oslo_vmware.api [None req-da318ded-1134-4cd5-93b1-e00afd02b3d1 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Waiting for the task: (returnval){ [ 1681.231429] env[62519]: value = "task-1802698" [ 1681.231429] env[62519]: _type = "Task" [ 1681.231429] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1681.244015] env[62519]: DEBUG oslo_vmware.api [None req-da318ded-1134-4cd5-93b1-e00afd02b3d1 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Task: {'id': task-1802698, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1681.275190] env[62519]: DEBUG nova.compute.manager [req-ed7e0989-a2aa-498b-b7fb-99903efe9ce2 req-61f3255a-b125-4668-9bdb-8ca4b0a438ac service nova] [instance: 54a8aa34-1595-4494-ba68-6915611631ce] Received event network-vif-plugged-eeb6f2c0-a95e-455a-8e67-ffa5b2c13e53 {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1681.275431] env[62519]: DEBUG oslo_concurrency.lockutils [req-ed7e0989-a2aa-498b-b7fb-99903efe9ce2 req-61f3255a-b125-4668-9bdb-8ca4b0a438ac service nova] Acquiring lock "54a8aa34-1595-4494-ba68-6915611631ce-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1681.275820] env[62519]: DEBUG oslo_concurrency.lockutils [req-ed7e0989-a2aa-498b-b7fb-99903efe9ce2 req-61f3255a-b125-4668-9bdb-8ca4b0a438ac service nova] Lock "54a8aa34-1595-4494-ba68-6915611631ce-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1681.275820] env[62519]: DEBUG oslo_concurrency.lockutils [req-ed7e0989-a2aa-498b-b7fb-99903efe9ce2 req-61f3255a-b125-4668-9bdb-8ca4b0a438ac service nova] Lock "54a8aa34-1595-4494-ba68-6915611631ce-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1681.276167] env[62519]: DEBUG nova.compute.manager [req-ed7e0989-a2aa-498b-b7fb-99903efe9ce2 req-61f3255a-b125-4668-9bdb-8ca4b0a438ac service nova] [instance: 54a8aa34-1595-4494-ba68-6915611631ce] No waiting events found dispatching network-vif-plugged-eeb6f2c0-a95e-455a-8e67-ffa5b2c13e53 {{(pid=62519) pop_instance_event /opt/stack/nova/nova/compute/manager.py:323}} [ 1681.276482] env[62519]: WARNING nova.compute.manager [req-ed7e0989-a2aa-498b-b7fb-99903efe9ce2 req-61f3255a-b125-4668-9bdb-8ca4b0a438ac service nova] [instance: 54a8aa34-1595-4494-ba68-6915611631ce] Received unexpected event network-vif-plugged-eeb6f2c0-a95e-455a-8e67-ffa5b2c13e53 for instance with vm_state building and task_state spawning. 
[ 1681.388942] env[62519]: DEBUG nova.network.neutron [None req-82cc34c3-57f7-4d42-986e-90dfdcc61974 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] [instance: 54a8aa34-1595-4494-ba68-6915611631ce] Successfully updated port: eeb6f2c0-a95e-455a-8e67-ffa5b2c13e53 {{(pid=62519) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1681.484736] env[62519]: DEBUG oslo_vmware.api [None req-ba83c5d0-762b-42f1-8643-2a65dc34a940 tempest-AttachInterfacesUnderV243Test-2090758485 tempest-AttachInterfacesUnderV243Test-2090758485-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52be3805-3a74-b99e-4425-d238091abef0, 'name': SearchDatastore_Task, 'duration_secs': 0.009551} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1681.484909] env[62519]: DEBUG oslo_concurrency.lockutils [None req-ba83c5d0-762b-42f1-8643-2a65dc34a940 tempest-AttachInterfacesUnderV243Test-2090758485 tempest-AttachInterfacesUnderV243Test-2090758485-project-member] Releasing lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1681.485510] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-ba83c5d0-762b-42f1-8643-2a65dc34a940 tempest-AttachInterfacesUnderV243Test-2090758485 tempest-AttachInterfacesUnderV243Test-2090758485-project-member] [instance: 9f71845a-e80c-4822-b3de-717f1d83bc49] Processing image 15793716-f1d9-4a86-9030-717adf498693 {{(pid=62519) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1681.485510] env[62519]: DEBUG oslo_concurrency.lockutils [None req-ba83c5d0-762b-42f1-8643-2a65dc34a940 tempest-AttachInterfacesUnderV243Test-2090758485 tempest-AttachInterfacesUnderV243Test-2090758485-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1681.485510] env[62519]: DEBUG oslo_concurrency.lockutils [None req-ba83c5d0-762b-42f1-8643-2a65dc34a940 tempest-AttachInterfacesUnderV243Test-2090758485 tempest-AttachInterfacesUnderV243Test-2090758485-project-member] Acquired lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1681.485824] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-ba83c5d0-762b-42f1-8643-2a65dc34a940 tempest-AttachInterfacesUnderV243Test-2090758485 tempest-AttachInterfacesUnderV243Test-2090758485-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62519) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1681.487992] env[62519]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f1489e80-e2ce-46ef-a05b-b6a612096dec {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1681.495906] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-ba83c5d0-762b-42f1-8643-2a65dc34a940 tempest-AttachInterfacesUnderV243Test-2090758485 tempest-AttachInterfacesUnderV243Test-2090758485-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62519) mkdir 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1681.496111] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-ba83c5d0-762b-42f1-8643-2a65dc34a940 tempest-AttachInterfacesUnderV243Test-2090758485 tempest-AttachInterfacesUnderV243Test-2090758485-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62519) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1681.496942] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9d624da0-88bd-4409-9e97-0c1208996281 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1681.504378] env[62519]: DEBUG oslo_vmware.api [None req-ba83c5d0-762b-42f1-8643-2a65dc34a940 tempest-AttachInterfacesUnderV243Test-2090758485 tempest-AttachInterfacesUnderV243Test-2090758485-project-member] Waiting for the task: (returnval){ [ 1681.504378] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]5205633a-2e30-24ad-34bf-bbbade4110b6" [ 1681.504378] env[62519]: _type = "Task" [ 1681.504378] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1681.511901] env[62519]: DEBUG oslo_vmware.api [None req-ba83c5d0-762b-42f1-8643-2a65dc34a940 tempest-AttachInterfacesUnderV243Test-2090758485 tempest-AttachInterfacesUnderV243Test-2090758485-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]5205633a-2e30-24ad-34bf-bbbade4110b6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1681.653474] env[62519]: DEBUG nova.compute.manager [None req-29e9d70a-91ad-45d4-8dc5-675fe322e52d tempest-AttachInterfacesV270Test-1088965964 tempest-AttachInterfacesV270Test-1088965964-project-member] [instance: 0789b142-4712-4b7a-9197-c3689f24df7c] Starting instance... {{(pid=62519) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2441}} [ 1681.714732] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb4d84aa-b4f1-49e1-8ff5-ca5c14d9ac33 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1681.722478] env[62519]: DEBUG oslo_concurrency.lockutils [req-499309be-7231-44c8-80dd-dd14d3be8b3f req-17783302-e389-4b7c-a02d-8549ce849714 service nova] Releasing lock "refresh_cache-9f71845a-e80c-4822-b3de-717f1d83bc49" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1681.723856] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4fab36f8-523c-47f6-91ee-d72bf82b6f8d {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1681.757338] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac4e78c7-89c5-4d43-b4ed-6fd8c5152d98 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1681.764464] env[62519]: DEBUG oslo_vmware.api [None req-da318ded-1134-4cd5-93b1-e00afd02b3d1 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Task: {'id': task-1802698, 'name': PowerOffVM_Task, 'duration_secs': 0.180708} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1681.766688] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-da318ded-1134-4cd5-93b1-e00afd02b3d1 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] [instance: 40507d8c-8f30-45d4-9c65-03f8b1271afb] Powered off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1681.767497] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc377d2b-f9db-4196-aae7-0e803489d978 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1681.770785] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29aa61aa-09a1-44a3-9389-966f30fe55a1 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1681.798711] env[62519]: DEBUG nova.compute.provider_tree [None req-b6d3bfb6-d0cd-4359-b03b-5e40dfd88595 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Inventory has not changed in ProviderTree for provider: f8ca0d98-9158-4b85-ae0e-b106f966dd44 {{(pid=62519) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1681.800509] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8d3295c-623b-44eb-ac30-96878398f66b {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1681.894844] env[62519]: DEBUG oslo_concurrency.lockutils [None req-82cc34c3-57f7-4d42-986e-90dfdcc61974 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Acquiring lock "refresh_cache-54a8aa34-1595-4494-ba68-6915611631ce" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1681.894844] env[62519]: DEBUG oslo_concurrency.lockutils [None req-82cc34c3-57f7-4d42-986e-90dfdcc61974 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Acquired lock "refresh_cache-54a8aa34-1595-4494-ba68-6915611631ce" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1681.894844] env[62519]: DEBUG nova.network.neutron [None req-82cc34c3-57f7-4d42-986e-90dfdcc61974 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] [instance: 54a8aa34-1595-4494-ba68-6915611631ce] Building network info cache for instance {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1682.015139] env[62519]: DEBUG oslo_vmware.api [None req-ba83c5d0-762b-42f1-8643-2a65dc34a940 tempest-AttachInterfacesUnderV243Test-2090758485 tempest-AttachInterfacesUnderV243Test-2090758485-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]5205633a-2e30-24ad-34bf-bbbade4110b6, 'name': SearchDatastore_Task, 'duration_secs': 0.008064} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1682.015955] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e567a025-7998-4dea-a8cc-6348130f41c5 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1682.021223] env[62519]: DEBUG oslo_vmware.api [None req-ba83c5d0-762b-42f1-8643-2a65dc34a940 tempest-AttachInterfacesUnderV243Test-2090758485 tempest-AttachInterfacesUnderV243Test-2090758485-project-member] Waiting for the task: (returnval){ [ 1682.021223] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]52a132c8-d271-6c04-37f1-eb78ee04cfc8" [ 1682.021223] env[62519]: _type = "Task" [ 1682.021223] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1682.029267] env[62519]: DEBUG oslo_vmware.api [None req-ba83c5d0-762b-42f1-8643-2a65dc34a940 tempest-AttachInterfacesUnderV243Test-2090758485 tempest-AttachInterfacesUnderV243Test-2090758485-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52a132c8-d271-6c04-37f1-eb78ee04cfc8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1682.171744] env[62519]: DEBUG oslo_concurrency.lockutils [None req-29e9d70a-91ad-45d4-8dc5-675fe322e52d tempest-AttachInterfacesV270Test-1088965964 tempest-AttachInterfacesV270Test-1088965964-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1682.304194] env[62519]: DEBUG nova.scheduler.client.report [None req-b6d3bfb6-d0cd-4359-b03b-5e40dfd88595 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Inventory has not changed for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 157, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1682.310878] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-da318ded-1134-4cd5-93b1-e00afd02b3d1 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] [instance: 40507d8c-8f30-45d4-9c65-03f8b1271afb] Creating Snapshot of the VM instance {{(pid=62519) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1682.311551] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-2f68f3de-797d-4163-a50c-57a11dfca668 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1682.318978] env[62519]: DEBUG oslo_vmware.api [None req-da318ded-1134-4cd5-93b1-e00afd02b3d1 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Waiting for the task: (returnval){ [ 1682.318978] env[62519]: value = "task-1802699" [ 1682.318978] env[62519]: _type = "Task" [ 1682.318978] env[62519]: } to complete. 
{{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1682.327884] env[62519]: DEBUG oslo_vmware.api [None req-da318ded-1134-4cd5-93b1-e00afd02b3d1 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Task: {'id': task-1802699, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1682.443018] env[62519]: DEBUG nova.network.neutron [None req-82cc34c3-57f7-4d42-986e-90dfdcc61974 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] [instance: 54a8aa34-1595-4494-ba68-6915611631ce] Instance cache missing network info. {{(pid=62519) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1682.532132] env[62519]: DEBUG oslo_vmware.api [None req-ba83c5d0-762b-42f1-8643-2a65dc34a940 tempest-AttachInterfacesUnderV243Test-2090758485 tempest-AttachInterfacesUnderV243Test-2090758485-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52a132c8-d271-6c04-37f1-eb78ee04cfc8, 'name': SearchDatastore_Task, 'duration_secs': 0.00914} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1682.532365] env[62519]: DEBUG oslo_concurrency.lockutils [None req-ba83c5d0-762b-42f1-8643-2a65dc34a940 tempest-AttachInterfacesUnderV243Test-2090758485 tempest-AttachInterfacesUnderV243Test-2090758485-project-member] Releasing lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1682.532618] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-ba83c5d0-762b-42f1-8643-2a65dc34a940 tempest-AttachInterfacesUnderV243Test-2090758485 tempest-AttachInterfacesUnderV243Test-2090758485-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk to [datastore1] 9f71845a-e80c-4822-b3de-717f1d83bc49/9f71845a-e80c-4822-b3de-717f1d83bc49.vmdk {{(pid=62519) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1682.533046] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-5ba23fca-2b7d-47e6-99a5-b6cabc418c35 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1682.538963] env[62519]: DEBUG oslo_vmware.api [None req-ba83c5d0-762b-42f1-8643-2a65dc34a940 tempest-AttachInterfacesUnderV243Test-2090758485 tempest-AttachInterfacesUnderV243Test-2090758485-project-member] Waiting for the task: (returnval){ [ 1682.538963] env[62519]: value = "task-1802700" [ 1682.538963] env[62519]: _type = "Task" [ 1682.538963] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1682.546646] env[62519]: DEBUG oslo_vmware.api [None req-ba83c5d0-762b-42f1-8643-2a65dc34a940 tempest-AttachInterfacesUnderV243Test-2090758485 tempest-AttachInterfacesUnderV243Test-2090758485-project-member] Task: {'id': task-1802700, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1682.585681] env[62519]: DEBUG nova.network.neutron [None req-82cc34c3-57f7-4d42-986e-90dfdcc61974 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] [instance: 54a8aa34-1595-4494-ba68-6915611631ce] Updating instance_info_cache with network_info: [{"id": "eeb6f2c0-a95e-455a-8e67-ffa5b2c13e53", "address": "fa:16:3e:85:f9:c4", "network": {"id": "0ccefefe-8ea4-4ea3-91a2-32d7697da357", "bridge": "br-int", "label": "tempest-ImagesTestJSON-41230602-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a0755b34e22d4478817ec4e2d57aac2a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "594b35bb-a20b-4f0e-bd35-9acf9cc6bf11", "external-id": "nsx-vlan-transportzone-299", "segmentation_id": 299, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapeeb6f2c0-a9", "ovs_interfaceid": "eeb6f2c0-a95e-455a-8e67-ffa5b2c13e53", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1682.809759] env[62519]: DEBUG oslo_concurrency.lockutils [None req-b6d3bfb6-d0cd-4359-b03b-5e40dfd88595 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.483s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1682.810722] env[62519]: DEBUG nova.compute.manager [None req-b6d3bfb6-d0cd-4359-b03b-5e40dfd88595 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] [instance: 1cf285cf-8b4c-4872-b179-72e38c0143e0] Start building networks asynchronously for instance. {{(pid=62519) _build_resources /opt/stack/nova/nova/compute/manager.py:2838}} [ 1682.814321] env[62519]: DEBUG oslo_concurrency.lockutils [None req-909c1165-bf3e-436f-ab29-cc0666196a96 tempest-FloatingIPsAssociationTestJSON-131832216 tempest-FloatingIPsAssociationTestJSON-131832216-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 27.463s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1682.814905] env[62519]: DEBUG nova.objects.instance [None req-909c1165-bf3e-436f-ab29-cc0666196a96 tempest-FloatingIPsAssociationTestJSON-131832216 tempest-FloatingIPsAssociationTestJSON-131832216-project-member] Lazy-loading 'resources' on Instance uuid 09eefc1a-011b-4d2c-ab75-a1fcee740907 {{(pid=62519) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1682.831101] env[62519]: DEBUG oslo_vmware.api [None req-da318ded-1134-4cd5-93b1-e00afd02b3d1 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Task: {'id': task-1802699, 'name': CreateSnapshot_Task, 'duration_secs': 0.39932} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1682.831585] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-da318ded-1134-4cd5-93b1-e00afd02b3d1 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] [instance: 40507d8c-8f30-45d4-9c65-03f8b1271afb] Created Snapshot of the VM instance {{(pid=62519) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1682.832874] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8643c34d-20f1-43c9-9a2a-4019bafeeb67 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1683.048896] env[62519]: DEBUG oslo_vmware.api [None req-ba83c5d0-762b-42f1-8643-2a65dc34a940 tempest-AttachInterfacesUnderV243Test-2090758485 tempest-AttachInterfacesUnderV243Test-2090758485-project-member] Task: {'id': task-1802700, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.491976} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1683.049202] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-ba83c5d0-762b-42f1-8643-2a65dc34a940 tempest-AttachInterfacesUnderV243Test-2090758485 tempest-AttachInterfacesUnderV243Test-2090758485-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk to [datastore1] 9f71845a-e80c-4822-b3de-717f1d83bc49/9f71845a-e80c-4822-b3de-717f1d83bc49.vmdk {{(pid=62519) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1683.049392] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-ba83c5d0-762b-42f1-8643-2a65dc34a940 tempest-AttachInterfacesUnderV243Test-2090758485 tempest-AttachInterfacesUnderV243Test-2090758485-project-member] [instance: 9f71845a-e80c-4822-b3de-717f1d83bc49] Extending root virtual disk to 1048576 {{(pid=62519) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1683.049637] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-580872d2-97db-426e-909f-583cf14342cc {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1683.055833] env[62519]: DEBUG oslo_vmware.api [None req-ba83c5d0-762b-42f1-8643-2a65dc34a940 tempest-AttachInterfacesUnderV243Test-2090758485 tempest-AttachInterfacesUnderV243Test-2090758485-project-member] Waiting for the task: (returnval){ [ 1683.055833] env[62519]: value = "task-1802701" [ 1683.055833] env[62519]: _type = "Task" [ 1683.055833] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1683.063378] env[62519]: DEBUG oslo_vmware.api [None req-ba83c5d0-762b-42f1-8643-2a65dc34a940 tempest-AttachInterfacesUnderV243Test-2090758485 tempest-AttachInterfacesUnderV243Test-2090758485-project-member] Task: {'id': task-1802701, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1683.089183] env[62519]: DEBUG oslo_concurrency.lockutils [None req-82cc34c3-57f7-4d42-986e-90dfdcc61974 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Releasing lock "refresh_cache-54a8aa34-1595-4494-ba68-6915611631ce" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1683.089688] env[62519]: DEBUG nova.compute.manager [None req-82cc34c3-57f7-4d42-986e-90dfdcc61974 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] [instance: 54a8aa34-1595-4494-ba68-6915611631ce] Instance network_info: |[{"id": "eeb6f2c0-a95e-455a-8e67-ffa5b2c13e53", "address": "fa:16:3e:85:f9:c4", "network": {"id": "0ccefefe-8ea4-4ea3-91a2-32d7697da357", "bridge": "br-int", "label": "tempest-ImagesTestJSON-41230602-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a0755b34e22d4478817ec4e2d57aac2a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "594b35bb-a20b-4f0e-bd35-9acf9cc6bf11", "external-id": "nsx-vlan-transportzone-299", "segmentation_id": 299, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapeeb6f2c0-a9", "ovs_interfaceid": "eeb6f2c0-a95e-455a-8e67-ffa5b2c13e53", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62519) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2004}} [ 1683.090140] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-82cc34c3-57f7-4d42-986e-90dfdcc61974 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] [instance: 54a8aa34-1595-4494-ba68-6915611631ce] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:85:f9:c4', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '594b35bb-a20b-4f0e-bd35-9acf9cc6bf11', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'eeb6f2c0-a95e-455a-8e67-ffa5b2c13e53', 'vif_model': 'vmxnet3'}] {{(pid=62519) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1683.098151] env[62519]: DEBUG oslo.service.loopingcall [None req-82cc34c3-57f7-4d42-986e-90dfdcc61974 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62519) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1683.098389] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 54a8aa34-1595-4494-ba68-6915611631ce] Creating VM on the ESX host {{(pid=62519) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1683.098623] env[62519]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-65099654-4636-44f6-ab95-da09ea587fe3 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1683.118388] env[62519]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1683.118388] env[62519]: value = "task-1802702" [ 1683.118388] env[62519]: _type = "Task" [ 1683.118388] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1683.131370] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1802702, 'name': CreateVM_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1683.321601] env[62519]: DEBUG nova.compute.utils [None req-b6d3bfb6-d0cd-4359-b03b-5e40dfd88595 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Using /dev/sd instead of None {{(pid=62519) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1683.326202] env[62519]: DEBUG nova.compute.manager [None req-b6d3bfb6-d0cd-4359-b03b-5e40dfd88595 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] [instance: 1cf285cf-8b4c-4872-b179-72e38c0143e0] Allocating IP information in the background. {{(pid=62519) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1989}} [ 1683.326289] env[62519]: DEBUG nova.network.neutron [None req-b6d3bfb6-d0cd-4359-b03b-5e40dfd88595 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] [instance: 1cf285cf-8b4c-4872-b179-72e38c0143e0] allocate_for_instance() {{(pid=62519) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1683.330470] env[62519]: DEBUG nova.compute.manager [req-af6d13e1-5dba-4bb1-8c4b-4b571adf85f5 req-d4e4094e-d6f6-431e-a56f-a45dcb5b60da service nova] [instance: 54a8aa34-1595-4494-ba68-6915611631ce] Received event network-changed-eeb6f2c0-a95e-455a-8e67-ffa5b2c13e53 {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1683.330680] env[62519]: DEBUG nova.compute.manager [req-af6d13e1-5dba-4bb1-8c4b-4b571adf85f5 req-d4e4094e-d6f6-431e-a56f-a45dcb5b60da service nova] [instance: 54a8aa34-1595-4494-ba68-6915611631ce] Refreshing instance network info cache due to event network-changed-eeb6f2c0-a95e-455a-8e67-ffa5b2c13e53. 
{{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11628}} [ 1683.330920] env[62519]: DEBUG oslo_concurrency.lockutils [req-af6d13e1-5dba-4bb1-8c4b-4b571adf85f5 req-d4e4094e-d6f6-431e-a56f-a45dcb5b60da service nova] Acquiring lock "refresh_cache-54a8aa34-1595-4494-ba68-6915611631ce" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1683.331093] env[62519]: DEBUG oslo_concurrency.lockutils [req-af6d13e1-5dba-4bb1-8c4b-4b571adf85f5 req-d4e4094e-d6f6-431e-a56f-a45dcb5b60da service nova] Acquired lock "refresh_cache-54a8aa34-1595-4494-ba68-6915611631ce" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1683.331325] env[62519]: DEBUG nova.network.neutron [req-af6d13e1-5dba-4bb1-8c4b-4b571adf85f5 req-d4e4094e-d6f6-431e-a56f-a45dcb5b60da service nova] [instance: 54a8aa34-1595-4494-ba68-6915611631ce] Refreshing network info cache for port eeb6f2c0-a95e-455a-8e67-ffa5b2c13e53 {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1683.352222] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-da318ded-1134-4cd5-93b1-e00afd02b3d1 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] [instance: 40507d8c-8f30-45d4-9c65-03f8b1271afb] Creating linked-clone VM from snapshot {{(pid=62519) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1683.355540] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-3e7bdd34-3982-47b4-8779-6ba60d99bdcb {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1683.365520] env[62519]: DEBUG oslo_vmware.api [None req-da318ded-1134-4cd5-93b1-e00afd02b3d1 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Waiting for the task: (returnval){ [ 1683.365520] env[62519]: value = "task-1802703" [ 1683.365520] env[62519]: _type = "Task" [ 1683.365520] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1683.374339] env[62519]: DEBUG oslo_vmware.api [None req-da318ded-1134-4cd5-93b1-e00afd02b3d1 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Task: {'id': task-1802703, 'name': CloneVM_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1683.378049] env[62519]: DEBUG nova.policy [None req-b6d3bfb6-d0cd-4359-b03b-5e40dfd88595 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '053a30aaf81b4cbd8ced7018ebfe1f40', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '0e755fb5a6e94068b6c99b1638081f5f', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62519) authorize /opt/stack/nova/nova/policy.py:192}} [ 1683.567865] env[62519]: DEBUG oslo_vmware.api [None req-ba83c5d0-762b-42f1-8643-2a65dc34a940 tempest-AttachInterfacesUnderV243Test-2090758485 tempest-AttachInterfacesUnderV243Test-2090758485-project-member] Task: {'id': task-1802701, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.060905} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1683.568222] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-ba83c5d0-762b-42f1-8643-2a65dc34a940 tempest-AttachInterfacesUnderV243Test-2090758485 tempest-AttachInterfacesUnderV243Test-2090758485-project-member] [instance: 9f71845a-e80c-4822-b3de-717f1d83bc49] Extended root virtual disk {{(pid=62519) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1683.569053] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40520baa-0bef-4f70-bdf3-244a37438d35 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1683.594484] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-ba83c5d0-762b-42f1-8643-2a65dc34a940 tempest-AttachInterfacesUnderV243Test-2090758485 tempest-AttachInterfacesUnderV243Test-2090758485-project-member] [instance: 9f71845a-e80c-4822-b3de-717f1d83bc49] Reconfiguring VM instance instance-00000044 to attach disk [datastore1] 9f71845a-e80c-4822-b3de-717f1d83bc49/9f71845a-e80c-4822-b3de-717f1d83bc49.vmdk or device None with type sparse {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1683.594944] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-59ca9295-104d-400f-b6ea-4ef7c6d13a9e {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1683.620576] env[62519]: DEBUG oslo_vmware.api [None req-ba83c5d0-762b-42f1-8643-2a65dc34a940 tempest-AttachInterfacesUnderV243Test-2090758485 tempest-AttachInterfacesUnderV243Test-2090758485-project-member] Waiting for the task: (returnval){ [ 1683.620576] env[62519]: value = "task-1802704" [ 1683.620576] env[62519]: _type = "Task" [ 1683.620576] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1683.635427] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1802702, 'name': CreateVM_Task} progress is 99%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1683.640102] env[62519]: DEBUG oslo_vmware.api [None req-ba83c5d0-762b-42f1-8643-2a65dc34a940 tempest-AttachInterfacesUnderV243Test-2090758485 tempest-AttachInterfacesUnderV243Test-2090758485-project-member] Task: {'id': task-1802704, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1683.693904] env[62519]: DEBUG nova.network.neutron [None req-b6d3bfb6-d0cd-4359-b03b-5e40dfd88595 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] [instance: 1cf285cf-8b4c-4872-b179-72e38c0143e0] Successfully created port: fdafb1fb-4c93-4644-a5a6-de52b8a4fbf6 {{(pid=62519) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1683.787732] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3962beee-9936-4e02-8bae-3d4e2d956dee {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1683.793857] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e44f3d51-e873-4740-98ec-e9339bef42da {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1683.826456] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ad5d88e-b683-4b46-a055-7d3cea5412c8 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1683.829899] env[62519]: DEBUG nova.compute.manager [None req-b6d3bfb6-d0cd-4359-b03b-5e40dfd88595 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] [instance: 1cf285cf-8b4c-4872-b179-72e38c0143e0] Start building block device mappings for instance. {{(pid=62519) _build_resources /opt/stack/nova/nova/compute/manager.py:2873}} [ 1683.841895] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c832a0be-03e8-4cd4-90a6-1f3811f26c4a {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1683.855628] env[62519]: DEBUG nova.compute.provider_tree [None req-909c1165-bf3e-436f-ab29-cc0666196a96 tempest-FloatingIPsAssociationTestJSON-131832216 tempest-FloatingIPsAssociationTestJSON-131832216-project-member] Inventory has not changed in ProviderTree for provider: f8ca0d98-9158-4b85-ae0e-b106f966dd44 {{(pid=62519) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1683.875214] env[62519]: DEBUG oslo_vmware.api [None req-da318ded-1134-4cd5-93b1-e00afd02b3d1 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Task: {'id': task-1802703, 'name': CloneVM_Task} progress is 93%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1684.099740] env[62519]: DEBUG nova.network.neutron [req-af6d13e1-5dba-4bb1-8c4b-4b571adf85f5 req-d4e4094e-d6f6-431e-a56f-a45dcb5b60da service nova] [instance: 54a8aa34-1595-4494-ba68-6915611631ce] Updated VIF entry in instance network info cache for port eeb6f2c0-a95e-455a-8e67-ffa5b2c13e53. 
{{(pid=62519) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1684.100137] env[62519]: DEBUG nova.network.neutron [req-af6d13e1-5dba-4bb1-8c4b-4b571adf85f5 req-d4e4094e-d6f6-431e-a56f-a45dcb5b60da service nova] [instance: 54a8aa34-1595-4494-ba68-6915611631ce] Updating instance_info_cache with network_info: [{"id": "eeb6f2c0-a95e-455a-8e67-ffa5b2c13e53", "address": "fa:16:3e:85:f9:c4", "network": {"id": "0ccefefe-8ea4-4ea3-91a2-32d7697da357", "bridge": "br-int", "label": "tempest-ImagesTestJSON-41230602-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a0755b34e22d4478817ec4e2d57aac2a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "594b35bb-a20b-4f0e-bd35-9acf9cc6bf11", "external-id": "nsx-vlan-transportzone-299", "segmentation_id": 299, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapeeb6f2c0-a9", "ovs_interfaceid": "eeb6f2c0-a95e-455a-8e67-ffa5b2c13e53", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1684.130802] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1802702, 'name': CreateVM_Task} progress is 99%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1684.135496] env[62519]: DEBUG oslo_vmware.api [None req-ba83c5d0-762b-42f1-8643-2a65dc34a940 tempest-AttachInterfacesUnderV243Test-2090758485 tempest-AttachInterfacesUnderV243Test-2090758485-project-member] Task: {'id': task-1802704, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1684.358083] env[62519]: DEBUG nova.scheduler.client.report [None req-909c1165-bf3e-436f-ab29-cc0666196a96 tempest-FloatingIPsAssociationTestJSON-131832216 tempest-FloatingIPsAssociationTestJSON-131832216-project-member] Inventory has not changed for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 157, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1684.378301] env[62519]: DEBUG oslo_vmware.api [None req-da318ded-1134-4cd5-93b1-e00afd02b3d1 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Task: {'id': task-1802703, 'name': CloneVM_Task} progress is 93%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1684.413307] env[62519]: DEBUG oslo_concurrency.lockutils [None req-3f693238-92fb-4a51-a1b2-5d8290ec4c65 tempest-VolumesAdminNegativeTest-694306887 tempest-VolumesAdminNegativeTest-694306887-project-member] Acquiring lock "c61c893f-826b-4874-b253-de6fbffa9e5a" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1684.413569] env[62519]: DEBUG oslo_concurrency.lockutils [None req-3f693238-92fb-4a51-a1b2-5d8290ec4c65 tempest-VolumesAdminNegativeTest-694306887 tempest-VolumesAdminNegativeTest-694306887-project-member] Lock "c61c893f-826b-4874-b253-de6fbffa9e5a" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1684.413769] env[62519]: DEBUG oslo_concurrency.lockutils [None req-3f693238-92fb-4a51-a1b2-5d8290ec4c65 tempest-VolumesAdminNegativeTest-694306887 tempest-VolumesAdminNegativeTest-694306887-project-member] Acquiring lock "c61c893f-826b-4874-b253-de6fbffa9e5a-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1684.413948] env[62519]: DEBUG oslo_concurrency.lockutils [None req-3f693238-92fb-4a51-a1b2-5d8290ec4c65 tempest-VolumesAdminNegativeTest-694306887 tempest-VolumesAdminNegativeTest-694306887-project-member] Lock "c61c893f-826b-4874-b253-de6fbffa9e5a-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1684.414126] env[62519]: DEBUG oslo_concurrency.lockutils [None req-3f693238-92fb-4a51-a1b2-5d8290ec4c65 tempest-VolumesAdminNegativeTest-694306887 tempest-VolumesAdminNegativeTest-694306887-project-member] Lock "c61c893f-826b-4874-b253-de6fbffa9e5a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1684.416148] env[62519]: INFO nova.compute.manager [None req-3f693238-92fb-4a51-a1b2-5d8290ec4c65 tempest-VolumesAdminNegativeTest-694306887 tempest-VolumesAdminNegativeTest-694306887-project-member] [instance: c61c893f-826b-4874-b253-de6fbffa9e5a] Terminating instance [ 1684.603352] env[62519]: DEBUG oslo_concurrency.lockutils [req-af6d13e1-5dba-4bb1-8c4b-4b571adf85f5 req-d4e4094e-d6f6-431e-a56f-a45dcb5b60da service nova] Releasing lock "refresh_cache-54a8aa34-1595-4494-ba68-6915611631ce" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1684.635049] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1802702, 'name': CreateVM_Task, 'duration_secs': 1.432685} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1684.638413] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 54a8aa34-1595-4494-ba68-6915611631ce] Created VM on the ESX host {{(pid=62519) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1684.638762] env[62519]: DEBUG oslo_vmware.api [None req-ba83c5d0-762b-42f1-8643-2a65dc34a940 tempest-AttachInterfacesUnderV243Test-2090758485 tempest-AttachInterfacesUnderV243Test-2090758485-project-member] Task: {'id': task-1802704, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1684.639468] env[62519]: DEBUG oslo_concurrency.lockutils [None req-82cc34c3-57f7-4d42-986e-90dfdcc61974 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b36eba9a-e231-4006-9afd-ecb0aaa21542" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1684.639655] env[62519]: DEBUG oslo_concurrency.lockutils [None req-82cc34c3-57f7-4d42-986e-90dfdcc61974 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b36eba9a-e231-4006-9afd-ecb0aaa21542" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1684.640068] env[62519]: DEBUG oslo_concurrency.lockutils [None req-82cc34c3-57f7-4d42-986e-90dfdcc61974 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/b36eba9a-e231-4006-9afd-ecb0aaa21542" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1684.640344] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9ff7f5b5-77ff-4685-a60e-f908f69f344e {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1684.644634] env[62519]: DEBUG oslo_vmware.api [None req-82cc34c3-57f7-4d42-986e-90dfdcc61974 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Waiting for the task: (returnval){ [ 1684.644634] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]522753c5-65b1-0ad2-d3d1-5e9744885c08" [ 1684.644634] env[62519]: _type = "Task" [ 1684.644634] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1684.652623] env[62519]: DEBUG oslo_vmware.api [None req-82cc34c3-57f7-4d42-986e-90dfdcc61974 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]522753c5-65b1-0ad2-d3d1-5e9744885c08, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1684.842868] env[62519]: DEBUG nova.compute.manager [None req-b6d3bfb6-d0cd-4359-b03b-5e40dfd88595 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] [instance: 1cf285cf-8b4c-4872-b179-72e38c0143e0] Start spawning the instance on the hypervisor. 
{{(pid=62519) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2647}} [ 1684.862987] env[62519]: DEBUG oslo_concurrency.lockutils [None req-909c1165-bf3e-436f-ab29-cc0666196a96 tempest-FloatingIPsAssociationTestJSON-131832216 tempest-FloatingIPsAssociationTestJSON-131832216-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.049s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1684.867084] env[62519]: DEBUG oslo_concurrency.lockutils [None req-9fb41100-265a-40a3-9269-3fe1f1b74f74 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 29.283s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1684.868660] env[62519]: INFO nova.compute.claims [None req-9fb41100-265a-40a3-9269-3fe1f1b74f74 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] [instance: 88f9351c-253b-49dd-a88e-b8585ea742ac] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1684.878617] env[62519]: DEBUG nova.virt.hardware [None req-b6d3bfb6-d0cd-4359-b03b-5e40dfd88595 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T07:56:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T07:55:55Z,direct_url=,disk_format='vmdk',id=15793716-f1d9-4a86-9030-717adf498693,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4069337684d348e0a1ab4eb6a1a2b14d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T07:55:56Z,virtual_size=,visibility=), allow threads: False {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1684.878877] env[62519]: DEBUG nova.virt.hardware [None req-b6d3bfb6-d0cd-4359-b03b-5e40dfd88595 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Flavor limits 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1684.879053] env[62519]: DEBUG nova.virt.hardware [None req-b6d3bfb6-d0cd-4359-b03b-5e40dfd88595 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Image limits 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1684.879382] env[62519]: DEBUG nova.virt.hardware [None req-b6d3bfb6-d0cd-4359-b03b-5e40dfd88595 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Flavor pref 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1684.879382] env[62519]: DEBUG nova.virt.hardware [None req-b6d3bfb6-d0cd-4359-b03b-5e40dfd88595 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Image pref 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1684.879532] env[62519]: DEBUG nova.virt.hardware 
[None req-b6d3bfb6-d0cd-4359-b03b-5e40dfd88595 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1684.879737] env[62519]: DEBUG nova.virt.hardware [None req-b6d3bfb6-d0cd-4359-b03b-5e40dfd88595 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1684.879896] env[62519]: DEBUG nova.virt.hardware [None req-b6d3bfb6-d0cd-4359-b03b-5e40dfd88595 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62519) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1684.880075] env[62519]: DEBUG nova.virt.hardware [None req-b6d3bfb6-d0cd-4359-b03b-5e40dfd88595 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Got 1 possible topologies {{(pid=62519) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1684.880238] env[62519]: DEBUG nova.virt.hardware [None req-b6d3bfb6-d0cd-4359-b03b-5e40dfd88595 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1684.880409] env[62519]: DEBUG nova.virt.hardware [None req-b6d3bfb6-d0cd-4359-b03b-5e40dfd88595 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1684.881402] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65c0dcf5-05bd-4b5c-b20f-c290f1754ed7 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1684.887054] env[62519]: DEBUG oslo_vmware.api [None req-da318ded-1134-4cd5-93b1-e00afd02b3d1 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Task: {'id': task-1802703, 'name': CloneVM_Task} progress is 94%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1684.892199] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1bf8f4f3-86fd-45d0-aa09-50b78bc6c7e9 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1684.897321] env[62519]: INFO nova.scheduler.client.report [None req-909c1165-bf3e-436f-ab29-cc0666196a96 tempest-FloatingIPsAssociationTestJSON-131832216 tempest-FloatingIPsAssociationTestJSON-131832216-project-member] Deleted allocations for instance 09eefc1a-011b-4d2c-ab75-a1fcee740907 [ 1684.920061] env[62519]: DEBUG nova.compute.manager [None req-3f693238-92fb-4a51-a1b2-5d8290ec4c65 tempest-VolumesAdminNegativeTest-694306887 tempest-VolumesAdminNegativeTest-694306887-project-member] [instance: c61c893f-826b-4874-b253-de6fbffa9e5a] Start destroying the instance on the hypervisor. 
{{(pid=62519) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3166}} [ 1684.920295] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-3f693238-92fb-4a51-a1b2-5d8290ec4c65 tempest-VolumesAdminNegativeTest-694306887 tempest-VolumesAdminNegativeTest-694306887-project-member] [instance: c61c893f-826b-4874-b253-de6fbffa9e5a] Destroying instance {{(pid=62519) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1684.921125] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59b09cda-aacb-4700-87c1-9ae2394f99fe {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1684.928932] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-3f693238-92fb-4a51-a1b2-5d8290ec4c65 tempest-VolumesAdminNegativeTest-694306887 tempest-VolumesAdminNegativeTest-694306887-project-member] [instance: c61c893f-826b-4874-b253-de6fbffa9e5a] Powering off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1684.929193] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-45e6f4af-1a65-4ca0-99d1-377dc482a458 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1684.936106] env[62519]: DEBUG oslo_vmware.api [None req-3f693238-92fb-4a51-a1b2-5d8290ec4c65 tempest-VolumesAdminNegativeTest-694306887 tempest-VolumesAdminNegativeTest-694306887-project-member] Waiting for the task: (returnval){ [ 1684.936106] env[62519]: value = "task-1802705" [ 1684.936106] env[62519]: _type = "Task" [ 1684.936106] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1684.944892] env[62519]: DEBUG oslo_vmware.api [None req-3f693238-92fb-4a51-a1b2-5d8290ec4c65 tempest-VolumesAdminNegativeTest-694306887 tempest-VolumesAdminNegativeTest-694306887-project-member] Task: {'id': task-1802705, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1685.137016] env[62519]: DEBUG oslo_vmware.api [None req-ba83c5d0-762b-42f1-8643-2a65dc34a940 tempest-AttachInterfacesUnderV243Test-2090758485 tempest-AttachInterfacesUnderV243Test-2090758485-project-member] Task: {'id': task-1802704, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1685.160195] env[62519]: DEBUG oslo_concurrency.lockutils [None req-82cc34c3-57f7-4d42-986e-90dfdcc61974 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b36eba9a-e231-4006-9afd-ecb0aaa21542" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1685.160195] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-82cc34c3-57f7-4d42-986e-90dfdcc61974 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] [instance: 54a8aa34-1595-4494-ba68-6915611631ce] Processing image b36eba9a-e231-4006-9afd-ecb0aaa21542 {{(pid=62519) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1685.160195] env[62519]: DEBUG oslo_concurrency.lockutils [None req-82cc34c3-57f7-4d42-986e-90dfdcc61974 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b36eba9a-e231-4006-9afd-ecb0aaa21542/b36eba9a-e231-4006-9afd-ecb0aaa21542.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1685.160195] env[62519]: DEBUG oslo_concurrency.lockutils [None req-82cc34c3-57f7-4d42-986e-90dfdcc61974 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b36eba9a-e231-4006-9afd-ecb0aaa21542/b36eba9a-e231-4006-9afd-ecb0aaa21542.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1685.160195] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-82cc34c3-57f7-4d42-986e-90dfdcc61974 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62519) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1685.160195] env[62519]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e11939ac-39f0-4c90-a7bf-5e6304f40d17 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1685.169347] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-82cc34c3-57f7-4d42-986e-90dfdcc61974 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62519) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1685.169546] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-82cc34c3-57f7-4d42-986e-90dfdcc61974 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62519) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1685.171049] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-895ab751-e099-4ccd-84a1-e138ce18b59a {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1685.175933] env[62519]: DEBUG oslo_vmware.api [None req-82cc34c3-57f7-4d42-986e-90dfdcc61974 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Waiting for the task: (returnval){ [ 1685.175933] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]52b5fafd-644a-3c4a-bc9e-c54bf96321b9" [ 1685.175933] env[62519]: _type = "Task" [ 1685.175933] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1685.184839] env[62519]: DEBUG oslo_vmware.api [None req-82cc34c3-57f7-4d42-986e-90dfdcc61974 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52b5fafd-644a-3c4a-bc9e-c54bf96321b9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1685.300422] env[62519]: DEBUG nova.network.neutron [None req-b6d3bfb6-d0cd-4359-b03b-5e40dfd88595 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] [instance: 1cf285cf-8b4c-4872-b179-72e38c0143e0] Successfully updated port: fdafb1fb-4c93-4644-a5a6-de52b8a4fbf6 {{(pid=62519) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1685.386153] env[62519]: DEBUG oslo_vmware.api [None req-da318ded-1134-4cd5-93b1-e00afd02b3d1 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Task: {'id': task-1802703, 'name': CloneVM_Task} progress is 95%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1685.405350] env[62519]: DEBUG oslo_concurrency.lockutils [None req-909c1165-bf3e-436f-ab29-cc0666196a96 tempest-FloatingIPsAssociationTestJSON-131832216 tempest-FloatingIPsAssociationTestJSON-131832216-project-member] Lock "09eefc1a-011b-4d2c-ab75-a1fcee740907" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 33.542s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1685.446664] env[62519]: DEBUG oslo_vmware.api [None req-3f693238-92fb-4a51-a1b2-5d8290ec4c65 tempest-VolumesAdminNegativeTest-694306887 tempest-VolumesAdminNegativeTest-694306887-project-member] Task: {'id': task-1802705, 'name': PowerOffVM_Task, 'duration_secs': 0.234983} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1685.446946] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-3f693238-92fb-4a51-a1b2-5d8290ec4c65 tempest-VolumesAdminNegativeTest-694306887 tempest-VolumesAdminNegativeTest-694306887-project-member] [instance: c61c893f-826b-4874-b253-de6fbffa9e5a] Powered off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1685.447138] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-3f693238-92fb-4a51-a1b2-5d8290ec4c65 tempest-VolumesAdminNegativeTest-694306887 tempest-VolumesAdminNegativeTest-694306887-project-member] [instance: c61c893f-826b-4874-b253-de6fbffa9e5a] Unregistering the VM {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1685.447414] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ac674514-6101-42a2-9e9b-50c0629cfef4 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1685.488904] env[62519]: DEBUG nova.compute.manager [req-db380ea4-67ca-46ea-b685-73bd070c4ce0 req-a5b66398-7ea5-45c7-be02-3473d05c0d21 service nova] [instance: 1cf285cf-8b4c-4872-b179-72e38c0143e0] Received event network-vif-plugged-fdafb1fb-4c93-4644-a5a6-de52b8a4fbf6 {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1685.489140] env[62519]: DEBUG oslo_concurrency.lockutils [req-db380ea4-67ca-46ea-b685-73bd070c4ce0 req-a5b66398-7ea5-45c7-be02-3473d05c0d21 service nova] Acquiring lock "1cf285cf-8b4c-4872-b179-72e38c0143e0-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1685.489352] env[62519]: DEBUG oslo_concurrency.lockutils [req-db380ea4-67ca-46ea-b685-73bd070c4ce0 req-a5b66398-7ea5-45c7-be02-3473d05c0d21 service nova] Lock "1cf285cf-8b4c-4872-b179-72e38c0143e0-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1685.489517] env[62519]: DEBUG oslo_concurrency.lockutils [req-db380ea4-67ca-46ea-b685-73bd070c4ce0 req-a5b66398-7ea5-45c7-be02-3473d05c0d21 service nova] Lock "1cf285cf-8b4c-4872-b179-72e38c0143e0-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1685.489683] env[62519]: DEBUG nova.compute.manager [req-db380ea4-67ca-46ea-b685-73bd070c4ce0 req-a5b66398-7ea5-45c7-be02-3473d05c0d21 service nova] [instance: 1cf285cf-8b4c-4872-b179-72e38c0143e0] No waiting events found dispatching network-vif-plugged-fdafb1fb-4c93-4644-a5a6-de52b8a4fbf6 {{(pid=62519) pop_instance_event /opt/stack/nova/nova/compute/manager.py:323}} [ 1685.489843] env[62519]: WARNING nova.compute.manager [req-db380ea4-67ca-46ea-b685-73bd070c4ce0 req-a5b66398-7ea5-45c7-be02-3473d05c0d21 service nova] [instance: 1cf285cf-8b4c-4872-b179-72e38c0143e0] Received unexpected event network-vif-plugged-fdafb1fb-4c93-4644-a5a6-de52b8a4fbf6 for instance with vm_state building and task_state spawning. 
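The lock messages just above ("1cf285cf-...-events" acquired and released around pop_instance_event, followed by "No waiting events found" and the "Received unexpected event" warning) trace the per-instance event serialization that nova.compute.manager performs when Neutron sends network-vif-plugged/network-changed notifications. A minimal, hypothetical sketch of that pattern is below; it is not Nova's code, and dispatch_external_event and _pending_events are invented names used only to illustrate the lock-then-pop flow the log records.

# Illustrative sketch only (assumed names, not Nova's implementation).
from oslo_concurrency import lockutils

_pending_events = {}  # instance_uuid -> {event_name: callback}

def dispatch_external_event(instance_uuid, event_name):
    # Serialize against other threads touching the same instance's event
    # table, mirroring the Lock "<uuid>-events" acquire/release pairs above.
    with lockutils.lock('%s-events' % instance_uuid):
        callback = _pending_events.get(instance_uuid, {}).pop(event_name, None)
    if callback is None:
        # Corresponds to the "No waiting events found dispatching ..." and
        # "Received unexpected event ..." branch seen in the log.
        print('Received unexpected event %s for instance %s'
              % (event_name, instance_uuid))
    else:
        callback(event_name)

The point of the pattern is that the lock is held only long enough to pop the waiter, which is why the log shows the "-events" lock held for 0.000s even while the surrounding build continues.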
[ 1685.490070] env[62519]: DEBUG nova.compute.manager [req-db380ea4-67ca-46ea-b685-73bd070c4ce0 req-a5b66398-7ea5-45c7-be02-3473d05c0d21 service nova] [instance: 1cf285cf-8b4c-4872-b179-72e38c0143e0] Received event network-changed-fdafb1fb-4c93-4644-a5a6-de52b8a4fbf6 {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1685.490269] env[62519]: DEBUG nova.compute.manager [req-db380ea4-67ca-46ea-b685-73bd070c4ce0 req-a5b66398-7ea5-45c7-be02-3473d05c0d21 service nova] [instance: 1cf285cf-8b4c-4872-b179-72e38c0143e0] Refreshing instance network info cache due to event network-changed-fdafb1fb-4c93-4644-a5a6-de52b8a4fbf6. {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11628}} [ 1685.490450] env[62519]: DEBUG oslo_concurrency.lockutils [req-db380ea4-67ca-46ea-b685-73bd070c4ce0 req-a5b66398-7ea5-45c7-be02-3473d05c0d21 service nova] Acquiring lock "refresh_cache-1cf285cf-8b4c-4872-b179-72e38c0143e0" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1685.490583] env[62519]: DEBUG oslo_concurrency.lockutils [req-db380ea4-67ca-46ea-b685-73bd070c4ce0 req-a5b66398-7ea5-45c7-be02-3473d05c0d21 service nova] Acquired lock "refresh_cache-1cf285cf-8b4c-4872-b179-72e38c0143e0" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1685.490736] env[62519]: DEBUG nova.network.neutron [req-db380ea4-67ca-46ea-b685-73bd070c4ce0 req-a5b66398-7ea5-45c7-be02-3473d05c0d21 service nova] [instance: 1cf285cf-8b4c-4872-b179-72e38c0143e0] Refreshing network info cache for port fdafb1fb-4c93-4644-a5a6-de52b8a4fbf6 {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1685.592056] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-3f693238-92fb-4a51-a1b2-5d8290ec4c65 tempest-VolumesAdminNegativeTest-694306887 tempest-VolumesAdminNegativeTest-694306887-project-member] [instance: c61c893f-826b-4874-b253-de6fbffa9e5a] Unregistered the VM {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1685.592304] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-3f693238-92fb-4a51-a1b2-5d8290ec4c65 tempest-VolumesAdminNegativeTest-694306887 tempest-VolumesAdminNegativeTest-694306887-project-member] [instance: c61c893f-826b-4874-b253-de6fbffa9e5a] Deleting contents of the VM from datastore datastore1 {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1685.592538] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-3f693238-92fb-4a51-a1b2-5d8290ec4c65 tempest-VolumesAdminNegativeTest-694306887 tempest-VolumesAdminNegativeTest-694306887-project-member] Deleting the datastore file [datastore1] c61c893f-826b-4874-b253-de6fbffa9e5a {{(pid=62519) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1685.592811] env[62519]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-48b758f4-2877-4458-86aa-ed63ff482ee3 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1685.599785] env[62519]: DEBUG oslo_vmware.api [None req-3f693238-92fb-4a51-a1b2-5d8290ec4c65 tempest-VolumesAdminNegativeTest-694306887 tempest-VolumesAdminNegativeTest-694306887-project-member] Waiting for the task: (returnval){ [ 1685.599785] env[62519]: value = "task-1802707" [ 1685.599785] env[62519]: _type = "Task" [ 1685.599785] env[62519]: } to complete. 
{{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1685.610753] env[62519]: DEBUG oslo_vmware.api [None req-3f693238-92fb-4a51-a1b2-5d8290ec4c65 tempest-VolumesAdminNegativeTest-694306887 tempest-VolumesAdminNegativeTest-694306887-project-member] Task: {'id': task-1802707, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1685.635528] env[62519]: DEBUG oslo_vmware.api [None req-ba83c5d0-762b-42f1-8643-2a65dc34a940 tempest-AttachInterfacesUnderV243Test-2090758485 tempest-AttachInterfacesUnderV243Test-2090758485-project-member] Task: {'id': task-1802704, 'name': ReconfigVM_Task, 'duration_secs': 1.526302} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1685.635924] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-ba83c5d0-762b-42f1-8643-2a65dc34a940 tempest-AttachInterfacesUnderV243Test-2090758485 tempest-AttachInterfacesUnderV243Test-2090758485-project-member] [instance: 9f71845a-e80c-4822-b3de-717f1d83bc49] Reconfigured VM instance instance-00000044 to attach disk [datastore1] 9f71845a-e80c-4822-b3de-717f1d83bc49/9f71845a-e80c-4822-b3de-717f1d83bc49.vmdk or device None with type sparse {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1685.637045] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-95c3ecf7-db2b-46fe-af00-ff27c859322d {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1685.644706] env[62519]: DEBUG oslo_vmware.api [None req-ba83c5d0-762b-42f1-8643-2a65dc34a940 tempest-AttachInterfacesUnderV243Test-2090758485 tempest-AttachInterfacesUnderV243Test-2090758485-project-member] Waiting for the task: (returnval){ [ 1685.644706] env[62519]: value = "task-1802708" [ 1685.644706] env[62519]: _type = "Task" [ 1685.644706] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1685.654227] env[62519]: DEBUG oslo_vmware.api [None req-ba83c5d0-762b-42f1-8643-2a65dc34a940 tempest-AttachInterfacesUnderV243Test-2090758485 tempest-AttachInterfacesUnderV243Test-2090758485-project-member] Task: {'id': task-1802708, 'name': Rename_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1685.691248] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-82cc34c3-57f7-4d42-986e-90dfdcc61974 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] [instance: 54a8aa34-1595-4494-ba68-6915611631ce] Preparing fetch location {{(pid=62519) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1685.695073] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-82cc34c3-57f7-4d42-986e-90dfdcc61974 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] [instance: 54a8aa34-1595-4494-ba68-6915611631ce] Fetch image to [datastore1] OSTACK_IMG_e5975c7e-1c4e-4932-9686-3d10cf5c71a2/OSTACK_IMG_e5975c7e-1c4e-4932-9686-3d10cf5c71a2.vmdk {{(pid=62519) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1685.695073] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-82cc34c3-57f7-4d42-986e-90dfdcc61974 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] [instance: 54a8aa34-1595-4494-ba68-6915611631ce] Downloading stream optimized image b36eba9a-e231-4006-9afd-ecb0aaa21542 to [datastore1] OSTACK_IMG_e5975c7e-1c4e-4932-9686-3d10cf5c71a2/OSTACK_IMG_e5975c7e-1c4e-4932-9686-3d10cf5c71a2.vmdk on the data store datastore1 as vApp {{(pid=62519) _fetch_image_as_vapp /opt/stack/nova/nova/virt/vmwareapi/vmops.py:437}} [ 1685.695073] env[62519]: DEBUG nova.virt.vmwareapi.images [None req-82cc34c3-57f7-4d42-986e-90dfdcc61974 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] [instance: 54a8aa34-1595-4494-ba68-6915611631ce] Downloading image file data b36eba9a-e231-4006-9afd-ecb0aaa21542 to the ESX as VM named 'OSTACK_IMG_e5975c7e-1c4e-4932-9686-3d10cf5c71a2' {{(pid=62519) fetch_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:336}} [ 1685.793019] env[62519]: DEBUG oslo_vmware.rw_handles [None req-82cc34c3-57f7-4d42-986e-90dfdcc61974 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Creating HttpNfcLease lease for vApp import into resource pool: (val){ [ 1685.793019] env[62519]: value = "resgroup-9" [ 1685.793019] env[62519]: _type = "ResourcePool" [ 1685.793019] env[62519]: }. 
{{(pid=62519) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:453}} [ 1685.793019] env[62519]: DEBUG oslo_vmware.service [-] Invoking ResourcePool.ImportVApp with opID=oslo.vmware-4dfc3a59-8eb6-409a-95aa-bf141dffe9e5 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1685.808080] env[62519]: DEBUG oslo_concurrency.lockutils [None req-b6d3bfb6-d0cd-4359-b03b-5e40dfd88595 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Acquiring lock "refresh_cache-1cf285cf-8b4c-4872-b179-72e38c0143e0" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1685.815377] env[62519]: DEBUG oslo_vmware.rw_handles [None req-82cc34c3-57f7-4d42-986e-90dfdcc61974 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Lease: (returnval){ [ 1685.815377] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]52b5f4a2-bb0f-db22-70f4-aa896bd18e43" [ 1685.815377] env[62519]: _type = "HttpNfcLease" [ 1685.815377] env[62519]: } obtained for vApp import into resource pool (val){ [ 1685.815377] env[62519]: value = "resgroup-9" [ 1685.815377] env[62519]: _type = "ResourcePool" [ 1685.815377] env[62519]: }. {{(pid=62519) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:461}} [ 1685.818407] env[62519]: DEBUG oslo_vmware.api [None req-82cc34c3-57f7-4d42-986e-90dfdcc61974 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Waiting for the lease: (returnval){ [ 1685.818407] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]52b5f4a2-bb0f-db22-70f4-aa896bd18e43" [ 1685.818407] env[62519]: _type = "HttpNfcLease" [ 1685.818407] env[62519]: } to be ready. {{(pid=62519) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1685.823690] env[62519]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1685.823690] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]52b5f4a2-bb0f-db22-70f4-aa896bd18e43" [ 1685.823690] env[62519]: _type = "HttpNfcLease" [ 1685.823690] env[62519]: } is initializing. {{(pid=62519) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1685.886866] env[62519]: DEBUG oslo_vmware.api [None req-da318ded-1134-4cd5-93b1-e00afd02b3d1 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Task: {'id': task-1802703, 'name': CloneVM_Task, 'duration_secs': 2.24913} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1685.888378] env[62519]: INFO nova.virt.vmwareapi.vmops [None req-da318ded-1134-4cd5-93b1-e00afd02b3d1 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] [instance: 40507d8c-8f30-45d4-9c65-03f8b1271afb] Created linked-clone VM from snapshot [ 1685.891320] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a3974be-7395-43c1-98e7-d6ed40a60fee {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1685.901869] env[62519]: DEBUG nova.virt.vmwareapi.images [None req-da318ded-1134-4cd5-93b1-e00afd02b3d1 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] [instance: 40507d8c-8f30-45d4-9c65-03f8b1271afb] Uploading image bd04b6bc-eb74-425e-b90d-ce3811593054 {{(pid=62519) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1685.928147] env[62519]: DEBUG oslo_vmware.rw_handles [None req-da318ded-1134-4cd5-93b1-e00afd02b3d1 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1685.928147] env[62519]: value = "vm-373758" [ 1685.928147] env[62519]: _type = "VirtualMachine" [ 1685.928147] env[62519]: }. {{(pid=62519) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1685.928147] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-80fa88f5-98e3-4b51-b47c-69d71afdb8fd {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1685.935639] env[62519]: DEBUG oslo_vmware.rw_handles [None req-da318ded-1134-4cd5-93b1-e00afd02b3d1 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Lease: (returnval){ [ 1685.935639] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]523c6550-c0f6-3e5d-bd10-b8842aa7debb" [ 1685.935639] env[62519]: _type = "HttpNfcLease" [ 1685.935639] env[62519]: } obtained for exporting VM: (result){ [ 1685.935639] env[62519]: value = "vm-373758" [ 1685.935639] env[62519]: _type = "VirtualMachine" [ 1685.935639] env[62519]: }. {{(pid=62519) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1685.935889] env[62519]: DEBUG oslo_vmware.api [None req-da318ded-1134-4cd5-93b1-e00afd02b3d1 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Waiting for the lease: (returnval){ [ 1685.935889] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]523c6550-c0f6-3e5d-bd10-b8842aa7debb" [ 1685.935889] env[62519]: _type = "HttpNfcLease" [ 1685.935889] env[62519]: } to be ready. {{(pid=62519) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1685.948410] env[62519]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1685.948410] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]523c6550-c0f6-3e5d-bd10-b8842aa7debb" [ 1685.948410] env[62519]: _type = "HttpNfcLease" [ 1685.948410] env[62519]: } is ready. 
{{(pid=62519) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1685.948725] env[62519]: DEBUG oslo_vmware.rw_handles [None req-da318ded-1134-4cd5-93b1-e00afd02b3d1 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1685.948725] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]523c6550-c0f6-3e5d-bd10-b8842aa7debb" [ 1685.948725] env[62519]: _type = "HttpNfcLease" [ 1685.948725] env[62519]: }. {{(pid=62519) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1685.949532] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-875b84d7-0828-4c70-bd30-9963c025e204 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1685.963688] env[62519]: DEBUG oslo_vmware.rw_handles [None req-da318ded-1134-4cd5-93b1-e00afd02b3d1 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52dca328-60f8-4556-1c63-3967de83c17d/disk-0.vmdk from lease info. {{(pid=62519) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1685.963896] env[62519]: DEBUG oslo_vmware.rw_handles [None req-da318ded-1134-4cd5-93b1-e00afd02b3d1 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Opening URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52dca328-60f8-4556-1c63-3967de83c17d/disk-0.vmdk for reading. {{(pid=62519) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1686.064209] env[62519]: DEBUG nova.network.neutron [req-db380ea4-67ca-46ea-b685-73bd070c4ce0 req-a5b66398-7ea5-45c7-be02-3473d05c0d21 service nova] [instance: 1cf285cf-8b4c-4872-b179-72e38c0143e0] Instance cache missing network info. {{(pid=62519) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1686.084644] env[62519]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-2717e66c-ef6c-4d58-a53e-d73ce043e266 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1686.111382] env[62519]: DEBUG oslo_vmware.api [None req-3f693238-92fb-4a51-a1b2-5d8290ec4c65 tempest-VolumesAdminNegativeTest-694306887 tempest-VolumesAdminNegativeTest-694306887-project-member] Task: {'id': task-1802707, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.240912} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1686.114015] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-3f693238-92fb-4a51-a1b2-5d8290ec4c65 tempest-VolumesAdminNegativeTest-694306887 tempest-VolumesAdminNegativeTest-694306887-project-member] Deleted the datastore file {{(pid=62519) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1686.114217] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-3f693238-92fb-4a51-a1b2-5d8290ec4c65 tempest-VolumesAdminNegativeTest-694306887 tempest-VolumesAdminNegativeTest-694306887-project-member] [instance: c61c893f-826b-4874-b253-de6fbffa9e5a] Deleted contents of the VM from datastore datastore1 {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1686.114391] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-3f693238-92fb-4a51-a1b2-5d8290ec4c65 tempest-VolumesAdminNegativeTest-694306887 tempest-VolumesAdminNegativeTest-694306887-project-member] [instance: c61c893f-826b-4874-b253-de6fbffa9e5a] Instance destroyed {{(pid=62519) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1686.114563] env[62519]: INFO nova.compute.manager [None req-3f693238-92fb-4a51-a1b2-5d8290ec4c65 tempest-VolumesAdminNegativeTest-694306887 tempest-VolumesAdminNegativeTest-694306887-project-member] [instance: c61c893f-826b-4874-b253-de6fbffa9e5a] Took 1.19 seconds to destroy the instance on the hypervisor. [ 1686.114799] env[62519]: DEBUG oslo.service.loopingcall [None req-3f693238-92fb-4a51-a1b2-5d8290ec4c65 tempest-VolumesAdminNegativeTest-694306887 tempest-VolumesAdminNegativeTest-694306887-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62519) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1686.119844] env[62519]: DEBUG nova.compute.manager [-] [instance: c61c893f-826b-4874-b253-de6fbffa9e5a] Deallocating network for instance {{(pid=62519) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2297}} [ 1686.119844] env[62519]: DEBUG nova.network.neutron [-] [instance: c61c893f-826b-4874-b253-de6fbffa9e5a] deallocate_for_instance() {{(pid=62519) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1686.161608] env[62519]: DEBUG oslo_vmware.api [None req-ba83c5d0-762b-42f1-8643-2a65dc34a940 tempest-AttachInterfacesUnderV243Test-2090758485 tempest-AttachInterfacesUnderV243Test-2090758485-project-member] Task: {'id': task-1802708, 'name': Rename_Task, 'duration_secs': 0.142673} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1686.161608] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-ba83c5d0-762b-42f1-8643-2a65dc34a940 tempest-AttachInterfacesUnderV243Test-2090758485 tempest-AttachInterfacesUnderV243Test-2090758485-project-member] [instance: 9f71845a-e80c-4822-b3de-717f1d83bc49] Powering on the VM {{(pid=62519) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1686.161608] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-5286f11d-cb7a-4b9e-aa09-2a4445998237 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1686.165723] env[62519]: DEBUG oslo_vmware.api [None req-ba83c5d0-762b-42f1-8643-2a65dc34a940 tempest-AttachInterfacesUnderV243Test-2090758485 tempest-AttachInterfacesUnderV243Test-2090758485-project-member] Waiting for the task: (returnval){ [ 1686.165723] env[62519]: value = "task-1802711" [ 1686.165723] env[62519]: _type = "Task" [ 1686.165723] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1686.178137] env[62519]: DEBUG oslo_vmware.api [None req-ba83c5d0-762b-42f1-8643-2a65dc34a940 tempest-AttachInterfacesUnderV243Test-2090758485 tempest-AttachInterfacesUnderV243Test-2090758485-project-member] Task: {'id': task-1802711, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1686.194990] env[62519]: DEBUG nova.network.neutron [req-db380ea4-67ca-46ea-b685-73bd070c4ce0 req-a5b66398-7ea5-45c7-be02-3473d05c0d21 service nova] [instance: 1cf285cf-8b4c-4872-b179-72e38c0143e0] Updating instance_info_cache with network_info: [] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1686.329594] env[62519]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1686.329594] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]52b5f4a2-bb0f-db22-70f4-aa896bd18e43" [ 1686.329594] env[62519]: _type = "HttpNfcLease" [ 1686.329594] env[62519]: } is initializing. 
{{(pid=62519) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1686.464753] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36db3a81-b9d8-456b-bb24-cdcccc9614ab {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1686.468063] env[62519]: DEBUG oslo_concurrency.lockutils [None req-5fc67e40-6ac8-45ae-a471-6bc7d3a0937f tempest-ServersTestFqdnHostnames-1137700598 tempest-ServersTestFqdnHostnames-1137700598-project-member] Acquiring lock "a59be5e6-2316-4766-933a-4d01dfe4fec1" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1686.468335] env[62519]: DEBUG oslo_concurrency.lockutils [None req-5fc67e40-6ac8-45ae-a471-6bc7d3a0937f tempest-ServersTestFqdnHostnames-1137700598 tempest-ServersTestFqdnHostnames-1137700598-project-member] Lock "a59be5e6-2316-4766-933a-4d01dfe4fec1" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1686.468563] env[62519]: DEBUG oslo_concurrency.lockutils [None req-5fc67e40-6ac8-45ae-a471-6bc7d3a0937f tempest-ServersTestFqdnHostnames-1137700598 tempest-ServersTestFqdnHostnames-1137700598-project-member] Acquiring lock "a59be5e6-2316-4766-933a-4d01dfe4fec1-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1686.468755] env[62519]: DEBUG oslo_concurrency.lockutils [None req-5fc67e40-6ac8-45ae-a471-6bc7d3a0937f tempest-ServersTestFqdnHostnames-1137700598 tempest-ServersTestFqdnHostnames-1137700598-project-member] Lock "a59be5e6-2316-4766-933a-4d01dfe4fec1-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1686.468927] env[62519]: DEBUG oslo_concurrency.lockutils [None req-5fc67e40-6ac8-45ae-a471-6bc7d3a0937f tempest-ServersTestFqdnHostnames-1137700598 tempest-ServersTestFqdnHostnames-1137700598-project-member] Lock "a59be5e6-2316-4766-933a-4d01dfe4fec1-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1686.471277] env[62519]: INFO nova.compute.manager [None req-5fc67e40-6ac8-45ae-a471-6bc7d3a0937f tempest-ServersTestFqdnHostnames-1137700598 tempest-ServersTestFqdnHostnames-1137700598-project-member] [instance: a59be5e6-2316-4766-933a-4d01dfe4fec1] Terminating instance [ 1686.476807] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7327d758-64d7-4ace-8bb5-f22c3074ee04 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1686.509987] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1aaed397-f7db-491e-b2bd-3ba699b5ca42 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1686.519031] 
env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59fe24d7-f355-4d8d-8222-8864a5a816ff {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1686.535785] env[62519]: DEBUG nova.compute.provider_tree [None req-9fb41100-265a-40a3-9269-3fe1f1b74f74 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Inventory has not changed in ProviderTree for provider: f8ca0d98-9158-4b85-ae0e-b106f966dd44 {{(pid=62519) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1686.687168] env[62519]: DEBUG oslo_vmware.api [None req-ba83c5d0-762b-42f1-8643-2a65dc34a940 tempest-AttachInterfacesUnderV243Test-2090758485 tempest-AttachInterfacesUnderV243Test-2090758485-project-member] Task: {'id': task-1802711, 'name': PowerOnVM_Task} progress is 88%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1686.697418] env[62519]: DEBUG oslo_concurrency.lockutils [req-db380ea4-67ca-46ea-b685-73bd070c4ce0 req-a5b66398-7ea5-45c7-be02-3473d05c0d21 service nova] Releasing lock "refresh_cache-1cf285cf-8b4c-4872-b179-72e38c0143e0" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1686.698148] env[62519]: DEBUG oslo_concurrency.lockutils [None req-b6d3bfb6-d0cd-4359-b03b-5e40dfd88595 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Acquired lock "refresh_cache-1cf285cf-8b4c-4872-b179-72e38c0143e0" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1686.698312] env[62519]: DEBUG nova.network.neutron [None req-b6d3bfb6-d0cd-4359-b03b-5e40dfd88595 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] [instance: 1cf285cf-8b4c-4872-b179-72e38c0143e0] Building network info cache for instance {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1686.830236] env[62519]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1686.830236] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]52b5f4a2-bb0f-db22-70f4-aa896bd18e43" [ 1686.830236] env[62519]: _type = "HttpNfcLease" [ 1686.830236] env[62519]: } is ready. {{(pid=62519) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1686.830604] env[62519]: DEBUG oslo_vmware.rw_handles [None req-82cc34c3-57f7-4d42-986e-90dfdcc61974 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1686.830604] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]52b5f4a2-bb0f-db22-70f4-aa896bd18e43" [ 1686.830604] env[62519]: _type = "HttpNfcLease" [ 1686.830604] env[62519]: }. 
{{(pid=62519) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:467}} [ 1686.831374] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61ce32be-c773-4467-b5b1-b2a610a0e1d3 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1686.841356] env[62519]: DEBUG oslo_vmware.rw_handles [None req-82cc34c3-57f7-4d42-986e-90dfdcc61974 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52de61b6-17fc-13cd-4896-20cc3ccbbdc6/disk-0.vmdk from lease info. {{(pid=62519) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1686.841894] env[62519]: DEBUG oslo_vmware.rw_handles [None req-82cc34c3-57f7-4d42-986e-90dfdcc61974 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Creating HTTP connection to write to file with size = 21334016 and URL = https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52de61b6-17fc-13cd-4896-20cc3ccbbdc6/disk-0.vmdk. {{(pid=62519) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1686.917075] env[62519]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-28a191dc-1f40-4b5a-9a47-39684d7b73a7 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1686.982390] env[62519]: DEBUG nova.compute.manager [None req-5fc67e40-6ac8-45ae-a471-6bc7d3a0937f tempest-ServersTestFqdnHostnames-1137700598 tempest-ServersTestFqdnHostnames-1137700598-project-member] [instance: a59be5e6-2316-4766-933a-4d01dfe4fec1] Start destroying the instance on the hypervisor. 
{{(pid=62519) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3166}} [ 1686.983463] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-5fc67e40-6ac8-45ae-a471-6bc7d3a0937f tempest-ServersTestFqdnHostnames-1137700598 tempest-ServersTestFqdnHostnames-1137700598-project-member] [instance: a59be5e6-2316-4766-933a-4d01dfe4fec1] Destroying instance {{(pid=62519) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1686.984080] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16627cfb-49c4-4c80-a0d6-f8bc61091d25 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1686.994829] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-5fc67e40-6ac8-45ae-a471-6bc7d3a0937f tempest-ServersTestFqdnHostnames-1137700598 tempest-ServersTestFqdnHostnames-1137700598-project-member] [instance: a59be5e6-2316-4766-933a-4d01dfe4fec1] Powering off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1686.995202] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-8e11adbf-fefc-4a01-a6aa-bcd991e3bcd1 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1687.003059] env[62519]: DEBUG oslo_vmware.api [None req-5fc67e40-6ac8-45ae-a471-6bc7d3a0937f tempest-ServersTestFqdnHostnames-1137700598 tempest-ServersTestFqdnHostnames-1137700598-project-member] Waiting for the task: (returnval){ [ 1687.003059] env[62519]: value = "task-1802712" [ 1687.003059] env[62519]: _type = "Task" [ 1687.003059] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1687.012283] env[62519]: DEBUG oslo_vmware.api [None req-5fc67e40-6ac8-45ae-a471-6bc7d3a0937f tempest-ServersTestFqdnHostnames-1137700598 tempest-ServersTestFqdnHostnames-1137700598-project-member] Task: {'id': task-1802712, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1687.039149] env[62519]: DEBUG nova.scheduler.client.report [None req-9fb41100-265a-40a3-9269-3fe1f1b74f74 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Inventory has not changed for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 157, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1687.182542] env[62519]: DEBUG oslo_vmware.api [None req-ba83c5d0-762b-42f1-8643-2a65dc34a940 tempest-AttachInterfacesUnderV243Test-2090758485 tempest-AttachInterfacesUnderV243Test-2090758485-project-member] Task: {'id': task-1802711, 'name': PowerOnVM_Task, 'duration_secs': 0.79188} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1687.183325] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-ba83c5d0-762b-42f1-8643-2a65dc34a940 tempest-AttachInterfacesUnderV243Test-2090758485 tempest-AttachInterfacesUnderV243Test-2090758485-project-member] [instance: 9f71845a-e80c-4822-b3de-717f1d83bc49] Powered on the VM {{(pid=62519) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1687.183616] env[62519]: INFO nova.compute.manager [None req-ba83c5d0-762b-42f1-8643-2a65dc34a940 tempest-AttachInterfacesUnderV243Test-2090758485 tempest-AttachInterfacesUnderV243Test-2090758485-project-member] [instance: 9f71845a-e80c-4822-b3de-717f1d83bc49] Took 9.47 seconds to spawn the instance on the hypervisor. [ 1687.184071] env[62519]: DEBUG nova.compute.manager [None req-ba83c5d0-762b-42f1-8643-2a65dc34a940 tempest-AttachInterfacesUnderV243Test-2090758485 tempest-AttachInterfacesUnderV243Test-2090758485-project-member] [instance: 9f71845a-e80c-4822-b3de-717f1d83bc49] Checking state {{(pid=62519) _get_power_state /opt/stack/nova/nova/compute/manager.py:1799}} [ 1687.184204] env[62519]: DEBUG nova.network.neutron [-] [instance: c61c893f-826b-4874-b253-de6fbffa9e5a] Updating instance_info_cache with network_info: [] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1687.186264] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8735083b-7b0d-44a8-a7b0-8f0d0708b968 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1687.260439] env[62519]: DEBUG nova.network.neutron [None req-b6d3bfb6-d0cd-4359-b03b-5e40dfd88595 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] [instance: 1cf285cf-8b4c-4872-b179-72e38c0143e0] Instance cache missing network info. {{(pid=62519) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1687.514486] env[62519]: DEBUG oslo_vmware.api [None req-5fc67e40-6ac8-45ae-a471-6bc7d3a0937f tempest-ServersTestFqdnHostnames-1137700598 tempest-ServersTestFqdnHostnames-1137700598-project-member] Task: {'id': task-1802712, 'name': PowerOffVM_Task, 'duration_secs': 0.302204} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1687.516317] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-5fc67e40-6ac8-45ae-a471-6bc7d3a0937f tempest-ServersTestFqdnHostnames-1137700598 tempest-ServersTestFqdnHostnames-1137700598-project-member] [instance: a59be5e6-2316-4766-933a-4d01dfe4fec1] Powered off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1687.516523] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-5fc67e40-6ac8-45ae-a471-6bc7d3a0937f tempest-ServersTestFqdnHostnames-1137700598 tempest-ServersTestFqdnHostnames-1137700598-project-member] [instance: a59be5e6-2316-4766-933a-4d01dfe4fec1] Unregistering the VM {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1687.517646] env[62519]: DEBUG nova.network.neutron [None req-b6d3bfb6-d0cd-4359-b03b-5e40dfd88595 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] [instance: 1cf285cf-8b4c-4872-b179-72e38c0143e0] Updating instance_info_cache with network_info: [{"id": "fdafb1fb-4c93-4644-a5a6-de52b8a4fbf6", "address": "fa:16:3e:a7:b2:3f", "network": {"id": "8af40fa9-af04-4837-8508-5d0e5b7f1d9c", "bridge": "br-int", "label": "tempest-ServersTestJSON-1824239670-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0e755fb5a6e94068b6c99b1638081f5f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7ab8d568-adb0-4f3b-b6cc-68413e6546ae", "external-id": "nsx-vlan-transportzone-86", "segmentation_id": 86, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfdafb1fb-4c", "ovs_interfaceid": "fdafb1fb-4c93-4644-a5a6-de52b8a4fbf6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1687.519665] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-7d37e7f4-8943-4a27-84ff-5f85942825b1 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1687.523684] env[62519]: DEBUG nova.compute.manager [req-b8b02149-3079-44d1-ba11-002904531ed0 req-3f211073-96b1-4a3a-82a3-54cd69715230 service nova] [instance: c61c893f-826b-4874-b253-de6fbffa9e5a] Received event network-vif-deleted-8a1a2471-fe2e-4bb8-917a-135d0d5f4859 {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1687.547990] env[62519]: DEBUG oslo_concurrency.lockutils [None req-9fb41100-265a-40a3-9269-3fe1f1b74f74 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.681s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1687.548574] env[62519]: DEBUG nova.compute.manager [None req-9fb41100-265a-40a3-9269-3fe1f1b74f74 
tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] [instance: 88f9351c-253b-49dd-a88e-b8585ea742ac] Start building networks asynchronously for instance. {{(pid=62519) _build_resources /opt/stack/nova/nova/compute/manager.py:2838}} [ 1687.551418] env[62519]: DEBUG oslo_concurrency.lockutils [None req-6efd5194-063f-45af-b886-0e88145beea5 tempest-ServerTagsTestJSON-1513198723 tempest-ServerTagsTestJSON-1513198723-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 30.204s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1687.551674] env[62519]: DEBUG nova.objects.instance [None req-6efd5194-063f-45af-b886-0e88145beea5 tempest-ServerTagsTestJSON-1513198723 tempest-ServerTagsTestJSON-1513198723-project-member] Lazy-loading 'resources' on Instance uuid 8659f63a-5df9-4ff8-84dd-0722026dc820 {{(pid=62519) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1687.657955] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-5fc67e40-6ac8-45ae-a471-6bc7d3a0937f tempest-ServersTestFqdnHostnames-1137700598 tempest-ServersTestFqdnHostnames-1137700598-project-member] [instance: a59be5e6-2316-4766-933a-4d01dfe4fec1] Unregistered the VM {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1687.658221] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-5fc67e40-6ac8-45ae-a471-6bc7d3a0937f tempest-ServersTestFqdnHostnames-1137700598 tempest-ServersTestFqdnHostnames-1137700598-project-member] [instance: a59be5e6-2316-4766-933a-4d01dfe4fec1] Deleting contents of the VM from datastore datastore1 {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1687.658338] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-5fc67e40-6ac8-45ae-a471-6bc7d3a0937f tempest-ServersTestFqdnHostnames-1137700598 tempest-ServersTestFqdnHostnames-1137700598-project-member] Deleting the datastore file [datastore1] a59be5e6-2316-4766-933a-4d01dfe4fec1 {{(pid=62519) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1687.658633] env[62519]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-84cd9a83-172e-4edc-bc13-7cd7fe2b4b8d {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1687.669458] env[62519]: DEBUG oslo_vmware.api [None req-5fc67e40-6ac8-45ae-a471-6bc7d3a0937f tempest-ServersTestFqdnHostnames-1137700598 tempest-ServersTestFqdnHostnames-1137700598-project-member] Waiting for the task: (returnval){ [ 1687.669458] env[62519]: value = "task-1802714" [ 1687.669458] env[62519]: _type = "Task" [ 1687.669458] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1687.677825] env[62519]: DEBUG oslo_vmware.api [None req-5fc67e40-6ac8-45ae-a471-6bc7d3a0937f tempest-ServersTestFqdnHostnames-1137700598 tempest-ServersTestFqdnHostnames-1137700598-project-member] Task: {'id': task-1802714, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1687.690219] env[62519]: INFO nova.compute.manager [-] [instance: c61c893f-826b-4874-b253-de6fbffa9e5a] Took 1.57 seconds to deallocate network for instance. 
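The entries above trace the usual vmwareapi teardown path for an instance: power off the VM, unregister it from the inventory, then delete its directory from the datastore, polling each vCenter task until it reports completion. Purely as an illustrative sketch (not Nova's actual vmops/ds_util code), the same sequence driven through oslo.vmware looks roughly like the following; `session`, `vm_ref`, `ds_path`, and `dc_ref` are assumed placeholders for an established `VMwareAPISession`, the VM's managed object reference, the datastore path to remove, and the datacenter reference.

```python
# Illustrative sketch only -- not Nova's vmops/ds_util implementation.
# Assumes an established oslo.vmware VMwareAPISession (`session`), a VM
# managed object reference (`vm_ref`), a datastore path string such as
# "[datastore1] a59be5e6-2316-4766-933a-4d01dfe4fec1" (`ds_path`), and a
# datacenter reference (`dc_ref`); all four names are placeholders.

def destroy_vm(session, vm_ref, ds_path, dc_ref):
    # Power off the VM and block until vCenter reports the task done,
    # mirroring the PowerOffVM_Task / wait_for_task entries above.
    task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)
    session.wait_for_task(task)

    # Unregister the VM from the vCenter inventory (no task returned).
    session.invoke_api(session.vim, 'UnregisterVM', vm_ref)

    # Delete the instance directory from the datastore; the returned
    # task is polled the same way as the DeleteDatastoreFile_Task
    # entries in the log.
    file_manager = session.vim.service_content.fileManager
    task = session.invoke_api(session.vim, 'DeleteDatastoreFile_Task',
                              file_manager, name=ds_path,
                              datacenter=dc_ref)
    session.wait_for_task(task)
```

Each `wait_for_task` call is what produces the repeated `progress is N%` / `completed successfully` poll lines interleaved through the log.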
[ 1687.700615] env[62519]: DEBUG oslo_vmware.rw_handles [None req-82cc34c3-57f7-4d42-986e-90dfdcc61974 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Completed reading data from the image iterator. {{(pid=62519) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1687.700822] env[62519]: DEBUG oslo_vmware.rw_handles [None req-82cc34c3-57f7-4d42-986e-90dfdcc61974 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52de61b6-17fc-13cd-4896-20cc3ccbbdc6/disk-0.vmdk. {{(pid=62519) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1687.705661] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1fc38c5f-f7e5-494e-a696-be29ac48c12d {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1687.712208] env[62519]: INFO nova.compute.manager [None req-ba83c5d0-762b-42f1-8643-2a65dc34a940 tempest-AttachInterfacesUnderV243Test-2090758485 tempest-AttachInterfacesUnderV243Test-2090758485-project-member] [instance: 9f71845a-e80c-4822-b3de-717f1d83bc49] Took 47.15 seconds to build instance. [ 1687.718102] env[62519]: DEBUG oslo_vmware.rw_handles [None req-82cc34c3-57f7-4d42-986e-90dfdcc61974 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52de61b6-17fc-13cd-4896-20cc3ccbbdc6/disk-0.vmdk is in state: ready. {{(pid=62519) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1687.718362] env[62519]: DEBUG oslo_vmware.rw_handles [None req-82cc34c3-57f7-4d42-986e-90dfdcc61974 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Releasing lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52de61b6-17fc-13cd-4896-20cc3ccbbdc6/disk-0.vmdk. {{(pid=62519) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:440}} [ 1687.718629] env[62519]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseComplete with opID=oslo.vmware-f1cf4636-c74d-4216-96aa-9d1e8625fc99 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1687.919873] env[62519]: DEBUG oslo_vmware.rw_handles [None req-82cc34c3-57f7-4d42-986e-90dfdcc61974 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Closed VMDK write handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52de61b6-17fc-13cd-4896-20cc3ccbbdc6/disk-0.vmdk. 
{{(pid=62519) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:646}} [ 1687.920233] env[62519]: INFO nova.virt.vmwareapi.images [None req-82cc34c3-57f7-4d42-986e-90dfdcc61974 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] [instance: 54a8aa34-1595-4494-ba68-6915611631ce] Downloaded image file data b36eba9a-e231-4006-9afd-ecb0aaa21542 [ 1687.923478] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce8fe8af-49ca-4673-8aaf-24008f870d99 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1687.944048] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-6d39b655-77cc-4e03-8246-b5263fd234f5 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1687.969172] env[62519]: INFO nova.virt.vmwareapi.images [None req-82cc34c3-57f7-4d42-986e-90dfdcc61974 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] [instance: 54a8aa34-1595-4494-ba68-6915611631ce] The imported VM was unregistered [ 1687.971353] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-82cc34c3-57f7-4d42-986e-90dfdcc61974 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] [instance: 54a8aa34-1595-4494-ba68-6915611631ce] Caching image {{(pid=62519) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1687.971620] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-82cc34c3-57f7-4d42-986e-90dfdcc61974 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Creating directory with path [datastore1] devstack-image-cache_base/b36eba9a-e231-4006-9afd-ecb0aaa21542 {{(pid=62519) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1687.971894] env[62519]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-79632b77-11f0-4a78-a197-f4fc13163b92 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1687.983507] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-82cc34c3-57f7-4d42-986e-90dfdcc61974 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Created directory with path [datastore1] devstack-image-cache_base/b36eba9a-e231-4006-9afd-ecb0aaa21542 {{(pid=62519) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1687.983716] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-82cc34c3-57f7-4d42-986e-90dfdcc61974 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Moving virtual disk from [datastore1] OSTACK_IMG_e5975c7e-1c4e-4932-9686-3d10cf5c71a2/OSTACK_IMG_e5975c7e-1c4e-4932-9686-3d10cf5c71a2.vmdk to [datastore1] devstack-image-cache_base/b36eba9a-e231-4006-9afd-ecb0aaa21542/b36eba9a-e231-4006-9afd-ecb0aaa21542.vmdk. 
{{(pid=62519) disk_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:263}} [ 1687.984034] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.MoveVirtualDisk_Task with opID=oslo.vmware-e8ddd4a4-e981-4412-a227-1c0df56cbfcd {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1687.992830] env[62519]: DEBUG oslo_vmware.api [None req-82cc34c3-57f7-4d42-986e-90dfdcc61974 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Waiting for the task: (returnval){ [ 1687.992830] env[62519]: value = "task-1802716" [ 1687.992830] env[62519]: _type = "Task" [ 1687.992830] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1688.001425] env[62519]: DEBUG oslo_vmware.api [None req-82cc34c3-57f7-4d42-986e-90dfdcc61974 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Task: {'id': task-1802716, 'name': MoveVirtualDisk_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1688.022194] env[62519]: DEBUG oslo_concurrency.lockutils [None req-b6d3bfb6-d0cd-4359-b03b-5e40dfd88595 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Releasing lock "refresh_cache-1cf285cf-8b4c-4872-b179-72e38c0143e0" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1688.022657] env[62519]: DEBUG nova.compute.manager [None req-b6d3bfb6-d0cd-4359-b03b-5e40dfd88595 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] [instance: 1cf285cf-8b4c-4872-b179-72e38c0143e0] Instance network_info: |[{"id": "fdafb1fb-4c93-4644-a5a6-de52b8a4fbf6", "address": "fa:16:3e:a7:b2:3f", "network": {"id": "8af40fa9-af04-4837-8508-5d0e5b7f1d9c", "bridge": "br-int", "label": "tempest-ServersTestJSON-1824239670-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0e755fb5a6e94068b6c99b1638081f5f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7ab8d568-adb0-4f3b-b6cc-68413e6546ae", "external-id": "nsx-vlan-transportzone-86", "segmentation_id": 86, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfdafb1fb-4c", "ovs_interfaceid": "fdafb1fb-4c93-4644-a5a6-de52b8a4fbf6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62519) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2004}} [ 1688.023237] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-b6d3bfb6-d0cd-4359-b03b-5e40dfd88595 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] [instance: 1cf285cf-8b4c-4872-b179-72e38c0143e0] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:a7:b2:3f', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '7ab8d568-adb0-4f3b-b6cc-68413e6546ae', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 
'fdafb1fb-4c93-4644-a5a6-de52b8a4fbf6', 'vif_model': 'vmxnet3'}] {{(pid=62519) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1688.031085] env[62519]: DEBUG oslo.service.loopingcall [None req-b6d3bfb6-d0cd-4359-b03b-5e40dfd88595 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62519) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1688.031608] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1cf285cf-8b4c-4872-b179-72e38c0143e0] Creating VM on the ESX host {{(pid=62519) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1688.031848] env[62519]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-7571e2e9-4eb0-400c-9cd0-9ada28e4a631 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1688.054762] env[62519]: DEBUG nova.compute.utils [None req-9fb41100-265a-40a3-9269-3fe1f1b74f74 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Using /dev/sd instead of None {{(pid=62519) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1688.061470] env[62519]: DEBUG nova.compute.manager [None req-9fb41100-265a-40a3-9269-3fe1f1b74f74 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] [instance: 88f9351c-253b-49dd-a88e-b8585ea742ac] Allocating IP information in the background. {{(pid=62519) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1989}} [ 1688.061702] env[62519]: DEBUG nova.network.neutron [None req-9fb41100-265a-40a3-9269-3fe1f1b74f74 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] [instance: 88f9351c-253b-49dd-a88e-b8585ea742ac] allocate_for_instance() {{(pid=62519) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1688.063734] env[62519]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1688.063734] env[62519]: value = "task-1802717" [ 1688.063734] env[62519]: _type = "Task" [ 1688.063734] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1688.073144] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1802717, 'name': CreateVM_Task} progress is 6%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1688.127198] env[62519]: DEBUG nova.policy [None req-9fb41100-265a-40a3-9269-3fe1f1b74f74 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '81bb350c0ff54453b99b45ac84a82935', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '549cc35f5ff249f6bf22c67872883db0', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62519) authorize /opt/stack/nova/nova/policy.py:192}} [ 1688.180361] env[62519]: DEBUG oslo_vmware.api [None req-5fc67e40-6ac8-45ae-a471-6bc7d3a0937f tempest-ServersTestFqdnHostnames-1137700598 tempest-ServersTestFqdnHostnames-1137700598-project-member] Task: {'id': task-1802714, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.18185} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1688.180361] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-5fc67e40-6ac8-45ae-a471-6bc7d3a0937f tempest-ServersTestFqdnHostnames-1137700598 tempest-ServersTestFqdnHostnames-1137700598-project-member] Deleted the datastore file {{(pid=62519) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1688.180972] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-5fc67e40-6ac8-45ae-a471-6bc7d3a0937f tempest-ServersTestFqdnHostnames-1137700598 tempest-ServersTestFqdnHostnames-1137700598-project-member] [instance: a59be5e6-2316-4766-933a-4d01dfe4fec1] Deleted contents of the VM from datastore datastore1 {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1688.181335] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-5fc67e40-6ac8-45ae-a471-6bc7d3a0937f tempest-ServersTestFqdnHostnames-1137700598 tempest-ServersTestFqdnHostnames-1137700598-project-member] [instance: a59be5e6-2316-4766-933a-4d01dfe4fec1] Instance destroyed {{(pid=62519) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1688.181683] env[62519]: INFO nova.compute.manager [None req-5fc67e40-6ac8-45ae-a471-6bc7d3a0937f tempest-ServersTestFqdnHostnames-1137700598 tempest-ServersTestFqdnHostnames-1137700598-project-member] [instance: a59be5e6-2316-4766-933a-4d01dfe4fec1] Took 1.20 seconds to destroy the instance on the hypervisor. [ 1688.182093] env[62519]: DEBUG oslo.service.loopingcall [None req-5fc67e40-6ac8-45ae-a471-6bc7d3a0937f tempest-ServersTestFqdnHostnames-1137700598 tempest-ServersTestFqdnHostnames-1137700598-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62519) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1688.185885] env[62519]: DEBUG nova.compute.manager [-] [instance: a59be5e6-2316-4766-933a-4d01dfe4fec1] Deallocating network for instance {{(pid=62519) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2297}} [ 1688.186369] env[62519]: DEBUG nova.network.neutron [-] [instance: a59be5e6-2316-4766-933a-4d01dfe4fec1] deallocate_for_instance() {{(pid=62519) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1688.199232] env[62519]: DEBUG oslo_concurrency.lockutils [None req-3f693238-92fb-4a51-a1b2-5d8290ec4c65 tempest-VolumesAdminNegativeTest-694306887 tempest-VolumesAdminNegativeTest-694306887-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1688.506122] env[62519]: DEBUG oslo_vmware.api [None req-82cc34c3-57f7-4d42-986e-90dfdcc61974 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Task: {'id': task-1802716, 'name': MoveVirtualDisk_Task} progress is 21%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1688.561336] env[62519]: DEBUG nova.compute.manager [None req-9fb41100-265a-40a3-9269-3fe1f1b74f74 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] [instance: 88f9351c-253b-49dd-a88e-b8585ea742ac] Start building block device mappings for instance. {{(pid=62519) _build_resources /opt/stack/nova/nova/compute/manager.py:2873}} [ 1688.589395] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1802717, 'name': CreateVM_Task, 'duration_secs': 0.497843} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1688.589395] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1cf285cf-8b4c-4872-b179-72e38c0143e0] Created VM on the ESX host {{(pid=62519) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1688.590333] env[62519]: DEBUG oslo_concurrency.lockutils [None req-b6d3bfb6-d0cd-4359-b03b-5e40dfd88595 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1688.590333] env[62519]: DEBUG oslo_concurrency.lockutils [None req-b6d3bfb6-d0cd-4359-b03b-5e40dfd88595 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Acquired lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1688.590552] env[62519]: DEBUG oslo_concurrency.lockutils [None req-b6d3bfb6-d0cd-4359-b03b-5e40dfd88595 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1688.590894] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-67cf4442-b98e-4dc3-93e5-fa5f425f8e25 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1688.598825] env[62519]: DEBUG oslo_vmware.api [None req-b6d3bfb6-d0cd-4359-b03b-5e40dfd88595 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Waiting for the task: (returnval){ [ 1688.598825] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]525695e8-c30d-3b1c-b2f3-5057b9776311" [ 1688.598825] env[62519]: _type = "Task" [ 1688.598825] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1688.618602] env[62519]: DEBUG oslo_vmware.api [None req-b6d3bfb6-d0cd-4359-b03b-5e40dfd88595 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]525695e8-c30d-3b1c-b2f3-5057b9776311, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1688.644111] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53efde1a-7cc3-4c0a-b8b9-7640fac2eee4 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1688.660431] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52a5cc5e-aa13-46ae-9bb2-3b68ea2c8764 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1688.693078] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d20089b-86aa-40a5-a6ba-260b32b627b6 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1688.701111] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51c46180-77ed-4790-b549-1bd64d8ac73a {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1688.718440] env[62519]: DEBUG nova.compute.provider_tree [None req-6efd5194-063f-45af-b886-0e88145beea5 tempest-ServerTagsTestJSON-1513198723 tempest-ServerTagsTestJSON-1513198723-project-member] Inventory has not changed in ProviderTree for provider: f8ca0d98-9158-4b85-ae0e-b106f966dd44 {{(pid=62519) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1688.720542] env[62519]: DEBUG nova.network.neutron [None req-9fb41100-265a-40a3-9269-3fe1f1b74f74 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] [instance: 88f9351c-253b-49dd-a88e-b8585ea742ac] Successfully created port: f0eeb5fa-bf84-45a4-a90f-85d593feed7e {{(pid=62519) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1689.003947] env[62519]: DEBUG oslo_vmware.api [None req-82cc34c3-57f7-4d42-986e-90dfdcc61974 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Task: {'id': task-1802716, 'name': MoveVirtualDisk_Task} progress is 38%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1689.114724] env[62519]: DEBUG oslo_vmware.api [None req-b6d3bfb6-d0cd-4359-b03b-5e40dfd88595 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]525695e8-c30d-3b1c-b2f3-5057b9776311, 'name': SearchDatastore_Task, 'duration_secs': 0.089972} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1689.117073] env[62519]: DEBUG oslo_concurrency.lockutils [None req-b6d3bfb6-d0cd-4359-b03b-5e40dfd88595 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Releasing lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1689.117073] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-b6d3bfb6-d0cd-4359-b03b-5e40dfd88595 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] [instance: 1cf285cf-8b4c-4872-b179-72e38c0143e0] Processing image 15793716-f1d9-4a86-9030-717adf498693 {{(pid=62519) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1689.117326] env[62519]: DEBUG oslo_concurrency.lockutils [None req-b6d3bfb6-d0cd-4359-b03b-5e40dfd88595 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1689.117524] env[62519]: DEBUG oslo_concurrency.lockutils [None req-b6d3bfb6-d0cd-4359-b03b-5e40dfd88595 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Acquired lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1689.117652] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-b6d3bfb6-d0cd-4359-b03b-5e40dfd88595 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62519) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1689.119864] env[62519]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-299e27d3-6817-45ec-b3e9-b97c510e7230 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1689.137670] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-b6d3bfb6-d0cd-4359-b03b-5e40dfd88595 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62519) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1689.138072] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-b6d3bfb6-d0cd-4359-b03b-5e40dfd88595 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62519) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1689.139431] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-dcc652f0-3987-4a7a-adf8-d0c51291a426 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1689.150645] env[62519]: DEBUG oslo_vmware.api [None req-b6d3bfb6-d0cd-4359-b03b-5e40dfd88595 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Waiting for the task: (returnval){ [ 1689.150645] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]529753e8-3561-a4a5-4d93-2720ea6f3259" [ 1689.150645] env[62519]: _type = "Task" [ 1689.150645] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1689.160157] env[62519]: DEBUG oslo_vmware.api [None req-b6d3bfb6-d0cd-4359-b03b-5e40dfd88595 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]529753e8-3561-a4a5-4d93-2720ea6f3259, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1689.227276] env[62519]: DEBUG nova.scheduler.client.report [None req-6efd5194-063f-45af-b886-0e88145beea5 tempest-ServerTagsTestJSON-1513198723 tempest-ServerTagsTestJSON-1513198723-project-member] Inventory has not changed for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 157, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1689.230238] env[62519]: DEBUG oslo_concurrency.lockutils [None req-ba83c5d0-762b-42f1-8643-2a65dc34a940 tempest-AttachInterfacesUnderV243Test-2090758485 tempest-AttachInterfacesUnderV243Test-2090758485-project-member] Lock "9f71845a-e80c-4822-b3de-717f1d83bc49" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 49.688s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1689.451024] env[62519]: DEBUG nova.compute.manager [req-d9292c0f-8aa2-46f8-ae06-3b346af32739 req-a565295a-3235-4c46-b5fb-7651baed5a45 service nova] [instance: 9f71845a-e80c-4822-b3de-717f1d83bc49] Received event network-changed-89b7045e-9b9a-45f2-b8e7-e5d063bd1605 {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1689.451260] env[62519]: DEBUG nova.compute.manager [req-d9292c0f-8aa2-46f8-ae06-3b346af32739 req-a565295a-3235-4c46-b5fb-7651baed5a45 service nova] [instance: 9f71845a-e80c-4822-b3de-717f1d83bc49] Refreshing instance network info cache due to event network-changed-89b7045e-9b9a-45f2-b8e7-e5d063bd1605. 
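The inventory dict reported above uses placement's standard fields: schedulable capacity per resource class is (total - reserved) * allocation_ratio, and max_unit caps what a single allocation may take. A small worked example with the exact values from the log, so this node exposes 192 VCPU, 196078 MB of RAM and 400 GB of disk to the scheduler:

    def effective_capacity(inv):
        """Capacity placement will schedule against for one resource class."""
        return int((inv['total'] - inv['reserved']) * inv['allocation_ratio'])

    inventory = {
        'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16,
                 'step_size': 1, 'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1,
                      'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0},
        'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 157,
                    'step_size': 1, 'allocation_ratio': 1.0},
    }

    for rc, inv in inventory.items():
        # e.g. VCPU: 192 schedulable units, at most 16 per single allocation
        print(rc, effective_capacity(inv), 'max_unit', inv['max_unit'])
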
{{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11628}} [ 1689.451572] env[62519]: DEBUG oslo_concurrency.lockutils [req-d9292c0f-8aa2-46f8-ae06-3b346af32739 req-a565295a-3235-4c46-b5fb-7651baed5a45 service nova] Acquiring lock "refresh_cache-9f71845a-e80c-4822-b3de-717f1d83bc49" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1689.454036] env[62519]: DEBUG oslo_concurrency.lockutils [req-d9292c0f-8aa2-46f8-ae06-3b346af32739 req-a565295a-3235-4c46-b5fb-7651baed5a45 service nova] Acquired lock "refresh_cache-9f71845a-e80c-4822-b3de-717f1d83bc49" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1689.454036] env[62519]: DEBUG nova.network.neutron [req-d9292c0f-8aa2-46f8-ae06-3b346af32739 req-a565295a-3235-4c46-b5fb-7651baed5a45 service nova] [instance: 9f71845a-e80c-4822-b3de-717f1d83bc49] Refreshing network info cache for port 89b7045e-9b9a-45f2-b8e7-e5d063bd1605 {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1689.511777] env[62519]: DEBUG oslo_vmware.api [None req-82cc34c3-57f7-4d42-986e-90dfdcc61974 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Task: {'id': task-1802716, 'name': MoveVirtualDisk_Task} progress is 57%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1689.526369] env[62519]: DEBUG nova.network.neutron [-] [instance: a59be5e6-2316-4766-933a-4d01dfe4fec1] Updating instance_info_cache with network_info: [] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1689.555961] env[62519]: DEBUG nova.compute.manager [req-c7879f4f-3ccc-41a9-97aa-6e3216851180 req-f6831891-0e17-4305-af29-1cc9c0799226 service nova] [instance: a59be5e6-2316-4766-933a-4d01dfe4fec1] Received event network-vif-deleted-883e1605-a678-4428-b758-c717d5d49ce2 {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1689.556206] env[62519]: INFO nova.compute.manager [req-c7879f4f-3ccc-41a9-97aa-6e3216851180 req-f6831891-0e17-4305-af29-1cc9c0799226 service nova] [instance: a59be5e6-2316-4766-933a-4d01dfe4fec1] Neutron deleted interface 883e1605-a678-4428-b758-c717d5d49ce2; detaching it from the instance and deleting it from the info cache [ 1689.556802] env[62519]: DEBUG nova.network.neutron [req-c7879f4f-3ccc-41a9-97aa-6e3216851180 req-f6831891-0e17-4305-af29-1cc9c0799226 service nova] [instance: a59be5e6-2316-4766-933a-4d01dfe4fec1] Updating instance_info_cache with network_info: [] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1689.574534] env[62519]: DEBUG nova.compute.manager [None req-9fb41100-265a-40a3-9269-3fe1f1b74f74 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] [instance: 88f9351c-253b-49dd-a88e-b8585ea742ac] Start spawning the instance on the hypervisor. 
{{(pid=62519) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2647}} [ 1689.608512] env[62519]: DEBUG nova.virt.hardware [None req-9fb41100-265a-40a3-9269-3fe1f1b74f74 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T07:56:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T07:55:55Z,direct_url=,disk_format='vmdk',id=15793716-f1d9-4a86-9030-717adf498693,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4069337684d348e0a1ab4eb6a1a2b14d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T07:55:56Z,virtual_size=,visibility=), allow threads: False {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1689.608999] env[62519]: DEBUG nova.virt.hardware [None req-9fb41100-265a-40a3-9269-3fe1f1b74f74 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Flavor limits 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1689.609410] env[62519]: DEBUG nova.virt.hardware [None req-9fb41100-265a-40a3-9269-3fe1f1b74f74 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Image limits 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1689.609798] env[62519]: DEBUG nova.virt.hardware [None req-9fb41100-265a-40a3-9269-3fe1f1b74f74 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Flavor pref 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1689.610150] env[62519]: DEBUG nova.virt.hardware [None req-9fb41100-265a-40a3-9269-3fe1f1b74f74 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Image pref 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1689.610499] env[62519]: DEBUG nova.virt.hardware [None req-9fb41100-265a-40a3-9269-3fe1f1b74f74 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1689.610864] env[62519]: DEBUG nova.virt.hardware [None req-9fb41100-265a-40a3-9269-3fe1f1b74f74 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1689.611205] env[62519]: DEBUG nova.virt.hardware [None req-9fb41100-265a-40a3-9269-3fe1f1b74f74 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62519) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1689.611565] env[62519]: DEBUG 
nova.virt.hardware [None req-9fb41100-265a-40a3-9269-3fe1f1b74f74 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Got 1 possible topologies {{(pid=62519) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1689.611895] env[62519]: DEBUG nova.virt.hardware [None req-9fb41100-265a-40a3-9269-3fe1f1b74f74 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1689.612229] env[62519]: DEBUG nova.virt.hardware [None req-9fb41100-265a-40a3-9269-3fe1f1b74f74 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1689.616019] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7225912-25d0-4854-96dd-4c24fd1546ac {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1689.623504] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a0d50f5-ddb9-4129-a362-207eac37b5c9 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1689.663184] env[62519]: DEBUG oslo_vmware.api [None req-b6d3bfb6-d0cd-4359-b03b-5e40dfd88595 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]529753e8-3561-a4a5-4d93-2720ea6f3259, 'name': SearchDatastore_Task, 'duration_secs': 0.09876} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1689.663184] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9586200e-8f60-46be-8923-27159de41daa {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1689.671028] env[62519]: DEBUG oslo_vmware.api [None req-b6d3bfb6-d0cd-4359-b03b-5e40dfd88595 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Waiting for the task: (returnval){ [ 1689.671028] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]523da37e-1773-dc67-e6f1-9f6720a69791" [ 1689.671028] env[62519]: _type = "Task" [ 1689.671028] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1689.681391] env[62519]: DEBUG oslo_vmware.api [None req-b6d3bfb6-d0cd-4359-b03b-5e40dfd88595 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]523da37e-1773-dc67-e6f1-9f6720a69791, 'name': SearchDatastore_Task} progress is 0%. 
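The hardware checks above reduce an unconstrained flavor and image (limits and preferences all 0:0:0) to the single topology 1:1:1 for a 1-vCPU guest. A brute-force sketch of that enumeration, not Nova's exact algorithm, which reproduces the logged result:

    import itertools

    def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
        """Enumerate (sockets, cores, threads) whose product equals vcpus."""
        topologies = []
        # brute force is fine for illustration; real guests have small vcpu counts
        for sockets, cores, threads in itertools.product(range(1, vcpus + 1), repeat=3):
            if (sockets * cores * threads == vcpus
                    and sockets <= max_sockets
                    and cores <= max_cores
                    and threads <= max_threads):
                topologies.append((sockets, cores, threads))
        return topologies

    print(possible_topologies(1))   # [(1, 1, 1)], matching the log
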
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1689.733448] env[62519]: DEBUG oslo_concurrency.lockutils [None req-6efd5194-063f-45af-b886-0e88145beea5 tempest-ServerTagsTestJSON-1513198723 tempest-ServerTagsTestJSON-1513198723-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.181s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1689.735458] env[62519]: DEBUG oslo_concurrency.lockutils [None req-4054b116-0ba8-401b-909e-ea20d3c324af tempest-ServerGroupTestJSON-1731423365 tempest-ServerGroupTestJSON-1731423365-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 29.635s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1689.735717] env[62519]: DEBUG nova.objects.instance [None req-4054b116-0ba8-401b-909e-ea20d3c324af tempest-ServerGroupTestJSON-1731423365 tempest-ServerGroupTestJSON-1731423365-project-member] Lazy-loading 'resources' on Instance uuid 45d1aa86-a5c8-4e75-a6c8-5f55461702f8 {{(pid=62519) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1689.763331] env[62519]: INFO nova.scheduler.client.report [None req-6efd5194-063f-45af-b886-0e88145beea5 tempest-ServerTagsTestJSON-1513198723 tempest-ServerTagsTestJSON-1513198723-project-member] Deleted allocations for instance 8659f63a-5df9-4ff8-84dd-0722026dc820 [ 1690.010965] env[62519]: DEBUG oslo_vmware.api [None req-82cc34c3-57f7-4d42-986e-90dfdcc61974 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Task: {'id': task-1802716, 'name': MoveVirtualDisk_Task} progress is 77%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1690.030513] env[62519]: INFO nova.compute.manager [-] [instance: a59be5e6-2316-4766-933a-4d01dfe4fec1] Took 1.84 seconds to deallocate network for instance. [ 1690.064406] env[62519]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-c7efd67c-e533-4f54-bc9e-9e503d06b446 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1690.071055] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e58dc93-e7e6-40ae-82ad-a9fa8b5bd245 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1690.117081] env[62519]: DEBUG nova.compute.manager [req-c7879f4f-3ccc-41a9-97aa-6e3216851180 req-f6831891-0e17-4305-af29-1cc9c0799226 service nova] [instance: a59be5e6-2316-4766-933a-4d01dfe4fec1] Detach interface failed, port_id=883e1605-a678-4428-b758-c717d5d49ce2, reason: Instance a59be5e6-2316-4766-933a-4d01dfe4fec1 could not be found. {{(pid=62519) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11455}} [ 1690.180099] env[62519]: DEBUG oslo_vmware.api [None req-b6d3bfb6-d0cd-4359-b03b-5e40dfd88595 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]523da37e-1773-dc67-e6f1-9f6720a69791, 'name': SearchDatastore_Task, 'duration_secs': 0.114068} completed successfully. 
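The lock messages above report both how long a caller waited for a lock and how long it was then held (29.635s waited and 2.181s held on "compute_resources", for example). A stdlib-only sketch that produces the same two measurements around a lock:

    import threading
    import time
    from contextlib import contextmanager

    @contextmanager
    def timed_lock(lock, name):
        """Report wait and hold times for a lock, in the style of the log."""
        t0 = time.monotonic()
        lock.acquire()
        print('Lock "%s" acquired :: waited %.3fs' % (name, time.monotonic() - t0))
        t1 = time.monotonic()
        try:
            yield
        finally:
            held = time.monotonic() - t1
            lock.release()
            print('Lock "%s" released :: held %.3fs' % (name, held))

    compute_resources = threading.Lock()
    with timed_lock(compute_resources, 'compute_resources'):
        pass  # e.g. the resource tracker's update_usage work would run here
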
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1690.180312] env[62519]: DEBUG oslo_concurrency.lockutils [None req-b6d3bfb6-d0cd-4359-b03b-5e40dfd88595 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Releasing lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1690.180583] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-b6d3bfb6-d0cd-4359-b03b-5e40dfd88595 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk to [datastore1] 1cf285cf-8b4c-4872-b179-72e38c0143e0/1cf285cf-8b4c-4872-b179-72e38c0143e0.vmdk {{(pid=62519) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1690.180852] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-b1c0c803-48b7-4f4d-b4a5-69a0b946dad7 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1690.187974] env[62519]: DEBUG oslo_vmware.api [None req-b6d3bfb6-d0cd-4359-b03b-5e40dfd88595 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Waiting for the task: (returnval){ [ 1690.187974] env[62519]: value = "task-1802718" [ 1690.187974] env[62519]: _type = "Task" [ 1690.187974] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1690.196676] env[62519]: DEBUG oslo_vmware.api [None req-b6d3bfb6-d0cd-4359-b03b-5e40dfd88595 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Task: {'id': task-1802718, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1690.273210] env[62519]: DEBUG oslo_concurrency.lockutils [None req-6efd5194-063f-45af-b886-0e88145beea5 tempest-ServerTagsTestJSON-1513198723 tempest-ServerTagsTestJSON-1513198723-project-member] Lock "8659f63a-5df9-4ff8-84dd-0722026dc820" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 36.619s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1690.513037] env[62519]: DEBUG oslo_vmware.api [None req-82cc34c3-57f7-4d42-986e-90dfdcc61974 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Task: {'id': task-1802716, 'name': MoveVirtualDisk_Task} progress is 97%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1690.540523] env[62519]: DEBUG oslo_concurrency.lockutils [None req-5fc67e40-6ac8-45ae-a471-6bc7d3a0937f tempest-ServersTestFqdnHostnames-1137700598 tempest-ServersTestFqdnHostnames-1137700598-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1690.596655] env[62519]: DEBUG nova.network.neutron [req-d9292c0f-8aa2-46f8-ae06-3b346af32739 req-a565295a-3235-4c46-b5fb-7651baed5a45 service nova] [instance: 9f71845a-e80c-4822-b3de-717f1d83bc49] Updated VIF entry in instance network info cache for port 89b7045e-9b9a-45f2-b8e7-e5d063bd1605. {{(pid=62519) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1690.597039] env[62519]: DEBUG nova.network.neutron [req-d9292c0f-8aa2-46f8-ae06-3b346af32739 req-a565295a-3235-4c46-b5fb-7651baed5a45 service nova] [instance: 9f71845a-e80c-4822-b3de-717f1d83bc49] Updating instance_info_cache with network_info: [{"id": "89b7045e-9b9a-45f2-b8e7-e5d063bd1605", "address": "fa:16:3e:6d:32:a1", "network": {"id": "65b31600-7594-4ce0-8103-b44b57a6db75", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-145323623-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.180", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "17c3617401cc4be0bbb6851dba631e98", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a407774d-9c2a-411d-9d6f-9ca733b97f3f", "external-id": "nsx-vlan-transportzone-710", "segmentation_id": 710, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap89b7045e-9b", "ovs_interfaceid": "89b7045e-9b9a-45f2-b8e7-e5d063bd1605", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1690.697277] env[62519]: DEBUG oslo_vmware.api [None req-b6d3bfb6-d0cd-4359-b03b-5e40dfd88595 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Task: {'id': task-1802718, 'name': CopyVirtualDisk_Task} progress is 4%. 
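The refreshed cache entry above carries the full Neutron view of the port: fixed IP 192.168.128.10 with floating IP 10.180.180.180, MTU 8950 and the NSX segmentation details. A small helper shaped around that structure for pulling the addresses back out; the trimmed network_info literal below reuses only fields shown in the log:

    def addresses(network_info):
        """Collect fixed and floating IPs from a network_info cache entry."""
        fixed, floating = [], []
        for vif in network_info:
            for subnet in vif['network']['subnets']:
                for ip in subnet['ips']:
                    fixed.append(ip['address'])
                    floating.extend(f['address'] for f in ip.get('floating_ips', []))
        return fixed, floating

    network_info = [{
        'id': '89b7045e-9b9a-45f2-b8e7-e5d063bd1605',
        'network': {'subnets': [{'ips': [{
            'address': '192.168.128.10',
            'floating_ips': [{'address': '10.180.180.180'}],
        }]}]},
    }]
    print(addresses(network_info))   # (['192.168.128.10'], ['10.180.180.180'])
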
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1690.724318] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-277371b9-f692-46d0-8a9a-b35d0393b71a {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1690.732136] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82744ad0-b148-4152-9015-72d6114c40d6 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1690.768015] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4afa03c9-b56b-49df-918f-499bac074470 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1690.777039] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d14ed57-5c23-4201-8ddf-b4fa8afd68a7 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1690.791595] env[62519]: DEBUG nova.compute.provider_tree [None req-4054b116-0ba8-401b-909e-ea20d3c324af tempest-ServerGroupTestJSON-1731423365 tempest-ServerGroupTestJSON-1731423365-project-member] Inventory has not changed in ProviderTree for provider: f8ca0d98-9158-4b85-ae0e-b106f966dd44 {{(pid=62519) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1691.007949] env[62519]: DEBUG oslo_vmware.api [None req-82cc34c3-57f7-4d42-986e-90dfdcc61974 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Task: {'id': task-1802716, 'name': MoveVirtualDisk_Task, 'duration_secs': 2.653568} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1691.008268] env[62519]: INFO nova.virt.vmwareapi.ds_util [None req-82cc34c3-57f7-4d42-986e-90dfdcc61974 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Moved virtual disk from [datastore1] OSTACK_IMG_e5975c7e-1c4e-4932-9686-3d10cf5c71a2/OSTACK_IMG_e5975c7e-1c4e-4932-9686-3d10cf5c71a2.vmdk to [datastore1] devstack-image-cache_base/b36eba9a-e231-4006-9afd-ecb0aaa21542/b36eba9a-e231-4006-9afd-ecb0aaa21542.vmdk. 
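The image for instance 54a8aa34 was first imported under a temporary OSTACK_IMG_* location and has just been moved into devstack-image-cache_base; the entries below delete the staging location. A local-filesystem analogue of that stage, move and clean-up sequence, with shutil/os standing in for MoveVirtualDisk_Task and DeleteDatastoreFile_Task and produce_file as a hypothetical callable:

    import os
    import shutil
    import tempfile
    import uuid

    def import_into_cache(cache_dir, image_id, produce_file):
        """Stage a file under a temporary name, move it into the cache, clean up."""
        staging = tempfile.mkdtemp(prefix='OSTACK_IMG_')
        try:
            staged = os.path.join(staging, '%s.vmdk' % uuid.uuid4())
            produce_file(staged)                        # e.g. download/convert here
            target_dir = os.path.join(cache_dir, image_id)
            os.makedirs(target_dir, exist_ok=True)
            target = os.path.join(target_dir, '%s.vmdk' % image_id)
            shutil.move(staged, target)                 # the "move into cache" step
            return target
        finally:
            shutil.rmtree(staging, ignore_errors=True)  # the staging-area cleanup step
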
[ 1691.008457] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-82cc34c3-57f7-4d42-986e-90dfdcc61974 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] [instance: 54a8aa34-1595-4494-ba68-6915611631ce] Cleaning up location [datastore1] OSTACK_IMG_e5975c7e-1c4e-4932-9686-3d10cf5c71a2 {{(pid=62519) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 1691.008616] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-82cc34c3-57f7-4d42-986e-90dfdcc61974 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Deleting the datastore file [datastore1] OSTACK_IMG_e5975c7e-1c4e-4932-9686-3d10cf5c71a2 {{(pid=62519) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1691.008902] env[62519]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-2ea836fe-8023-4ff5-80b2-e21e170bb775 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1691.017148] env[62519]: DEBUG oslo_vmware.api [None req-82cc34c3-57f7-4d42-986e-90dfdcc61974 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Waiting for the task: (returnval){ [ 1691.017148] env[62519]: value = "task-1802719" [ 1691.017148] env[62519]: _type = "Task" [ 1691.017148] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1691.026795] env[62519]: DEBUG oslo_vmware.api [None req-82cc34c3-57f7-4d42-986e-90dfdcc61974 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Task: {'id': task-1802719, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1691.101564] env[62519]: DEBUG oslo_concurrency.lockutils [req-d9292c0f-8aa2-46f8-ae06-3b346af32739 req-a565295a-3235-4c46-b5fb-7651baed5a45 service nova] Releasing lock "refresh_cache-9f71845a-e80c-4822-b3de-717f1d83bc49" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1691.198301] env[62519]: DEBUG oslo_vmware.api [None req-b6d3bfb6-d0cd-4359-b03b-5e40dfd88595 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Task: {'id': task-1802718, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.907253} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1691.198516] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-b6d3bfb6-d0cd-4359-b03b-5e40dfd88595 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk to [datastore1] 1cf285cf-8b4c-4872-b179-72e38c0143e0/1cf285cf-8b4c-4872-b179-72e38c0143e0.vmdk {{(pid=62519) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1691.198727] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-b6d3bfb6-d0cd-4359-b03b-5e40dfd88595 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] [instance: 1cf285cf-8b4c-4872-b179-72e38c0143e0] Extending root virtual disk to 1048576 {{(pid=62519) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1691.199033] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-996c15be-b80b-452f-9cb6-c0d586098443 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1691.206330] env[62519]: DEBUG oslo_vmware.api [None req-b6d3bfb6-d0cd-4359-b03b-5e40dfd88595 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Waiting for the task: (returnval){ [ 1691.206330] env[62519]: value = "task-1802720" [ 1691.206330] env[62519]: _type = "Task" [ 1691.206330] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1691.217122] env[62519]: DEBUG oslo_vmware.api [None req-b6d3bfb6-d0cd-4359-b03b-5e40dfd88595 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Task: {'id': task-1802720, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1691.234062] env[62519]: DEBUG nova.network.neutron [None req-9fb41100-265a-40a3-9269-3fe1f1b74f74 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] [instance: 88f9351c-253b-49dd-a88e-b8585ea742ac] Successfully updated port: f0eeb5fa-bf84-45a4-a90f-85d593feed7e {{(pid=62519) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1691.294609] env[62519]: DEBUG nova.scheduler.client.report [None req-4054b116-0ba8-401b-909e-ea20d3c324af tempest-ServerGroupTestJSON-1731423365 tempest-ServerGroupTestJSON-1731423365-project-member] Inventory has not changed for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 157, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1691.531410] env[62519]: DEBUG oslo_vmware.api [None req-82cc34c3-57f7-4d42-986e-90dfdcc61974 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Task: {'id': task-1802719, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.1102} completed successfully. 
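For instance 1cf285cf the cached VMDK is copied into the instance's own datastore folder and the root disk is then extended to 1048576, which matches the m1.nano flavor's root_gb=1 expressed in KiB. A sketch of the path and size arithmetic implied by those messages; the helper names are illustrative, not Nova's ds_util API:

    def cached_image_path(datastore, image_id):
        # e.g. "[datastore1] devstack-image-cache_base/<id>/<id>.vmdk"
        return '[%s] devstack-image-cache_base/%s/%s.vmdk' % (datastore, image_id, image_id)

    def instance_disk_path(datastore, instance_uuid):
        return '[%s] %s/%s.vmdk' % (datastore, instance_uuid, instance_uuid)

    def root_disk_size_kb(root_gb):
        # 1 GiB root disk -> 1048576 KiB, the number in the extend message
        return root_gb * 1024 * 1024

    src = cached_image_path('datastore1', '15793716-f1d9-4a86-9030-717adf498693')
    dst = instance_disk_path('datastore1', '1cf285cf-8b4c-4872-b179-72e38c0143e0')
    assert root_disk_size_kb(1) == 1048576
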
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1691.531719] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-82cc34c3-57f7-4d42-986e-90dfdcc61974 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Deleted the datastore file {{(pid=62519) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1691.531893] env[62519]: DEBUG oslo_concurrency.lockutils [None req-82cc34c3-57f7-4d42-986e-90dfdcc61974 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b36eba9a-e231-4006-9afd-ecb0aaa21542/b36eba9a-e231-4006-9afd-ecb0aaa21542.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1691.534454] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-82cc34c3-57f7-4d42-986e-90dfdcc61974 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/b36eba9a-e231-4006-9afd-ecb0aaa21542/b36eba9a-e231-4006-9afd-ecb0aaa21542.vmdk to [datastore1] 54a8aa34-1595-4494-ba68-6915611631ce/54a8aa34-1595-4494-ba68-6915611631ce.vmdk {{(pid=62519) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1691.534673] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-164ff3eb-051e-4db9-80a4-428a6e20713e {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1691.542083] env[62519]: DEBUG oslo_vmware.api [None req-82cc34c3-57f7-4d42-986e-90dfdcc61974 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Waiting for the task: (returnval){ [ 1691.542083] env[62519]: value = "task-1802721" [ 1691.542083] env[62519]: _type = "Task" [ 1691.542083] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1691.550535] env[62519]: DEBUG oslo_vmware.api [None req-82cc34c3-57f7-4d42-986e-90dfdcc61974 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Task: {'id': task-1802721, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1691.599340] env[62519]: DEBUG nova.compute.manager [req-2fb19961-6a03-4ab8-b9a7-653b1003d667 req-b1524cbf-481c-4fcf-b495-6fb6dc912207 service nova] [instance: 88f9351c-253b-49dd-a88e-b8585ea742ac] Received event network-vif-plugged-f0eeb5fa-bf84-45a4-a90f-85d593feed7e {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1691.599559] env[62519]: DEBUG oslo_concurrency.lockutils [req-2fb19961-6a03-4ab8-b9a7-653b1003d667 req-b1524cbf-481c-4fcf-b495-6fb6dc912207 service nova] Acquiring lock "88f9351c-253b-49dd-a88e-b8585ea742ac-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1691.599761] env[62519]: DEBUG oslo_concurrency.lockutils [req-2fb19961-6a03-4ab8-b9a7-653b1003d667 req-b1524cbf-481c-4fcf-b495-6fb6dc912207 service nova] Lock "88f9351c-253b-49dd-a88e-b8585ea742ac-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1691.599925] env[62519]: DEBUG oslo_concurrency.lockutils [req-2fb19961-6a03-4ab8-b9a7-653b1003d667 req-b1524cbf-481c-4fcf-b495-6fb6dc912207 service nova] Lock "88f9351c-253b-49dd-a88e-b8585ea742ac-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1691.600101] env[62519]: DEBUG nova.compute.manager [req-2fb19961-6a03-4ab8-b9a7-653b1003d667 req-b1524cbf-481c-4fcf-b495-6fb6dc912207 service nova] [instance: 88f9351c-253b-49dd-a88e-b8585ea742ac] No waiting events found dispatching network-vif-plugged-f0eeb5fa-bf84-45a4-a90f-85d593feed7e {{(pid=62519) pop_instance_event /opt/stack/nova/nova/compute/manager.py:323}} [ 1691.600263] env[62519]: WARNING nova.compute.manager [req-2fb19961-6a03-4ab8-b9a7-653b1003d667 req-b1524cbf-481c-4fcf-b495-6fb6dc912207 service nova] [instance: 88f9351c-253b-49dd-a88e-b8585ea742ac] Received unexpected event network-vif-plugged-f0eeb5fa-bf84-45a4-a90f-85d593feed7e for instance with vm_state building and task_state spawning. [ 1691.600421] env[62519]: DEBUG nova.compute.manager [req-2fb19961-6a03-4ab8-b9a7-653b1003d667 req-b1524cbf-481c-4fcf-b495-6fb6dc912207 service nova] [instance: 88f9351c-253b-49dd-a88e-b8585ea742ac] Received event network-changed-f0eeb5fa-bf84-45a4-a90f-85d593feed7e {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1691.600570] env[62519]: DEBUG nova.compute.manager [req-2fb19961-6a03-4ab8-b9a7-653b1003d667 req-b1524cbf-481c-4fcf-b495-6fb6dc912207 service nova] [instance: 88f9351c-253b-49dd-a88e-b8585ea742ac] Refreshing instance network info cache due to event network-changed-f0eeb5fa-bf84-45a4-a90f-85d593feed7e. 
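The network-vif-plugged event for port f0eeb5fa arrives while nothing is registered to wait for it, so the manager logs the "unexpected event" warning and simply refreshes the network info cache. A toy version of that register-then-wait handshake, assuming one threading.Event per (instance, event) pair; it is a sketch of the idea, not the compute manager's implementation:

    import threading

    class InstanceEvents:
        """Minimal 'wait for network-vif-plugged' bookkeeping."""

        def __init__(self):
            self._events = {}        # (instance, event_name) -> threading.Event
            self._lock = threading.Lock()

        def prepare(self, instance, name):
            with self._lock:
                ev = self._events[(instance, name)] = threading.Event()
            return ev

        def pop(self, instance, name):
            with self._lock:
                return self._events.pop((instance, name), None)

    events = InstanceEvents()

    def external_instance_event(instance, name):
        ev = events.pop(instance, name)
        if ev is None:
            print('Received unexpected event %s for instance %s' % (name, instance))
        else:
            ev.set()

    # compute side: register before plugging the VIF, then wait with a timeout
    waiter = events.prepare('88f9351c', 'network-vif-plugged-f0eeb5fa')
    external_instance_event('88f9351c', 'network-vif-plugged-f0eeb5fa')
    assert waiter.wait(timeout=1)
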
{{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11628}} [ 1691.600748] env[62519]: DEBUG oslo_concurrency.lockutils [req-2fb19961-6a03-4ab8-b9a7-653b1003d667 req-b1524cbf-481c-4fcf-b495-6fb6dc912207 service nova] Acquiring lock "refresh_cache-88f9351c-253b-49dd-a88e-b8585ea742ac" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1691.600882] env[62519]: DEBUG oslo_concurrency.lockutils [req-2fb19961-6a03-4ab8-b9a7-653b1003d667 req-b1524cbf-481c-4fcf-b495-6fb6dc912207 service nova] Acquired lock "refresh_cache-88f9351c-253b-49dd-a88e-b8585ea742ac" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1691.603095] env[62519]: DEBUG nova.network.neutron [req-2fb19961-6a03-4ab8-b9a7-653b1003d667 req-b1524cbf-481c-4fcf-b495-6fb6dc912207 service nova] [instance: 88f9351c-253b-49dd-a88e-b8585ea742ac] Refreshing network info cache for port f0eeb5fa-bf84-45a4-a90f-85d593feed7e {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1691.717014] env[62519]: DEBUG oslo_vmware.api [None req-b6d3bfb6-d0cd-4359-b03b-5e40dfd88595 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Task: {'id': task-1802720, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.169635} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1691.717704] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-b6d3bfb6-d0cd-4359-b03b-5e40dfd88595 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] [instance: 1cf285cf-8b4c-4872-b179-72e38c0143e0] Extended root virtual disk {{(pid=62519) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1691.718598] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e934273c-88e8-4ed0-a55b-eed10665917d {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1691.743564] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-b6d3bfb6-d0cd-4359-b03b-5e40dfd88595 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] [instance: 1cf285cf-8b4c-4872-b179-72e38c0143e0] Reconfiguring VM instance instance-00000046 to attach disk [datastore1] 1cf285cf-8b4c-4872-b179-72e38c0143e0/1cf285cf-8b4c-4872-b179-72e38c0143e0.vmdk or device None with type sparse {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1691.744556] env[62519]: DEBUG oslo_concurrency.lockutils [None req-9fb41100-265a-40a3-9269-3fe1f1b74f74 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Acquiring lock "refresh_cache-88f9351c-253b-49dd-a88e-b8585ea742ac" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1691.745102] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9b99573e-336a-4ce0-abf3-5146115db433 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1691.767080] env[62519]: DEBUG oslo_vmware.api [None req-b6d3bfb6-d0cd-4359-b03b-5e40dfd88595 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Waiting for the task: (returnval){ [ 1691.767080] env[62519]: value = "task-1802722" [ 
1691.767080] env[62519]: _type = "Task" [ 1691.767080] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1691.774489] env[62519]: DEBUG oslo_vmware.api [None req-b6d3bfb6-d0cd-4359-b03b-5e40dfd88595 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Task: {'id': task-1802722, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1691.805302] env[62519]: DEBUG oslo_concurrency.lockutils [None req-4054b116-0ba8-401b-909e-ea20d3c324af tempest-ServerGroupTestJSON-1731423365 tempest-ServerGroupTestJSON-1731423365-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.070s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1691.809510] env[62519]: DEBUG oslo_concurrency.lockutils [None req-bbbe2e6d-2a93-4fc7-a50a-4b3277528c06 tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 29.586s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1691.809510] env[62519]: DEBUG nova.objects.instance [None req-bbbe2e6d-2a93-4fc7-a50a-4b3277528c06 tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] Lazy-loading 'resources' on Instance uuid 8070aa59-3547-460a-b914-0e84620023d0 {{(pid=62519) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1691.831411] env[62519]: INFO nova.scheduler.client.report [None req-4054b116-0ba8-401b-909e-ea20d3c324af tempest-ServerGroupTestJSON-1731423365 tempest-ServerGroupTestJSON-1731423365-project-member] Deleted allocations for instance 45d1aa86-a5c8-4e75-a6c8-5f55461702f8 [ 1692.059683] env[62519]: DEBUG oslo_vmware.api [None req-82cc34c3-57f7-4d42-986e-90dfdcc61974 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Task: {'id': task-1802721, 'name': CopyVirtualDisk_Task} progress is 9%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1692.164736] env[62519]: DEBUG nova.network.neutron [req-2fb19961-6a03-4ab8-b9a7-653b1003d667 req-b1524cbf-481c-4fcf-b495-6fb6dc912207 service nova] [instance: 88f9351c-253b-49dd-a88e-b8585ea742ac] Instance cache missing network info. {{(pid=62519) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1692.278444] env[62519]: DEBUG oslo_vmware.api [None req-b6d3bfb6-d0cd-4359-b03b-5e40dfd88595 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Task: {'id': task-1802722, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1692.343708] env[62519]: DEBUG oslo_concurrency.lockutils [None req-4054b116-0ba8-401b-909e-ea20d3c324af tempest-ServerGroupTestJSON-1731423365 tempest-ServerGroupTestJSON-1731423365-project-member] Lock "45d1aa86-a5c8-4e75-a6c8-5f55461702f8" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 35.797s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1692.373984] env[62519]: DEBUG nova.network.neutron [req-2fb19961-6a03-4ab8-b9a7-653b1003d667 req-b1524cbf-481c-4fcf-b495-6fb6dc912207 service nova] [instance: 88f9351c-253b-49dd-a88e-b8585ea742ac] Updating instance_info_cache with network_info: [] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1692.554269] env[62519]: DEBUG oslo_vmware.api [None req-82cc34c3-57f7-4d42-986e-90dfdcc61974 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Task: {'id': task-1802721, 'name': CopyVirtualDisk_Task} progress is 9%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1692.783262] env[62519]: DEBUG oslo_vmware.api [None req-b6d3bfb6-d0cd-4359-b03b-5e40dfd88595 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Task: {'id': task-1802722, 'name': ReconfigVM_Task, 'duration_secs': 0.542824} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1692.783867] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-b6d3bfb6-d0cd-4359-b03b-5e40dfd88595 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] [instance: 1cf285cf-8b4c-4872-b179-72e38c0143e0] Reconfigured VM instance instance-00000046 to attach disk [datastore1] 1cf285cf-8b4c-4872-b179-72e38c0143e0/1cf285cf-8b4c-4872-b179-72e38c0143e0.vmdk or device None with type sparse {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1692.784570] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-ab2accd1-648b-4449-8518-469424884317 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1692.793951] env[62519]: DEBUG oslo_vmware.api [None req-b6d3bfb6-d0cd-4359-b03b-5e40dfd88595 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Waiting for the task: (returnval){ [ 1692.793951] env[62519]: value = "task-1802723" [ 1692.793951] env[62519]: _type = "Task" [ 1692.793951] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1692.805267] env[62519]: DEBUG oslo_vmware.api [None req-b6d3bfb6-d0cd-4359-b03b-5e40dfd88595 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Task: {'id': task-1802723, 'name': Rename_Task} progress is 5%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1692.845147] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-818a6de3-2a4a-4918-aefb-ff1d496a056e {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1692.862042] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19183b6e-c798-427c-ae63-92037aacc98c {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1692.897243] env[62519]: DEBUG oslo_concurrency.lockutils [req-2fb19961-6a03-4ab8-b9a7-653b1003d667 req-b1524cbf-481c-4fcf-b495-6fb6dc912207 service nova] Releasing lock "refresh_cache-88f9351c-253b-49dd-a88e-b8585ea742ac" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1692.898104] env[62519]: DEBUG oslo_concurrency.lockutils [None req-9fb41100-265a-40a3-9269-3fe1f1b74f74 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Acquired lock "refresh_cache-88f9351c-253b-49dd-a88e-b8585ea742ac" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1692.898215] env[62519]: DEBUG nova.network.neutron [None req-9fb41100-265a-40a3-9269-3fe1f1b74f74 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] [instance: 88f9351c-253b-49dd-a88e-b8585ea742ac] Building network info cache for instance {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1692.900032] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1dc6a0d4-3cf9-4597-9195-e0b423cc5bab {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1692.911237] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73871e82-ed32-4567-94af-377423b21a68 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1692.931122] env[62519]: DEBUG nova.compute.provider_tree [None req-bbbe2e6d-2a93-4fc7-a50a-4b3277528c06 tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] Inventory has not changed in ProviderTree for provider: f8ca0d98-9158-4b85-ae0e-b106f966dd44 {{(pid=62519) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1693.056317] env[62519]: DEBUG oslo_vmware.api [None req-82cc34c3-57f7-4d42-986e-90dfdcc61974 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Task: {'id': task-1802721, 'name': CopyVirtualDisk_Task} progress is 29%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1693.304777] env[62519]: DEBUG oslo_vmware.api [None req-b6d3bfb6-d0cd-4359-b03b-5e40dfd88595 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Task: {'id': task-1802723, 'name': Rename_Task} progress is 14%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1693.432914] env[62519]: DEBUG nova.scheduler.client.report [None req-bbbe2e6d-2a93-4fc7-a50a-4b3277528c06 tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] Inventory has not changed for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 157, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1693.438627] env[62519]: DEBUG nova.network.neutron [None req-9fb41100-265a-40a3-9269-3fe1f1b74f74 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] [instance: 88f9351c-253b-49dd-a88e-b8585ea742ac] Instance cache missing network info. {{(pid=62519) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1693.556720] env[62519]: DEBUG oslo_vmware.api [None req-82cc34c3-57f7-4d42-986e-90dfdcc61974 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Task: {'id': task-1802721, 'name': CopyVirtualDisk_Task} progress is 49%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1693.606697] env[62519]: DEBUG nova.network.neutron [None req-9fb41100-265a-40a3-9269-3fe1f1b74f74 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] [instance: 88f9351c-253b-49dd-a88e-b8585ea742ac] Updating instance_info_cache with network_info: [{"id": "f0eeb5fa-bf84-45a4-a90f-85d593feed7e", "address": "fa:16:3e:f5:9b:e3", "network": {"id": "ddacabe4-9723-4a0b-9642-4c3ca54cd05f", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1355796152-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "549cc35f5ff249f6bf22c67872883db0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bc1e16db-ad3b-4b7f-ab64-4609c87abac0", "external-id": "nsx-vlan-transportzone-500", "segmentation_id": 500, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf0eeb5fa-bf", "ovs_interfaceid": "f0eeb5fa-bf84-45a4-a90f-85d593feed7e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1693.808394] env[62519]: DEBUG oslo_vmware.api [None req-b6d3bfb6-d0cd-4359-b03b-5e40dfd88595 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Task: {'id': task-1802723, 'name': Rename_Task, 'duration_secs': 0.674521} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1693.808394] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-b6d3bfb6-d0cd-4359-b03b-5e40dfd88595 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] [instance: 1cf285cf-8b4c-4872-b179-72e38c0143e0] Powering on the VM {{(pid=62519) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1693.808394] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ee6ba8f9-3c60-4245-a399-99ef75182d32 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1693.814613] env[62519]: DEBUG oslo_vmware.api [None req-b6d3bfb6-d0cd-4359-b03b-5e40dfd88595 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Waiting for the task: (returnval){ [ 1693.814613] env[62519]: value = "task-1802724" [ 1693.814613] env[62519]: _type = "Task" [ 1693.814613] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1693.827807] env[62519]: DEBUG oslo_vmware.api [None req-b6d3bfb6-d0cd-4359-b03b-5e40dfd88595 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Task: {'id': task-1802724, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1693.939689] env[62519]: DEBUG oslo_concurrency.lockutils [None req-bbbe2e6d-2a93-4fc7-a50a-4b3277528c06 tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.131s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1693.942447] env[62519]: DEBUG oslo_concurrency.lockutils [None req-7c5992d4-cb16-49ff-bdd0-3edf419b5b38 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 28.283s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1693.944850] env[62519]: INFO nova.compute.claims [None req-7c5992d4-cb16-49ff-bdd0-3edf419b5b38 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] [instance: 67424299-f100-49a1-ab73-0407b60a2d9f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1693.971290] env[62519]: INFO nova.scheduler.client.report [None req-bbbe2e6d-2a93-4fc7-a50a-4b3277528c06 tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] Deleted allocations for instance 8070aa59-3547-460a-b914-0e84620023d0 [ 1694.057418] env[62519]: DEBUG oslo_vmware.api [None req-82cc34c3-57f7-4d42-986e-90dfdcc61974 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Task: {'id': task-1802721, 'name': CopyVirtualDisk_Task} progress is 69%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1694.109744] env[62519]: DEBUG oslo_concurrency.lockutils [None req-9fb41100-265a-40a3-9269-3fe1f1b74f74 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Releasing lock "refresh_cache-88f9351c-253b-49dd-a88e-b8585ea742ac" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1694.109906] env[62519]: DEBUG nova.compute.manager [None req-9fb41100-265a-40a3-9269-3fe1f1b74f74 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] [instance: 88f9351c-253b-49dd-a88e-b8585ea742ac] Instance network_info: |[{"id": "f0eeb5fa-bf84-45a4-a90f-85d593feed7e", "address": "fa:16:3e:f5:9b:e3", "network": {"id": "ddacabe4-9723-4a0b-9642-4c3ca54cd05f", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1355796152-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "549cc35f5ff249f6bf22c67872883db0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bc1e16db-ad3b-4b7f-ab64-4609c87abac0", "external-id": "nsx-vlan-transportzone-500", "segmentation_id": 500, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf0eeb5fa-bf", "ovs_interfaceid": "f0eeb5fa-bf84-45a4-a90f-85d593feed7e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62519) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2004}} [ 1694.110403] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-9fb41100-265a-40a3-9269-3fe1f1b74f74 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] [instance: 88f9351c-253b-49dd-a88e-b8585ea742ac] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:f5:9b:e3', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'bc1e16db-ad3b-4b7f-ab64-4609c87abac0', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'f0eeb5fa-bf84-45a4-a90f-85d593feed7e', 'vif_model': 'vmxnet3'}] {{(pid=62519) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1694.119374] env[62519]: DEBUG oslo.service.loopingcall [None req-9fb41100-265a-40a3-9269-3fe1f1b74f74 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
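The VIF info above keeps only what the CreateVM spec needs from the much larger port dict cached earlier: the integration bridge as network_name, the MAC, the NSX logical switch id as an opaque network reference, and the vmxnet3 model. A sketch of that reduction using only fields present in the log; the mapping is inferred from the two entries rather than taken from the driver's code:

    def vif_info_from_port(port):
        """Reduce a Neutron port dict (shaped like the cache entries above)
        to the fields used when building the VM."""
        return {
            'network_name': port['network']['bridge'],             # 'br-int'
            'mac_address': port['address'],
            'network_ref': {
                'type': 'OpaqueNetwork',
                'network-id': port['details']['nsx-logical-switch-id'],
                'network-type': 'nsx.LogicalSwitch',
                'use-external-id': True,
            },
            'iface_id': port['id'],
            'vif_model': 'vmxnet3',                                # per the image's vmxnet3 variant
        }

    port = {
        'id': 'f0eeb5fa-bf84-45a4-a90f-85d593feed7e',
        'address': 'fa:16:3e:f5:9b:e3',
        'network': {'bridge': 'br-int'},
        'details': {'nsx-logical-switch-id': 'bc1e16db-ad3b-4b7f-ab64-4609c87abac0'},
    }
    print(vif_info_from_port(port))
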
{{(pid=62519) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1694.119892] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 88f9351c-253b-49dd-a88e-b8585ea742ac] Creating VM on the ESX host {{(pid=62519) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1694.119892] env[62519]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f194c92a-192a-4acf-a4f6-e731987cbc70 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1694.143502] env[62519]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1694.143502] env[62519]: value = "task-1802725" [ 1694.143502] env[62519]: _type = "Task" [ 1694.143502] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1694.152316] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1802725, 'name': CreateVM_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1694.329313] env[62519]: DEBUG oslo_vmware.api [None req-b6d3bfb6-d0cd-4359-b03b-5e40dfd88595 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Task: {'id': task-1802724, 'name': PowerOnVM_Task} progress is 88%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1694.483224] env[62519]: DEBUG oslo_concurrency.lockutils [None req-bbbe2e6d-2a93-4fc7-a50a-4b3277528c06 tempest-ListServerFiltersTestJSON-2084970753 tempest-ListServerFiltersTestJSON-2084970753-project-member] Lock "8070aa59-3547-460a-b914-0e84620023d0" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 35.697s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1694.556504] env[62519]: DEBUG oslo_vmware.api [None req-82cc34c3-57f7-4d42-986e-90dfdcc61974 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Task: {'id': task-1802721, 'name': CopyVirtualDisk_Task} progress is 91%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1694.565259] env[62519]: DEBUG oslo_concurrency.lockutils [None req-acfac456-33c8-45db-84b1-8d13db70232a tempest-FloatingIPsAssociationNegativeTestJSON-1468566888 tempest-FloatingIPsAssociationNegativeTestJSON-1468566888-project-member] Acquiring lock "d710c97b-a2fd-4a54-baaa-ec7664895ce7" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1694.565589] env[62519]: DEBUG oslo_concurrency.lockutils [None req-acfac456-33c8-45db-84b1-8d13db70232a tempest-FloatingIPsAssociationNegativeTestJSON-1468566888 tempest-FloatingIPsAssociationNegativeTestJSON-1468566888-project-member] Lock "d710c97b-a2fd-4a54-baaa-ec7664895ce7" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1694.656430] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1802725, 'name': CreateVM_Task} progress is 25%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1694.827633] env[62519]: DEBUG oslo_vmware.api [None req-b6d3bfb6-d0cd-4359-b03b-5e40dfd88595 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Task: {'id': task-1802724, 'name': PowerOnVM_Task, 'duration_secs': 0.729499} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1694.828818] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-b6d3bfb6-d0cd-4359-b03b-5e40dfd88595 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] [instance: 1cf285cf-8b4c-4872-b179-72e38c0143e0] Powered on the VM {{(pid=62519) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1694.828818] env[62519]: INFO nova.compute.manager [None req-b6d3bfb6-d0cd-4359-b03b-5e40dfd88595 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] [instance: 1cf285cf-8b4c-4872-b179-72e38c0143e0] Took 9.99 seconds to spawn the instance on the hypervisor. [ 1694.828818] env[62519]: DEBUG nova.compute.manager [None req-b6d3bfb6-d0cd-4359-b03b-5e40dfd88595 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] [instance: 1cf285cf-8b4c-4872-b179-72e38c0143e0] Checking state {{(pid=62519) _get_power_state /opt/stack/nova/nova/compute/manager.py:1799}} [ 1694.829212] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73703e11-31a8-4c01-99cb-ffc486f2a32c {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1695.057394] env[62519]: DEBUG oslo_vmware.api [None req-82cc34c3-57f7-4d42-986e-90dfdcc61974 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Task: {'id': task-1802721, 'name': CopyVirtualDisk_Task, 'duration_secs': 3.219765} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1695.060153] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-82cc34c3-57f7-4d42-986e-90dfdcc61974 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/b36eba9a-e231-4006-9afd-ecb0aaa21542/b36eba9a-e231-4006-9afd-ecb0aaa21542.vmdk to [datastore1] 54a8aa34-1595-4494-ba68-6915611631ce/54a8aa34-1595-4494-ba68-6915611631ce.vmdk {{(pid=62519) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1695.061253] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0194a032-499d-41be-82e5-cb73f7457092 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1695.076604] env[62519]: DEBUG nova.compute.manager [None req-acfac456-33c8-45db-84b1-8d13db70232a tempest-FloatingIPsAssociationNegativeTestJSON-1468566888 tempest-FloatingIPsAssociationNegativeTestJSON-1468566888-project-member] [instance: d710c97b-a2fd-4a54-baaa-ec7664895ce7] Starting instance... 
{{(pid=62519) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2441}} [ 1695.089140] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-82cc34c3-57f7-4d42-986e-90dfdcc61974 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] [instance: 54a8aa34-1595-4494-ba68-6915611631ce] Reconfiguring VM instance instance-00000045 to attach disk [datastore1] 54a8aa34-1595-4494-ba68-6915611631ce/54a8aa34-1595-4494-ba68-6915611631ce.vmdk or device None with type streamOptimized {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1695.092597] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b434632d-2b27-49f1-a149-9dea4897262b {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1695.114641] env[62519]: DEBUG oslo_vmware.api [None req-82cc34c3-57f7-4d42-986e-90dfdcc61974 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Waiting for the task: (returnval){ [ 1695.114641] env[62519]: value = "task-1802726" [ 1695.114641] env[62519]: _type = "Task" [ 1695.114641] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1695.127362] env[62519]: DEBUG oslo_vmware.api [None req-82cc34c3-57f7-4d42-986e-90dfdcc61974 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Task: {'id': task-1802726, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1695.160068] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1802725, 'name': CreateVM_Task, 'duration_secs': 0.775835} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1695.160068] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 88f9351c-253b-49dd-a88e-b8585ea742ac] Created VM on the ESX host {{(pid=62519) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1695.160068] env[62519]: DEBUG oslo_concurrency.lockutils [None req-9fb41100-265a-40a3-9269-3fe1f1b74f74 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1695.160068] env[62519]: DEBUG oslo_concurrency.lockutils [None req-9fb41100-265a-40a3-9269-3fe1f1b74f74 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Acquired lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1695.160068] env[62519]: DEBUG oslo_concurrency.lockutils [None req-9fb41100-265a-40a3-9269-3fe1f1b74f74 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1695.160068] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-84902412-1b2b-4e00-955b-b23734280782 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1695.164277] env[62519]: DEBUG oslo_vmware.api [None req-9fb41100-265a-40a3-9269-3fe1f1b74f74 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Waiting for the task: (returnval){ [ 1695.164277] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]52abcaec-a515-12cc-b600-a8e45952c402" [ 1695.164277] env[62519]: _type = "Task" [ 1695.164277] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1695.173058] env[62519]: DEBUG oslo_vmware.api [None req-9fb41100-265a-40a3-9269-3fe1f1b74f74 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52abcaec-a515-12cc-b600-a8e45952c402, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1695.352561] env[62519]: INFO nova.compute.manager [None req-b6d3bfb6-d0cd-4359-b03b-5e40dfd88595 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] [instance: 1cf285cf-8b4c-4872-b179-72e38c0143e0] Took 45.65 seconds to build instance. 
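The entries above and below all follow the same wait_for_task pattern: the driver invokes a vCenter task (PowerOnVM_Task, CreateVM_Task, SearchDatastore_Task, CopyVirtualDisk_Task), then polls it until it reports completion, logging "progress is N%" along the way. What follows is a minimal, self-contained sketch of that poll loop under stated assumptions; TaskState, TaskInfo and the caller-supplied get_task_info callback are illustrative stand-ins, not the oslo.vmware API.

    import time
    from dataclasses import dataclass
    from enum import Enum
    from typing import Callable, Optional


    class TaskState(Enum):
        # Hypothetical stand-in for the vCenter task states the log reports.
        QUEUED = "queued"
        RUNNING = "running"
        SUCCESS = "success"
        ERROR = "error"


    @dataclass
    class TaskInfo:
        # Hypothetical: mirrors the fields that show up in the log output.
        task_id: str
        state: TaskState
        progress: int                     # percent, as in "progress is 88%"
        error: Optional[str] = None


    def wait_for_task(get_task_info: Callable[[str], TaskInfo],
                      task_id: str,
                      poll_interval: float = 0.5,
                      timeout: float = 300.0) -> TaskInfo:
        """Poll a task until it finishes, mimicking the log's polling flow.

        get_task_info is a caller-supplied function (hypothetical here) that
        returns the current TaskInfo for task_id, e.g. by querying vCenter.
        """
        deadline = time.monotonic() + timeout
        while time.monotonic() < deadline:
            info = get_task_info(task_id)
            print(f"Task: {{'id': {task_id!r}}} progress is {info.progress}%.")
            if info.state is TaskState.SUCCESS:
                return info
            if info.state is TaskState.ERROR:
                raise RuntimeError(f"Task {task_id} failed: {info.error}")
            time.sleep(poll_interval)
        raise TimeoutError(f"Task {task_id} did not complete within {timeout}s")

The sketch only captures the progress-polling behaviour behind the repeated "progress is N%" lines; it leaves out the retry and session handling a real driver needs.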
[ 1695.480866] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7a861fd-3313-4c28-9f59-a3e9f9b81bb5 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1695.492725] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64980968-0783-4bc8-a611-e4fe60bca1dd {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1695.533543] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c328a9a2-777e-4030-b465-398bf86c94b9 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1695.542975] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00635673-92e5-447a-b058-e889b1778a2e {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1695.562877] env[62519]: DEBUG nova.compute.provider_tree [None req-7c5992d4-cb16-49ff-bdd0-3edf419b5b38 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] Inventory has not changed in ProviderTree for provider: f8ca0d98-9158-4b85-ae0e-b106f966dd44 {{(pid=62519) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1695.610960] env[62519]: DEBUG oslo_concurrency.lockutils [None req-acfac456-33c8-45db-84b1-8d13db70232a tempest-FloatingIPsAssociationNegativeTestJSON-1468566888 tempest-FloatingIPsAssociationNegativeTestJSON-1468566888-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1695.625876] env[62519]: DEBUG oslo_vmware.api [None req-82cc34c3-57f7-4d42-986e-90dfdcc61974 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Task: {'id': task-1802726, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1695.679329] env[62519]: DEBUG oslo_vmware.api [None req-9fb41100-265a-40a3-9269-3fe1f1b74f74 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52abcaec-a515-12cc-b600-a8e45952c402, 'name': SearchDatastore_Task, 'duration_secs': 0.011026} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1695.679898] env[62519]: DEBUG oslo_concurrency.lockutils [None req-9fb41100-265a-40a3-9269-3fe1f1b74f74 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Releasing lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1695.680103] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-9fb41100-265a-40a3-9269-3fe1f1b74f74 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] [instance: 88f9351c-253b-49dd-a88e-b8585ea742ac] Processing image 15793716-f1d9-4a86-9030-717adf498693 {{(pid=62519) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1695.680362] env[62519]: DEBUG oslo_concurrency.lockutils [None req-9fb41100-265a-40a3-9269-3fe1f1b74f74 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1695.680511] env[62519]: DEBUG oslo_concurrency.lockutils [None req-9fb41100-265a-40a3-9269-3fe1f1b74f74 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Acquired lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1695.680780] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-9fb41100-265a-40a3-9269-3fe1f1b74f74 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62519) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1695.681028] env[62519]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d6462407-c5b6-4bf3-9cda-345e8b691528 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1695.691023] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-9fb41100-265a-40a3-9269-3fe1f1b74f74 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62519) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1695.691290] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-9fb41100-265a-40a3-9269-3fe1f1b74f74 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62519) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1695.692132] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-09669233-4125-4084-b309-2e987198426e {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1695.698259] env[62519]: DEBUG oslo_vmware.api [None req-9fb41100-265a-40a3-9269-3fe1f1b74f74 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Waiting for the task: (returnval){ [ 1695.698259] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]52697889-3a0a-e69b-6bb1-5fbbd1caaf74" [ 1695.698259] env[62519]: _type = "Task" [ 1695.698259] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1695.706903] env[62519]: DEBUG oslo_vmware.api [None req-9fb41100-265a-40a3-9269-3fe1f1b74f74 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52697889-3a0a-e69b-6bb1-5fbbd1caaf74, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1696.036681] env[62519]: DEBUG oslo_vmware.rw_handles [None req-da318ded-1134-4cd5-93b1-e00afd02b3d1 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52dca328-60f8-4556-1c63-3967de83c17d/disk-0.vmdk. {{(pid=62519) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1696.036681] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ac33730-d98f-47d5-9a37-5973b062dd8a {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1696.046468] env[62519]: DEBUG oslo_vmware.rw_handles [None req-da318ded-1134-4cd5-93b1-e00afd02b3d1 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52dca328-60f8-4556-1c63-3967de83c17d/disk-0.vmdk is in state: ready. {{(pid=62519) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1696.047264] env[62519]: ERROR oslo_vmware.rw_handles [None req-da318ded-1134-4cd5-93b1-e00afd02b3d1 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Aborting lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52dca328-60f8-4556-1c63-3967de83c17d/disk-0.vmdk due to incomplete transfer. [ 1696.047264] env[62519]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-0493ef2d-f5bc-41f6-990f-f4fa1e7a5425 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1696.054077] env[62519]: DEBUG oslo_vmware.rw_handles [None req-da318ded-1134-4cd5-93b1-e00afd02b3d1 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Closed VMDK read handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52dca328-60f8-4556-1c63-3967de83c17d/disk-0.vmdk. 
{{(pid=62519) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1696.054284] env[62519]: DEBUG nova.virt.vmwareapi.images [None req-da318ded-1134-4cd5-93b1-e00afd02b3d1 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] [instance: 40507d8c-8f30-45d4-9c65-03f8b1271afb] Uploaded image bd04b6bc-eb74-425e-b90d-ce3811593054 to the Glance image server {{(pid=62519) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1696.056601] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-da318ded-1134-4cd5-93b1-e00afd02b3d1 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] [instance: 40507d8c-8f30-45d4-9c65-03f8b1271afb] Destroying the VM {{(pid=62519) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1696.056852] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-25535720-747b-47e3-8eb2-06677ce4f0cc {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1696.062604] env[62519]: DEBUG oslo_vmware.api [None req-da318ded-1134-4cd5-93b1-e00afd02b3d1 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Waiting for the task: (returnval){ [ 1696.062604] env[62519]: value = "task-1802727" [ 1696.062604] env[62519]: _type = "Task" [ 1696.062604] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1696.066344] env[62519]: DEBUG nova.scheduler.client.report [None req-7c5992d4-cb16-49ff-bdd0-3edf419b5b38 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] Inventory has not changed for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 157, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1696.077305] env[62519]: DEBUG oslo_vmware.api [None req-da318ded-1134-4cd5-93b1-e00afd02b3d1 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Task: {'id': task-1802727, 'name': Destroy_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1696.126186] env[62519]: DEBUG oslo_vmware.api [None req-82cc34c3-57f7-4d42-986e-90dfdcc61974 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Task: {'id': task-1802726, 'name': ReconfigVM_Task, 'duration_secs': 0.978049} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1696.126550] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-82cc34c3-57f7-4d42-986e-90dfdcc61974 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] [instance: 54a8aa34-1595-4494-ba68-6915611631ce] Reconfigured VM instance instance-00000045 to attach disk [datastore1] 54a8aa34-1595-4494-ba68-6915611631ce/54a8aa34-1595-4494-ba68-6915611631ce.vmdk or device None with type streamOptimized {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1696.127431] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-91663b80-29c5-40b1-8afa-4b71c745a101 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1696.133357] env[62519]: DEBUG oslo_vmware.api [None req-82cc34c3-57f7-4d42-986e-90dfdcc61974 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Waiting for the task: (returnval){ [ 1696.133357] env[62519]: value = "task-1802728" [ 1696.133357] env[62519]: _type = "Task" [ 1696.133357] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1696.143023] env[62519]: DEBUG oslo_vmware.api [None req-82cc34c3-57f7-4d42-986e-90dfdcc61974 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Task: {'id': task-1802728, 'name': Rename_Task} progress is 5%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1696.181170] env[62519]: DEBUG oslo_concurrency.lockutils [None req-19e91ec7-750b-41f0-83be-2b922f225694 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Acquiring lock "1cf285cf-8b4c-4872-b179-72e38c0143e0" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1696.212263] env[62519]: DEBUG oslo_vmware.api [None req-9fb41100-265a-40a3-9269-3fe1f1b74f74 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52697889-3a0a-e69b-6bb1-5fbbd1caaf74, 'name': SearchDatastore_Task, 'duration_secs': 0.010288} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1696.213160] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5b378bcf-59b5-4d6e-a81d-1da755a0765d {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1696.218670] env[62519]: DEBUG oslo_vmware.api [None req-9fb41100-265a-40a3-9269-3fe1f1b74f74 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Waiting for the task: (returnval){ [ 1696.218670] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]523c5790-9e1c-2b8f-0056-22feee7179ca" [ 1696.218670] env[62519]: _type = "Task" [ 1696.218670] env[62519]: } to complete. 
{{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1696.227084] env[62519]: DEBUG oslo_vmware.api [None req-9fb41100-265a-40a3-9269-3fe1f1b74f74 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]523c5790-9e1c-2b8f-0056-22feee7179ca, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1696.571513] env[62519]: DEBUG oslo_concurrency.lockutils [None req-7c5992d4-cb16-49ff-bdd0-3edf419b5b38 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.629s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1696.572055] env[62519]: DEBUG nova.compute.manager [None req-7c5992d4-cb16-49ff-bdd0-3edf419b5b38 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] [instance: 67424299-f100-49a1-ab73-0407b60a2d9f] Start building networks asynchronously for instance. {{(pid=62519) _build_resources /opt/stack/nova/nova/compute/manager.py:2838}} [ 1696.580397] env[62519]: DEBUG oslo_concurrency.lockutils [None req-7c5992d4-cb16-49ff-bdd0-3edf419b5b38 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 30.874s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1696.581919] env[62519]: INFO nova.compute.claims [None req-7c5992d4-cb16-49ff-bdd0-3edf419b5b38 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] [instance: 83a680b9-0c2d-4231-9ddf-9aa90209c620] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1696.584595] env[62519]: DEBUG oslo_vmware.api [None req-da318ded-1134-4cd5-93b1-e00afd02b3d1 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Task: {'id': task-1802727, 'name': Destroy_Task} progress is 33%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1696.643384] env[62519]: DEBUG oslo_vmware.api [None req-82cc34c3-57f7-4d42-986e-90dfdcc61974 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Task: {'id': task-1802728, 'name': Rename_Task, 'duration_secs': 0.224375} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1696.643682] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-82cc34c3-57f7-4d42-986e-90dfdcc61974 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] [instance: 54a8aa34-1595-4494-ba68-6915611631ce] Powering on the VM {{(pid=62519) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1696.643967] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-55390a7e-befd-4666-8c65-8f412d90be9b {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1696.650273] env[62519]: DEBUG oslo_vmware.api [None req-82cc34c3-57f7-4d42-986e-90dfdcc61974 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Waiting for the task: (returnval){ [ 1696.650273] env[62519]: value = "task-1802729" [ 1696.650273] env[62519]: _type = "Task" [ 1696.650273] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1696.662682] env[62519]: DEBUG oslo_vmware.api [None req-82cc34c3-57f7-4d42-986e-90dfdcc61974 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Task: {'id': task-1802729, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1696.729908] env[62519]: DEBUG oslo_vmware.api [None req-9fb41100-265a-40a3-9269-3fe1f1b74f74 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]523c5790-9e1c-2b8f-0056-22feee7179ca, 'name': SearchDatastore_Task, 'duration_secs': 0.010203} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1696.730198] env[62519]: DEBUG oslo_concurrency.lockutils [None req-9fb41100-265a-40a3-9269-3fe1f1b74f74 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Releasing lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1696.730460] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-9fb41100-265a-40a3-9269-3fe1f1b74f74 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk to [datastore1] 88f9351c-253b-49dd-a88e-b8585ea742ac/88f9351c-253b-49dd-a88e-b8585ea742ac.vmdk {{(pid=62519) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1696.730713] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f82d5248-c1c8-46c2-8cfd-bfdebaec1ca3 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1696.737879] env[62519]: DEBUG oslo_vmware.api [None req-9fb41100-265a-40a3-9269-3fe1f1b74f74 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Waiting for the task: (returnval){ [ 1696.737879] env[62519]: value = "task-1802730" [ 1696.737879] env[62519]: _type = "Task" [ 1696.737879] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1696.746367] env[62519]: DEBUG oslo_vmware.api [None req-9fb41100-265a-40a3-9269-3fe1f1b74f74 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Task: {'id': task-1802730, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1696.862427] env[62519]: DEBUG oslo_concurrency.lockutils [None req-b6d3bfb6-d0cd-4359-b03b-5e40dfd88595 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Lock "1cf285cf-8b4c-4872-b179-72e38c0143e0" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 48.170s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1696.862734] env[62519]: DEBUG oslo_concurrency.lockutils [None req-19e91ec7-750b-41f0-83be-2b922f225694 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Lock "1cf285cf-8b4c-4872-b179-72e38c0143e0" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.682s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1696.862967] env[62519]: DEBUG oslo_concurrency.lockutils [None req-19e91ec7-750b-41f0-83be-2b922f225694 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Acquiring lock "1cf285cf-8b4c-4872-b179-72e38c0143e0-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1696.864032] env[62519]: DEBUG oslo_concurrency.lockutils [None req-19e91ec7-750b-41f0-83be-2b922f225694 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Lock "1cf285cf-8b4c-4872-b179-72e38c0143e0-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1696.864032] env[62519]: DEBUG oslo_concurrency.lockutils [None req-19e91ec7-750b-41f0-83be-2b922f225694 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Lock "1cf285cf-8b4c-4872-b179-72e38c0143e0-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1696.866196] env[62519]: INFO nova.compute.manager [None req-19e91ec7-750b-41f0-83be-2b922f225694 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] [instance: 1cf285cf-8b4c-4872-b179-72e38c0143e0] Terminating instance [ 1697.079559] env[62519]: DEBUG nova.compute.utils [None req-7c5992d4-cb16-49ff-bdd0-3edf419b5b38 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] Using /dev/sd instead of None {{(pid=62519) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1697.081198] env[62519]: DEBUG oslo_vmware.api [None req-da318ded-1134-4cd5-93b1-e00afd02b3d1 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Task: {'id': task-1802727, 'name': Destroy_Task, 'duration_secs': 0.7144} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1697.081501] env[62519]: DEBUG nova.compute.manager [None req-7c5992d4-cb16-49ff-bdd0-3edf419b5b38 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] [instance: 67424299-f100-49a1-ab73-0407b60a2d9f] Allocating IP information in the background. {{(pid=62519) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1989}} [ 1697.081671] env[62519]: DEBUG nova.network.neutron [None req-7c5992d4-cb16-49ff-bdd0-3edf419b5b38 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] [instance: 67424299-f100-49a1-ab73-0407b60a2d9f] allocate_for_instance() {{(pid=62519) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1697.083691] env[62519]: INFO nova.virt.vmwareapi.vm_util [None req-da318ded-1134-4cd5-93b1-e00afd02b3d1 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] [instance: 40507d8c-8f30-45d4-9c65-03f8b1271afb] Destroyed the VM [ 1697.083959] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-da318ded-1134-4cd5-93b1-e00afd02b3d1 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] [instance: 40507d8c-8f30-45d4-9c65-03f8b1271afb] Deleting Snapshot of the VM instance {{(pid=62519) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1697.084487] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-8285807d-4e4b-460a-ad6c-92b711cda83d {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1697.096993] env[62519]: DEBUG oslo_vmware.api [None req-da318ded-1134-4cd5-93b1-e00afd02b3d1 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Waiting for the task: (returnval){ [ 1697.096993] env[62519]: value = "task-1802731" [ 1697.096993] env[62519]: _type = "Task" [ 1697.096993] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1697.108996] env[62519]: DEBUG oslo_vmware.api [None req-da318ded-1134-4cd5-93b1-e00afd02b3d1 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Task: {'id': task-1802731, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1697.156756] env[62519]: DEBUG nova.policy [None req-7c5992d4-cb16-49ff-bdd0-3edf419b5b38 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '8e80a7ce112e4087af60e7de4d915118', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f1e17b34cbef497985ff79e77d5b0f89', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62519) authorize /opt/stack/nova/nova/policy.py:192}} [ 1697.165664] env[62519]: DEBUG oslo_vmware.api [None req-82cc34c3-57f7-4d42-986e-90dfdcc61974 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Task: {'id': task-1802729, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1697.250662] env[62519]: DEBUG oslo_vmware.api [None req-9fb41100-265a-40a3-9269-3fe1f1b74f74 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Task: {'id': task-1802730, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1697.370423] env[62519]: DEBUG nova.compute.manager [None req-19e91ec7-750b-41f0-83be-2b922f225694 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] [instance: 1cf285cf-8b4c-4872-b179-72e38c0143e0] Start destroying the instance on the hypervisor. {{(pid=62519) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3166}} [ 1697.371126] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-19e91ec7-750b-41f0-83be-2b922f225694 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] [instance: 1cf285cf-8b4c-4872-b179-72e38c0143e0] Destroying instance {{(pid=62519) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1697.372278] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25fb9a49-5f1a-4a77-989c-f302c63ae3fe {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1697.389031] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-19e91ec7-750b-41f0-83be-2b922f225694 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] [instance: 1cf285cf-8b4c-4872-b179-72e38c0143e0] Powering off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1697.389479] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b28e4bfd-4e20-4a46-a8f2-6cb72a5a0cbc {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1697.398124] env[62519]: DEBUG oslo_vmware.api [None req-19e91ec7-750b-41f0-83be-2b922f225694 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Waiting for the task: (returnval){ [ 1697.398124] env[62519]: value = "task-1802732" [ 1697.398124] env[62519]: _type = "Task" [ 1697.398124] env[62519]: } to complete. 
{{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1697.407096] env[62519]: DEBUG oslo_vmware.api [None req-19e91ec7-750b-41f0-83be-2b922f225694 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Task: {'id': task-1802732, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1697.585194] env[62519]: DEBUG nova.compute.manager [None req-7c5992d4-cb16-49ff-bdd0-3edf419b5b38 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] [instance: 67424299-f100-49a1-ab73-0407b60a2d9f] Start building block device mappings for instance. {{(pid=62519) _build_resources /opt/stack/nova/nova/compute/manager.py:2873}} [ 1697.618137] env[62519]: DEBUG oslo_vmware.api [None req-da318ded-1134-4cd5-93b1-e00afd02b3d1 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Task: {'id': task-1802731, 'name': RemoveSnapshot_Task, 'duration_secs': 0.462478} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1697.620509] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-da318ded-1134-4cd5-93b1-e00afd02b3d1 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] [instance: 40507d8c-8f30-45d4-9c65-03f8b1271afb] Deleted Snapshot of the VM instance {{(pid=62519) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1697.622512] env[62519]: DEBUG nova.compute.manager [None req-da318ded-1134-4cd5-93b1-e00afd02b3d1 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] [instance: 40507d8c-8f30-45d4-9c65-03f8b1271afb] Checking state {{(pid=62519) _get_power_state /opt/stack/nova/nova/compute/manager.py:1799}} [ 1697.622734] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7ad60c6-9da9-4509-a7a5-7643d21fe863 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1697.666531] env[62519]: DEBUG oslo_vmware.api [None req-82cc34c3-57f7-4d42-986e-90dfdcc61974 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Task: {'id': task-1802729, 'name': PowerOnVM_Task, 'duration_secs': 0.769536} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1697.666847] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-82cc34c3-57f7-4d42-986e-90dfdcc61974 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] [instance: 54a8aa34-1595-4494-ba68-6915611631ce] Powered on the VM {{(pid=62519) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1697.667136] env[62519]: INFO nova.compute.manager [None req-82cc34c3-57f7-4d42-986e-90dfdcc61974 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] [instance: 54a8aa34-1595-4494-ba68-6915611631ce] Took 17.41 seconds to spawn the instance on the hypervisor. 
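Across the two spawns traced here (instance 54a8aa34-1595-4494-ba68-6915611631ce from image b36eba9a-e231-4006-9afd-ecb0aaa21542, and instance 88f9351c-253b-49dd-a88e-b8585ea742ac from image 15793716-f1d9-4a86-9030-717adf498693), the log shows the same sequence: copy the cached VMDK out of devstack-image-cache_base, extend or reconfigure the disk, attach it to the VM, and power the VM on. The sketch below strings those steps together in that order; the _start_task and wait_for_task helpers are print-only stubs standing in for the vm_util/volumeops calls named in the log, not their real signatures.

    def _start_task(name: str) -> str:
        # Stub: a real driver would invoke the corresponding vCenter task here.
        print(f"Invoking {name}")
        return f"task:{name}"


    def wait_for_task(task: str) -> None:
        # Stub: a real driver would poll the task as in the sketch further above.
        print(f"Task {task!r} completed successfully.")


    def spawn_from_image_cache(instance_uuid: str, image_id: str,
                               datastore: str = "datastore1") -> None:
        """Orders the spawn steps the surrounding log entries show."""
        cache_vmdk = (f"[{datastore}] devstack-image-cache_base/"
                      f"{image_id}/{image_id}.vmdk")
        instance_vmdk = f"[{datastore}] {instance_uuid}/{instance_uuid}.vmdk"

        # 1. CopyVirtualDisk_Task: copy the cached image into the instance dir.
        wait_for_task(_start_task(f"CopyVirtualDisk {cache_vmdk} -> {instance_vmdk}"))
        # 2. ExtendVirtualDisk_Task: grow the root disk (seen for the sparse
        #    spawn in this log; the stream-optimized spawn goes straight on).
        wait_for_task(_start_task(f"ExtendVirtualDisk {instance_vmdk}"))
        # 3. ReconfigVM_Task: attach the disk (volumeops.attach_disk_to_vm).
        wait_for_task(_start_task(f"ReconfigVM attach {instance_vmdk}"))
        # 4. PowerOnVM_Task: power the instance on; Nova then logs how long the
        #    spawn took on the hypervisor.
        wait_for_task(_start_task("PowerOnVM"))


    if __name__ == "__main__":
        spawn_from_image_cache("88f9351c-253b-49dd-a88e-b8585ea742ac",
                               "15793716-f1d9-4a86-9030-717adf498693")

Each numbered step corresponds to one task family visible in the log; in the real driver every one of them goes through the same polling loop sketched earlier rather than these print-only stubs.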
[ 1697.667359] env[62519]: DEBUG nova.compute.manager [None req-82cc34c3-57f7-4d42-986e-90dfdcc61974 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] [instance: 54a8aa34-1595-4494-ba68-6915611631ce] Checking state {{(pid=62519) _get_power_state /opt/stack/nova/nova/compute/manager.py:1799}} [ 1697.668697] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d777fd8a-90b4-4da9-afba-f9cf0395c17b {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1697.687016] env[62519]: DEBUG nova.network.neutron [None req-7c5992d4-cb16-49ff-bdd0-3edf419b5b38 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] [instance: 67424299-f100-49a1-ab73-0407b60a2d9f] Successfully created port: ccc74c83-e994-4ae0-b4c0-92ba09a53009 {{(pid=62519) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1697.754228] env[62519]: DEBUG oslo_vmware.api [None req-9fb41100-265a-40a3-9269-3fe1f1b74f74 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Task: {'id': task-1802730, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.633433} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1697.754603] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-9fb41100-265a-40a3-9269-3fe1f1b74f74 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk to [datastore1] 88f9351c-253b-49dd-a88e-b8585ea742ac/88f9351c-253b-49dd-a88e-b8585ea742ac.vmdk {{(pid=62519) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1697.754848] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-9fb41100-265a-40a3-9269-3fe1f1b74f74 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] [instance: 88f9351c-253b-49dd-a88e-b8585ea742ac] Extending root virtual disk to 1048576 {{(pid=62519) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1697.755114] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-d9157ad1-4912-4ab8-808c-5d2ed3923cf7 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1697.766912] env[62519]: DEBUG oslo_vmware.api [None req-9fb41100-265a-40a3-9269-3fe1f1b74f74 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Waiting for the task: (returnval){ [ 1697.766912] env[62519]: value = "task-1802733" [ 1697.766912] env[62519]: _type = "Task" [ 1697.766912] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1697.778257] env[62519]: DEBUG oslo_vmware.api [None req-9fb41100-265a-40a3-9269-3fe1f1b74f74 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Task: {'id': task-1802733, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1697.912715] env[62519]: DEBUG oslo_vmware.api [None req-19e91ec7-750b-41f0-83be-2b922f225694 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Task: {'id': task-1802732, 'name': PowerOffVM_Task, 'duration_secs': 0.263858} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1697.913046] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-19e91ec7-750b-41f0-83be-2b922f225694 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] [instance: 1cf285cf-8b4c-4872-b179-72e38c0143e0] Powered off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1697.913252] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-19e91ec7-750b-41f0-83be-2b922f225694 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] [instance: 1cf285cf-8b4c-4872-b179-72e38c0143e0] Unregistering the VM {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1697.913517] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-7438b32b-6945-4c67-aa91-c7652f64aa5d {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1698.047601] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a88cd53-bfdb-4abc-b3e3-7a3a6c7ba61c {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1698.058484] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-19e91ec7-750b-41f0-83be-2b922f225694 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] [instance: 1cf285cf-8b4c-4872-b179-72e38c0143e0] Unregistered the VM {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1698.058637] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-19e91ec7-750b-41f0-83be-2b922f225694 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] [instance: 1cf285cf-8b4c-4872-b179-72e38c0143e0] Deleting contents of the VM from datastore datastore1 {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1698.058823] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-19e91ec7-750b-41f0-83be-2b922f225694 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Deleting the datastore file [datastore1] 1cf285cf-8b4c-4872-b179-72e38c0143e0 {{(pid=62519) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1698.059764] env[62519]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-56df8379-df1b-45e6-a409-e074a8bdfafe {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1698.063126] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02736d5c-6d7f-4054-b867-165bc6ca1965 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1698.108175] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e70303c-2862-40f3-8236-ca3f4553dfa0 {{(pid=62519) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1698.114049] env[62519]: DEBUG oslo_vmware.api [None req-19e91ec7-750b-41f0-83be-2b922f225694 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Waiting for the task: (returnval){ [ 1698.114049] env[62519]: value = "task-1802735" [ 1698.114049] env[62519]: _type = "Task" [ 1698.114049] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1698.119945] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5cdc3312-6105-44f1-a8b9-7a228edd4530 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1698.128720] env[62519]: DEBUG oslo_vmware.api [None req-19e91ec7-750b-41f0-83be-2b922f225694 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Task: {'id': task-1802735, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1698.141679] env[62519]: DEBUG nova.compute.provider_tree [None req-7c5992d4-cb16-49ff-bdd0-3edf419b5b38 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] Inventory has not changed in ProviderTree for provider: f8ca0d98-9158-4b85-ae0e-b106f966dd44 {{(pid=62519) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1698.143284] env[62519]: INFO nova.compute.manager [None req-da318ded-1134-4cd5-93b1-e00afd02b3d1 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] [instance: 40507d8c-8f30-45d4-9c65-03f8b1271afb] Shelve offloading [ 1698.190580] env[62519]: INFO nova.compute.manager [None req-82cc34c3-57f7-4d42-986e-90dfdcc61974 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] [instance: 54a8aa34-1595-4494-ba68-6915611631ce] Took 54.75 seconds to build instance. [ 1698.278179] env[62519]: DEBUG oslo_vmware.api [None req-9fb41100-265a-40a3-9269-3fe1f1b74f74 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Task: {'id': task-1802733, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.290197} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1698.278462] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-9fb41100-265a-40a3-9269-3fe1f1b74f74 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] [instance: 88f9351c-253b-49dd-a88e-b8585ea742ac] Extended root virtual disk {{(pid=62519) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1698.279840] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9781d802-0904-4f34-9603-cf259ebef722 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1698.310464] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-9fb41100-265a-40a3-9269-3fe1f1b74f74 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] [instance: 88f9351c-253b-49dd-a88e-b8585ea742ac] Reconfiguring VM instance instance-00000047 to attach disk [datastore1] 88f9351c-253b-49dd-a88e-b8585ea742ac/88f9351c-253b-49dd-a88e-b8585ea742ac.vmdk or device None with type sparse {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1698.310740] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8d16078c-9d02-4b1f-a38c-58dcca2f72c7 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1698.334876] env[62519]: DEBUG oslo_vmware.api [None req-9fb41100-265a-40a3-9269-3fe1f1b74f74 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Waiting for the task: (returnval){ [ 1698.334876] env[62519]: value = "task-1802736" [ 1698.334876] env[62519]: _type = "Task" [ 1698.334876] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1698.343400] env[62519]: DEBUG oslo_vmware.api [None req-9fb41100-265a-40a3-9269-3fe1f1b74f74 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Task: {'id': task-1802736, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1698.614749] env[62519]: DEBUG nova.compute.manager [None req-7c5992d4-cb16-49ff-bdd0-3edf419b5b38 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] [instance: 67424299-f100-49a1-ab73-0407b60a2d9f] Start spawning the instance on the hypervisor. {{(pid=62519) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2647}} [ 1698.627531] env[62519]: DEBUG oslo_vmware.api [None req-19e91ec7-750b-41f0-83be-2b922f225694 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Task: {'id': task-1802735, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.1477} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1698.628070] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-19e91ec7-750b-41f0-83be-2b922f225694 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Deleted the datastore file {{(pid=62519) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1698.629137] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-19e91ec7-750b-41f0-83be-2b922f225694 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] [instance: 1cf285cf-8b4c-4872-b179-72e38c0143e0] Deleted contents of the VM from datastore datastore1 {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1698.629497] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-19e91ec7-750b-41f0-83be-2b922f225694 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] [instance: 1cf285cf-8b4c-4872-b179-72e38c0143e0] Instance destroyed {{(pid=62519) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1698.629874] env[62519]: INFO nova.compute.manager [None req-19e91ec7-750b-41f0-83be-2b922f225694 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] [instance: 1cf285cf-8b4c-4872-b179-72e38c0143e0] Took 1.26 seconds to destroy the instance on the hypervisor. [ 1698.630161] env[62519]: DEBUG oslo.service.loopingcall [None req-19e91ec7-750b-41f0-83be-2b922f225694 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62519) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1698.630360] env[62519]: DEBUG nova.compute.manager [-] [instance: 1cf285cf-8b4c-4872-b179-72e38c0143e0] Deallocating network for instance {{(pid=62519) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2297}} [ 1698.630456] env[62519]: DEBUG nova.network.neutron [-] [instance: 1cf285cf-8b4c-4872-b179-72e38c0143e0] deallocate_for_instance() {{(pid=62519) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1698.647090] env[62519]: DEBUG nova.scheduler.client.report [None req-7c5992d4-cb16-49ff-bdd0-3edf419b5b38 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] Inventory has not changed for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 157, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1698.652370] env[62519]: DEBUG nova.virt.hardware [None req-7c5992d4-cb16-49ff-bdd0-3edf419b5b38 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T07:56:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and 
image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T07:55:55Z,direct_url=,disk_format='vmdk',id=15793716-f1d9-4a86-9030-717adf498693,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4069337684d348e0a1ab4eb6a1a2b14d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T07:55:56Z,virtual_size=,visibility=), allow threads: False {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1698.652589] env[62519]: DEBUG nova.virt.hardware [None req-7c5992d4-cb16-49ff-bdd0-3edf419b5b38 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] Flavor limits 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1698.652741] env[62519]: DEBUG nova.virt.hardware [None req-7c5992d4-cb16-49ff-bdd0-3edf419b5b38 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] Image limits 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1698.652928] env[62519]: DEBUG nova.virt.hardware [None req-7c5992d4-cb16-49ff-bdd0-3edf419b5b38 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] Flavor pref 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1698.653072] env[62519]: DEBUG nova.virt.hardware [None req-7c5992d4-cb16-49ff-bdd0-3edf419b5b38 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] Image pref 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1698.653222] env[62519]: DEBUG nova.virt.hardware [None req-7c5992d4-cb16-49ff-bdd0-3edf419b5b38 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1698.653429] env[62519]: DEBUG nova.virt.hardware [None req-7c5992d4-cb16-49ff-bdd0-3edf419b5b38 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1698.653588] env[62519]: DEBUG nova.virt.hardware [None req-7c5992d4-cb16-49ff-bdd0-3edf419b5b38 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62519) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1698.653750] env[62519]: DEBUG nova.virt.hardware [None req-7c5992d4-cb16-49ff-bdd0-3edf419b5b38 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] Got 1 possible topologies {{(pid=62519) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1698.653903] env[62519]: DEBUG nova.virt.hardware [None req-7c5992d4-cb16-49ff-bdd0-3edf419b5b38 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1698.654140] env[62519]: 
DEBUG nova.virt.hardware [None req-7c5992d4-cb16-49ff-bdd0-3edf419b5b38 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1698.654873] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-da318ded-1134-4cd5-93b1-e00afd02b3d1 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] [instance: 40507d8c-8f30-45d4-9c65-03f8b1271afb] Powering off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1698.656760] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af26548d-2264-4ebf-8ea3-415e84b3f938 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1698.659969] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e5826ab7-11bd-4632-956a-be335dc3d7e6 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1698.674433] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c73d43d1-f612-46a7-9a1f-8e794da1d395 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1698.679734] env[62519]: DEBUG oslo_vmware.api [None req-da318ded-1134-4cd5-93b1-e00afd02b3d1 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Waiting for the task: (returnval){ [ 1698.679734] env[62519]: value = "task-1802737" [ 1698.679734] env[62519]: _type = "Task" [ 1698.679734] env[62519]: } to complete. 
{{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1698.701792] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-da318ded-1134-4cd5-93b1-e00afd02b3d1 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] [instance: 40507d8c-8f30-45d4-9c65-03f8b1271afb] VM already powered off {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1698.702056] env[62519]: DEBUG nova.compute.manager [None req-da318ded-1134-4cd5-93b1-e00afd02b3d1 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] [instance: 40507d8c-8f30-45d4-9c65-03f8b1271afb] Checking state {{(pid=62519) _get_power_state /opt/stack/nova/nova/compute/manager.py:1799}} [ 1698.702834] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00eaafb3-f399-4d92-bb9e-e41cc599f550 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1698.712123] env[62519]: DEBUG oslo_concurrency.lockutils [None req-da318ded-1134-4cd5-93b1-e00afd02b3d1 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Acquiring lock "refresh_cache-40507d8c-8f30-45d4-9c65-03f8b1271afb" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1698.712302] env[62519]: DEBUG oslo_concurrency.lockutils [None req-da318ded-1134-4cd5-93b1-e00afd02b3d1 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Acquired lock "refresh_cache-40507d8c-8f30-45d4-9c65-03f8b1271afb" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1698.712474] env[62519]: DEBUG nova.network.neutron [None req-da318ded-1134-4cd5-93b1-e00afd02b3d1 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] [instance: 40507d8c-8f30-45d4-9c65-03f8b1271afb] Building network info cache for instance {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1698.847994] env[62519]: DEBUG oslo_vmware.api [None req-9fb41100-265a-40a3-9269-3fe1f1b74f74 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Task: {'id': task-1802736, 'name': ReconfigVM_Task, 'duration_secs': 0.291506} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1698.848592] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-9fb41100-265a-40a3-9269-3fe1f1b74f74 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] [instance: 88f9351c-253b-49dd-a88e-b8585ea742ac] Reconfigured VM instance instance-00000047 to attach disk [datastore1] 88f9351c-253b-49dd-a88e-b8585ea742ac/88f9351c-253b-49dd-a88e-b8585ea742ac.vmdk or device None with type sparse {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1698.850076] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-9c6bf282-1c01-470c-ba96-b37ea2506c18 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1698.861658] env[62519]: DEBUG oslo_vmware.api [None req-9fb41100-265a-40a3-9269-3fe1f1b74f74 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Waiting for the task: (returnval){ [ 1698.861658] env[62519]: value = "task-1802738" [ 1698.861658] env[62519]: _type = "Task" [ 1698.861658] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1698.874011] env[62519]: DEBUG oslo_vmware.api [None req-9fb41100-265a-40a3-9269-3fe1f1b74f74 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Task: {'id': task-1802738, 'name': Rename_Task} progress is 5%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1698.977240] env[62519]: DEBUG nova.compute.manager [req-baa9e00a-ba3b-4bb6-8e57-0903b0000bf7 req-e0b162fe-e7e1-44fe-b215-b4dc9cba2ab1 service nova] [instance: 1cf285cf-8b4c-4872-b179-72e38c0143e0] Received event network-vif-deleted-fdafb1fb-4c93-4644-a5a6-de52b8a4fbf6 {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1698.977463] env[62519]: INFO nova.compute.manager [req-baa9e00a-ba3b-4bb6-8e57-0903b0000bf7 req-e0b162fe-e7e1-44fe-b215-b4dc9cba2ab1 service nova] [instance: 1cf285cf-8b4c-4872-b179-72e38c0143e0] Neutron deleted interface fdafb1fb-4c93-4644-a5a6-de52b8a4fbf6; detaching it from the instance and deleting it from the info cache [ 1698.977731] env[62519]: DEBUG nova.network.neutron [req-baa9e00a-ba3b-4bb6-8e57-0903b0000bf7 req-e0b162fe-e7e1-44fe-b215-b4dc9cba2ab1 service nova] [instance: 1cf285cf-8b4c-4872-b179-72e38c0143e0] Updating instance_info_cache with network_info: [] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1699.088022] env[62519]: DEBUG oslo_concurrency.lockutils [None req-7126d37a-0998-4413-bff4-8ca1a98f18aa tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Acquiring lock "54a8aa34-1595-4494-ba68-6915611631ce" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1699.163430] env[62519]: DEBUG oslo_concurrency.lockutils [None req-7c5992d4-cb16-49ff-bdd0-3edf419b5b38 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.582s {{(pid=62519) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1699.163430] env[62519]: DEBUG nova.compute.manager [None req-7c5992d4-cb16-49ff-bdd0-3edf419b5b38 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] [instance: 83a680b9-0c2d-4231-9ddf-9aa90209c620] Start building networks asynchronously for instance. {{(pid=62519) _build_resources /opt/stack/nova/nova/compute/manager.py:2838}} [ 1699.165278] env[62519]: DEBUG oslo_concurrency.lockutils [None req-952776ea-e15a-49a8-9654-d3d8b8b5d6b1 tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 33.262s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1699.165472] env[62519]: DEBUG nova.objects.instance [None req-952776ea-e15a-49a8-9654-d3d8b8b5d6b1 tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Lazy-loading 'resources' on Instance uuid f59a31e4-7fb9-4de7-b35f-da811a305f85 {{(pid=62519) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1699.264979] env[62519]: DEBUG nova.network.neutron [None req-7c5992d4-cb16-49ff-bdd0-3edf419b5b38 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] [instance: 67424299-f100-49a1-ab73-0407b60a2d9f] Successfully updated port: ccc74c83-e994-4ae0-b4c0-92ba09a53009 {{(pid=62519) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1699.374954] env[62519]: DEBUG oslo_vmware.api [None req-9fb41100-265a-40a3-9269-3fe1f1b74f74 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Task: {'id': task-1802738, 'name': Rename_Task, 'duration_secs': 0.165597} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1699.375677] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-9fb41100-265a-40a3-9269-3fe1f1b74f74 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] [instance: 88f9351c-253b-49dd-a88e-b8585ea742ac] Powering on the VM {{(pid=62519) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1699.376076] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-84d08153-35ca-4f60-a668-d2fdfc828e55 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1699.387485] env[62519]: DEBUG oslo_vmware.api [None req-9fb41100-265a-40a3-9269-3fe1f1b74f74 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Waiting for the task: (returnval){ [ 1699.387485] env[62519]: value = "task-1802739" [ 1699.387485] env[62519]: _type = "Task" [ 1699.387485] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1699.399782] env[62519]: DEBUG oslo_vmware.api [None req-9fb41100-265a-40a3-9269-3fe1f1b74f74 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Task: {'id': task-1802739, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1699.451298] env[62519]: DEBUG nova.network.neutron [-] [instance: 1cf285cf-8b4c-4872-b179-72e38c0143e0] Updating instance_info_cache with network_info: [] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1699.472951] env[62519]: DEBUG nova.network.neutron [None req-da318ded-1134-4cd5-93b1-e00afd02b3d1 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] [instance: 40507d8c-8f30-45d4-9c65-03f8b1271afb] Updating instance_info_cache with network_info: [{"id": "3d7bc0c1-8f36-42c7-b25f-89e8c729e3a3", "address": "fa:16:3e:5d:4c:fc", "network": {"id": "f9454e9e-4102-407f-acc0-7afee44ec876", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-684678313-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "683d60927fdf424386ffcfaa344a7af6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "11b669be-fb26-4ef8-bdb6-c77ab9d06daf", "external-id": "nsx-vlan-transportzone-633", "segmentation_id": 633, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3d7bc0c1-8f", "ovs_interfaceid": "3d7bc0c1-8f36-42c7-b25f-89e8c729e3a3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1699.480592] env[62519]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-49da5c21-045b-4289-be82-4d65637516a6 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1699.495234] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1640927b-c098-47f2-bea2-a4ca406affad {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1699.534427] env[62519]: DEBUG nova.compute.manager [req-baa9e00a-ba3b-4bb6-8e57-0903b0000bf7 req-e0b162fe-e7e1-44fe-b215-b4dc9cba2ab1 service nova] [instance: 1cf285cf-8b4c-4872-b179-72e38c0143e0] Detach interface failed, port_id=fdafb1fb-4c93-4644-a5a6-de52b8a4fbf6, reason: Instance 1cf285cf-8b4c-4872-b179-72e38c0143e0 could not be found. {{(pid=62519) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11455}} [ 1699.669051] env[62519]: DEBUG nova.compute.utils [None req-7c5992d4-cb16-49ff-bdd0-3edf419b5b38 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] Using /dev/sd instead of None {{(pid=62519) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1699.670726] env[62519]: DEBUG nova.compute.manager [None req-7c5992d4-cb16-49ff-bdd0-3edf419b5b38 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] [instance: 83a680b9-0c2d-4231-9ddf-9aa90209c620] Allocating IP information in the background. 
{{(pid=62519) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1989}} [ 1699.670918] env[62519]: DEBUG nova.network.neutron [None req-7c5992d4-cb16-49ff-bdd0-3edf419b5b38 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] [instance: 83a680b9-0c2d-4231-9ddf-9aa90209c620] allocate_for_instance() {{(pid=62519) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1699.697570] env[62519]: DEBUG oslo_concurrency.lockutils [None req-82cc34c3-57f7-4d42-986e-90dfdcc61974 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Lock "54a8aa34-1595-4494-ba68-6915611631ce" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 57.268s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1699.698599] env[62519]: DEBUG oslo_concurrency.lockutils [None req-7126d37a-0998-4413-bff4-8ca1a98f18aa tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Lock "54a8aa34-1595-4494-ba68-6915611631ce" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.610s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1699.698599] env[62519]: DEBUG oslo_concurrency.lockutils [None req-7126d37a-0998-4413-bff4-8ca1a98f18aa tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Acquiring lock "54a8aa34-1595-4494-ba68-6915611631ce-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1699.698599] env[62519]: DEBUG oslo_concurrency.lockutils [None req-7126d37a-0998-4413-bff4-8ca1a98f18aa tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Lock "54a8aa34-1595-4494-ba68-6915611631ce-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1699.698599] env[62519]: DEBUG oslo_concurrency.lockutils [None req-7126d37a-0998-4413-bff4-8ca1a98f18aa tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Lock "54a8aa34-1595-4494-ba68-6915611631ce-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1699.706026] env[62519]: INFO nova.compute.manager [None req-7126d37a-0998-4413-bff4-8ca1a98f18aa tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] [instance: 54a8aa34-1595-4494-ba68-6915611631ce] Terminating instance [ 1699.717492] env[62519]: DEBUG nova.policy [None req-7c5992d4-cb16-49ff-bdd0-3edf419b5b38 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '8e80a7ce112e4087af60e7de4d915118', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f1e17b34cbef497985ff79e77d5b0f89', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': 
None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62519) authorize /opt/stack/nova/nova/policy.py:192}} [ 1699.771357] env[62519]: DEBUG oslo_concurrency.lockutils [None req-7c5992d4-cb16-49ff-bdd0-3edf419b5b38 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] Acquiring lock "refresh_cache-67424299-f100-49a1-ab73-0407b60a2d9f" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1699.771357] env[62519]: DEBUG oslo_concurrency.lockutils [None req-7c5992d4-cb16-49ff-bdd0-3edf419b5b38 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] Acquired lock "refresh_cache-67424299-f100-49a1-ab73-0407b60a2d9f" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1699.771357] env[62519]: DEBUG nova.network.neutron [None req-7c5992d4-cb16-49ff-bdd0-3edf419b5b38 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] [instance: 67424299-f100-49a1-ab73-0407b60a2d9f] Building network info cache for instance {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1699.899653] env[62519]: DEBUG oslo_vmware.api [None req-9fb41100-265a-40a3-9269-3fe1f1b74f74 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Task: {'id': task-1802739, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1699.955294] env[62519]: INFO nova.compute.manager [-] [instance: 1cf285cf-8b4c-4872-b179-72e38c0143e0] Took 1.32 seconds to deallocate network for instance. [ 1699.975534] env[62519]: DEBUG oslo_concurrency.lockutils [None req-da318ded-1134-4cd5-93b1-e00afd02b3d1 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Releasing lock "refresh_cache-40507d8c-8f30-45d4-9c65-03f8b1271afb" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1700.057896] env[62519]: DEBUG nova.network.neutron [None req-7c5992d4-cb16-49ff-bdd0-3edf419b5b38 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] [instance: 83a680b9-0c2d-4231-9ddf-9aa90209c620] Successfully created port: eac73fb2-d38f-49e3-939e-36903535dfeb {{(pid=62519) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1700.161667] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42a49d52-3749-45f2-8918-d10e6408fd15 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1700.170777] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7672f851-c180-4e17-93dc-fde51798d816 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1700.177675] env[62519]: DEBUG nova.compute.manager [None req-7c5992d4-cb16-49ff-bdd0-3edf419b5b38 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] [instance: 83a680b9-0c2d-4231-9ddf-9aa90209c620] Start building block device mappings for instance. 
{{(pid=62519) _build_resources /opt/stack/nova/nova/compute/manager.py:2873}} [ 1700.208246] env[62519]: DEBUG nova.compute.manager [None req-7126d37a-0998-4413-bff4-8ca1a98f18aa tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] [instance: 54a8aa34-1595-4494-ba68-6915611631ce] Start destroying the instance on the hypervisor. {{(pid=62519) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3166}} [ 1700.208470] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-7126d37a-0998-4413-bff4-8ca1a98f18aa tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] [instance: 54a8aa34-1595-4494-ba68-6915611631ce] Destroying instance {{(pid=62519) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1700.210474] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee4fafc5-9d14-462c-b7ec-ec61e3797b8c {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1700.214109] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ffd9e16-6ea0-4165-bb35-5afe4e09bf3c {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1700.225143] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-961dbebb-826d-4b0f-8df0-925937a8021a {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1700.229124] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-7126d37a-0998-4413-bff4-8ca1a98f18aa tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] [instance: 54a8aa34-1595-4494-ba68-6915611631ce] Powering off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1700.229859] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-690b666d-6113-4c96-96af-14b93659a9c2 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1700.242440] env[62519]: DEBUG nova.compute.provider_tree [None req-952776ea-e15a-49a8-9654-d3d8b8b5d6b1 tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Inventory has not changed in ProviderTree for provider: f8ca0d98-9158-4b85-ae0e-b106f966dd44 {{(pid=62519) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1700.245088] env[62519]: DEBUG oslo_vmware.api [None req-7126d37a-0998-4413-bff4-8ca1a98f18aa tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Waiting for the task: (returnval){ [ 1700.245088] env[62519]: value = "task-1802740" [ 1700.245088] env[62519]: _type = "Task" [ 1700.245088] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1700.254624] env[62519]: DEBUG oslo_vmware.api [None req-7126d37a-0998-4413-bff4-8ca1a98f18aa tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Task: {'id': task-1802740, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1700.401503] env[62519]: DEBUG oslo_vmware.api [None req-9fb41100-265a-40a3-9269-3fe1f1b74f74 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Task: {'id': task-1802739, 'name': PowerOnVM_Task, 'duration_secs': 0.703549} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1700.401803] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-9fb41100-265a-40a3-9269-3fe1f1b74f74 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] [instance: 88f9351c-253b-49dd-a88e-b8585ea742ac] Powered on the VM {{(pid=62519) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1700.402012] env[62519]: INFO nova.compute.manager [None req-9fb41100-265a-40a3-9269-3fe1f1b74f74 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] [instance: 88f9351c-253b-49dd-a88e-b8585ea742ac] Took 10.83 seconds to spawn the instance on the hypervisor. [ 1700.402215] env[62519]: DEBUG nova.compute.manager [None req-9fb41100-265a-40a3-9269-3fe1f1b74f74 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] [instance: 88f9351c-253b-49dd-a88e-b8585ea742ac] Checking state {{(pid=62519) _get_power_state /opt/stack/nova/nova/compute/manager.py:1799}} [ 1700.403008] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e69a21b1-262a-4dd6-9a8f-b9883448db76 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1700.467737] env[62519]: DEBUG oslo_concurrency.lockutils [None req-19e91ec7-750b-41f0-83be-2b922f225694 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1700.521605] env[62519]: DEBUG nova.network.neutron [None req-7c5992d4-cb16-49ff-bdd0-3edf419b5b38 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] [instance: 67424299-f100-49a1-ab73-0407b60a2d9f] Instance cache missing network info. 
{{(pid=62519) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1700.749019] env[62519]: DEBUG nova.scheduler.client.report [None req-952776ea-e15a-49a8-9654-d3d8b8b5d6b1 tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Inventory has not changed for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 157, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1700.762550] env[62519]: DEBUG oslo_vmware.api [None req-7126d37a-0998-4413-bff4-8ca1a98f18aa tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Task: {'id': task-1802740, 'name': PowerOffVM_Task, 'duration_secs': 0.185965} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1700.762976] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-7126d37a-0998-4413-bff4-8ca1a98f18aa tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] [instance: 54a8aa34-1595-4494-ba68-6915611631ce] Powered off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1700.763257] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-7126d37a-0998-4413-bff4-8ca1a98f18aa tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] [instance: 54a8aa34-1595-4494-ba68-6915611631ce] Unregistering the VM {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1700.763597] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-69e334df-dcd8-48b0-9c19-94ac51172cee {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1700.926074] env[62519]: INFO nova.compute.manager [None req-9fb41100-265a-40a3-9269-3fe1f1b74f74 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] [instance: 88f9351c-253b-49dd-a88e-b8585ea742ac] Took 45.37 seconds to build instance. 
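The recurring "Waiting for the task: (returnval){ value = "task-..." } to complete" / "Task: {'id': ..., 'name': ...} progress is N%" / "completed successfully" entries throughout this trace are produced by oslo.vmware's task-polling loop (wait_for_task/_poll_task in oslo_vmware/api.py). The sketch below shows, in hedged form, how a caller typically drives that pattern with a VMwareAPISession; the vCenter host, credentials, retry/poll settings, and the VM managed-object reference are placeholder assumptions for illustration and are not taken from this log.

from oslo_vmware import api as vmware_api
from oslo_vmware import vim_util

# Placeholder connection details (assumptions, not values from this log).
# Constructing the session logs in to vCenter, so this only runs against a
# reachable endpoint.
session = vmware_api.VMwareAPISession(
    'vc.example.org', 'administrator@vsphere.local', 'secret',
    api_retry_count=10, task_poll_interval=0.5)

# Managed-object reference for an existing VM (the 'vm-123' value is a
# placeholder).
vm_ref = vim_util.get_moref('vm-123', 'VirtualMachine')

# invoke_api() issues the SOAP call (logged above as "Invoking
# VirtualMachine.PowerOnVM_Task with opID=...") and returns a task reference;
# wait_for_task() then polls it, emitting the "_poll_task ... progress is N%"
# DEBUG lines until the task finishes successfully or raises on failure.
task_ref = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
task_info = session.wait_for_task(task_ref)

Nova's vmwareapi driver wraps this same loop in its vm_util/vmops helpers, which is why each power-on, reconfigure, rename, and datastore-file deletion in this log appears as an "Invoking ..." request followed by one or more progress polls and a "completed successfully" entry.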
[ 1700.934655] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-7126d37a-0998-4413-bff4-8ca1a98f18aa tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] [instance: 54a8aa34-1595-4494-ba68-6915611631ce] Unregistered the VM {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1700.934655] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-7126d37a-0998-4413-bff4-8ca1a98f18aa tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] [instance: 54a8aa34-1595-4494-ba68-6915611631ce] Deleting contents of the VM from datastore datastore1 {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1700.934655] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-7126d37a-0998-4413-bff4-8ca1a98f18aa tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Deleting the datastore file [datastore1] 54a8aa34-1595-4494-ba68-6915611631ce {{(pid=62519) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1700.934655] env[62519]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-77e33f9e-07eb-4a1d-a83a-87539eb14fb5 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1700.947175] env[62519]: DEBUG oslo_vmware.api [None req-7126d37a-0998-4413-bff4-8ca1a98f18aa tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Waiting for the task: (returnval){ [ 1700.947175] env[62519]: value = "task-1802742" [ 1700.947175] env[62519]: _type = "Task" [ 1700.947175] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1700.954065] env[62519]: DEBUG nova.network.neutron [None req-7c5992d4-cb16-49ff-bdd0-3edf419b5b38 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] [instance: 67424299-f100-49a1-ab73-0407b60a2d9f] Updating instance_info_cache with network_info: [{"id": "ccc74c83-e994-4ae0-b4c0-92ba09a53009", "address": "fa:16:3e:d7:19:9c", "network": {"id": "df66fb3a-d33e-4ba9-a3b0-cea73cd95302", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-944542718-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f1e17b34cbef497985ff79e77d5b0f89", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dd7d0d95-6848-4e69-ac21-75f8db82a3b5", "external-id": "nsx-vlan-transportzone-272", "segmentation_id": 272, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapccc74c83-e9", "ovs_interfaceid": "ccc74c83-e994-4ae0-b4c0-92ba09a53009", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1700.962221] env[62519]: DEBUG oslo_vmware.api [None req-7126d37a-0998-4413-bff4-8ca1a98f18aa tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Task: 
{'id': task-1802742, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1700.977247] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-da318ded-1134-4cd5-93b1-e00afd02b3d1 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] [instance: 40507d8c-8f30-45d4-9c65-03f8b1271afb] Destroying instance {{(pid=62519) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1700.977247] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2eb4c37c-1f72-4d13-bfe1-15ae1fe1fd9a {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1700.986172] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-da318ded-1134-4cd5-93b1-e00afd02b3d1 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] [instance: 40507d8c-8f30-45d4-9c65-03f8b1271afb] Unregistering the VM {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1700.986172] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-fbe8f987-cb74-45d7-9655-f19c92438fe5 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1701.061283] env[62519]: DEBUG nova.compute.manager [req-a6b8bb68-7206-4028-a9ed-346238f38eba req-edb9c5e2-af84-4b15-b280-c27b836c3e0f service nova] [instance: 67424299-f100-49a1-ab73-0407b60a2d9f] Received event network-vif-plugged-ccc74c83-e994-4ae0-b4c0-92ba09a53009 {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1701.061640] env[62519]: DEBUG oslo_concurrency.lockutils [req-a6b8bb68-7206-4028-a9ed-346238f38eba req-edb9c5e2-af84-4b15-b280-c27b836c3e0f service nova] Acquiring lock "67424299-f100-49a1-ab73-0407b60a2d9f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1701.062017] env[62519]: DEBUG oslo_concurrency.lockutils [req-a6b8bb68-7206-4028-a9ed-346238f38eba req-edb9c5e2-af84-4b15-b280-c27b836c3e0f service nova] Lock "67424299-f100-49a1-ab73-0407b60a2d9f-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1701.062329] env[62519]: DEBUG oslo_concurrency.lockutils [req-a6b8bb68-7206-4028-a9ed-346238f38eba req-edb9c5e2-af84-4b15-b280-c27b836c3e0f service nova] Lock "67424299-f100-49a1-ab73-0407b60a2d9f-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1701.063786] env[62519]: DEBUG nova.compute.manager [req-a6b8bb68-7206-4028-a9ed-346238f38eba req-edb9c5e2-af84-4b15-b280-c27b836c3e0f service nova] [instance: 67424299-f100-49a1-ab73-0407b60a2d9f] No waiting events found dispatching network-vif-plugged-ccc74c83-e994-4ae0-b4c0-92ba09a53009 {{(pid=62519) pop_instance_event /opt/stack/nova/nova/compute/manager.py:323}} [ 1701.063786] env[62519]: WARNING nova.compute.manager [req-a6b8bb68-7206-4028-a9ed-346238f38eba req-edb9c5e2-af84-4b15-b280-c27b836c3e0f service nova] [instance: 
67424299-f100-49a1-ab73-0407b60a2d9f] Received unexpected event network-vif-plugged-ccc74c83-e994-4ae0-b4c0-92ba09a53009 for instance with vm_state building and task_state spawning. [ 1701.063786] env[62519]: DEBUG nova.compute.manager [req-a6b8bb68-7206-4028-a9ed-346238f38eba req-edb9c5e2-af84-4b15-b280-c27b836c3e0f service nova] [instance: 67424299-f100-49a1-ab73-0407b60a2d9f] Received event network-changed-ccc74c83-e994-4ae0-b4c0-92ba09a53009 {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1701.063786] env[62519]: DEBUG nova.compute.manager [req-a6b8bb68-7206-4028-a9ed-346238f38eba req-edb9c5e2-af84-4b15-b280-c27b836c3e0f service nova] [instance: 67424299-f100-49a1-ab73-0407b60a2d9f] Refreshing instance network info cache due to event network-changed-ccc74c83-e994-4ae0-b4c0-92ba09a53009. {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11628}} [ 1701.063786] env[62519]: DEBUG oslo_concurrency.lockutils [req-a6b8bb68-7206-4028-a9ed-346238f38eba req-edb9c5e2-af84-4b15-b280-c27b836c3e0f service nova] Acquiring lock "refresh_cache-67424299-f100-49a1-ab73-0407b60a2d9f" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1701.174261] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-da318ded-1134-4cd5-93b1-e00afd02b3d1 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] [instance: 40507d8c-8f30-45d4-9c65-03f8b1271afb] Unregistered the VM {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1701.174541] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-da318ded-1134-4cd5-93b1-e00afd02b3d1 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] [instance: 40507d8c-8f30-45d4-9c65-03f8b1271afb] Deleting contents of the VM from datastore datastore1 {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1701.176048] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-da318ded-1134-4cd5-93b1-e00afd02b3d1 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Deleting the datastore file [datastore1] 40507d8c-8f30-45d4-9c65-03f8b1271afb {{(pid=62519) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1701.176048] env[62519]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-5c35e9d3-463c-407f-ac8d-47326469eb86 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1701.185621] env[62519]: DEBUG oslo_vmware.api [None req-da318ded-1134-4cd5-93b1-e00afd02b3d1 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Waiting for the task: (returnval){ [ 1701.185621] env[62519]: value = "task-1802744" [ 1701.185621] env[62519]: _type = "Task" [ 1701.185621] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1701.198155] env[62519]: DEBUG oslo_vmware.api [None req-da318ded-1134-4cd5-93b1-e00afd02b3d1 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Task: {'id': task-1802744, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1701.215650] env[62519]: DEBUG nova.compute.manager [None req-7c5992d4-cb16-49ff-bdd0-3edf419b5b38 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] [instance: 83a680b9-0c2d-4231-9ddf-9aa90209c620] Start spawning the instance on the hypervisor. {{(pid=62519) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2647}} [ 1701.251756] env[62519]: DEBUG nova.virt.hardware [None req-7c5992d4-cb16-49ff-bdd0-3edf419b5b38 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T07:56:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T07:55:55Z,direct_url=,disk_format='vmdk',id=15793716-f1d9-4a86-9030-717adf498693,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4069337684d348e0a1ab4eb6a1a2b14d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T07:55:56Z,virtual_size=,visibility=), allow threads: False {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1701.251756] env[62519]: DEBUG nova.virt.hardware [None req-7c5992d4-cb16-49ff-bdd0-3edf419b5b38 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] Flavor limits 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1701.251971] env[62519]: DEBUG nova.virt.hardware [None req-7c5992d4-cb16-49ff-bdd0-3edf419b5b38 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] Image limits 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1701.252254] env[62519]: DEBUG nova.virt.hardware [None req-7c5992d4-cb16-49ff-bdd0-3edf419b5b38 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] Flavor pref 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1701.252465] env[62519]: DEBUG nova.virt.hardware [None req-7c5992d4-cb16-49ff-bdd0-3edf419b5b38 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] Image pref 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1701.252708] env[62519]: DEBUG nova.virt.hardware [None req-7c5992d4-cb16-49ff-bdd0-3edf419b5b38 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1701.253058] env[62519]: DEBUG nova.virt.hardware [None req-7c5992d4-cb16-49ff-bdd0-3edf419b5b38 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62519) _get_desirable_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:573}} [ 1701.253308] env[62519]: DEBUG nova.virt.hardware [None req-7c5992d4-cb16-49ff-bdd0-3edf419b5b38 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62519) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1701.253550] env[62519]: DEBUG nova.virt.hardware [None req-7c5992d4-cb16-49ff-bdd0-3edf419b5b38 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] Got 1 possible topologies {{(pid=62519) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1701.253778] env[62519]: DEBUG nova.virt.hardware [None req-7c5992d4-cb16-49ff-bdd0-3edf419b5b38 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1701.254034] env[62519]: DEBUG nova.virt.hardware [None req-7c5992d4-cb16-49ff-bdd0-3edf419b5b38 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1701.255469] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-273a14b7-f2dd-4ab1-8148-87c90bac9e96 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1701.260496] env[62519]: DEBUG oslo_concurrency.lockutils [None req-952776ea-e15a-49a8-9654-d3d8b8b5d6b1 tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.095s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1701.264500] env[62519]: DEBUG oslo_concurrency.lockutils [None req-1ed07ba8-f069-4974-ad77-6c6cc84b3f48 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 32.991s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1701.274943] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3253590-7820-45cc-95ac-0563c6d10a92 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1701.298206] env[62519]: INFO nova.scheduler.client.report [None req-952776ea-e15a-49a8-9654-d3d8b8b5d6b1 tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Deleted allocations for instance f59a31e4-7fb9-4de7-b35f-da811a305f85 [ 1701.458433] env[62519]: DEBUG oslo_vmware.api [None req-7126d37a-0998-4413-bff4-8ca1a98f18aa tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Task: {'id': task-1802742, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.389502} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1701.458485] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-7126d37a-0998-4413-bff4-8ca1a98f18aa tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Deleted the datastore file {{(pid=62519) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1701.458691] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-7126d37a-0998-4413-bff4-8ca1a98f18aa tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] [instance: 54a8aa34-1595-4494-ba68-6915611631ce] Deleted contents of the VM from datastore datastore1 {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1701.458869] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-7126d37a-0998-4413-bff4-8ca1a98f18aa tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] [instance: 54a8aa34-1595-4494-ba68-6915611631ce] Instance destroyed {{(pid=62519) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1701.459084] env[62519]: INFO nova.compute.manager [None req-7126d37a-0998-4413-bff4-8ca1a98f18aa tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] [instance: 54a8aa34-1595-4494-ba68-6915611631ce] Took 1.25 seconds to destroy the instance on the hypervisor. [ 1701.460202] env[62519]: DEBUG oslo.service.loopingcall [None req-7126d37a-0998-4413-bff4-8ca1a98f18aa tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62519) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1701.460411] env[62519]: DEBUG nova.compute.manager [-] [instance: 54a8aa34-1595-4494-ba68-6915611631ce] Deallocating network for instance {{(pid=62519) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2297}} [ 1701.460507] env[62519]: DEBUG nova.network.neutron [-] [instance: 54a8aa34-1595-4494-ba68-6915611631ce] deallocate_for_instance() {{(pid=62519) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1701.462334] env[62519]: DEBUG oslo_concurrency.lockutils [None req-7c5992d4-cb16-49ff-bdd0-3edf419b5b38 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] Releasing lock "refresh_cache-67424299-f100-49a1-ab73-0407b60a2d9f" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1701.462669] env[62519]: DEBUG nova.compute.manager [None req-7c5992d4-cb16-49ff-bdd0-3edf419b5b38 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] [instance: 67424299-f100-49a1-ab73-0407b60a2d9f] Instance network_info: |[{"id": "ccc74c83-e994-4ae0-b4c0-92ba09a53009", "address": "fa:16:3e:d7:19:9c", "network": {"id": "df66fb3a-d33e-4ba9-a3b0-cea73cd95302", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-944542718-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f1e17b34cbef497985ff79e77d5b0f89", "mtu": 8950, 
"physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dd7d0d95-6848-4e69-ac21-75f8db82a3b5", "external-id": "nsx-vlan-transportzone-272", "segmentation_id": 272, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapccc74c83-e9", "ovs_interfaceid": "ccc74c83-e994-4ae0-b4c0-92ba09a53009", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62519) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2004}} [ 1701.462889] env[62519]: DEBUG oslo_concurrency.lockutils [req-a6b8bb68-7206-4028-a9ed-346238f38eba req-edb9c5e2-af84-4b15-b280-c27b836c3e0f service nova] Acquired lock "refresh_cache-67424299-f100-49a1-ab73-0407b60a2d9f" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1701.463078] env[62519]: DEBUG nova.network.neutron [req-a6b8bb68-7206-4028-a9ed-346238f38eba req-edb9c5e2-af84-4b15-b280-c27b836c3e0f service nova] [instance: 67424299-f100-49a1-ab73-0407b60a2d9f] Refreshing network info cache for port ccc74c83-e994-4ae0-b4c0-92ba09a53009 {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1701.464302] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-7c5992d4-cb16-49ff-bdd0-3edf419b5b38 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] [instance: 67424299-f100-49a1-ab73-0407b60a2d9f] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:d7:19:9c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'dd7d0d95-6848-4e69-ac21-75f8db82a3b5', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'ccc74c83-e994-4ae0-b4c0-92ba09a53009', 'vif_model': 'vmxnet3'}] {{(pid=62519) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1701.472671] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-7c5992d4-cb16-49ff-bdd0-3edf419b5b38 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] Creating folder: Project (f1e17b34cbef497985ff79e77d5b0f89). Parent ref: group-v373567. {{(pid=62519) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1701.475626] env[62519]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-eded6829-1deb-4eea-bdab-2b40cdc4c716 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1701.489021] env[62519]: INFO nova.virt.vmwareapi.vm_util [None req-7c5992d4-cb16-49ff-bdd0-3edf419b5b38 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] Created folder: Project (f1e17b34cbef497985ff79e77d5b0f89) in parent group-v373567. [ 1701.489021] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-7c5992d4-cb16-49ff-bdd0-3edf419b5b38 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] Creating folder: Instances. Parent ref: group-v373762. 
{{(pid=62519) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1701.489286] env[62519]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b68e2c66-136b-4446-89cb-94a9f34ec685 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1701.500299] env[62519]: INFO nova.virt.vmwareapi.vm_util [None req-7c5992d4-cb16-49ff-bdd0-3edf419b5b38 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] Created folder: Instances in parent group-v373762. [ 1701.500570] env[62519]: DEBUG oslo.service.loopingcall [None req-7c5992d4-cb16-49ff-bdd0-3edf419b5b38 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62519) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1701.500812] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 67424299-f100-49a1-ab73-0407b60a2d9f] Creating VM on the ESX host {{(pid=62519) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1701.501035] env[62519]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-063251dc-df0a-47f6-b3aa-50cc7fe26905 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1701.528944] env[62519]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1701.528944] env[62519]: value = "task-1802747" [ 1701.528944] env[62519]: _type = "Task" [ 1701.528944] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1701.539739] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1802747, 'name': CreateVM_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1701.697366] env[62519]: DEBUG oslo_vmware.api [None req-da318ded-1134-4cd5-93b1-e00afd02b3d1 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Task: {'id': task-1802744, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.156717} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1701.700066] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-da318ded-1134-4cd5-93b1-e00afd02b3d1 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Deleted the datastore file {{(pid=62519) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1701.700303] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-da318ded-1134-4cd5-93b1-e00afd02b3d1 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] [instance: 40507d8c-8f30-45d4-9c65-03f8b1271afb] Deleted contents of the VM from datastore datastore1 {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1701.700490] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-da318ded-1134-4cd5-93b1-e00afd02b3d1 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] [instance: 40507d8c-8f30-45d4-9c65-03f8b1271afb] Instance destroyed {{(pid=62519) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1701.728396] env[62519]: INFO nova.scheduler.client.report [None req-da318ded-1134-4cd5-93b1-e00afd02b3d1 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Deleted allocations for instance 40507d8c-8f30-45d4-9c65-03f8b1271afb [ 1701.770057] env[62519]: INFO nova.compute.claims [None req-1ed07ba8-f069-4974-ad77-6c6cc84b3f48 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] [instance: ad0af10d-5063-4344-b12f-1d3ee9ea1090] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1701.805719] env[62519]: DEBUG oslo_concurrency.lockutils [None req-952776ea-e15a-49a8-9654-d3d8b8b5d6b1 tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Lock "f59a31e4-7fb9-4de7-b35f-da811a305f85" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 39.445s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1702.040122] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1802747, 'name': CreateVM_Task} progress is 25%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1702.113350] env[62519]: DEBUG nova.network.neutron [req-a6b8bb68-7206-4028-a9ed-346238f38eba req-edb9c5e2-af84-4b15-b280-c27b836c3e0f service nova] [instance: 67424299-f100-49a1-ab73-0407b60a2d9f] Updated VIF entry in instance network info cache for port ccc74c83-e994-4ae0-b4c0-92ba09a53009. 
{{(pid=62519) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1702.113475] env[62519]: DEBUG nova.network.neutron [req-a6b8bb68-7206-4028-a9ed-346238f38eba req-edb9c5e2-af84-4b15-b280-c27b836c3e0f service nova] [instance: 67424299-f100-49a1-ab73-0407b60a2d9f] Updating instance_info_cache with network_info: [{"id": "ccc74c83-e994-4ae0-b4c0-92ba09a53009", "address": "fa:16:3e:d7:19:9c", "network": {"id": "df66fb3a-d33e-4ba9-a3b0-cea73cd95302", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-944542718-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f1e17b34cbef497985ff79e77d5b0f89", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dd7d0d95-6848-4e69-ac21-75f8db82a3b5", "external-id": "nsx-vlan-transportzone-272", "segmentation_id": 272, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapccc74c83-e9", "ovs_interfaceid": "ccc74c83-e994-4ae0-b4c0-92ba09a53009", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1702.118526] env[62519]: DEBUG oslo_concurrency.lockutils [None req-be17eeca-b00a-4055-b9b2-f92649e4fba8 tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Acquiring lock "099112ae-569b-4853-bc47-b0b8b97d2525" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1702.118873] env[62519]: DEBUG oslo_concurrency.lockutils [None req-be17eeca-b00a-4055-b9b2-f92649e4fba8 tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Lock "099112ae-569b-4853-bc47-b0b8b97d2525" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1702.119557] env[62519]: DEBUG oslo_concurrency.lockutils [None req-be17eeca-b00a-4055-b9b2-f92649e4fba8 tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Acquiring lock "099112ae-569b-4853-bc47-b0b8b97d2525-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1702.119557] env[62519]: DEBUG oslo_concurrency.lockutils [None req-be17eeca-b00a-4055-b9b2-f92649e4fba8 tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Lock "099112ae-569b-4853-bc47-b0b8b97d2525-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1702.119557] env[62519]: DEBUG oslo_concurrency.lockutils [None req-be17eeca-b00a-4055-b9b2-f92649e4fba8 
tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Lock "099112ae-569b-4853-bc47-b0b8b97d2525-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1702.121645] env[62519]: INFO nova.compute.manager [None req-be17eeca-b00a-4055-b9b2-f92649e4fba8 tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] [instance: 099112ae-569b-4853-bc47-b0b8b97d2525] Terminating instance [ 1702.233415] env[62519]: DEBUG oslo_concurrency.lockutils [None req-da318ded-1134-4cd5-93b1-e00afd02b3d1 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1702.278405] env[62519]: INFO nova.compute.resource_tracker [None req-1ed07ba8-f069-4974-ad77-6c6cc84b3f48 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] [instance: ad0af10d-5063-4344-b12f-1d3ee9ea1090] Updating resource usage from migration 7e488268-8aed-49a2-a0b0-dbd98ced6c2b [ 1702.434562] env[62519]: DEBUG oslo_concurrency.lockutils [None req-9fb41100-265a-40a3-9269-3fe1f1b74f74 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Lock "88f9351c-253b-49dd-a88e-b8585ea742ac" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 47.899s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1702.513617] env[62519]: DEBUG nova.network.neutron [None req-7c5992d4-cb16-49ff-bdd0-3edf419b5b38 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] [instance: 83a680b9-0c2d-4231-9ddf-9aa90209c620] Successfully updated port: eac73fb2-d38f-49e3-939e-36903535dfeb {{(pid=62519) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1702.547962] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1802747, 'name': CreateVM_Task} progress is 25%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1702.615990] env[62519]: DEBUG oslo_concurrency.lockutils [req-a6b8bb68-7206-4028-a9ed-346238f38eba req-edb9c5e2-af84-4b15-b280-c27b836c3e0f service nova] Releasing lock "refresh_cache-67424299-f100-49a1-ab73-0407b60a2d9f" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1702.618679] env[62519]: DEBUG nova.compute.manager [req-a6b8bb68-7206-4028-a9ed-346238f38eba req-edb9c5e2-af84-4b15-b280-c27b836c3e0f service nova] [instance: 40507d8c-8f30-45d4-9c65-03f8b1271afb] Received event network-vif-unplugged-3d7bc0c1-8f36-42c7-b25f-89e8c729e3a3 {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1702.618679] env[62519]: DEBUG oslo_concurrency.lockutils [req-a6b8bb68-7206-4028-a9ed-346238f38eba req-edb9c5e2-af84-4b15-b280-c27b836c3e0f service nova] Acquiring lock "40507d8c-8f30-45d4-9c65-03f8b1271afb-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1702.618679] env[62519]: DEBUG oslo_concurrency.lockutils [req-a6b8bb68-7206-4028-a9ed-346238f38eba req-edb9c5e2-af84-4b15-b280-c27b836c3e0f service nova] Lock "40507d8c-8f30-45d4-9c65-03f8b1271afb-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1702.618679] env[62519]: DEBUG oslo_concurrency.lockutils [req-a6b8bb68-7206-4028-a9ed-346238f38eba req-edb9c5e2-af84-4b15-b280-c27b836c3e0f service nova] Lock "40507d8c-8f30-45d4-9c65-03f8b1271afb-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1702.618679] env[62519]: DEBUG nova.compute.manager [req-a6b8bb68-7206-4028-a9ed-346238f38eba req-edb9c5e2-af84-4b15-b280-c27b836c3e0f service nova] [instance: 40507d8c-8f30-45d4-9c65-03f8b1271afb] No waiting events found dispatching network-vif-unplugged-3d7bc0c1-8f36-42c7-b25f-89e8c729e3a3 {{(pid=62519) pop_instance_event /opt/stack/nova/nova/compute/manager.py:323}} [ 1702.618679] env[62519]: WARNING nova.compute.manager [req-a6b8bb68-7206-4028-a9ed-346238f38eba req-edb9c5e2-af84-4b15-b280-c27b836c3e0f service nova] [instance: 40507d8c-8f30-45d4-9c65-03f8b1271afb] Received unexpected event network-vif-unplugged-3d7bc0c1-8f36-42c7-b25f-89e8c729e3a3 for instance with vm_state shelved and task_state shelving_offloading. [ 1702.618679] env[62519]: DEBUG nova.compute.manager [req-a6b8bb68-7206-4028-a9ed-346238f38eba req-edb9c5e2-af84-4b15-b280-c27b836c3e0f service nova] [instance: 40507d8c-8f30-45d4-9c65-03f8b1271afb] Received event network-changed-3d7bc0c1-8f36-42c7-b25f-89e8c729e3a3 {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1702.618679] env[62519]: DEBUG nova.compute.manager [req-a6b8bb68-7206-4028-a9ed-346238f38eba req-edb9c5e2-af84-4b15-b280-c27b836c3e0f service nova] [instance: 40507d8c-8f30-45d4-9c65-03f8b1271afb] Refreshing instance network info cache due to event network-changed-3d7bc0c1-8f36-42c7-b25f-89e8c729e3a3. 
{{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11628}} [ 1702.618679] env[62519]: DEBUG oslo_concurrency.lockutils [req-a6b8bb68-7206-4028-a9ed-346238f38eba req-edb9c5e2-af84-4b15-b280-c27b836c3e0f service nova] Acquiring lock "refresh_cache-40507d8c-8f30-45d4-9c65-03f8b1271afb" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1702.618679] env[62519]: DEBUG oslo_concurrency.lockutils [req-a6b8bb68-7206-4028-a9ed-346238f38eba req-edb9c5e2-af84-4b15-b280-c27b836c3e0f service nova] Acquired lock "refresh_cache-40507d8c-8f30-45d4-9c65-03f8b1271afb" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1702.618679] env[62519]: DEBUG nova.network.neutron [req-a6b8bb68-7206-4028-a9ed-346238f38eba req-edb9c5e2-af84-4b15-b280-c27b836c3e0f service nova] [instance: 40507d8c-8f30-45d4-9c65-03f8b1271afb] Refreshing network info cache for port 3d7bc0c1-8f36-42c7-b25f-89e8c729e3a3 {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1702.625790] env[62519]: DEBUG nova.compute.manager [None req-be17eeca-b00a-4055-b9b2-f92649e4fba8 tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] [instance: 099112ae-569b-4853-bc47-b0b8b97d2525] Start destroying the instance on the hypervisor. {{(pid=62519) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3166}} [ 1702.625880] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-be17eeca-b00a-4055-b9b2-f92649e4fba8 tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] [instance: 099112ae-569b-4853-bc47-b0b8b97d2525] Destroying instance {{(pid=62519) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1702.627872] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9530a27f-96a6-422c-80c4-1453082851bd {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1702.636851] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-be17eeca-b00a-4055-b9b2-f92649e4fba8 tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] [instance: 099112ae-569b-4853-bc47-b0b8b97d2525] Powering off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1702.639501] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ff341636-12e6-4480-bb4e-15ea696302f0 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1702.648014] env[62519]: DEBUG oslo_vmware.api [None req-be17eeca-b00a-4055-b9b2-f92649e4fba8 tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Waiting for the task: (returnval){ [ 1702.648014] env[62519]: value = "task-1802748" [ 1702.648014] env[62519]: _type = "Task" [ 1702.648014] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1702.657557] env[62519]: DEBUG oslo_vmware.api [None req-be17eeca-b00a-4055-b9b2-f92649e4fba8 tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Task: {'id': task-1802748, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1702.699565] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b669af2-9b3a-4909-885d-ecc2f85983f0 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1702.708179] env[62519]: DEBUG nova.network.neutron [-] [instance: 54a8aa34-1595-4494-ba68-6915611631ce] Updating instance_info_cache with network_info: [] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1702.710584] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db550830-43cf-4b7d-9019-f388d8918591 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1702.748350] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f83816b-6327-4424-b239-0ae38721fe72 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1702.759623] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-321c79b5-ff14-46ba-b2b5-5bb855b74bcd {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1702.776967] env[62519]: DEBUG nova.compute.provider_tree [None req-1ed07ba8-f069-4974-ad77-6c6cc84b3f48 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Inventory has not changed in ProviderTree for provider: f8ca0d98-9158-4b85-ae0e-b106f966dd44 {{(pid=62519) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1703.022978] env[62519]: DEBUG oslo_concurrency.lockutils [None req-7c5992d4-cb16-49ff-bdd0-3edf419b5b38 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] Acquiring lock "refresh_cache-83a680b9-0c2d-4231-9ddf-9aa90209c620" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1703.022978] env[62519]: DEBUG oslo_concurrency.lockutils [None req-7c5992d4-cb16-49ff-bdd0-3edf419b5b38 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] Acquired lock "refresh_cache-83a680b9-0c2d-4231-9ddf-9aa90209c620" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1703.022978] env[62519]: DEBUG nova.network.neutron [None req-7c5992d4-cb16-49ff-bdd0-3edf419b5b38 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] [instance: 83a680b9-0c2d-4231-9ddf-9aa90209c620] Building network info cache for instance {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1703.045617] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1802747, 'name': CreateVM_Task, 'duration_secs': 1.399097} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1703.045617] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 67424299-f100-49a1-ab73-0407b60a2d9f] Created VM on the ESX host {{(pid=62519) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1703.046225] env[62519]: DEBUG oslo_concurrency.lockutils [None req-7c5992d4-cb16-49ff-bdd0-3edf419b5b38 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1703.046576] env[62519]: DEBUG oslo_concurrency.lockutils [None req-7c5992d4-cb16-49ff-bdd0-3edf419b5b38 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] Acquired lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1703.046723] env[62519]: DEBUG oslo_concurrency.lockutils [None req-7c5992d4-cb16-49ff-bdd0-3edf419b5b38 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1703.046991] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d33710fb-f4cb-4b3c-b8eb-2be92fa47a23 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1703.053208] env[62519]: DEBUG oslo_vmware.api [None req-7c5992d4-cb16-49ff-bdd0-3edf419b5b38 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] Waiting for the task: (returnval){ [ 1703.053208] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]52908d72-01f7-1729-41ea-e3f8d30ca40d" [ 1703.053208] env[62519]: _type = "Task" [ 1703.053208] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1703.063796] env[62519]: DEBUG oslo_vmware.api [None req-7c5992d4-cb16-49ff-bdd0-3edf419b5b38 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52908d72-01f7-1729-41ea-e3f8d30ca40d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1703.160447] env[62519]: DEBUG oslo_vmware.api [None req-be17eeca-b00a-4055-b9b2-f92649e4fba8 tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Task: {'id': task-1802748, 'name': PowerOffVM_Task, 'duration_secs': 0.409028} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1703.160837] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-be17eeca-b00a-4055-b9b2-f92649e4fba8 tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] [instance: 099112ae-569b-4853-bc47-b0b8b97d2525] Powered off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1703.161099] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-be17eeca-b00a-4055-b9b2-f92649e4fba8 tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] [instance: 099112ae-569b-4853-bc47-b0b8b97d2525] Unregistering the VM {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1703.161447] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-a01fa598-75c6-402e-a2bd-34ad5d568fb1 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1703.179068] env[62519]: DEBUG nova.compute.manager [req-eca7a037-a86e-4fdd-9734-df3b5db9a2c7 req-d3972730-5851-465d-9dd9-d9c6f3e80381 service nova] [instance: 54a8aa34-1595-4494-ba68-6915611631ce] Received event network-vif-deleted-eeb6f2c0-a95e-455a-8e67-ffa5b2c13e53 {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1703.179398] env[62519]: DEBUG nova.compute.manager [req-eca7a037-a86e-4fdd-9734-df3b5db9a2c7 req-d3972730-5851-465d-9dd9-d9c6f3e80381 service nova] [instance: 83a680b9-0c2d-4231-9ddf-9aa90209c620] Received event network-vif-plugged-eac73fb2-d38f-49e3-939e-36903535dfeb {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1703.179547] env[62519]: DEBUG oslo_concurrency.lockutils [req-eca7a037-a86e-4fdd-9734-df3b5db9a2c7 req-d3972730-5851-465d-9dd9-d9c6f3e80381 service nova] Acquiring lock "83a680b9-0c2d-4231-9ddf-9aa90209c620-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1703.179803] env[62519]: DEBUG oslo_concurrency.lockutils [req-eca7a037-a86e-4fdd-9734-df3b5db9a2c7 req-d3972730-5851-465d-9dd9-d9c6f3e80381 service nova] Lock "83a680b9-0c2d-4231-9ddf-9aa90209c620-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1703.179997] env[62519]: DEBUG oslo_concurrency.lockutils [req-eca7a037-a86e-4fdd-9734-df3b5db9a2c7 req-d3972730-5851-465d-9dd9-d9c6f3e80381 service nova] Lock "83a680b9-0c2d-4231-9ddf-9aa90209c620-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1703.180221] env[62519]: DEBUG nova.compute.manager [req-eca7a037-a86e-4fdd-9734-df3b5db9a2c7 req-d3972730-5851-465d-9dd9-d9c6f3e80381 service nova] [instance: 83a680b9-0c2d-4231-9ddf-9aa90209c620] No waiting events found dispatching network-vif-plugged-eac73fb2-d38f-49e3-939e-36903535dfeb {{(pid=62519) pop_instance_event /opt/stack/nova/nova/compute/manager.py:323}} [ 1703.180613] env[62519]: WARNING nova.compute.manager [req-eca7a037-a86e-4fdd-9734-df3b5db9a2c7 req-d3972730-5851-465d-9dd9-d9c6f3e80381 service nova] [instance: 
83a680b9-0c2d-4231-9ddf-9aa90209c620] Received unexpected event network-vif-plugged-eac73fb2-d38f-49e3-939e-36903535dfeb for instance with vm_state building and task_state spawning. [ 1703.180613] env[62519]: DEBUG nova.compute.manager [req-eca7a037-a86e-4fdd-9734-df3b5db9a2c7 req-d3972730-5851-465d-9dd9-d9c6f3e80381 service nova] [instance: 88f9351c-253b-49dd-a88e-b8585ea742ac] Received event network-changed-f0eeb5fa-bf84-45a4-a90f-85d593feed7e {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1703.180613] env[62519]: DEBUG nova.compute.manager [req-eca7a037-a86e-4fdd-9734-df3b5db9a2c7 req-d3972730-5851-465d-9dd9-d9c6f3e80381 service nova] [instance: 88f9351c-253b-49dd-a88e-b8585ea742ac] Refreshing instance network info cache due to event network-changed-f0eeb5fa-bf84-45a4-a90f-85d593feed7e. {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11628}} [ 1703.180738] env[62519]: DEBUG oslo_concurrency.lockutils [req-eca7a037-a86e-4fdd-9734-df3b5db9a2c7 req-d3972730-5851-465d-9dd9-d9c6f3e80381 service nova] Acquiring lock "refresh_cache-88f9351c-253b-49dd-a88e-b8585ea742ac" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1703.180928] env[62519]: DEBUG oslo_concurrency.lockutils [req-eca7a037-a86e-4fdd-9734-df3b5db9a2c7 req-d3972730-5851-465d-9dd9-d9c6f3e80381 service nova] Acquired lock "refresh_cache-88f9351c-253b-49dd-a88e-b8585ea742ac" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1703.181041] env[62519]: DEBUG nova.network.neutron [req-eca7a037-a86e-4fdd-9734-df3b5db9a2c7 req-d3972730-5851-465d-9dd9-d9c6f3e80381 service nova] [instance: 88f9351c-253b-49dd-a88e-b8585ea742ac] Refreshing network info cache for port f0eeb5fa-bf84-45a4-a90f-85d593feed7e {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1703.216549] env[62519]: INFO nova.compute.manager [-] [instance: 54a8aa34-1595-4494-ba68-6915611631ce] Took 1.76 seconds to deallocate network for instance. [ 1703.279737] env[62519]: DEBUG nova.scheduler.client.report [None req-1ed07ba8-f069-4974-ad77-6c6cc84b3f48 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Inventory has not changed for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 157, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1703.412924] env[62519]: DEBUG nova.network.neutron [req-a6b8bb68-7206-4028-a9ed-346238f38eba req-edb9c5e2-af84-4b15-b280-c27b836c3e0f service nova] [instance: 40507d8c-8f30-45d4-9c65-03f8b1271afb] Updated VIF entry in instance network info cache for port 3d7bc0c1-8f36-42c7-b25f-89e8c729e3a3. 
{{(pid=62519) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1703.413661] env[62519]: DEBUG nova.network.neutron [req-a6b8bb68-7206-4028-a9ed-346238f38eba req-edb9c5e2-af84-4b15-b280-c27b836c3e0f service nova] [instance: 40507d8c-8f30-45d4-9c65-03f8b1271afb] Updating instance_info_cache with network_info: [{"id": "3d7bc0c1-8f36-42c7-b25f-89e8c729e3a3", "address": "fa:16:3e:5d:4c:fc", "network": {"id": "f9454e9e-4102-407f-acc0-7afee44ec876", "bridge": null, "label": "tempest-DeleteServersTestJSON-684678313-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "683d60927fdf424386ffcfaa344a7af6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "unbound", "details": {}, "devname": "tap3d7bc0c1-8f", "ovs_interfaceid": null, "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1703.526057] env[62519]: DEBUG oslo_concurrency.lockutils [None req-ee5b9640-c92a-4528-839d-7cce77d32182 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Acquiring lock "40507d8c-8f30-45d4-9c65-03f8b1271afb" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1703.564799] env[62519]: DEBUG oslo_vmware.api [None req-7c5992d4-cb16-49ff-bdd0-3edf419b5b38 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52908d72-01f7-1729-41ea-e3f8d30ca40d, 'name': SearchDatastore_Task, 'duration_secs': 0.015092} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1703.565115] env[62519]: DEBUG oslo_concurrency.lockutils [None req-7c5992d4-cb16-49ff-bdd0-3edf419b5b38 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] Releasing lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1703.565347] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-7c5992d4-cb16-49ff-bdd0-3edf419b5b38 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] [instance: 67424299-f100-49a1-ab73-0407b60a2d9f] Processing image 15793716-f1d9-4a86-9030-717adf498693 {{(pid=62519) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1703.565572] env[62519]: DEBUG oslo_concurrency.lockutils [None req-7c5992d4-cb16-49ff-bdd0-3edf419b5b38 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1703.565714] env[62519]: DEBUG oslo_concurrency.lockutils [None req-7c5992d4-cb16-49ff-bdd0-3edf419b5b38 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] Acquired lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1703.566009] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-7c5992d4-cb16-49ff-bdd0-3edf419b5b38 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62519) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1703.566282] env[62519]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-bfdeba4c-a4c9-483f-8207-6ac2ee3c9036 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1703.568956] env[62519]: DEBUG nova.network.neutron [None req-7c5992d4-cb16-49ff-bdd0-3edf419b5b38 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] [instance: 83a680b9-0c2d-4231-9ddf-9aa90209c620] Instance cache missing network info. 
{{(pid=62519) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1703.724263] env[62519]: DEBUG oslo_concurrency.lockutils [None req-7126d37a-0998-4413-bff4-8ca1a98f18aa tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1703.739679] env[62519]: DEBUG nova.network.neutron [None req-7c5992d4-cb16-49ff-bdd0-3edf419b5b38 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] [instance: 83a680b9-0c2d-4231-9ddf-9aa90209c620] Updating instance_info_cache with network_info: [{"id": "eac73fb2-d38f-49e3-939e-36903535dfeb", "address": "fa:16:3e:96:e6:52", "network": {"id": "df66fb3a-d33e-4ba9-a3b0-cea73cd95302", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-944542718-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f1e17b34cbef497985ff79e77d5b0f89", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dd7d0d95-6848-4e69-ac21-75f8db82a3b5", "external-id": "nsx-vlan-transportzone-272", "segmentation_id": 272, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapeac73fb2-d3", "ovs_interfaceid": "eac73fb2-d38f-49e3-939e-36903535dfeb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1703.784252] env[62519]: DEBUG oslo_concurrency.lockutils [None req-1ed07ba8-f069-4974-ad77-6c6cc84b3f48 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.520s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1703.784569] env[62519]: INFO nova.compute.manager [None req-1ed07ba8-f069-4974-ad77-6c6cc84b3f48 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] [instance: ad0af10d-5063-4344-b12f-1d3ee9ea1090] Migrating [ 1703.791245] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 32.379s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1703.791385] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1703.791539] env[62519]: DEBUG nova.compute.resource_tracker [None 
req-2e346fd1-4f93-4335-96df-208436219ecf None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62519) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1703.791826] env[62519]: DEBUG oslo_concurrency.lockutils [None req-9d0f7f31-7c2c-4fa3-bc3d-67c656ee891c tempest-ServersAaction247Test-895786641 tempest-ServersAaction247Test-895786641-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 30.600s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1703.793214] env[62519]: INFO nova.compute.claims [None req-9d0f7f31-7c2c-4fa3-bc3d-67c656ee891c tempest-ServersAaction247Test-895786641 tempest-ServersAaction247Test-895786641-project-member] [instance: 37689ec5-4bc8-4361-9e43-00529e2b5538] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1703.800409] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c4e59c4-5838-4037-a9a9-68233ca979a2 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1703.814599] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-afb8385e-155c-4d81-8def-910f91f179d9 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1703.834265] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c13093a4-62b7-4fce-9a54-ec91d0872c0f {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1703.843901] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27f91b9a-0cbb-4b66-87db-a7823f0ff0af {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1703.876063] env[62519]: DEBUG nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=177777MB free_disk=157GB free_vcpus=48 pci_devices=None {{(pid=62519) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1703.876218] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1703.918642] env[62519]: DEBUG oslo_concurrency.lockutils [req-a6b8bb68-7206-4028-a9ed-346238f38eba req-edb9c5e2-af84-4b15-b280-c27b836c3e0f service nova] Releasing lock "refresh_cache-40507d8c-8f30-45d4-9c65-03f8b1271afb" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1703.946131] env[62519]: DEBUG nova.network.neutron [req-eca7a037-a86e-4fdd-9734-df3b5db9a2c7 req-d3972730-5851-465d-9dd9-d9c6f3e80381 service nova] [instance: 88f9351c-253b-49dd-a88e-b8585ea742ac] Updated VIF entry in instance network info cache for port f0eeb5fa-bf84-45a4-a90f-85d593feed7e. 
{{(pid=62519) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1703.946614] env[62519]: DEBUG nova.network.neutron [req-eca7a037-a86e-4fdd-9734-df3b5db9a2c7 req-d3972730-5851-465d-9dd9-d9c6f3e80381 service nova] [instance: 88f9351c-253b-49dd-a88e-b8585ea742ac] Updating instance_info_cache with network_info: [{"id": "f0eeb5fa-bf84-45a4-a90f-85d593feed7e", "address": "fa:16:3e:f5:9b:e3", "network": {"id": "ddacabe4-9723-4a0b-9642-4c3ca54cd05f", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1355796152-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.158", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "549cc35f5ff249f6bf22c67872883db0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bc1e16db-ad3b-4b7f-ab64-4609c87abac0", "external-id": "nsx-vlan-transportzone-500", "segmentation_id": 500, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf0eeb5fa-bf", "ovs_interfaceid": "f0eeb5fa-bf84-45a4-a90f-85d593feed7e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1703.987686] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-7c5992d4-cb16-49ff-bdd0-3edf419b5b38 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62519) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1703.988192] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-7c5992d4-cb16-49ff-bdd0-3edf419b5b38 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62519) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1703.989342] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-068a8a84-852f-4a94-a34f-ccacfb5357e3 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1703.997208] env[62519]: DEBUG oslo_vmware.api [None req-7c5992d4-cb16-49ff-bdd0-3edf419b5b38 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] Waiting for the task: (returnval){ [ 1703.997208] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]5234fad5-c63a-d39a-f4c0-3d1821b7f09f" [ 1703.997208] env[62519]: _type = "Task" [ 1703.997208] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1704.007446] env[62519]: DEBUG oslo_vmware.api [None req-7c5992d4-cb16-49ff-bdd0-3edf419b5b38 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]5234fad5-c63a-d39a-f4c0-3d1821b7f09f, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1704.032931] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-be17eeca-b00a-4055-b9b2-f92649e4fba8 tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] [instance: 099112ae-569b-4853-bc47-b0b8b97d2525] Unregistered the VM {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1704.032931] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-be17eeca-b00a-4055-b9b2-f92649e4fba8 tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] [instance: 099112ae-569b-4853-bc47-b0b8b97d2525] Deleting contents of the VM from datastore datastore1 {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1704.032931] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-be17eeca-b00a-4055-b9b2-f92649e4fba8 tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Deleting the datastore file [datastore1] 099112ae-569b-4853-bc47-b0b8b97d2525 {{(pid=62519) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1704.033180] env[62519]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-7e97cb7f-3009-4f0f-b4f8-3a88abf675e0 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1704.040250] env[62519]: DEBUG oslo_vmware.api [None req-be17eeca-b00a-4055-b9b2-f92649e4fba8 tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Waiting for the task: (returnval){ [ 1704.040250] env[62519]: value = "task-1802750" [ 1704.040250] env[62519]: _type = "Task" [ 1704.040250] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1704.048997] env[62519]: DEBUG oslo_vmware.api [None req-be17eeca-b00a-4055-b9b2-f92649e4fba8 tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Task: {'id': task-1802750, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1704.242176] env[62519]: DEBUG oslo_concurrency.lockutils [None req-7c5992d4-cb16-49ff-bdd0-3edf419b5b38 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] Releasing lock "refresh_cache-83a680b9-0c2d-4231-9ddf-9aa90209c620" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1704.242542] env[62519]: DEBUG nova.compute.manager [None req-7c5992d4-cb16-49ff-bdd0-3edf419b5b38 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] [instance: 83a680b9-0c2d-4231-9ddf-9aa90209c620] Instance network_info: |[{"id": "eac73fb2-d38f-49e3-939e-36903535dfeb", "address": "fa:16:3e:96:e6:52", "network": {"id": "df66fb3a-d33e-4ba9-a3b0-cea73cd95302", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-944542718-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f1e17b34cbef497985ff79e77d5b0f89", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dd7d0d95-6848-4e69-ac21-75f8db82a3b5", "external-id": "nsx-vlan-transportzone-272", "segmentation_id": 272, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapeac73fb2-d3", "ovs_interfaceid": "eac73fb2-d38f-49e3-939e-36903535dfeb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62519) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2004}} [ 1704.242961] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-7c5992d4-cb16-49ff-bdd0-3edf419b5b38 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] [instance: 83a680b9-0c2d-4231-9ddf-9aa90209c620] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:96:e6:52', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'dd7d0d95-6848-4e69-ac21-75f8db82a3b5', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'eac73fb2-d38f-49e3-939e-36903535dfeb', 'vif_model': 'vmxnet3'}] {{(pid=62519) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1704.250768] env[62519]: DEBUG oslo.service.loopingcall [None req-7c5992d4-cb16-49ff-bdd0-3edf419b5b38 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62519) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1704.250991] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 83a680b9-0c2d-4231-9ddf-9aa90209c620] Creating VM on the ESX host {{(pid=62519) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1704.251230] env[62519]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-2a396451-24d8-4a76-838e-9c0df8ffcc25 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1704.273220] env[62519]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1704.273220] env[62519]: value = "task-1802751" [ 1704.273220] env[62519]: _type = "Task" [ 1704.273220] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1704.281670] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1802751, 'name': CreateVM_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1704.313286] env[62519]: DEBUG oslo_concurrency.lockutils [None req-1ed07ba8-f069-4974-ad77-6c6cc84b3f48 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Acquiring lock "refresh_cache-ad0af10d-5063-4344-b12f-1d3ee9ea1090" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1704.313560] env[62519]: DEBUG oslo_concurrency.lockutils [None req-1ed07ba8-f069-4974-ad77-6c6cc84b3f48 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Acquired lock "refresh_cache-ad0af10d-5063-4344-b12f-1d3ee9ea1090" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1704.313758] env[62519]: DEBUG nova.network.neutron [None req-1ed07ba8-f069-4974-ad77-6c6cc84b3f48 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] [instance: ad0af10d-5063-4344-b12f-1d3ee9ea1090] Building network info cache for instance {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1704.452055] env[62519]: DEBUG oslo_concurrency.lockutils [req-eca7a037-a86e-4fdd-9734-df3b5db9a2c7 req-d3972730-5851-465d-9dd9-d9c6f3e80381 service nova] Releasing lock "refresh_cache-88f9351c-253b-49dd-a88e-b8585ea742ac" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1704.452055] env[62519]: DEBUG nova.compute.manager [req-eca7a037-a86e-4fdd-9734-df3b5db9a2c7 req-d3972730-5851-465d-9dd9-d9c6f3e80381 service nova] [instance: 83a680b9-0c2d-4231-9ddf-9aa90209c620] Received event network-changed-eac73fb2-d38f-49e3-939e-36903535dfeb {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1704.452367] env[62519]: DEBUG nova.compute.manager [req-eca7a037-a86e-4fdd-9734-df3b5db9a2c7 req-d3972730-5851-465d-9dd9-d9c6f3e80381 service nova] [instance: 83a680b9-0c2d-4231-9ddf-9aa90209c620] Refreshing instance network info cache due to event network-changed-eac73fb2-d38f-49e3-939e-36903535dfeb. 
{{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11628}} [ 1704.452636] env[62519]: DEBUG oslo_concurrency.lockutils [req-eca7a037-a86e-4fdd-9734-df3b5db9a2c7 req-d3972730-5851-465d-9dd9-d9c6f3e80381 service nova] Acquiring lock "refresh_cache-83a680b9-0c2d-4231-9ddf-9aa90209c620" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1704.452837] env[62519]: DEBUG oslo_concurrency.lockutils [req-eca7a037-a86e-4fdd-9734-df3b5db9a2c7 req-d3972730-5851-465d-9dd9-d9c6f3e80381 service nova] Acquired lock "refresh_cache-83a680b9-0c2d-4231-9ddf-9aa90209c620" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1704.453067] env[62519]: DEBUG nova.network.neutron [req-eca7a037-a86e-4fdd-9734-df3b5db9a2c7 req-d3972730-5851-465d-9dd9-d9c6f3e80381 service nova] [instance: 83a680b9-0c2d-4231-9ddf-9aa90209c620] Refreshing network info cache for port eac73fb2-d38f-49e3-939e-36903535dfeb {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1704.509774] env[62519]: DEBUG oslo_vmware.api [None req-7c5992d4-cb16-49ff-bdd0-3edf419b5b38 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]5234fad5-c63a-d39a-f4c0-3d1821b7f09f, 'name': SearchDatastore_Task, 'duration_secs': 0.035776} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1704.510574] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-510d5064-20ab-48d0-a64c-53a6f711f48b {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1704.516210] env[62519]: DEBUG oslo_vmware.api [None req-7c5992d4-cb16-49ff-bdd0-3edf419b5b38 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] Waiting for the task: (returnval){ [ 1704.516210] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]5210b643-8c19-bd65-13e0-e23d3d6861ac" [ 1704.516210] env[62519]: _type = "Task" [ 1704.516210] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1704.525561] env[62519]: DEBUG oslo_vmware.api [None req-7c5992d4-cb16-49ff-bdd0-3edf419b5b38 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]5210b643-8c19-bd65-13e0-e23d3d6861ac, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1704.551401] env[62519]: DEBUG oslo_vmware.api [None req-be17eeca-b00a-4055-b9b2-f92649e4fba8 tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Task: {'id': task-1802750, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.169815} completed successfully. 
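Editor's note: the recurring Acquiring/Acquired/Releasing DEBUG lines for locks named "refresh_cache-<instance uuid>" come from oslo.concurrency's lockutils helpers, which Nova uses to serialize network-info cache refreshes per instance while unrelated instances proceed in parallel. A minimal illustrative sketch of that locking shape follows (hypothetical helper names; this is not Nova's actual code in nova.network.neutron):

    # Sketch only: mirrors the "refresh_cache-<uuid>" lock naming seen in the log.
    from oslo_concurrency import lockutils

    def refresh_network_info_cache(instance_uuid, fetch_nw_info, cache):
        # One named lock per instance, so concurrent refreshes of the same
        # instance are serialized; the lock is released on exiting the block.
        with lockutils.lock('refresh_cache-%s' % instance_uuid):
            cache[instance_uuid] = fetch_nw_info(instance_uuid)
            return cache[instance_uuid]

Nova's real code wraps this in its own synchronized decorators; only the lock-name convention is taken from the log above.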
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1704.551748] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-be17eeca-b00a-4055-b9b2-f92649e4fba8 tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Deleted the datastore file {{(pid=62519) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1704.551946] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-be17eeca-b00a-4055-b9b2-f92649e4fba8 tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] [instance: 099112ae-569b-4853-bc47-b0b8b97d2525] Deleted contents of the VM from datastore datastore1 {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1704.552137] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-be17eeca-b00a-4055-b9b2-f92649e4fba8 tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] [instance: 099112ae-569b-4853-bc47-b0b8b97d2525] Instance destroyed {{(pid=62519) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1704.552312] env[62519]: INFO nova.compute.manager [None req-be17eeca-b00a-4055-b9b2-f92649e4fba8 tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] [instance: 099112ae-569b-4853-bc47-b0b8b97d2525] Took 1.93 seconds to destroy the instance on the hypervisor. [ 1704.552567] env[62519]: DEBUG oslo.service.loopingcall [None req-be17eeca-b00a-4055-b9b2-f92649e4fba8 tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=62519) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1704.552768] env[62519]: DEBUG nova.compute.manager [-] [instance: 099112ae-569b-4853-bc47-b0b8b97d2525] Deallocating network for instance {{(pid=62519) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2297}} [ 1704.552894] env[62519]: DEBUG nova.network.neutron [-] [instance: 099112ae-569b-4853-bc47-b0b8b97d2525] deallocate_for_instance() {{(pid=62519) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1704.783610] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1802751, 'name': CreateVM_Task, 'duration_secs': 0.395156} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1704.783790] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 83a680b9-0c2d-4231-9ddf-9aa90209c620] Created VM on the ESX host {{(pid=62519) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1704.784582] env[62519]: DEBUG oslo_concurrency.lockutils [None req-7c5992d4-cb16-49ff-bdd0-3edf419b5b38 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1704.784785] env[62519]: DEBUG oslo_concurrency.lockutils [None req-7c5992d4-cb16-49ff-bdd0-3edf419b5b38 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] Acquired lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1704.785066] env[62519]: DEBUG oslo_concurrency.lockutils [None req-7c5992d4-cb16-49ff-bdd0-3edf419b5b38 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1704.785326] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f9982fb9-7084-4ddc-8f4a-614f2028143f {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1704.790734] env[62519]: DEBUG oslo_vmware.api [None req-7c5992d4-cb16-49ff-bdd0-3edf419b5b38 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] Waiting for the task: (returnval){ [ 1704.790734] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]528e9379-a516-4bea-4a55-e7407ac7938e" [ 1704.790734] env[62519]: _type = "Task" [ 1704.790734] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1704.800831] env[62519]: DEBUG oslo_vmware.api [None req-7c5992d4-cb16-49ff-bdd0-3edf419b5b38 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]528e9379-a516-4bea-4a55-e7407ac7938e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1705.027520] env[62519]: DEBUG oslo_vmware.api [None req-7c5992d4-cb16-49ff-bdd0-3edf419b5b38 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]5210b643-8c19-bd65-13e0-e23d3d6861ac, 'name': SearchDatastore_Task, 'duration_secs': 0.010799} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1705.027856] env[62519]: DEBUG oslo_concurrency.lockutils [None req-7c5992d4-cb16-49ff-bdd0-3edf419b5b38 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] Releasing lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1705.028210] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-7c5992d4-cb16-49ff-bdd0-3edf419b5b38 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk to [datastore1] 67424299-f100-49a1-ab73-0407b60a2d9f/67424299-f100-49a1-ab73-0407b60a2d9f.vmdk {{(pid=62519) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1705.028516] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-441d2708-8a8c-476a-b217-b5240829e302 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1705.038497] env[62519]: DEBUG oslo_vmware.api [None req-7c5992d4-cb16-49ff-bdd0-3edf419b5b38 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] Waiting for the task: (returnval){ [ 1705.038497] env[62519]: value = "task-1802752" [ 1705.038497] env[62519]: _type = "Task" [ 1705.038497] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1705.046960] env[62519]: DEBUG oslo_vmware.api [None req-7c5992d4-cb16-49ff-bdd0-3edf419b5b38 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] Task: {'id': task-1802752, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1705.059203] env[62519]: DEBUG nova.network.neutron [None req-1ed07ba8-f069-4974-ad77-6c6cc84b3f48 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] [instance: ad0af10d-5063-4344-b12f-1d3ee9ea1090] Updating instance_info_cache with network_info: [{"id": "925db578-876c-414b-8500-56c73c7cdfe8", "address": "fa:16:3e:d9:6c:87", "network": {"id": "14e63db8-c42b-42e5-bd96-cab60c163758", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1328688204-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "df6727c290724a8ebef5188c77e91399", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2be3fdb5-359e-43bd-8c20-2ff00e81db55", "external-id": "nsx-vlan-transportzone-986", "segmentation_id": 986, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap925db578-87", "ovs_interfaceid": "925db578-876c-414b-8500-56c73c7cdfe8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1705.147718] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb305c7d-212a-4507-b24a-6969c8a727f8 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1705.157784] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8ca6985-6110-44a4-9884-a2b558329b7d {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1705.187863] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1589566c-f01e-4837-a87a-6a16eb9476cf {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1705.191722] env[62519]: DEBUG nova.network.neutron [req-eca7a037-a86e-4fdd-9734-df3b5db9a2c7 req-d3972730-5851-465d-9dd9-d9c6f3e80381 service nova] [instance: 83a680b9-0c2d-4231-9ddf-9aa90209c620] Updated VIF entry in instance network info cache for port eac73fb2-d38f-49e3-939e-36903535dfeb. 
{{(pid=62519) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1705.192065] env[62519]: DEBUG nova.network.neutron [req-eca7a037-a86e-4fdd-9734-df3b5db9a2c7 req-d3972730-5851-465d-9dd9-d9c6f3e80381 service nova] [instance: 83a680b9-0c2d-4231-9ddf-9aa90209c620] Updating instance_info_cache with network_info: [{"id": "eac73fb2-d38f-49e3-939e-36903535dfeb", "address": "fa:16:3e:96:e6:52", "network": {"id": "df66fb3a-d33e-4ba9-a3b0-cea73cd95302", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-944542718-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f1e17b34cbef497985ff79e77d5b0f89", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dd7d0d95-6848-4e69-ac21-75f8db82a3b5", "external-id": "nsx-vlan-transportzone-272", "segmentation_id": 272, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapeac73fb2-d3", "ovs_interfaceid": "eac73fb2-d38f-49e3-939e-36903535dfeb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1705.196739] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c5db4ce-4565-4bda-932b-3594d6f8032a {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1705.210467] env[62519]: DEBUG nova.compute.provider_tree [None req-9d0f7f31-7c2c-4fa3-bc3d-67c656ee891c tempest-ServersAaction247Test-895786641 tempest-ServersAaction247Test-895786641-project-member] Updating inventory in ProviderTree for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1705.213600] env[62519]: DEBUG nova.compute.manager [req-699a58ae-2ab0-496a-87a7-ef42dd54ad9b req-499c58f5-b2ee-42ff-9201-eef5107fa1e7 service nova] [instance: 099112ae-569b-4853-bc47-b0b8b97d2525] Received event network-vif-deleted-18c7d589-708e-4a8d-b1f1-c646d532c6aa {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1705.213789] env[62519]: INFO nova.compute.manager [req-699a58ae-2ab0-496a-87a7-ef42dd54ad9b req-499c58f5-b2ee-42ff-9201-eef5107fa1e7 service nova] [instance: 099112ae-569b-4853-bc47-b0b8b97d2525] Neutron deleted interface 18c7d589-708e-4a8d-b1f1-c646d532c6aa; detaching it from the instance and deleting it from the info cache [ 1705.213957] env[62519]: DEBUG nova.network.neutron [req-699a58ae-2ab0-496a-87a7-ef42dd54ad9b req-499c58f5-b2ee-42ff-9201-eef5107fa1e7 service nova] [instance: 099112ae-569b-4853-bc47-b0b8b97d2525] Updating instance_info_cache with 
network_info: [] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1705.296751] env[62519]: DEBUG nova.network.neutron [-] [instance: 099112ae-569b-4853-bc47-b0b8b97d2525] Updating instance_info_cache with network_info: [] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1705.302034] env[62519]: DEBUG oslo_vmware.api [None req-7c5992d4-cb16-49ff-bdd0-3edf419b5b38 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]528e9379-a516-4bea-4a55-e7407ac7938e, 'name': SearchDatastore_Task, 'duration_secs': 0.010492} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1705.302687] env[62519]: DEBUG oslo_concurrency.lockutils [None req-7c5992d4-cb16-49ff-bdd0-3edf419b5b38 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] Releasing lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1705.302990] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-7c5992d4-cb16-49ff-bdd0-3edf419b5b38 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] [instance: 83a680b9-0c2d-4231-9ddf-9aa90209c620] Processing image 15793716-f1d9-4a86-9030-717adf498693 {{(pid=62519) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1705.303265] env[62519]: DEBUG oslo_concurrency.lockutils [None req-7c5992d4-cb16-49ff-bdd0-3edf419b5b38 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1705.303476] env[62519]: DEBUG oslo_concurrency.lockutils [None req-7c5992d4-cb16-49ff-bdd0-3edf419b5b38 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] Acquired lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1705.303613] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-7c5992d4-cb16-49ff-bdd0-3edf419b5b38 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62519) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1705.303908] env[62519]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c50461f7-eef0-4b31-abba-86ca6c7f7a30 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1705.324853] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-7c5992d4-cb16-49ff-bdd0-3edf419b5b38 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62519) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1705.325134] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None 
req-7c5992d4-cb16-49ff-bdd0-3edf419b5b38 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62519) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1705.326371] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4307a881-c63b-43e3-9d72-afb64041cdd4 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1705.334168] env[62519]: DEBUG oslo_vmware.api [None req-7c5992d4-cb16-49ff-bdd0-3edf419b5b38 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] Waiting for the task: (returnval){ [ 1705.334168] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]52f77a4b-fde2-0053-8ce6-0d23faa9b5b2" [ 1705.334168] env[62519]: _type = "Task" [ 1705.334168] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1705.344577] env[62519]: DEBUG oslo_vmware.api [None req-7c5992d4-cb16-49ff-bdd0-3edf419b5b38 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52f77a4b-fde2-0053-8ce6-0d23faa9b5b2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1705.550026] env[62519]: DEBUG oslo_vmware.api [None req-7c5992d4-cb16-49ff-bdd0-3edf419b5b38 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] Task: {'id': task-1802752, 'name': CopyVirtualDisk_Task} progress is 77%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1705.562893] env[62519]: DEBUG oslo_concurrency.lockutils [None req-1ed07ba8-f069-4974-ad77-6c6cc84b3f48 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Releasing lock "refresh_cache-ad0af10d-5063-4344-b12f-1d3ee9ea1090" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1705.696133] env[62519]: DEBUG oslo_concurrency.lockutils [req-eca7a037-a86e-4fdd-9734-df3b5db9a2c7 req-d3972730-5851-465d-9dd9-d9c6f3e80381 service nova] Releasing lock "refresh_cache-83a680b9-0c2d-4231-9ddf-9aa90209c620" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1705.718255] env[62519]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-7a0b0520-a751-4fc6-8c26-1ca8a43200e9 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1705.728476] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60e402d1-8968-43c8-91b4-f096d8800b17 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1705.739266] env[62519]: ERROR nova.scheduler.client.report [None req-9d0f7f31-7c2c-4fa3-bc3d-67c656ee891c tempest-ServersAaction247Test-895786641 tempest-ServersAaction247Test-895786641-project-member] [req-ebad2a65-9cdd-42ad-948d-b12142a1f4e8] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID f8ca0d98-9158-4b85-ae0e-b106f966dd44. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-ebad2a65-9cdd-42ad-948d-b12142a1f4e8"}]} [ 1705.765618] env[62519]: DEBUG nova.compute.manager [req-699a58ae-2ab0-496a-87a7-ef42dd54ad9b req-499c58f5-b2ee-42ff-9201-eef5107fa1e7 service nova] [instance: 099112ae-569b-4853-bc47-b0b8b97d2525] Detach interface failed, port_id=18c7d589-708e-4a8d-b1f1-c646d532c6aa, reason: Instance 099112ae-569b-4853-bc47-b0b8b97d2525 could not be found. 
{{(pid=62519) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11455}} [ 1705.766704] env[62519]: DEBUG nova.scheduler.client.report [None req-9d0f7f31-7c2c-4fa3-bc3d-67c656ee891c tempest-ServersAaction247Test-895786641 tempest-ServersAaction247Test-895786641-project-member] Refreshing inventories for resource provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 {{(pid=62519) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 1705.779807] env[62519]: DEBUG nova.scheduler.client.report [None req-9d0f7f31-7c2c-4fa3-bc3d-67c656ee891c tempest-ServersAaction247Test-895786641 tempest-ServersAaction247Test-895786641-project-member] Updating ProviderTree inventory for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 157, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 1705.780027] env[62519]: DEBUG nova.compute.provider_tree [None req-9d0f7f31-7c2c-4fa3-bc3d-67c656ee891c tempest-ServersAaction247Test-895786641 tempest-ServersAaction247Test-895786641-project-member] Updating inventory in ProviderTree for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 157, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1705.791281] env[62519]: DEBUG nova.scheduler.client.report [None req-9d0f7f31-7c2c-4fa3-bc3d-67c656ee891c tempest-ServersAaction247Test-895786641 tempest-ServersAaction247Test-895786641-project-member] Refreshing aggregate associations for resource provider f8ca0d98-9158-4b85-ae0e-b106f966dd44, aggregates: None {{(pid=62519) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 1705.803686] env[62519]: INFO nova.compute.manager [-] [instance: 099112ae-569b-4853-bc47-b0b8b97d2525] Took 1.25 seconds to deallocate network for instance. [ 1705.811167] env[62519]: DEBUG nova.scheduler.client.report [None req-9d0f7f31-7c2c-4fa3-bc3d-67c656ee891c tempest-ServersAaction247Test-895786641 tempest-ServersAaction247Test-895786641-project-member] Refreshing trait associations for resource provider f8ca0d98-9158-4b85-ae0e-b106f966dd44, traits: COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NODE,COMPUTE_SAME_HOST_COLD_MIGRATE,HW_ARCH_X86_64,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_VMDK {{(pid=62519) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 1705.846919] env[62519]: DEBUG oslo_vmware.api [None req-7c5992d4-cb16-49ff-bdd0-3edf419b5b38 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52f77a4b-fde2-0053-8ce6-0d23faa9b5b2, 'name': SearchDatastore_Task, 'duration_secs': 0.068949} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1705.847922] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-99c44bab-3fb1-441b-914c-f040bdff4b97 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1705.854412] env[62519]: DEBUG oslo_vmware.api [None req-7c5992d4-cb16-49ff-bdd0-3edf419b5b38 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] Waiting for the task: (returnval){ [ 1705.854412] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]525a190c-d82f-3c8a-0b97-b0cc5d28d023" [ 1705.854412] env[62519]: _type = "Task" [ 1705.854412] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1705.862610] env[62519]: DEBUG oslo_vmware.api [None req-7c5992d4-cb16-49ff-bdd0-3edf419b5b38 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]525a190c-d82f-3c8a-0b97-b0cc5d28d023, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1706.053950] env[62519]: DEBUG oslo_vmware.api [None req-7c5992d4-cb16-49ff-bdd0-3edf419b5b38 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] Task: {'id': task-1802752, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.529242} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1706.054264] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-7c5992d4-cb16-49ff-bdd0-3edf419b5b38 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk to [datastore1] 67424299-f100-49a1-ab73-0407b60a2d9f/67424299-f100-49a1-ab73-0407b60a2d9f.vmdk {{(pid=62519) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1706.054481] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-7c5992d4-cb16-49ff-bdd0-3edf419b5b38 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] [instance: 67424299-f100-49a1-ab73-0407b60a2d9f] Extending root virtual disk to 1048576 {{(pid=62519) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1706.054818] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-6e013dc7-6b1a-45ee-9e28-c2da644bf93a {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1706.064626] env[62519]: DEBUG oslo_vmware.api [None req-7c5992d4-cb16-49ff-bdd0-3edf419b5b38 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] Waiting for the task: (returnval){ [ 1706.064626] env[62519]: value = "task-1802753" [ 1706.064626] env[62519]: _type = "Task" [ 1706.064626] env[62519]: } to complete. 
{{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1706.079118] env[62519]: DEBUG oslo_vmware.api [None req-7c5992d4-cb16-49ff-bdd0-3edf419b5b38 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] Task: {'id': task-1802753, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1706.096991] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab4e5cb4-19a0-45f5-b392-47cdc714a0c7 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1706.105527] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-593f2c0c-2e65-454d-9681-5374b0538761 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1706.140055] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7dad19d4-f8f1-4051-a4a0-449b8f62b48f {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1706.148370] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad60ca57-1e2c-4632-b205-9f96875c10a8 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1706.163450] env[62519]: DEBUG nova.compute.provider_tree [None req-9d0f7f31-7c2c-4fa3-bc3d-67c656ee891c tempest-ServersAaction247Test-895786641 tempest-ServersAaction247Test-895786641-project-member] Updating inventory in ProviderTree for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1706.310425] env[62519]: DEBUG oslo_concurrency.lockutils [None req-be17eeca-b00a-4055-b9b2-f92649e4fba8 tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1706.364818] env[62519]: DEBUG oslo_vmware.api [None req-7c5992d4-cb16-49ff-bdd0-3edf419b5b38 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]525a190c-d82f-3c8a-0b97-b0cc5d28d023, 'name': SearchDatastore_Task, 'duration_secs': 0.010071} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1706.365097] env[62519]: DEBUG oslo_concurrency.lockutils [None req-7c5992d4-cb16-49ff-bdd0-3edf419b5b38 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] Releasing lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1706.365350] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-7c5992d4-cb16-49ff-bdd0-3edf419b5b38 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk to [datastore1] 83a680b9-0c2d-4231-9ddf-9aa90209c620/83a680b9-0c2d-4231-9ddf-9aa90209c620.vmdk {{(pid=62519) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1706.365604] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a9181ec3-02be-493e-ba4a-c2c52d645584 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1706.372469] env[62519]: DEBUG oslo_vmware.api [None req-7c5992d4-cb16-49ff-bdd0-3edf419b5b38 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] Waiting for the task: (returnval){ [ 1706.372469] env[62519]: value = "task-1802754" [ 1706.372469] env[62519]: _type = "Task" [ 1706.372469] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1706.380325] env[62519]: DEBUG oslo_vmware.api [None req-7c5992d4-cb16-49ff-bdd0-3edf419b5b38 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] Task: {'id': task-1802754, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1706.581778] env[62519]: DEBUG oslo_vmware.api [None req-7c5992d4-cb16-49ff-bdd0-3edf419b5b38 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] Task: {'id': task-1802753, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.07304} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1706.581778] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-7c5992d4-cb16-49ff-bdd0-3edf419b5b38 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] [instance: 67424299-f100-49a1-ab73-0407b60a2d9f] Extended root virtual disk {{(pid=62519) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1706.582500] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-785ae184-845b-408b-afde-b57680d61d43 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1706.609330] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-7c5992d4-cb16-49ff-bdd0-3edf419b5b38 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] [instance: 67424299-f100-49a1-ab73-0407b60a2d9f] Reconfiguring VM instance instance-00000048 to attach disk [datastore1] 67424299-f100-49a1-ab73-0407b60a2d9f/67424299-f100-49a1-ab73-0407b60a2d9f.vmdk or device None with type sparse {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1706.610082] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-eacd68dc-26f8-4c8f-bf87-a41cc40436db {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1706.633090] env[62519]: DEBUG oslo_vmware.api [None req-7c5992d4-cb16-49ff-bdd0-3edf419b5b38 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] Waiting for the task: (returnval){ [ 1706.633090] env[62519]: value = "task-1802755" [ 1706.633090] env[62519]: _type = "Task" [ 1706.633090] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1706.643096] env[62519]: DEBUG oslo_vmware.api [None req-7c5992d4-cb16-49ff-bdd0-3edf419b5b38 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] Task: {'id': task-1802755, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1706.698354] env[62519]: DEBUG nova.scheduler.client.report [None req-9d0f7f31-7c2c-4fa3-bc3d-67c656ee891c tempest-ServersAaction247Test-895786641 tempest-ServersAaction247Test-895786641-project-member] Updated inventory for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 with generation 101 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 1706.698595] env[62519]: DEBUG nova.compute.provider_tree [None req-9d0f7f31-7c2c-4fa3-bc3d-67c656ee891c tempest-ServersAaction247Test-895786641 tempest-ServersAaction247Test-895786641-project-member] Updating resource provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 generation from 101 to 102 during operation: update_inventory {{(pid=62519) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1706.698769] env[62519]: DEBUG nova.compute.provider_tree [None req-9d0f7f31-7c2c-4fa3-bc3d-67c656ee891c tempest-ServersAaction247Test-895786641 tempest-ServersAaction247Test-895786641-project-member] Updating inventory in ProviderTree for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1706.883486] env[62519]: DEBUG oslo_vmware.api [None req-7c5992d4-cb16-49ff-bdd0-3edf419b5b38 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] Task: {'id': task-1802754, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1707.087132] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e576743c-0d44-4684-9707-5a7f5d02a94b {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1707.106084] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-1ed07ba8-f069-4974-ad77-6c6cc84b3f48 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] [instance: ad0af10d-5063-4344-b12f-1d3ee9ea1090] Updating instance 'ad0af10d-5063-4344-b12f-1d3ee9ea1090' progress to 0 {{(pid=62519) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1707.144045] env[62519]: DEBUG oslo_vmware.api [None req-7c5992d4-cb16-49ff-bdd0-3edf419b5b38 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] Task: {'id': task-1802755, 'name': ReconfigVM_Task, 'duration_secs': 0.467923} completed successfully. 
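Editor's note: the 409 "placement.concurrent_update" ERROR above, followed by "Refreshing inventories ..." and finally "Updated inventory ... with generation 101 ... Updating resource provider ... generation from 101 to 102", shows Placement's generation-based optimistic concurrency: every inventory PUT carries the provider generation the writer last saw, a stale generation is rejected with 409, and the client re-reads and retries. A rough sketch of that loop against the Placement HTTP API follows (endpoint, token and microversion handling are simplified assumptions; this is not nova.scheduler.client.report):

    # Sketch of Placement's generation-check-and-retry loop (assumed auth/endpoint).
    import requests

    def set_inventory(base_url, token, provider_uuid, inventories, retries=3):
        url = '%s/resource_providers/%s/inventories' % (base_url, provider_uuid)
        headers = {'X-Auth-Token': token,
                   'OpenStack-API-Version': 'placement 1.26'}
        for _ in range(retries):
            # Re-read the provider's current generation before each attempt.
            current = requests.get(url, headers=headers).json()
            body = {'resource_provider_generation':
                        current['resource_provider_generation'],
                    'inventories': inventories}
            resp = requests.put(url, json=body, headers=headers)
            if resp.status_code != 409:
                resp.raise_for_status()
                return resp.json()
            # 409 placement.concurrent_update: another writer bumped the
            # generation first; loop and retry, as the report client does above.
        raise RuntimeError('inventory update kept conflicting')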
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1707.144251] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-7c5992d4-cb16-49ff-bdd0-3edf419b5b38 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] [instance: 67424299-f100-49a1-ab73-0407b60a2d9f] Reconfigured VM instance instance-00000048 to attach disk [datastore1] 67424299-f100-49a1-ab73-0407b60a2d9f/67424299-f100-49a1-ab73-0407b60a2d9f.vmdk or device None with type sparse {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1707.144951] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-cb1cb4fa-8ece-41c0-8a13-1d749a872836 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1707.153130] env[62519]: DEBUG oslo_vmware.api [None req-7c5992d4-cb16-49ff-bdd0-3edf419b5b38 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] Waiting for the task: (returnval){ [ 1707.153130] env[62519]: value = "task-1802756" [ 1707.153130] env[62519]: _type = "Task" [ 1707.153130] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1707.165973] env[62519]: DEBUG oslo_vmware.api [None req-7c5992d4-cb16-49ff-bdd0-3edf419b5b38 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] Task: {'id': task-1802756, 'name': Rename_Task} progress is 5%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1707.204030] env[62519]: DEBUG oslo_concurrency.lockutils [None req-9d0f7f31-7c2c-4fa3-bc3d-67c656ee891c tempest-ServersAaction247Test-895786641 tempest-ServersAaction247Test-895786641-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.412s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1707.205053] env[62519]: DEBUG nova.compute.manager [None req-9d0f7f31-7c2c-4fa3-bc3d-67c656ee891c tempest-ServersAaction247Test-895786641 tempest-ServersAaction247Test-895786641-project-member] [instance: 37689ec5-4bc8-4361-9e43-00529e2b5538] Start building networks asynchronously for instance. 
{{(pid=62519) _build_resources /opt/stack/nova/nova/compute/manager.py:2838}} [ 1707.207422] env[62519]: DEBUG oslo_concurrency.lockutils [None req-3d88a191-b673-4e36-8aa0-9caa075c1094 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 28.946s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1707.207633] env[62519]: DEBUG oslo_concurrency.lockutils [None req-3d88a191-b673-4e36-8aa0-9caa075c1094 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1707.209838] env[62519]: DEBUG oslo_concurrency.lockutils [None req-29e9d70a-91ad-45d4-8dc5-675fe322e52d tempest-AttachInterfacesV270Test-1088965964 tempest-AttachInterfacesV270Test-1088965964-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 25.038s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1707.211516] env[62519]: INFO nova.compute.claims [None req-29e9d70a-91ad-45d4-8dc5-675fe322e52d tempest-AttachInterfacesV270Test-1088965964 tempest-AttachInterfacesV270Test-1088965964-project-member] [instance: 0789b142-4712-4b7a-9197-c3689f24df7c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1707.239348] env[62519]: INFO nova.scheduler.client.report [None req-3d88a191-b673-4e36-8aa0-9caa075c1094 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Deleted allocations for instance a1551278-a306-4534-8d8d-3b3a003dde04 [ 1707.384329] env[62519]: DEBUG oslo_vmware.api [None req-7c5992d4-cb16-49ff-bdd0-3edf419b5b38 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] Task: {'id': task-1802754, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.511786} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1707.384545] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-7c5992d4-cb16-49ff-bdd0-3edf419b5b38 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk to [datastore1] 83a680b9-0c2d-4231-9ddf-9aa90209c620/83a680b9-0c2d-4231-9ddf-9aa90209c620.vmdk {{(pid=62519) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1707.384767] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-7c5992d4-cb16-49ff-bdd0-3edf419b5b38 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] [instance: 83a680b9-0c2d-4231-9ddf-9aa90209c620] Extending root virtual disk to 1048576 {{(pid=62519) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1707.385120] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-0c7065bb-9242-41e9-8cae-5d505cde1847 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1707.394266] env[62519]: DEBUG oslo_vmware.api [None req-7c5992d4-cb16-49ff-bdd0-3edf419b5b38 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] Waiting for the task: (returnval){ [ 1707.394266] env[62519]: value = "task-1802757" [ 1707.394266] env[62519]: _type = "Task" [ 1707.394266] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1707.404822] env[62519]: DEBUG oslo_vmware.api [None req-7c5992d4-cb16-49ff-bdd0-3edf419b5b38 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] Task: {'id': task-1802757, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1707.612853] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-1ed07ba8-f069-4974-ad77-6c6cc84b3f48 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] [instance: ad0af10d-5063-4344-b12f-1d3ee9ea1090] Powering off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1707.613310] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-85645fda-2bd4-49c3-a1e2-57c3cbfe6da3 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1707.623180] env[62519]: DEBUG oslo_vmware.api [None req-1ed07ba8-f069-4974-ad77-6c6cc84b3f48 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Waiting for the task: (returnval){ [ 1707.623180] env[62519]: value = "task-1802758" [ 1707.623180] env[62519]: _type = "Task" [ 1707.623180] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1707.634418] env[62519]: DEBUG oslo_vmware.api [None req-1ed07ba8-f069-4974-ad77-6c6cc84b3f48 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Task: {'id': task-1802758, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1707.665017] env[62519]: DEBUG oslo_vmware.api [None req-7c5992d4-cb16-49ff-bdd0-3edf419b5b38 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] Task: {'id': task-1802756, 'name': Rename_Task, 'duration_secs': 0.366663} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1707.665329] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-7c5992d4-cb16-49ff-bdd0-3edf419b5b38 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] [instance: 67424299-f100-49a1-ab73-0407b60a2d9f] Powering on the VM {{(pid=62519) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1707.665584] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b9d6327a-590a-4e37-807d-720bb133a5bb {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1707.673861] env[62519]: DEBUG oslo_vmware.api [None req-7c5992d4-cb16-49ff-bdd0-3edf419b5b38 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] Waiting for the task: (returnval){ [ 1707.673861] env[62519]: value = "task-1802759" [ 1707.673861] env[62519]: _type = "Task" [ 1707.673861] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1707.683275] env[62519]: DEBUG oslo_vmware.api [None req-7c5992d4-cb16-49ff-bdd0-3edf419b5b38 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] Task: {'id': task-1802759, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1707.716512] env[62519]: DEBUG nova.compute.utils [None req-9d0f7f31-7c2c-4fa3-bc3d-67c656ee891c tempest-ServersAaction247Test-895786641 tempest-ServersAaction247Test-895786641-project-member] Using /dev/sd instead of None {{(pid=62519) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1707.719923] env[62519]: DEBUG nova.compute.manager [None req-9d0f7f31-7c2c-4fa3-bc3d-67c656ee891c tempest-ServersAaction247Test-895786641 tempest-ServersAaction247Test-895786641-project-member] [instance: 37689ec5-4bc8-4361-9e43-00529e2b5538] Not allocating networking since 'none' was specified. {{(pid=62519) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1985}} [ 1707.750266] env[62519]: DEBUG oslo_concurrency.lockutils [None req-3d88a191-b673-4e36-8aa0-9caa075c1094 tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Lock "a1551278-a306-4534-8d8d-3b3a003dde04" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 33.014s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1707.906215] env[62519]: DEBUG oslo_vmware.api [None req-7c5992d4-cb16-49ff-bdd0-3edf419b5b38 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] Task: {'id': task-1802757, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.269323} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1707.906646] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-7c5992d4-cb16-49ff-bdd0-3edf419b5b38 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] [instance: 83a680b9-0c2d-4231-9ddf-9aa90209c620] Extended root virtual disk {{(pid=62519) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1707.907386] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-111d78ee-db50-4395-9da5-d874337be71c {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1707.931805] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-7c5992d4-cb16-49ff-bdd0-3edf419b5b38 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] [instance: 83a680b9-0c2d-4231-9ddf-9aa90209c620] Reconfiguring VM instance instance-00000049 to attach disk [datastore1] 83a680b9-0c2d-4231-9ddf-9aa90209c620/83a680b9-0c2d-4231-9ddf-9aa90209c620.vmdk or device None with type sparse {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1707.932154] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c9b4f191-e0df-4934-84b3-6acb23f204cf {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1707.954514] env[62519]: DEBUG oslo_vmware.api [None req-7c5992d4-cb16-49ff-bdd0-3edf419b5b38 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] Waiting for the task: (returnval){ [ 1707.954514] env[62519]: value = "task-1802760" [ 1707.954514] env[62519]: _type = "Task" [ 1707.954514] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1707.963428] env[62519]: DEBUG oslo_vmware.api [None req-7c5992d4-cb16-49ff-bdd0-3edf419b5b38 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] Task: {'id': task-1802760, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1708.134364] env[62519]: DEBUG oslo_vmware.api [None req-1ed07ba8-f069-4974-ad77-6c6cc84b3f48 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Task: {'id': task-1802758, 'name': PowerOffVM_Task} progress is 100%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1708.189674] env[62519]: DEBUG oslo_vmware.api [None req-7c5992d4-cb16-49ff-bdd0-3edf419b5b38 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] Task: {'id': task-1802759, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1708.221745] env[62519]: DEBUG nova.compute.manager [None req-9d0f7f31-7c2c-4fa3-bc3d-67c656ee891c tempest-ServersAaction247Test-895786641 tempest-ServersAaction247Test-895786641-project-member] [instance: 37689ec5-4bc8-4361-9e43-00529e2b5538] Start building block device mappings for instance. 
{{(pid=62519) _build_resources /opt/stack/nova/nova/compute/manager.py:2873}} [ 1708.467340] env[62519]: DEBUG oslo_vmware.api [None req-7c5992d4-cb16-49ff-bdd0-3edf419b5b38 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] Task: {'id': task-1802760, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1708.589953] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ccc1507a-07c9-4318-919a-79c8f25212ea {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1708.599022] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-107b3ccf-73d3-4b81-a431-c925061023cc {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1708.635774] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-362f7414-f0fa-46e9-b614-1315de83d885 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1708.646222] env[62519]: DEBUG oslo_vmware.api [None req-1ed07ba8-f069-4974-ad77-6c6cc84b3f48 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Task: {'id': task-1802758, 'name': PowerOffVM_Task, 'duration_secs': 0.566696} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1708.647446] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f06600f8-0154-41ff-9981-736bc728b86b {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1708.651388] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-1ed07ba8-f069-4974-ad77-6c6cc84b3f48 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] [instance: ad0af10d-5063-4344-b12f-1d3ee9ea1090] Powered off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1708.651630] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-1ed07ba8-f069-4974-ad77-6c6cc84b3f48 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] [instance: ad0af10d-5063-4344-b12f-1d3ee9ea1090] Updating instance 'ad0af10d-5063-4344-b12f-1d3ee9ea1090' progress to 17 {{(pid=62519) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1708.667817] env[62519]: DEBUG nova.compute.provider_tree [None req-29e9d70a-91ad-45d4-8dc5-675fe322e52d tempest-AttachInterfacesV270Test-1088965964 tempest-AttachInterfacesV270Test-1088965964-project-member] Updating inventory in ProviderTree for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 157, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1708.686487] env[62519]: DEBUG oslo_vmware.api [None 
req-7c5992d4-cb16-49ff-bdd0-3edf419b5b38 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] Task: {'id': task-1802759, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1708.973681] env[62519]: DEBUG oslo_vmware.api [None req-7c5992d4-cb16-49ff-bdd0-3edf419b5b38 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] Task: {'id': task-1802760, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1709.159618] env[62519]: DEBUG nova.virt.hardware [None req-1ed07ba8-f069-4974-ad77-6c6cc84b3f48 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T07:56:15Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=15793716-f1d9-4a86-9030-717adf498693,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1709.159941] env[62519]: DEBUG nova.virt.hardware [None req-1ed07ba8-f069-4974-ad77-6c6cc84b3f48 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Flavor limits 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1709.160120] env[62519]: DEBUG nova.virt.hardware [None req-1ed07ba8-f069-4974-ad77-6c6cc84b3f48 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Image limits 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1709.160361] env[62519]: DEBUG nova.virt.hardware [None req-1ed07ba8-f069-4974-ad77-6c6cc84b3f48 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Flavor pref 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1709.160517] env[62519]: DEBUG nova.virt.hardware [None req-1ed07ba8-f069-4974-ad77-6c6cc84b3f48 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Image pref 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1709.160705] env[62519]: DEBUG nova.virt.hardware [None req-1ed07ba8-f069-4974-ad77-6c6cc84b3f48 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1709.160941] env[62519]: DEBUG nova.virt.hardware [None req-1ed07ba8-f069-4974-ad77-6c6cc84b3f48 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) 
{{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1709.161121] env[62519]: DEBUG nova.virt.hardware [None req-1ed07ba8-f069-4974-ad77-6c6cc84b3f48 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62519) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1709.161371] env[62519]: DEBUG nova.virt.hardware [None req-1ed07ba8-f069-4974-ad77-6c6cc84b3f48 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Got 1 possible topologies {{(pid=62519) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1709.161572] env[62519]: DEBUG nova.virt.hardware [None req-1ed07ba8-f069-4974-ad77-6c6cc84b3f48 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1709.161761] env[62519]: DEBUG nova.virt.hardware [None req-1ed07ba8-f069-4974-ad77-6c6cc84b3f48 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1709.167851] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ccc1b551-6c8b-44fe-a220-7173f04dd699 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1709.197047] env[62519]: DEBUG oslo_vmware.api [None req-7c5992d4-cb16-49ff-bdd0-3edf419b5b38 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] Task: {'id': task-1802759, 'name': PowerOnVM_Task, 'duration_secs': 1.42463} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1709.198340] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-7c5992d4-cb16-49ff-bdd0-3edf419b5b38 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] [instance: 67424299-f100-49a1-ab73-0407b60a2d9f] Powered on the VM {{(pid=62519) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1709.198553] env[62519]: INFO nova.compute.manager [None req-7c5992d4-cb16-49ff-bdd0-3edf419b5b38 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] [instance: 67424299-f100-49a1-ab73-0407b60a2d9f] Took 10.58 seconds to spawn the instance on the hypervisor. [ 1709.198731] env[62519]: DEBUG nova.compute.manager [None req-7c5992d4-cb16-49ff-bdd0-3edf419b5b38 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] [instance: 67424299-f100-49a1-ab73-0407b60a2d9f] Checking state {{(pid=62519) _get_power_state /opt/stack/nova/nova/compute/manager.py:1799}} [ 1709.199079] env[62519]: DEBUG oslo_vmware.api [None req-1ed07ba8-f069-4974-ad77-6c6cc84b3f48 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Waiting for the task: (returnval){ [ 1709.199079] env[62519]: value = "task-1802761" [ 1709.199079] env[62519]: _type = "Task" [ 1709.199079] env[62519]: } to complete. 
{{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1709.199768] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a7872cf-1209-4f0b-9782-561d87a7a2ea {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1709.216790] env[62519]: DEBUG oslo_vmware.api [None req-1ed07ba8-f069-4974-ad77-6c6cc84b3f48 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Task: {'id': task-1802761, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1709.217972] env[62519]: DEBUG nova.scheduler.client.report [None req-29e9d70a-91ad-45d4-8dc5-675fe322e52d tempest-AttachInterfacesV270Test-1088965964 tempest-AttachInterfacesV270Test-1088965964-project-member] Updated inventory for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 with generation 102 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 157, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 1709.218252] env[62519]: DEBUG nova.compute.provider_tree [None req-29e9d70a-91ad-45d4-8dc5-675fe322e52d tempest-AttachInterfacesV270Test-1088965964 tempest-AttachInterfacesV270Test-1088965964-project-member] Updating resource provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 generation from 102 to 103 during operation: update_inventory {{(pid=62519) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1709.218436] env[62519]: DEBUG nova.compute.provider_tree [None req-29e9d70a-91ad-45d4-8dc5-675fe322e52d tempest-AttachInterfacesV270Test-1088965964 tempest-AttachInterfacesV270Test-1088965964-project-member] Updating inventory in ProviderTree for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 157, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1709.235292] env[62519]: DEBUG nova.compute.manager [None req-9d0f7f31-7c2c-4fa3-bc3d-67c656ee891c tempest-ServersAaction247Test-895786641 tempest-ServersAaction247Test-895786641-project-member] [instance: 37689ec5-4bc8-4361-9e43-00529e2b5538] Start spawning the instance on the hypervisor. 
{{(pid=62519) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2647}} [ 1709.265410] env[62519]: DEBUG nova.virt.hardware [None req-9d0f7f31-7c2c-4fa3-bc3d-67c656ee891c tempest-ServersAaction247Test-895786641 tempest-ServersAaction247Test-895786641-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T07:56:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T07:55:55Z,direct_url=,disk_format='vmdk',id=15793716-f1d9-4a86-9030-717adf498693,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4069337684d348e0a1ab4eb6a1a2b14d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T07:55:56Z,virtual_size=,visibility=), allow threads: False {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1709.265648] env[62519]: DEBUG nova.virt.hardware [None req-9d0f7f31-7c2c-4fa3-bc3d-67c656ee891c tempest-ServersAaction247Test-895786641 tempest-ServersAaction247Test-895786641-project-member] Flavor limits 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1709.265801] env[62519]: DEBUG nova.virt.hardware [None req-9d0f7f31-7c2c-4fa3-bc3d-67c656ee891c tempest-ServersAaction247Test-895786641 tempest-ServersAaction247Test-895786641-project-member] Image limits 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1709.266015] env[62519]: DEBUG nova.virt.hardware [None req-9d0f7f31-7c2c-4fa3-bc3d-67c656ee891c tempest-ServersAaction247Test-895786641 tempest-ServersAaction247Test-895786641-project-member] Flavor pref 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1709.266184] env[62519]: DEBUG nova.virt.hardware [None req-9d0f7f31-7c2c-4fa3-bc3d-67c656ee891c tempest-ServersAaction247Test-895786641 tempest-ServersAaction247Test-895786641-project-member] Image pref 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1709.266328] env[62519]: DEBUG nova.virt.hardware [None req-9d0f7f31-7c2c-4fa3-bc3d-67c656ee891c tempest-ServersAaction247Test-895786641 tempest-ServersAaction247Test-895786641-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1709.266529] env[62519]: DEBUG nova.virt.hardware [None req-9d0f7f31-7c2c-4fa3-bc3d-67c656ee891c tempest-ServersAaction247Test-895786641 tempest-ServersAaction247Test-895786641-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1709.266688] env[62519]: DEBUG nova.virt.hardware [None req-9d0f7f31-7c2c-4fa3-bc3d-67c656ee891c tempest-ServersAaction247Test-895786641 tempest-ServersAaction247Test-895786641-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62519) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1709.266853] env[62519]: DEBUG nova.virt.hardware [None 
req-9d0f7f31-7c2c-4fa3-bc3d-67c656ee891c tempest-ServersAaction247Test-895786641 tempest-ServersAaction247Test-895786641-project-member] Got 1 possible topologies {{(pid=62519) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1709.267040] env[62519]: DEBUG nova.virt.hardware [None req-9d0f7f31-7c2c-4fa3-bc3d-67c656ee891c tempest-ServersAaction247Test-895786641 tempest-ServersAaction247Test-895786641-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1709.267235] env[62519]: DEBUG nova.virt.hardware [None req-9d0f7f31-7c2c-4fa3-bc3d-67c656ee891c tempest-ServersAaction247Test-895786641 tempest-ServersAaction247Test-895786641-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1709.268093] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1452dd80-ea22-4613-834e-6e1b7a390238 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1709.276353] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-714a0ed1-255b-4006-856f-31d44d1314cf {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1709.290797] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-9d0f7f31-7c2c-4fa3-bc3d-67c656ee891c tempest-ServersAaction247Test-895786641 tempest-ServersAaction247Test-895786641-project-member] [instance: 37689ec5-4bc8-4361-9e43-00529e2b5538] Instance VIF info [] {{(pid=62519) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1709.296231] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-9d0f7f31-7c2c-4fa3-bc3d-67c656ee891c tempest-ServersAaction247Test-895786641 tempest-ServersAaction247Test-895786641-project-member] Creating folder: Project (c84471059b83435eb604d7dcdccaec10). Parent ref: group-v373567. {{(pid=62519) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1709.296524] env[62519]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f800820a-c81b-4da9-935f-83f39f04016b {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1709.308626] env[62519]: INFO nova.virt.vmwareapi.vm_util [None req-9d0f7f31-7c2c-4fa3-bc3d-67c656ee891c tempest-ServersAaction247Test-895786641 tempest-ServersAaction247Test-895786641-project-member] Created folder: Project (c84471059b83435eb604d7dcdccaec10) in parent group-v373567. [ 1709.308819] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-9d0f7f31-7c2c-4fa3-bc3d-67c656ee891c tempest-ServersAaction247Test-895786641 tempest-ServersAaction247Test-895786641-project-member] Creating folder: Instances. Parent ref: group-v373766. 
{{(pid=62519) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1709.309066] env[62519]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-eaddee56-5af1-4c76-b4f9-40a46347a0df {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1709.319590] env[62519]: INFO nova.virt.vmwareapi.vm_util [None req-9d0f7f31-7c2c-4fa3-bc3d-67c656ee891c tempest-ServersAaction247Test-895786641 tempest-ServersAaction247Test-895786641-project-member] Created folder: Instances in parent group-v373766. [ 1709.319832] env[62519]: DEBUG oslo.service.loopingcall [None req-9d0f7f31-7c2c-4fa3-bc3d-67c656ee891c tempest-ServersAaction247Test-895786641 tempest-ServersAaction247Test-895786641-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62519) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1709.320070] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 37689ec5-4bc8-4361-9e43-00529e2b5538] Creating VM on the ESX host {{(pid=62519) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1709.320310] env[62519]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-8509456b-c5e5-4b45-85ea-38e63f032a29 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1709.338876] env[62519]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1709.338876] env[62519]: value = "task-1802764" [ 1709.338876] env[62519]: _type = "Task" [ 1709.338876] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1709.352613] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1802764, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1709.387049] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2e5810b0-0157-49eb-9f20-d1455cdb81ec tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Acquiring lock "fe350d30-6fbd-4813-9634-ed05984fecfd" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1709.387376] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2e5810b0-0157-49eb-9f20-d1455cdb81ec tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Lock "fe350d30-6fbd-4813-9634-ed05984fecfd" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1709.387592] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2e5810b0-0157-49eb-9f20-d1455cdb81ec tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Acquiring lock "fe350d30-6fbd-4813-9634-ed05984fecfd-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1709.387774] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2e5810b0-0157-49eb-9f20-d1455cdb81ec tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Lock "fe350d30-6fbd-4813-9634-ed05984fecfd-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1709.388036] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2e5810b0-0157-49eb-9f20-d1455cdb81ec tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Lock "fe350d30-6fbd-4813-9634-ed05984fecfd-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1709.390406] env[62519]: INFO nova.compute.manager [None req-2e5810b0-0157-49eb-9f20-d1455cdb81ec tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] [instance: fe350d30-6fbd-4813-9634-ed05984fecfd] Terminating instance [ 1709.467127] env[62519]: DEBUG oslo_vmware.api [None req-7c5992d4-cb16-49ff-bdd0-3edf419b5b38 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] Task: {'id': task-1802760, 'name': ReconfigVM_Task, 'duration_secs': 1.451886} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1709.467450] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-7c5992d4-cb16-49ff-bdd0-3edf419b5b38 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] [instance: 83a680b9-0c2d-4231-9ddf-9aa90209c620] Reconfigured VM instance instance-00000049 to attach disk [datastore1] 83a680b9-0c2d-4231-9ddf-9aa90209c620/83a680b9-0c2d-4231-9ddf-9aa90209c620.vmdk or device None with type sparse {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1709.468156] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-1abb2911-0f6b-483f-adc6-2f269169fa29 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1709.474642] env[62519]: DEBUG oslo_vmware.api [None req-7c5992d4-cb16-49ff-bdd0-3edf419b5b38 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] Waiting for the task: (returnval){ [ 1709.474642] env[62519]: value = "task-1802765" [ 1709.474642] env[62519]: _type = "Task" [ 1709.474642] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1709.482956] env[62519]: DEBUG oslo_vmware.api [None req-7c5992d4-cb16-49ff-bdd0-3edf419b5b38 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] Task: {'id': task-1802765, 'name': Rename_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1709.712304] env[62519]: DEBUG oslo_vmware.api [None req-1ed07ba8-f069-4974-ad77-6c6cc84b3f48 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Task: {'id': task-1802761, 'name': ReconfigVM_Task, 'duration_secs': 0.18037} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1709.712616] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-1ed07ba8-f069-4974-ad77-6c6cc84b3f48 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] [instance: ad0af10d-5063-4344-b12f-1d3ee9ea1090] Updating instance 'ad0af10d-5063-4344-b12f-1d3ee9ea1090' progress to 33 {{(pid=62519) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1709.723446] env[62519]: DEBUG oslo_concurrency.lockutils [None req-29e9d70a-91ad-45d4-8dc5-675fe322e52d tempest-AttachInterfacesV270Test-1088965964 tempest-AttachInterfacesV270Test-1088965964-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.514s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1709.723889] env[62519]: DEBUG nova.compute.manager [None req-29e9d70a-91ad-45d4-8dc5-675fe322e52d tempest-AttachInterfacesV270Test-1088965964 tempest-AttachInterfacesV270Test-1088965964-project-member] [instance: 0789b142-4712-4b7a-9197-c3689f24df7c] Start building networks asynchronously for instance. 
{{(pid=62519) _build_resources /opt/stack/nova/nova/compute/manager.py:2838}} [ 1709.726210] env[62519]: INFO nova.compute.manager [None req-7c5992d4-cb16-49ff-bdd0-3edf419b5b38 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] [instance: 67424299-f100-49a1-ab73-0407b60a2d9f] Took 44.09 seconds to build instance. [ 1709.727012] env[62519]: DEBUG oslo_concurrency.lockutils [None req-3f693238-92fb-4a51-a1b2-5d8290ec4c65 tempest-VolumesAdminNegativeTest-694306887 tempest-VolumesAdminNegativeTest-694306887-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 21.528s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1709.727233] env[62519]: DEBUG nova.objects.instance [None req-3f693238-92fb-4a51-a1b2-5d8290ec4c65 tempest-VolumesAdminNegativeTest-694306887 tempest-VolumesAdminNegativeTest-694306887-project-member] Lazy-loading 'resources' on Instance uuid c61c893f-826b-4874-b253-de6fbffa9e5a {{(pid=62519) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1709.849893] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1802764, 'name': CreateVM_Task, 'duration_secs': 0.333518} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1709.850130] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 37689ec5-4bc8-4361-9e43-00529e2b5538] Created VM on the ESX host {{(pid=62519) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1709.850521] env[62519]: DEBUG oslo_concurrency.lockutils [None req-9d0f7f31-7c2c-4fa3-bc3d-67c656ee891c tempest-ServersAaction247Test-895786641 tempest-ServersAaction247Test-895786641-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1709.850694] env[62519]: DEBUG oslo_concurrency.lockutils [None req-9d0f7f31-7c2c-4fa3-bc3d-67c656ee891c tempest-ServersAaction247Test-895786641 tempest-ServersAaction247Test-895786641-project-member] Acquired lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1709.850993] env[62519]: DEBUG oslo_concurrency.lockutils [None req-9d0f7f31-7c2c-4fa3-bc3d-67c656ee891c tempest-ServersAaction247Test-895786641 tempest-ServersAaction247Test-895786641-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1709.851283] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d7ee7c1e-bbeb-42fe-8b30-0d5e0b65da7a {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1709.856206] env[62519]: DEBUG oslo_vmware.api [None req-9d0f7f31-7c2c-4fa3-bc3d-67c656ee891c tempest-ServersAaction247Test-895786641 tempest-ServersAaction247Test-895786641-project-member] Waiting for the task: (returnval){ [ 1709.856206] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]52ca5994-9613-e593-b34c-6ab0fe3d5485" [ 1709.856206] env[62519]: _type = "Task" [ 1709.856206] env[62519]: } to complete. 
{{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1709.864042] env[62519]: DEBUG oslo_vmware.api [None req-9d0f7f31-7c2c-4fa3-bc3d-67c656ee891c tempest-ServersAaction247Test-895786641 tempest-ServersAaction247Test-895786641-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52ca5994-9613-e593-b34c-6ab0fe3d5485, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1709.894636] env[62519]: DEBUG nova.compute.manager [None req-2e5810b0-0157-49eb-9f20-d1455cdb81ec tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] [instance: fe350d30-6fbd-4813-9634-ed05984fecfd] Start destroying the instance on the hypervisor. {{(pid=62519) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3166}} [ 1709.894944] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-2e5810b0-0157-49eb-9f20-d1455cdb81ec tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] [instance: fe350d30-6fbd-4813-9634-ed05984fecfd] Destroying instance {{(pid=62519) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1709.895996] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1cb6bd98-8bcc-499b-a836-fe39221514b0 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1709.905334] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-2e5810b0-0157-49eb-9f20-d1455cdb81ec tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] [instance: fe350d30-6fbd-4813-9634-ed05984fecfd] Powering off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1709.905581] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-5247c0d4-343e-428e-84db-6cbcf674bc16 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1709.912139] env[62519]: DEBUG oslo_vmware.api [None req-2e5810b0-0157-49eb-9f20-d1455cdb81ec tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Waiting for the task: (returnval){ [ 1709.912139] env[62519]: value = "task-1802766" [ 1709.912139] env[62519]: _type = "Task" [ 1709.912139] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1709.923665] env[62519]: DEBUG oslo_vmware.api [None req-2e5810b0-0157-49eb-9f20-d1455cdb81ec tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Task: {'id': task-1802766, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1709.984976] env[62519]: DEBUG oslo_vmware.api [None req-7c5992d4-cb16-49ff-bdd0-3edf419b5b38 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] Task: {'id': task-1802765, 'name': Rename_Task, 'duration_secs': 0.180622} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1709.985313] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-7c5992d4-cb16-49ff-bdd0-3edf419b5b38 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] [instance: 83a680b9-0c2d-4231-9ddf-9aa90209c620] Powering on the VM {{(pid=62519) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1709.985537] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-3bc6fedb-7229-4e88-bd63-2891a97d16ed {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1709.992805] env[62519]: DEBUG oslo_vmware.api [None req-7c5992d4-cb16-49ff-bdd0-3edf419b5b38 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] Waiting for the task: (returnval){ [ 1709.992805] env[62519]: value = "task-1802767" [ 1709.992805] env[62519]: _type = "Task" [ 1709.992805] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1710.000536] env[62519]: DEBUG oslo_vmware.api [None req-7c5992d4-cb16-49ff-bdd0-3edf419b5b38 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] Task: {'id': task-1802767, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1710.221394] env[62519]: DEBUG nova.virt.hardware [None req-1ed07ba8-f069-4974-ad77-6c6cc84b3f48 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T07:56:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=15793716-f1d9-4a86-9030-717adf498693,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1710.221528] env[62519]: DEBUG nova.virt.hardware [None req-1ed07ba8-f069-4974-ad77-6c6cc84b3f48 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Flavor limits 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1710.221681] env[62519]: DEBUG nova.virt.hardware [None req-1ed07ba8-f069-4974-ad77-6c6cc84b3f48 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Image limits 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1710.221863] env[62519]: DEBUG nova.virt.hardware [None req-1ed07ba8-f069-4974-ad77-6c6cc84b3f48 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Flavor pref 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1710.222015] env[62519]: DEBUG nova.virt.hardware [None req-1ed07ba8-f069-4974-ad77-6c6cc84b3f48 tempest-ServerDiskConfigTestJSON-1314025576 
tempest-ServerDiskConfigTestJSON-1314025576-project-member] Image pref 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1710.222172] env[62519]: DEBUG nova.virt.hardware [None req-1ed07ba8-f069-4974-ad77-6c6cc84b3f48 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1710.222381] env[62519]: DEBUG nova.virt.hardware [None req-1ed07ba8-f069-4974-ad77-6c6cc84b3f48 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1710.222538] env[62519]: DEBUG nova.virt.hardware [None req-1ed07ba8-f069-4974-ad77-6c6cc84b3f48 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62519) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1710.222700] env[62519]: DEBUG nova.virt.hardware [None req-1ed07ba8-f069-4974-ad77-6c6cc84b3f48 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Got 1 possible topologies {{(pid=62519) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1710.222861] env[62519]: DEBUG nova.virt.hardware [None req-1ed07ba8-f069-4974-ad77-6c6cc84b3f48 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1710.223243] env[62519]: DEBUG nova.virt.hardware [None req-1ed07ba8-f069-4974-ad77-6c6cc84b3f48 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1710.229041] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-1ed07ba8-f069-4974-ad77-6c6cc84b3f48 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] [instance: ad0af10d-5063-4344-b12f-1d3ee9ea1090] Reconfiguring VM instance instance-00000041 to detach disk 2000 {{(pid=62519) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1710.230922] env[62519]: DEBUG nova.compute.utils [None req-29e9d70a-91ad-45d4-8dc5-675fe322e52d tempest-AttachInterfacesV270Test-1088965964 tempest-AttachInterfacesV270Test-1088965964-project-member] Using /dev/sd instead of None {{(pid=62519) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1710.234462] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9e50ff69-b21a-4eaf-aa72-84e983f39d52 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1710.249569] env[62519]: DEBUG nova.compute.manager [None req-29e9d70a-91ad-45d4-8dc5-675fe322e52d tempest-AttachInterfacesV270Test-1088965964 tempest-AttachInterfacesV270Test-1088965964-project-member] [instance: 
0789b142-4712-4b7a-9197-c3689f24df7c] Start building block device mappings for instance. {{(pid=62519) _build_resources /opt/stack/nova/nova/compute/manager.py:2873}} [ 1710.253029] env[62519]: DEBUG nova.compute.manager [None req-29e9d70a-91ad-45d4-8dc5-675fe322e52d tempest-AttachInterfacesV270Test-1088965964 tempest-AttachInterfacesV270Test-1088965964-project-member] [instance: 0789b142-4712-4b7a-9197-c3689f24df7c] Allocating IP information in the background. {{(pid=62519) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1989}} [ 1710.253209] env[62519]: DEBUG nova.network.neutron [None req-29e9d70a-91ad-45d4-8dc5-675fe322e52d tempest-AttachInterfacesV270Test-1088965964 tempest-AttachInterfacesV270Test-1088965964-project-member] [instance: 0789b142-4712-4b7a-9197-c3689f24df7c] allocate_for_instance() {{(pid=62519) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1710.264101] env[62519]: DEBUG oslo_vmware.api [None req-1ed07ba8-f069-4974-ad77-6c6cc84b3f48 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Waiting for the task: (returnval){ [ 1710.264101] env[62519]: value = "task-1802768" [ 1710.264101] env[62519]: _type = "Task" [ 1710.264101] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1710.278790] env[62519]: DEBUG oslo_vmware.api [None req-1ed07ba8-f069-4974-ad77-6c6cc84b3f48 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Task: {'id': task-1802768, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1710.317079] env[62519]: DEBUG nova.policy [None req-29e9d70a-91ad-45d4-8dc5-675fe322e52d tempest-AttachInterfacesV270Test-1088965964 tempest-AttachInterfacesV270Test-1088965964-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'fa89633a772e49bcbe2162cabf87d7cf', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '7ccf89f05646414a8c8517f744692af3', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62519) authorize /opt/stack/nova/nova/policy.py:192}} [ 1710.370026] env[62519]: DEBUG oslo_vmware.api [None req-9d0f7f31-7c2c-4fa3-bc3d-67c656ee891c tempest-ServersAaction247Test-895786641 tempest-ServersAaction247Test-895786641-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52ca5994-9613-e593-b34c-6ab0fe3d5485, 'name': SearchDatastore_Task, 'duration_secs': 0.015822} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1710.370364] env[62519]: DEBUG oslo_concurrency.lockutils [None req-9d0f7f31-7c2c-4fa3-bc3d-67c656ee891c tempest-ServersAaction247Test-895786641 tempest-ServersAaction247Test-895786641-project-member] Releasing lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1710.370609] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-9d0f7f31-7c2c-4fa3-bc3d-67c656ee891c tempest-ServersAaction247Test-895786641 tempest-ServersAaction247Test-895786641-project-member] [instance: 37689ec5-4bc8-4361-9e43-00529e2b5538] Processing image 15793716-f1d9-4a86-9030-717adf498693 {{(pid=62519) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1710.370928] env[62519]: DEBUG oslo_concurrency.lockutils [None req-9d0f7f31-7c2c-4fa3-bc3d-67c656ee891c tempest-ServersAaction247Test-895786641 tempest-ServersAaction247Test-895786641-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1710.370991] env[62519]: DEBUG oslo_concurrency.lockutils [None req-9d0f7f31-7c2c-4fa3-bc3d-67c656ee891c tempest-ServersAaction247Test-895786641 tempest-ServersAaction247Test-895786641-project-member] Acquired lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1710.371218] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-9d0f7f31-7c2c-4fa3-bc3d-67c656ee891c tempest-ServersAaction247Test-895786641 tempest-ServersAaction247Test-895786641-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62519) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1710.371609] env[62519]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-870a268e-bcf5-410b-b6fd-7192cac6006c {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1710.383088] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-9d0f7f31-7c2c-4fa3-bc3d-67c656ee891c tempest-ServersAaction247Test-895786641 tempest-ServersAaction247Test-895786641-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62519) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1710.383373] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-9d0f7f31-7c2c-4fa3-bc3d-67c656ee891c tempest-ServersAaction247Test-895786641 tempest-ServersAaction247Test-895786641-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62519) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1710.384192] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0a1f62e5-ebf4-49d0-9588-a5c881945f41 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1710.390163] env[62519]: DEBUG oslo_vmware.api [None req-9d0f7f31-7c2c-4fa3-bc3d-67c656ee891c tempest-ServersAaction247Test-895786641 tempest-ServersAaction247Test-895786641-project-member] Waiting for the task: (returnval){ [ 1710.390163] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]521ed2e7-48f4-0bd3-678c-4a2b91bb185e" [ 1710.390163] env[62519]: _type = "Task" [ 1710.390163] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1710.402360] env[62519]: DEBUG oslo_vmware.api [None req-9d0f7f31-7c2c-4fa3-bc3d-67c656ee891c tempest-ServersAaction247Test-895786641 tempest-ServersAaction247Test-895786641-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]521ed2e7-48f4-0bd3-678c-4a2b91bb185e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1710.425250] env[62519]: DEBUG oslo_vmware.api [None req-2e5810b0-0157-49eb-9f20-d1455cdb81ec tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Task: {'id': task-1802766, 'name': PowerOffVM_Task, 'duration_secs': 0.180996} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1710.425585] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-2e5810b0-0157-49eb-9f20-d1455cdb81ec tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] [instance: fe350d30-6fbd-4813-9634-ed05984fecfd] Powered off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1710.425714] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-2e5810b0-0157-49eb-9f20-d1455cdb81ec tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] [instance: fe350d30-6fbd-4813-9634-ed05984fecfd] Unregistering the VM {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1710.425937] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-899f4acb-75e5-4134-bad8-66439711133b {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1710.506333] env[62519]: DEBUG oslo_vmware.api [None req-7c5992d4-cb16-49ff-bdd0-3edf419b5b38 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] Task: {'id': task-1802767, 'name': PowerOnVM_Task, 'duration_secs': 0.470601} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1710.506688] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-7c5992d4-cb16-49ff-bdd0-3edf419b5b38 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] [instance: 83a680b9-0c2d-4231-9ddf-9aa90209c620] Powered on the VM {{(pid=62519) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1710.507017] env[62519]: INFO nova.compute.manager [None req-7c5992d4-cb16-49ff-bdd0-3edf419b5b38 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] [instance: 83a680b9-0c2d-4231-9ddf-9aa90209c620] Took 9.29 seconds to spawn the instance on the hypervisor. [ 1710.507281] env[62519]: DEBUG nova.compute.manager [None req-7c5992d4-cb16-49ff-bdd0-3edf419b5b38 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] [instance: 83a680b9-0c2d-4231-9ddf-9aa90209c620] Checking state {{(pid=62519) _get_power_state /opt/stack/nova/nova/compute/manager.py:1799}} [ 1710.508141] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0421adde-91cf-424b-ad11-365b849f6e01 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1710.512214] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-2e5810b0-0157-49eb-9f20-d1455cdb81ec tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] [instance: fe350d30-6fbd-4813-9634-ed05984fecfd] Unregistered the VM {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1710.512474] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-2e5810b0-0157-49eb-9f20-d1455cdb81ec tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] [instance: fe350d30-6fbd-4813-9634-ed05984fecfd] Deleting contents of the VM from datastore datastore1 {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1710.512607] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-2e5810b0-0157-49eb-9f20-d1455cdb81ec tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Deleting the datastore file [datastore1] fe350d30-6fbd-4813-9634-ed05984fecfd {{(pid=62519) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1710.513243] env[62519]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f8010435-1406-4d70-acc3-24cdb4cdc7b2 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1710.524752] env[62519]: DEBUG oslo_vmware.api [None req-2e5810b0-0157-49eb-9f20-d1455cdb81ec tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Waiting for the task: (returnval){ [ 1710.524752] env[62519]: value = "task-1802770" [ 1710.524752] env[62519]: _type = "Task" [ 1710.524752] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1710.535580] env[62519]: DEBUG oslo_vmware.api [None req-2e5810b0-0157-49eb-9f20-d1455cdb81ec tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Task: {'id': task-1802770, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1710.596592] env[62519]: DEBUG nova.network.neutron [None req-29e9d70a-91ad-45d4-8dc5-675fe322e52d tempest-AttachInterfacesV270Test-1088965964 tempest-AttachInterfacesV270Test-1088965964-project-member] [instance: 0789b142-4712-4b7a-9197-c3689f24df7c] Successfully created port: c449f90e-6bf3-4dd2-a762-fcbf43301c6f {{(pid=62519) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1710.623577] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15b5b97d-ab67-4e2e-97ec-e47e0f8a9b18 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1710.632131] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fcc00248-85bb-4397-a98c-f714cbea81b9 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1710.664234] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1cbd8fd3-5030-4892-968b-5b5ca8eace6c {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1710.672657] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-586032c0-b80e-48b1-aad2-9084fb78d672 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1710.686951] env[62519]: DEBUG nova.compute.provider_tree [None req-3f693238-92fb-4a51-a1b2-5d8290ec4c65 tempest-VolumesAdminNegativeTest-694306887 tempest-VolumesAdminNegativeTest-694306887-project-member] Inventory has not changed in ProviderTree for provider: f8ca0d98-9158-4b85-ae0e-b106f966dd44 {{(pid=62519) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1710.756346] env[62519]: DEBUG oslo_concurrency.lockutils [None req-7c5992d4-cb16-49ff-bdd0-3edf419b5b38 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] Lock "67424299-f100-49a1-ab73-0407b60a2d9f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 46.127s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1710.778427] env[62519]: DEBUG oslo_vmware.api [None req-1ed07ba8-f069-4974-ad77-6c6cc84b3f48 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Task: {'id': task-1802768, 'name': ReconfigVM_Task, 'duration_secs': 0.189026} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1710.779049] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-1ed07ba8-f069-4974-ad77-6c6cc84b3f48 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] [instance: ad0af10d-5063-4344-b12f-1d3ee9ea1090] Reconfigured VM instance instance-00000041 to detach disk 2000 {{(pid=62519) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1710.779966] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3a02105-52c6-4540-a220-f9e11d725774 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1710.806171] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-1ed07ba8-f069-4974-ad77-6c6cc84b3f48 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] [instance: ad0af10d-5063-4344-b12f-1d3ee9ea1090] Reconfiguring VM instance instance-00000041 to attach disk [datastore1] ad0af10d-5063-4344-b12f-1d3ee9ea1090/ad0af10d-5063-4344-b12f-1d3ee9ea1090.vmdk or device None with type thin {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1710.806171] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6fc11a41-68d8-447e-8165-428c2786ab6a {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1710.827290] env[62519]: DEBUG oslo_vmware.api [None req-1ed07ba8-f069-4974-ad77-6c6cc84b3f48 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Waiting for the task: (returnval){ [ 1710.827290] env[62519]: value = "task-1802771" [ 1710.827290] env[62519]: _type = "Task" [ 1710.827290] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1710.836333] env[62519]: DEBUG oslo_vmware.api [None req-1ed07ba8-f069-4974-ad77-6c6cc84b3f48 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Task: {'id': task-1802771, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1710.902250] env[62519]: DEBUG oslo_vmware.api [None req-9d0f7f31-7c2c-4fa3-bc3d-67c656ee891c tempest-ServersAaction247Test-895786641 tempest-ServersAaction247Test-895786641-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]521ed2e7-48f4-0bd3-678c-4a2b91bb185e, 'name': SearchDatastore_Task, 'duration_secs': 0.016148} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1710.902870] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8e729634-103e-4af0-b2ed-f5e024a30ad2 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1710.909846] env[62519]: DEBUG oslo_vmware.api [None req-9d0f7f31-7c2c-4fa3-bc3d-67c656ee891c tempest-ServersAaction247Test-895786641 tempest-ServersAaction247Test-895786641-project-member] Waiting for the task: (returnval){ [ 1710.909846] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]523314bc-839c-f28e-3120-8f880ba8e946" [ 1710.909846] env[62519]: _type = "Task" [ 1710.909846] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1710.920027] env[62519]: DEBUG oslo_vmware.api [None req-9d0f7f31-7c2c-4fa3-bc3d-67c656ee891c tempest-ServersAaction247Test-895786641 tempest-ServersAaction247Test-895786641-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]523314bc-839c-f28e-3120-8f880ba8e946, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1711.033603] env[62519]: INFO nova.compute.manager [None req-7c5992d4-cb16-49ff-bdd0-3edf419b5b38 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] [instance: 83a680b9-0c2d-4231-9ddf-9aa90209c620] Took 45.34 seconds to build instance. [ 1711.039713] env[62519]: DEBUG oslo_vmware.api [None req-2e5810b0-0157-49eb-9f20-d1455cdb81ec tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Task: {'id': task-1802770, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.377599} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1711.040293] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-2e5810b0-0157-49eb-9f20-d1455cdb81ec tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Deleted the datastore file {{(pid=62519) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1711.040540] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-2e5810b0-0157-49eb-9f20-d1455cdb81ec tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] [instance: fe350d30-6fbd-4813-9634-ed05984fecfd] Deleted contents of the VM from datastore datastore1 {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1711.040724] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-2e5810b0-0157-49eb-9f20-d1455cdb81ec tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] [instance: fe350d30-6fbd-4813-9634-ed05984fecfd] Instance destroyed {{(pid=62519) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1711.040899] env[62519]: INFO nova.compute.manager [None req-2e5810b0-0157-49eb-9f20-d1455cdb81ec tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] [instance: fe350d30-6fbd-4813-9634-ed05984fecfd] Took 1.15 seconds to destroy the instance on the hypervisor. 
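The wait_for_task / _poll_task records above (task-1802770, task-1802771, task-1802772) all follow the same poll-until-terminal pattern: ask vCenter for task info, log the progress percentage, and stop on success or error. A minimal, self-contained sketch of that pattern — not oslo.vmware's actual implementation; the function name, the dict shape returned by poll_fn, and the interval/timeout values are illustrative assumptions:

    import time

    def wait_for_task(poll_fn, interval=0.5, timeout=300.0):
        """Poll poll_fn() until it reports a terminal state.

        poll_fn is assumed to return a dict like
        {'state': 'running' | 'success' | 'error', 'progress': int, 'error': str},
        an illustrative stand-in for the task info read back from vCenter.
        """
        deadline = time.monotonic() + timeout
        while True:
            info = poll_fn()
            if info['state'] == 'success':
                return info
            if info['state'] == 'error':
                raise RuntimeError(info.get('error', 'task failed'))
            if time.monotonic() > deadline:
                raise TimeoutError('task did not complete in time')
            # Comparable to the "progress is N%" DEBUG lines emitted above.
            time.sleep(interval)

The real driver layers retries and session re-establishment on top of this loop; the sketch only shows the polling core that produces the progress/completion lines seen here.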
[ 1711.041184] env[62519]: DEBUG oslo.service.loopingcall [None req-2e5810b0-0157-49eb-9f20-d1455cdb81ec tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62519) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1711.041530] env[62519]: DEBUG nova.compute.manager [-] [instance: fe350d30-6fbd-4813-9634-ed05984fecfd] Deallocating network for instance {{(pid=62519) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2297}} [ 1711.041647] env[62519]: DEBUG nova.network.neutron [-] [instance: fe350d30-6fbd-4813-9634-ed05984fecfd] deallocate_for_instance() {{(pid=62519) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1711.190684] env[62519]: DEBUG nova.scheduler.client.report [None req-3f693238-92fb-4a51-a1b2-5d8290ec4c65 tempest-VolumesAdminNegativeTest-694306887 tempest-VolumesAdminNegativeTest-694306887-project-member] Inventory has not changed for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 157, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1711.263130] env[62519]: DEBUG nova.compute.manager [None req-29e9d70a-91ad-45d4-8dc5-675fe322e52d tempest-AttachInterfacesV270Test-1088965964 tempest-AttachInterfacesV270Test-1088965964-project-member] [instance: 0789b142-4712-4b7a-9197-c3689f24df7c] Start spawning the instance on the hypervisor. 
{{(pid=62519) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2647}} [ 1711.315623] env[62519]: DEBUG nova.virt.hardware [None req-29e9d70a-91ad-45d4-8dc5-675fe322e52d tempest-AttachInterfacesV270Test-1088965964 tempest-AttachInterfacesV270Test-1088965964-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T07:56:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T07:55:55Z,direct_url=,disk_format='vmdk',id=15793716-f1d9-4a86-9030-717adf498693,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4069337684d348e0a1ab4eb6a1a2b14d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T07:55:56Z,virtual_size=,visibility=), allow threads: False {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1711.315843] env[62519]: DEBUG nova.virt.hardware [None req-29e9d70a-91ad-45d4-8dc5-675fe322e52d tempest-AttachInterfacesV270Test-1088965964 tempest-AttachInterfacesV270Test-1088965964-project-member] Flavor limits 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1711.321018] env[62519]: DEBUG nova.virt.hardware [None req-29e9d70a-91ad-45d4-8dc5-675fe322e52d tempest-AttachInterfacesV270Test-1088965964 tempest-AttachInterfacesV270Test-1088965964-project-member] Image limits 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1711.321018] env[62519]: DEBUG nova.virt.hardware [None req-29e9d70a-91ad-45d4-8dc5-675fe322e52d tempest-AttachInterfacesV270Test-1088965964 tempest-AttachInterfacesV270Test-1088965964-project-member] Flavor pref 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1711.321018] env[62519]: DEBUG nova.virt.hardware [None req-29e9d70a-91ad-45d4-8dc5-675fe322e52d tempest-AttachInterfacesV270Test-1088965964 tempest-AttachInterfacesV270Test-1088965964-project-member] Image pref 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1711.321018] env[62519]: DEBUG nova.virt.hardware [None req-29e9d70a-91ad-45d4-8dc5-675fe322e52d tempest-AttachInterfacesV270Test-1088965964 tempest-AttachInterfacesV270Test-1088965964-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1711.321018] env[62519]: DEBUG nova.virt.hardware [None req-29e9d70a-91ad-45d4-8dc5-675fe322e52d tempest-AttachInterfacesV270Test-1088965964 tempest-AttachInterfacesV270Test-1088965964-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1711.321018] env[62519]: DEBUG nova.virt.hardware [None req-29e9d70a-91ad-45d4-8dc5-675fe322e52d tempest-AttachInterfacesV270Test-1088965964 tempest-AttachInterfacesV270Test-1088965964-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62519) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1711.321018] 
env[62519]: DEBUG nova.virt.hardware [None req-29e9d70a-91ad-45d4-8dc5-675fe322e52d tempest-AttachInterfacesV270Test-1088965964 tempest-AttachInterfacesV270Test-1088965964-project-member] Got 1 possible topologies {{(pid=62519) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1711.321018] env[62519]: DEBUG nova.virt.hardware [None req-29e9d70a-91ad-45d4-8dc5-675fe322e52d tempest-AttachInterfacesV270Test-1088965964 tempest-AttachInterfacesV270Test-1088965964-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1711.321018] env[62519]: DEBUG nova.virt.hardware [None req-29e9d70a-91ad-45d4-8dc5-675fe322e52d tempest-AttachInterfacesV270Test-1088965964 tempest-AttachInterfacesV270Test-1088965964-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1711.321018] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71be09ec-8e6e-49b8-9725-90e0198fd700 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1711.334488] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0627f2f6-a839-4710-a855-be9ad4e948b7 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1711.346381] env[62519]: DEBUG oslo_vmware.api [None req-1ed07ba8-f069-4974-ad77-6c6cc84b3f48 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Task: {'id': task-1802771, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1711.421273] env[62519]: DEBUG oslo_vmware.api [None req-9d0f7f31-7c2c-4fa3-bc3d-67c656ee891c tempest-ServersAaction247Test-895786641 tempest-ServersAaction247Test-895786641-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]523314bc-839c-f28e-3120-8f880ba8e946, 'name': SearchDatastore_Task, 'duration_secs': 0.016573} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1711.421461] env[62519]: DEBUG oslo_concurrency.lockutils [None req-9d0f7f31-7c2c-4fa3-bc3d-67c656ee891c tempest-ServersAaction247Test-895786641 tempest-ServersAaction247Test-895786641-project-member] Releasing lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1711.421849] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-9d0f7f31-7c2c-4fa3-bc3d-67c656ee891c tempest-ServersAaction247Test-895786641 tempest-ServersAaction247Test-895786641-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk to [datastore1] 37689ec5-4bc8-4361-9e43-00529e2b5538/37689ec5-4bc8-4361-9e43-00529e2b5538.vmdk {{(pid=62519) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1711.422161] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-81dfdbe1-535f-4ff5-9cf6-783c92a49c41 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1711.430983] env[62519]: DEBUG oslo_vmware.api [None req-9d0f7f31-7c2c-4fa3-bc3d-67c656ee891c tempest-ServersAaction247Test-895786641 tempest-ServersAaction247Test-895786641-project-member] Waiting for the task: (returnval){ [ 1711.430983] env[62519]: value = "task-1802772" [ 1711.430983] env[62519]: _type = "Task" [ 1711.430983] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1711.440164] env[62519]: DEBUG oslo_vmware.api [None req-9d0f7f31-7c2c-4fa3-bc3d-67c656ee891c tempest-ServersAaction247Test-895786641 tempest-ServersAaction247Test-895786641-project-member] Task: {'id': task-1802772, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1711.519409] env[62519]: DEBUG nova.compute.manager [req-88060116-8289-4b6f-9f38-506ffeadcf71 req-066c86e1-d842-47cc-9d1a-0759612442f3 service nova] [instance: fe350d30-6fbd-4813-9634-ed05984fecfd] Received event network-vif-deleted-6cce4a48-b732-4bd4-a39e-bbc701b31b3b {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1711.519618] env[62519]: INFO nova.compute.manager [req-88060116-8289-4b6f-9f38-506ffeadcf71 req-066c86e1-d842-47cc-9d1a-0759612442f3 service nova] [instance: fe350d30-6fbd-4813-9634-ed05984fecfd] Neutron deleted interface 6cce4a48-b732-4bd4-a39e-bbc701b31b3b; detaching it from the instance and deleting it from the info cache [ 1711.520711] env[62519]: DEBUG nova.network.neutron [req-88060116-8289-4b6f-9f38-506ffeadcf71 req-066c86e1-d842-47cc-9d1a-0759612442f3 service nova] [instance: fe350d30-6fbd-4813-9634-ed05984fecfd] Updating instance_info_cache with network_info: [] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1711.698069] env[62519]: DEBUG oslo_concurrency.lockutils [None req-3f693238-92fb-4a51-a1b2-5d8290ec4c65 tempest-VolumesAdminNegativeTest-694306887 tempest-VolumesAdminNegativeTest-694306887-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.971s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1711.702635] env[62519]: DEBUG oslo_concurrency.lockutils [None req-5fc67e40-6ac8-45ae-a471-6bc7d3a0937f tempest-ServersTestFqdnHostnames-1137700598 tempest-ServersTestFqdnHostnames-1137700598-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 21.162s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1711.702870] env[62519]: DEBUG nova.objects.instance [None req-5fc67e40-6ac8-45ae-a471-6bc7d3a0937f tempest-ServersTestFqdnHostnames-1137700598 tempest-ServersTestFqdnHostnames-1137700598-project-member] Lazy-loading 'resources' on Instance uuid a59be5e6-2316-4766-933a-4d01dfe4fec1 {{(pid=62519) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1711.725981] env[62519]: INFO nova.scheduler.client.report [None req-3f693238-92fb-4a51-a1b2-5d8290ec4c65 tempest-VolumesAdminNegativeTest-694306887 tempest-VolumesAdminNegativeTest-694306887-project-member] Deleted allocations for instance c61c893f-826b-4874-b253-de6fbffa9e5a [ 1711.843466] env[62519]: DEBUG oslo_vmware.api [None req-1ed07ba8-f069-4974-ad77-6c6cc84b3f48 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Task: {'id': task-1802771, 'name': ReconfigVM_Task, 'duration_secs': 0.55575} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1711.843794] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-1ed07ba8-f069-4974-ad77-6c6cc84b3f48 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] [instance: ad0af10d-5063-4344-b12f-1d3ee9ea1090] Reconfigured VM instance instance-00000041 to attach disk [datastore1] ad0af10d-5063-4344-b12f-1d3ee9ea1090/ad0af10d-5063-4344-b12f-1d3ee9ea1090.vmdk or device None with type thin {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1711.844090] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-1ed07ba8-f069-4974-ad77-6c6cc84b3f48 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] [instance: ad0af10d-5063-4344-b12f-1d3ee9ea1090] Updating instance 'ad0af10d-5063-4344-b12f-1d3ee9ea1090' progress to 50 {{(pid=62519) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1711.944086] env[62519]: DEBUG oslo_vmware.api [None req-9d0f7f31-7c2c-4fa3-bc3d-67c656ee891c tempest-ServersAaction247Test-895786641 tempest-ServersAaction247Test-895786641-project-member] Task: {'id': task-1802772, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1711.995355] env[62519]: DEBUG nova.network.neutron [-] [instance: fe350d30-6fbd-4813-9634-ed05984fecfd] Updating instance_info_cache with network_info: [] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1712.022666] env[62519]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-e8ac04f4-d077-4e44-b594-d0efd7765925 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1712.033747] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a3107e2c-f32c-40fa-92e2-6e97487e50ea {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1712.072416] env[62519]: DEBUG nova.compute.manager [req-88060116-8289-4b6f-9f38-506ffeadcf71 req-066c86e1-d842-47cc-9d1a-0759612442f3 service nova] [instance: fe350d30-6fbd-4813-9634-ed05984fecfd] Detach interface failed, port_id=6cce4a48-b732-4bd4-a39e-bbc701b31b3b, reason: Instance fe350d30-6fbd-4813-9634-ed05984fecfd could not be found. 
{{(pid=62519) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11455}} [ 1712.133821] env[62519]: DEBUG oslo_concurrency.lockutils [None req-55b9ec2f-1756-4ef7-bc20-53ed174522c1 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] Acquiring lock "67424299-f100-49a1-ab73-0407b60a2d9f" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1712.133821] env[62519]: DEBUG oslo_concurrency.lockutils [None req-55b9ec2f-1756-4ef7-bc20-53ed174522c1 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] Lock "67424299-f100-49a1-ab73-0407b60a2d9f" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1712.134014] env[62519]: DEBUG oslo_concurrency.lockutils [None req-55b9ec2f-1756-4ef7-bc20-53ed174522c1 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] Acquiring lock "67424299-f100-49a1-ab73-0407b60a2d9f-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1712.135049] env[62519]: DEBUG oslo_concurrency.lockutils [None req-55b9ec2f-1756-4ef7-bc20-53ed174522c1 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] Lock "67424299-f100-49a1-ab73-0407b60a2d9f-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1712.135049] env[62519]: DEBUG oslo_concurrency.lockutils [None req-55b9ec2f-1756-4ef7-bc20-53ed174522c1 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] Lock "67424299-f100-49a1-ab73-0407b60a2d9f-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1712.136396] env[62519]: INFO nova.compute.manager [None req-55b9ec2f-1756-4ef7-bc20-53ed174522c1 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] [instance: 67424299-f100-49a1-ab73-0407b60a2d9f] Terminating instance [ 1712.172056] env[62519]: DEBUG nova.network.neutron [None req-29e9d70a-91ad-45d4-8dc5-675fe322e52d tempest-AttachInterfacesV270Test-1088965964 tempest-AttachInterfacesV270Test-1088965964-project-member] [instance: 0789b142-4712-4b7a-9197-c3689f24df7c] Successfully updated port: c449f90e-6bf3-4dd2-a762-fcbf43301c6f {{(pid=62519) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1712.206557] env[62519]: DEBUG oslo_concurrency.lockutils [None req-ed7ce4d9-ae28-4b02-96eb-927a70e00742 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] Acquiring lock "83a680b9-0c2d-4231-9ddf-9aa90209c620" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1712.233743] env[62519]: DEBUG 
oslo_concurrency.lockutils [None req-3f693238-92fb-4a51-a1b2-5d8290ec4c65 tempest-VolumesAdminNegativeTest-694306887 tempest-VolumesAdminNegativeTest-694306887-project-member] Lock "c61c893f-826b-4874-b253-de6fbffa9e5a" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 27.820s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1712.352548] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c36de20-69db-44c3-a818-87a41433bb58 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1712.377501] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df93a41c-3e57-49d6-a663-e7a9df2e596a {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1712.397735] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-1ed07ba8-f069-4974-ad77-6c6cc84b3f48 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] [instance: ad0af10d-5063-4344-b12f-1d3ee9ea1090] Updating instance 'ad0af10d-5063-4344-b12f-1d3ee9ea1090' progress to 67 {{(pid=62519) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1712.441795] env[62519]: DEBUG oslo_vmware.api [None req-9d0f7f31-7c2c-4fa3-bc3d-67c656ee891c tempest-ServersAaction247Test-895786641 tempest-ServersAaction247Test-895786641-project-member] Task: {'id': task-1802772, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.624156} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1712.442842] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-9d0f7f31-7c2c-4fa3-bc3d-67c656ee891c tempest-ServersAaction247Test-895786641 tempest-ServersAaction247Test-895786641-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk to [datastore1] 37689ec5-4bc8-4361-9e43-00529e2b5538/37689ec5-4bc8-4361-9e43-00529e2b5538.vmdk {{(pid=62519) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1712.443079] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-9d0f7f31-7c2c-4fa3-bc3d-67c656ee891c tempest-ServersAaction247Test-895786641 tempest-ServersAaction247Test-895786641-project-member] [instance: 37689ec5-4bc8-4361-9e43-00529e2b5538] Extending root virtual disk to 1048576 {{(pid=62519) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1712.443339] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-f3e2a8db-c87f-496a-b930-31aac82cead9 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1712.453556] env[62519]: DEBUG oslo_vmware.api [None req-9d0f7f31-7c2c-4fa3-bc3d-67c656ee891c tempest-ServersAaction247Test-895786641 tempest-ServersAaction247Test-895786641-project-member] Waiting for the task: (returnval){ [ 1712.453556] env[62519]: value = "task-1802773" [ 1712.453556] env[62519]: _type = "Task" [ 1712.453556] env[62519]: } to complete. 
{{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1712.465725] env[62519]: DEBUG oslo_vmware.api [None req-9d0f7f31-7c2c-4fa3-bc3d-67c656ee891c tempest-ServersAaction247Test-895786641 tempest-ServersAaction247Test-895786641-project-member] Task: {'id': task-1802773, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1712.498375] env[62519]: INFO nova.compute.manager [-] [instance: fe350d30-6fbd-4813-9634-ed05984fecfd] Took 1.46 seconds to deallocate network for instance. [ 1712.547115] env[62519]: DEBUG oslo_concurrency.lockutils [None req-7c5992d4-cb16-49ff-bdd0-3edf419b5b38 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] Lock "83a680b9-0c2d-4231-9ddf-9aa90209c620" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 47.865s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1712.547115] env[62519]: DEBUG oslo_concurrency.lockutils [None req-ed7ce4d9-ae28-4b02-96eb-927a70e00742 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] Lock "83a680b9-0c2d-4231-9ddf-9aa90209c620" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.341s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1712.547270] env[62519]: DEBUG oslo_concurrency.lockutils [None req-ed7ce4d9-ae28-4b02-96eb-927a70e00742 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] Acquiring lock "83a680b9-0c2d-4231-9ddf-9aa90209c620-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1712.547487] env[62519]: DEBUG oslo_concurrency.lockutils [None req-ed7ce4d9-ae28-4b02-96eb-927a70e00742 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] Lock "83a680b9-0c2d-4231-9ddf-9aa90209c620-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1712.547724] env[62519]: DEBUG oslo_concurrency.lockutils [None req-ed7ce4d9-ae28-4b02-96eb-927a70e00742 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] Lock "83a680b9-0c2d-4231-9ddf-9aa90209c620-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1712.549739] env[62519]: INFO nova.compute.manager [None req-ed7ce4d9-ae28-4b02-96eb-927a70e00742 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] [instance: 83a680b9-0c2d-4231-9ddf-9aa90209c620] Terminating instance [ 1712.569877] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9bf62e9-16fe-4ff8-bfc9-b8320906e2ea {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1712.577892] env[62519]: DEBUG 
oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7523276-feec-4978-b6fe-97b9ea3f90c9 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1712.609368] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b31ff793-25ac-475e-bb02-f39b47068daf {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1712.617293] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4e01e6e-d0d6-4288-bd66-b010c48d199d {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1712.633125] env[62519]: DEBUG nova.compute.provider_tree [None req-5fc67e40-6ac8-45ae-a471-6bc7d3a0937f tempest-ServersTestFqdnHostnames-1137700598 tempest-ServersTestFqdnHostnames-1137700598-project-member] Inventory has not changed in ProviderTree for provider: f8ca0d98-9158-4b85-ae0e-b106f966dd44 {{(pid=62519) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1712.639578] env[62519]: DEBUG nova.compute.manager [None req-55b9ec2f-1756-4ef7-bc20-53ed174522c1 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] [instance: 67424299-f100-49a1-ab73-0407b60a2d9f] Start destroying the instance on the hypervisor. {{(pid=62519) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3166}} [ 1712.639789] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-55b9ec2f-1756-4ef7-bc20-53ed174522c1 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] [instance: 67424299-f100-49a1-ab73-0407b60a2d9f] Destroying instance {{(pid=62519) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1712.641735] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf00fad7-7e91-41b2-9fdf-156b65b65e08 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1712.650077] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-55b9ec2f-1756-4ef7-bc20-53ed174522c1 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] [instance: 67424299-f100-49a1-ab73-0407b60a2d9f] Powering off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1712.650349] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-76084a54-9d6b-4e5f-ac72-dc6f08b6c68c {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1712.657527] env[62519]: DEBUG oslo_vmware.api [None req-55b9ec2f-1756-4ef7-bc20-53ed174522c1 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] Waiting for the task: (returnval){ [ 1712.657527] env[62519]: value = "task-1802774" [ 1712.657527] env[62519]: _type = "Task" [ 1712.657527] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1712.665780] env[62519]: DEBUG oslo_vmware.api [None req-55b9ec2f-1756-4ef7-bc20-53ed174522c1 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] Task: {'id': task-1802774, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1712.674442] env[62519]: DEBUG oslo_concurrency.lockutils [None req-29e9d70a-91ad-45d4-8dc5-675fe322e52d tempest-AttachInterfacesV270Test-1088965964 tempest-AttachInterfacesV270Test-1088965964-project-member] Acquiring lock "refresh_cache-0789b142-4712-4b7a-9197-c3689f24df7c" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1712.674576] env[62519]: DEBUG oslo_concurrency.lockutils [None req-29e9d70a-91ad-45d4-8dc5-675fe322e52d tempest-AttachInterfacesV270Test-1088965964 tempest-AttachInterfacesV270Test-1088965964-project-member] Acquired lock "refresh_cache-0789b142-4712-4b7a-9197-c3689f24df7c" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1712.674721] env[62519]: DEBUG nova.network.neutron [None req-29e9d70a-91ad-45d4-8dc5-675fe322e52d tempest-AttachInterfacesV270Test-1088965964 tempest-AttachInterfacesV270Test-1088965964-project-member] [instance: 0789b142-4712-4b7a-9197-c3689f24df7c] Building network info cache for instance {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1712.939259] env[62519]: DEBUG nova.network.neutron [None req-1ed07ba8-f069-4974-ad77-6c6cc84b3f48 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] [instance: ad0af10d-5063-4344-b12f-1d3ee9ea1090] Port 925db578-876c-414b-8500-56c73c7cdfe8 binding to destination host cpu-1 is already ACTIVE {{(pid=62519) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 1712.965292] env[62519]: DEBUG oslo_vmware.api [None req-9d0f7f31-7c2c-4fa3-bc3d-67c656ee891c tempest-ServersAaction247Test-895786641 tempest-ServersAaction247Test-895786641-project-member] Task: {'id': task-1802773, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.071233} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1712.965666] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-9d0f7f31-7c2c-4fa3-bc3d-67c656ee891c tempest-ServersAaction247Test-895786641 tempest-ServersAaction247Test-895786641-project-member] [instance: 37689ec5-4bc8-4361-9e43-00529e2b5538] Extended root virtual disk {{(pid=62519) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1712.968970] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec82bfeb-c0e2-4c5c-ba92-70ca5b4882d6 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1712.991817] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-9d0f7f31-7c2c-4fa3-bc3d-67c656ee891c tempest-ServersAaction247Test-895786641 tempest-ServersAaction247Test-895786641-project-member] [instance: 37689ec5-4bc8-4361-9e43-00529e2b5538] Reconfiguring VM instance instance-0000004a to attach disk [datastore1] 37689ec5-4bc8-4361-9e43-00529e2b5538/37689ec5-4bc8-4361-9e43-00529e2b5538.vmdk or device None with type sparse {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1712.992285] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-93114f2b-04a9-437c-ba53-e4cdaa58894c {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1713.008773] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2e5810b0-0157-49eb-9f20-d1455cdb81ec tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1713.016305] env[62519]: DEBUG oslo_vmware.api [None req-9d0f7f31-7c2c-4fa3-bc3d-67c656ee891c tempest-ServersAaction247Test-895786641 tempest-ServersAaction247Test-895786641-project-member] Waiting for the task: (returnval){ [ 1713.016305] env[62519]: value = "task-1802775" [ 1713.016305] env[62519]: _type = "Task" [ 1713.016305] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1713.025414] env[62519]: DEBUG oslo_vmware.api [None req-9d0f7f31-7c2c-4fa3-bc3d-67c656ee891c tempest-ServersAaction247Test-895786641 tempest-ServersAaction247Test-895786641-project-member] Task: {'id': task-1802775, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1713.053887] env[62519]: DEBUG nova.compute.manager [None req-ed7ce4d9-ae28-4b02-96eb-927a70e00742 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] [instance: 83a680b9-0c2d-4231-9ddf-9aa90209c620] Start destroying the instance on the hypervisor. 
{{(pid=62519) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3166}} [ 1713.054383] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-ed7ce4d9-ae28-4b02-96eb-927a70e00742 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] [instance: 83a680b9-0c2d-4231-9ddf-9aa90209c620] Destroying instance {{(pid=62519) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1713.055362] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7579294-63e3-44a7-b6a5-0ec8e2861324 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1713.064182] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-ed7ce4d9-ae28-4b02-96eb-927a70e00742 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] [instance: 83a680b9-0c2d-4231-9ddf-9aa90209c620] Powering off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1713.064490] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-0bd093ab-eaa7-4920-87a0-36f999855a96 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1713.072900] env[62519]: DEBUG oslo_vmware.api [None req-ed7ce4d9-ae28-4b02-96eb-927a70e00742 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] Waiting for the task: (returnval){ [ 1713.072900] env[62519]: value = "task-1802776" [ 1713.072900] env[62519]: _type = "Task" [ 1713.072900] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1713.082580] env[62519]: DEBUG oslo_vmware.api [None req-ed7ce4d9-ae28-4b02-96eb-927a70e00742 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] Task: {'id': task-1802776, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1713.136267] env[62519]: DEBUG nova.scheduler.client.report [None req-5fc67e40-6ac8-45ae-a471-6bc7d3a0937f tempest-ServersTestFqdnHostnames-1137700598 tempest-ServersTestFqdnHostnames-1137700598-project-member] Inventory has not changed for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 157, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1713.171810] env[62519]: DEBUG oslo_vmware.api [None req-55b9ec2f-1756-4ef7-bc20-53ed174522c1 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] Task: {'id': task-1802774, 'name': PowerOffVM_Task, 'duration_secs': 0.19472} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1713.172090] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-55b9ec2f-1756-4ef7-bc20-53ed174522c1 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] [instance: 67424299-f100-49a1-ab73-0407b60a2d9f] Powered off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1713.172256] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-55b9ec2f-1756-4ef7-bc20-53ed174522c1 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] [instance: 67424299-f100-49a1-ab73-0407b60a2d9f] Unregistering the VM {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1713.172518] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-9c2da87b-cf6f-4f6d-96be-04f4974cd7bc {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1713.208616] env[62519]: DEBUG nova.network.neutron [None req-29e9d70a-91ad-45d4-8dc5-675fe322e52d tempest-AttachInterfacesV270Test-1088965964 tempest-AttachInterfacesV270Test-1088965964-project-member] [instance: 0789b142-4712-4b7a-9197-c3689f24df7c] Instance cache missing network info. {{(pid=62519) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1713.382735] env[62519]: DEBUG nova.network.neutron [None req-29e9d70a-91ad-45d4-8dc5-675fe322e52d tempest-AttachInterfacesV270Test-1088965964 tempest-AttachInterfacesV270Test-1088965964-project-member] [instance: 0789b142-4712-4b7a-9197-c3689f24df7c] Updating instance_info_cache with network_info: [{"id": "c449f90e-6bf3-4dd2-a762-fcbf43301c6f", "address": "fa:16:3e:2b:41:f0", "network": {"id": "9e5853bc-09bb-411d-8e10-82257a3b47fa", "bridge": "br-int", "label": "tempest-AttachInterfacesV270Test-1773418268-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7ccf89f05646414a8c8517f744692af3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2a75bb6e-6331-4429-b1b9-c968cc22b9c9", "external-id": "nsx-vlan-transportzone-244", "segmentation_id": 244, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc449f90e-6b", "ovs_interfaceid": "c449f90e-6bf3-4dd2-a762-fcbf43301c6f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1713.528855] env[62519]: DEBUG oslo_vmware.api [None req-9d0f7f31-7c2c-4fa3-bc3d-67c656ee891c tempest-ServersAaction247Test-895786641 tempest-ServersAaction247Test-895786641-project-member] Task: {'id': task-1802775, 'name': ReconfigVM_Task, 'duration_secs': 0.399976} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1713.529249] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-9d0f7f31-7c2c-4fa3-bc3d-67c656ee891c tempest-ServersAaction247Test-895786641 tempest-ServersAaction247Test-895786641-project-member] [instance: 37689ec5-4bc8-4361-9e43-00529e2b5538] Reconfigured VM instance instance-0000004a to attach disk [datastore1] 37689ec5-4bc8-4361-9e43-00529e2b5538/37689ec5-4bc8-4361-9e43-00529e2b5538.vmdk or device None with type sparse {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1713.529844] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-dcc85141-e8cb-4e11-825d-dda9205a9a20 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1713.536741] env[62519]: DEBUG oslo_vmware.api [None req-9d0f7f31-7c2c-4fa3-bc3d-67c656ee891c tempest-ServersAaction247Test-895786641 tempest-ServersAaction247Test-895786641-project-member] Waiting for the task: (returnval){ [ 1713.536741] env[62519]: value = "task-1802778" [ 1713.536741] env[62519]: _type = "Task" [ 1713.536741] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1713.545118] env[62519]: DEBUG oslo_vmware.api [None req-9d0f7f31-7c2c-4fa3-bc3d-67c656ee891c tempest-ServersAaction247Test-895786641 tempest-ServersAaction247Test-895786641-project-member] Task: {'id': task-1802778, 'name': Rename_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1713.582693] env[62519]: DEBUG oslo_vmware.api [None req-ed7ce4d9-ae28-4b02-96eb-927a70e00742 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] Task: {'id': task-1802776, 'name': PowerOffVM_Task, 'duration_secs': 0.212994} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1713.582963] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-ed7ce4d9-ae28-4b02-96eb-927a70e00742 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] [instance: 83a680b9-0c2d-4231-9ddf-9aa90209c620] Powered off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1713.583216] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-ed7ce4d9-ae28-4b02-96eb-927a70e00742 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] [instance: 83a680b9-0c2d-4231-9ddf-9aa90209c620] Unregistering the VM {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1713.583385] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-3fba1e5e-cf33-494d-b41b-9bae42cd92ec {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1713.608324] env[62519]: DEBUG nova.compute.manager [req-3d3e57ec-93cb-4126-b855-2d0b9b53ea6b req-3a87754b-041b-494c-8b3a-48754b1820be service nova] [instance: 0789b142-4712-4b7a-9197-c3689f24df7c] Received event network-vif-plugged-c449f90e-6bf3-4dd2-a762-fcbf43301c6f {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1713.608443] env[62519]: DEBUG oslo_concurrency.lockutils [req-3d3e57ec-93cb-4126-b855-2d0b9b53ea6b req-3a87754b-041b-494c-8b3a-48754b1820be service nova] Acquiring lock "0789b142-4712-4b7a-9197-c3689f24df7c-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1713.608627] env[62519]: DEBUG oslo_concurrency.lockutils [req-3d3e57ec-93cb-4126-b855-2d0b9b53ea6b req-3a87754b-041b-494c-8b3a-48754b1820be service nova] Lock "0789b142-4712-4b7a-9197-c3689f24df7c-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1713.608815] env[62519]: DEBUG oslo_concurrency.lockutils [req-3d3e57ec-93cb-4126-b855-2d0b9b53ea6b req-3a87754b-041b-494c-8b3a-48754b1820be service nova] Lock "0789b142-4712-4b7a-9197-c3689f24df7c-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1713.608953] env[62519]: DEBUG nova.compute.manager [req-3d3e57ec-93cb-4126-b855-2d0b9b53ea6b req-3a87754b-041b-494c-8b3a-48754b1820be service nova] [instance: 0789b142-4712-4b7a-9197-c3689f24df7c] No waiting events found dispatching network-vif-plugged-c449f90e-6bf3-4dd2-a762-fcbf43301c6f {{(pid=62519) pop_instance_event /opt/stack/nova/nova/compute/manager.py:323}} [ 1713.609131] env[62519]: WARNING nova.compute.manager [req-3d3e57ec-93cb-4126-b855-2d0b9b53ea6b req-3a87754b-041b-494c-8b3a-48754b1820be service nova] [instance: 0789b142-4712-4b7a-9197-c3689f24df7c] Received unexpected event network-vif-plugged-c449f90e-6bf3-4dd2-a762-fcbf43301c6f for instance with vm_state building and task_state spawning. 
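The repeated Acquiring/acquired/"released" records in this excerpt (for example on "0789b142-4712-4b7a-9197-c3689f24df7c-events") come from oslo.concurrency's named-lock helpers serializing per-instance work such as event handling. A minimal sketch of that usage pattern, assuming the stock lockutils API and a hypothetical handler name — not Nova's actual _pop_event code:

    from oslo_concurrency import lockutils

    def pop_instance_event(instance_uuid, event_name):
        # An in-process named lock "<instance-uuid>-events" guards the
        # per-instance event bookkeeping, which is what produces the
        # acquired/"released" DEBUG lines around these records.
        with lockutils.lock('%s-events' % instance_uuid):
            # Handler body elided; the real code looks up and returns any
            # waiting event matching event_name.
            return event_name

With only internal (semaphore) locking, as here, the lock serializes threads within one nova-compute process; the external file-lock variant is a separate option and is not what these log lines show.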
[ 1713.609291] env[62519]: DEBUG nova.compute.manager [req-3d3e57ec-93cb-4126-b855-2d0b9b53ea6b req-3a87754b-041b-494c-8b3a-48754b1820be service nova] [instance: 0789b142-4712-4b7a-9197-c3689f24df7c] Received event network-changed-c449f90e-6bf3-4dd2-a762-fcbf43301c6f {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1713.609442] env[62519]: DEBUG nova.compute.manager [req-3d3e57ec-93cb-4126-b855-2d0b9b53ea6b req-3a87754b-041b-494c-8b3a-48754b1820be service nova] [instance: 0789b142-4712-4b7a-9197-c3689f24df7c] Refreshing instance network info cache due to event network-changed-c449f90e-6bf3-4dd2-a762-fcbf43301c6f. {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11628}} [ 1713.609604] env[62519]: DEBUG oslo_concurrency.lockutils [req-3d3e57ec-93cb-4126-b855-2d0b9b53ea6b req-3a87754b-041b-494c-8b3a-48754b1820be service nova] Acquiring lock "refresh_cache-0789b142-4712-4b7a-9197-c3689f24df7c" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1713.642206] env[62519]: DEBUG oslo_concurrency.lockutils [None req-5fc67e40-6ac8-45ae-a471-6bc7d3a0937f tempest-ServersTestFqdnHostnames-1137700598 tempest-ServersTestFqdnHostnames-1137700598-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.940s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1713.644739] env[62519]: DEBUG oslo_concurrency.lockutils [None req-acfac456-33c8-45db-84b1-8d13db70232a tempest-FloatingIPsAssociationNegativeTestJSON-1468566888 tempest-FloatingIPsAssociationNegativeTestJSON-1468566888-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 18.034s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1713.646638] env[62519]: INFO nova.compute.claims [None req-acfac456-33c8-45db-84b1-8d13db70232a tempest-FloatingIPsAssociationNegativeTestJSON-1468566888 tempest-FloatingIPsAssociationNegativeTestJSON-1468566888-project-member] [instance: d710c97b-a2fd-4a54-baaa-ec7664895ce7] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1713.668680] env[62519]: INFO nova.scheduler.client.report [None req-5fc67e40-6ac8-45ae-a471-6bc7d3a0937f tempest-ServersTestFqdnHostnames-1137700598 tempest-ServersTestFqdnHostnames-1137700598-project-member] Deleted allocations for instance a59be5e6-2316-4766-933a-4d01dfe4fec1 [ 1713.888031] env[62519]: DEBUG oslo_concurrency.lockutils [None req-29e9d70a-91ad-45d4-8dc5-675fe322e52d tempest-AttachInterfacesV270Test-1088965964 tempest-AttachInterfacesV270Test-1088965964-project-member] Releasing lock "refresh_cache-0789b142-4712-4b7a-9197-c3689f24df7c" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1713.888031] env[62519]: DEBUG nova.compute.manager [None req-29e9d70a-91ad-45d4-8dc5-675fe322e52d tempest-AttachInterfacesV270Test-1088965964 tempest-AttachInterfacesV270Test-1088965964-project-member] [instance: 0789b142-4712-4b7a-9197-c3689f24df7c] Instance network_info: |[{"id": "c449f90e-6bf3-4dd2-a762-fcbf43301c6f", "address": "fa:16:3e:2b:41:f0", "network": {"id": "9e5853bc-09bb-411d-8e10-82257a3b47fa", "bridge": "br-int", "label": "tempest-AttachInterfacesV270Test-1773418268-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": 
{"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7ccf89f05646414a8c8517f744692af3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2a75bb6e-6331-4429-b1b9-c968cc22b9c9", "external-id": "nsx-vlan-transportzone-244", "segmentation_id": 244, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc449f90e-6b", "ovs_interfaceid": "c449f90e-6bf3-4dd2-a762-fcbf43301c6f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62519) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2004}} [ 1713.888031] env[62519]: DEBUG oslo_concurrency.lockutils [req-3d3e57ec-93cb-4126-b855-2d0b9b53ea6b req-3a87754b-041b-494c-8b3a-48754b1820be service nova] Acquired lock "refresh_cache-0789b142-4712-4b7a-9197-c3689f24df7c" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1713.888031] env[62519]: DEBUG nova.network.neutron [req-3d3e57ec-93cb-4126-b855-2d0b9b53ea6b req-3a87754b-041b-494c-8b3a-48754b1820be service nova] [instance: 0789b142-4712-4b7a-9197-c3689f24df7c] Refreshing network info cache for port c449f90e-6bf3-4dd2-a762-fcbf43301c6f {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1713.888031] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-29e9d70a-91ad-45d4-8dc5-675fe322e52d tempest-AttachInterfacesV270Test-1088965964 tempest-AttachInterfacesV270Test-1088965964-project-member] [instance: 0789b142-4712-4b7a-9197-c3689f24df7c] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:2b:41:f0', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '2a75bb6e-6331-4429-b1b9-c968cc22b9c9', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'c449f90e-6bf3-4dd2-a762-fcbf43301c6f', 'vif_model': 'vmxnet3'}] {{(pid=62519) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1713.895671] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-29e9d70a-91ad-45d4-8dc5-675fe322e52d tempest-AttachInterfacesV270Test-1088965964 tempest-AttachInterfacesV270Test-1088965964-project-member] Creating folder: Project (7ccf89f05646414a8c8517f744692af3). Parent ref: group-v373567. {{(pid=62519) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1713.896794] env[62519]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-031a635f-8bbd-4824-bc84-b47a3702569c {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1713.911582] env[62519]: INFO nova.virt.vmwareapi.vm_util [None req-29e9d70a-91ad-45d4-8dc5-675fe322e52d tempest-AttachInterfacesV270Test-1088965964 tempest-AttachInterfacesV270Test-1088965964-project-member] Created folder: Project (7ccf89f05646414a8c8517f744692af3) in parent group-v373567. [ 1713.912019] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-29e9d70a-91ad-45d4-8dc5-675fe322e52d tempest-AttachInterfacesV270Test-1088965964 tempest-AttachInterfacesV270Test-1088965964-project-member] Creating folder: Instances. Parent ref: group-v373769. 
{{(pid=62519) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1713.912374] env[62519]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-35032ce7-0326-4282-b670-276601d0854e {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1713.921092] env[62519]: INFO nova.virt.vmwareapi.vm_util [None req-29e9d70a-91ad-45d4-8dc5-675fe322e52d tempest-AttachInterfacesV270Test-1088965964 tempest-AttachInterfacesV270Test-1088965964-project-member] Created folder: Instances in parent group-v373769. [ 1713.921936] env[62519]: DEBUG oslo.service.loopingcall [None req-29e9d70a-91ad-45d4-8dc5-675fe322e52d tempest-AttachInterfacesV270Test-1088965964 tempest-AttachInterfacesV270Test-1088965964-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62519) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1713.922253] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0789b142-4712-4b7a-9197-c3689f24df7c] Creating VM on the ESX host {{(pid=62519) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1713.922574] env[62519]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-74ff0f84-6e18-4a5e-9990-547205fd26e0 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1713.945083] env[62519]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1713.945083] env[62519]: value = "task-1802782" [ 1713.945083] env[62519]: _type = "Task" [ 1713.945083] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1713.967072] env[62519]: DEBUG oslo_concurrency.lockutils [None req-1ed07ba8-f069-4974-ad77-6c6cc84b3f48 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Acquiring lock "ad0af10d-5063-4344-b12f-1d3ee9ea1090-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1713.967565] env[62519]: DEBUG oslo_concurrency.lockutils [None req-1ed07ba8-f069-4974-ad77-6c6cc84b3f48 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Lock "ad0af10d-5063-4344-b12f-1d3ee9ea1090-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.001s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1713.967885] env[62519]: DEBUG oslo_concurrency.lockutils [None req-1ed07ba8-f069-4974-ad77-6c6cc84b3f48 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Lock "ad0af10d-5063-4344-b12f-1d3ee9ea1090-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1713.969146] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1802782, 'name': CreateVM_Task} progress is 5%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1714.051416] env[62519]: DEBUG oslo_vmware.api [None req-9d0f7f31-7c2c-4fa3-bc3d-67c656ee891c tempest-ServersAaction247Test-895786641 tempest-ServersAaction247Test-895786641-project-member] Task: {'id': task-1802778, 'name': Rename_Task, 'duration_secs': 0.189553} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1714.051847] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-9d0f7f31-7c2c-4fa3-bc3d-67c656ee891c tempest-ServersAaction247Test-895786641 tempest-ServersAaction247Test-895786641-project-member] [instance: 37689ec5-4bc8-4361-9e43-00529e2b5538] Powering on the VM {{(pid=62519) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1714.052211] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-80189a3c-86b3-435d-8c6b-a8f02ea9179c {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1714.063750] env[62519]: DEBUG oslo_vmware.api [None req-9d0f7f31-7c2c-4fa3-bc3d-67c656ee891c tempest-ServersAaction247Test-895786641 tempest-ServersAaction247Test-895786641-project-member] Waiting for the task: (returnval){ [ 1714.063750] env[62519]: value = "task-1802783" [ 1714.063750] env[62519]: _type = "Task" [ 1714.063750] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1714.073173] env[62519]: DEBUG oslo_vmware.api [None req-9d0f7f31-7c2c-4fa3-bc3d-67c656ee891c tempest-ServersAaction247Test-895786641 tempest-ServersAaction247Test-895786641-project-member] Task: {'id': task-1802783, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1714.099150] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-ed7ce4d9-ae28-4b02-96eb-927a70e00742 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] [instance: 83a680b9-0c2d-4231-9ddf-9aa90209c620] Unregistered the VM {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1714.099527] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-ed7ce4d9-ae28-4b02-96eb-927a70e00742 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] [instance: 83a680b9-0c2d-4231-9ddf-9aa90209c620] Deleting contents of the VM from datastore datastore1 {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1714.099816] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-ed7ce4d9-ae28-4b02-96eb-927a70e00742 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] Deleting the datastore file [datastore1] 83a680b9-0c2d-4231-9ddf-9aa90209c620 {{(pid=62519) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1714.101162] env[62519]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-cb8d1c57-2ea7-46fc-b362-3d933116c88f {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1714.103523] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-55b9ec2f-1756-4ef7-bc20-53ed174522c1 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] [instance: 67424299-f100-49a1-ab73-0407b60a2d9f] Unregistered the VM {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1714.103717] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-55b9ec2f-1756-4ef7-bc20-53ed174522c1 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] [instance: 67424299-f100-49a1-ab73-0407b60a2d9f] Deleting contents of the VM from datastore datastore1 {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1714.103893] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-55b9ec2f-1756-4ef7-bc20-53ed174522c1 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] Deleting the datastore file [datastore1] 67424299-f100-49a1-ab73-0407b60a2d9f {{(pid=62519) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1714.104197] env[62519]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-2a3a8a3a-746a-4d3c-a935-3ae29271348e {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1714.113775] env[62519]: DEBUG oslo_vmware.api [None req-ed7ce4d9-ae28-4b02-96eb-927a70e00742 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] Waiting for the task: (returnval){ [ 1714.113775] env[62519]: value = "task-1802784" [ 1714.113775] env[62519]: _type = "Task" [ 1714.113775] env[62519]: } to complete. 
{{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1714.114889] env[62519]: DEBUG oslo_vmware.api [None req-55b9ec2f-1756-4ef7-bc20-53ed174522c1 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] Waiting for the task: (returnval){ [ 1714.114889] env[62519]: value = "task-1802785" [ 1714.114889] env[62519]: _type = "Task" [ 1714.114889] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1714.130686] env[62519]: DEBUG oslo_vmware.api [None req-55b9ec2f-1756-4ef7-bc20-53ed174522c1 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] Task: {'id': task-1802785, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1714.134034] env[62519]: DEBUG oslo_vmware.api [None req-ed7ce4d9-ae28-4b02-96eb-927a70e00742 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] Task: {'id': task-1802784, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1714.175308] env[62519]: DEBUG oslo_concurrency.lockutils [None req-5fc67e40-6ac8-45ae-a471-6bc7d3a0937f tempest-ServersTestFqdnHostnames-1137700598 tempest-ServersTestFqdnHostnames-1137700598-project-member] Lock "a59be5e6-2316-4766-933a-4d01dfe4fec1" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 27.707s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1714.456682] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1802782, 'name': CreateVM_Task, 'duration_secs': 0.443928} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1714.456870] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0789b142-4712-4b7a-9197-c3689f24df7c] Created VM on the ESX host {{(pid=62519) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1714.457579] env[62519]: DEBUG oslo_concurrency.lockutils [None req-29e9d70a-91ad-45d4-8dc5-675fe322e52d tempest-AttachInterfacesV270Test-1088965964 tempest-AttachInterfacesV270Test-1088965964-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1714.457744] env[62519]: DEBUG oslo_concurrency.lockutils [None req-29e9d70a-91ad-45d4-8dc5-675fe322e52d tempest-AttachInterfacesV270Test-1088965964 tempest-AttachInterfacesV270Test-1088965964-project-member] Acquired lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1714.458089] env[62519]: DEBUG oslo_concurrency.lockutils [None req-29e9d70a-91ad-45d4-8dc5-675fe322e52d tempest-AttachInterfacesV270Test-1088965964 tempest-AttachInterfacesV270Test-1088965964-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1714.458416] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d84beb93-e4db-41e3-8690-82d9d8d3f54b {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1714.464566] env[62519]: DEBUG oslo_vmware.api [None req-29e9d70a-91ad-45d4-8dc5-675fe322e52d tempest-AttachInterfacesV270Test-1088965964 tempest-AttachInterfacesV270Test-1088965964-project-member] Waiting for the task: (returnval){ [ 1714.464566] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]528dd6db-4f37-526f-9fea-c275cc803b47" [ 1714.464566] env[62519]: _type = "Task" [ 1714.464566] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1714.476211] env[62519]: DEBUG oslo_vmware.api [None req-29e9d70a-91ad-45d4-8dc5-675fe322e52d tempest-AttachInterfacesV270Test-1088965964 tempest-AttachInterfacesV270Test-1088965964-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]528dd6db-4f37-526f-9fea-c275cc803b47, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1714.578222] env[62519]: DEBUG oslo_vmware.api [None req-9d0f7f31-7c2c-4fa3-bc3d-67c656ee891c tempest-ServersAaction247Test-895786641 tempest-ServersAaction247Test-895786641-project-member] Task: {'id': task-1802783, 'name': PowerOnVM_Task, 'duration_secs': 0.509933} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1714.578661] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-9d0f7f31-7c2c-4fa3-bc3d-67c656ee891c tempest-ServersAaction247Test-895786641 tempest-ServersAaction247Test-895786641-project-member] [instance: 37689ec5-4bc8-4361-9e43-00529e2b5538] Powered on the VM {{(pid=62519) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1714.578893] env[62519]: INFO nova.compute.manager [None req-9d0f7f31-7c2c-4fa3-bc3d-67c656ee891c tempest-ServersAaction247Test-895786641 tempest-ServersAaction247Test-895786641-project-member] [instance: 37689ec5-4bc8-4361-9e43-00529e2b5538] Took 5.34 seconds to spawn the instance on the hypervisor. [ 1714.579197] env[62519]: DEBUG nova.compute.manager [None req-9d0f7f31-7c2c-4fa3-bc3d-67c656ee891c tempest-ServersAaction247Test-895786641 tempest-ServersAaction247Test-895786641-project-member] [instance: 37689ec5-4bc8-4361-9e43-00529e2b5538] Checking state {{(pid=62519) _get_power_state /opt/stack/nova/nova/compute/manager.py:1799}} [ 1714.580316] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ddcf0d50-59f7-4486-bc0f-0bfc1760fb85 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1714.647225] env[62519]: DEBUG oslo_vmware.api [None req-ed7ce4d9-ae28-4b02-96eb-927a70e00742 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] Task: {'id': task-1802784, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.180661} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1714.652187] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-ed7ce4d9-ae28-4b02-96eb-927a70e00742 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] Deleted the datastore file {{(pid=62519) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1714.653230] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-ed7ce4d9-ae28-4b02-96eb-927a70e00742 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] [instance: 83a680b9-0c2d-4231-9ddf-9aa90209c620] Deleted contents of the VM from datastore datastore1 {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1714.653230] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-ed7ce4d9-ae28-4b02-96eb-927a70e00742 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] [instance: 83a680b9-0c2d-4231-9ddf-9aa90209c620] Instance destroyed {{(pid=62519) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1714.653230] env[62519]: INFO nova.compute.manager [None req-ed7ce4d9-ae28-4b02-96eb-927a70e00742 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] [instance: 83a680b9-0c2d-4231-9ddf-9aa90209c620] Took 1.60 seconds to destroy the instance on the hypervisor. [ 1714.653399] env[62519]: DEBUG oslo.service.loopingcall [None req-ed7ce4d9-ae28-4b02-96eb-927a70e00742 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62519) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1714.654056] env[62519]: DEBUG oslo_vmware.api [None req-55b9ec2f-1756-4ef7-bc20-53ed174522c1 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] Task: {'id': task-1802785, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.184039} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1714.654334] env[62519]: DEBUG nova.compute.manager [-] [instance: 83a680b9-0c2d-4231-9ddf-9aa90209c620] Deallocating network for instance {{(pid=62519) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2297}} [ 1714.654466] env[62519]: DEBUG nova.network.neutron [-] [instance: 83a680b9-0c2d-4231-9ddf-9aa90209c620] deallocate_for_instance() {{(pid=62519) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1714.656942] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-55b9ec2f-1756-4ef7-bc20-53ed174522c1 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] Deleted the datastore file {{(pid=62519) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1714.657226] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-55b9ec2f-1756-4ef7-bc20-53ed174522c1 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] [instance: 67424299-f100-49a1-ab73-0407b60a2d9f] Deleted contents of the VM from datastore datastore1 {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1714.657488] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-55b9ec2f-1756-4ef7-bc20-53ed174522c1 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] [instance: 67424299-f100-49a1-ab73-0407b60a2d9f] Instance destroyed {{(pid=62519) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1714.657729] env[62519]: INFO nova.compute.manager [None req-55b9ec2f-1756-4ef7-bc20-53ed174522c1 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] [instance: 67424299-f100-49a1-ab73-0407b60a2d9f] Took 2.02 seconds to destroy the instance on the hypervisor. [ 1714.658041] env[62519]: DEBUG oslo.service.loopingcall [None req-55b9ec2f-1756-4ef7-bc20-53ed174522c1 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62519) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1714.662217] env[62519]: DEBUG nova.compute.manager [-] [instance: 67424299-f100-49a1-ab73-0407b60a2d9f] Deallocating network for instance {{(pid=62519) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2297}} [ 1714.662353] env[62519]: DEBUG nova.network.neutron [-] [instance: 67424299-f100-49a1-ab73-0407b60a2d9f] deallocate_for_instance() {{(pid=62519) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1714.876065] env[62519]: DEBUG nova.network.neutron [req-3d3e57ec-93cb-4126-b855-2d0b9b53ea6b req-3a87754b-041b-494c-8b3a-48754b1820be service nova] [instance: 0789b142-4712-4b7a-9197-c3689f24df7c] Updated VIF entry in instance network info cache for port c449f90e-6bf3-4dd2-a762-fcbf43301c6f. 
{{(pid=62519) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1714.876065] env[62519]: DEBUG nova.network.neutron [req-3d3e57ec-93cb-4126-b855-2d0b9b53ea6b req-3a87754b-041b-494c-8b3a-48754b1820be service nova] [instance: 0789b142-4712-4b7a-9197-c3689f24df7c] Updating instance_info_cache with network_info: [{"id": "c449f90e-6bf3-4dd2-a762-fcbf43301c6f", "address": "fa:16:3e:2b:41:f0", "network": {"id": "9e5853bc-09bb-411d-8e10-82257a3b47fa", "bridge": "br-int", "label": "tempest-AttachInterfacesV270Test-1773418268-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7ccf89f05646414a8c8517f744692af3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2a75bb6e-6331-4429-b1b9-c968cc22b9c9", "external-id": "nsx-vlan-transportzone-244", "segmentation_id": 244, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc449f90e-6b", "ovs_interfaceid": "c449f90e-6bf3-4dd2-a762-fcbf43301c6f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1714.981065] env[62519]: DEBUG oslo_vmware.api [None req-29e9d70a-91ad-45d4-8dc5-675fe322e52d tempest-AttachInterfacesV270Test-1088965964 tempest-AttachInterfacesV270Test-1088965964-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]528dd6db-4f37-526f-9fea-c275cc803b47, 'name': SearchDatastore_Task, 'duration_secs': 0.016655} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1714.985187] env[62519]: DEBUG oslo_concurrency.lockutils [None req-29e9d70a-91ad-45d4-8dc5-675fe322e52d tempest-AttachInterfacesV270Test-1088965964 tempest-AttachInterfacesV270Test-1088965964-project-member] Releasing lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1714.985581] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-29e9d70a-91ad-45d4-8dc5-675fe322e52d tempest-AttachInterfacesV270Test-1088965964 tempest-AttachInterfacesV270Test-1088965964-project-member] [instance: 0789b142-4712-4b7a-9197-c3689f24df7c] Processing image 15793716-f1d9-4a86-9030-717adf498693 {{(pid=62519) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1714.985963] env[62519]: DEBUG oslo_concurrency.lockutils [None req-29e9d70a-91ad-45d4-8dc5-675fe322e52d tempest-AttachInterfacesV270Test-1088965964 tempest-AttachInterfacesV270Test-1088965964-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1714.986281] env[62519]: DEBUG oslo_concurrency.lockutils [None req-29e9d70a-91ad-45d4-8dc5-675fe322e52d tempest-AttachInterfacesV270Test-1088965964 tempest-AttachInterfacesV270Test-1088965964-project-member] Acquired lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1714.986563] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-29e9d70a-91ad-45d4-8dc5-675fe322e52d tempest-AttachInterfacesV270Test-1088965964 tempest-AttachInterfacesV270Test-1088965964-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62519) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1714.988219] env[62519]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-4621a3f9-b232-482e-bc6f-caba4329ef2a {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1715.003517] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-29e9d70a-91ad-45d4-8dc5-675fe322e52d tempest-AttachInterfacesV270Test-1088965964 tempest-AttachInterfacesV270Test-1088965964-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62519) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1715.003517] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-29e9d70a-91ad-45d4-8dc5-675fe322e52d tempest-AttachInterfacesV270Test-1088965964 tempest-AttachInterfacesV270Test-1088965964-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62519) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1715.003517] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4aa32a9d-b345-439f-91fb-7190e15bed60 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1715.017568] env[62519]: DEBUG oslo_vmware.api [None req-29e9d70a-91ad-45d4-8dc5-675fe322e52d tempest-AttachInterfacesV270Test-1088965964 tempest-AttachInterfacesV270Test-1088965964-project-member] Waiting for the task: (returnval){ [ 1715.017568] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]52ede097-fd9c-b0cd-1117-72219bd39266" [ 1715.017568] env[62519]: _type = "Task" [ 1715.017568] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1715.033717] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c59917ae-b6fa-4fd8-9fbe-72af74687327 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1715.042739] env[62519]: DEBUG oslo_vmware.api [None req-29e9d70a-91ad-45d4-8dc5-675fe322e52d tempest-AttachInterfacesV270Test-1088965964 tempest-AttachInterfacesV270Test-1088965964-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52ede097-fd9c-b0cd-1117-72219bd39266, 'name': SearchDatastore_Task} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1715.043682] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-af0baafa-ce1d-43fd-83c1-87b346ed25f9 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1715.049399] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3880b230-28cc-42a7-9d3d-8ef1dffe6b2f {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1715.054374] env[62519]: DEBUG oslo_vmware.api [None req-29e9d70a-91ad-45d4-8dc5-675fe322e52d tempest-AttachInterfacesV270Test-1088965964 tempest-AttachInterfacesV270Test-1088965964-project-member] Waiting for the task: (returnval){ [ 1715.054374] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]527f3321-669a-71d6-30eb-d02b19ed20fc" [ 1715.054374] env[62519]: _type = "Task" [ 1715.054374] env[62519]: } to complete. 
{{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1715.084797] env[62519]: DEBUG oslo_concurrency.lockutils [None req-1ed07ba8-f069-4974-ad77-6c6cc84b3f48 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Acquiring lock "refresh_cache-ad0af10d-5063-4344-b12f-1d3ee9ea1090" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1715.085573] env[62519]: DEBUG oslo_concurrency.lockutils [None req-1ed07ba8-f069-4974-ad77-6c6cc84b3f48 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Acquired lock "refresh_cache-ad0af10d-5063-4344-b12f-1d3ee9ea1090" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1715.085573] env[62519]: DEBUG nova.network.neutron [None req-1ed07ba8-f069-4974-ad77-6c6cc84b3f48 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] [instance: ad0af10d-5063-4344-b12f-1d3ee9ea1090] Building network info cache for instance {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1715.088382] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ab805dc-fe65-4a8e-aa44-e909d38baef0 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1715.098313] env[62519]: DEBUG oslo_vmware.api [None req-29e9d70a-91ad-45d4-8dc5-675fe322e52d tempest-AttachInterfacesV270Test-1088965964 tempest-AttachInterfacesV270Test-1088965964-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]527f3321-669a-71d6-30eb-d02b19ed20fc, 'name': SearchDatastore_Task, 'duration_secs': 0.028802} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1715.099190] env[62519]: DEBUG oslo_concurrency.lockutils [None req-29e9d70a-91ad-45d4-8dc5-675fe322e52d tempest-AttachInterfacesV270Test-1088965964 tempest-AttachInterfacesV270Test-1088965964-project-member] Releasing lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1715.099438] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-29e9d70a-91ad-45d4-8dc5-675fe322e52d tempest-AttachInterfacesV270Test-1088965964 tempest-AttachInterfacesV270Test-1088965964-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk to [datastore1] 0789b142-4712-4b7a-9197-c3689f24df7c/0789b142-4712-4b7a-9197-c3689f24df7c.vmdk {{(pid=62519) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1715.101949] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-3db1f39d-5faf-4e95-8eb3-40521aac2c06 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1715.106680] env[62519]: INFO nova.compute.manager [None req-9d0f7f31-7c2c-4fa3-bc3d-67c656ee891c tempest-ServersAaction247Test-895786641 tempest-ServersAaction247Test-895786641-project-member] [instance: 37689ec5-4bc8-4361-9e43-00529e2b5538] Took 41.93 seconds to build instance. 
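The CreateVM_Task, CopyVirtualDisk_Task and PowerOnVM_Task records above all follow the same oslo.vmware pattern: a vSphere *_Task method is invoked through the API session, which returns a task reference immediately, and the session then polls that task until it completes, producing the "progress is N%" and "completed successfully" records. A minimal standalone sketch of that pattern is below; the vCenter endpoint, credentials and managed-object id are placeholders for illustration only, and this is not Nova's actual vmops/vm_util code path.

# Minimal sketch (assumed placeholder endpoint, credentials and moref) of the
# invoke-then-poll pattern behind the task records in this log.
from oslo_vmware import api, vim_util

session = api.VMwareAPISession(
    'vc.example.org', 'user', 'secret',          # hypothetical vCenter and credentials
    api_retry_count=10, task_poll_interval=0.5)  # poll cadence, like the progress lines above

# Build a managed-object reference for an existing VM (placeholder id).
vm_ref = vim_util.get_moref('vm-123', 'VirtualMachine')

# Invoking a *_Task method returns a task moref right away ...
task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)

# ... and wait_for_task polls it until it reaches 'success' or raises on error,
# which is what emits the "Task: {...} progress is N%" / "completed successfully" records.
session.wait_for_task(task)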
[ 1715.110117] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e910c744-f8ce-43db-890d-97fceb0b676b {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1715.116163] env[62519]: DEBUG oslo_vmware.api [None req-29e9d70a-91ad-45d4-8dc5-675fe322e52d tempest-AttachInterfacesV270Test-1088965964 tempest-AttachInterfacesV270Test-1088965964-project-member] Waiting for the task: (returnval){ [ 1715.116163] env[62519]: value = "task-1802786" [ 1715.116163] env[62519]: _type = "Task" [ 1715.116163] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1715.133020] env[62519]: DEBUG nova.compute.provider_tree [None req-acfac456-33c8-45db-84b1-8d13db70232a tempest-FloatingIPsAssociationNegativeTestJSON-1468566888 tempest-FloatingIPsAssociationNegativeTestJSON-1468566888-project-member] Inventory has not changed in ProviderTree for provider: f8ca0d98-9158-4b85-ae0e-b106f966dd44 {{(pid=62519) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1715.136866] env[62519]: DEBUG oslo_vmware.api [None req-29e9d70a-91ad-45d4-8dc5-675fe322e52d tempest-AttachInterfacesV270Test-1088965964 tempest-AttachInterfacesV270Test-1088965964-project-member] Task: {'id': task-1802786, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1715.378290] env[62519]: DEBUG oslo_concurrency.lockutils [req-3d3e57ec-93cb-4126-b855-2d0b9b53ea6b req-3a87754b-041b-494c-8b3a-48754b1820be service nova] Releasing lock "refresh_cache-0789b142-4712-4b7a-9197-c3689f24df7c" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1715.631842] env[62519]: DEBUG oslo_vmware.api [None req-29e9d70a-91ad-45d4-8dc5-675fe322e52d tempest-AttachInterfacesV270Test-1088965964 tempest-AttachInterfacesV270Test-1088965964-project-member] Task: {'id': task-1802786, 'name': CopyVirtualDisk_Task} progress is 25%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1715.632751] env[62519]: DEBUG nova.scheduler.client.report [None req-acfac456-33c8-45db-84b1-8d13db70232a tempest-FloatingIPsAssociationNegativeTestJSON-1468566888 tempest-FloatingIPsAssociationNegativeTestJSON-1468566888-project-member] Inventory has not changed for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 157, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1715.681874] env[62519]: DEBUG nova.compute.manager [req-788bb97e-4b0f-4919-a3ee-7a0bcc9adea1 req-687fecc8-78fb-4baf-a5c4-8e2e4100b333 service nova] [instance: 83a680b9-0c2d-4231-9ddf-9aa90209c620] Received event network-vif-deleted-eac73fb2-d38f-49e3-939e-36903535dfeb {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1715.682153] env[62519]: INFO nova.compute.manager [req-788bb97e-4b0f-4919-a3ee-7a0bcc9adea1 req-687fecc8-78fb-4baf-a5c4-8e2e4100b333 service nova] [instance: 83a680b9-0c2d-4231-9ddf-9aa90209c620] Neutron deleted interface eac73fb2-d38f-49e3-939e-36903535dfeb; detaching it from the instance and deleting it from the info cache [ 1715.682408] env[62519]: DEBUG nova.network.neutron [req-788bb97e-4b0f-4919-a3ee-7a0bcc9adea1 req-687fecc8-78fb-4baf-a5c4-8e2e4100b333 service nova] [instance: 83a680b9-0c2d-4231-9ddf-9aa90209c620] Updating instance_info_cache with network_info: [] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1715.796793] env[62519]: DEBUG nova.network.neutron [-] [instance: 83a680b9-0c2d-4231-9ddf-9aa90209c620] Updating instance_info_cache with network_info: [] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1715.803931] env[62519]: DEBUG nova.network.neutron [-] [instance: 67424299-f100-49a1-ab73-0407b60a2d9f] Updating instance_info_cache with network_info: [] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1715.972999] env[62519]: DEBUG nova.network.neutron [None req-1ed07ba8-f069-4974-ad77-6c6cc84b3f48 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] [instance: ad0af10d-5063-4344-b12f-1d3ee9ea1090] Updating instance_info_cache with network_info: [{"id": "925db578-876c-414b-8500-56c73c7cdfe8", "address": "fa:16:3e:d9:6c:87", "network": {"id": "14e63db8-c42b-42e5-bd96-cab60c163758", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1328688204-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "df6727c290724a8ebef5188c77e91399", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": 
"2be3fdb5-359e-43bd-8c20-2ff00e81db55", "external-id": "nsx-vlan-transportzone-986", "segmentation_id": 986, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap925db578-87", "ovs_interfaceid": "925db578-876c-414b-8500-56c73c7cdfe8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1716.130324] env[62519]: DEBUG oslo_vmware.api [None req-29e9d70a-91ad-45d4-8dc5-675fe322e52d tempest-AttachInterfacesV270Test-1088965964 tempest-AttachInterfacesV270Test-1088965964-project-member] Task: {'id': task-1802786, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.824135} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1716.130632] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-29e9d70a-91ad-45d4-8dc5-675fe322e52d tempest-AttachInterfacesV270Test-1088965964 tempest-AttachInterfacesV270Test-1088965964-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk to [datastore1] 0789b142-4712-4b7a-9197-c3689f24df7c/0789b142-4712-4b7a-9197-c3689f24df7c.vmdk {{(pid=62519) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1716.130802] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-29e9d70a-91ad-45d4-8dc5-675fe322e52d tempest-AttachInterfacesV270Test-1088965964 tempest-AttachInterfacesV270Test-1088965964-project-member] [instance: 0789b142-4712-4b7a-9197-c3689f24df7c] Extending root virtual disk to 1048576 {{(pid=62519) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1716.131806] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-6c609db0-761f-40cc-9e5a-f150af180493 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1716.138571] env[62519]: DEBUG oslo_concurrency.lockutils [None req-acfac456-33c8-45db-84b1-8d13db70232a tempest-FloatingIPsAssociationNegativeTestJSON-1468566888 tempest-FloatingIPsAssociationNegativeTestJSON-1468566888-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.492s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1716.139687] env[62519]: DEBUG nova.compute.manager [None req-acfac456-33c8-45db-84b1-8d13db70232a tempest-FloatingIPsAssociationNegativeTestJSON-1468566888 tempest-FloatingIPsAssociationNegativeTestJSON-1468566888-project-member] [instance: d710c97b-a2fd-4a54-baaa-ec7664895ce7] Start building networks asynchronously for instance. 
{{(pid=62519) _build_resources /opt/stack/nova/nova/compute/manager.py:2838}} [ 1716.143346] env[62519]: DEBUG oslo_concurrency.lockutils [None req-19e91ec7-750b-41f0-83be-2b922f225694 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 15.676s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1716.143607] env[62519]: DEBUG nova.objects.instance [None req-19e91ec7-750b-41f0-83be-2b922f225694 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Lazy-loading 'resources' on Instance uuid 1cf285cf-8b4c-4872-b179-72e38c0143e0 {{(pid=62519) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1716.146042] env[62519]: DEBUG oslo_vmware.api [None req-29e9d70a-91ad-45d4-8dc5-675fe322e52d tempest-AttachInterfacesV270Test-1088965964 tempest-AttachInterfacesV270Test-1088965964-project-member] Waiting for the task: (returnval){ [ 1716.146042] env[62519]: value = "task-1802787" [ 1716.146042] env[62519]: _type = "Task" [ 1716.146042] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1716.157646] env[62519]: DEBUG oslo_vmware.api [None req-29e9d70a-91ad-45d4-8dc5-675fe322e52d tempest-AttachInterfacesV270Test-1088965964 tempest-AttachInterfacesV270Test-1088965964-project-member] Task: {'id': task-1802787, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1716.185872] env[62519]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-3d610e1b-f979-4c3b-8d09-c62e1681cdc8 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1716.199516] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7de778b-b6a8-4606-84af-c9f71ae74377 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1716.239041] env[62519]: DEBUG nova.compute.manager [req-788bb97e-4b0f-4919-a3ee-7a0bcc9adea1 req-687fecc8-78fb-4baf-a5c4-8e2e4100b333 service nova] [instance: 83a680b9-0c2d-4231-9ddf-9aa90209c620] Detach interface failed, port_id=eac73fb2-d38f-49e3-939e-36903535dfeb, reason: Instance 83a680b9-0c2d-4231-9ddf-9aa90209c620 could not be found. 
{{(pid=62519) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11455}} [ 1716.239192] env[62519]: DEBUG nova.compute.manager [req-788bb97e-4b0f-4919-a3ee-7a0bcc9adea1 req-687fecc8-78fb-4baf-a5c4-8e2e4100b333 service nova] [instance: 67424299-f100-49a1-ab73-0407b60a2d9f] Received event network-vif-deleted-ccc74c83-e994-4ae0-b4c0-92ba09a53009 {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1716.239368] env[62519]: INFO nova.compute.manager [req-788bb97e-4b0f-4919-a3ee-7a0bcc9adea1 req-687fecc8-78fb-4baf-a5c4-8e2e4100b333 service nova] [instance: 67424299-f100-49a1-ab73-0407b60a2d9f] Neutron deleted interface ccc74c83-e994-4ae0-b4c0-92ba09a53009; detaching it from the instance and deleting it from the info cache [ 1716.239538] env[62519]: DEBUG nova.network.neutron [req-788bb97e-4b0f-4919-a3ee-7a0bcc9adea1 req-687fecc8-78fb-4baf-a5c4-8e2e4100b333 service nova] [instance: 67424299-f100-49a1-ab73-0407b60a2d9f] Updating instance_info_cache with network_info: [] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1716.299782] env[62519]: INFO nova.compute.manager [-] [instance: 83a680b9-0c2d-4231-9ddf-9aa90209c620] Took 1.65 seconds to deallocate network for instance. [ 1716.306162] env[62519]: INFO nova.compute.manager [-] [instance: 67424299-f100-49a1-ab73-0407b60a2d9f] Took 1.64 seconds to deallocate network for instance. [ 1716.478210] env[62519]: DEBUG oslo_concurrency.lockutils [None req-1ed07ba8-f069-4974-ad77-6c6cc84b3f48 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Releasing lock "refresh_cache-ad0af10d-5063-4344-b12f-1d3ee9ea1090" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1716.624911] env[62519]: DEBUG oslo_concurrency.lockutils [None req-9d0f7f31-7c2c-4fa3-bc3d-67c656ee891c tempest-ServersAaction247Test-895786641 tempest-ServersAaction247Test-895786641-project-member] Lock "37689ec5-4bc8-4361-9e43-00529e2b5538" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 44.459s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1716.648446] env[62519]: DEBUG nova.compute.utils [None req-acfac456-33c8-45db-84b1-8d13db70232a tempest-FloatingIPsAssociationNegativeTestJSON-1468566888 tempest-FloatingIPsAssociationNegativeTestJSON-1468566888-project-member] Using /dev/sd instead of None {{(pid=62519) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1716.650676] env[62519]: DEBUG nova.compute.manager [None req-acfac456-33c8-45db-84b1-8d13db70232a tempest-FloatingIPsAssociationNegativeTestJSON-1468566888 tempest-FloatingIPsAssociationNegativeTestJSON-1468566888-project-member] [instance: d710c97b-a2fd-4a54-baaa-ec7664895ce7] Allocating IP information in the background. 
{{(pid=62519) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1989}} [ 1716.650852] env[62519]: DEBUG nova.network.neutron [None req-acfac456-33c8-45db-84b1-8d13db70232a tempest-FloatingIPsAssociationNegativeTestJSON-1468566888 tempest-FloatingIPsAssociationNegativeTestJSON-1468566888-project-member] [instance: d710c97b-a2fd-4a54-baaa-ec7664895ce7] allocate_for_instance() {{(pid=62519) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1716.666026] env[62519]: DEBUG nova.compute.manager [None req-b58ddda0-1329-4358-8e12-c17a0c453f2f tempest-ServersAaction247Test-895786641 tempest-ServersAaction247Test-895786641-project-member] [instance: 37689ec5-4bc8-4361-9e43-00529e2b5538] Checking state {{(pid=62519) _get_power_state /opt/stack/nova/nova/compute/manager.py:1799}} [ 1716.667193] env[62519]: DEBUG oslo_vmware.api [None req-29e9d70a-91ad-45d4-8dc5-675fe322e52d tempest-AttachInterfacesV270Test-1088965964 tempest-AttachInterfacesV270Test-1088965964-project-member] Task: {'id': task-1802787, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.093808} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1716.668118] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd544299-e4a7-435f-aa36-1ac7221d4a39 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1716.671599] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-29e9d70a-91ad-45d4-8dc5-675fe322e52d tempest-AttachInterfacesV270Test-1088965964 tempest-AttachInterfacesV270Test-1088965964-project-member] [instance: 0789b142-4712-4b7a-9197-c3689f24df7c] Extended root virtual disk {{(pid=62519) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1716.672465] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c2bd7d9-68e3-4fd9-95a8-c3a76ab8fdbe {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1716.704601] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-29e9d70a-91ad-45d4-8dc5-675fe322e52d tempest-AttachInterfacesV270Test-1088965964 tempest-AttachInterfacesV270Test-1088965964-project-member] [instance: 0789b142-4712-4b7a-9197-c3689f24df7c] Reconfiguring VM instance instance-0000004b to attach disk [datastore1] 0789b142-4712-4b7a-9197-c3689f24df7c/0789b142-4712-4b7a-9197-c3689f24df7c.vmdk or device None with type sparse {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1716.707916] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-67f85be0-03f8-48a9-b3d2-b3fd5e25a8d7 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1716.724581] env[62519]: DEBUG nova.policy [None req-acfac456-33c8-45db-84b1-8d13db70232a tempest-FloatingIPsAssociationNegativeTestJSON-1468566888 tempest-FloatingIPsAssociationNegativeTestJSON-1468566888-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f7ffc464fd6647e193f410aac2991943', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'b5fd21cccbea4d67a6fa4237436af0d7', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 
'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62519) authorize /opt/stack/nova/nova/policy.py:192}} [ 1716.732891] env[62519]: DEBUG oslo_vmware.api [None req-29e9d70a-91ad-45d4-8dc5-675fe322e52d tempest-AttachInterfacesV270Test-1088965964 tempest-AttachInterfacesV270Test-1088965964-project-member] Waiting for the task: (returnval){ [ 1716.732891] env[62519]: value = "task-1802788" [ 1716.732891] env[62519]: _type = "Task" [ 1716.732891] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1716.746493] env[62519]: DEBUG oslo_concurrency.lockutils [None req-f420554c-c19d-4ffe-bdae-cf1b2a9f54e7 tempest-ServersAaction247Test-895786641 tempest-ServersAaction247Test-895786641-project-member] Acquiring lock "37689ec5-4bc8-4361-9e43-00529e2b5538" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1716.746678] env[62519]: DEBUG oslo_concurrency.lockutils [None req-f420554c-c19d-4ffe-bdae-cf1b2a9f54e7 tempest-ServersAaction247Test-895786641 tempest-ServersAaction247Test-895786641-project-member] Lock "37689ec5-4bc8-4361-9e43-00529e2b5538" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1716.746881] env[62519]: DEBUG oslo_concurrency.lockutils [None req-f420554c-c19d-4ffe-bdae-cf1b2a9f54e7 tempest-ServersAaction247Test-895786641 tempest-ServersAaction247Test-895786641-project-member] Acquiring lock "37689ec5-4bc8-4361-9e43-00529e2b5538-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1716.747073] env[62519]: DEBUG oslo_concurrency.lockutils [None req-f420554c-c19d-4ffe-bdae-cf1b2a9f54e7 tempest-ServersAaction247Test-895786641 tempest-ServersAaction247Test-895786641-project-member] Lock "37689ec5-4bc8-4361-9e43-00529e2b5538-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1716.747244] env[62519]: DEBUG oslo_concurrency.lockutils [None req-f420554c-c19d-4ffe-bdae-cf1b2a9f54e7 tempest-ServersAaction247Test-895786641 tempest-ServersAaction247Test-895786641-project-member] Lock "37689ec5-4bc8-4361-9e43-00529e2b5538-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1716.748835] env[62519]: DEBUG oslo_vmware.api [None req-29e9d70a-91ad-45d4-8dc5-675fe322e52d tempest-AttachInterfacesV270Test-1088965964 tempest-AttachInterfacesV270Test-1088965964-project-member] Task: {'id': task-1802788, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1716.749413] env[62519]: INFO nova.compute.manager [None req-f420554c-c19d-4ffe-bdae-cf1b2a9f54e7 tempest-ServersAaction247Test-895786641 tempest-ServersAaction247Test-895786641-project-member] [instance: 37689ec5-4bc8-4361-9e43-00529e2b5538] Terminating instance [ 1716.753291] env[62519]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-b972bb51-0f2a-4bb2-9529-e6a4ec5bbbd8 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1716.767665] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0112424d-d624-4a41-8069-087f4cfdcb0b {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1716.811205] env[62519]: DEBUG oslo_concurrency.lockutils [None req-ed7ce4d9-ae28-4b02-96eb-927a70e00742 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1716.811679] env[62519]: DEBUG nova.compute.manager [req-788bb97e-4b0f-4919-a3ee-7a0bcc9adea1 req-687fecc8-78fb-4baf-a5c4-8e2e4100b333 service nova] [instance: 67424299-f100-49a1-ab73-0407b60a2d9f] Detach interface failed, port_id=ccc74c83-e994-4ae0-b4c0-92ba09a53009, reason: Instance 67424299-f100-49a1-ab73-0407b60a2d9f could not be found. {{(pid=62519) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11455}} [ 1716.815362] env[62519]: DEBUG oslo_concurrency.lockutils [None req-55b9ec2f-1756-4ef7-bc20-53ed174522c1 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1717.001898] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f51dd7e-eb58-43f0-ba42-ca8fb9a6348a {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1717.025547] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11869888-cc08-4fe4-985b-8d0bfb40fa44 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1717.036017] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-1ed07ba8-f069-4974-ad77-6c6cc84b3f48 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] [instance: ad0af10d-5063-4344-b12f-1d3ee9ea1090] Updating instance 'ad0af10d-5063-4344-b12f-1d3ee9ea1090' progress to 83 {{(pid=62519) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1717.124069] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bff0547e-7542-4260-baf4-3e2f955f40ad {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1717.133524] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-ae29d5c9-fa0d-4e67-a442-975eff7d5161 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1717.137868] env[62519]: DEBUG nova.network.neutron [None req-acfac456-33c8-45db-84b1-8d13db70232a tempest-FloatingIPsAssociationNegativeTestJSON-1468566888 tempest-FloatingIPsAssociationNegativeTestJSON-1468566888-project-member] [instance: d710c97b-a2fd-4a54-baaa-ec7664895ce7] Successfully created port: 8c956f8c-d517-457d-add9-83f72e6718dc {{(pid=62519) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1717.168973] env[62519]: DEBUG nova.compute.manager [None req-acfac456-33c8-45db-84b1-8d13db70232a tempest-FloatingIPsAssociationNegativeTestJSON-1468566888 tempest-FloatingIPsAssociationNegativeTestJSON-1468566888-project-member] [instance: d710c97b-a2fd-4a54-baaa-ec7664895ce7] Start building block device mappings for instance. {{(pid=62519) _build_resources /opt/stack/nova/nova/compute/manager.py:2873}} [ 1717.173669] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f75c696-721b-4b86-a75c-a542a3b18fee {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1717.185806] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-afa0d786-4294-44a9-b1f6-a2918816e94e {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1717.211564] env[62519]: INFO nova.compute.manager [None req-b58ddda0-1329-4358-8e12-c17a0c453f2f tempest-ServersAaction247Test-895786641 tempest-ServersAaction247Test-895786641-project-member] [instance: 37689ec5-4bc8-4361-9e43-00529e2b5538] instance snapshotting [ 1717.212447] env[62519]: DEBUG nova.objects.instance [None req-b58ddda0-1329-4358-8e12-c17a0c453f2f tempest-ServersAaction247Test-895786641 tempest-ServersAaction247Test-895786641-project-member] Lazy-loading 'flavor' on Instance uuid 37689ec5-4bc8-4361-9e43-00529e2b5538 {{(pid=62519) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1717.217586] env[62519]: DEBUG nova.compute.provider_tree [None req-19e91ec7-750b-41f0-83be-2b922f225694 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Updating inventory in ProviderTree for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1717.244431] env[62519]: DEBUG oslo_vmware.api [None req-29e9d70a-91ad-45d4-8dc5-675fe322e52d tempest-AttachInterfacesV270Test-1088965964 tempest-AttachInterfacesV270Test-1088965964-project-member] Task: {'id': task-1802788, 'name': ReconfigVM_Task, 'duration_secs': 0.326965} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1717.244725] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-29e9d70a-91ad-45d4-8dc5-675fe322e52d tempest-AttachInterfacesV270Test-1088965964 tempest-AttachInterfacesV270Test-1088965964-project-member] [instance: 0789b142-4712-4b7a-9197-c3689f24df7c] Reconfigured VM instance instance-0000004b to attach disk [datastore1] 0789b142-4712-4b7a-9197-c3689f24df7c/0789b142-4712-4b7a-9197-c3689f24df7c.vmdk or device None with type sparse {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1717.245481] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-9c1b1445-ce8f-4127-9035-137b597a2a4d {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1717.254171] env[62519]: DEBUG oslo_vmware.api [None req-29e9d70a-91ad-45d4-8dc5-675fe322e52d tempest-AttachInterfacesV270Test-1088965964 tempest-AttachInterfacesV270Test-1088965964-project-member] Waiting for the task: (returnval){ [ 1717.254171] env[62519]: value = "task-1802789" [ 1717.254171] env[62519]: _type = "Task" [ 1717.254171] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1717.257562] env[62519]: DEBUG oslo_concurrency.lockutils [None req-f420554c-c19d-4ffe-bdae-cf1b2a9f54e7 tempest-ServersAaction247Test-895786641 tempest-ServersAaction247Test-895786641-project-member] Acquiring lock "refresh_cache-37689ec5-4bc8-4361-9e43-00529e2b5538" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1717.257712] env[62519]: DEBUG oslo_concurrency.lockutils [None req-f420554c-c19d-4ffe-bdae-cf1b2a9f54e7 tempest-ServersAaction247Test-895786641 tempest-ServersAaction247Test-895786641-project-member] Acquired lock "refresh_cache-37689ec5-4bc8-4361-9e43-00529e2b5538" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1717.257886] env[62519]: DEBUG nova.network.neutron [None req-f420554c-c19d-4ffe-bdae-cf1b2a9f54e7 tempest-ServersAaction247Test-895786641 tempest-ServersAaction247Test-895786641-project-member] [instance: 37689ec5-4bc8-4361-9e43-00529e2b5538] Building network info cache for instance {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1717.264730] env[62519]: DEBUG oslo_vmware.api [None req-29e9d70a-91ad-45d4-8dc5-675fe322e52d tempest-AttachInterfacesV270Test-1088965964 tempest-AttachInterfacesV270Test-1088965964-project-member] Task: {'id': task-1802789, 'name': Rename_Task} progress is 5%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1717.541589] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-1ed07ba8-f069-4974-ad77-6c6cc84b3f48 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] [instance: ad0af10d-5063-4344-b12f-1d3ee9ea1090] Powering on the VM {{(pid=62519) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1717.541893] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-0e3ab12f-a8b5-4be6-b08d-4c28b4ad89ec {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1717.550817] env[62519]: DEBUG oslo_vmware.api [None req-1ed07ba8-f069-4974-ad77-6c6cc84b3f48 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Waiting for the task: (returnval){ [ 1717.550817] env[62519]: value = "task-1802790" [ 1717.550817] env[62519]: _type = "Task" [ 1717.550817] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1717.559498] env[62519]: DEBUG oslo_vmware.api [None req-1ed07ba8-f069-4974-ad77-6c6cc84b3f48 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Task: {'id': task-1802790, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1717.722830] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53086774-471e-4602-bad6-8a936e8228ca {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1717.741775] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-727618f2-0715-411c-8e22-2193051c99a7 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1717.752575] env[62519]: DEBUG nova.scheduler.client.report [None req-19e91ec7-750b-41f0-83be-2b922f225694 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Updated inventory for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 with generation 103 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 1717.752668] env[62519]: DEBUG nova.compute.provider_tree [None req-19e91ec7-750b-41f0-83be-2b922f225694 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Updating resource provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 generation from 103 to 104 during operation: update_inventory {{(pid=62519) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1717.752864] env[62519]: DEBUG nova.compute.provider_tree [None req-19e91ec7-750b-41f0-83be-2b922f225694 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Updating inventory in ProviderTree for provider 
f8ca0d98-9158-4b85-ae0e-b106f966dd44 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1717.768131] env[62519]: DEBUG oslo_vmware.api [None req-29e9d70a-91ad-45d4-8dc5-675fe322e52d tempest-AttachInterfacesV270Test-1088965964 tempest-AttachInterfacesV270Test-1088965964-project-member] Task: {'id': task-1802789, 'name': Rename_Task, 'duration_secs': 0.179109} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1717.768998] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-29e9d70a-91ad-45d4-8dc5-675fe322e52d tempest-AttachInterfacesV270Test-1088965964 tempest-AttachInterfacesV270Test-1088965964-project-member] [instance: 0789b142-4712-4b7a-9197-c3689f24df7c] Powering on the VM {{(pid=62519) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1717.769279] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-32c64247-a996-4c61-9897-0ab4deefcd8a {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1717.776477] env[62519]: DEBUG oslo_vmware.api [None req-29e9d70a-91ad-45d4-8dc5-675fe322e52d tempest-AttachInterfacesV270Test-1088965964 tempest-AttachInterfacesV270Test-1088965964-project-member] Waiting for the task: (returnval){ [ 1717.776477] env[62519]: value = "task-1802791" [ 1717.776477] env[62519]: _type = "Task" [ 1717.776477] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1717.781480] env[62519]: DEBUG nova.network.neutron [None req-f420554c-c19d-4ffe-bdae-cf1b2a9f54e7 tempest-ServersAaction247Test-895786641 tempest-ServersAaction247Test-895786641-project-member] [instance: 37689ec5-4bc8-4361-9e43-00529e2b5538] Instance cache missing network info. {{(pid=62519) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1717.789772] env[62519]: DEBUG oslo_vmware.api [None req-29e9d70a-91ad-45d4-8dc5-675fe322e52d tempest-AttachInterfacesV270Test-1088965964 tempest-AttachInterfacesV270Test-1088965964-project-member] Task: {'id': task-1802791, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1717.862116] env[62519]: DEBUG nova.network.neutron [None req-f420554c-c19d-4ffe-bdae-cf1b2a9f54e7 tempest-ServersAaction247Test-895786641 tempest-ServersAaction247Test-895786641-project-member] [instance: 37689ec5-4bc8-4361-9e43-00529e2b5538] Updating instance_info_cache with network_info: [] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1718.066034] env[62519]: DEBUG oslo_vmware.api [None req-1ed07ba8-f069-4974-ad77-6c6cc84b3f48 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Task: {'id': task-1802790, 'name': PowerOnVM_Task, 'duration_secs': 0.429518} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1718.066034] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-1ed07ba8-f069-4974-ad77-6c6cc84b3f48 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] [instance: ad0af10d-5063-4344-b12f-1d3ee9ea1090] Powered on the VM {{(pid=62519) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1718.066034] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-1ed07ba8-f069-4974-ad77-6c6cc84b3f48 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] [instance: ad0af10d-5063-4344-b12f-1d3ee9ea1090] Updating instance 'ad0af10d-5063-4344-b12f-1d3ee9ea1090' progress to 100 {{(pid=62519) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1718.179267] env[62519]: DEBUG nova.compute.manager [None req-acfac456-33c8-45db-84b1-8d13db70232a tempest-FloatingIPsAssociationNegativeTestJSON-1468566888 tempest-FloatingIPsAssociationNegativeTestJSON-1468566888-project-member] [instance: d710c97b-a2fd-4a54-baaa-ec7664895ce7] Start spawning the instance on the hypervisor. {{(pid=62519) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2647}} [ 1718.221109] env[62519]: DEBUG nova.virt.hardware [None req-acfac456-33c8-45db-84b1-8d13db70232a tempest-FloatingIPsAssociationNegativeTestJSON-1468566888 tempest-FloatingIPsAssociationNegativeTestJSON-1468566888-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T07:56:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T07:55:55Z,direct_url=,disk_format='vmdk',id=15793716-f1d9-4a86-9030-717adf498693,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4069337684d348e0a1ab4eb6a1a2b14d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T07:55:56Z,virtual_size=,visibility=), allow threads: False {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1718.221109] env[62519]: DEBUG nova.virt.hardware [None req-acfac456-33c8-45db-84b1-8d13db70232a tempest-FloatingIPsAssociationNegativeTestJSON-1468566888 tempest-FloatingIPsAssociationNegativeTestJSON-1468566888-project-member] Flavor limits 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1718.221109] env[62519]: DEBUG nova.virt.hardware [None req-acfac456-33c8-45db-84b1-8d13db70232a tempest-FloatingIPsAssociationNegativeTestJSON-1468566888 tempest-FloatingIPsAssociationNegativeTestJSON-1468566888-project-member] Image limits 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1718.221109] env[62519]: DEBUG nova.virt.hardware [None req-acfac456-33c8-45db-84b1-8d13db70232a tempest-FloatingIPsAssociationNegativeTestJSON-1468566888 tempest-FloatingIPsAssociationNegativeTestJSON-1468566888-project-member] Flavor pref 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1718.221109] env[62519]: DEBUG nova.virt.hardware [None req-acfac456-33c8-45db-84b1-8d13db70232a 
tempest-FloatingIPsAssociationNegativeTestJSON-1468566888 tempest-FloatingIPsAssociationNegativeTestJSON-1468566888-project-member] Image pref 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1718.221109] env[62519]: DEBUG nova.virt.hardware [None req-acfac456-33c8-45db-84b1-8d13db70232a tempest-FloatingIPsAssociationNegativeTestJSON-1468566888 tempest-FloatingIPsAssociationNegativeTestJSON-1468566888-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1718.221109] env[62519]: DEBUG nova.virt.hardware [None req-acfac456-33c8-45db-84b1-8d13db70232a tempest-FloatingIPsAssociationNegativeTestJSON-1468566888 tempest-FloatingIPsAssociationNegativeTestJSON-1468566888-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1718.221109] env[62519]: DEBUG nova.virt.hardware [None req-acfac456-33c8-45db-84b1-8d13db70232a tempest-FloatingIPsAssociationNegativeTestJSON-1468566888 tempest-FloatingIPsAssociationNegativeTestJSON-1468566888-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62519) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1718.221109] env[62519]: DEBUG nova.virt.hardware [None req-acfac456-33c8-45db-84b1-8d13db70232a tempest-FloatingIPsAssociationNegativeTestJSON-1468566888 tempest-FloatingIPsAssociationNegativeTestJSON-1468566888-project-member] Got 1 possible topologies {{(pid=62519) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1718.221109] env[62519]: DEBUG nova.virt.hardware [None req-acfac456-33c8-45db-84b1-8d13db70232a tempest-FloatingIPsAssociationNegativeTestJSON-1468566888 tempest-FloatingIPsAssociationNegativeTestJSON-1468566888-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1718.221965] env[62519]: DEBUG nova.virt.hardware [None req-acfac456-33c8-45db-84b1-8d13db70232a tempest-FloatingIPsAssociationNegativeTestJSON-1468566888 tempest-FloatingIPsAssociationNegativeTestJSON-1468566888-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1718.223194] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b2fcaae-890c-48e2-a4fb-0392ebb92887 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1718.233203] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21dbe431-0538-497b-b9a8-a07e48fab4fc {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1718.259148] env[62519]: DEBUG nova.compute.manager [None req-b58ddda0-1329-4358-8e12-c17a0c453f2f tempest-ServersAaction247Test-895786641 tempest-ServersAaction247Test-895786641-project-member] [instance: 37689ec5-4bc8-4361-9e43-00529e2b5538] Instance disappeared during snapshot {{(pid=62519) _snapshot_instance /opt/stack/nova/nova/compute/manager.py:4607}} [ 1718.264807] env[62519]: DEBUG oslo_concurrency.lockutils [None 
req-19e91ec7-750b-41f0-83be-2b922f225694 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.121s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1718.268325] env[62519]: DEBUG oslo_concurrency.lockutils [None req-da318ded-1134-4cd5-93b1-e00afd02b3d1 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 16.035s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1718.269593] env[62519]: DEBUG nova.objects.instance [None req-da318ded-1134-4cd5-93b1-e00afd02b3d1 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Lazy-loading 'resources' on Instance uuid 40507d8c-8f30-45d4-9c65-03f8b1271afb {{(pid=62519) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1718.290587] env[62519]: DEBUG oslo_vmware.api [None req-29e9d70a-91ad-45d4-8dc5-675fe322e52d tempest-AttachInterfacesV270Test-1088965964 tempest-AttachInterfacesV270Test-1088965964-project-member] Task: {'id': task-1802791, 'name': PowerOnVM_Task} progress is 88%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1718.304923] env[62519]: INFO nova.scheduler.client.report [None req-19e91ec7-750b-41f0-83be-2b922f225694 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Deleted allocations for instance 1cf285cf-8b4c-4872-b179-72e38c0143e0 [ 1718.365461] env[62519]: DEBUG oslo_concurrency.lockutils [None req-f420554c-c19d-4ffe-bdae-cf1b2a9f54e7 tempest-ServersAaction247Test-895786641 tempest-ServersAaction247Test-895786641-project-member] Releasing lock "refresh_cache-37689ec5-4bc8-4361-9e43-00529e2b5538" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1718.365980] env[62519]: DEBUG nova.compute.manager [None req-f420554c-c19d-4ffe-bdae-cf1b2a9f54e7 tempest-ServersAaction247Test-895786641 tempest-ServersAaction247Test-895786641-project-member] [instance: 37689ec5-4bc8-4361-9e43-00529e2b5538] Start destroying the instance on the hypervisor. 
{{(pid=62519) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3166}} [ 1718.366487] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-f420554c-c19d-4ffe-bdae-cf1b2a9f54e7 tempest-ServersAaction247Test-895786641 tempest-ServersAaction247Test-895786641-project-member] [instance: 37689ec5-4bc8-4361-9e43-00529e2b5538] Destroying instance {{(pid=62519) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1718.368300] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a17e63d-410c-4395-9be8-ef76be83a94e {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1718.377309] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-f420554c-c19d-4ffe-bdae-cf1b2a9f54e7 tempest-ServersAaction247Test-895786641 tempest-ServersAaction247Test-895786641-project-member] [instance: 37689ec5-4bc8-4361-9e43-00529e2b5538] Powering off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1718.378224] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c465a909-1f46-4b00-b58a-a3b04ac1168e {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1718.386821] env[62519]: DEBUG oslo_vmware.api [None req-f420554c-c19d-4ffe-bdae-cf1b2a9f54e7 tempest-ServersAaction247Test-895786641 tempest-ServersAaction247Test-895786641-project-member] Waiting for the task: (returnval){ [ 1718.386821] env[62519]: value = "task-1802792" [ 1718.386821] env[62519]: _type = "Task" [ 1718.386821] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1718.398574] env[62519]: DEBUG oslo_vmware.api [None req-f420554c-c19d-4ffe-bdae-cf1b2a9f54e7 tempest-ServersAaction247Test-895786641 tempest-ServersAaction247Test-895786641-project-member] Task: {'id': task-1802792, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1718.493785] env[62519]: DEBUG nova.compute.manager [None req-b58ddda0-1329-4358-8e12-c17a0c453f2f tempest-ServersAaction247Test-895786641 tempest-ServersAaction247Test-895786641-project-member] [instance: 37689ec5-4bc8-4361-9e43-00529e2b5538] Found 0 images (rotation: 2) {{(pid=62519) _rotate_backups /opt/stack/nova/nova/compute/manager.py:4998}} [ 1718.768180] env[62519]: DEBUG nova.compute.manager [req-a4eaa0d9-99cb-42c0-bf96-4da60566fbef req-e6093c09-13b4-488e-b22d-f1b233fac93e service nova] [instance: d710c97b-a2fd-4a54-baaa-ec7664895ce7] Received event network-vif-plugged-8c956f8c-d517-457d-add9-83f72e6718dc {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1718.768408] env[62519]: DEBUG oslo_concurrency.lockutils [req-a4eaa0d9-99cb-42c0-bf96-4da60566fbef req-e6093c09-13b4-488e-b22d-f1b233fac93e service nova] Acquiring lock "d710c97b-a2fd-4a54-baaa-ec7664895ce7-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1718.769497] env[62519]: DEBUG oslo_concurrency.lockutils [req-a4eaa0d9-99cb-42c0-bf96-4da60566fbef req-e6093c09-13b4-488e-b22d-f1b233fac93e service nova] Lock "d710c97b-a2fd-4a54-baaa-ec7664895ce7-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1718.769497] env[62519]: DEBUG oslo_concurrency.lockutils [req-a4eaa0d9-99cb-42c0-bf96-4da60566fbef req-e6093c09-13b4-488e-b22d-f1b233fac93e service nova] Lock "d710c97b-a2fd-4a54-baaa-ec7664895ce7-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1718.769497] env[62519]: DEBUG nova.compute.manager [req-a4eaa0d9-99cb-42c0-bf96-4da60566fbef req-e6093c09-13b4-488e-b22d-f1b233fac93e service nova] [instance: d710c97b-a2fd-4a54-baaa-ec7664895ce7] No waiting events found dispatching network-vif-plugged-8c956f8c-d517-457d-add9-83f72e6718dc {{(pid=62519) pop_instance_event /opt/stack/nova/nova/compute/manager.py:323}} [ 1718.769732] env[62519]: WARNING nova.compute.manager [req-a4eaa0d9-99cb-42c0-bf96-4da60566fbef req-e6093c09-13b4-488e-b22d-f1b233fac93e service nova] [instance: d710c97b-a2fd-4a54-baaa-ec7664895ce7] Received unexpected event network-vif-plugged-8c956f8c-d517-457d-add9-83f72e6718dc for instance with vm_state building and task_state spawning. [ 1718.775638] env[62519]: DEBUG nova.objects.instance [None req-da318ded-1134-4cd5-93b1-e00afd02b3d1 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Lazy-loading 'numa_topology' on Instance uuid 40507d8c-8f30-45d4-9c65-03f8b1271afb {{(pid=62519) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1718.789082] env[62519]: DEBUG oslo_vmware.api [None req-29e9d70a-91ad-45d4-8dc5-675fe322e52d tempest-AttachInterfacesV270Test-1088965964 tempest-AttachInterfacesV270Test-1088965964-project-member] Task: {'id': task-1802791, 'name': PowerOnVM_Task, 'duration_secs': 0.733581} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1718.790173] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-29e9d70a-91ad-45d4-8dc5-675fe322e52d tempest-AttachInterfacesV270Test-1088965964 tempest-AttachInterfacesV270Test-1088965964-project-member] [instance: 0789b142-4712-4b7a-9197-c3689f24df7c] Powered on the VM {{(pid=62519) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1718.790173] env[62519]: INFO nova.compute.manager [None req-29e9d70a-91ad-45d4-8dc5-675fe322e52d tempest-AttachInterfacesV270Test-1088965964 tempest-AttachInterfacesV270Test-1088965964-project-member] [instance: 0789b142-4712-4b7a-9197-c3689f24df7c] Took 7.53 seconds to spawn the instance on the hypervisor. [ 1718.790405] env[62519]: DEBUG nova.compute.manager [None req-29e9d70a-91ad-45d4-8dc5-675fe322e52d tempest-AttachInterfacesV270Test-1088965964 tempest-AttachInterfacesV270Test-1088965964-project-member] [instance: 0789b142-4712-4b7a-9197-c3689f24df7c] Checking state {{(pid=62519) _get_power_state /opt/stack/nova/nova/compute/manager.py:1799}} [ 1718.791417] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-def461bf-b2a0-4536-b9d3-0d397b919b77 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1718.815490] env[62519]: DEBUG oslo_concurrency.lockutils [None req-19e91ec7-750b-41f0-83be-2b922f225694 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Lock "1cf285cf-8b4c-4872-b179-72e38c0143e0" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 21.953s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1718.898373] env[62519]: DEBUG oslo_vmware.api [None req-f420554c-c19d-4ffe-bdae-cf1b2a9f54e7 tempest-ServersAaction247Test-895786641 tempest-ServersAaction247Test-895786641-project-member] Task: {'id': task-1802792, 'name': PowerOffVM_Task, 'duration_secs': 0.152059} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1718.898846] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-f420554c-c19d-4ffe-bdae-cf1b2a9f54e7 tempest-ServersAaction247Test-895786641 tempest-ServersAaction247Test-895786641-project-member] [instance: 37689ec5-4bc8-4361-9e43-00529e2b5538] Powered off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1718.899164] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-f420554c-c19d-4ffe-bdae-cf1b2a9f54e7 tempest-ServersAaction247Test-895786641 tempest-ServersAaction247Test-895786641-project-member] [instance: 37689ec5-4bc8-4361-9e43-00529e2b5538] Unregistering the VM {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1718.899515] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-99cac22b-d63c-447d-ad1e-ff428fc8d06f {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1718.926961] env[62519]: DEBUG nova.network.neutron [None req-acfac456-33c8-45db-84b1-8d13db70232a tempest-FloatingIPsAssociationNegativeTestJSON-1468566888 tempest-FloatingIPsAssociationNegativeTestJSON-1468566888-project-member] [instance: d710c97b-a2fd-4a54-baaa-ec7664895ce7] Successfully updated port: 8c956f8c-d517-457d-add9-83f72e6718dc {{(pid=62519) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1718.934182] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-f420554c-c19d-4ffe-bdae-cf1b2a9f54e7 tempest-ServersAaction247Test-895786641 tempest-ServersAaction247Test-895786641-project-member] [instance: 37689ec5-4bc8-4361-9e43-00529e2b5538] Unregistered the VM {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1718.934182] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-f420554c-c19d-4ffe-bdae-cf1b2a9f54e7 tempest-ServersAaction247Test-895786641 tempest-ServersAaction247Test-895786641-project-member] [instance: 37689ec5-4bc8-4361-9e43-00529e2b5538] Deleting contents of the VM from datastore datastore1 {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1718.934182] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-f420554c-c19d-4ffe-bdae-cf1b2a9f54e7 tempest-ServersAaction247Test-895786641 tempest-ServersAaction247Test-895786641-project-member] Deleting the datastore file [datastore1] 37689ec5-4bc8-4361-9e43-00529e2b5538 {{(pid=62519) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1718.934182] env[62519]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d4bd572e-29b5-4fa5-b1ff-03e0b7446730 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1718.946538] env[62519]: DEBUG oslo_vmware.api [None req-f420554c-c19d-4ffe-bdae-cf1b2a9f54e7 tempest-ServersAaction247Test-895786641 tempest-ServersAaction247Test-895786641-project-member] Waiting for the task: (returnval){ [ 1718.946538] env[62519]: value = "task-1802794" [ 1718.946538] env[62519]: _type = "Task" [ 1718.946538] env[62519]: } to complete. 
{{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1718.958411] env[62519]: DEBUG oslo_vmware.api [None req-f420554c-c19d-4ffe-bdae-cf1b2a9f54e7 tempest-ServersAaction247Test-895786641 tempest-ServersAaction247Test-895786641-project-member] Task: {'id': task-1802794, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1719.280964] env[62519]: DEBUG nova.objects.base [None req-da318ded-1134-4cd5-93b1-e00afd02b3d1 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Object Instance<40507d8c-8f30-45d4-9c65-03f8b1271afb> lazy-loaded attributes: resources,numa_topology {{(pid=62519) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1719.315802] env[62519]: INFO nova.compute.manager [None req-29e9d70a-91ad-45d4-8dc5-675fe322e52d tempest-AttachInterfacesV270Test-1088965964 tempest-AttachInterfacesV270Test-1088965964-project-member] [instance: 0789b142-4712-4b7a-9197-c3689f24df7c] Took 37.16 seconds to build instance. [ 1719.431302] env[62519]: DEBUG oslo_concurrency.lockutils [None req-acfac456-33c8-45db-84b1-8d13db70232a tempest-FloatingIPsAssociationNegativeTestJSON-1468566888 tempest-FloatingIPsAssociationNegativeTestJSON-1468566888-project-member] Acquiring lock "refresh_cache-d710c97b-a2fd-4a54-baaa-ec7664895ce7" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1719.431449] env[62519]: DEBUG oslo_concurrency.lockutils [None req-acfac456-33c8-45db-84b1-8d13db70232a tempest-FloatingIPsAssociationNegativeTestJSON-1468566888 tempest-FloatingIPsAssociationNegativeTestJSON-1468566888-project-member] Acquired lock "refresh_cache-d710c97b-a2fd-4a54-baaa-ec7664895ce7" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1719.431599] env[62519]: DEBUG nova.network.neutron [None req-acfac456-33c8-45db-84b1-8d13db70232a tempest-FloatingIPsAssociationNegativeTestJSON-1468566888 tempest-FloatingIPsAssociationNegativeTestJSON-1468566888-project-member] [instance: d710c97b-a2fd-4a54-baaa-ec7664895ce7] Building network info cache for instance {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1719.459760] env[62519]: DEBUG oslo_vmware.api [None req-f420554c-c19d-4ffe-bdae-cf1b2a9f54e7 tempest-ServersAaction247Test-895786641 tempest-ServersAaction247Test-895786641-project-member] Task: {'id': task-1802794, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.099741} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1719.460046] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-f420554c-c19d-4ffe-bdae-cf1b2a9f54e7 tempest-ServersAaction247Test-895786641 tempest-ServersAaction247Test-895786641-project-member] Deleted the datastore file {{(pid=62519) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1719.460238] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-f420554c-c19d-4ffe-bdae-cf1b2a9f54e7 tempest-ServersAaction247Test-895786641 tempest-ServersAaction247Test-895786641-project-member] [instance: 37689ec5-4bc8-4361-9e43-00529e2b5538] Deleted contents of the VM from datastore datastore1 {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1719.460413] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-f420554c-c19d-4ffe-bdae-cf1b2a9f54e7 tempest-ServersAaction247Test-895786641 tempest-ServersAaction247Test-895786641-project-member] [instance: 37689ec5-4bc8-4361-9e43-00529e2b5538] Instance destroyed {{(pid=62519) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1719.460659] env[62519]: INFO nova.compute.manager [None req-f420554c-c19d-4ffe-bdae-cf1b2a9f54e7 tempest-ServersAaction247Test-895786641 tempest-ServersAaction247Test-895786641-project-member] [instance: 37689ec5-4bc8-4361-9e43-00529e2b5538] Took 1.09 seconds to destroy the instance on the hypervisor. [ 1719.460922] env[62519]: DEBUG oslo.service.loopingcall [None req-f420554c-c19d-4ffe-bdae-cf1b2a9f54e7 tempest-ServersAaction247Test-895786641 tempest-ServersAaction247Test-895786641-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62519) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1719.461131] env[62519]: DEBUG nova.compute.manager [-] [instance: 37689ec5-4bc8-4361-9e43-00529e2b5538] Deallocating network for instance {{(pid=62519) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2297}} [ 1719.461752] env[62519]: DEBUG nova.network.neutron [-] [instance: 37689ec5-4bc8-4361-9e43-00529e2b5538] deallocate_for_instance() {{(pid=62519) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1719.482351] env[62519]: DEBUG nova.network.neutron [-] [instance: 37689ec5-4bc8-4361-9e43-00529e2b5538] Instance cache missing network info. 
{{(pid=62519) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1719.652021] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e42ac5f-4dd2-4323-b657-80cca529d6ea {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1719.658140] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec38b404-68ad-4e7b-bfa3-fc463aab308a {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1719.691073] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f9475ad-d644-4921-8fae-b01aaef7fa91 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1719.698502] env[62519]: DEBUG oslo_concurrency.lockutils [None req-3bd0340d-5c3f-465b-9e78-38b536e3a079 tempest-AttachInterfacesV270Test-1088965964 tempest-AttachInterfacesV270Test-1088965964-project-member] Acquiring lock "interface-0789b142-4712-4b7a-9197-c3689f24df7c-None" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1719.698962] env[62519]: DEBUG oslo_concurrency.lockutils [None req-3bd0340d-5c3f-465b-9e78-38b536e3a079 tempest-AttachInterfacesV270Test-1088965964 tempest-AttachInterfacesV270Test-1088965964-project-member] Lock "interface-0789b142-4712-4b7a-9197-c3689f24df7c-None" acquired by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: waited 0.001s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1719.699443] env[62519]: DEBUG nova.objects.instance [None req-3bd0340d-5c3f-465b-9e78-38b536e3a079 tempest-AttachInterfacesV270Test-1088965964 tempest-AttachInterfacesV270Test-1088965964-project-member] Lazy-loading 'flavor' on Instance uuid 0789b142-4712-4b7a-9197-c3689f24df7c {{(pid=62519) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1719.702113] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fef15d32-d4d7-43e4-b3e3-3dbb30c257f5 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1719.717484] env[62519]: DEBUG nova.compute.provider_tree [None req-da318ded-1134-4cd5-93b1-e00afd02b3d1 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Inventory has not changed in ProviderTree for provider: f8ca0d98-9158-4b85-ae0e-b106f966dd44 {{(pid=62519) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1719.974980] env[62519]: DEBUG nova.network.neutron [None req-acfac456-33c8-45db-84b1-8d13db70232a tempest-FloatingIPsAssociationNegativeTestJSON-1468566888 tempest-FloatingIPsAssociationNegativeTestJSON-1468566888-project-member] [instance: d710c97b-a2fd-4a54-baaa-ec7664895ce7] Instance cache missing network info. 
{{(pid=62519) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1719.986730] env[62519]: DEBUG nova.network.neutron [-] [instance: 37689ec5-4bc8-4361-9e43-00529e2b5538] Updating instance_info_cache with network_info: [] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1720.141166] env[62519]: DEBUG nova.network.neutron [None req-acfac456-33c8-45db-84b1-8d13db70232a tempest-FloatingIPsAssociationNegativeTestJSON-1468566888 tempest-FloatingIPsAssociationNegativeTestJSON-1468566888-project-member] [instance: d710c97b-a2fd-4a54-baaa-ec7664895ce7] Updating instance_info_cache with network_info: [{"id": "8c956f8c-d517-457d-add9-83f72e6718dc", "address": "fa:16:3e:80:d4:5e", "network": {"id": "d62cba85-2cb0-4dab-a59f-7004ed84a75f", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationNegativeTestJSON-943660167-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b5fd21cccbea4d67a6fa4237436af0d7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a555680e-4721-4509-97e4-ced9dc17c13e", "external-id": "nsx-vlan-transportzone-4", "segmentation_id": 4, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8c956f8c-d5", "ovs_interfaceid": "8c956f8c-d517-457d-add9-83f72e6718dc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1720.208443] env[62519]: DEBUG nova.objects.instance [None req-3bd0340d-5c3f-465b-9e78-38b536e3a079 tempest-AttachInterfacesV270Test-1088965964 tempest-AttachInterfacesV270Test-1088965964-project-member] Lazy-loading 'pci_requests' on Instance uuid 0789b142-4712-4b7a-9197-c3689f24df7c {{(pid=62519) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1720.221522] env[62519]: DEBUG nova.scheduler.client.report [None req-da318ded-1134-4cd5-93b1-e00afd02b3d1 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Inventory has not changed for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1720.490110] env[62519]: INFO nova.compute.manager [-] [instance: 37689ec5-4bc8-4361-9e43-00529e2b5538] Took 1.03 seconds to deallocate network for instance. 
[ 1720.644979] env[62519]: DEBUG oslo_concurrency.lockutils [None req-acfac456-33c8-45db-84b1-8d13db70232a tempest-FloatingIPsAssociationNegativeTestJSON-1468566888 tempest-FloatingIPsAssociationNegativeTestJSON-1468566888-project-member] Releasing lock "refresh_cache-d710c97b-a2fd-4a54-baaa-ec7664895ce7" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1720.645363] env[62519]: DEBUG nova.compute.manager [None req-acfac456-33c8-45db-84b1-8d13db70232a tempest-FloatingIPsAssociationNegativeTestJSON-1468566888 tempest-FloatingIPsAssociationNegativeTestJSON-1468566888-project-member] [instance: d710c97b-a2fd-4a54-baaa-ec7664895ce7] Instance network_info: |[{"id": "8c956f8c-d517-457d-add9-83f72e6718dc", "address": "fa:16:3e:80:d4:5e", "network": {"id": "d62cba85-2cb0-4dab-a59f-7004ed84a75f", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationNegativeTestJSON-943660167-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b5fd21cccbea4d67a6fa4237436af0d7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a555680e-4721-4509-97e4-ced9dc17c13e", "external-id": "nsx-vlan-transportzone-4", "segmentation_id": 4, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8c956f8c-d5", "ovs_interfaceid": "8c956f8c-d517-457d-add9-83f72e6718dc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62519) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2004}} [ 1720.645761] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-acfac456-33c8-45db-84b1-8d13db70232a tempest-FloatingIPsAssociationNegativeTestJSON-1468566888 tempest-FloatingIPsAssociationNegativeTestJSON-1468566888-project-member] [instance: d710c97b-a2fd-4a54-baaa-ec7664895ce7] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:80:d4:5e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a555680e-4721-4509-97e4-ced9dc17c13e', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '8c956f8c-d517-457d-add9-83f72e6718dc', 'vif_model': 'vmxnet3'}] {{(pid=62519) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1720.654068] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-acfac456-33c8-45db-84b1-8d13db70232a tempest-FloatingIPsAssociationNegativeTestJSON-1468566888 tempest-FloatingIPsAssociationNegativeTestJSON-1468566888-project-member] Creating folder: Project (b5fd21cccbea4d67a6fa4237436af0d7). Parent ref: group-v373567. 
{{(pid=62519) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1720.654365] env[62519]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-1e1ad5ae-9717-4188-930d-8e84bb99294a {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1720.668117] env[62519]: INFO nova.virt.vmwareapi.vm_util [None req-acfac456-33c8-45db-84b1-8d13db70232a tempest-FloatingIPsAssociationNegativeTestJSON-1468566888 tempest-FloatingIPsAssociationNegativeTestJSON-1468566888-project-member] Created folder: Project (b5fd21cccbea4d67a6fa4237436af0d7) in parent group-v373567. [ 1720.668431] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-acfac456-33c8-45db-84b1-8d13db70232a tempest-FloatingIPsAssociationNegativeTestJSON-1468566888 tempest-FloatingIPsAssociationNegativeTestJSON-1468566888-project-member] Creating folder: Instances. Parent ref: group-v373772. {{(pid=62519) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1720.668743] env[62519]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ef302a9d-e394-47d9-b0ae-09019b78101c {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1720.680415] env[62519]: INFO nova.virt.vmwareapi.vm_util [None req-acfac456-33c8-45db-84b1-8d13db70232a tempest-FloatingIPsAssociationNegativeTestJSON-1468566888 tempest-FloatingIPsAssociationNegativeTestJSON-1468566888-project-member] Created folder: Instances in parent group-v373772. [ 1720.680677] env[62519]: DEBUG oslo.service.loopingcall [None req-acfac456-33c8-45db-84b1-8d13db70232a tempest-FloatingIPsAssociationNegativeTestJSON-1468566888 tempest-FloatingIPsAssociationNegativeTestJSON-1468566888-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62519) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1720.680881] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d710c97b-a2fd-4a54-baaa-ec7664895ce7] Creating VM on the ESX host {{(pid=62519) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1720.681157] env[62519]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-ea121a71-b9ae-437e-a9a8-ca007db5b349 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1720.704743] env[62519]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1720.704743] env[62519]: value = "task-1802797" [ 1720.704743] env[62519]: _type = "Task" [ 1720.704743] env[62519]: } to complete. 
{{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1720.713718] env[62519]: DEBUG nova.objects.base [None req-3bd0340d-5c3f-465b-9e78-38b536e3a079 tempest-AttachInterfacesV270Test-1088965964 tempest-AttachInterfacesV270Test-1088965964-project-member] Object Instance<0789b142-4712-4b7a-9197-c3689f24df7c> lazy-loaded attributes: flavor,pci_requests {{(pid=62519) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1720.713807] env[62519]: DEBUG nova.network.neutron [None req-3bd0340d-5c3f-465b-9e78-38b536e3a079 tempest-AttachInterfacesV270Test-1088965964 tempest-AttachInterfacesV270Test-1088965964-project-member] [instance: 0789b142-4712-4b7a-9197-c3689f24df7c] allocate_for_instance() {{(pid=62519) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1720.715879] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1802797, 'name': CreateVM_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1720.725976] env[62519]: DEBUG oslo_concurrency.lockutils [None req-da318ded-1134-4cd5-93b1-e00afd02b3d1 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.458s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1720.728658] env[62519]: DEBUG oslo_concurrency.lockutils [None req-7126d37a-0998-4413-bff4-8ca1a98f18aa tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 17.004s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1720.729218] env[62519]: DEBUG nova.objects.instance [None req-7126d37a-0998-4413-bff4-8ca1a98f18aa tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Lazy-loading 'resources' on Instance uuid 54a8aa34-1595-4494-ba68-6915611631ce {{(pid=62519) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1720.778027] env[62519]: DEBUG oslo_concurrency.lockutils [None req-38e1cc42-c585-4d7c-ba6b-635100d384aa tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Acquiring lock "e3a19583-b332-40e3-bdd0-d254f7a78b0a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1720.778027] env[62519]: DEBUG oslo_concurrency.lockutils [None req-38e1cc42-c585-4d7c-ba6b-635100d384aa tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Lock "e3a19583-b332-40e3-bdd0-d254f7a78b0a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1720.816272] env[62519]: DEBUG oslo_concurrency.lockutils [None req-3bd0340d-5c3f-465b-9e78-38b536e3a079 tempest-AttachInterfacesV270Test-1088965964 tempest-AttachInterfacesV270Test-1088965964-project-member] Lock "interface-0789b142-4712-4b7a-9197-c3689f24df7c-None" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 1.117s {{(pid=62519) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1720.823296] env[62519]: DEBUG oslo_concurrency.lockutils [None req-29e9d70a-91ad-45d4-8dc5-675fe322e52d tempest-AttachInterfacesV270Test-1088965964 tempest-AttachInterfacesV270Test-1088965964-project-member] Lock "0789b142-4712-4b7a-9197-c3689f24df7c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 39.674s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1720.869428] env[62519]: DEBUG oslo_concurrency.lockutils [None req-1e42fcec-c842-44cb-b7d8-0548bb9c2f2e tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Acquiring lock "ad0af10d-5063-4344-b12f-1d3ee9ea1090" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1720.869685] env[62519]: DEBUG oslo_concurrency.lockutils [None req-1e42fcec-c842-44cb-b7d8-0548bb9c2f2e tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Lock "ad0af10d-5063-4344-b12f-1d3ee9ea1090" acquired by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1720.869875] env[62519]: DEBUG nova.compute.manager [None req-1e42fcec-c842-44cb-b7d8-0548bb9c2f2e tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] [instance: ad0af10d-5063-4344-b12f-1d3ee9ea1090] Going to confirm migration 3 {{(pid=62519) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:5235}} [ 1720.996484] env[62519]: DEBUG oslo_concurrency.lockutils [None req-f420554c-c19d-4ffe-bdae-cf1b2a9f54e7 tempest-ServersAaction247Test-895786641 tempest-ServersAaction247Test-895786641-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1721.217613] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1802797, 'name': CreateVM_Task} progress is 99%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1721.239487] env[62519]: DEBUG oslo_concurrency.lockutils [None req-da318ded-1134-4cd5-93b1-e00afd02b3d1 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Lock "40507d8c-8f30-45d4-9c65-03f8b1271afb" "released" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: held 41.033s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1721.240519] env[62519]: DEBUG oslo_concurrency.lockutils [None req-ee5b9640-c92a-4528-839d-7cce77d32182 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Lock "40507d8c-8f30-45d4-9c65-03f8b1271afb" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 17.715s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1721.241503] env[62519]: DEBUG oslo_concurrency.lockutils [None req-ee5b9640-c92a-4528-839d-7cce77d32182 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Acquiring lock "40507d8c-8f30-45d4-9c65-03f8b1271afb-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1721.241503] env[62519]: DEBUG oslo_concurrency.lockutils [None req-ee5b9640-c92a-4528-839d-7cce77d32182 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Lock "40507d8c-8f30-45d4-9c65-03f8b1271afb-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1721.241503] env[62519]: DEBUG oslo_concurrency.lockutils [None req-ee5b9640-c92a-4528-839d-7cce77d32182 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Lock "40507d8c-8f30-45d4-9c65-03f8b1271afb-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1721.245551] env[62519]: INFO nova.compute.manager [None req-ee5b9640-c92a-4528-839d-7cce77d32182 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] [instance: 40507d8c-8f30-45d4-9c65-03f8b1271afb] Terminating instance [ 1721.282072] env[62519]: DEBUG nova.compute.manager [None req-38e1cc42-c585-4d7c-ba6b-635100d384aa tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] [instance: e3a19583-b332-40e3-bdd0-d254f7a78b0a] Starting instance... 
{{(pid=62519) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2441}} [ 1721.304403] env[62519]: DEBUG nova.compute.manager [req-4f26d4d6-0b8f-4ce5-ac71-66dd8375404c req-f61c6106-4ae0-476d-b5a3-f04e10cd308d service nova] [instance: d710c97b-a2fd-4a54-baaa-ec7664895ce7] Received event network-changed-8c956f8c-d517-457d-add9-83f72e6718dc {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1721.304655] env[62519]: DEBUG nova.compute.manager [req-4f26d4d6-0b8f-4ce5-ac71-66dd8375404c req-f61c6106-4ae0-476d-b5a3-f04e10cd308d service nova] [instance: d710c97b-a2fd-4a54-baaa-ec7664895ce7] Refreshing instance network info cache due to event network-changed-8c956f8c-d517-457d-add9-83f72e6718dc. {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11628}} [ 1721.304848] env[62519]: DEBUG oslo_concurrency.lockutils [req-4f26d4d6-0b8f-4ce5-ac71-66dd8375404c req-f61c6106-4ae0-476d-b5a3-f04e10cd308d service nova] Acquiring lock "refresh_cache-d710c97b-a2fd-4a54-baaa-ec7664895ce7" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1721.304995] env[62519]: DEBUG oslo_concurrency.lockutils [req-4f26d4d6-0b8f-4ce5-ac71-66dd8375404c req-f61c6106-4ae0-476d-b5a3-f04e10cd308d service nova] Acquired lock "refresh_cache-d710c97b-a2fd-4a54-baaa-ec7664895ce7" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1721.305454] env[62519]: DEBUG nova.network.neutron [req-4f26d4d6-0b8f-4ce5-ac71-66dd8375404c req-f61c6106-4ae0-476d-b5a3-f04e10cd308d service nova] [instance: d710c97b-a2fd-4a54-baaa-ec7664895ce7] Refreshing network info cache for port 8c956f8c-d517-457d-add9-83f72e6718dc {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1721.477169] env[62519]: DEBUG oslo_concurrency.lockutils [None req-1e42fcec-c842-44cb-b7d8-0548bb9c2f2e tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Acquiring lock "refresh_cache-ad0af10d-5063-4344-b12f-1d3ee9ea1090" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1721.477427] env[62519]: DEBUG oslo_concurrency.lockutils [None req-1e42fcec-c842-44cb-b7d8-0548bb9c2f2e tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Acquired lock "refresh_cache-ad0af10d-5063-4344-b12f-1d3ee9ea1090" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1721.477652] env[62519]: DEBUG nova.network.neutron [None req-1e42fcec-c842-44cb-b7d8-0548bb9c2f2e tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] [instance: ad0af10d-5063-4344-b12f-1d3ee9ea1090] Building network info cache for instance {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1721.477797] env[62519]: DEBUG nova.objects.instance [None req-1e42fcec-c842-44cb-b7d8-0548bb9c2f2e tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Lazy-loading 'info_cache' on Instance uuid ad0af10d-5063-4344-b12f-1d3ee9ea1090 {{(pid=62519) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1721.615017] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0aecc670-0601-4b8f-9277-5abe994a1a71 {{(pid=62519) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1721.623341] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c29f4327-bbe3-4619-a526-7c09a93035e5 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1721.656475] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d69bf3fb-52a6-457a-bc90-d3427ef5c7d9 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1721.664879] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0e64b9c-e5d8-4568-83fb-dd6388b30c49 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1721.680628] env[62519]: DEBUG nova.compute.provider_tree [None req-7126d37a-0998-4413-bff4-8ca1a98f18aa tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Inventory has not changed in ProviderTree for provider: f8ca0d98-9158-4b85-ae0e-b106f966dd44 {{(pid=62519) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1721.716283] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1802797, 'name': CreateVM_Task, 'duration_secs': 0.628724} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1721.716460] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d710c97b-a2fd-4a54-baaa-ec7664895ce7] Created VM on the ESX host {{(pid=62519) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1721.717300] env[62519]: DEBUG oslo_concurrency.lockutils [None req-acfac456-33c8-45db-84b1-8d13db70232a tempest-FloatingIPsAssociationNegativeTestJSON-1468566888 tempest-FloatingIPsAssociationNegativeTestJSON-1468566888-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1721.717474] env[62519]: DEBUG oslo_concurrency.lockutils [None req-acfac456-33c8-45db-84b1-8d13db70232a tempest-FloatingIPsAssociationNegativeTestJSON-1468566888 tempest-FloatingIPsAssociationNegativeTestJSON-1468566888-project-member] Acquired lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1721.717822] env[62519]: DEBUG oslo_concurrency.lockutils [None req-acfac456-33c8-45db-84b1-8d13db70232a tempest-FloatingIPsAssociationNegativeTestJSON-1468566888 tempest-FloatingIPsAssociationNegativeTestJSON-1468566888-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1721.718137] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f16575f0-20fa-49ab-989d-2516c7710c7f {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1721.723317] env[62519]: DEBUG oslo_vmware.api [None req-acfac456-33c8-45db-84b1-8d13db70232a tempest-FloatingIPsAssociationNegativeTestJSON-1468566888 
tempest-FloatingIPsAssociationNegativeTestJSON-1468566888-project-member] Waiting for the task: (returnval){ [ 1721.723317] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]528b3376-6dd3-8efe-4d66-5c3e6ada05c0" [ 1721.723317] env[62519]: _type = "Task" [ 1721.723317] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1721.732493] env[62519]: DEBUG oslo_vmware.api [None req-acfac456-33c8-45db-84b1-8d13db70232a tempest-FloatingIPsAssociationNegativeTestJSON-1468566888 tempest-FloatingIPsAssociationNegativeTestJSON-1468566888-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]528b3376-6dd3-8efe-4d66-5c3e6ada05c0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1721.750498] env[62519]: DEBUG nova.compute.manager [None req-ee5b9640-c92a-4528-839d-7cce77d32182 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] [instance: 40507d8c-8f30-45d4-9c65-03f8b1271afb] Start destroying the instance on the hypervisor. {{(pid=62519) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3166}} [ 1721.750750] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-ee5b9640-c92a-4528-839d-7cce77d32182 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] [instance: 40507d8c-8f30-45d4-9c65-03f8b1271afb] Destroying instance {{(pid=62519) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1721.751301] env[62519]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-9ae21333-27a9-4f4e-b964-63a0543485ab {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1721.762373] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dbafb4f1-b4c5-446c-949a-d8cf00e389c7 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1721.804343] env[62519]: WARNING nova.virt.vmwareapi.vmops [None req-ee5b9640-c92a-4528-839d-7cce77d32182 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] [instance: 40507d8c-8f30-45d4-9c65-03f8b1271afb] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 40507d8c-8f30-45d4-9c65-03f8b1271afb could not be found. [ 1721.804558] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-ee5b9640-c92a-4528-839d-7cce77d32182 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] [instance: 40507d8c-8f30-45d4-9c65-03f8b1271afb] Instance destroyed {{(pid=62519) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1721.804739] env[62519]: INFO nova.compute.manager [None req-ee5b9640-c92a-4528-839d-7cce77d32182 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] [instance: 40507d8c-8f30-45d4-9c65-03f8b1271afb] Took 0.05 seconds to destroy the instance on the hypervisor. [ 1721.804988] env[62519]: DEBUG oslo.service.loopingcall [None req-ee5b9640-c92a-4528-839d-7cce77d32182 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62519) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1721.808267] env[62519]: DEBUG nova.compute.manager [-] [instance: 40507d8c-8f30-45d4-9c65-03f8b1271afb] Deallocating network for instance {{(pid=62519) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2297}} [ 1721.808370] env[62519]: DEBUG nova.network.neutron [-] [instance: 40507d8c-8f30-45d4-9c65-03f8b1271afb] deallocate_for_instance() {{(pid=62519) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1721.812937] env[62519]: DEBUG oslo_concurrency.lockutils [None req-3fd0f8cc-63e6-40bc-a024-cf62b0b44c98 tempest-ServerRescueTestJSONUnderV235-415838788 tempest-ServerRescueTestJSONUnderV235-415838788-project-member] Acquiring lock "156ed02a-3365-4a4f-b4de-ea86920d3baf" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1721.813183] env[62519]: DEBUG oslo_concurrency.lockutils [None req-3fd0f8cc-63e6-40bc-a024-cf62b0b44c98 tempest-ServerRescueTestJSONUnderV235-415838788 tempest-ServerRescueTestJSONUnderV235-415838788-project-member] Lock "156ed02a-3365-4a4f-b4de-ea86920d3baf" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1721.825292] env[62519]: DEBUG oslo_concurrency.lockutils [None req-38e1cc42-c585-4d7c-ba6b-635100d384aa tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1722.163263] env[62519]: DEBUG nova.network.neutron [req-4f26d4d6-0b8f-4ce5-ac71-66dd8375404c req-f61c6106-4ae0-476d-b5a3-f04e10cd308d service nova] [instance: d710c97b-a2fd-4a54-baaa-ec7664895ce7] Updated VIF entry in instance network info cache for port 8c956f8c-d517-457d-add9-83f72e6718dc. 
{{(pid=62519) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1722.163673] env[62519]: DEBUG nova.network.neutron [req-4f26d4d6-0b8f-4ce5-ac71-66dd8375404c req-f61c6106-4ae0-476d-b5a3-f04e10cd308d service nova] [instance: d710c97b-a2fd-4a54-baaa-ec7664895ce7] Updating instance_info_cache with network_info: [{"id": "8c956f8c-d517-457d-add9-83f72e6718dc", "address": "fa:16:3e:80:d4:5e", "network": {"id": "d62cba85-2cb0-4dab-a59f-7004ed84a75f", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationNegativeTestJSON-943660167-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b5fd21cccbea4d67a6fa4237436af0d7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a555680e-4721-4509-97e4-ced9dc17c13e", "external-id": "nsx-vlan-transportzone-4", "segmentation_id": 4, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8c956f8c-d5", "ovs_interfaceid": "8c956f8c-d517-457d-add9-83f72e6718dc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1722.184598] env[62519]: DEBUG nova.scheduler.client.report [None req-7126d37a-0998-4413-bff4-8ca1a98f18aa tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Inventory has not changed for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1722.234248] env[62519]: DEBUG oslo_vmware.api [None req-acfac456-33c8-45db-84b1-8d13db70232a tempest-FloatingIPsAssociationNegativeTestJSON-1468566888 tempest-FloatingIPsAssociationNegativeTestJSON-1468566888-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]528b3376-6dd3-8efe-4d66-5c3e6ada05c0, 'name': SearchDatastore_Task, 'duration_secs': 0.011295} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1722.234507] env[62519]: DEBUG oslo_concurrency.lockutils [None req-acfac456-33c8-45db-84b1-8d13db70232a tempest-FloatingIPsAssociationNegativeTestJSON-1468566888 tempest-FloatingIPsAssociationNegativeTestJSON-1468566888-project-member] Releasing lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1722.234739] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-acfac456-33c8-45db-84b1-8d13db70232a tempest-FloatingIPsAssociationNegativeTestJSON-1468566888 tempest-FloatingIPsAssociationNegativeTestJSON-1468566888-project-member] [instance: d710c97b-a2fd-4a54-baaa-ec7664895ce7] Processing image 15793716-f1d9-4a86-9030-717adf498693 {{(pid=62519) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1722.234975] env[62519]: DEBUG oslo_concurrency.lockutils [None req-acfac456-33c8-45db-84b1-8d13db70232a tempest-FloatingIPsAssociationNegativeTestJSON-1468566888 tempest-FloatingIPsAssociationNegativeTestJSON-1468566888-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1722.235122] env[62519]: DEBUG oslo_concurrency.lockutils [None req-acfac456-33c8-45db-84b1-8d13db70232a tempest-FloatingIPsAssociationNegativeTestJSON-1468566888 tempest-FloatingIPsAssociationNegativeTestJSON-1468566888-project-member] Acquired lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1722.235295] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-acfac456-33c8-45db-84b1-8d13db70232a tempest-FloatingIPsAssociationNegativeTestJSON-1468566888 tempest-FloatingIPsAssociationNegativeTestJSON-1468566888-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62519) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1722.235546] env[62519]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-11fc08c6-10c6-4aef-8059-7e597099e15e {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1722.243799] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-acfac456-33c8-45db-84b1-8d13db70232a tempest-FloatingIPsAssociationNegativeTestJSON-1468566888 tempest-FloatingIPsAssociationNegativeTestJSON-1468566888-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62519) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1722.243961] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-acfac456-33c8-45db-84b1-8d13db70232a tempest-FloatingIPsAssociationNegativeTestJSON-1468566888 tempest-FloatingIPsAssociationNegativeTestJSON-1468566888-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62519) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1722.244688] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-61bd350b-0e02-4ffb-9673-aa6fb3cc6e68 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1722.249688] env[62519]: DEBUG oslo_vmware.api [None req-acfac456-33c8-45db-84b1-8d13db70232a tempest-FloatingIPsAssociationNegativeTestJSON-1468566888 tempest-FloatingIPsAssociationNegativeTestJSON-1468566888-project-member] Waiting for the task: (returnval){ [ 1722.249688] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]52b3e722-73aa-eac5-ba2f-b1b36026c533" [ 1722.249688] env[62519]: _type = "Task" [ 1722.249688] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1722.258577] env[62519]: DEBUG oslo_vmware.api [None req-acfac456-33c8-45db-84b1-8d13db70232a tempest-FloatingIPsAssociationNegativeTestJSON-1468566888 tempest-FloatingIPsAssociationNegativeTestJSON-1468566888-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52b3e722-73aa-eac5-ba2f-b1b36026c533, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1722.315369] env[62519]: DEBUG nova.compute.manager [None req-3fd0f8cc-63e6-40bc-a024-cf62b0b44c98 tempest-ServerRescueTestJSONUnderV235-415838788 tempest-ServerRescueTestJSONUnderV235-415838788-project-member] [instance: 156ed02a-3365-4a4f-b4de-ea86920d3baf] Starting instance... {{(pid=62519) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2441}} [ 1722.666793] env[62519]: DEBUG oslo_concurrency.lockutils [req-4f26d4d6-0b8f-4ce5-ac71-66dd8375404c req-f61c6106-4ae0-476d-b5a3-f04e10cd308d service nova] Releasing lock "refresh_cache-d710c97b-a2fd-4a54-baaa-ec7664895ce7" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1722.682325] env[62519]: DEBUG nova.network.neutron [-] [instance: 40507d8c-8f30-45d4-9c65-03f8b1271afb] Updating instance_info_cache with network_info: [] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1722.688637] env[62519]: DEBUG oslo_concurrency.lockutils [None req-7126d37a-0998-4413-bff4-8ca1a98f18aa tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.960s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1722.691257] env[62519]: DEBUG nova.network.neutron [None req-1e42fcec-c842-44cb-b7d8-0548bb9c2f2e tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] [instance: ad0af10d-5063-4344-b12f-1d3ee9ea1090] Updating instance_info_cache with network_info: [{"id": "925db578-876c-414b-8500-56c73c7cdfe8", "address": "fa:16:3e:d9:6c:87", "network": {"id": "14e63db8-c42b-42e5-bd96-cab60c163758", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1328688204-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], 
"version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "df6727c290724a8ebef5188c77e91399", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2be3fdb5-359e-43bd-8c20-2ff00e81db55", "external-id": "nsx-vlan-transportzone-986", "segmentation_id": 986, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap925db578-87", "ovs_interfaceid": "925db578-876c-414b-8500-56c73c7cdfe8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1722.692453] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 18.816s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1722.717144] env[62519]: INFO nova.scheduler.client.report [None req-7126d37a-0998-4413-bff4-8ca1a98f18aa tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Deleted allocations for instance 54a8aa34-1595-4494-ba68-6915611631ce [ 1722.764193] env[62519]: DEBUG oslo_vmware.api [None req-acfac456-33c8-45db-84b1-8d13db70232a tempest-FloatingIPsAssociationNegativeTestJSON-1468566888 tempest-FloatingIPsAssociationNegativeTestJSON-1468566888-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52b3e722-73aa-eac5-ba2f-b1b36026c533, 'name': SearchDatastore_Task, 'duration_secs': 0.009824} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1722.764984] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-64cc3727-dd10-4b33-81e9-142deef8d6a0 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1722.771635] env[62519]: DEBUG oslo_vmware.api [None req-acfac456-33c8-45db-84b1-8d13db70232a tempest-FloatingIPsAssociationNegativeTestJSON-1468566888 tempest-FloatingIPsAssociationNegativeTestJSON-1468566888-project-member] Waiting for the task: (returnval){ [ 1722.771635] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]523b533a-82ca-72f8-3bad-587d029dbef5" [ 1722.771635] env[62519]: _type = "Task" [ 1722.771635] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1722.781179] env[62519]: DEBUG oslo_vmware.api [None req-acfac456-33c8-45db-84b1-8d13db70232a tempest-FloatingIPsAssociationNegativeTestJSON-1468566888 tempest-FloatingIPsAssociationNegativeTestJSON-1468566888-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]523b533a-82ca-72f8-3bad-587d029dbef5, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1722.835871] env[62519]: DEBUG oslo_concurrency.lockutils [None req-3fd0f8cc-63e6-40bc-a024-cf62b0b44c98 tempest-ServerRescueTestJSONUnderV235-415838788 tempest-ServerRescueTestJSONUnderV235-415838788-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1723.185174] env[62519]: INFO nova.compute.manager [-] [instance: 40507d8c-8f30-45d4-9c65-03f8b1271afb] Took 1.38 seconds to deallocate network for instance. [ 1723.207650] env[62519]: DEBUG oslo_concurrency.lockutils [None req-1e42fcec-c842-44cb-b7d8-0548bb9c2f2e tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Releasing lock "refresh_cache-ad0af10d-5063-4344-b12f-1d3ee9ea1090" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1723.207650] env[62519]: DEBUG nova.objects.instance [None req-1e42fcec-c842-44cb-b7d8-0548bb9c2f2e tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Lazy-loading 'migration_context' on Instance uuid ad0af10d-5063-4344-b12f-1d3ee9ea1090 {{(pid=62519) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1723.224607] env[62519]: DEBUG oslo_concurrency.lockutils [None req-7126d37a-0998-4413-bff4-8ca1a98f18aa tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Lock "54a8aa34-1595-4494-ba68-6915611631ce" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 23.527s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1723.284085] env[62519]: DEBUG oslo_vmware.api [None req-acfac456-33c8-45db-84b1-8d13db70232a tempest-FloatingIPsAssociationNegativeTestJSON-1468566888 tempest-FloatingIPsAssociationNegativeTestJSON-1468566888-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]523b533a-82ca-72f8-3bad-587d029dbef5, 'name': SearchDatastore_Task, 'duration_secs': 0.009594} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1723.284402] env[62519]: DEBUG oslo_concurrency.lockutils [None req-acfac456-33c8-45db-84b1-8d13db70232a tempest-FloatingIPsAssociationNegativeTestJSON-1468566888 tempest-FloatingIPsAssociationNegativeTestJSON-1468566888-project-member] Releasing lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1723.284868] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-acfac456-33c8-45db-84b1-8d13db70232a tempest-FloatingIPsAssociationNegativeTestJSON-1468566888 tempest-FloatingIPsAssociationNegativeTestJSON-1468566888-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk to [datastore1] d710c97b-a2fd-4a54-baaa-ec7664895ce7/d710c97b-a2fd-4a54-baaa-ec7664895ce7.vmdk {{(pid=62519) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1723.285240] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-7ad12296-2803-4529-a188-65a0cb8da66b {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1723.293731] env[62519]: DEBUG oslo_vmware.api [None req-acfac456-33c8-45db-84b1-8d13db70232a tempest-FloatingIPsAssociationNegativeTestJSON-1468566888 tempest-FloatingIPsAssociationNegativeTestJSON-1468566888-project-member] Waiting for the task: (returnval){ [ 1723.293731] env[62519]: value = "task-1802798" [ 1723.293731] env[62519]: _type = "Task" [ 1723.293731] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1723.303787] env[62519]: DEBUG oslo_vmware.api [None req-acfac456-33c8-45db-84b1-8d13db70232a tempest-FloatingIPsAssociationNegativeTestJSON-1468566888 tempest-FloatingIPsAssociationNegativeTestJSON-1468566888-project-member] Task: {'id': task-1802798, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1723.595744] env[62519]: DEBUG oslo_concurrency.lockutils [None req-0c4ed0c5-e3d7-4fce-b37b-6c40f24aa9e8 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Acquiring lock "c60f5d73-9d6d-4b5f-b71b-00b6b787d482" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1723.596082] env[62519]: DEBUG oslo_concurrency.lockutils [None req-0c4ed0c5-e3d7-4fce-b37b-6c40f24aa9e8 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Lock "c60f5d73-9d6d-4b5f-b71b-00b6b787d482" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1723.596355] env[62519]: DEBUG oslo_concurrency.lockutils [None req-0c4ed0c5-e3d7-4fce-b37b-6c40f24aa9e8 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Acquiring lock "c60f5d73-9d6d-4b5f-b71b-00b6b787d482-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1723.596553] env[62519]: DEBUG oslo_concurrency.lockutils [None req-0c4ed0c5-e3d7-4fce-b37b-6c40f24aa9e8 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Lock "c60f5d73-9d6d-4b5f-b71b-00b6b787d482-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1723.596812] env[62519]: DEBUG oslo_concurrency.lockutils [None req-0c4ed0c5-e3d7-4fce-b37b-6c40f24aa9e8 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Lock "c60f5d73-9d6d-4b5f-b71b-00b6b787d482-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1723.599872] env[62519]: INFO nova.compute.manager [None req-0c4ed0c5-e3d7-4fce-b37b-6c40f24aa9e8 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] [instance: c60f5d73-9d6d-4b5f-b71b-00b6b787d482] Terminating instance [ 1723.710372] env[62519]: DEBUG nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Applying migration context for instance ad0af10d-5063-4344-b12f-1d3ee9ea1090 as it has an incoming, in-progress migration 7e488268-8aed-49a2-a0b0-dbd98ced6c2b. 
Migration status is confirming {{(pid=62519) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1016}} [ 1723.712197] env[62519]: INFO nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] [instance: ad0af10d-5063-4344-b12f-1d3ee9ea1090] Updating resource usage from migration 7e488268-8aed-49a2-a0b0-dbd98ced6c2b [ 1723.716090] env[62519]: DEBUG nova.objects.base [None req-1e42fcec-c842-44cb-b7d8-0548bb9c2f2e tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Object Instance lazy-loaded attributes: info_cache,migration_context {{(pid=62519) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1723.716487] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4304bc27-b56a-4c19-87b1-0ecadfc6453c {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1723.741634] env[62519]: DEBUG nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Instance 11d4a010-959f-4f53-94dc-7499007612ad actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62519) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1723.741634] env[62519]: DEBUG nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Instance 34d2991e-b6df-473d-8994-e45ff57ef131 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62519) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1723.741634] env[62519]: WARNING nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Instance 099112ae-569b-4853-bc47-b0b8b97d2525 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1723.741634] env[62519]: WARNING nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Instance fe350d30-6fbd-4813-9634-ed05984fecfd is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1723.741634] env[62519]: DEBUG nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Instance 765cf18e-53a0-4cc6-ad0e-337a6f68915c actively managed on this compute host and has allocations in placement: {'resources': {'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62519) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1723.741634] env[62519]: DEBUG nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Instance f0925a44-c15b-4415-99bc-1b2366292fe4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 2, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62519) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1723.741634] env[62519]: DEBUG nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Instance 8b20b91d-d4e7-4ec2-88ec-76b444a8d8a8 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62519) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1723.741634] env[62519]: DEBUG nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Instance c60f5d73-9d6d-4b5f-b71b-00b6b787d482 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62519) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1723.741634] env[62519]: DEBUG nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Instance 46b3a0fb-29f6-4b66-a091-2d125b69d109 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62519) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1723.741634] env[62519]: DEBUG nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Instance bace23b3-b7f4-4f3b-8986-0076440d096d actively managed on this compute host and has allocations in placement: {'resources': {'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62519) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1723.742124] env[62519]: DEBUG nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Instance 417588f8-6288-4ecd-9764-dbc923549c5d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62519) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1723.742124] env[62519]: DEBUG nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Instance 99f22198-1a65-4d0d-b665-90c7063dbdb9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62519) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1723.742124] env[62519]: DEBUG nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Instance 9f71845a-e80c-4822-b3de-717f1d83bc49 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62519) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1723.742124] env[62519]: DEBUG nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Instance 88f9351c-253b-49dd-a88e-b8585ea742ac actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62519) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1723.742509] env[62519]: WARNING nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Instance 67424299-f100-49a1-ab73-0407b60a2d9f is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1723.742711] env[62519]: WARNING nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Instance 83a680b9-0c2d-4231-9ddf-9aa90209c620 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1723.742902] env[62519]: DEBUG nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Migration 7e488268-8aed-49a2-a0b0-dbd98ced6c2b is active on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62519) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1712}} [ 1723.743111] env[62519]: DEBUG nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Instance ad0af10d-5063-4344-b12f-1d3ee9ea1090 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=62519) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1723.743344] env[62519]: WARNING nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Instance 37689ec5-4bc8-4361-9e43-00529e2b5538 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1723.743558] env[62519]: DEBUG nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Instance 0789b142-4712-4b7a-9197-c3689f24df7c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62519) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1723.743712] env[62519]: DEBUG nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Instance d710c97b-a2fd-4a54-baaa-ec7664895ce7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62519) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1723.745802] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ccfa5bd9-40be-4a34-8b63-ef275af68b59 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1723.754237] env[62519]: DEBUG oslo_vmware.api [None req-1e42fcec-c842-44cb-b7d8-0548bb9c2f2e tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Waiting for the task: (returnval){ [ 1723.754237] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]527dfcc2-009a-d7b0-a36b-89974fb45fc5" [ 1723.754237] env[62519]: _type = "Task" [ 1723.754237] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1723.768035] env[62519]: DEBUG oslo_concurrency.lockutils [None req-aae7d2f2-bba5-46f9-99c5-5b3533414c8a tempest-AttachInterfacesV270Test-1088965964 tempest-AttachInterfacesV270Test-1088965964-project-member] Acquiring lock "0789b142-4712-4b7a-9197-c3689f24df7c" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1723.768233] env[62519]: DEBUG oslo_concurrency.lockutils [None req-aae7d2f2-bba5-46f9-99c5-5b3533414c8a tempest-AttachInterfacesV270Test-1088965964 tempest-AttachInterfacesV270Test-1088965964-project-member] Lock "0789b142-4712-4b7a-9197-c3689f24df7c" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1723.768425] env[62519]: DEBUG oslo_concurrency.lockutils [None req-aae7d2f2-bba5-46f9-99c5-5b3533414c8a tempest-AttachInterfacesV270Test-1088965964 tempest-AttachInterfacesV270Test-1088965964-project-member] Acquiring lock "0789b142-4712-4b7a-9197-c3689f24df7c-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1723.768615] env[62519]: DEBUG oslo_concurrency.lockutils [None req-aae7d2f2-bba5-46f9-99c5-5b3533414c8a tempest-AttachInterfacesV270Test-1088965964 tempest-AttachInterfacesV270Test-1088965964-project-member] Lock "0789b142-4712-4b7a-9197-c3689f24df7c-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1723.768781] env[62519]: DEBUG oslo_concurrency.lockutils [None req-aae7d2f2-bba5-46f9-99c5-5b3533414c8a tempest-AttachInterfacesV270Test-1088965964 tempest-AttachInterfacesV270Test-1088965964-project-member] Lock "0789b142-4712-4b7a-9197-c3689f24df7c-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1723.770734] env[62519]: DEBUG oslo_vmware.api [None req-1e42fcec-c842-44cb-b7d8-0548bb9c2f2e tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Task: {'id': 
session[520dffeb-48b6-6e75-74d3-efedab77eb43]527dfcc2-009a-d7b0-a36b-89974fb45fc5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1723.771210] env[62519]: INFO nova.compute.manager [None req-aae7d2f2-bba5-46f9-99c5-5b3533414c8a tempest-AttachInterfacesV270Test-1088965964 tempest-AttachInterfacesV270Test-1088965964-project-member] [instance: 0789b142-4712-4b7a-9197-c3689f24df7c] Terminating instance [ 1723.804163] env[62519]: DEBUG oslo_vmware.api [None req-acfac456-33c8-45db-84b1-8d13db70232a tempest-FloatingIPsAssociationNegativeTestJSON-1468566888 tempest-FloatingIPsAssociationNegativeTestJSON-1468566888-project-member] Task: {'id': task-1802798, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1724.085913] env[62519]: DEBUG oslo_concurrency.lockutils [None req-22e1d356-1166-4a97-af20-48bea5d24d15 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Acquiring lock "dd60afd6-2834-4fca-a846-e39d57aabd60" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1724.085996] env[62519]: DEBUG oslo_concurrency.lockutils [None req-22e1d356-1166-4a97-af20-48bea5d24d15 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Lock "dd60afd6-2834-4fca-a846-e39d57aabd60" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1724.104687] env[62519]: DEBUG nova.compute.manager [None req-0c4ed0c5-e3d7-4fce-b37b-6c40f24aa9e8 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] [instance: c60f5d73-9d6d-4b5f-b71b-00b6b787d482] Start destroying the instance on the hypervisor. 
{{(pid=62519) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3166}} [ 1724.105137] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-0c4ed0c5-e3d7-4fce-b37b-6c40f24aa9e8 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] [instance: c60f5d73-9d6d-4b5f-b71b-00b6b787d482] Destroying instance {{(pid=62519) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1724.106200] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53da5107-1590-4700-80ac-8fe3ef58de99 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1724.115120] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-0c4ed0c5-e3d7-4fce-b37b-6c40f24aa9e8 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] [instance: c60f5d73-9d6d-4b5f-b71b-00b6b787d482] Powering off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1724.115343] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-694f0a21-3ada-45ca-818f-2c82a2f8c6b5 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1724.122318] env[62519]: DEBUG oslo_vmware.api [None req-0c4ed0c5-e3d7-4fce-b37b-6c40f24aa9e8 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Waiting for the task: (returnval){ [ 1724.122318] env[62519]: value = "task-1802799" [ 1724.122318] env[62519]: _type = "Task" [ 1724.122318] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1724.130715] env[62519]: DEBUG oslo_vmware.api [None req-0c4ed0c5-e3d7-4fce-b37b-6c40f24aa9e8 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Task: {'id': task-1802799, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1724.211519] env[62519]: DEBUG oslo_concurrency.lockutils [None req-ee5b9640-c92a-4528-839d-7cce77d32182 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Lock "40507d8c-8f30-45d4-9c65-03f8b1271afb" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 2.971s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1724.246873] env[62519]: DEBUG nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Instance e3a19583-b332-40e3-bdd0-d254f7a78b0a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62519) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1724.266206] env[62519]: DEBUG oslo_vmware.api [None req-1e42fcec-c842-44cb-b7d8-0548bb9c2f2e tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]527dfcc2-009a-d7b0-a36b-89974fb45fc5, 'name': SearchDatastore_Task, 'duration_secs': 0.031077} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1724.266533] env[62519]: DEBUG oslo_concurrency.lockutils [None req-1e42fcec-c842-44cb-b7d8-0548bb9c2f2e tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1724.274798] env[62519]: DEBUG nova.compute.manager [None req-aae7d2f2-bba5-46f9-99c5-5b3533414c8a tempest-AttachInterfacesV270Test-1088965964 tempest-AttachInterfacesV270Test-1088965964-project-member] [instance: 0789b142-4712-4b7a-9197-c3689f24df7c] Start destroying the instance on the hypervisor. {{(pid=62519) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3166}} [ 1724.275019] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-aae7d2f2-bba5-46f9-99c5-5b3533414c8a tempest-AttachInterfacesV270Test-1088965964 tempest-AttachInterfacesV270Test-1088965964-project-member] [instance: 0789b142-4712-4b7a-9197-c3689f24df7c] Destroying instance {{(pid=62519) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1724.275904] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a0bc894-f219-4603-970f-e6db8a31809f {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1724.284516] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-aae7d2f2-bba5-46f9-99c5-5b3533414c8a tempest-AttachInterfacesV270Test-1088965964 tempest-AttachInterfacesV270Test-1088965964-project-member] [instance: 0789b142-4712-4b7a-9197-c3689f24df7c] Powering off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1724.284827] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-250595c1-99c5-4080-921c-42b1f12ec904 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1724.293969] env[62519]: DEBUG oslo_vmware.api [None req-aae7d2f2-bba5-46f9-99c5-5b3533414c8a tempest-AttachInterfacesV270Test-1088965964 tempest-AttachInterfacesV270Test-1088965964-project-member] Waiting for the task: (returnval){ [ 1724.293969] env[62519]: value = "task-1802800" [ 1724.293969] env[62519]: _type = "Task" [ 1724.293969] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1724.314101] env[62519]: DEBUG oslo_vmware.api [None req-aae7d2f2-bba5-46f9-99c5-5b3533414c8a tempest-AttachInterfacesV270Test-1088965964 tempest-AttachInterfacesV270Test-1088965964-project-member] Task: {'id': task-1802800, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1724.318767] env[62519]: DEBUG oslo_vmware.api [None req-acfac456-33c8-45db-84b1-8d13db70232a tempest-FloatingIPsAssociationNegativeTestJSON-1468566888 tempest-FloatingIPsAssociationNegativeTestJSON-1468566888-project-member] Task: {'id': task-1802798, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.533654} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1724.319130] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-acfac456-33c8-45db-84b1-8d13db70232a tempest-FloatingIPsAssociationNegativeTestJSON-1468566888 tempest-FloatingIPsAssociationNegativeTestJSON-1468566888-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk to [datastore1] d710c97b-a2fd-4a54-baaa-ec7664895ce7/d710c97b-a2fd-4a54-baaa-ec7664895ce7.vmdk {{(pid=62519) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1724.319398] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-acfac456-33c8-45db-84b1-8d13db70232a tempest-FloatingIPsAssociationNegativeTestJSON-1468566888 tempest-FloatingIPsAssociationNegativeTestJSON-1468566888-project-member] [instance: d710c97b-a2fd-4a54-baaa-ec7664895ce7] Extending root virtual disk to 1048576 {{(pid=62519) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1724.319698] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-35eed1c7-910c-4e5e-b643-ec48b845a84a {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1724.329024] env[62519]: DEBUG oslo_vmware.api [None req-acfac456-33c8-45db-84b1-8d13db70232a tempest-FloatingIPsAssociationNegativeTestJSON-1468566888 tempest-FloatingIPsAssociationNegativeTestJSON-1468566888-project-member] Waiting for the task: (returnval){ [ 1724.329024] env[62519]: value = "task-1802801" [ 1724.329024] env[62519]: _type = "Task" [ 1724.329024] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1724.340055] env[62519]: DEBUG oslo_vmware.api [None req-acfac456-33c8-45db-84b1-8d13db70232a tempest-FloatingIPsAssociationNegativeTestJSON-1468566888 tempest-FloatingIPsAssociationNegativeTestJSON-1468566888-project-member] Task: {'id': task-1802801, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1724.589315] env[62519]: DEBUG nova.compute.manager [None req-22e1d356-1166-4a97-af20-48bea5d24d15 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] [instance: dd60afd6-2834-4fca-a846-e39d57aabd60] Starting instance... {{(pid=62519) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2441}} [ 1724.635877] env[62519]: DEBUG oslo_vmware.api [None req-0c4ed0c5-e3d7-4fce-b37b-6c40f24aa9e8 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Task: {'id': task-1802799, 'name': PowerOffVM_Task, 'duration_secs': 0.214721} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1724.636193] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-0c4ed0c5-e3d7-4fce-b37b-6c40f24aa9e8 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] [instance: c60f5d73-9d6d-4b5f-b71b-00b6b787d482] Powered off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1724.636367] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-0c4ed0c5-e3d7-4fce-b37b-6c40f24aa9e8 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] [instance: c60f5d73-9d6d-4b5f-b71b-00b6b787d482] Unregistering the VM {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1724.636610] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ff03914c-e3b5-4df3-85df-5dd02f8a6f75 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1724.751007] env[62519]: DEBUG nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Instance 156ed02a-3365-4a4f-b4de-ea86920d3baf has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62519) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1724.804784] env[62519]: DEBUG oslo_vmware.api [None req-aae7d2f2-bba5-46f9-99c5-5b3533414c8a tempest-AttachInterfacesV270Test-1088965964 tempest-AttachInterfacesV270Test-1088965964-project-member] Task: {'id': task-1802800, 'name': PowerOffVM_Task, 'duration_secs': 0.292622} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1724.805094] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-aae7d2f2-bba5-46f9-99c5-5b3533414c8a tempest-AttachInterfacesV270Test-1088965964 tempest-AttachInterfacesV270Test-1088965964-project-member] [instance: 0789b142-4712-4b7a-9197-c3689f24df7c] Powered off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1724.805262] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-aae7d2f2-bba5-46f9-99c5-5b3533414c8a tempest-AttachInterfacesV270Test-1088965964 tempest-AttachInterfacesV270Test-1088965964-project-member] [instance: 0789b142-4712-4b7a-9197-c3689f24df7c] Unregistering the VM {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1724.805511] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-136ac29d-c0dc-4c0d-b6ff-0a8bd855b89d {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1724.839357] env[62519]: DEBUG oslo_vmware.api [None req-acfac456-33c8-45db-84b1-8d13db70232a tempest-FloatingIPsAssociationNegativeTestJSON-1468566888 tempest-FloatingIPsAssociationNegativeTestJSON-1468566888-project-member] Task: {'id': task-1802801, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.072063} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1724.839648] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-acfac456-33c8-45db-84b1-8d13db70232a tempest-FloatingIPsAssociationNegativeTestJSON-1468566888 tempest-FloatingIPsAssociationNegativeTestJSON-1468566888-project-member] [instance: d710c97b-a2fd-4a54-baaa-ec7664895ce7] Extended root virtual disk {{(pid=62519) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1724.840446] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a3f8ac6b-ad7a-4e2f-9bc4-6b7b97cd3d1f {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1724.864018] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-acfac456-33c8-45db-84b1-8d13db70232a tempest-FloatingIPsAssociationNegativeTestJSON-1468566888 tempest-FloatingIPsAssociationNegativeTestJSON-1468566888-project-member] [instance: d710c97b-a2fd-4a54-baaa-ec7664895ce7] Reconfiguring VM instance instance-0000004c to attach disk [datastore1] d710c97b-a2fd-4a54-baaa-ec7664895ce7/d710c97b-a2fd-4a54-baaa-ec7664895ce7.vmdk or device None with type sparse {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1724.864018] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-20e21ee7-450b-4a0c-92e1-73af7d06d0ff {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1724.884988] env[62519]: DEBUG oslo_vmware.api [None req-acfac456-33c8-45db-84b1-8d13db70232a tempest-FloatingIPsAssociationNegativeTestJSON-1468566888 tempest-FloatingIPsAssociationNegativeTestJSON-1468566888-project-member] Waiting for the task: (returnval){ [ 1724.884988] env[62519]: value = "task-1802804" [ 1724.884988] env[62519]: _type = "Task" [ 1724.884988] env[62519]: } to complete. 
{{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1724.886717] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-0c4ed0c5-e3d7-4fce-b37b-6c40f24aa9e8 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] [instance: c60f5d73-9d6d-4b5f-b71b-00b6b787d482] Unregistered the VM {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1724.887016] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-0c4ed0c5-e3d7-4fce-b37b-6c40f24aa9e8 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] [instance: c60f5d73-9d6d-4b5f-b71b-00b6b787d482] Deleting contents of the VM from datastore datastore1 {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1724.887316] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-0c4ed0c5-e3d7-4fce-b37b-6c40f24aa9e8 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Deleting the datastore file [datastore1] c60f5d73-9d6d-4b5f-b71b-00b6b787d482 {{(pid=62519) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1724.891571] env[62519]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-fdd32c88-f015-4e8c-ba82-c3fab175483f {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1724.902041] env[62519]: DEBUG oslo_vmware.api [None req-acfac456-33c8-45db-84b1-8d13db70232a tempest-FloatingIPsAssociationNegativeTestJSON-1468566888 tempest-FloatingIPsAssociationNegativeTestJSON-1468566888-project-member] Task: {'id': task-1802804, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1724.903546] env[62519]: DEBUG oslo_vmware.api [None req-0c4ed0c5-e3d7-4fce-b37b-6c40f24aa9e8 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Waiting for the task: (returnval){ [ 1724.903546] env[62519]: value = "task-1802805" [ 1724.903546] env[62519]: _type = "Task" [ 1724.903546] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1724.914307] env[62519]: DEBUG oslo_vmware.api [None req-0c4ed0c5-e3d7-4fce-b37b-6c40f24aa9e8 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Task: {'id': task-1802805, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1724.958507] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-aae7d2f2-bba5-46f9-99c5-5b3533414c8a tempest-AttachInterfacesV270Test-1088965964 tempest-AttachInterfacesV270Test-1088965964-project-member] [instance: 0789b142-4712-4b7a-9197-c3689f24df7c] Unregistered the VM {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1724.958865] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-aae7d2f2-bba5-46f9-99c5-5b3533414c8a tempest-AttachInterfacesV270Test-1088965964 tempest-AttachInterfacesV270Test-1088965964-project-member] [instance: 0789b142-4712-4b7a-9197-c3689f24df7c] Deleting contents of the VM from datastore datastore1 {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1724.959099] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-aae7d2f2-bba5-46f9-99c5-5b3533414c8a tempest-AttachInterfacesV270Test-1088965964 tempest-AttachInterfacesV270Test-1088965964-project-member] Deleting the datastore file [datastore1] 0789b142-4712-4b7a-9197-c3689f24df7c {{(pid=62519) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1724.959399] env[62519]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-7c00aeb4-b016-4195-86e6-ede85dcc7e8a {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1724.967231] env[62519]: DEBUG oslo_vmware.api [None req-aae7d2f2-bba5-46f9-99c5-5b3533414c8a tempest-AttachInterfacesV270Test-1088965964 tempest-AttachInterfacesV270Test-1088965964-project-member] Waiting for the task: (returnval){ [ 1724.967231] env[62519]: value = "task-1802806" [ 1724.967231] env[62519]: _type = "Task" [ 1724.967231] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1724.976213] env[62519]: DEBUG oslo_vmware.api [None req-aae7d2f2-bba5-46f9-99c5-5b3533414c8a tempest-AttachInterfacesV270Test-1088965964 tempest-AttachInterfacesV270Test-1088965964-project-member] Task: {'id': task-1802806, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1725.111394] env[62519]: DEBUG oslo_concurrency.lockutils [None req-22e1d356-1166-4a97-af20-48bea5d24d15 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1725.151992] env[62519]: DEBUG oslo_concurrency.lockutils [None req-86f29903-4db3-4b91-af52-9fc65de92246 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Acquiring lock "c8816718-0a35-4474-b162-c619b0acc154" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1725.152244] env[62519]: DEBUG oslo_concurrency.lockutils [None req-86f29903-4db3-4b91-af52-9fc65de92246 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Lock "c8816718-0a35-4474-b162-c619b0acc154" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1725.254624] env[62519]: DEBUG nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Instance dd60afd6-2834-4fca-a846-e39d57aabd60 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62519) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1725.254870] env[62519]: DEBUG nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Total usable vcpus: 48, total allocated vcpus: 16 {{(pid=62519) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1725.255029] env[62519]: DEBUG nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=3648MB phys_disk=200GB used_disk=15GB total_vcpus=48 used_vcpus=16 pci_stats=[] {{(pid=62519) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1725.396993] env[62519]: DEBUG oslo_vmware.api [None req-acfac456-33c8-45db-84b1-8d13db70232a tempest-FloatingIPsAssociationNegativeTestJSON-1468566888 tempest-FloatingIPsAssociationNegativeTestJSON-1468566888-project-member] Task: {'id': task-1802804, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1725.417911] env[62519]: DEBUG oslo_vmware.api [None req-0c4ed0c5-e3d7-4fce-b37b-6c40f24aa9e8 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Task: {'id': task-1802805, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.294705} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1725.418298] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-0c4ed0c5-e3d7-4fce-b37b-6c40f24aa9e8 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Deleted the datastore file {{(pid=62519) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1725.418515] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-0c4ed0c5-e3d7-4fce-b37b-6c40f24aa9e8 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] [instance: c60f5d73-9d6d-4b5f-b71b-00b6b787d482] Deleted contents of the VM from datastore datastore1 {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1725.418844] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-0c4ed0c5-e3d7-4fce-b37b-6c40f24aa9e8 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] [instance: c60f5d73-9d6d-4b5f-b71b-00b6b787d482] Instance destroyed {{(pid=62519) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1725.419070] env[62519]: INFO nova.compute.manager [None req-0c4ed0c5-e3d7-4fce-b37b-6c40f24aa9e8 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] [instance: c60f5d73-9d6d-4b5f-b71b-00b6b787d482] Took 1.31 seconds to destroy the instance on the hypervisor. [ 1725.420037] env[62519]: DEBUG oslo.service.loopingcall [None req-0c4ed0c5-e3d7-4fce-b37b-6c40f24aa9e8 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62519) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1725.420037] env[62519]: DEBUG nova.compute.manager [-] [instance: c60f5d73-9d6d-4b5f-b71b-00b6b787d482] Deallocating network for instance {{(pid=62519) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2297}} [ 1725.420037] env[62519]: DEBUG nova.network.neutron [-] [instance: c60f5d73-9d6d-4b5f-b71b-00b6b787d482] deallocate_for_instance() {{(pid=62519) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1725.478444] env[62519]: DEBUG oslo_vmware.api [None req-aae7d2f2-bba5-46f9-99c5-5b3533414c8a tempest-AttachInterfacesV270Test-1088965964 tempest-AttachInterfacesV270Test-1088965964-project-member] Task: {'id': task-1802806, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.253715} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1725.478630] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-aae7d2f2-bba5-46f9-99c5-5b3533414c8a tempest-AttachInterfacesV270Test-1088965964 tempest-AttachInterfacesV270Test-1088965964-project-member] Deleted the datastore file {{(pid=62519) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1725.478824] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-aae7d2f2-bba5-46f9-99c5-5b3533414c8a tempest-AttachInterfacesV270Test-1088965964 tempest-AttachInterfacesV270Test-1088965964-project-member] [instance: 0789b142-4712-4b7a-9197-c3689f24df7c] Deleted contents of the VM from datastore datastore1 {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1725.479062] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-aae7d2f2-bba5-46f9-99c5-5b3533414c8a tempest-AttachInterfacesV270Test-1088965964 tempest-AttachInterfacesV270Test-1088965964-project-member] [instance: 0789b142-4712-4b7a-9197-c3689f24df7c] Instance destroyed {{(pid=62519) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1725.479289] env[62519]: INFO nova.compute.manager [None req-aae7d2f2-bba5-46f9-99c5-5b3533414c8a tempest-AttachInterfacesV270Test-1088965964 tempest-AttachInterfacesV270Test-1088965964-project-member] [instance: 0789b142-4712-4b7a-9197-c3689f24df7c] Took 1.20 seconds to destroy the instance on the hypervisor. [ 1725.479572] env[62519]: DEBUG oslo.service.loopingcall [None req-aae7d2f2-bba5-46f9-99c5-5b3533414c8a tempest-AttachInterfacesV270Test-1088965964 tempest-AttachInterfacesV270Test-1088965964-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62519) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1725.479804] env[62519]: DEBUG nova.compute.manager [-] [instance: 0789b142-4712-4b7a-9197-c3689f24df7c] Deallocating network for instance {{(pid=62519) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2297}} [ 1725.479921] env[62519]: DEBUG nova.network.neutron [-] [instance: 0789b142-4712-4b7a-9197-c3689f24df7c] deallocate_for_instance() {{(pid=62519) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1725.654603] env[62519]: DEBUG nova.compute.manager [None req-86f29903-4db3-4b91-af52-9fc65de92246 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] [instance: c8816718-0a35-4474-b162-c619b0acc154] Starting instance... 
{{(pid=62519) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2441}} [ 1725.700778] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29634ae0-812d-404a-a5ac-8a46366e9cc2 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1725.712401] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e720d791-ee62-47af-8bde-c766a50aef8c {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1725.756226] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f53755a7-6bb4-424d-9f98-656e187c1f0e {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1725.764956] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0277554-9d1c-4c79-881f-c1f2e0431191 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1725.782580] env[62519]: DEBUG nova.compute.provider_tree [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Inventory has not changed in ProviderTree for provider: f8ca0d98-9158-4b85-ae0e-b106f966dd44 {{(pid=62519) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1725.893494] env[62519]: DEBUG nova.compute.manager [req-be4341db-d52d-4101-b974-aa09eb9ac0aa req-4ede519a-b712-4202-b558-df03e909e184 service nova] [instance: c60f5d73-9d6d-4b5f-b71b-00b6b787d482] Received event network-vif-deleted-76a9613b-a465-4420-ab6e-fd38db7bccac {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1725.893699] env[62519]: INFO nova.compute.manager [req-be4341db-d52d-4101-b974-aa09eb9ac0aa req-4ede519a-b712-4202-b558-df03e909e184 service nova] [instance: c60f5d73-9d6d-4b5f-b71b-00b6b787d482] Neutron deleted interface 76a9613b-a465-4420-ab6e-fd38db7bccac; detaching it from the instance and deleting it from the info cache [ 1725.893871] env[62519]: DEBUG nova.network.neutron [req-be4341db-d52d-4101-b974-aa09eb9ac0aa req-4ede519a-b712-4202-b558-df03e909e184 service nova] [instance: c60f5d73-9d6d-4b5f-b71b-00b6b787d482] Updating instance_info_cache with network_info: [] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1725.900820] env[62519]: DEBUG oslo_vmware.api [None req-acfac456-33c8-45db-84b1-8d13db70232a tempest-FloatingIPsAssociationNegativeTestJSON-1468566888 tempest-FloatingIPsAssociationNegativeTestJSON-1468566888-project-member] Task: {'id': task-1802804, 'name': ReconfigVM_Task, 'duration_secs': 0.52723} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1725.901095] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-acfac456-33c8-45db-84b1-8d13db70232a tempest-FloatingIPsAssociationNegativeTestJSON-1468566888 tempest-FloatingIPsAssociationNegativeTestJSON-1468566888-project-member] [instance: d710c97b-a2fd-4a54-baaa-ec7664895ce7] Reconfigured VM instance instance-0000004c to attach disk [datastore1] d710c97b-a2fd-4a54-baaa-ec7664895ce7/d710c97b-a2fd-4a54-baaa-ec7664895ce7.vmdk or device None with type sparse {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1725.901697] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-380ef6f6-ec2a-4162-b01b-695be515401b {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1725.909813] env[62519]: DEBUG oslo_vmware.api [None req-acfac456-33c8-45db-84b1-8d13db70232a tempest-FloatingIPsAssociationNegativeTestJSON-1468566888 tempest-FloatingIPsAssociationNegativeTestJSON-1468566888-project-member] Waiting for the task: (returnval){ [ 1725.909813] env[62519]: value = "task-1802807" [ 1725.909813] env[62519]: _type = "Task" [ 1725.909813] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1725.922741] env[62519]: DEBUG oslo_vmware.api [None req-acfac456-33c8-45db-84b1-8d13db70232a tempest-FloatingIPsAssociationNegativeTestJSON-1468566888 tempest-FloatingIPsAssociationNegativeTestJSON-1468566888-project-member] Task: {'id': task-1802807, 'name': Rename_Task} progress is 6%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1726.176724] env[62519]: DEBUG oslo_concurrency.lockutils [None req-86f29903-4db3-4b91-af52-9fc65de92246 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1726.291059] env[62519]: DEBUG nova.scheduler.client.report [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Inventory has not changed for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1726.317177] env[62519]: DEBUG nova.network.neutron [-] [instance: c60f5d73-9d6d-4b5f-b71b-00b6b787d482] Updating instance_info_cache with network_info: [] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1726.346403] env[62519]: DEBUG nova.network.neutron [-] [instance: 0789b142-4712-4b7a-9197-c3689f24df7c] Updating instance_info_cache with network_info: [] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1726.396188] env[62519]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-65f5da31-2d85-4dfe-88f0-1787bf458904 
{{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1726.406495] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b88e8662-abca-4199-ad0a-786d6622319f {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1726.425345] env[62519]: DEBUG oslo_vmware.api [None req-acfac456-33c8-45db-84b1-8d13db70232a tempest-FloatingIPsAssociationNegativeTestJSON-1468566888 tempest-FloatingIPsAssociationNegativeTestJSON-1468566888-project-member] Task: {'id': task-1802807, 'name': Rename_Task, 'duration_secs': 0.147099} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1726.425606] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-acfac456-33c8-45db-84b1-8d13db70232a tempest-FloatingIPsAssociationNegativeTestJSON-1468566888 tempest-FloatingIPsAssociationNegativeTestJSON-1468566888-project-member] [instance: d710c97b-a2fd-4a54-baaa-ec7664895ce7] Powering on the VM {{(pid=62519) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1726.425838] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-fbe8c9e8-fca5-4631-ae09-c396a44f4295 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1726.441352] env[62519]: DEBUG nova.compute.manager [req-be4341db-d52d-4101-b974-aa09eb9ac0aa req-4ede519a-b712-4202-b558-df03e909e184 service nova] [instance: c60f5d73-9d6d-4b5f-b71b-00b6b787d482] Detach interface failed, port_id=76a9613b-a465-4420-ab6e-fd38db7bccac, reason: Instance c60f5d73-9d6d-4b5f-b71b-00b6b787d482 could not be found. {{(pid=62519) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11455}} [ 1726.442797] env[62519]: DEBUG oslo_vmware.api [None req-acfac456-33c8-45db-84b1-8d13db70232a tempest-FloatingIPsAssociationNegativeTestJSON-1468566888 tempest-FloatingIPsAssociationNegativeTestJSON-1468566888-project-member] Waiting for the task: (returnval){ [ 1726.442797] env[62519]: value = "task-1802808" [ 1726.442797] env[62519]: _type = "Task" [ 1726.442797] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1726.450530] env[62519]: DEBUG oslo_vmware.api [None req-acfac456-33c8-45db-84b1-8d13db70232a tempest-FloatingIPsAssociationNegativeTestJSON-1468566888 tempest-FloatingIPsAssociationNegativeTestJSON-1468566888-project-member] Task: {'id': task-1802808, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1726.796166] env[62519]: DEBUG nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62519) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1726.796440] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 4.104s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1726.796723] env[62519]: DEBUG oslo_concurrency.lockutils [None req-be17eeca-b00a-4055-b9b2-f92649e4fba8 tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 20.486s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1726.796908] env[62519]: DEBUG oslo_concurrency.lockutils [None req-be17eeca-b00a-4055-b9b2-f92649e4fba8 tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1726.799263] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2e5810b0-0157-49eb-9f20-d1455cdb81ec tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 13.791s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1726.799461] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2e5810b0-0157-49eb-9f20-d1455cdb81ec tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1726.801201] env[62519]: DEBUG oslo_concurrency.lockutils [None req-ed7ce4d9-ae28-4b02-96eb-927a70e00742 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 9.990s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1726.801382] env[62519]: DEBUG oslo_concurrency.lockutils [None req-ed7ce4d9-ae28-4b02-96eb-927a70e00742 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1726.803688] env[62519]: DEBUG oslo_concurrency.lockutils [None req-55b9ec2f-1756-4ef7-bc20-53ed174522c1 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] Lock "compute_resources" acquired by 
"nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 9.988s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1726.803866] env[62519]: DEBUG oslo_concurrency.lockutils [None req-55b9ec2f-1756-4ef7-bc20-53ed174522c1 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1726.805422] env[62519]: DEBUG oslo_concurrency.lockutils [None req-f420554c-c19d-4ffe-bdae-cf1b2a9f54e7 tempest-ServersAaction247Test-895786641 tempest-ServersAaction247Test-895786641-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 5.809s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1726.805609] env[62519]: DEBUG oslo_concurrency.lockutils [None req-f420554c-c19d-4ffe-bdae-cf1b2a9f54e7 tempest-ServersAaction247Test-895786641 tempest-ServersAaction247Test-895786641-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1726.807175] env[62519]: DEBUG oslo_concurrency.lockutils [None req-38e1cc42-c585-4d7c-ba6b-635100d384aa tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 4.982s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1726.808781] env[62519]: INFO nova.compute.claims [None req-38e1cc42-c585-4d7c-ba6b-635100d384aa tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] [instance: e3a19583-b332-40e3-bdd0-d254f7a78b0a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1726.819945] env[62519]: INFO nova.compute.manager [-] [instance: c60f5d73-9d6d-4b5f-b71b-00b6b787d482] Took 1.40 seconds to deallocate network for instance. [ 1726.838486] env[62519]: INFO nova.scheduler.client.report [None req-be17eeca-b00a-4055-b9b2-f92649e4fba8 tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Deleted allocations for instance 099112ae-569b-4853-bc47-b0b8b97d2525 [ 1726.841432] env[62519]: INFO nova.scheduler.client.report [None req-2e5810b0-0157-49eb-9f20-d1455cdb81ec tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Deleted allocations for instance fe350d30-6fbd-4813-9634-ed05984fecfd [ 1726.849332] env[62519]: INFO nova.compute.manager [-] [instance: 0789b142-4712-4b7a-9197-c3689f24df7c] Took 1.37 seconds to deallocate network for instance. 
[ 1726.860326] env[62519]: INFO nova.scheduler.client.report [None req-ed7ce4d9-ae28-4b02-96eb-927a70e00742 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] Deleted allocations for instance 83a680b9-0c2d-4231-9ddf-9aa90209c620 [ 1726.866831] env[62519]: INFO nova.scheduler.client.report [None req-f420554c-c19d-4ffe-bdae-cf1b2a9f54e7 tempest-ServersAaction247Test-895786641 tempest-ServersAaction247Test-895786641-project-member] Deleted allocations for instance 37689ec5-4bc8-4361-9e43-00529e2b5538 [ 1726.873207] env[62519]: INFO nova.scheduler.client.report [None req-55b9ec2f-1756-4ef7-bc20-53ed174522c1 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] Deleted allocations for instance 67424299-f100-49a1-ab73-0407b60a2d9f [ 1726.959760] env[62519]: DEBUG oslo_vmware.api [None req-acfac456-33c8-45db-84b1-8d13db70232a tempest-FloatingIPsAssociationNegativeTestJSON-1468566888 tempest-FloatingIPsAssociationNegativeTestJSON-1468566888-project-member] Task: {'id': task-1802808, 'name': PowerOnVM_Task} progress is 100%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1727.326428] env[62519]: DEBUG oslo_concurrency.lockutils [None req-0c4ed0c5-e3d7-4fce-b37b-6c40f24aa9e8 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1727.357288] env[62519]: DEBUG oslo_concurrency.lockutils [None req-be17eeca-b00a-4055-b9b2-f92649e4fba8 tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Lock "099112ae-569b-4853-bc47-b0b8b97d2525" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 25.238s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1727.358870] env[62519]: DEBUG oslo_concurrency.lockutils [None req-aae7d2f2-bba5-46f9-99c5-5b3533414c8a tempest-AttachInterfacesV270Test-1088965964 tempest-AttachInterfacesV270Test-1088965964-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1727.359313] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2e5810b0-0157-49eb-9f20-d1455cdb81ec tempest-MigrationsAdminTest-1925339216 tempest-MigrationsAdminTest-1925339216-project-member] Lock "fe350d30-6fbd-4813-9634-ed05984fecfd" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 17.972s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1727.374079] env[62519]: DEBUG oslo_concurrency.lockutils [None req-ed7ce4d9-ae28-4b02-96eb-927a70e00742 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] Lock "83a680b9-0c2d-4231-9ddf-9aa90209c620" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 14.827s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1727.378851] env[62519]: DEBUG oslo_concurrency.lockutils [None req-f420554c-c19d-4ffe-bdae-cf1b2a9f54e7 
tempest-ServersAaction247Test-895786641 tempest-ServersAaction247Test-895786641-project-member] Lock "37689ec5-4bc8-4361-9e43-00529e2b5538" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 10.630s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1727.382388] env[62519]: DEBUG oslo_concurrency.lockutils [None req-55b9ec2f-1756-4ef7-bc20-53ed174522c1 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] Lock "67424299-f100-49a1-ab73-0407b60a2d9f" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 15.248s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1727.457252] env[62519]: DEBUG oslo_vmware.api [None req-acfac456-33c8-45db-84b1-8d13db70232a tempest-FloatingIPsAssociationNegativeTestJSON-1468566888 tempest-FloatingIPsAssociationNegativeTestJSON-1468566888-project-member] Task: {'id': task-1802808, 'name': PowerOnVM_Task, 'duration_secs': 0.540042} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1727.457547] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-acfac456-33c8-45db-84b1-8d13db70232a tempest-FloatingIPsAssociationNegativeTestJSON-1468566888 tempest-FloatingIPsAssociationNegativeTestJSON-1468566888-project-member] [instance: d710c97b-a2fd-4a54-baaa-ec7664895ce7] Powered on the VM {{(pid=62519) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1727.457755] env[62519]: INFO nova.compute.manager [None req-acfac456-33c8-45db-84b1-8d13db70232a tempest-FloatingIPsAssociationNegativeTestJSON-1468566888 tempest-FloatingIPsAssociationNegativeTestJSON-1468566888-project-member] [instance: d710c97b-a2fd-4a54-baaa-ec7664895ce7] Took 9.28 seconds to spawn the instance on the hypervisor. [ 1727.457947] env[62519]: DEBUG nova.compute.manager [None req-acfac456-33c8-45db-84b1-8d13db70232a tempest-FloatingIPsAssociationNegativeTestJSON-1468566888 tempest-FloatingIPsAssociationNegativeTestJSON-1468566888-project-member] [instance: d710c97b-a2fd-4a54-baaa-ec7664895ce7] Checking state {{(pid=62519) _get_power_state /opt/stack/nova/nova/compute/manager.py:1799}} [ 1727.458901] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8fa1f1e9-9b2e-49b9-8ac7-f919e2fd166f {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1727.938721] env[62519]: DEBUG nova.compute.manager [req-68711e2b-0772-4cf1-aae9-ca526f3f22fe req-c25086df-f9b6-4ea4-a147-05802da241d4 service nova] [instance: 0789b142-4712-4b7a-9197-c3689f24df7c] Received event network-vif-deleted-c449f90e-6bf3-4dd2-a762-fcbf43301c6f {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1727.979473] env[62519]: INFO nova.compute.manager [None req-acfac456-33c8-45db-84b1-8d13db70232a tempest-FloatingIPsAssociationNegativeTestJSON-1468566888 tempest-FloatingIPsAssociationNegativeTestJSON-1468566888-project-member] [instance: d710c97b-a2fd-4a54-baaa-ec7664895ce7] Took 32.38 seconds to build instance. 
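The PowerOnVM_Task lines above ("progress is 0%", then "progress is 100%", then "'duration_secs': 0.540042} completed successfully") come from polling the vCenter task object until it leaves the running state. A minimal sketch of that poll-until-done loop, assuming a caller-supplied fetch_task_info() callable that reports the task's state and progress; this illustrates the pattern only and is not the oslo.vmware wait_for_task API:

    import time


    class TaskFailed(Exception):
        """Raised when the remote task finishes with an error."""


    def wait_for_task(fetch_task_info, poll_interval=0.5, timeout=300.0):
        """Poll a remote task until it succeeds, fails, or times out.

        fetch_task_info() must return a dict with at least:
          'state'    -- one of 'queued', 'running', 'success', 'error'
          'progress' -- integer percentage
        """
        deadline = time.monotonic() + timeout
        while True:
            info = fetch_task_info()
            if info['state'] == 'success':
                return info                  # e.g. carries duration_secs
            if info['state'] == 'error':
                raise TaskFailed(info.get('error', 'unknown error'))
            print("progress is %d%%" % info.get('progress', 0))
            if time.monotonic() > deadline:
                raise TimeoutError('task did not complete in %.0fs' % timeout)
            time.sleep(poll_interval)

A caller wraps whatever re-reads the task's current info from the server in that callable and invokes wait_for_task around it; each poll produces one "progress is N%" record like the ones interleaved through this log.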
[ 1728.156323] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c30d9b2-b851-40c9-b8c1-8fff7197e536 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1728.165296] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c470d5ca-fe97-4487-8867-d9d87b662bca {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1728.200578] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35537ede-9c33-4f74-864c-383795aebb7e {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1728.206089] env[62519]: DEBUG oslo_concurrency.lockutils [None req-51df1d83-d7cf-4798-abd3-f19fe8d9b526 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] Acquiring lock "76786353-f93f-4e7e-b3f7-7f22ae4b7b41" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1728.206334] env[62519]: DEBUG oslo_concurrency.lockutils [None req-51df1d83-d7cf-4798-abd3-f19fe8d9b526 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] Lock "76786353-f93f-4e7e-b3f7-7f22ae4b7b41" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1728.213977] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac6f4d36-0718-4aed-856d-8e59f5aa1387 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1728.228836] env[62519]: DEBUG nova.compute.provider_tree [None req-38e1cc42-c585-4d7c-ba6b-635100d384aa tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Inventory has not changed in ProviderTree for provider: f8ca0d98-9158-4b85-ae0e-b106f966dd44 {{(pid=62519) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1728.234688] env[62519]: DEBUG oslo_concurrency.lockutils [None req-51df1d83-d7cf-4798-abd3-f19fe8d9b526 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] Acquiring lock "af422ca1-7966-4bed-97bf-2b4c5285eaab" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1728.234894] env[62519]: DEBUG oslo_concurrency.lockutils [None req-51df1d83-d7cf-4798-abd3-f19fe8d9b526 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] Lock "af422ca1-7966-4bed-97bf-2b4c5285eaab" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1728.489354] env[62519]: DEBUG oslo_concurrency.lockutils [None req-c9946012-073d-42c5-b066-93d94ca91d4d tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] 
Acquiring lock "34d2991e-b6df-473d-8994-e45ff57ef131" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1728.489665] env[62519]: DEBUG oslo_concurrency.lockutils [None req-c9946012-073d-42c5-b066-93d94ca91d4d tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Lock "34d2991e-b6df-473d-8994-e45ff57ef131" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1728.490137] env[62519]: DEBUG oslo_concurrency.lockutils [None req-c9946012-073d-42c5-b066-93d94ca91d4d tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Acquiring lock "34d2991e-b6df-473d-8994-e45ff57ef131-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1728.490137] env[62519]: DEBUG oslo_concurrency.lockutils [None req-c9946012-073d-42c5-b066-93d94ca91d4d tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Lock "34d2991e-b6df-473d-8994-e45ff57ef131-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1728.490253] env[62519]: DEBUG oslo_concurrency.lockutils [None req-c9946012-073d-42c5-b066-93d94ca91d4d tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Lock "34d2991e-b6df-473d-8994-e45ff57ef131-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1728.492144] env[62519]: INFO nova.compute.manager [None req-c9946012-073d-42c5-b066-93d94ca91d4d tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] [instance: 34d2991e-b6df-473d-8994-e45ff57ef131] Terminating instance [ 1728.708685] env[62519]: DEBUG nova.compute.manager [None req-51df1d83-d7cf-4798-abd3-f19fe8d9b526 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] [instance: 76786353-f93f-4e7e-b3f7-7f22ae4b7b41] Starting instance... 
{{(pid=62519) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2441}} [ 1728.735762] env[62519]: DEBUG nova.scheduler.client.report [None req-38e1cc42-c585-4d7c-ba6b-635100d384aa tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Inventory has not changed for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1728.739357] env[62519]: DEBUG nova.compute.manager [None req-51df1d83-d7cf-4798-abd3-f19fe8d9b526 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] [instance: af422ca1-7966-4bed-97bf-2b4c5285eaab] Starting instance... {{(pid=62519) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2441}} [ 1728.997616] env[62519]: DEBUG nova.compute.manager [None req-c9946012-073d-42c5-b066-93d94ca91d4d tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] [instance: 34d2991e-b6df-473d-8994-e45ff57ef131] Start destroying the instance on the hypervisor. {{(pid=62519) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3166}} [ 1728.997883] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-c9946012-073d-42c5-b066-93d94ca91d4d tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] [instance: 34d2991e-b6df-473d-8994-e45ff57ef131] Destroying instance {{(pid=62519) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1728.998834] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94175ec8-0c44-4ffb-b5d8-58dc39ae3a86 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1729.009926] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-c9946012-073d-42c5-b066-93d94ca91d4d tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] [instance: 34d2991e-b6df-473d-8994-e45ff57ef131] Powering off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1729.010831] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a14e81cd-1c67-4509-94c8-ab0e76ab22f2 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1729.021608] env[62519]: DEBUG oslo_vmware.api [None req-c9946012-073d-42c5-b066-93d94ca91d4d tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Waiting for the task: (returnval){ [ 1729.021608] env[62519]: value = "task-1802809" [ 1729.021608] env[62519]: _type = "Task" [ 1729.021608] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1729.030782] env[62519]: DEBUG oslo_vmware.api [None req-c9946012-073d-42c5-b066-93d94ca91d4d tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Task: {'id': task-1802809, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1729.236105] env[62519]: DEBUG oslo_concurrency.lockutils [None req-51df1d83-d7cf-4798-abd3-f19fe8d9b526 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1729.245021] env[62519]: DEBUG oslo_concurrency.lockutils [None req-38e1cc42-c585-4d7c-ba6b-635100d384aa tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.436s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1729.245021] env[62519]: DEBUG nova.compute.manager [None req-38e1cc42-c585-4d7c-ba6b-635100d384aa tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] [instance: e3a19583-b332-40e3-bdd0-d254f7a78b0a] Start building networks asynchronously for instance. {{(pid=62519) _build_resources /opt/stack/nova/nova/compute/manager.py:2838}} [ 1729.247608] env[62519]: DEBUG oslo_concurrency.lockutils [None req-3fd0f8cc-63e6-40bc-a024-cf62b0b44c98 tempest-ServerRescueTestJSONUnderV235-415838788 tempest-ServerRescueTestJSONUnderV235-415838788-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 6.412s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1729.252931] env[62519]: INFO nova.compute.claims [None req-3fd0f8cc-63e6-40bc-a024-cf62b0b44c98 tempest-ServerRescueTestJSONUnderV235-415838788 tempest-ServerRescueTestJSONUnderV235-415838788-project-member] [instance: 156ed02a-3365-4a4f-b4de-ea86920d3baf] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1729.282497] env[62519]: DEBUG oslo_concurrency.lockutils [None req-51df1d83-d7cf-4798-abd3-f19fe8d9b526 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1729.488035] env[62519]: DEBUG oslo_concurrency.lockutils [None req-acfac456-33c8-45db-84b1-8d13db70232a tempest-FloatingIPsAssociationNegativeTestJSON-1468566888 tempest-FloatingIPsAssociationNegativeTestJSON-1468566888-project-member] Lock "d710c97b-a2fd-4a54-baaa-ec7664895ce7" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 34.922s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1729.534514] env[62519]: DEBUG oslo_vmware.api [None req-c9946012-073d-42c5-b066-93d94ca91d4d tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Task: {'id': task-1802809, 'name': PowerOffVM_Task, 'duration_secs': 0.245949} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1729.534514] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-c9946012-073d-42c5-b066-93d94ca91d4d tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] [instance: 34d2991e-b6df-473d-8994-e45ff57ef131] Powered off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1729.534608] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-c9946012-073d-42c5-b066-93d94ca91d4d tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] [instance: 34d2991e-b6df-473d-8994-e45ff57ef131] Unregistering the VM {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1729.534882] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d4caf4e0-7caf-49a0-a28f-e934f3ab7a3c {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1729.622874] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-c9946012-073d-42c5-b066-93d94ca91d4d tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] [instance: 34d2991e-b6df-473d-8994-e45ff57ef131] Unregistered the VM {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1729.623145] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-c9946012-073d-42c5-b066-93d94ca91d4d tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] [instance: 34d2991e-b6df-473d-8994-e45ff57ef131] Deleting contents of the VM from datastore datastore1 {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1729.623352] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-c9946012-073d-42c5-b066-93d94ca91d4d tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Deleting the datastore file [datastore1] 34d2991e-b6df-473d-8994-e45ff57ef131 {{(pid=62519) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1729.623788] env[62519]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-42184bb0-248a-41c3-b007-c14f25c5891f {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1729.631629] env[62519]: DEBUG oslo_vmware.api [None req-c9946012-073d-42c5-b066-93d94ca91d4d tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Waiting for the task: (returnval){ [ 1729.631629] env[62519]: value = "task-1802811" [ 1729.631629] env[62519]: _type = "Task" [ 1729.631629] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1729.646313] env[62519]: DEBUG oslo_vmware.api [None req-c9946012-073d-42c5-b066-93d94ca91d4d tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Task: {'id': task-1802811, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1729.761612] env[62519]: DEBUG nova.compute.utils [None req-38e1cc42-c585-4d7c-ba6b-635100d384aa tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Using /dev/sd instead of None {{(pid=62519) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1729.765730] env[62519]: DEBUG nova.compute.manager [None req-38e1cc42-c585-4d7c-ba6b-635100d384aa tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] [instance: e3a19583-b332-40e3-bdd0-d254f7a78b0a] Allocating IP information in the background. {{(pid=62519) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1989}} [ 1729.766434] env[62519]: DEBUG nova.network.neutron [None req-38e1cc42-c585-4d7c-ba6b-635100d384aa tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] [instance: e3a19583-b332-40e3-bdd0-d254f7a78b0a] allocate_for_instance() {{(pid=62519) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1729.846044] env[62519]: DEBUG nova.policy [None req-38e1cc42-c585-4d7c-ba6b-635100d384aa tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '053a30aaf81b4cbd8ced7018ebfe1f40', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '0e755fb5a6e94068b6c99b1638081f5f', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62519) authorize /opt/stack/nova/nova/policy.py:192}} [ 1729.976766] env[62519]: DEBUG nova.compute.manager [req-6f9bbc2c-150d-43a8-b3ed-29e79c3b83dd req-bcc04932-72c6-4fc6-a03c-7c8621096fed service nova] [instance: d710c97b-a2fd-4a54-baaa-ec7664895ce7] Received event network-changed-8c956f8c-d517-457d-add9-83f72e6718dc {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1729.976766] env[62519]: DEBUG nova.compute.manager [req-6f9bbc2c-150d-43a8-b3ed-29e79c3b83dd req-bcc04932-72c6-4fc6-a03c-7c8621096fed service nova] [instance: d710c97b-a2fd-4a54-baaa-ec7664895ce7] Refreshing instance network info cache due to event network-changed-8c956f8c-d517-457d-add9-83f72e6718dc. 
{{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11628}} [ 1729.976766] env[62519]: DEBUG oslo_concurrency.lockutils [req-6f9bbc2c-150d-43a8-b3ed-29e79c3b83dd req-bcc04932-72c6-4fc6-a03c-7c8621096fed service nova] Acquiring lock "refresh_cache-d710c97b-a2fd-4a54-baaa-ec7664895ce7" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1729.976766] env[62519]: DEBUG oslo_concurrency.lockutils [req-6f9bbc2c-150d-43a8-b3ed-29e79c3b83dd req-bcc04932-72c6-4fc6-a03c-7c8621096fed service nova] Acquired lock "refresh_cache-d710c97b-a2fd-4a54-baaa-ec7664895ce7" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1729.977560] env[62519]: DEBUG nova.network.neutron [req-6f9bbc2c-150d-43a8-b3ed-29e79c3b83dd req-bcc04932-72c6-4fc6-a03c-7c8621096fed service nova] [instance: d710c97b-a2fd-4a54-baaa-ec7664895ce7] Refreshing network info cache for port 8c956f8c-d517-457d-add9-83f72e6718dc {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1730.144433] env[62519]: DEBUG oslo_vmware.api [None req-c9946012-073d-42c5-b066-93d94ca91d4d tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Task: {'id': task-1802811, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.269935} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1730.144433] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-c9946012-073d-42c5-b066-93d94ca91d4d tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Deleted the datastore file {{(pid=62519) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1730.144433] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-c9946012-073d-42c5-b066-93d94ca91d4d tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] [instance: 34d2991e-b6df-473d-8994-e45ff57ef131] Deleted contents of the VM from datastore datastore1 {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1730.144433] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-c9946012-073d-42c5-b066-93d94ca91d4d tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] [instance: 34d2991e-b6df-473d-8994-e45ff57ef131] Instance destroyed {{(pid=62519) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1730.146080] env[62519]: INFO nova.compute.manager [None req-c9946012-073d-42c5-b066-93d94ca91d4d tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] [instance: 34d2991e-b6df-473d-8994-e45ff57ef131] Took 1.15 seconds to destroy the instance on the hypervisor. [ 1730.146080] env[62519]: DEBUG oslo.service.loopingcall [None req-c9946012-073d-42c5-b066-93d94ca91d4d tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62519) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1730.146080] env[62519]: DEBUG nova.compute.manager [-] [instance: 34d2991e-b6df-473d-8994-e45ff57ef131] Deallocating network for instance {{(pid=62519) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2297}} [ 1730.146080] env[62519]: DEBUG nova.network.neutron [-] [instance: 34d2991e-b6df-473d-8994-e45ff57ef131] deallocate_for_instance() {{(pid=62519) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1730.270095] env[62519]: DEBUG nova.compute.manager [None req-38e1cc42-c585-4d7c-ba6b-635100d384aa tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] [instance: e3a19583-b332-40e3-bdd0-d254f7a78b0a] Start building block device mappings for instance. {{(pid=62519) _build_resources /opt/stack/nova/nova/compute/manager.py:2873}} [ 1730.600506] env[62519]: DEBUG nova.network.neutron [None req-38e1cc42-c585-4d7c-ba6b-635100d384aa tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] [instance: e3a19583-b332-40e3-bdd0-d254f7a78b0a] Successfully created port: c119fc75-1f9d-4eb2-83fb-64e6f08d3dc8 {{(pid=62519) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1730.657700] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da107faa-aab1-43fd-9e9f-f2af098458a7 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1730.667343] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b1e2410-62d3-4e76-8fca-c88ef191fb83 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1730.704801] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4ada892-2b15-4ae1-8875-2fdc1870ab9d {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1730.714894] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f66fd322-f8b9-42f5-b821-d657f7bbad78 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1730.731028] env[62519]: DEBUG nova.compute.provider_tree [None req-3fd0f8cc-63e6-40bc-a024-cf62b0b44c98 tempest-ServerRescueTestJSONUnderV235-415838788 tempest-ServerRescueTestJSONUnderV235-415838788-project-member] Inventory has not changed in ProviderTree for provider: f8ca0d98-9158-4b85-ae0e-b106f966dd44 {{(pid=62519) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1731.234367] env[62519]: DEBUG nova.scheduler.client.report [None req-3fd0f8cc-63e6-40bc-a024-cf62b0b44c98 tempest-ServerRescueTestJSONUnderV235-415838788 tempest-ServerRescueTestJSONUnderV235-415838788-project-member] Inventory has not changed for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1731.284723] 
env[62519]: DEBUG nova.compute.manager [None req-38e1cc42-c585-4d7c-ba6b-635100d384aa tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] [instance: e3a19583-b332-40e3-bdd0-d254f7a78b0a] Start spawning the instance on the hypervisor. {{(pid=62519) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2647}} [ 1731.318403] env[62519]: DEBUG nova.virt.hardware [None req-38e1cc42-c585-4d7c-ba6b-635100d384aa tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T07:56:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T07:55:55Z,direct_url=,disk_format='vmdk',id=15793716-f1d9-4a86-9030-717adf498693,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4069337684d348e0a1ab4eb6a1a2b14d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T07:55:56Z,virtual_size=,visibility=), allow threads: False {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1731.318403] env[62519]: DEBUG nova.virt.hardware [None req-38e1cc42-c585-4d7c-ba6b-635100d384aa tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Flavor limits 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1731.318403] env[62519]: DEBUG nova.virt.hardware [None req-38e1cc42-c585-4d7c-ba6b-635100d384aa tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Image limits 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1731.318403] env[62519]: DEBUG nova.virt.hardware [None req-38e1cc42-c585-4d7c-ba6b-635100d384aa tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Flavor pref 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1731.318659] env[62519]: DEBUG nova.virt.hardware [None req-38e1cc42-c585-4d7c-ba6b-635100d384aa tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Image pref 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1731.318994] env[62519]: DEBUG nova.virt.hardware [None req-38e1cc42-c585-4d7c-ba6b-635100d384aa tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1731.319402] env[62519]: DEBUG nova.virt.hardware [None req-38e1cc42-c585-4d7c-ba6b-635100d384aa tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1731.319500] env[62519]: DEBUG nova.virt.hardware [None req-38e1cc42-c585-4d7c-ba6b-635100d384aa tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Build topologies for 1 vcpu(s) 
1:1:1 {{(pid=62519) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1731.319675] env[62519]: DEBUG nova.virt.hardware [None req-38e1cc42-c585-4d7c-ba6b-635100d384aa tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Got 1 possible topologies {{(pid=62519) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1731.320013] env[62519]: DEBUG nova.virt.hardware [None req-38e1cc42-c585-4d7c-ba6b-635100d384aa tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1731.320182] env[62519]: DEBUG nova.virt.hardware [None req-38e1cc42-c585-4d7c-ba6b-635100d384aa tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1731.321115] env[62519]: DEBUG nova.network.neutron [req-6f9bbc2c-150d-43a8-b3ed-29e79c3b83dd req-bcc04932-72c6-4fc6-a03c-7c8621096fed service nova] [instance: d710c97b-a2fd-4a54-baaa-ec7664895ce7] Updated VIF entry in instance network info cache for port 8c956f8c-d517-457d-add9-83f72e6718dc. {{(pid=62519) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1731.321437] env[62519]: DEBUG nova.network.neutron [req-6f9bbc2c-150d-43a8-b3ed-29e79c3b83dd req-bcc04932-72c6-4fc6-a03c-7c8621096fed service nova] [instance: d710c97b-a2fd-4a54-baaa-ec7664895ce7] Updating instance_info_cache with network_info: [{"id": "8c956f8c-d517-457d-add9-83f72e6718dc", "address": "fa:16:3e:80:d4:5e", "network": {"id": "d62cba85-2cb0-4dab-a59f-7004ed84a75f", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationNegativeTestJSON-943660167-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.234", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b5fd21cccbea4d67a6fa4237436af0d7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a555680e-4721-4509-97e4-ced9dc17c13e", "external-id": "nsx-vlan-transportzone-4", "segmentation_id": 4, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8c956f8c-d5", "ovs_interfaceid": "8c956f8c-d517-457d-add9-83f72e6718dc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1731.326071] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d28953ba-fa97-4f2b-8fe8-edd09b2fd3e0 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1731.339597] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ee38044-b22a-4c58-b9c4-f037ab47d47a {{(pid=62519) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1731.413759] env[62519]: DEBUG nova.compute.manager [req-2244e9ba-0165-4551-9d86-084479b19883 req-5937a322-1973-45ea-bf97-fc2e5014e1e0 service nova] [instance: 34d2991e-b6df-473d-8994-e45ff57ef131] Received event network-vif-deleted-e6b9aab2-f105-4c06-b204-f0626f41ccbe {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1731.413965] env[62519]: INFO nova.compute.manager [req-2244e9ba-0165-4551-9d86-084479b19883 req-5937a322-1973-45ea-bf97-fc2e5014e1e0 service nova] [instance: 34d2991e-b6df-473d-8994-e45ff57ef131] Neutron deleted interface e6b9aab2-f105-4c06-b204-f0626f41ccbe; detaching it from the instance and deleting it from the info cache [ 1731.414152] env[62519]: DEBUG nova.network.neutron [req-2244e9ba-0165-4551-9d86-084479b19883 req-5937a322-1973-45ea-bf97-fc2e5014e1e0 service nova] [instance: 34d2991e-b6df-473d-8994-e45ff57ef131] Updating instance_info_cache with network_info: [] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1731.608897] env[62519]: DEBUG nova.network.neutron [-] [instance: 34d2991e-b6df-473d-8994-e45ff57ef131] Updating instance_info_cache with network_info: [] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1731.743319] env[62519]: DEBUG oslo_concurrency.lockutils [None req-3fd0f8cc-63e6-40bc-a024-cf62b0b44c98 tempest-ServerRescueTestJSONUnderV235-415838788 tempest-ServerRescueTestJSONUnderV235-415838788-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.496s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1731.743948] env[62519]: DEBUG nova.compute.manager [None req-3fd0f8cc-63e6-40bc-a024-cf62b0b44c98 tempest-ServerRescueTestJSONUnderV235-415838788 tempest-ServerRescueTestJSONUnderV235-415838788-project-member] [instance: 156ed02a-3365-4a4f-b4de-ea86920d3baf] Start building networks asynchronously for instance. 
{{(pid=62519) _build_resources /opt/stack/nova/nova/compute/manager.py:2838}} [ 1731.746590] env[62519]: DEBUG oslo_concurrency.lockutils [None req-1e42fcec-c842-44cb-b7d8-0548bb9c2f2e tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 7.480s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1731.833056] env[62519]: DEBUG oslo_concurrency.lockutils [req-6f9bbc2c-150d-43a8-b3ed-29e79c3b83dd req-bcc04932-72c6-4fc6-a03c-7c8621096fed service nova] Releasing lock "refresh_cache-d710c97b-a2fd-4a54-baaa-ec7664895ce7" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1731.916345] env[62519]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-ca2678f3-9115-4842-90ad-a7264948ffd9 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1731.926878] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af02967d-bbaa-40b3-81d9-1591baa9716d {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1731.967321] env[62519]: DEBUG nova.compute.manager [req-2244e9ba-0165-4551-9d86-084479b19883 req-5937a322-1973-45ea-bf97-fc2e5014e1e0 service nova] [instance: 34d2991e-b6df-473d-8994-e45ff57ef131] Detach interface failed, port_id=e6b9aab2-f105-4c06-b204-f0626f41ccbe, reason: Instance 34d2991e-b6df-473d-8994-e45ff57ef131 could not be found. {{(pid=62519) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11455}} [ 1732.111205] env[62519]: INFO nova.compute.manager [-] [instance: 34d2991e-b6df-473d-8994-e45ff57ef131] Took 1.97 seconds to deallocate network for instance. [ 1732.253341] env[62519]: DEBUG nova.compute.utils [None req-3fd0f8cc-63e6-40bc-a024-cf62b0b44c98 tempest-ServerRescueTestJSONUnderV235-415838788 tempest-ServerRescueTestJSONUnderV235-415838788-project-member] Using /dev/sd instead of None {{(pid=62519) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1732.259061] env[62519]: DEBUG nova.compute.manager [None req-3fd0f8cc-63e6-40bc-a024-cf62b0b44c98 tempest-ServerRescueTestJSONUnderV235-415838788 tempest-ServerRescueTestJSONUnderV235-415838788-project-member] [instance: 156ed02a-3365-4a4f-b4de-ea86920d3baf] Allocating IP information in the background. 
{{(pid=62519) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1989}} [ 1732.259061] env[62519]: DEBUG nova.network.neutron [None req-3fd0f8cc-63e6-40bc-a024-cf62b0b44c98 tempest-ServerRescueTestJSONUnderV235-415838788 tempest-ServerRescueTestJSONUnderV235-415838788-project-member] [instance: 156ed02a-3365-4a4f-b4de-ea86920d3baf] allocate_for_instance() {{(pid=62519) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1732.354098] env[62519]: DEBUG nova.policy [None req-3fd0f8cc-63e6-40bc-a024-cf62b0b44c98 tempest-ServerRescueTestJSONUnderV235-415838788 tempest-ServerRescueTestJSONUnderV235-415838788-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'df8187760c614d92940ab5fabaede289', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '7336e5aa2f9b42789cec48f07e586876', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62519) authorize /opt/stack/nova/nova/policy.py:192}} [ 1732.619167] env[62519]: DEBUG oslo_concurrency.lockutils [None req-c9946012-073d-42c5-b066-93d94ca91d4d tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1732.677099] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-43062e16-f0e3-451c-bac8-c8df5f3a80b4 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1732.687412] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fafff963-a341-4d42-a2a1-6b8ea4408434 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1732.721894] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8146a3d-3ef3-4841-9d58-494a3e0f79ab {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1732.731137] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23c55239-0b1d-4742-a63a-e6d467ad24b8 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1732.746437] env[62519]: DEBUG nova.compute.provider_tree [None req-1e42fcec-c842-44cb-b7d8-0548bb9c2f2e tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Inventory has not changed in ProviderTree for provider: f8ca0d98-9158-4b85-ae0e-b106f966dd44 {{(pid=62519) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1732.761396] env[62519]: DEBUG nova.compute.manager [None req-3fd0f8cc-63e6-40bc-a024-cf62b0b44c98 tempest-ServerRescueTestJSONUnderV235-415838788 tempest-ServerRescueTestJSONUnderV235-415838788-project-member] [instance: 156ed02a-3365-4a4f-b4de-ea86920d3baf] Start building block device mappings for instance. 
{{(pid=62519) _build_resources /opt/stack/nova/nova/compute/manager.py:2873}} [ 1733.015009] env[62519]: DEBUG nova.network.neutron [None req-3fd0f8cc-63e6-40bc-a024-cf62b0b44c98 tempest-ServerRescueTestJSONUnderV235-415838788 tempest-ServerRescueTestJSONUnderV235-415838788-project-member] [instance: 156ed02a-3365-4a4f-b4de-ea86920d3baf] Successfully created port: af20c500-c1f3-427c-a59d-e8a17b0a6945 {{(pid=62519) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1733.240657] env[62519]: DEBUG nova.network.neutron [None req-38e1cc42-c585-4d7c-ba6b-635100d384aa tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] [instance: e3a19583-b332-40e3-bdd0-d254f7a78b0a] Successfully updated port: c119fc75-1f9d-4eb2-83fb-64e6f08d3dc8 {{(pid=62519) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1733.251918] env[62519]: DEBUG nova.scheduler.client.report [None req-1e42fcec-c842-44cb-b7d8-0548bb9c2f2e tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Inventory has not changed for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1733.621433] env[62519]: DEBUG nova.compute.manager [req-553e8aab-6b4e-4703-aee6-49f3c5b970b4 req-1d090fe9-10cf-4a20-b72e-944b54e99279 service nova] [instance: d710c97b-a2fd-4a54-baaa-ec7664895ce7] Received event network-changed-8c956f8c-d517-457d-add9-83f72e6718dc {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1733.621433] env[62519]: DEBUG nova.compute.manager [req-553e8aab-6b4e-4703-aee6-49f3c5b970b4 req-1d090fe9-10cf-4a20-b72e-944b54e99279 service nova] [instance: d710c97b-a2fd-4a54-baaa-ec7664895ce7] Refreshing instance network info cache due to event network-changed-8c956f8c-d517-457d-add9-83f72e6718dc. 
{{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11628}} [ 1733.621433] env[62519]: DEBUG oslo_concurrency.lockutils [req-553e8aab-6b4e-4703-aee6-49f3c5b970b4 req-1d090fe9-10cf-4a20-b72e-944b54e99279 service nova] Acquiring lock "refresh_cache-d710c97b-a2fd-4a54-baaa-ec7664895ce7" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1733.621433] env[62519]: DEBUG oslo_concurrency.lockutils [req-553e8aab-6b4e-4703-aee6-49f3c5b970b4 req-1d090fe9-10cf-4a20-b72e-944b54e99279 service nova] Acquired lock "refresh_cache-d710c97b-a2fd-4a54-baaa-ec7664895ce7" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1733.621433] env[62519]: DEBUG nova.network.neutron [req-553e8aab-6b4e-4703-aee6-49f3c5b970b4 req-1d090fe9-10cf-4a20-b72e-944b54e99279 service nova] [instance: d710c97b-a2fd-4a54-baaa-ec7664895ce7] Refreshing network info cache for port 8c956f8c-d517-457d-add9-83f72e6718dc {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1733.732547] env[62519]: DEBUG nova.compute.manager [req-1e4fa285-d6ac-46a0-a478-a383deb4c79b req-59e19387-40b8-4249-b560-935a73cbb6d1 service nova] [instance: e3a19583-b332-40e3-bdd0-d254f7a78b0a] Received event network-vif-plugged-c119fc75-1f9d-4eb2-83fb-64e6f08d3dc8 {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1733.732727] env[62519]: DEBUG oslo_concurrency.lockutils [req-1e4fa285-d6ac-46a0-a478-a383deb4c79b req-59e19387-40b8-4249-b560-935a73cbb6d1 service nova] Acquiring lock "e3a19583-b332-40e3-bdd0-d254f7a78b0a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1733.732900] env[62519]: DEBUG oslo_concurrency.lockutils [req-1e4fa285-d6ac-46a0-a478-a383deb4c79b req-59e19387-40b8-4249-b560-935a73cbb6d1 service nova] Lock "e3a19583-b332-40e3-bdd0-d254f7a78b0a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1733.733119] env[62519]: DEBUG oslo_concurrency.lockutils [req-1e4fa285-d6ac-46a0-a478-a383deb4c79b req-59e19387-40b8-4249-b560-935a73cbb6d1 service nova] Lock "e3a19583-b332-40e3-bdd0-d254f7a78b0a-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1733.733294] env[62519]: DEBUG nova.compute.manager [req-1e4fa285-d6ac-46a0-a478-a383deb4c79b req-59e19387-40b8-4249-b560-935a73cbb6d1 service nova] [instance: e3a19583-b332-40e3-bdd0-d254f7a78b0a] No waiting events found dispatching network-vif-plugged-c119fc75-1f9d-4eb2-83fb-64e6f08d3dc8 {{(pid=62519) pop_instance_event /opt/stack/nova/nova/compute/manager.py:323}} [ 1733.733453] env[62519]: WARNING nova.compute.manager [req-1e4fa285-d6ac-46a0-a478-a383deb4c79b req-59e19387-40b8-4249-b560-935a73cbb6d1 service nova] [instance: e3a19583-b332-40e3-bdd0-d254f7a78b0a] Received unexpected event network-vif-plugged-c119fc75-1f9d-4eb2-83fb-64e6f08d3dc8 for instance with vm_state building and task_state spawning. 
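[editor's note] The entries above and below trace oslo.concurrency named locks being taken around Neutron cache refreshes ("Acquiring lock refresh_cache-<uuid>" / "acquired" / "released", emitted from lockutils.py). As a minimal sketch only — not Nova's actual code — the pattern behind those DEBUG lines looks roughly like the following; `refresh_fn` and the function name are hypothetical stand-ins for the cache-refresh call made while the lock is held.

    # Sketch of the named-lock pattern visible in the log; assumes only the
    # public oslo.concurrency API. The lock name mirrors the
    # "refresh_cache-<instance uuid>" names seen in the surrounding entries.
    from oslo_concurrency import lockutils

    def refresh_network_cache(instance_uuid, refresh_fn):
        # lockutils.lock() logs the "Acquiring lock ..." / "Acquired lock ..."
        # DEBUG messages on entry and releases the semaphore on exit.
        with lockutils.lock('refresh_cache-%s' % instance_uuid):
            return refresh_fn(instance_uuid)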
[ 1733.733607] env[62519]: DEBUG nova.compute.manager [req-1e4fa285-d6ac-46a0-a478-a383deb4c79b req-59e19387-40b8-4249-b560-935a73cbb6d1 service nova] [instance: e3a19583-b332-40e3-bdd0-d254f7a78b0a] Received event network-changed-c119fc75-1f9d-4eb2-83fb-64e6f08d3dc8 {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1733.733751] env[62519]: DEBUG nova.compute.manager [req-1e4fa285-d6ac-46a0-a478-a383deb4c79b req-59e19387-40b8-4249-b560-935a73cbb6d1 service nova] [instance: e3a19583-b332-40e3-bdd0-d254f7a78b0a] Refreshing instance network info cache due to event network-changed-c119fc75-1f9d-4eb2-83fb-64e6f08d3dc8. {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11628}} [ 1733.733944] env[62519]: DEBUG oslo_concurrency.lockutils [req-1e4fa285-d6ac-46a0-a478-a383deb4c79b req-59e19387-40b8-4249-b560-935a73cbb6d1 service nova] Acquiring lock "refresh_cache-e3a19583-b332-40e3-bdd0-d254f7a78b0a" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1733.734122] env[62519]: DEBUG oslo_concurrency.lockutils [req-1e4fa285-d6ac-46a0-a478-a383deb4c79b req-59e19387-40b8-4249-b560-935a73cbb6d1 service nova] Acquired lock "refresh_cache-e3a19583-b332-40e3-bdd0-d254f7a78b0a" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1733.734295] env[62519]: DEBUG nova.network.neutron [req-1e4fa285-d6ac-46a0-a478-a383deb4c79b req-59e19387-40b8-4249-b560-935a73cbb6d1 service nova] [instance: e3a19583-b332-40e3-bdd0-d254f7a78b0a] Refreshing network info cache for port c119fc75-1f9d-4eb2-83fb-64e6f08d3dc8 {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1733.740819] env[62519]: DEBUG oslo_concurrency.lockutils [None req-38e1cc42-c585-4d7c-ba6b-635100d384aa tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Acquiring lock "refresh_cache-e3a19583-b332-40e3-bdd0-d254f7a78b0a" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1733.770770] env[62519]: DEBUG nova.compute.manager [None req-3fd0f8cc-63e6-40bc-a024-cf62b0b44c98 tempest-ServerRescueTestJSONUnderV235-415838788 tempest-ServerRescueTestJSONUnderV235-415838788-project-member] [instance: 156ed02a-3365-4a4f-b4de-ea86920d3baf] Start spawning the instance on the hypervisor. 
{{(pid=62519) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2647}} [ 1733.819516] env[62519]: DEBUG nova.virt.hardware [None req-3fd0f8cc-63e6-40bc-a024-cf62b0b44c98 tempest-ServerRescueTestJSONUnderV235-415838788 tempest-ServerRescueTestJSONUnderV235-415838788-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T07:56:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T07:55:55Z,direct_url=,disk_format='vmdk',id=15793716-f1d9-4a86-9030-717adf498693,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4069337684d348e0a1ab4eb6a1a2b14d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T07:55:56Z,virtual_size=,visibility=), allow threads: False {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1733.819516] env[62519]: DEBUG nova.virt.hardware [None req-3fd0f8cc-63e6-40bc-a024-cf62b0b44c98 tempest-ServerRescueTestJSONUnderV235-415838788 tempest-ServerRescueTestJSONUnderV235-415838788-project-member] Flavor limits 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1733.819516] env[62519]: DEBUG nova.virt.hardware [None req-3fd0f8cc-63e6-40bc-a024-cf62b0b44c98 tempest-ServerRescueTestJSONUnderV235-415838788 tempest-ServerRescueTestJSONUnderV235-415838788-project-member] Image limits 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1733.819516] env[62519]: DEBUG nova.virt.hardware [None req-3fd0f8cc-63e6-40bc-a024-cf62b0b44c98 tempest-ServerRescueTestJSONUnderV235-415838788 tempest-ServerRescueTestJSONUnderV235-415838788-project-member] Flavor pref 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1733.819516] env[62519]: DEBUG nova.virt.hardware [None req-3fd0f8cc-63e6-40bc-a024-cf62b0b44c98 tempest-ServerRescueTestJSONUnderV235-415838788 tempest-ServerRescueTestJSONUnderV235-415838788-project-member] Image pref 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1733.819918] env[62519]: DEBUG nova.virt.hardware [None req-3fd0f8cc-63e6-40bc-a024-cf62b0b44c98 tempest-ServerRescueTestJSONUnderV235-415838788 tempest-ServerRescueTestJSONUnderV235-415838788-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1733.820163] env[62519]: DEBUG nova.virt.hardware [None req-3fd0f8cc-63e6-40bc-a024-cf62b0b44c98 tempest-ServerRescueTestJSONUnderV235-415838788 tempest-ServerRescueTestJSONUnderV235-415838788-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1733.820328] env[62519]: DEBUG nova.virt.hardware [None req-3fd0f8cc-63e6-40bc-a024-cf62b0b44c98 tempest-ServerRescueTestJSONUnderV235-415838788 tempest-ServerRescueTestJSONUnderV235-415838788-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62519) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1733.820496] env[62519]: DEBUG nova.virt.hardware [None req-3fd0f8cc-63e6-40bc-a024-cf62b0b44c98 tempest-ServerRescueTestJSONUnderV235-415838788 tempest-ServerRescueTestJSONUnderV235-415838788-project-member] Got 1 possible topologies {{(pid=62519) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1733.820654] env[62519]: DEBUG nova.virt.hardware [None req-3fd0f8cc-63e6-40bc-a024-cf62b0b44c98 tempest-ServerRescueTestJSONUnderV235-415838788 tempest-ServerRescueTestJSONUnderV235-415838788-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1733.820875] env[62519]: DEBUG nova.virt.hardware [None req-3fd0f8cc-63e6-40bc-a024-cf62b0b44c98 tempest-ServerRescueTestJSONUnderV235-415838788 tempest-ServerRescueTestJSONUnderV235-415838788-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1733.823763] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46350082-3404-40f3-a7ee-fb67f5045b4d {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1733.836030] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6154e507-22d7-4e43-9eb4-80e09ccca187 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1733.959101] env[62519]: DEBUG oslo_concurrency.lockutils [None req-84dc4058-6eb4-4380-b9f1-346dccf99218 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Acquiring lock "ee4b10ba-1c56-47cf-a528-d6e65c286ddb" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1733.959379] env[62519]: DEBUG oslo_concurrency.lockutils [None req-84dc4058-6eb4-4380-b9f1-346dccf99218 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Lock "ee4b10ba-1c56-47cf-a528-d6e65c286ddb" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1734.269373] env[62519]: DEBUG oslo_concurrency.lockutils [None req-1e42fcec-c842-44cb-b7d8-0548bb9c2f2e tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 2.523s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1734.273534] env[62519]: DEBUG oslo_concurrency.lockutils [None req-22e1d356-1166-4a97-af20-48bea5d24d15 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 9.161s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1734.275197] env[62519]: INFO 
nova.compute.claims [None req-22e1d356-1166-4a97-af20-48bea5d24d15 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] [instance: dd60afd6-2834-4fca-a846-e39d57aabd60] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1734.298500] env[62519]: DEBUG nova.network.neutron [req-1e4fa285-d6ac-46a0-a478-a383deb4c79b req-59e19387-40b8-4249-b560-935a73cbb6d1 service nova] [instance: e3a19583-b332-40e3-bdd0-d254f7a78b0a] Instance cache missing network info. {{(pid=62519) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1734.455553] env[62519]: DEBUG nova.network.neutron [req-1e4fa285-d6ac-46a0-a478-a383deb4c79b req-59e19387-40b8-4249-b560-935a73cbb6d1 service nova] [instance: e3a19583-b332-40e3-bdd0-d254f7a78b0a] Updating instance_info_cache with network_info: [] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1734.460230] env[62519]: DEBUG nova.network.neutron [req-553e8aab-6b4e-4703-aee6-49f3c5b970b4 req-1d090fe9-10cf-4a20-b72e-944b54e99279 service nova] [instance: d710c97b-a2fd-4a54-baaa-ec7664895ce7] Updated VIF entry in instance network info cache for port 8c956f8c-d517-457d-add9-83f72e6718dc. {{(pid=62519) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1734.460581] env[62519]: DEBUG nova.network.neutron [req-553e8aab-6b4e-4703-aee6-49f3c5b970b4 req-1d090fe9-10cf-4a20-b72e-944b54e99279 service nova] [instance: d710c97b-a2fd-4a54-baaa-ec7664895ce7] Updating instance_info_cache with network_info: [{"id": "8c956f8c-d517-457d-add9-83f72e6718dc", "address": "fa:16:3e:80:d4:5e", "network": {"id": "d62cba85-2cb0-4dab-a59f-7004ed84a75f", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationNegativeTestJSON-943660167-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b5fd21cccbea4d67a6fa4237436af0d7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a555680e-4721-4509-97e4-ced9dc17c13e", "external-id": "nsx-vlan-transportzone-4", "segmentation_id": 4, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8c956f8c-d5", "ovs_interfaceid": "8c956f8c-d517-457d-add9-83f72e6718dc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1734.465429] env[62519]: DEBUG nova.compute.manager [None req-84dc4058-6eb4-4380-b9f1-346dccf99218 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] [instance: ee4b10ba-1c56-47cf-a528-d6e65c286ddb] Starting instance... 
{{(pid=62519) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2441}} [ 1734.862968] env[62519]: INFO nova.scheduler.client.report [None req-1e42fcec-c842-44cb-b7d8-0548bb9c2f2e tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Deleted allocation for migration 7e488268-8aed-49a2-a0b0-dbd98ced6c2b [ 1734.961690] env[62519]: DEBUG oslo_concurrency.lockutils [req-1e4fa285-d6ac-46a0-a478-a383deb4c79b req-59e19387-40b8-4249-b560-935a73cbb6d1 service nova] Releasing lock "refresh_cache-e3a19583-b332-40e3-bdd0-d254f7a78b0a" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1734.962732] env[62519]: DEBUG oslo_concurrency.lockutils [None req-38e1cc42-c585-4d7c-ba6b-635100d384aa tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Acquired lock "refresh_cache-e3a19583-b332-40e3-bdd0-d254f7a78b0a" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1734.962732] env[62519]: DEBUG nova.network.neutron [None req-38e1cc42-c585-4d7c-ba6b-635100d384aa tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] [instance: e3a19583-b332-40e3-bdd0-d254f7a78b0a] Building network info cache for instance {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1734.963564] env[62519]: DEBUG oslo_concurrency.lockutils [req-553e8aab-6b4e-4703-aee6-49f3c5b970b4 req-1d090fe9-10cf-4a20-b72e-944b54e99279 service nova] Releasing lock "refresh_cache-d710c97b-a2fd-4a54-baaa-ec7664895ce7" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1734.985793] env[62519]: DEBUG oslo_concurrency.lockutils [None req-84dc4058-6eb4-4380-b9f1-346dccf99218 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1735.021818] env[62519]: DEBUG oslo_concurrency.lockutils [None req-12f12914-bee9-4598-82bd-c3b629c1401f tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Acquiring lock "2ea8304e-5b91-4908-a876-6e2c780b1da9" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1735.022068] env[62519]: DEBUG oslo_concurrency.lockutils [None req-12f12914-bee9-4598-82bd-c3b629c1401f tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Lock "2ea8304e-5b91-4908-a876-6e2c780b1da9" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1735.089409] env[62519]: DEBUG nova.network.neutron [None req-3fd0f8cc-63e6-40bc-a024-cf62b0b44c98 tempest-ServerRescueTestJSONUnderV235-415838788 tempest-ServerRescueTestJSONUnderV235-415838788-project-member] [instance: 156ed02a-3365-4a4f-b4de-ea86920d3baf] Successfully updated port: af20c500-c1f3-427c-a59d-e8a17b0a6945 {{(pid=62519) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 
1735.370736] env[62519]: DEBUG oslo_concurrency.lockutils [None req-1e42fcec-c842-44cb-b7d8-0548bb9c2f2e tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Lock "ad0af10d-5063-4344-b12f-1d3ee9ea1090" "released" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: held 14.500s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1735.501082] env[62519]: DEBUG nova.network.neutron [None req-38e1cc42-c585-4d7c-ba6b-635100d384aa tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] [instance: e3a19583-b332-40e3-bdd0-d254f7a78b0a] Instance cache missing network info. {{(pid=62519) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1735.525877] env[62519]: DEBUG nova.compute.manager [None req-12f12914-bee9-4598-82bd-c3b629c1401f tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] [instance: 2ea8304e-5b91-4908-a876-6e2c780b1da9] Starting instance... {{(pid=62519) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2441}} [ 1735.592407] env[62519]: DEBUG oslo_concurrency.lockutils [None req-3fd0f8cc-63e6-40bc-a024-cf62b0b44c98 tempest-ServerRescueTestJSONUnderV235-415838788 tempest-ServerRescueTestJSONUnderV235-415838788-project-member] Acquiring lock "refresh_cache-156ed02a-3365-4a4f-b4de-ea86920d3baf" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1735.592622] env[62519]: DEBUG oslo_concurrency.lockutils [None req-3fd0f8cc-63e6-40bc-a024-cf62b0b44c98 tempest-ServerRescueTestJSONUnderV235-415838788 tempest-ServerRescueTestJSONUnderV235-415838788-project-member] Acquired lock "refresh_cache-156ed02a-3365-4a4f-b4de-ea86920d3baf" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1735.592695] env[62519]: DEBUG nova.network.neutron [None req-3fd0f8cc-63e6-40bc-a024-cf62b0b44c98 tempest-ServerRescueTestJSONUnderV235-415838788 tempest-ServerRescueTestJSONUnderV235-415838788-project-member] [instance: 156ed02a-3365-4a4f-b4de-ea86920d3baf] Building network info cache for instance {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1735.682223] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c72f97f9-a2c6-40e1-936e-5919142dcfec {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1735.691282] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6cd34bc-1736-48c5-8e67-a8f646c5e0f1 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1735.697702] env[62519]: DEBUG nova.network.neutron [None req-38e1cc42-c585-4d7c-ba6b-635100d384aa tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] [instance: e3a19583-b332-40e3-bdd0-d254f7a78b0a] Updating instance_info_cache with network_info: [{"id": "c119fc75-1f9d-4eb2-83fb-64e6f08d3dc8", "address": "fa:16:3e:ac:d1:a8", "network": {"id": "8af40fa9-af04-4837-8508-5d0e5b7f1d9c", "bridge": "br-int", "label": "tempest-ServersTestJSON-1824239670-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": 
[{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0e755fb5a6e94068b6c99b1638081f5f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7ab8d568-adb0-4f3b-b6cc-68413e6546ae", "external-id": "nsx-vlan-transportzone-86", "segmentation_id": 86, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc119fc75-1f", "ovs_interfaceid": "c119fc75-1f9d-4eb2-83fb-64e6f08d3dc8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1735.727020] env[62519]: DEBUG oslo_concurrency.lockutils [None req-38e1cc42-c585-4d7c-ba6b-635100d384aa tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Releasing lock "refresh_cache-e3a19583-b332-40e3-bdd0-d254f7a78b0a" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1735.727532] env[62519]: DEBUG nova.compute.manager [None req-38e1cc42-c585-4d7c-ba6b-635100d384aa tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] [instance: e3a19583-b332-40e3-bdd0-d254f7a78b0a] Instance network_info: |[{"id": "c119fc75-1f9d-4eb2-83fb-64e6f08d3dc8", "address": "fa:16:3e:ac:d1:a8", "network": {"id": "8af40fa9-af04-4837-8508-5d0e5b7f1d9c", "bridge": "br-int", "label": "tempest-ServersTestJSON-1824239670-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0e755fb5a6e94068b6c99b1638081f5f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7ab8d568-adb0-4f3b-b6cc-68413e6546ae", "external-id": "nsx-vlan-transportzone-86", "segmentation_id": 86, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc119fc75-1f", "ovs_interfaceid": "c119fc75-1f9d-4eb2-83fb-64e6f08d3dc8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62519) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2004}} [ 1735.728476] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb401361-5604-4851-8971-230e6e48a3f2 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1735.731991] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-38e1cc42-c585-4d7c-ba6b-635100d384aa tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] [instance: e3a19583-b332-40e3-bdd0-d254f7a78b0a] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ac:d1:a8', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '7ab8d568-adb0-4f3b-b6cc-68413e6546ae', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 
'c119fc75-1f9d-4eb2-83fb-64e6f08d3dc8', 'vif_model': 'vmxnet3'}] {{(pid=62519) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1735.742134] env[62519]: DEBUG oslo.service.loopingcall [None req-38e1cc42-c585-4d7c-ba6b-635100d384aa tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62519) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1735.742571] env[62519]: DEBUG oslo_concurrency.lockutils [None req-fceb78c5-e39c-4dee-861a-4c99ebc86aec tempest-FloatingIPsAssociationNegativeTestJSON-1468566888 tempest-FloatingIPsAssociationNegativeTestJSON-1468566888-project-member] Acquiring lock "d710c97b-a2fd-4a54-baaa-ec7664895ce7" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1735.745549] env[62519]: DEBUG oslo_concurrency.lockutils [None req-fceb78c5-e39c-4dee-861a-4c99ebc86aec tempest-FloatingIPsAssociationNegativeTestJSON-1468566888 tempest-FloatingIPsAssociationNegativeTestJSON-1468566888-project-member] Lock "d710c97b-a2fd-4a54-baaa-ec7664895ce7" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1735.745549] env[62519]: DEBUG oslo_concurrency.lockutils [None req-fceb78c5-e39c-4dee-861a-4c99ebc86aec tempest-FloatingIPsAssociationNegativeTestJSON-1468566888 tempest-FloatingIPsAssociationNegativeTestJSON-1468566888-project-member] Acquiring lock "d710c97b-a2fd-4a54-baaa-ec7664895ce7-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1735.745549] env[62519]: DEBUG oslo_concurrency.lockutils [None req-fceb78c5-e39c-4dee-861a-4c99ebc86aec tempest-FloatingIPsAssociationNegativeTestJSON-1468566888 tempest-FloatingIPsAssociationNegativeTestJSON-1468566888-project-member] Lock "d710c97b-a2fd-4a54-baaa-ec7664895ce7-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1735.745549] env[62519]: DEBUG oslo_concurrency.lockutils [None req-fceb78c5-e39c-4dee-861a-4c99ebc86aec tempest-FloatingIPsAssociationNegativeTestJSON-1468566888 tempest-FloatingIPsAssociationNegativeTestJSON-1468566888-project-member] Lock "d710c97b-a2fd-4a54-baaa-ec7664895ce7-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1735.745549] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e3a19583-b332-40e3-bdd0-d254f7a78b0a] Creating VM on the ESX host {{(pid=62519) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1735.745934] env[62519]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a548aa4d-f5f2-4e66-98e5-02a0de9e20d5 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1735.766202] env[62519]: INFO nova.compute.manager [None 
req-fceb78c5-e39c-4dee-861a-4c99ebc86aec tempest-FloatingIPsAssociationNegativeTestJSON-1468566888 tempest-FloatingIPsAssociationNegativeTestJSON-1468566888-project-member] [instance: d710c97b-a2fd-4a54-baaa-ec7664895ce7] Terminating instance [ 1735.774244] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d97a4b32-5b6d-47c3-bb79-2ead7108fa32 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1735.777997] env[62519]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1735.777997] env[62519]: value = "task-1802812" [ 1735.777997] env[62519]: _type = "Task" [ 1735.777997] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1735.792022] env[62519]: DEBUG nova.compute.provider_tree [None req-22e1d356-1166-4a97-af20-48bea5d24d15 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Inventory has not changed in ProviderTree for provider: f8ca0d98-9158-4b85-ae0e-b106f966dd44 {{(pid=62519) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1735.800398] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1802812, 'name': CreateVM_Task} progress is 5%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1735.827024] env[62519]: DEBUG nova.compute.manager [req-c37ce082-b55d-4082-982b-d3bf4f63a5b6 req-fbba22c4-fdb7-4625-bb46-21676c450d5e service nova] [instance: 156ed02a-3365-4a4f-b4de-ea86920d3baf] Received event network-vif-plugged-af20c500-c1f3-427c-a59d-e8a17b0a6945 {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1735.827196] env[62519]: DEBUG oslo_concurrency.lockutils [req-c37ce082-b55d-4082-982b-d3bf4f63a5b6 req-fbba22c4-fdb7-4625-bb46-21676c450d5e service nova] Acquiring lock "156ed02a-3365-4a4f-b4de-ea86920d3baf-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1735.827316] env[62519]: DEBUG oslo_concurrency.lockutils [req-c37ce082-b55d-4082-982b-d3bf4f63a5b6 req-fbba22c4-fdb7-4625-bb46-21676c450d5e service nova] Lock "156ed02a-3365-4a4f-b4de-ea86920d3baf-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1735.827511] env[62519]: DEBUG oslo_concurrency.lockutils [req-c37ce082-b55d-4082-982b-d3bf4f63a5b6 req-fbba22c4-fdb7-4625-bb46-21676c450d5e service nova] Lock "156ed02a-3365-4a4f-b4de-ea86920d3baf-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1735.827612] env[62519]: DEBUG nova.compute.manager [req-c37ce082-b55d-4082-982b-d3bf4f63a5b6 req-fbba22c4-fdb7-4625-bb46-21676c450d5e service nova] [instance: 156ed02a-3365-4a4f-b4de-ea86920d3baf] No waiting events found dispatching network-vif-plugged-af20c500-c1f3-427c-a59d-e8a17b0a6945 {{(pid=62519) pop_instance_event /opt/stack/nova/nova/compute/manager.py:323}} [ 1735.827736] env[62519]: WARNING nova.compute.manager [req-c37ce082-b55d-4082-982b-d3bf4f63a5b6 req-fbba22c4-fdb7-4625-bb46-21676c450d5e service nova] [instance: 
156ed02a-3365-4a4f-b4de-ea86920d3baf] Received unexpected event network-vif-plugged-af20c500-c1f3-427c-a59d-e8a17b0a6945 for instance with vm_state building and task_state spawning. [ 1735.827891] env[62519]: DEBUG nova.compute.manager [req-c37ce082-b55d-4082-982b-d3bf4f63a5b6 req-fbba22c4-fdb7-4625-bb46-21676c450d5e service nova] [instance: 156ed02a-3365-4a4f-b4de-ea86920d3baf] Received event network-changed-af20c500-c1f3-427c-a59d-e8a17b0a6945 {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1735.828278] env[62519]: DEBUG nova.compute.manager [req-c37ce082-b55d-4082-982b-d3bf4f63a5b6 req-fbba22c4-fdb7-4625-bb46-21676c450d5e service nova] [instance: 156ed02a-3365-4a4f-b4de-ea86920d3baf] Refreshing instance network info cache due to event network-changed-af20c500-c1f3-427c-a59d-e8a17b0a6945. {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11628}} [ 1735.828499] env[62519]: DEBUG oslo_concurrency.lockutils [req-c37ce082-b55d-4082-982b-d3bf4f63a5b6 req-fbba22c4-fdb7-4625-bb46-21676c450d5e service nova] Acquiring lock "refresh_cache-156ed02a-3365-4a4f-b4de-ea86920d3baf" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1736.053432] env[62519]: DEBUG oslo_concurrency.lockutils [None req-12f12914-bee9-4598-82bd-c3b629c1401f tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1736.140458] env[62519]: DEBUG nova.network.neutron [None req-3fd0f8cc-63e6-40bc-a024-cf62b0b44c98 tempest-ServerRescueTestJSONUnderV235-415838788 tempest-ServerRescueTestJSONUnderV235-415838788-project-member] [instance: 156ed02a-3365-4a4f-b4de-ea86920d3baf] Instance cache missing network info. {{(pid=62519) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1736.278868] env[62519]: DEBUG nova.compute.manager [None req-fceb78c5-e39c-4dee-861a-4c99ebc86aec tempest-FloatingIPsAssociationNegativeTestJSON-1468566888 tempest-FloatingIPsAssociationNegativeTestJSON-1468566888-project-member] [instance: d710c97b-a2fd-4a54-baaa-ec7664895ce7] Start destroying the instance on the hypervisor. 
{{(pid=62519) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3166}} [ 1736.279526] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-fceb78c5-e39c-4dee-861a-4c99ebc86aec tempest-FloatingIPsAssociationNegativeTestJSON-1468566888 tempest-FloatingIPsAssociationNegativeTestJSON-1468566888-project-member] [instance: d710c97b-a2fd-4a54-baaa-ec7664895ce7] Destroying instance {{(pid=62519) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1736.280511] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a891285-e5de-4c67-a0c1-b21e00484b36 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1736.295674] env[62519]: DEBUG nova.scheduler.client.report [None req-22e1d356-1166-4a97-af20-48bea5d24d15 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Inventory has not changed for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1736.298719] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1802812, 'name': CreateVM_Task} progress is 25%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1736.302022] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-fceb78c5-e39c-4dee-861a-4c99ebc86aec tempest-FloatingIPsAssociationNegativeTestJSON-1468566888 tempest-FloatingIPsAssociationNegativeTestJSON-1468566888-project-member] [instance: d710c97b-a2fd-4a54-baaa-ec7664895ce7] Powering off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1736.302022] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-beb40e48-a8fb-4dea-85ca-ecdb0100b7ef {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1736.310061] env[62519]: DEBUG oslo_vmware.api [None req-fceb78c5-e39c-4dee-861a-4c99ebc86aec tempest-FloatingIPsAssociationNegativeTestJSON-1468566888 tempest-FloatingIPsAssociationNegativeTestJSON-1468566888-project-member] Waiting for the task: (returnval){ [ 1736.310061] env[62519]: value = "task-1802813" [ 1736.310061] env[62519]: _type = "Task" [ 1736.310061] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1736.321394] env[62519]: DEBUG oslo_vmware.api [None req-fceb78c5-e39c-4dee-861a-4c99ebc86aec tempest-FloatingIPsAssociationNegativeTestJSON-1468566888 tempest-FloatingIPsAssociationNegativeTestJSON-1468566888-project-member] Task: {'id': task-1802813, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1736.358447] env[62519]: DEBUG nova.network.neutron [None req-3fd0f8cc-63e6-40bc-a024-cf62b0b44c98 tempest-ServerRescueTestJSONUnderV235-415838788 tempest-ServerRescueTestJSONUnderV235-415838788-project-member] [instance: 156ed02a-3365-4a4f-b4de-ea86920d3baf] Updating instance_info_cache with network_info: [{"id": "af20c500-c1f3-427c-a59d-e8a17b0a6945", "address": "fa:16:3e:2a:c1:c6", "network": {"id": "e0faae63-5ec4-4eef-a323-174dc4b623b5", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-1005479736-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "7336e5aa2f9b42789cec48f07e586876", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4055505f-97ab-400b-969c-43d99b38fd48", "external-id": "nsx-vlan-transportzone-952", "segmentation_id": 952, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapaf20c500-c1", "ovs_interfaceid": "af20c500-c1f3-427c-a59d-e8a17b0a6945", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1736.790299] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1802812, 'name': CreateVM_Task, 'duration_secs': 0.750573} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1736.790477] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e3a19583-b332-40e3-bdd0-d254f7a78b0a] Created VM on the ESX host {{(pid=62519) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1736.791257] env[62519]: DEBUG oslo_concurrency.lockutils [None req-38e1cc42-c585-4d7c-ba6b-635100d384aa tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1736.791425] env[62519]: DEBUG oslo_concurrency.lockutils [None req-38e1cc42-c585-4d7c-ba6b-635100d384aa tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Acquired lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1736.791721] env[62519]: DEBUG oslo_concurrency.lockutils [None req-38e1cc42-c585-4d7c-ba6b-635100d384aa tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1736.791974] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f4edf126-d885-4fa1-bca2-00e46bfe073a {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1736.798032] env[62519]: DEBUG oslo_vmware.api [None req-38e1cc42-c585-4d7c-ba6b-635100d384aa tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Waiting for the task: (returnval){ [ 1736.798032] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]521d7361-5185-d3a8-8d00-b7036530a681" [ 1736.798032] env[62519]: _type = "Task" [ 1736.798032] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1736.805426] env[62519]: DEBUG oslo_concurrency.lockutils [None req-22e1d356-1166-4a97-af20-48bea5d24d15 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.530s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1736.805426] env[62519]: DEBUG nova.compute.manager [None req-22e1d356-1166-4a97-af20-48bea5d24d15 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] [instance: dd60afd6-2834-4fca-a846-e39d57aabd60] Start building networks asynchronously for instance. 
{{(pid=62519) _build_resources /opt/stack/nova/nova/compute/manager.py:2838}} [ 1736.812018] env[62519]: DEBUG oslo_concurrency.lockutils [None req-86f29903-4db3-4b91-af52-9fc65de92246 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 10.635s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1736.813497] env[62519]: INFO nova.compute.claims [None req-86f29903-4db3-4b91-af52-9fc65de92246 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] [instance: c8816718-0a35-4474-b162-c619b0acc154] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1736.817017] env[62519]: DEBUG oslo_vmware.api [None req-38e1cc42-c585-4d7c-ba6b-635100d384aa tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]521d7361-5185-d3a8-8d00-b7036530a681, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1736.824805] env[62519]: DEBUG oslo_vmware.api [None req-fceb78c5-e39c-4dee-861a-4c99ebc86aec tempest-FloatingIPsAssociationNegativeTestJSON-1468566888 tempest-FloatingIPsAssociationNegativeTestJSON-1468566888-project-member] Task: {'id': task-1802813, 'name': PowerOffVM_Task, 'duration_secs': 0.231637} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1736.826370] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-fceb78c5-e39c-4dee-861a-4c99ebc86aec tempest-FloatingIPsAssociationNegativeTestJSON-1468566888 tempest-FloatingIPsAssociationNegativeTestJSON-1468566888-project-member] [instance: d710c97b-a2fd-4a54-baaa-ec7664895ce7] Powered off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1736.826370] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-fceb78c5-e39c-4dee-861a-4c99ebc86aec tempest-FloatingIPsAssociationNegativeTestJSON-1468566888 tempest-FloatingIPsAssociationNegativeTestJSON-1468566888-project-member] [instance: d710c97b-a2fd-4a54-baaa-ec7664895ce7] Unregistering the VM {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1736.826370] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f6bc1910-caea-4c21-8d12-da8438ab0b12 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1736.860076] env[62519]: DEBUG oslo_concurrency.lockutils [None req-3fd0f8cc-63e6-40bc-a024-cf62b0b44c98 tempest-ServerRescueTestJSONUnderV235-415838788 tempest-ServerRescueTestJSONUnderV235-415838788-project-member] Releasing lock "refresh_cache-156ed02a-3365-4a4f-b4de-ea86920d3baf" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1736.860584] env[62519]: DEBUG nova.compute.manager [None req-3fd0f8cc-63e6-40bc-a024-cf62b0b44c98 tempest-ServerRescueTestJSONUnderV235-415838788 tempest-ServerRescueTestJSONUnderV235-415838788-project-member] [instance: 156ed02a-3365-4a4f-b4de-ea86920d3baf] Instance network_info: |[{"id": "af20c500-c1f3-427c-a59d-e8a17b0a6945", "address": "fa:16:3e:2a:c1:c6", "network": {"id": "e0faae63-5ec4-4eef-a323-174dc4b623b5", "bridge": 
"br-int", "label": "tempest-ServerRescueTestJSONUnderV235-1005479736-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "7336e5aa2f9b42789cec48f07e586876", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4055505f-97ab-400b-969c-43d99b38fd48", "external-id": "nsx-vlan-transportzone-952", "segmentation_id": 952, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapaf20c500-c1", "ovs_interfaceid": "af20c500-c1f3-427c-a59d-e8a17b0a6945", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62519) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2004}} [ 1736.860937] env[62519]: DEBUG oslo_concurrency.lockutils [req-c37ce082-b55d-4082-982b-d3bf4f63a5b6 req-fbba22c4-fdb7-4625-bb46-21676c450d5e service nova] Acquired lock "refresh_cache-156ed02a-3365-4a4f-b4de-ea86920d3baf" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1736.861355] env[62519]: DEBUG nova.network.neutron [req-c37ce082-b55d-4082-982b-d3bf4f63a5b6 req-fbba22c4-fdb7-4625-bb46-21676c450d5e service nova] [instance: 156ed02a-3365-4a4f-b4de-ea86920d3baf] Refreshing network info cache for port af20c500-c1f3-427c-a59d-e8a17b0a6945 {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1736.863227] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-3fd0f8cc-63e6-40bc-a024-cf62b0b44c98 tempest-ServerRescueTestJSONUnderV235-415838788 tempest-ServerRescueTestJSONUnderV235-415838788-project-member] [instance: 156ed02a-3365-4a4f-b4de-ea86920d3baf] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:2a:c1:c6', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '4055505f-97ab-400b-969c-43d99b38fd48', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'af20c500-c1f3-427c-a59d-e8a17b0a6945', 'vif_model': 'vmxnet3'}] {{(pid=62519) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1736.872677] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-3fd0f8cc-63e6-40bc-a024-cf62b0b44c98 tempest-ServerRescueTestJSONUnderV235-415838788 tempest-ServerRescueTestJSONUnderV235-415838788-project-member] Creating folder: Project (7336e5aa2f9b42789cec48f07e586876). Parent ref: group-v373567. {{(pid=62519) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1736.873489] env[62519]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-da325916-46a6-40fe-9b94-26e9388ddae5 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1736.886883] env[62519]: INFO nova.virt.vmwareapi.vm_util [None req-3fd0f8cc-63e6-40bc-a024-cf62b0b44c98 tempest-ServerRescueTestJSONUnderV235-415838788 tempest-ServerRescueTestJSONUnderV235-415838788-project-member] Created folder: Project (7336e5aa2f9b42789cec48f07e586876) in parent group-v373567. 
[ 1736.887372] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-3fd0f8cc-63e6-40bc-a024-cf62b0b44c98 tempest-ServerRescueTestJSONUnderV235-415838788 tempest-ServerRescueTestJSONUnderV235-415838788-project-member] Creating folder: Instances. Parent ref: group-v373776. {{(pid=62519) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1736.887712] env[62519]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-4f7d28b5-4c8b-49e6-9424-2372222d0ce8 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1736.898491] env[62519]: INFO nova.virt.vmwareapi.vm_util [None req-3fd0f8cc-63e6-40bc-a024-cf62b0b44c98 tempest-ServerRescueTestJSONUnderV235-415838788 tempest-ServerRescueTestJSONUnderV235-415838788-project-member] Created folder: Instances in parent group-v373776. [ 1736.903329] env[62519]: DEBUG oslo.service.loopingcall [None req-3fd0f8cc-63e6-40bc-a024-cf62b0b44c98 tempest-ServerRescueTestJSONUnderV235-415838788 tempest-ServerRescueTestJSONUnderV235-415838788-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62519) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1736.903329] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 156ed02a-3365-4a4f-b4de-ea86920d3baf] Creating VM on the ESX host {{(pid=62519) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1736.903329] env[62519]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-de3bfa7c-6cd6-44b6-84db-8478c0fece85 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1736.923691] env[62519]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1736.923691] env[62519]: value = "task-1802817" [ 1736.923691] env[62519]: _type = "Task" [ 1736.923691] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1736.932507] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1802817, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1736.957862] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-fceb78c5-e39c-4dee-861a-4c99ebc86aec tempest-FloatingIPsAssociationNegativeTestJSON-1468566888 tempest-FloatingIPsAssociationNegativeTestJSON-1468566888-project-member] [instance: d710c97b-a2fd-4a54-baaa-ec7664895ce7] Unregistered the VM {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1736.957862] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-fceb78c5-e39c-4dee-861a-4c99ebc86aec tempest-FloatingIPsAssociationNegativeTestJSON-1468566888 tempest-FloatingIPsAssociationNegativeTestJSON-1468566888-project-member] [instance: d710c97b-a2fd-4a54-baaa-ec7664895ce7] Deleting contents of the VM from datastore datastore1 {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1736.957862] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-fceb78c5-e39c-4dee-861a-4c99ebc86aec tempest-FloatingIPsAssociationNegativeTestJSON-1468566888 tempest-FloatingIPsAssociationNegativeTestJSON-1468566888-project-member] Deleting the datastore file [datastore1] d710c97b-a2fd-4a54-baaa-ec7664895ce7 {{(pid=62519) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1736.958068] env[62519]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-9cfd8d96-cb2e-4a5a-9e89-8283f8f60ff7 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1736.967040] env[62519]: DEBUG oslo_vmware.api [None req-fceb78c5-e39c-4dee-861a-4c99ebc86aec tempest-FloatingIPsAssociationNegativeTestJSON-1468566888 tempest-FloatingIPsAssociationNegativeTestJSON-1468566888-project-member] Waiting for the task: (returnval){ [ 1736.967040] env[62519]: value = "task-1802818" [ 1736.967040] env[62519]: _type = "Task" [ 1736.967040] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1736.978149] env[62519]: DEBUG oslo_vmware.api [None req-fceb78c5-e39c-4dee-861a-4c99ebc86aec tempest-FloatingIPsAssociationNegativeTestJSON-1468566888 tempest-FloatingIPsAssociationNegativeTestJSON-1468566888-project-member] Task: {'id': task-1802818, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1737.308575] env[62519]: DEBUG oslo_vmware.api [None req-38e1cc42-c585-4d7c-ba6b-635100d384aa tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]521d7361-5185-d3a8-8d00-b7036530a681, 'name': SearchDatastore_Task, 'duration_secs': 0.009575} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1737.309110] env[62519]: DEBUG oslo_concurrency.lockutils [None req-38e1cc42-c585-4d7c-ba6b-635100d384aa tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Releasing lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1737.309490] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-38e1cc42-c585-4d7c-ba6b-635100d384aa tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] [instance: e3a19583-b332-40e3-bdd0-d254f7a78b0a] Processing image 15793716-f1d9-4a86-9030-717adf498693 {{(pid=62519) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1737.311422] env[62519]: DEBUG oslo_concurrency.lockutils [None req-38e1cc42-c585-4d7c-ba6b-635100d384aa tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1737.311635] env[62519]: DEBUG oslo_concurrency.lockutils [None req-38e1cc42-c585-4d7c-ba6b-635100d384aa tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Acquired lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1737.311937] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-38e1cc42-c585-4d7c-ba6b-635100d384aa tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62519) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1737.312405] env[62519]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a29c5e2c-5056-4dd4-8f3b-dc804733cd1f {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1737.320156] env[62519]: DEBUG nova.compute.utils [None req-22e1d356-1166-4a97-af20-48bea5d24d15 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Using /dev/sd instead of None {{(pid=62519) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1737.323532] env[62519]: DEBUG nova.compute.manager [None req-22e1d356-1166-4a97-af20-48bea5d24d15 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] [instance: dd60afd6-2834-4fca-a846-e39d57aabd60] Allocating IP information in the background. 
{{(pid=62519) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1989}} [ 1737.324087] env[62519]: DEBUG nova.network.neutron [None req-22e1d356-1166-4a97-af20-48bea5d24d15 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] [instance: dd60afd6-2834-4fca-a846-e39d57aabd60] allocate_for_instance() {{(pid=62519) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1737.327265] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-38e1cc42-c585-4d7c-ba6b-635100d384aa tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62519) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1737.327594] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-38e1cc42-c585-4d7c-ba6b-635100d384aa tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62519) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1737.330467] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-621df432-0cac-446a-8a3d-6ee6ac3f6528 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1737.341639] env[62519]: DEBUG oslo_vmware.api [None req-38e1cc42-c585-4d7c-ba6b-635100d384aa tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Waiting for the task: (returnval){ [ 1737.341639] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]52dbd0a4-a412-395b-e935-e020bb3d0503" [ 1737.341639] env[62519]: _type = "Task" [ 1737.341639] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1737.353663] env[62519]: DEBUG oslo_vmware.api [None req-38e1cc42-c585-4d7c-ba6b-635100d384aa tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52dbd0a4-a412-395b-e935-e020bb3d0503, 'name': SearchDatastore_Task, 'duration_secs': 0.010461} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1737.354171] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-109df894-8290-4fd9-ad6b-bdd31c63c6ee {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1737.363444] env[62519]: DEBUG oslo_vmware.api [None req-38e1cc42-c585-4d7c-ba6b-635100d384aa tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Waiting for the task: (returnval){ [ 1737.363444] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]52c6d6ef-70ce-3c7f-7a4b-9b8647900333" [ 1737.363444] env[62519]: _type = "Task" [ 1737.363444] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1737.373060] env[62519]: DEBUG oslo_vmware.api [None req-38e1cc42-c585-4d7c-ba6b-635100d384aa tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52c6d6ef-70ce-3c7f-7a4b-9b8647900333, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1737.435939] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1802817, 'name': CreateVM_Task, 'duration_secs': 0.479989} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1737.435939] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 156ed02a-3365-4a4f-b4de-ea86920d3baf] Created VM on the ESX host {{(pid=62519) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1737.436233] env[62519]: DEBUG oslo_concurrency.lockutils [None req-3fd0f8cc-63e6-40bc-a024-cf62b0b44c98 tempest-ServerRescueTestJSONUnderV235-415838788 tempest-ServerRescueTestJSONUnderV235-415838788-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1737.436379] env[62519]: DEBUG oslo_concurrency.lockutils [None req-3fd0f8cc-63e6-40bc-a024-cf62b0b44c98 tempest-ServerRescueTestJSONUnderV235-415838788 tempest-ServerRescueTestJSONUnderV235-415838788-project-member] Acquired lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1737.436689] env[62519]: DEBUG oslo_concurrency.lockutils [None req-3fd0f8cc-63e6-40bc-a024-cf62b0b44c98 tempest-ServerRescueTestJSONUnderV235-415838788 tempest-ServerRescueTestJSONUnderV235-415838788-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1737.436928] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-67f5352c-21b9-48d1-b03d-90350118c77c {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1737.442135] env[62519]: DEBUG oslo_vmware.api [None req-3fd0f8cc-63e6-40bc-a024-cf62b0b44c98 tempest-ServerRescueTestJSONUnderV235-415838788 tempest-ServerRescueTestJSONUnderV235-415838788-project-member] Waiting for the task: (returnval){ [ 1737.442135] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]527f27c2-57c8-c02b-2bba-3947df8eb2c0" [ 1737.442135] env[62519]: _type = "Task" [ 1737.442135] env[62519]: } to complete. 
{{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1737.450962] env[62519]: DEBUG nova.policy [None req-22e1d356-1166-4a97-af20-48bea5d24d15 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c9b4ac19e10d4abdb7d7e54e5d093a8a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'a0755b34e22d4478817ec4e2d57aac2a', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62519) authorize /opt/stack/nova/nova/policy.py:192}} [ 1737.456440] env[62519]: DEBUG oslo_vmware.api [None req-3fd0f8cc-63e6-40bc-a024-cf62b0b44c98 tempest-ServerRescueTestJSONUnderV235-415838788 tempest-ServerRescueTestJSONUnderV235-415838788-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]527f27c2-57c8-c02b-2bba-3947df8eb2c0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1737.477234] env[62519]: DEBUG oslo_vmware.api [None req-fceb78c5-e39c-4dee-861a-4c99ebc86aec tempest-FloatingIPsAssociationNegativeTestJSON-1468566888 tempest-FloatingIPsAssociationNegativeTestJSON-1468566888-project-member] Task: {'id': task-1802818, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.134254} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1737.477485] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-fceb78c5-e39c-4dee-861a-4c99ebc86aec tempest-FloatingIPsAssociationNegativeTestJSON-1468566888 tempest-FloatingIPsAssociationNegativeTestJSON-1468566888-project-member] Deleted the datastore file {{(pid=62519) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1737.477784] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-fceb78c5-e39c-4dee-861a-4c99ebc86aec tempest-FloatingIPsAssociationNegativeTestJSON-1468566888 tempest-FloatingIPsAssociationNegativeTestJSON-1468566888-project-member] [instance: d710c97b-a2fd-4a54-baaa-ec7664895ce7] Deleted contents of the VM from datastore datastore1 {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1737.477864] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-fceb78c5-e39c-4dee-861a-4c99ebc86aec tempest-FloatingIPsAssociationNegativeTestJSON-1468566888 tempest-FloatingIPsAssociationNegativeTestJSON-1468566888-project-member] [instance: d710c97b-a2fd-4a54-baaa-ec7664895ce7] Instance destroyed {{(pid=62519) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1737.478019] env[62519]: INFO nova.compute.manager [None req-fceb78c5-e39c-4dee-861a-4c99ebc86aec tempest-FloatingIPsAssociationNegativeTestJSON-1468566888 tempest-FloatingIPsAssociationNegativeTestJSON-1468566888-project-member] [instance: d710c97b-a2fd-4a54-baaa-ec7664895ce7] Took 1.20 seconds to destroy the instance on the hypervisor. 
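The teardown of instance d710c97b-a2fd-4a54-baaa-ec7664895ce7 above runs as a chain of vCenter tasks: PowerOffVM_Task, UnregisterVM, then DeleteDatastoreFile_Task, each appearing as a "Waiting for the task" record, zero or more "progress is N%" polls, and a closing record with duration_secs. The helper below is a simplified, self-contained sketch of that poll-until-done pattern; it borrows the wait_for_task name only for readability and is not oslo.vmware's implementation (TaskInfo, the poll interval and the timeout are illustrative assumptions).

import time
from dataclasses import dataclass
from typing import Callable, Optional

@dataclass
class TaskInfo:
    """Minimal stand-in for a vCenter task's state as surfaced in the log."""
    state: str                    # 'queued' | 'running' | 'success' | 'error'
    progress: int = 0             # percent, matches the 'progress is N%' records
    error: Optional[str] = None

def wait_for_task(poll: Callable[[], TaskInfo],
                  interval: float = 0.5,
                  timeout: float = 300.0) -> float:
    """Poll a task until it finishes and return its duration in seconds.

    Mirrors the shape of the records above (repeated progress polls, then a
    completion record with duration_secs); it is a sketch, not the real
    oslo_vmware.api loop.
    """
    start = time.monotonic()
    while True:
        info = poll()
        elapsed = time.monotonic() - start
        if info.state == "success":
            return elapsed                       # reported as duration_secs
        if info.state == "error":
            raise RuntimeError(info.error or "task failed")
        if elapsed > timeout:
            raise TimeoutError("task did not complete in time")
        print(f"progress is {info.progress}%")   # analogous to the _poll_task records
        time.sleep(interval)

if __name__ == "__main__":
    # A fake task that reports 0%, then 25%, then success.
    states = iter([TaskInfo("running", 0), TaskInfo("running", 25), TaskInfo("success", 100)])
    print(f"duration_secs={wait_for_task(lambda: next(states), interval=0.01):.3f}")

The destroy path then continues in the records that follow: once the datastore file delete completes, the manager deallocates the Neutron ports for the instance.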
[ 1737.478314] env[62519]: DEBUG oslo.service.loopingcall [None req-fceb78c5-e39c-4dee-861a-4c99ebc86aec tempest-FloatingIPsAssociationNegativeTestJSON-1468566888 tempest-FloatingIPsAssociationNegativeTestJSON-1468566888-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62519) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1737.478514] env[62519]: DEBUG nova.compute.manager [-] [instance: d710c97b-a2fd-4a54-baaa-ec7664895ce7] Deallocating network for instance {{(pid=62519) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2297}} [ 1737.478606] env[62519]: DEBUG nova.network.neutron [-] [instance: d710c97b-a2fd-4a54-baaa-ec7664895ce7] deallocate_for_instance() {{(pid=62519) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1737.566034] env[62519]: DEBUG oslo_concurrency.lockutils [None req-c0150d6a-b718-4a52-8600-3865c31af3f8 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Acquiring lock "ad0af10d-5063-4344-b12f-1d3ee9ea1090" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1737.566358] env[62519]: DEBUG oslo_concurrency.lockutils [None req-c0150d6a-b718-4a52-8600-3865c31af3f8 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Lock "ad0af10d-5063-4344-b12f-1d3ee9ea1090" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1737.566598] env[62519]: DEBUG oslo_concurrency.lockutils [None req-c0150d6a-b718-4a52-8600-3865c31af3f8 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Acquiring lock "ad0af10d-5063-4344-b12f-1d3ee9ea1090-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1737.567022] env[62519]: DEBUG oslo_concurrency.lockutils [None req-c0150d6a-b718-4a52-8600-3865c31af3f8 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Lock "ad0af10d-5063-4344-b12f-1d3ee9ea1090-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1737.567022] env[62519]: DEBUG oslo_concurrency.lockutils [None req-c0150d6a-b718-4a52-8600-3865c31af3f8 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Lock "ad0af10d-5063-4344-b12f-1d3ee9ea1090-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1737.570024] env[62519]: INFO nova.compute.manager [None req-c0150d6a-b718-4a52-8600-3865c31af3f8 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] [instance: ad0af10d-5063-4344-b12f-1d3ee9ea1090] Terminating instance [ 1737.830708] env[62519]: DEBUG 
nova.compute.manager [None req-22e1d356-1166-4a97-af20-48bea5d24d15 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] [instance: dd60afd6-2834-4fca-a846-e39d57aabd60] Start building block device mappings for instance. {{(pid=62519) _build_resources /opt/stack/nova/nova/compute/manager.py:2873}} [ 1737.840136] env[62519]: DEBUG nova.network.neutron [req-c37ce082-b55d-4082-982b-d3bf4f63a5b6 req-fbba22c4-fdb7-4625-bb46-21676c450d5e service nova] [instance: 156ed02a-3365-4a4f-b4de-ea86920d3baf] Updated VIF entry in instance network info cache for port af20c500-c1f3-427c-a59d-e8a17b0a6945. {{(pid=62519) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1737.840136] env[62519]: DEBUG nova.network.neutron [req-c37ce082-b55d-4082-982b-d3bf4f63a5b6 req-fbba22c4-fdb7-4625-bb46-21676c450d5e service nova] [instance: 156ed02a-3365-4a4f-b4de-ea86920d3baf] Updating instance_info_cache with network_info: [{"id": "af20c500-c1f3-427c-a59d-e8a17b0a6945", "address": "fa:16:3e:2a:c1:c6", "network": {"id": "e0faae63-5ec4-4eef-a323-174dc4b623b5", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-1005479736-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "7336e5aa2f9b42789cec48f07e586876", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4055505f-97ab-400b-969c-43d99b38fd48", "external-id": "nsx-vlan-transportzone-952", "segmentation_id": 952, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapaf20c500-c1", "ovs_interfaceid": "af20c500-c1f3-427c-a59d-e8a17b0a6945", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1737.874513] env[62519]: DEBUG oslo_vmware.api [None req-38e1cc42-c585-4d7c-ba6b-635100d384aa tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52c6d6ef-70ce-3c7f-7a4b-9b8647900333, 'name': SearchDatastore_Task, 'duration_secs': 0.011186} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1737.875313] env[62519]: DEBUG oslo_concurrency.lockutils [None req-38e1cc42-c585-4d7c-ba6b-635100d384aa tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Releasing lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1737.875582] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-38e1cc42-c585-4d7c-ba6b-635100d384aa tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk to [datastore1] e3a19583-b332-40e3-bdd0-d254f7a78b0a/e3a19583-b332-40e3-bdd0-d254f7a78b0a.vmdk {{(pid=62519) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1737.875842] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e3fbe8ba-e581-4782-80f8-b55ba1091a45 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1737.884069] env[62519]: DEBUG oslo_vmware.api [None req-38e1cc42-c585-4d7c-ba6b-635100d384aa tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Waiting for the task: (returnval){ [ 1737.884069] env[62519]: value = "task-1802819" [ 1737.884069] env[62519]: _type = "Task" [ 1737.884069] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1737.895894] env[62519]: DEBUG oslo_vmware.api [None req-38e1cc42-c585-4d7c-ba6b-635100d384aa tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Task: {'id': task-1802819, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1737.959678] env[62519]: DEBUG oslo_vmware.api [None req-3fd0f8cc-63e6-40bc-a024-cf62b0b44c98 tempest-ServerRescueTestJSONUnderV235-415838788 tempest-ServerRescueTestJSONUnderV235-415838788-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]527f27c2-57c8-c02b-2bba-3947df8eb2c0, 'name': SearchDatastore_Task, 'duration_secs': 0.011075} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1737.959678] env[62519]: DEBUG oslo_concurrency.lockutils [None req-3fd0f8cc-63e6-40bc-a024-cf62b0b44c98 tempest-ServerRescueTestJSONUnderV235-415838788 tempest-ServerRescueTestJSONUnderV235-415838788-project-member] Releasing lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1737.960276] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-3fd0f8cc-63e6-40bc-a024-cf62b0b44c98 tempest-ServerRescueTestJSONUnderV235-415838788 tempest-ServerRescueTestJSONUnderV235-415838788-project-member] [instance: 156ed02a-3365-4a4f-b4de-ea86920d3baf] Processing image 15793716-f1d9-4a86-9030-717adf498693 {{(pid=62519) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1737.960565] env[62519]: DEBUG oslo_concurrency.lockutils [None req-3fd0f8cc-63e6-40bc-a024-cf62b0b44c98 tempest-ServerRescueTestJSONUnderV235-415838788 tempest-ServerRescueTestJSONUnderV235-415838788-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1737.960746] env[62519]: DEBUG oslo_concurrency.lockutils [None req-3fd0f8cc-63e6-40bc-a024-cf62b0b44c98 tempest-ServerRescueTestJSONUnderV235-415838788 tempest-ServerRescueTestJSONUnderV235-415838788-project-member] Acquired lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1737.960998] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-3fd0f8cc-63e6-40bc-a024-cf62b0b44c98 tempest-ServerRescueTestJSONUnderV235-415838788 tempest-ServerRescueTestJSONUnderV235-415838788-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62519) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1737.961386] env[62519]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-17725850-1ab8-47fa-9ee2-e2363f7128b7 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1737.971772] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-3fd0f8cc-63e6-40bc-a024-cf62b0b44c98 tempest-ServerRescueTestJSONUnderV235-415838788 tempest-ServerRescueTestJSONUnderV235-415838788-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62519) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1737.971960] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-3fd0f8cc-63e6-40bc-a024-cf62b0b44c98 tempest-ServerRescueTestJSONUnderV235-415838788 tempest-ServerRescueTestJSONUnderV235-415838788-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62519) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1737.976594] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bce8bc5b-3850-4196-9c68-782f47cfcd6d {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1737.983900] env[62519]: DEBUG oslo_vmware.api [None req-3fd0f8cc-63e6-40bc-a024-cf62b0b44c98 tempest-ServerRescueTestJSONUnderV235-415838788 tempest-ServerRescueTestJSONUnderV235-415838788-project-member] Waiting for the task: (returnval){ [ 1737.983900] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]52844542-244c-2073-c4b2-f748f459327e" [ 1737.983900] env[62519]: _type = "Task" [ 1737.983900] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1737.993626] env[62519]: DEBUG oslo_vmware.api [None req-3fd0f8cc-63e6-40bc-a024-cf62b0b44c98 tempest-ServerRescueTestJSONUnderV235-415838788 tempest-ServerRescueTestJSONUnderV235-415838788-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52844542-244c-2073-c4b2-f748f459327e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1738.026142] env[62519]: DEBUG nova.network.neutron [None req-22e1d356-1166-4a97-af20-48bea5d24d15 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] [instance: dd60afd6-2834-4fca-a846-e39d57aabd60] Successfully created port: 6040576e-e4b2-41ff-bd84-514c3ff9090c {{(pid=62519) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1738.060340] env[62519]: DEBUG nova.compute.manager [req-2b8f5629-bae3-462e-aa18-30c746d1855b req-f8b5b814-7930-4c7d-b955-16ef1789d113 service nova] [instance: d710c97b-a2fd-4a54-baaa-ec7664895ce7] Received event network-vif-deleted-8c956f8c-d517-457d-add9-83f72e6718dc {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1738.060592] env[62519]: INFO nova.compute.manager [req-2b8f5629-bae3-462e-aa18-30c746d1855b req-f8b5b814-7930-4c7d-b955-16ef1789d113 service nova] [instance: d710c97b-a2fd-4a54-baaa-ec7664895ce7] Neutron deleted interface 8c956f8c-d517-457d-add9-83f72e6718dc; detaching it from the instance and deleting it from the info cache [ 1738.060788] env[62519]: DEBUG nova.network.neutron [req-2b8f5629-bae3-462e-aa18-30c746d1855b req-f8b5b814-7930-4c7d-b955-16ef1789d113 service nova] [instance: d710c97b-a2fd-4a54-baaa-ec7664895ce7] Updating instance_info_cache with network_info: [] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1738.074059] env[62519]: DEBUG nova.compute.manager [None req-c0150d6a-b718-4a52-8600-3865c31af3f8 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] [instance: ad0af10d-5063-4344-b12f-1d3ee9ea1090] Start destroying the instance on the hypervisor. 
{{(pid=62519) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3166}} [ 1738.074368] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-c0150d6a-b718-4a52-8600-3865c31af3f8 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] [instance: ad0af10d-5063-4344-b12f-1d3ee9ea1090] Destroying instance {{(pid=62519) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1738.079696] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4fd1c217-f450-4736-abf7-a57397d4745c {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1738.092243] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-c0150d6a-b718-4a52-8600-3865c31af3f8 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] [instance: ad0af10d-5063-4344-b12f-1d3ee9ea1090] Powering off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1738.092549] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-6cd7b9d5-9106-4795-9b3c-e5464a560a1f {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1738.103646] env[62519]: DEBUG oslo_vmware.api [None req-c0150d6a-b718-4a52-8600-3865c31af3f8 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Waiting for the task: (returnval){ [ 1738.103646] env[62519]: value = "task-1802820" [ 1738.103646] env[62519]: _type = "Task" [ 1738.103646] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1738.118813] env[62519]: DEBUG oslo_vmware.api [None req-c0150d6a-b718-4a52-8600-3865c31af3f8 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Task: {'id': task-1802820, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1738.279760] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8bee6855-46b1-4f2b-aa55-f4be826c535a {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1738.292796] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4eb60d03-f832-45dc-8695-a5bd63838024 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1738.343931] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d74a566-e79a-48c2-a315-0e8be6bb401b {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1738.347893] env[62519]: DEBUG oslo_concurrency.lockutils [None req-caa9ca9c-57be-478a-8782-107ca9749996 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Acquiring lock "5da884af-d8d2-409b-99bd-e5370e44e9f0" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1738.347893] env[62519]: DEBUG oslo_concurrency.lockutils [None req-caa9ca9c-57be-478a-8782-107ca9749996 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Lock "5da884af-d8d2-409b-99bd-e5370e44e9f0" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1738.351082] env[62519]: DEBUG oslo_concurrency.lockutils [req-c37ce082-b55d-4082-982b-d3bf4f63a5b6 req-fbba22c4-fdb7-4625-bb46-21676c450d5e service nova] Releasing lock "refresh_cache-156ed02a-3365-4a4f-b4de-ea86920d3baf" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1738.359718] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c8a6e88-b06a-42f2-b8ee-a54cd1decddb {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1738.379618] env[62519]: DEBUG nova.compute.provider_tree [None req-86f29903-4db3-4b91-af52-9fc65de92246 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Inventory has not changed in ProviderTree for provider: f8ca0d98-9158-4b85-ae0e-b106f966dd44 {{(pid=62519) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1738.397015] env[62519]: DEBUG oslo_vmware.api [None req-38e1cc42-c585-4d7c-ba6b-635100d384aa tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Task: {'id': task-1802819, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.48412} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1738.397249] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-38e1cc42-c585-4d7c-ba6b-635100d384aa tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk to [datastore1] e3a19583-b332-40e3-bdd0-d254f7a78b0a/e3a19583-b332-40e3-bdd0-d254f7a78b0a.vmdk {{(pid=62519) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1738.397466] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-38e1cc42-c585-4d7c-ba6b-635100d384aa tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] [instance: e3a19583-b332-40e3-bdd0-d254f7a78b0a] Extending root virtual disk to 1048576 {{(pid=62519) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1738.397720] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-d9b5242a-33d8-4543-bd80-d997dd744e02 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1738.405614] env[62519]: DEBUG oslo_vmware.api [None req-38e1cc42-c585-4d7c-ba6b-635100d384aa tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Waiting for the task: (returnval){ [ 1738.405614] env[62519]: value = "task-1802821" [ 1738.405614] env[62519]: _type = "Task" [ 1738.405614] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1738.416329] env[62519]: DEBUG oslo_vmware.api [None req-38e1cc42-c585-4d7c-ba6b-635100d384aa tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Task: {'id': task-1802821, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1738.473744] env[62519]: DEBUG nova.network.neutron [-] [instance: d710c97b-a2fd-4a54-baaa-ec7664895ce7] Updating instance_info_cache with network_info: [] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1738.496637] env[62519]: DEBUG oslo_vmware.api [None req-3fd0f8cc-63e6-40bc-a024-cf62b0b44c98 tempest-ServerRescueTestJSONUnderV235-415838788 tempest-ServerRescueTestJSONUnderV235-415838788-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52844542-244c-2073-c4b2-f748f459327e, 'name': SearchDatastore_Task, 'duration_secs': 0.013474} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1738.497897] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d31e02d6-baca-4ff8-b93e-7c07963cd144 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1738.506137] env[62519]: DEBUG oslo_vmware.api [None req-3fd0f8cc-63e6-40bc-a024-cf62b0b44c98 tempest-ServerRescueTestJSONUnderV235-415838788 tempest-ServerRescueTestJSONUnderV235-415838788-project-member] Waiting for the task: (returnval){ [ 1738.506137] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]52b73810-b65b-9b2f-f749-0705fb61ed30" [ 1738.506137] env[62519]: _type = "Task" [ 1738.506137] env[62519]: } to complete. 
{{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1738.516320] env[62519]: DEBUG oslo_vmware.api [None req-3fd0f8cc-63e6-40bc-a024-cf62b0b44c98 tempest-ServerRescueTestJSONUnderV235-415838788 tempest-ServerRescueTestJSONUnderV235-415838788-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52b73810-b65b-9b2f-f749-0705fb61ed30, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1738.563081] env[62519]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-f087f69c-13f3-4d70-8429-1ca624074136 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1738.573871] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c850a0dd-3a31-490c-bf3c-53ba31a6b37c {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1738.607247] env[62519]: DEBUG nova.compute.manager [req-2b8f5629-bae3-462e-aa18-30c746d1855b req-f8b5b814-7930-4c7d-b955-16ef1789d113 service nova] [instance: d710c97b-a2fd-4a54-baaa-ec7664895ce7] Detach interface failed, port_id=8c956f8c-d517-457d-add9-83f72e6718dc, reason: Instance d710c97b-a2fd-4a54-baaa-ec7664895ce7 could not be found. {{(pid=62519) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11455}} [ 1738.616585] env[62519]: DEBUG oslo_vmware.api [None req-c0150d6a-b718-4a52-8600-3865c31af3f8 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Task: {'id': task-1802820, 'name': PowerOffVM_Task, 'duration_secs': 0.335052} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1738.616836] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-c0150d6a-b718-4a52-8600-3865c31af3f8 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] [instance: ad0af10d-5063-4344-b12f-1d3ee9ea1090] Powered off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1738.617127] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-c0150d6a-b718-4a52-8600-3865c31af3f8 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] [instance: ad0af10d-5063-4344-b12f-1d3ee9ea1090] Unregistering the VM {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1738.617256] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f63cb83e-44d4-41bc-b8e4-95eadb130c9b {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1738.703570] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-c0150d6a-b718-4a52-8600-3865c31af3f8 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] [instance: ad0af10d-5063-4344-b12f-1d3ee9ea1090] Unregistered the VM {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1738.703801] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-c0150d6a-b718-4a52-8600-3865c31af3f8 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] [instance: ad0af10d-5063-4344-b12f-1d3ee9ea1090] Deleting contents of the VM from datastore datastore1 {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1738.703984] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-c0150d6a-b718-4a52-8600-3865c31af3f8 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Deleting the datastore file [datastore1] ad0af10d-5063-4344-b12f-1d3ee9ea1090 {{(pid=62519) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1738.704281] env[62519]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a187b0c2-ca27-4019-9d2f-253202fa8450 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1738.714660] env[62519]: DEBUG oslo_vmware.api [None req-c0150d6a-b718-4a52-8600-3865c31af3f8 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Waiting for the task: (returnval){ [ 1738.714660] env[62519]: value = "task-1802823" [ 1738.714660] env[62519]: _type = "Task" [ 1738.714660] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1738.722873] env[62519]: DEBUG oslo_vmware.api [None req-c0150d6a-b718-4a52-8600-3865c31af3f8 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Task: {'id': task-1802823, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1738.848049] env[62519]: DEBUG nova.compute.manager [None req-22e1d356-1166-4a97-af20-48bea5d24d15 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] [instance: dd60afd6-2834-4fca-a846-e39d57aabd60] Start spawning the instance on the hypervisor. {{(pid=62519) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2647}} [ 1738.853278] env[62519]: DEBUG nova.compute.manager [None req-caa9ca9c-57be-478a-8782-107ca9749996 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] [instance: 5da884af-d8d2-409b-99bd-e5370e44e9f0] Starting instance... {{(pid=62519) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2441}} [ 1738.879791] env[62519]: DEBUG nova.virt.hardware [None req-22e1d356-1166-4a97-af20-48bea5d24d15 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T07:56:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T07:55:55Z,direct_url=,disk_format='vmdk',id=15793716-f1d9-4a86-9030-717adf498693,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4069337684d348e0a1ab4eb6a1a2b14d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T07:55:56Z,virtual_size=,visibility=), allow threads: False {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1738.880033] env[62519]: DEBUG nova.virt.hardware [None req-22e1d356-1166-4a97-af20-48bea5d24d15 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Flavor limits 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1738.880192] env[62519]: DEBUG nova.virt.hardware [None req-22e1d356-1166-4a97-af20-48bea5d24d15 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Image limits 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1738.880372] env[62519]: DEBUG nova.virt.hardware [None req-22e1d356-1166-4a97-af20-48bea5d24d15 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Flavor pref 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1738.880517] env[62519]: DEBUG nova.virt.hardware [None req-22e1d356-1166-4a97-af20-48bea5d24d15 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Image pref 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1738.880657] env[62519]: DEBUG nova.virt.hardware [None req-22e1d356-1166-4a97-af20-48bea5d24d15 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1738.880855] env[62519]: DEBUG nova.virt.hardware [None req-22e1d356-1166-4a97-af20-48bea5d24d15 tempest-ImagesTestJSON-1541855264 
tempest-ImagesTestJSON-1541855264-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1738.881020] env[62519]: DEBUG nova.virt.hardware [None req-22e1d356-1166-4a97-af20-48bea5d24d15 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62519) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1738.881246] env[62519]: DEBUG nova.virt.hardware [None req-22e1d356-1166-4a97-af20-48bea5d24d15 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Got 1 possible topologies {{(pid=62519) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1738.881418] env[62519]: DEBUG nova.virt.hardware [None req-22e1d356-1166-4a97-af20-48bea5d24d15 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1738.881577] env[62519]: DEBUG nova.virt.hardware [None req-22e1d356-1166-4a97-af20-48bea5d24d15 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1738.882461] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4083d5de-1dda-40d5-8f83-91925d240eb3 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1738.885586] env[62519]: DEBUG nova.scheduler.client.report [None req-86f29903-4db3-4b91-af52-9fc65de92246 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Inventory has not changed for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1738.895188] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91bd45b6-846e-4798-b6ce-5adee4a251f3 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1738.918870] env[62519]: DEBUG oslo_vmware.api [None req-38e1cc42-c585-4d7c-ba6b-635100d384aa tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Task: {'id': task-1802821, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.065294} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1738.919172] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-38e1cc42-c585-4d7c-ba6b-635100d384aa tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] [instance: e3a19583-b332-40e3-bdd0-d254f7a78b0a] Extended root virtual disk {{(pid=62519) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1738.919942] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5acefbad-9adb-489b-99d2-f753911fc134 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1738.954921] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-38e1cc42-c585-4d7c-ba6b-635100d384aa tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] [instance: e3a19583-b332-40e3-bdd0-d254f7a78b0a] Reconfiguring VM instance instance-0000004d to attach disk [datastore1] e3a19583-b332-40e3-bdd0-d254f7a78b0a/e3a19583-b332-40e3-bdd0-d254f7a78b0a.vmdk or device None with type sparse {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1738.955690] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4dbc20aa-7069-479f-9c04-35e2cc5a8326 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1738.976551] env[62519]: INFO nova.compute.manager [-] [instance: d710c97b-a2fd-4a54-baaa-ec7664895ce7] Took 1.50 seconds to deallocate network for instance. [ 1738.979538] env[62519]: DEBUG oslo_vmware.api [None req-38e1cc42-c585-4d7c-ba6b-635100d384aa tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Waiting for the task: (returnval){ [ 1738.979538] env[62519]: value = "task-1802824" [ 1738.979538] env[62519]: _type = "Task" [ 1738.979538] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1738.991617] env[62519]: DEBUG oslo_vmware.api [None req-38e1cc42-c585-4d7c-ba6b-635100d384aa tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Task: {'id': task-1802824, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1739.017655] env[62519]: DEBUG oslo_vmware.api [None req-3fd0f8cc-63e6-40bc-a024-cf62b0b44c98 tempest-ServerRescueTestJSONUnderV235-415838788 tempest-ServerRescueTestJSONUnderV235-415838788-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52b73810-b65b-9b2f-f749-0705fb61ed30, 'name': SearchDatastore_Task, 'duration_secs': 0.009986} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1739.017915] env[62519]: DEBUG oslo_concurrency.lockutils [None req-3fd0f8cc-63e6-40bc-a024-cf62b0b44c98 tempest-ServerRescueTestJSONUnderV235-415838788 tempest-ServerRescueTestJSONUnderV235-415838788-project-member] Releasing lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1739.018190] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-3fd0f8cc-63e6-40bc-a024-cf62b0b44c98 tempest-ServerRescueTestJSONUnderV235-415838788 tempest-ServerRescueTestJSONUnderV235-415838788-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk to [datastore1] 156ed02a-3365-4a4f-b4de-ea86920d3baf/156ed02a-3365-4a4f-b4de-ea86920d3baf.vmdk {{(pid=62519) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1739.018482] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-8015757e-b30d-43c5-84ef-650a39a1c2ed {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1739.025117] env[62519]: DEBUG oslo_vmware.api [None req-3fd0f8cc-63e6-40bc-a024-cf62b0b44c98 tempest-ServerRescueTestJSONUnderV235-415838788 tempest-ServerRescueTestJSONUnderV235-415838788-project-member] Waiting for the task: (returnval){ [ 1739.025117] env[62519]: value = "task-1802825" [ 1739.025117] env[62519]: _type = "Task" [ 1739.025117] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1739.033293] env[62519]: DEBUG oslo_vmware.api [None req-3fd0f8cc-63e6-40bc-a024-cf62b0b44c98 tempest-ServerRescueTestJSONUnderV235-415838788 tempest-ServerRescueTestJSONUnderV235-415838788-project-member] Task: {'id': task-1802825, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1739.227336] env[62519]: DEBUG oslo_vmware.api [None req-c0150d6a-b718-4a52-8600-3865c31af3f8 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Task: {'id': task-1802823, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.145607} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1739.227605] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-c0150d6a-b718-4a52-8600-3865c31af3f8 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Deleted the datastore file {{(pid=62519) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1739.227790] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-c0150d6a-b718-4a52-8600-3865c31af3f8 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] [instance: ad0af10d-5063-4344-b12f-1d3ee9ea1090] Deleted contents of the VM from datastore datastore1 {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1739.227963] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-c0150d6a-b718-4a52-8600-3865c31af3f8 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] [instance: ad0af10d-5063-4344-b12f-1d3ee9ea1090] Instance destroyed {{(pid=62519) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1739.228149] env[62519]: INFO nova.compute.manager [None req-c0150d6a-b718-4a52-8600-3865c31af3f8 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] [instance: ad0af10d-5063-4344-b12f-1d3ee9ea1090] Took 1.15 seconds to destroy the instance on the hypervisor. [ 1739.228432] env[62519]: DEBUG oslo.service.loopingcall [None req-c0150d6a-b718-4a52-8600-3865c31af3f8 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62519) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1739.228639] env[62519]: DEBUG nova.compute.manager [-] [instance: ad0af10d-5063-4344-b12f-1d3ee9ea1090] Deallocating network for instance {{(pid=62519) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2297}} [ 1739.228733] env[62519]: DEBUG nova.network.neutron [-] [instance: ad0af10d-5063-4344-b12f-1d3ee9ea1090] deallocate_for_instance() {{(pid=62519) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1739.388582] env[62519]: DEBUG oslo_concurrency.lockutils [None req-caa9ca9c-57be-478a-8782-107ca9749996 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1739.391626] env[62519]: DEBUG oslo_concurrency.lockutils [None req-86f29903-4db3-4b91-af52-9fc65de92246 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.579s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1739.393182] env[62519]: DEBUG nova.compute.manager [None req-86f29903-4db3-4b91-af52-9fc65de92246 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] [instance: c8816718-0a35-4474-b162-c619b0acc154] Start building networks asynchronously for instance. 
{{(pid=62519) _build_resources /opt/stack/nova/nova/compute/manager.py:2838}} [ 1739.396182] env[62519]: DEBUG oslo_concurrency.lockutils [None req-0c4ed0c5-e3d7-4fce-b37b-6c40f24aa9e8 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 12.070s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1739.396455] env[62519]: DEBUG nova.objects.instance [None req-0c4ed0c5-e3d7-4fce-b37b-6c40f24aa9e8 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Lazy-loading 'resources' on Instance uuid c60f5d73-9d6d-4b5f-b71b-00b6b787d482 {{(pid=62519) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1739.490349] env[62519]: DEBUG oslo_concurrency.lockutils [None req-fceb78c5-e39c-4dee-861a-4c99ebc86aec tempest-FloatingIPsAssociationNegativeTestJSON-1468566888 tempest-FloatingIPsAssociationNegativeTestJSON-1468566888-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1739.497009] env[62519]: DEBUG oslo_vmware.api [None req-38e1cc42-c585-4d7c-ba6b-635100d384aa tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Task: {'id': task-1802824, 'name': ReconfigVM_Task, 'duration_secs': 0.500841} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1739.497338] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-38e1cc42-c585-4d7c-ba6b-635100d384aa tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] [instance: e3a19583-b332-40e3-bdd0-d254f7a78b0a] Reconfigured VM instance instance-0000004d to attach disk [datastore1] e3a19583-b332-40e3-bdd0-d254f7a78b0a/e3a19583-b332-40e3-bdd0-d254f7a78b0a.vmdk or device None with type sparse {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1739.498050] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-be7deb19-1205-499d-adda-c9ba42698def {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1739.507032] env[62519]: DEBUG oslo_vmware.api [None req-38e1cc42-c585-4d7c-ba6b-635100d384aa tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Waiting for the task: (returnval){ [ 1739.507032] env[62519]: value = "task-1802826" [ 1739.507032] env[62519]: _type = "Task" [ 1739.507032] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1739.516265] env[62519]: DEBUG oslo_vmware.api [None req-38e1cc42-c585-4d7c-ba6b-635100d384aa tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Task: {'id': task-1802826, 'name': Rename_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1739.536128] env[62519]: DEBUG oslo_vmware.api [None req-3fd0f8cc-63e6-40bc-a024-cf62b0b44c98 tempest-ServerRescueTestJSONUnderV235-415838788 tempest-ServerRescueTestJSONUnderV235-415838788-project-member] Task: {'id': task-1802825, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.501393} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1739.536488] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-3fd0f8cc-63e6-40bc-a024-cf62b0b44c98 tempest-ServerRescueTestJSONUnderV235-415838788 tempest-ServerRescueTestJSONUnderV235-415838788-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk to [datastore1] 156ed02a-3365-4a4f-b4de-ea86920d3baf/156ed02a-3365-4a4f-b4de-ea86920d3baf.vmdk {{(pid=62519) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1739.536737] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-3fd0f8cc-63e6-40bc-a024-cf62b0b44c98 tempest-ServerRescueTestJSONUnderV235-415838788 tempest-ServerRescueTestJSONUnderV235-415838788-project-member] [instance: 156ed02a-3365-4a4f-b4de-ea86920d3baf] Extending root virtual disk to 1048576 {{(pid=62519) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1739.537009] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-680ad706-37da-4b80-b1f4-26f013b0e24c {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1739.545153] env[62519]: DEBUG oslo_vmware.api [None req-3fd0f8cc-63e6-40bc-a024-cf62b0b44c98 tempest-ServerRescueTestJSONUnderV235-415838788 tempest-ServerRescueTestJSONUnderV235-415838788-project-member] Waiting for the task: (returnval){ [ 1739.545153] env[62519]: value = "task-1802827" [ 1739.545153] env[62519]: _type = "Task" [ 1739.545153] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1739.553764] env[62519]: DEBUG oslo_vmware.api [None req-3fd0f8cc-63e6-40bc-a024-cf62b0b44c98 tempest-ServerRescueTestJSONUnderV235-415838788 tempest-ServerRescueTestJSONUnderV235-415838788-project-member] Task: {'id': task-1802827, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1739.877084] env[62519]: DEBUG nova.network.neutron [None req-22e1d356-1166-4a97-af20-48bea5d24d15 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] [instance: dd60afd6-2834-4fca-a846-e39d57aabd60] Successfully updated port: 6040576e-e4b2-41ff-bd84-514c3ff9090c {{(pid=62519) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1739.902032] env[62519]: DEBUG nova.compute.utils [None req-86f29903-4db3-4b91-af52-9fc65de92246 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Using /dev/sd instead of None {{(pid=62519) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1739.906976] env[62519]: DEBUG nova.compute.manager [None req-86f29903-4db3-4b91-af52-9fc65de92246 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] [instance: c8816718-0a35-4474-b162-c619b0acc154] Allocating IP information in the background. 
{{(pid=62519) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1989}} [ 1739.906976] env[62519]: DEBUG nova.network.neutron [None req-86f29903-4db3-4b91-af52-9fc65de92246 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] [instance: c8816718-0a35-4474-b162-c619b0acc154] allocate_for_instance() {{(pid=62519) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1739.974931] env[62519]: DEBUG nova.policy [None req-86f29903-4db3-4b91-af52-9fc65de92246 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b04fa80750184b97a16ec1880e0a585c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '683d60927fdf424386ffcfaa344a7af6', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62519) authorize /opt/stack/nova/nova/policy.py:192}} [ 1740.018774] env[62519]: DEBUG oslo_vmware.api [None req-38e1cc42-c585-4d7c-ba6b-635100d384aa tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Task: {'id': task-1802826, 'name': Rename_Task, 'duration_secs': 0.368428} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1740.019015] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-38e1cc42-c585-4d7c-ba6b-635100d384aa tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] [instance: e3a19583-b332-40e3-bdd0-d254f7a78b0a] Powering on the VM {{(pid=62519) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1740.019436] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-683cf3a3-a8a4-491c-8f36-17a17353c073 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1740.029915] env[62519]: DEBUG oslo_vmware.api [None req-38e1cc42-c585-4d7c-ba6b-635100d384aa tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Waiting for the task: (returnval){ [ 1740.029915] env[62519]: value = "task-1802828" [ 1740.029915] env[62519]: _type = "Task" [ 1740.029915] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1740.038868] env[62519]: DEBUG oslo_vmware.api [None req-38e1cc42-c585-4d7c-ba6b-635100d384aa tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Task: {'id': task-1802828, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1740.054025] env[62519]: DEBUG oslo_vmware.api [None req-3fd0f8cc-63e6-40bc-a024-cf62b0b44c98 tempest-ServerRescueTestJSONUnderV235-415838788 tempest-ServerRescueTestJSONUnderV235-415838788-project-member] Task: {'id': task-1802827, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.067334} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1740.056620] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-3fd0f8cc-63e6-40bc-a024-cf62b0b44c98 tempest-ServerRescueTestJSONUnderV235-415838788 tempest-ServerRescueTestJSONUnderV235-415838788-project-member] [instance: 156ed02a-3365-4a4f-b4de-ea86920d3baf] Extended root virtual disk {{(pid=62519) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1740.057603] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f2eaf7b-27fa-400a-9f42-bc8a5f5eb536 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1740.080420] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-3fd0f8cc-63e6-40bc-a024-cf62b0b44c98 tempest-ServerRescueTestJSONUnderV235-415838788 tempest-ServerRescueTestJSONUnderV235-415838788-project-member] [instance: 156ed02a-3365-4a4f-b4de-ea86920d3baf] Reconfiguring VM instance instance-0000004e to attach disk [datastore1] 156ed02a-3365-4a4f-b4de-ea86920d3baf/156ed02a-3365-4a4f-b4de-ea86920d3baf.vmdk or device None with type sparse {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1740.083161] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5e9daac8-bc58-4134-8c87-23ea948152fd {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1740.105313] env[62519]: DEBUG oslo_vmware.api [None req-3fd0f8cc-63e6-40bc-a024-cf62b0b44c98 tempest-ServerRescueTestJSONUnderV235-415838788 tempest-ServerRescueTestJSONUnderV235-415838788-project-member] Waiting for the task: (returnval){ [ 1740.105313] env[62519]: value = "task-1802829" [ 1740.105313] env[62519]: _type = "Task" [ 1740.105313] env[62519]: } to complete. 
{{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1740.107975] env[62519]: DEBUG nova.compute.manager [req-fb4a911b-a667-4bce-9b15-1d7f040b02cd req-d18c4612-9a45-4324-a0b6-0c1f26f589e8 service nova] [instance: dd60afd6-2834-4fca-a846-e39d57aabd60] Received event network-vif-plugged-6040576e-e4b2-41ff-bd84-514c3ff9090c {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1740.108268] env[62519]: DEBUG oslo_concurrency.lockutils [req-fb4a911b-a667-4bce-9b15-1d7f040b02cd req-d18c4612-9a45-4324-a0b6-0c1f26f589e8 service nova] Acquiring lock "dd60afd6-2834-4fca-a846-e39d57aabd60-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1740.108501] env[62519]: DEBUG oslo_concurrency.lockutils [req-fb4a911b-a667-4bce-9b15-1d7f040b02cd req-d18c4612-9a45-4324-a0b6-0c1f26f589e8 service nova] Lock "dd60afd6-2834-4fca-a846-e39d57aabd60-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1740.108657] env[62519]: DEBUG oslo_concurrency.lockutils [req-fb4a911b-a667-4bce-9b15-1d7f040b02cd req-d18c4612-9a45-4324-a0b6-0c1f26f589e8 service nova] Lock "dd60afd6-2834-4fca-a846-e39d57aabd60-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1740.108845] env[62519]: DEBUG nova.compute.manager [req-fb4a911b-a667-4bce-9b15-1d7f040b02cd req-d18c4612-9a45-4324-a0b6-0c1f26f589e8 service nova] [instance: dd60afd6-2834-4fca-a846-e39d57aabd60] No waiting events found dispatching network-vif-plugged-6040576e-e4b2-41ff-bd84-514c3ff9090c {{(pid=62519) pop_instance_event /opt/stack/nova/nova/compute/manager.py:323}} [ 1740.108965] env[62519]: WARNING nova.compute.manager [req-fb4a911b-a667-4bce-9b15-1d7f040b02cd req-d18c4612-9a45-4324-a0b6-0c1f26f589e8 service nova] [instance: dd60afd6-2834-4fca-a846-e39d57aabd60] Received unexpected event network-vif-plugged-6040576e-e4b2-41ff-bd84-514c3ff9090c for instance with vm_state building and task_state spawning. 
[ 1740.109138] env[62519]: DEBUG nova.compute.manager [req-fb4a911b-a667-4bce-9b15-1d7f040b02cd req-d18c4612-9a45-4324-a0b6-0c1f26f589e8 service nova] [instance: ad0af10d-5063-4344-b12f-1d3ee9ea1090] Received event network-vif-deleted-925db578-876c-414b-8500-56c73c7cdfe8 {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1740.109308] env[62519]: INFO nova.compute.manager [req-fb4a911b-a667-4bce-9b15-1d7f040b02cd req-d18c4612-9a45-4324-a0b6-0c1f26f589e8 service nova] [instance: ad0af10d-5063-4344-b12f-1d3ee9ea1090] Neutron deleted interface 925db578-876c-414b-8500-56c73c7cdfe8; detaching it from the instance and deleting it from the info cache [ 1740.109467] env[62519]: DEBUG nova.network.neutron [req-fb4a911b-a667-4bce-9b15-1d7f040b02cd req-d18c4612-9a45-4324-a0b6-0c1f26f589e8 service nova] [instance: ad0af10d-5063-4344-b12f-1d3ee9ea1090] Updating instance_info_cache with network_info: [] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1740.124114] env[62519]: DEBUG oslo_vmware.api [None req-3fd0f8cc-63e6-40bc-a024-cf62b0b44c98 tempest-ServerRescueTestJSONUnderV235-415838788 tempest-ServerRescueTestJSONUnderV235-415838788-project-member] Task: {'id': task-1802829, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1740.177907] env[62519]: DEBUG nova.network.neutron [-] [instance: ad0af10d-5063-4344-b12f-1d3ee9ea1090] Updating instance_info_cache with network_info: [] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1740.290646] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0db5af06-794a-4619-9cf7-7cf5ab0b4701 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1740.304864] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c87aa94-c57d-4082-a697-cc46ba4bf528 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1740.343471] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0c74493-783c-44fe-ac47-73aecde877cb {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1740.352796] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e5d0a34-08b1-4267-bb89-1d3732a50bc3 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1740.369232] env[62519]: DEBUG nova.compute.provider_tree [None req-0c4ed0c5-e3d7-4fce-b37b-6c40f24aa9e8 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Inventory has not changed in ProviderTree for provider: f8ca0d98-9158-4b85-ae0e-b106f966dd44 {{(pid=62519) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1740.380264] env[62519]: DEBUG oslo_concurrency.lockutils [None req-22e1d356-1166-4a97-af20-48bea5d24d15 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Acquiring lock "refresh_cache-dd60afd6-2834-4fca-a846-e39d57aabd60" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1740.380264] env[62519]: DEBUG oslo_concurrency.lockutils [None 
req-22e1d356-1166-4a97-af20-48bea5d24d15 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Acquired lock "refresh_cache-dd60afd6-2834-4fca-a846-e39d57aabd60" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1740.380264] env[62519]: DEBUG nova.network.neutron [None req-22e1d356-1166-4a97-af20-48bea5d24d15 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] [instance: dd60afd6-2834-4fca-a846-e39d57aabd60] Building network info cache for instance {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1740.406954] env[62519]: DEBUG nova.compute.manager [None req-86f29903-4db3-4b91-af52-9fc65de92246 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] [instance: c8816718-0a35-4474-b162-c619b0acc154] Start building block device mappings for instance. {{(pid=62519) _build_resources /opt/stack/nova/nova/compute/manager.py:2873}} [ 1740.438104] env[62519]: DEBUG nova.network.neutron [None req-86f29903-4db3-4b91-af52-9fc65de92246 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] [instance: c8816718-0a35-4474-b162-c619b0acc154] Successfully created port: 3a9391b3-bc92-4564-b965-73ca1d19957d {{(pid=62519) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1740.540134] env[62519]: DEBUG oslo_vmware.api [None req-38e1cc42-c585-4d7c-ba6b-635100d384aa tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Task: {'id': task-1802828, 'name': PowerOnVM_Task, 'duration_secs': 0.47586} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1740.540431] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-38e1cc42-c585-4d7c-ba6b-635100d384aa tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] [instance: e3a19583-b332-40e3-bdd0-d254f7a78b0a] Powered on the VM {{(pid=62519) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1740.540664] env[62519]: INFO nova.compute.manager [None req-38e1cc42-c585-4d7c-ba6b-635100d384aa tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] [instance: e3a19583-b332-40e3-bdd0-d254f7a78b0a] Took 9.26 seconds to spawn the instance on the hypervisor. [ 1740.540841] env[62519]: DEBUG nova.compute.manager [None req-38e1cc42-c585-4d7c-ba6b-635100d384aa tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] [instance: e3a19583-b332-40e3-bdd0-d254f7a78b0a] Checking state {{(pid=62519) _get_power_state /opt/stack/nova/nova/compute/manager.py:1799}} [ 1740.541621] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e66646a-5b2b-439c-b601-4afc3e8efe7e {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1740.618841] env[62519]: DEBUG oslo_vmware.api [None req-3fd0f8cc-63e6-40bc-a024-cf62b0b44c98 tempest-ServerRescueTestJSONUnderV235-415838788 tempest-ServerRescueTestJSONUnderV235-415838788-project-member] Task: {'id': task-1802829, 'name': ReconfigVM_Task, 'duration_secs': 0.28677} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1740.619083] env[62519]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-70a17500-5086-499c-8ae9-cbbb1f6e4320 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1740.621010] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-3fd0f8cc-63e6-40bc-a024-cf62b0b44c98 tempest-ServerRescueTestJSONUnderV235-415838788 tempest-ServerRescueTestJSONUnderV235-415838788-project-member] [instance: 156ed02a-3365-4a4f-b4de-ea86920d3baf] Reconfigured VM instance instance-0000004e to attach disk [datastore1] 156ed02a-3365-4a4f-b4de-ea86920d3baf/156ed02a-3365-4a4f-b4de-ea86920d3baf.vmdk or device None with type sparse {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1740.621632] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-6396b82b-bfb3-4866-b55e-f454020f5380 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1740.633121] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97d79793-8e34-4f66-a542-e6843055a758 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1740.646610] env[62519]: DEBUG oslo_vmware.api [None req-3fd0f8cc-63e6-40bc-a024-cf62b0b44c98 tempest-ServerRescueTestJSONUnderV235-415838788 tempest-ServerRescueTestJSONUnderV235-415838788-project-member] Waiting for the task: (returnval){ [ 1740.646610] env[62519]: value = "task-1802830" [ 1740.646610] env[62519]: _type = "Task" [ 1740.646610] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1740.658712] env[62519]: DEBUG oslo_vmware.api [None req-3fd0f8cc-63e6-40bc-a024-cf62b0b44c98 tempest-ServerRescueTestJSONUnderV235-415838788 tempest-ServerRescueTestJSONUnderV235-415838788-project-member] Task: {'id': task-1802830, 'name': Rename_Task} progress is 14%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1740.674888] env[62519]: DEBUG nova.compute.manager [req-fb4a911b-a667-4bce-9b15-1d7f040b02cd req-d18c4612-9a45-4324-a0b6-0c1f26f589e8 service nova] [instance: ad0af10d-5063-4344-b12f-1d3ee9ea1090] Detach interface failed, port_id=925db578-876c-414b-8500-56c73c7cdfe8, reason: Instance ad0af10d-5063-4344-b12f-1d3ee9ea1090 could not be found. {{(pid=62519) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11455}} [ 1740.674888] env[62519]: DEBUG nova.compute.manager [req-fb4a911b-a667-4bce-9b15-1d7f040b02cd req-d18c4612-9a45-4324-a0b6-0c1f26f589e8 service nova] [instance: dd60afd6-2834-4fca-a846-e39d57aabd60] Received event network-changed-6040576e-e4b2-41ff-bd84-514c3ff9090c {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1740.675410] env[62519]: DEBUG nova.compute.manager [req-fb4a911b-a667-4bce-9b15-1d7f040b02cd req-d18c4612-9a45-4324-a0b6-0c1f26f589e8 service nova] [instance: dd60afd6-2834-4fca-a846-e39d57aabd60] Refreshing instance network info cache due to event network-changed-6040576e-e4b2-41ff-bd84-514c3ff9090c. 
{{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11628}} [ 1740.675664] env[62519]: DEBUG oslo_concurrency.lockutils [req-fb4a911b-a667-4bce-9b15-1d7f040b02cd req-d18c4612-9a45-4324-a0b6-0c1f26f589e8 service nova] Acquiring lock "refresh_cache-dd60afd6-2834-4fca-a846-e39d57aabd60" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1740.680384] env[62519]: INFO nova.compute.manager [-] [instance: ad0af10d-5063-4344-b12f-1d3ee9ea1090] Took 1.45 seconds to deallocate network for instance. [ 1740.872133] env[62519]: DEBUG nova.scheduler.client.report [None req-0c4ed0c5-e3d7-4fce-b37b-6c40f24aa9e8 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Inventory has not changed for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1740.918288] env[62519]: DEBUG nova.network.neutron [None req-22e1d356-1166-4a97-af20-48bea5d24d15 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] [instance: dd60afd6-2834-4fca-a846-e39d57aabd60] Instance cache missing network info. {{(pid=62519) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1741.059295] env[62519]: DEBUG nova.network.neutron [None req-22e1d356-1166-4a97-af20-48bea5d24d15 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] [instance: dd60afd6-2834-4fca-a846-e39d57aabd60] Updating instance_info_cache with network_info: [{"id": "6040576e-e4b2-41ff-bd84-514c3ff9090c", "address": "fa:16:3e:8b:00:fa", "network": {"id": "0ccefefe-8ea4-4ea3-91a2-32d7697da357", "bridge": "br-int", "label": "tempest-ImagesTestJSON-41230602-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a0755b34e22d4478817ec4e2d57aac2a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "594b35bb-a20b-4f0e-bd35-9acf9cc6bf11", "external-id": "nsx-vlan-transportzone-299", "segmentation_id": 299, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6040576e-e4", "ovs_interfaceid": "6040576e-e4b2-41ff-bd84-514c3ff9090c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1741.061217] env[62519]: INFO nova.compute.manager [None req-38e1cc42-c585-4d7c-ba6b-635100d384aa tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] [instance: e3a19583-b332-40e3-bdd0-d254f7a78b0a] Took 19.27 seconds to build instance. 
[ 1741.160465] env[62519]: DEBUG oslo_vmware.api [None req-3fd0f8cc-63e6-40bc-a024-cf62b0b44c98 tempest-ServerRescueTestJSONUnderV235-415838788 tempest-ServerRescueTestJSONUnderV235-415838788-project-member] Task: {'id': task-1802830, 'name': Rename_Task, 'duration_secs': 0.156918} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1741.162175] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-3fd0f8cc-63e6-40bc-a024-cf62b0b44c98 tempest-ServerRescueTestJSONUnderV235-415838788 tempest-ServerRescueTestJSONUnderV235-415838788-project-member] [instance: 156ed02a-3365-4a4f-b4de-ea86920d3baf] Powering on the VM {{(pid=62519) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1741.162447] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-366f5462-cacb-4637-883c-4a959774d30e {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1741.171424] env[62519]: DEBUG oslo_vmware.api [None req-3fd0f8cc-63e6-40bc-a024-cf62b0b44c98 tempest-ServerRescueTestJSONUnderV235-415838788 tempest-ServerRescueTestJSONUnderV235-415838788-project-member] Waiting for the task: (returnval){ [ 1741.171424] env[62519]: value = "task-1802831" [ 1741.171424] env[62519]: _type = "Task" [ 1741.171424] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1741.181783] env[62519]: DEBUG oslo_vmware.api [None req-3fd0f8cc-63e6-40bc-a024-cf62b0b44c98 tempest-ServerRescueTestJSONUnderV235-415838788 tempest-ServerRescueTestJSONUnderV235-415838788-project-member] Task: {'id': task-1802831, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1741.193480] env[62519]: DEBUG oslo_concurrency.lockutils [None req-c0150d6a-b718-4a52-8600-3865c31af3f8 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1741.377182] env[62519]: DEBUG oslo_concurrency.lockutils [None req-0c4ed0c5-e3d7-4fce-b37b-6c40f24aa9e8 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.981s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1741.382372] env[62519]: DEBUG oslo_concurrency.lockutils [None req-aae7d2f2-bba5-46f9-99c5-5b3533414c8a tempest-AttachInterfacesV270Test-1088965964 tempest-AttachInterfacesV270Test-1088965964-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 14.023s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1741.382712] env[62519]: DEBUG nova.objects.instance [None req-aae7d2f2-bba5-46f9-99c5-5b3533414c8a tempest-AttachInterfacesV270Test-1088965964 tempest-AttachInterfacesV270Test-1088965964-project-member] Lazy-loading 'resources' on Instance uuid 0789b142-4712-4b7a-9197-c3689f24df7c {{(pid=62519) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1741.400589] env[62519]: INFO nova.scheduler.client.report [None req-0c4ed0c5-e3d7-4fce-b37b-6c40f24aa9e8 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Deleted allocations for instance c60f5d73-9d6d-4b5f-b71b-00b6b787d482 [ 1741.417253] env[62519]: DEBUG nova.compute.manager [None req-86f29903-4db3-4b91-af52-9fc65de92246 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] [instance: c8816718-0a35-4474-b162-c619b0acc154] Start spawning the instance on the hypervisor. 
{{(pid=62519) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2647}} [ 1741.452783] env[62519]: DEBUG nova.virt.hardware [None req-86f29903-4db3-4b91-af52-9fc65de92246 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T07:56:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T07:55:55Z,direct_url=,disk_format='vmdk',id=15793716-f1d9-4a86-9030-717adf498693,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4069337684d348e0a1ab4eb6a1a2b14d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T07:55:56Z,virtual_size=,visibility=), allow threads: False {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1741.453087] env[62519]: DEBUG nova.virt.hardware [None req-86f29903-4db3-4b91-af52-9fc65de92246 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Flavor limits 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1741.453299] env[62519]: DEBUG nova.virt.hardware [None req-86f29903-4db3-4b91-af52-9fc65de92246 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Image limits 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1741.453538] env[62519]: DEBUG nova.virt.hardware [None req-86f29903-4db3-4b91-af52-9fc65de92246 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Flavor pref 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1741.453708] env[62519]: DEBUG nova.virt.hardware [None req-86f29903-4db3-4b91-af52-9fc65de92246 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Image pref 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1741.453858] env[62519]: DEBUG nova.virt.hardware [None req-86f29903-4db3-4b91-af52-9fc65de92246 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1741.454172] env[62519]: DEBUG nova.virt.hardware [None req-86f29903-4db3-4b91-af52-9fc65de92246 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1741.454253] env[62519]: DEBUG nova.virt.hardware [None req-86f29903-4db3-4b91-af52-9fc65de92246 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62519) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1741.454449] env[62519]: DEBUG nova.virt.hardware [None 
req-86f29903-4db3-4b91-af52-9fc65de92246 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Got 1 possible topologies {{(pid=62519) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1741.454839] env[62519]: DEBUG nova.virt.hardware [None req-86f29903-4db3-4b91-af52-9fc65de92246 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1741.455059] env[62519]: DEBUG nova.virt.hardware [None req-86f29903-4db3-4b91-af52-9fc65de92246 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1741.455990] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9cc9988e-8875-4271-aa95-a33e10f29b41 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1741.465918] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40c9018d-740e-4b03-8725-a4c63f2c9c58 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1741.562301] env[62519]: DEBUG oslo_concurrency.lockutils [None req-22e1d356-1166-4a97-af20-48bea5d24d15 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Releasing lock "refresh_cache-dd60afd6-2834-4fca-a846-e39d57aabd60" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1741.562699] env[62519]: DEBUG nova.compute.manager [None req-22e1d356-1166-4a97-af20-48bea5d24d15 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] [instance: dd60afd6-2834-4fca-a846-e39d57aabd60] Instance network_info: |[{"id": "6040576e-e4b2-41ff-bd84-514c3ff9090c", "address": "fa:16:3e:8b:00:fa", "network": {"id": "0ccefefe-8ea4-4ea3-91a2-32d7697da357", "bridge": "br-int", "label": "tempest-ImagesTestJSON-41230602-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a0755b34e22d4478817ec4e2d57aac2a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "594b35bb-a20b-4f0e-bd35-9acf9cc6bf11", "external-id": "nsx-vlan-transportzone-299", "segmentation_id": 299, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6040576e-e4", "ovs_interfaceid": "6040576e-e4b2-41ff-bd84-514c3ff9090c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62519) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2004}} [ 1741.563846] env[62519]: DEBUG oslo_concurrency.lockutils [req-fb4a911b-a667-4bce-9b15-1d7f040b02cd req-d18c4612-9a45-4324-a0b6-0c1f26f589e8 service nova] Acquired 
lock "refresh_cache-dd60afd6-2834-4fca-a846-e39d57aabd60" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1741.564193] env[62519]: DEBUG nova.network.neutron [req-fb4a911b-a667-4bce-9b15-1d7f040b02cd req-d18c4612-9a45-4324-a0b6-0c1f26f589e8 service nova] [instance: dd60afd6-2834-4fca-a846-e39d57aabd60] Refreshing network info cache for port 6040576e-e4b2-41ff-bd84-514c3ff9090c {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1741.565537] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-22e1d356-1166-4a97-af20-48bea5d24d15 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] [instance: dd60afd6-2834-4fca-a846-e39d57aabd60] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:8b:00:fa', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '594b35bb-a20b-4f0e-bd35-9acf9cc6bf11', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '6040576e-e4b2-41ff-bd84-514c3ff9090c', 'vif_model': 'vmxnet3'}] {{(pid=62519) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1741.575059] env[62519]: DEBUG oslo.service.loopingcall [None req-22e1d356-1166-4a97-af20-48bea5d24d15 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62519) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1741.577421] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: dd60afd6-2834-4fca-a846-e39d57aabd60] Creating VM on the ESX host {{(pid=62519) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1741.578245] env[62519]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f5b0e1d6-6691-4a8b-b6ed-54f67af856ad {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1741.604633] env[62519]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1741.604633] env[62519]: value = "task-1802832" [ 1741.604633] env[62519]: _type = "Task" [ 1741.604633] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1741.615599] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1802832, 'name': CreateVM_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1741.681588] env[62519]: DEBUG oslo_vmware.api [None req-3fd0f8cc-63e6-40bc-a024-cf62b0b44c98 tempest-ServerRescueTestJSONUnderV235-415838788 tempest-ServerRescueTestJSONUnderV235-415838788-project-member] Task: {'id': task-1802831, 'name': PowerOnVM_Task, 'duration_secs': 0.49232} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1741.681871] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-3fd0f8cc-63e6-40bc-a024-cf62b0b44c98 tempest-ServerRescueTestJSONUnderV235-415838788 tempest-ServerRescueTestJSONUnderV235-415838788-project-member] [instance: 156ed02a-3365-4a4f-b4de-ea86920d3baf] Powered on the VM {{(pid=62519) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1741.682152] env[62519]: INFO nova.compute.manager [None req-3fd0f8cc-63e6-40bc-a024-cf62b0b44c98 tempest-ServerRescueTestJSONUnderV235-415838788 tempest-ServerRescueTestJSONUnderV235-415838788-project-member] [instance: 156ed02a-3365-4a4f-b4de-ea86920d3baf] Took 7.91 seconds to spawn the instance on the hypervisor. [ 1741.682319] env[62519]: DEBUG nova.compute.manager [None req-3fd0f8cc-63e6-40bc-a024-cf62b0b44c98 tempest-ServerRescueTestJSONUnderV235-415838788 tempest-ServerRescueTestJSONUnderV235-415838788-project-member] [instance: 156ed02a-3365-4a4f-b4de-ea86920d3baf] Checking state {{(pid=62519) _get_power_state /opt/stack/nova/nova/compute/manager.py:1799}} [ 1741.683146] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-532ff656-e481-46ae-b8ed-dbabe071a577 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1741.910171] env[62519]: DEBUG oslo_concurrency.lockutils [None req-0c4ed0c5-e3d7-4fce-b37b-6c40f24aa9e8 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Lock "c60f5d73-9d6d-4b5f-b71b-00b6b787d482" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 18.314s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1741.989565] env[62519]: DEBUG oslo_concurrency.lockutils [None req-3222e7b5-02b9-4fbc-bf91-05dacee08ed0 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Acquiring lock "e3a19583-b332-40e3-bdd0-d254f7a78b0a" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1742.085012] env[62519]: DEBUG oslo_concurrency.lockutils [None req-38e1cc42-c585-4d7c-ba6b-635100d384aa tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Lock "e3a19583-b332-40e3-bdd0-d254f7a78b0a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 21.307s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1742.085902] env[62519]: DEBUG oslo_concurrency.lockutils [None req-3222e7b5-02b9-4fbc-bf91-05dacee08ed0 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Lock "e3a19583-b332-40e3-bdd0-d254f7a78b0a" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.096s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1742.086299] env[62519]: DEBUG oslo_concurrency.lockutils [None req-3222e7b5-02b9-4fbc-bf91-05dacee08ed0 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Acquiring lock "e3a19583-b332-40e3-bdd0-d254f7a78b0a-events" by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1742.086581] env[62519]: DEBUG oslo_concurrency.lockutils [None req-3222e7b5-02b9-4fbc-bf91-05dacee08ed0 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Lock "e3a19583-b332-40e3-bdd0-d254f7a78b0a-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1742.086843] env[62519]: DEBUG oslo_concurrency.lockutils [None req-3222e7b5-02b9-4fbc-bf91-05dacee08ed0 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Lock "e3a19583-b332-40e3-bdd0-d254f7a78b0a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1742.092788] env[62519]: INFO nova.compute.manager [None req-3222e7b5-02b9-4fbc-bf91-05dacee08ed0 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] [instance: e3a19583-b332-40e3-bdd0-d254f7a78b0a] Terminating instance [ 1742.117860] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1802832, 'name': CreateVM_Task} progress is 25%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1742.158454] env[62519]: DEBUG nova.network.neutron [None req-86f29903-4db3-4b91-af52-9fc65de92246 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] [instance: c8816718-0a35-4474-b162-c619b0acc154] Successfully updated port: 3a9391b3-bc92-4564-b965-73ca1d19957d {{(pid=62519) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1742.173731] env[62519]: DEBUG nova.compute.manager [req-b1753546-2ad1-4f75-be47-6a85990c5bd8 req-6cfa40c1-c5dd-4bd2-9ba2-cbbab3ce041f service nova] [instance: c8816718-0a35-4474-b162-c619b0acc154] Received event network-vif-plugged-3a9391b3-bc92-4564-b965-73ca1d19957d {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1742.174418] env[62519]: DEBUG oslo_concurrency.lockutils [req-b1753546-2ad1-4f75-be47-6a85990c5bd8 req-6cfa40c1-c5dd-4bd2-9ba2-cbbab3ce041f service nova] Acquiring lock "c8816718-0a35-4474-b162-c619b0acc154-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1742.176226] env[62519]: DEBUG oslo_concurrency.lockutils [req-b1753546-2ad1-4f75-be47-6a85990c5bd8 req-6cfa40c1-c5dd-4bd2-9ba2-cbbab3ce041f service nova] Lock "c8816718-0a35-4474-b162-c619b0acc154-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1742.176226] env[62519]: DEBUG oslo_concurrency.lockutils [req-b1753546-2ad1-4f75-be47-6a85990c5bd8 req-6cfa40c1-c5dd-4bd2-9ba2-cbbab3ce041f service nova] Lock "c8816718-0a35-4474-b162-c619b0acc154-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1742.176226] 
env[62519]: DEBUG nova.compute.manager [req-b1753546-2ad1-4f75-be47-6a85990c5bd8 req-6cfa40c1-c5dd-4bd2-9ba2-cbbab3ce041f service nova] [instance: c8816718-0a35-4474-b162-c619b0acc154] No waiting events found dispatching network-vif-plugged-3a9391b3-bc92-4564-b965-73ca1d19957d {{(pid=62519) pop_instance_event /opt/stack/nova/nova/compute/manager.py:323}} [ 1742.176226] env[62519]: WARNING nova.compute.manager [req-b1753546-2ad1-4f75-be47-6a85990c5bd8 req-6cfa40c1-c5dd-4bd2-9ba2-cbbab3ce041f service nova] [instance: c8816718-0a35-4474-b162-c619b0acc154] Received unexpected event network-vif-plugged-3a9391b3-bc92-4564-b965-73ca1d19957d for instance with vm_state building and task_state spawning. [ 1742.206076] env[62519]: INFO nova.compute.manager [None req-3fd0f8cc-63e6-40bc-a024-cf62b0b44c98 tempest-ServerRescueTestJSONUnderV235-415838788 tempest-ServerRescueTestJSONUnderV235-415838788-project-member] [instance: 156ed02a-3365-4a4f-b4de-ea86920d3baf] Took 19.39 seconds to build instance. [ 1742.337627] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ece1db45-801b-4ff0-8fdb-1fbeb907ad73 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1742.346335] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87efd056-0df5-4e50-b0f8-5eaf0f59dc3b {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1742.379627] env[62519]: DEBUG nova.network.neutron [req-fb4a911b-a667-4bce-9b15-1d7f040b02cd req-d18c4612-9a45-4324-a0b6-0c1f26f589e8 service nova] [instance: dd60afd6-2834-4fca-a846-e39d57aabd60] Updated VIF entry in instance network info cache for port 6040576e-e4b2-41ff-bd84-514c3ff9090c. 
{{(pid=62519) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1742.379801] env[62519]: DEBUG nova.network.neutron [req-fb4a911b-a667-4bce-9b15-1d7f040b02cd req-d18c4612-9a45-4324-a0b6-0c1f26f589e8 service nova] [instance: dd60afd6-2834-4fca-a846-e39d57aabd60] Updating instance_info_cache with network_info: [{"id": "6040576e-e4b2-41ff-bd84-514c3ff9090c", "address": "fa:16:3e:8b:00:fa", "network": {"id": "0ccefefe-8ea4-4ea3-91a2-32d7697da357", "bridge": "br-int", "label": "tempest-ImagesTestJSON-41230602-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a0755b34e22d4478817ec4e2d57aac2a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "594b35bb-a20b-4f0e-bd35-9acf9cc6bf11", "external-id": "nsx-vlan-transportzone-299", "segmentation_id": 299, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6040576e-e4", "ovs_interfaceid": "6040576e-e4b2-41ff-bd84-514c3ff9090c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1742.381430] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e950c109-8aab-4d60-a7b1-52ca24535c91 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1742.389626] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd215a99-611b-4531-a026-1d6839813626 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1742.403966] env[62519]: DEBUG nova.compute.provider_tree [None req-aae7d2f2-bba5-46f9-99c5-5b3533414c8a tempest-AttachInterfacesV270Test-1088965964 tempest-AttachInterfacesV270Test-1088965964-project-member] Inventory has not changed in ProviderTree for provider: f8ca0d98-9158-4b85-ae0e-b106f966dd44 {{(pid=62519) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1742.601325] env[62519]: DEBUG nova.compute.manager [None req-3222e7b5-02b9-4fbc-bf91-05dacee08ed0 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] [instance: e3a19583-b332-40e3-bdd0-d254f7a78b0a] Start destroying the instance on the hypervisor. 
{{(pid=62519) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3166}} [ 1742.601325] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-3222e7b5-02b9-4fbc-bf91-05dacee08ed0 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] [instance: e3a19583-b332-40e3-bdd0-d254f7a78b0a] Destroying instance {{(pid=62519) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1742.601325] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f66b3597-287b-4577-8227-be74fc56589a {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1742.611477] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-3222e7b5-02b9-4fbc-bf91-05dacee08ed0 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] [instance: e3a19583-b332-40e3-bdd0-d254f7a78b0a] Powering off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1742.612390] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-3dd449e5-8d37-4efa-b464-53efea0b4f89 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1742.620033] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1802832, 'name': CreateVM_Task, 'duration_secs': 0.666858} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1742.620033] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: dd60afd6-2834-4fca-a846-e39d57aabd60] Created VM on the ESX host {{(pid=62519) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1742.620033] env[62519]: DEBUG oslo_concurrency.lockutils [None req-22e1d356-1166-4a97-af20-48bea5d24d15 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1742.620033] env[62519]: DEBUG oslo_concurrency.lockutils [None req-22e1d356-1166-4a97-af20-48bea5d24d15 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Acquired lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1742.620033] env[62519]: DEBUG oslo_concurrency.lockutils [None req-22e1d356-1166-4a97-af20-48bea5d24d15 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1742.620033] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-699c50be-63b4-4eec-8735-beea5df445ca {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1742.622968] env[62519]: DEBUG oslo_vmware.api [None req-3222e7b5-02b9-4fbc-bf91-05dacee08ed0 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Waiting for the task: (returnval){ [ 1742.622968] env[62519]: value = "task-1802833" [ 1742.622968] env[62519]: _type = "Task" [ 1742.622968] 
env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1742.631702] env[62519]: DEBUG oslo_vmware.api [None req-22e1d356-1166-4a97-af20-48bea5d24d15 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Waiting for the task: (returnval){ [ 1742.631702] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]52f52966-ff4c-36eb-d630-281674914f34" [ 1742.631702] env[62519]: _type = "Task" [ 1742.631702] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1742.640493] env[62519]: DEBUG oslo_vmware.api [None req-3222e7b5-02b9-4fbc-bf91-05dacee08ed0 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Task: {'id': task-1802833, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1742.649406] env[62519]: DEBUG oslo_vmware.api [None req-22e1d356-1166-4a97-af20-48bea5d24d15 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52f52966-ff4c-36eb-d630-281674914f34, 'name': SearchDatastore_Task, 'duration_secs': 0.01161} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1742.649406] env[62519]: DEBUG oslo_concurrency.lockutils [None req-22e1d356-1166-4a97-af20-48bea5d24d15 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Releasing lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1742.649406] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-22e1d356-1166-4a97-af20-48bea5d24d15 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] [instance: dd60afd6-2834-4fca-a846-e39d57aabd60] Processing image 15793716-f1d9-4a86-9030-717adf498693 {{(pid=62519) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1742.649406] env[62519]: DEBUG oslo_concurrency.lockutils [None req-22e1d356-1166-4a97-af20-48bea5d24d15 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1742.649406] env[62519]: DEBUG oslo_concurrency.lockutils [None req-22e1d356-1166-4a97-af20-48bea5d24d15 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Acquired lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1742.649406] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-22e1d356-1166-4a97-af20-48bea5d24d15 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62519) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1742.649406] env[62519]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with 
opID=oslo.vmware-f2851657-9a0f-4b56-8902-dc90097b699a {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1742.658018] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-22e1d356-1166-4a97-af20-48bea5d24d15 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62519) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1742.658018] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-22e1d356-1166-4a97-af20-48bea5d24d15 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62519) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1742.658018] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-31294cfb-ff9e-481c-97ec-a3827eec6669 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1742.663535] env[62519]: DEBUG oslo_concurrency.lockutils [None req-86f29903-4db3-4b91-af52-9fc65de92246 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Acquiring lock "refresh_cache-c8816718-0a35-4474-b162-c619b0acc154" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1742.664012] env[62519]: DEBUG oslo_concurrency.lockutils [None req-86f29903-4db3-4b91-af52-9fc65de92246 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Acquired lock "refresh_cache-c8816718-0a35-4474-b162-c619b0acc154" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1742.666096] env[62519]: DEBUG nova.network.neutron [None req-86f29903-4db3-4b91-af52-9fc65de92246 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] [instance: c8816718-0a35-4474-b162-c619b0acc154] Building network info cache for instance {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1742.666096] env[62519]: DEBUG oslo_vmware.api [None req-22e1d356-1166-4a97-af20-48bea5d24d15 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Waiting for the task: (returnval){ [ 1742.666096] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]52ce6de9-340a-154b-8822-e770d3477264" [ 1742.666096] env[62519]: _type = "Task" [ 1742.666096] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1742.676156] env[62519]: DEBUG oslo_vmware.api [None req-22e1d356-1166-4a97-af20-48bea5d24d15 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52ce6de9-340a-154b-8822-e770d3477264, 'name': SearchDatastore_Task, 'duration_secs': 0.009865} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1742.679321] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-58f2c875-8e48-45a2-b3b1-339101d2fc3d {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1742.686590] env[62519]: DEBUG oslo_vmware.api [None req-22e1d356-1166-4a97-af20-48bea5d24d15 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Waiting for the task: (returnval){ [ 1742.686590] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]52565c5c-c52f-977a-9c6f-3dfd2ef0d2d5" [ 1742.686590] env[62519]: _type = "Task" [ 1742.686590] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1742.695778] env[62519]: DEBUG oslo_vmware.api [None req-22e1d356-1166-4a97-af20-48bea5d24d15 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52565c5c-c52f-977a-9c6f-3dfd2ef0d2d5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1742.885228] env[62519]: DEBUG oslo_concurrency.lockutils [req-fb4a911b-a667-4bce-9b15-1d7f040b02cd req-d18c4612-9a45-4324-a0b6-0c1f26f589e8 service nova] Releasing lock "refresh_cache-dd60afd6-2834-4fca-a846-e39d57aabd60" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1742.907499] env[62519]: DEBUG nova.scheduler.client.report [None req-aae7d2f2-bba5-46f9-99c5-5b3533414c8a tempest-AttachInterfacesV270Test-1088965964 tempest-AttachInterfacesV270Test-1088965964-project-member] Inventory has not changed for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1743.134258] env[62519]: DEBUG oslo_vmware.api [None req-3222e7b5-02b9-4fbc-bf91-05dacee08ed0 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Task: {'id': task-1802833, 'name': PowerOffVM_Task, 'duration_secs': 0.21395} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1743.134579] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-3222e7b5-02b9-4fbc-bf91-05dacee08ed0 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] [instance: e3a19583-b332-40e3-bdd0-d254f7a78b0a] Powered off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1743.134745] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-3222e7b5-02b9-4fbc-bf91-05dacee08ed0 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] [instance: e3a19583-b332-40e3-bdd0-d254f7a78b0a] Unregistering the VM {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1743.135197] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-6653242c-61b0-493e-967d-a47d9499f172 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1743.201254] env[62519]: DEBUG oslo_vmware.api [None req-22e1d356-1166-4a97-af20-48bea5d24d15 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52565c5c-c52f-977a-9c6f-3dfd2ef0d2d5, 'name': SearchDatastore_Task, 'duration_secs': 0.010554} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1743.201661] env[62519]: DEBUG oslo_concurrency.lockutils [None req-22e1d356-1166-4a97-af20-48bea5d24d15 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Releasing lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1743.201975] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-22e1d356-1166-4a97-af20-48bea5d24d15 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk to [datastore1] dd60afd6-2834-4fca-a846-e39d57aabd60/dd60afd6-2834-4fca-a846-e39d57aabd60.vmdk {{(pid=62519) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1743.202323] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-3ca9ba96-e5a3-474a-b973-f63a5cc87dd7 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1743.206957] env[62519]: DEBUG nova.network.neutron [None req-86f29903-4db3-4b91-af52-9fc65de92246 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] [instance: c8816718-0a35-4474-b162-c619b0acc154] Instance cache missing network info. {{(pid=62519) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1743.213937] env[62519]: DEBUG oslo_vmware.api [None req-22e1d356-1166-4a97-af20-48bea5d24d15 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Waiting for the task: (returnval){ [ 1743.213937] env[62519]: value = "task-1802835" [ 1743.213937] env[62519]: _type = "Task" [ 1743.213937] env[62519]: } to complete. 
{{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1743.223569] env[62519]: DEBUG oslo_vmware.api [None req-22e1d356-1166-4a97-af20-48bea5d24d15 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Task: {'id': task-1802835, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1743.280023] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-3222e7b5-02b9-4fbc-bf91-05dacee08ed0 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] [instance: e3a19583-b332-40e3-bdd0-d254f7a78b0a] Unregistered the VM {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1743.280023] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-3222e7b5-02b9-4fbc-bf91-05dacee08ed0 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] [instance: e3a19583-b332-40e3-bdd0-d254f7a78b0a] Deleting contents of the VM from datastore datastore1 {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1743.280023] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-3222e7b5-02b9-4fbc-bf91-05dacee08ed0 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Deleting the datastore file [datastore1] e3a19583-b332-40e3-bdd0-d254f7a78b0a {{(pid=62519) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1743.280023] env[62519]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e89838a4-774d-430e-9a37-722f53a3c50c {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1743.290167] env[62519]: INFO nova.compute.manager [None req-660b8793-017b-4efd-8bfd-c060a9ba6127 tempest-ServerRescueTestJSONUnderV235-415838788 tempest-ServerRescueTestJSONUnderV235-415838788-project-member] [instance: 156ed02a-3365-4a4f-b4de-ea86920d3baf] Rescuing [ 1743.290649] env[62519]: DEBUG oslo_concurrency.lockutils [None req-660b8793-017b-4efd-8bfd-c060a9ba6127 tempest-ServerRescueTestJSONUnderV235-415838788 tempest-ServerRescueTestJSONUnderV235-415838788-project-member] Acquiring lock "refresh_cache-156ed02a-3365-4a4f-b4de-ea86920d3baf" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1743.290946] env[62519]: DEBUG oslo_concurrency.lockutils [None req-660b8793-017b-4efd-8bfd-c060a9ba6127 tempest-ServerRescueTestJSONUnderV235-415838788 tempest-ServerRescueTestJSONUnderV235-415838788-project-member] Acquired lock "refresh_cache-156ed02a-3365-4a4f-b4de-ea86920d3baf" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1743.291258] env[62519]: DEBUG nova.network.neutron [None req-660b8793-017b-4efd-8bfd-c060a9ba6127 tempest-ServerRescueTestJSONUnderV235-415838788 tempest-ServerRescueTestJSONUnderV235-415838788-project-member] [instance: 156ed02a-3365-4a4f-b4de-ea86920d3baf] Building network info cache for instance {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1743.292667] env[62519]: DEBUG oslo_vmware.api [None req-3222e7b5-02b9-4fbc-bf91-05dacee08ed0 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Waiting for the task: (returnval){ [ 1743.292667] env[62519]: value = "task-1802836" [ 1743.292667] env[62519]: _type = "Task" [ 
1743.292667] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1743.303894] env[62519]: DEBUG oslo_vmware.api [None req-3222e7b5-02b9-4fbc-bf91-05dacee08ed0 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Task: {'id': task-1802836, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1743.415043] env[62519]: DEBUG oslo_concurrency.lockutils [None req-aae7d2f2-bba5-46f9-99c5-5b3533414c8a tempest-AttachInterfacesV270Test-1088965964 tempest-AttachInterfacesV270Test-1088965964-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.031s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1743.415730] env[62519]: DEBUG oslo_concurrency.lockutils [None req-51df1d83-d7cf-4798-abd3-f19fe8d9b526 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 14.180s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1743.417850] env[62519]: INFO nova.compute.claims [None req-51df1d83-d7cf-4798-abd3-f19fe8d9b526 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] [instance: 76786353-f93f-4e7e-b3f7-7f22ae4b7b41] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1743.441061] env[62519]: INFO nova.scheduler.client.report [None req-aae7d2f2-bba5-46f9-99c5-5b3533414c8a tempest-AttachInterfacesV270Test-1088965964 tempest-AttachInterfacesV270Test-1088965964-project-member] Deleted allocations for instance 0789b142-4712-4b7a-9197-c3689f24df7c [ 1743.473312] env[62519]: DEBUG nova.network.neutron [None req-86f29903-4db3-4b91-af52-9fc65de92246 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] [instance: c8816718-0a35-4474-b162-c619b0acc154] Updating instance_info_cache with network_info: [{"id": "3a9391b3-bc92-4564-b965-73ca1d19957d", "address": "fa:16:3e:23:3a:5a", "network": {"id": "f9454e9e-4102-407f-acc0-7afee44ec876", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-684678313-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "683d60927fdf424386ffcfaa344a7af6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "11b669be-fb26-4ef8-bdb6-c77ab9d06daf", "external-id": "nsx-vlan-transportzone-633", "segmentation_id": 633, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3a9391b3-bc", "ovs_interfaceid": "3a9391b3-bc92-4564-b965-73ca1d19957d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1743.713325] 
env[62519]: DEBUG oslo_concurrency.lockutils [None req-3fd0f8cc-63e6-40bc-a024-cf62b0b44c98 tempest-ServerRescueTestJSONUnderV235-415838788 tempest-ServerRescueTestJSONUnderV235-415838788-project-member] Lock "156ed02a-3365-4a4f-b4de-ea86920d3baf" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 21.900s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1743.724791] env[62519]: DEBUG oslo_vmware.api [None req-22e1d356-1166-4a97-af20-48bea5d24d15 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Task: {'id': task-1802835, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.494251} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1743.728259] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-22e1d356-1166-4a97-af20-48bea5d24d15 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk to [datastore1] dd60afd6-2834-4fca-a846-e39d57aabd60/dd60afd6-2834-4fca-a846-e39d57aabd60.vmdk {{(pid=62519) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1743.728259] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-22e1d356-1166-4a97-af20-48bea5d24d15 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] [instance: dd60afd6-2834-4fca-a846-e39d57aabd60] Extending root virtual disk to 1048576 {{(pid=62519) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1743.728259] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-44136e3d-3393-48db-9b8e-9742fb528d6c {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1743.735346] env[62519]: DEBUG oslo_vmware.api [None req-22e1d356-1166-4a97-af20-48bea5d24d15 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Waiting for the task: (returnval){ [ 1743.735346] env[62519]: value = "task-1802837" [ 1743.735346] env[62519]: _type = "Task" [ 1743.735346] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1743.743364] env[62519]: DEBUG oslo_vmware.api [None req-22e1d356-1166-4a97-af20-48bea5d24d15 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Task: {'id': task-1802837, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1743.809196] env[62519]: DEBUG oslo_vmware.api [None req-3222e7b5-02b9-4fbc-bf91-05dacee08ed0 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Task: {'id': task-1802836, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.432415} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1743.809196] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-3222e7b5-02b9-4fbc-bf91-05dacee08ed0 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Deleted the datastore file {{(pid=62519) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1743.809196] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-3222e7b5-02b9-4fbc-bf91-05dacee08ed0 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] [instance: e3a19583-b332-40e3-bdd0-d254f7a78b0a] Deleted contents of the VM from datastore datastore1 {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1743.809196] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-3222e7b5-02b9-4fbc-bf91-05dacee08ed0 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] [instance: e3a19583-b332-40e3-bdd0-d254f7a78b0a] Instance destroyed {{(pid=62519) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1743.809196] env[62519]: INFO nova.compute.manager [None req-3222e7b5-02b9-4fbc-bf91-05dacee08ed0 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] [instance: e3a19583-b332-40e3-bdd0-d254f7a78b0a] Took 1.21 seconds to destroy the instance on the hypervisor. [ 1743.809196] env[62519]: DEBUG oslo.service.loopingcall [None req-3222e7b5-02b9-4fbc-bf91-05dacee08ed0 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62519) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1743.809196] env[62519]: DEBUG nova.compute.manager [-] [instance: e3a19583-b332-40e3-bdd0-d254f7a78b0a] Deallocating network for instance {{(pid=62519) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2297}} [ 1743.809196] env[62519]: DEBUG nova.network.neutron [-] [instance: e3a19583-b332-40e3-bdd0-d254f7a78b0a] deallocate_for_instance() {{(pid=62519) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1743.952298] env[62519]: DEBUG oslo_concurrency.lockutils [None req-aae7d2f2-bba5-46f9-99c5-5b3533414c8a tempest-AttachInterfacesV270Test-1088965964 tempest-AttachInterfacesV270Test-1088965964-project-member] Lock "0789b142-4712-4b7a-9197-c3689f24df7c" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 20.183s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1743.977158] env[62519]: DEBUG oslo_concurrency.lockutils [None req-86f29903-4db3-4b91-af52-9fc65de92246 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Releasing lock "refresh_cache-c8816718-0a35-4474-b162-c619b0acc154" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1743.977726] env[62519]: DEBUG nova.compute.manager [None req-86f29903-4db3-4b91-af52-9fc65de92246 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] [instance: c8816718-0a35-4474-b162-c619b0acc154] Instance network_info: |[{"id": "3a9391b3-bc92-4564-b965-73ca1d19957d", "address": "fa:16:3e:23:3a:5a", "network": {"id": "f9454e9e-4102-407f-acc0-7afee44ec876", "bridge": "br-int", "label": 
"tempest-DeleteServersTestJSON-684678313-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "683d60927fdf424386ffcfaa344a7af6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "11b669be-fb26-4ef8-bdb6-c77ab9d06daf", "external-id": "nsx-vlan-transportzone-633", "segmentation_id": 633, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3a9391b3-bc", "ovs_interfaceid": "3a9391b3-bc92-4564-b965-73ca1d19957d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62519) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2004}} [ 1743.980017] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-86f29903-4db3-4b91-af52-9fc65de92246 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] [instance: c8816718-0a35-4474-b162-c619b0acc154] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:23:3a:5a', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '11b669be-fb26-4ef8-bdb6-c77ab9d06daf', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '3a9391b3-bc92-4564-b965-73ca1d19957d', 'vif_model': 'vmxnet3'}] {{(pid=62519) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1743.988121] env[62519]: DEBUG oslo.service.loopingcall [None req-86f29903-4db3-4b91-af52-9fc65de92246 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62519) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1743.989337] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c8816718-0a35-4474-b162-c619b0acc154] Creating VM on the ESX host {{(pid=62519) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1743.989722] env[62519]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-32cfdc0d-3603-415f-9a6e-4d6704b32bc5 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1744.018019] env[62519]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1744.018019] env[62519]: value = "task-1802838" [ 1744.018019] env[62519]: _type = "Task" [ 1744.018019] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1744.025867] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1802838, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1744.153933] env[62519]: DEBUG nova.network.neutron [None req-660b8793-017b-4efd-8bfd-c060a9ba6127 tempest-ServerRescueTestJSONUnderV235-415838788 tempest-ServerRescueTestJSONUnderV235-415838788-project-member] [instance: 156ed02a-3365-4a4f-b4de-ea86920d3baf] Updating instance_info_cache with network_info: [{"id": "af20c500-c1f3-427c-a59d-e8a17b0a6945", "address": "fa:16:3e:2a:c1:c6", "network": {"id": "e0faae63-5ec4-4eef-a323-174dc4b623b5", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-1005479736-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "7336e5aa2f9b42789cec48f07e586876", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4055505f-97ab-400b-969c-43d99b38fd48", "external-id": "nsx-vlan-transportzone-952", "segmentation_id": 952, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapaf20c500-c1", "ovs_interfaceid": "af20c500-c1f3-427c-a59d-e8a17b0a6945", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1744.249814] env[62519]: DEBUG oslo_vmware.api [None req-22e1d356-1166-4a97-af20-48bea5d24d15 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Task: {'id': task-1802837, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.067956} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1744.250173] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-22e1d356-1166-4a97-af20-48bea5d24d15 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] [instance: dd60afd6-2834-4fca-a846-e39d57aabd60] Extended root virtual disk {{(pid=62519) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1744.251791] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-497ac6b5-1eb3-442d-a202-5af46b1b6882 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1744.279626] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-22e1d356-1166-4a97-af20-48bea5d24d15 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] [instance: dd60afd6-2834-4fca-a846-e39d57aabd60] Reconfiguring VM instance instance-0000004f to attach disk [datastore1] dd60afd6-2834-4fca-a846-e39d57aabd60/dd60afd6-2834-4fca-a846-e39d57aabd60.vmdk or device None with type sparse {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1744.280716] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d2b3b4ee-8bcf-462d-b8d7-03390e19b406 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1744.305949] env[62519]: DEBUG oslo_vmware.api [None req-22e1d356-1166-4a97-af20-48bea5d24d15 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Waiting for the task: (returnval){ [ 1744.305949] env[62519]: value = "task-1802839" [ 1744.305949] env[62519]: _type = "Task" [ 1744.305949] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1744.318378] env[62519]: DEBUG oslo_vmware.api [None req-22e1d356-1166-4a97-af20-48bea5d24d15 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Task: {'id': task-1802839, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1744.533332] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1802838, 'name': CreateVM_Task, 'duration_secs': 0.436868} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1744.533332] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c8816718-0a35-4474-b162-c619b0acc154] Created VM on the ESX host {{(pid=62519) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1744.533332] env[62519]: DEBUG nova.compute.manager [req-fcc92a82-7a07-4f9d-96d1-33ab6b4253be req-ebfeddea-d340-4056-82ca-fd96d30c469e service nova] [instance: c8816718-0a35-4474-b162-c619b0acc154] Received event network-changed-3a9391b3-bc92-4564-b965-73ca1d19957d {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1744.533332] env[62519]: DEBUG nova.compute.manager [req-fcc92a82-7a07-4f9d-96d1-33ab6b4253be req-ebfeddea-d340-4056-82ca-fd96d30c469e service nova] [instance: c8816718-0a35-4474-b162-c619b0acc154] Refreshing instance network info cache due to event network-changed-3a9391b3-bc92-4564-b965-73ca1d19957d. 
{{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11628}} [ 1744.533953] env[62519]: DEBUG oslo_concurrency.lockutils [req-fcc92a82-7a07-4f9d-96d1-33ab6b4253be req-ebfeddea-d340-4056-82ca-fd96d30c469e service nova] Acquiring lock "refresh_cache-c8816718-0a35-4474-b162-c619b0acc154" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1744.533953] env[62519]: DEBUG oslo_concurrency.lockutils [req-fcc92a82-7a07-4f9d-96d1-33ab6b4253be req-ebfeddea-d340-4056-82ca-fd96d30c469e service nova] Acquired lock "refresh_cache-c8816718-0a35-4474-b162-c619b0acc154" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1744.533953] env[62519]: DEBUG nova.network.neutron [req-fcc92a82-7a07-4f9d-96d1-33ab6b4253be req-ebfeddea-d340-4056-82ca-fd96d30c469e service nova] [instance: c8816718-0a35-4474-b162-c619b0acc154] Refreshing network info cache for port 3a9391b3-bc92-4564-b965-73ca1d19957d {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1744.538266] env[62519]: DEBUG oslo_concurrency.lockutils [None req-86f29903-4db3-4b91-af52-9fc65de92246 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1744.538454] env[62519]: DEBUG oslo_concurrency.lockutils [None req-86f29903-4db3-4b91-af52-9fc65de92246 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Acquired lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1744.538713] env[62519]: DEBUG oslo_concurrency.lockutils [None req-86f29903-4db3-4b91-af52-9fc65de92246 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1744.540858] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1a35c0c9-dcc2-4a9b-96ab-feaac0e848fa {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1744.548734] env[62519]: DEBUG oslo_vmware.api [None req-86f29903-4db3-4b91-af52-9fc65de92246 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Waiting for the task: (returnval){ [ 1744.548734] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]52f1f66b-4681-54da-36d9-ac49cbd7913a" [ 1744.548734] env[62519]: _type = "Task" [ 1744.548734] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1744.564273] env[62519]: DEBUG oslo_vmware.api [None req-86f29903-4db3-4b91-af52-9fc65de92246 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52f1f66b-4681-54da-36d9-ac49cbd7913a, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1744.656145] env[62519]: DEBUG oslo_concurrency.lockutils [None req-660b8793-017b-4efd-8bfd-c060a9ba6127 tempest-ServerRescueTestJSONUnderV235-415838788 tempest-ServerRescueTestJSONUnderV235-415838788-project-member] Releasing lock "refresh_cache-156ed02a-3365-4a4f-b4de-ea86920d3baf" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1744.725600] env[62519]: DEBUG nova.network.neutron [-] [instance: e3a19583-b332-40e3-bdd0-d254f7a78b0a] Updating instance_info_cache with network_info: [] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1744.818039] env[62519]: DEBUG oslo_vmware.api [None req-22e1d356-1166-4a97-af20-48bea5d24d15 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Task: {'id': task-1802839, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1744.819997] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f35314d2-5e9f-44ca-bce3-8ef2fdfa2fbb {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1744.830018] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6cc04897-9275-4395-b5a8-fcb30216e98a {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1744.865964] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8dfd87ed-03df-434d-af21-79b18b9d38f2 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1744.875424] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a6997e9-e6d1-4d88-8d0d-0215209d0c79 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1744.892118] env[62519]: DEBUG nova.compute.provider_tree [None req-51df1d83-d7cf-4798-abd3-f19fe8d9b526 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] Inventory has not changed in ProviderTree for provider: f8ca0d98-9158-4b85-ae0e-b106f966dd44 {{(pid=62519) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1745.067604] env[62519]: DEBUG oslo_vmware.api [None req-86f29903-4db3-4b91-af52-9fc65de92246 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52f1f66b-4681-54da-36d9-ac49cbd7913a, 'name': SearchDatastore_Task, 'duration_secs': 0.01222} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1745.067936] env[62519]: DEBUG oslo_concurrency.lockutils [None req-86f29903-4db3-4b91-af52-9fc65de92246 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Releasing lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1745.069050] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-86f29903-4db3-4b91-af52-9fc65de92246 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] [instance: c8816718-0a35-4474-b162-c619b0acc154] Processing image 15793716-f1d9-4a86-9030-717adf498693 {{(pid=62519) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1745.069050] env[62519]: DEBUG oslo_concurrency.lockutils [None req-86f29903-4db3-4b91-af52-9fc65de92246 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1745.069050] env[62519]: DEBUG oslo_concurrency.lockutils [None req-86f29903-4db3-4b91-af52-9fc65de92246 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Acquired lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1745.069050] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-86f29903-4db3-4b91-af52-9fc65de92246 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62519) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1745.069582] env[62519]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-60f4ea84-8e08-4ddb-b6fc-c27315454b5d {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1745.090359] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-86f29903-4db3-4b91-af52-9fc65de92246 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62519) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1745.090359] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-86f29903-4db3-4b91-af52-9fc65de92246 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62519) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1745.090359] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5a151ed5-4319-4550-a31b-034f4b062439 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1745.098019] env[62519]: DEBUG oslo_vmware.api [None req-86f29903-4db3-4b91-af52-9fc65de92246 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Waiting for the task: (returnval){ [ 1745.098019] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]520127d3-2ced-7a0b-695e-55dc7e0e91a0" [ 1745.098019] env[62519]: _type = "Task" [ 1745.098019] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1745.105461] env[62519]: DEBUG oslo_vmware.api [None req-86f29903-4db3-4b91-af52-9fc65de92246 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]520127d3-2ced-7a0b-695e-55dc7e0e91a0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1745.227376] env[62519]: INFO nova.compute.manager [-] [instance: e3a19583-b332-40e3-bdd0-d254f7a78b0a] Took 1.42 seconds to deallocate network for instance. [ 1745.317525] env[62519]: DEBUG oslo_vmware.api [None req-22e1d356-1166-4a97-af20-48bea5d24d15 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Task: {'id': task-1802839, 'name': ReconfigVM_Task, 'duration_secs': 0.655275} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1745.320550] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-22e1d356-1166-4a97-af20-48bea5d24d15 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] [instance: dd60afd6-2834-4fca-a846-e39d57aabd60] Reconfigured VM instance instance-0000004f to attach disk [datastore1] dd60afd6-2834-4fca-a846-e39d57aabd60/dd60afd6-2834-4fca-a846-e39d57aabd60.vmdk or device None with type sparse {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1745.320550] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-210e2ad2-bfd6-49b8-941f-c423e68259c7 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1745.326931] env[62519]: DEBUG oslo_vmware.api [None req-22e1d356-1166-4a97-af20-48bea5d24d15 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Waiting for the task: (returnval){ [ 1745.326931] env[62519]: value = "task-1802840" [ 1745.326931] env[62519]: _type = "Task" [ 1745.326931] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1745.344230] env[62519]: DEBUG oslo_vmware.api [None req-22e1d356-1166-4a97-af20-48bea5d24d15 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Task: {'id': task-1802840, 'name': Rename_Task} progress is 6%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1745.390246] env[62519]: DEBUG nova.network.neutron [req-fcc92a82-7a07-4f9d-96d1-33ab6b4253be req-ebfeddea-d340-4056-82ca-fd96d30c469e service nova] [instance: c8816718-0a35-4474-b162-c619b0acc154] Updated VIF entry in instance network info cache for port 3a9391b3-bc92-4564-b965-73ca1d19957d. {{(pid=62519) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1745.390925] env[62519]: DEBUG nova.network.neutron [req-fcc92a82-7a07-4f9d-96d1-33ab6b4253be req-ebfeddea-d340-4056-82ca-fd96d30c469e service nova] [instance: c8816718-0a35-4474-b162-c619b0acc154] Updating instance_info_cache with network_info: [{"id": "3a9391b3-bc92-4564-b965-73ca1d19957d", "address": "fa:16:3e:23:3a:5a", "network": {"id": "f9454e9e-4102-407f-acc0-7afee44ec876", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-684678313-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "683d60927fdf424386ffcfaa344a7af6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "11b669be-fb26-4ef8-bdb6-c77ab9d06daf", "external-id": "nsx-vlan-transportzone-633", "segmentation_id": 633, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3a9391b3-bc", "ovs_interfaceid": "3a9391b3-bc92-4564-b965-73ca1d19957d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1745.397770] env[62519]: DEBUG nova.scheduler.client.report [None req-51df1d83-d7cf-4798-abd3-f19fe8d9b526 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] Inventory has not changed for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1745.610614] env[62519]: DEBUG oslo_vmware.api [None req-86f29903-4db3-4b91-af52-9fc65de92246 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]520127d3-2ced-7a0b-695e-55dc7e0e91a0, 'name': SearchDatastore_Task, 'duration_secs': 0.056896} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1745.610614] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-04682d5d-44f4-4a11-acf1-8204de7306e5 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1745.619635] env[62519]: DEBUG oslo_vmware.api [None req-86f29903-4db3-4b91-af52-9fc65de92246 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Waiting for the task: (returnval){ [ 1745.619635] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]52dedd97-3a00-aece-9e31-8d6df4fbf170" [ 1745.619635] env[62519]: _type = "Task" [ 1745.619635] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1745.629597] env[62519]: DEBUG oslo_vmware.api [None req-86f29903-4db3-4b91-af52-9fc65de92246 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52dedd97-3a00-aece-9e31-8d6df4fbf170, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1745.734533] env[62519]: DEBUG oslo_concurrency.lockutils [None req-3222e7b5-02b9-4fbc-bf91-05dacee08ed0 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1745.844719] env[62519]: DEBUG oslo_vmware.api [None req-22e1d356-1166-4a97-af20-48bea5d24d15 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Task: {'id': task-1802840, 'name': Rename_Task} progress is 99%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1745.893528] env[62519]: DEBUG oslo_concurrency.lockutils [req-fcc92a82-7a07-4f9d-96d1-33ab6b4253be req-ebfeddea-d340-4056-82ca-fd96d30c469e service nova] Releasing lock "refresh_cache-c8816718-0a35-4474-b162-c619b0acc154" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1745.908021] env[62519]: DEBUG oslo_concurrency.lockutils [None req-51df1d83-d7cf-4798-abd3-f19fe8d9b526 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.492s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1745.908394] env[62519]: DEBUG nova.compute.manager [None req-51df1d83-d7cf-4798-abd3-f19fe8d9b526 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] [instance: 76786353-f93f-4e7e-b3f7-7f22ae4b7b41] Start building networks asynchronously for instance. 
{{(pid=62519) _build_resources /opt/stack/nova/nova/compute/manager.py:2838}} [ 1745.913045] env[62519]: DEBUG oslo_concurrency.lockutils [None req-51df1d83-d7cf-4798-abd3-f19fe8d9b526 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 16.630s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1745.915188] env[62519]: INFO nova.compute.claims [None req-51df1d83-d7cf-4798-abd3-f19fe8d9b526 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] [instance: af422ca1-7966-4bed-97bf-2b4c5285eaab] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1746.132614] env[62519]: DEBUG oslo_vmware.api [None req-86f29903-4db3-4b91-af52-9fc65de92246 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52dedd97-3a00-aece-9e31-8d6df4fbf170, 'name': SearchDatastore_Task, 'duration_secs': 0.016325} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1746.133071] env[62519]: DEBUG oslo_concurrency.lockutils [None req-86f29903-4db3-4b91-af52-9fc65de92246 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Releasing lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1746.133994] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-86f29903-4db3-4b91-af52-9fc65de92246 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk to [datastore1] c8816718-0a35-4474-b162-c619b0acc154/c8816718-0a35-4474-b162-c619b0acc154.vmdk {{(pid=62519) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1746.134237] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-77773d59-0612-44d8-8e42-d06b04df64e3 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1746.144642] env[62519]: DEBUG oslo_vmware.api [None req-86f29903-4db3-4b91-af52-9fc65de92246 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Waiting for the task: (returnval){ [ 1746.144642] env[62519]: value = "task-1802841" [ 1746.144642] env[62519]: _type = "Task" [ 1746.144642] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1746.154220] env[62519]: DEBUG oslo_vmware.api [None req-86f29903-4db3-4b91-af52-9fc65de92246 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Task: {'id': task-1802841, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1746.203485] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-660b8793-017b-4efd-8bfd-c060a9ba6127 tempest-ServerRescueTestJSONUnderV235-415838788 tempest-ServerRescueTestJSONUnderV235-415838788-project-member] [instance: 156ed02a-3365-4a4f-b4de-ea86920d3baf] Powering off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1746.203577] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f807920e-37c0-445d-bada-3f1b3d8f663a {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1746.211377] env[62519]: DEBUG oslo_vmware.api [None req-660b8793-017b-4efd-8bfd-c060a9ba6127 tempest-ServerRescueTestJSONUnderV235-415838788 tempest-ServerRescueTestJSONUnderV235-415838788-project-member] Waiting for the task: (returnval){ [ 1746.211377] env[62519]: value = "task-1802842" [ 1746.211377] env[62519]: _type = "Task" [ 1746.211377] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1746.224489] env[62519]: DEBUG oslo_vmware.api [None req-660b8793-017b-4efd-8bfd-c060a9ba6127 tempest-ServerRescueTestJSONUnderV235-415838788 tempest-ServerRescueTestJSONUnderV235-415838788-project-member] Task: {'id': task-1802842, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1746.347145] env[62519]: DEBUG oslo_vmware.api [None req-22e1d356-1166-4a97-af20-48bea5d24d15 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Task: {'id': task-1802840, 'name': Rename_Task} progress is 99%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1746.420538] env[62519]: DEBUG nova.compute.utils [None req-51df1d83-d7cf-4798-abd3-f19fe8d9b526 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] Using /dev/sd instead of None {{(pid=62519) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1746.426247] env[62519]: DEBUG nova.compute.manager [None req-51df1d83-d7cf-4798-abd3-f19fe8d9b526 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] [instance: 76786353-f93f-4e7e-b3f7-7f22ae4b7b41] Allocating IP information in the background. 
{{(pid=62519) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1989}} [ 1746.426247] env[62519]: DEBUG nova.network.neutron [None req-51df1d83-d7cf-4798-abd3-f19fe8d9b526 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] [instance: 76786353-f93f-4e7e-b3f7-7f22ae4b7b41] allocate_for_instance() {{(pid=62519) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1746.521718] env[62519]: DEBUG nova.policy [None req-51df1d83-d7cf-4798-abd3-f19fe8d9b526 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '8e80a7ce112e4087af60e7de4d915118', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f1e17b34cbef497985ff79e77d5b0f89', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62519) authorize /opt/stack/nova/nova/policy.py:192}} [ 1746.577680] env[62519]: DEBUG nova.compute.manager [req-53ae2eda-d9f4-4fc2-8a89-733ebfbea6de req-b1886aae-7115-482c-a010-563f878211e0 service nova] [instance: e3a19583-b332-40e3-bdd0-d254f7a78b0a] Received event network-vif-deleted-c119fc75-1f9d-4eb2-83fb-64e6f08d3dc8 {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1746.655350] env[62519]: DEBUG oslo_vmware.api [None req-86f29903-4db3-4b91-af52-9fc65de92246 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Task: {'id': task-1802841, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.494873} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1746.655625] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-86f29903-4db3-4b91-af52-9fc65de92246 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk to [datastore1] c8816718-0a35-4474-b162-c619b0acc154/c8816718-0a35-4474-b162-c619b0acc154.vmdk {{(pid=62519) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1746.655838] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-86f29903-4db3-4b91-af52-9fc65de92246 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] [instance: c8816718-0a35-4474-b162-c619b0acc154] Extending root virtual disk to 1048576 {{(pid=62519) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1746.656093] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-1551f99b-6693-4645-85a2-8a3819146b42 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1746.663556] env[62519]: DEBUG oslo_vmware.api [None req-86f29903-4db3-4b91-af52-9fc65de92246 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Waiting for the task: (returnval){ [ 1746.663556] env[62519]: value = "task-1802843" [ 1746.663556] env[62519]: _type = "Task" [ 1746.663556] env[62519]: } to complete. 
{{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1746.678966] env[62519]: DEBUG oslo_vmware.api [None req-86f29903-4db3-4b91-af52-9fc65de92246 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Task: {'id': task-1802843, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1747.490993] env[62519]: DEBUG nova.compute.manager [None req-51df1d83-d7cf-4798-abd3-f19fe8d9b526 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] [instance: 76786353-f93f-4e7e-b3f7-7f22ae4b7b41] Start building block device mappings for instance. {{(pid=62519) _build_resources /opt/stack/nova/nova/compute/manager.py:2873}} [ 1747.497170] env[62519]: DEBUG oslo_vmware.api [None req-660b8793-017b-4efd-8bfd-c060a9ba6127 tempest-ServerRescueTestJSONUnderV235-415838788 tempest-ServerRescueTestJSONUnderV235-415838788-project-member] Task: {'id': task-1802842, 'name': PowerOffVM_Task, 'duration_secs': 0.206082} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1747.497320] env[62519]: DEBUG nova.network.neutron [None req-51df1d83-d7cf-4798-abd3-f19fe8d9b526 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] [instance: 76786353-f93f-4e7e-b3f7-7f22ae4b7b41] Successfully created port: 5fcd6d6f-381e-473c-a8b8-40f2f0ec3276 {{(pid=62519) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1747.505109] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-660b8793-017b-4efd-8bfd-c060a9ba6127 tempest-ServerRescueTestJSONUnderV235-415838788 tempest-ServerRescueTestJSONUnderV235-415838788-project-member] [instance: 156ed02a-3365-4a4f-b4de-ea86920d3baf] Powered off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1747.506269] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08522c8c-688b-47ba-97c4-e6f92cd06251 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1747.509902] env[62519]: DEBUG oslo_concurrency.lockutils [None req-7972f1f6-95b2-485b-b78c-3e695e516e72 tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] Acquiring lock "99f22198-1a65-4d0d-b665-90c7063dbdb9" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1747.510151] env[62519]: DEBUG oslo_concurrency.lockutils [None req-7972f1f6-95b2-485b-b78c-3e695e516e72 tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] Lock "99f22198-1a65-4d0d-b665-90c7063dbdb9" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1747.516409] env[62519]: DEBUG oslo_vmware.api [None req-22e1d356-1166-4a97-af20-48bea5d24d15 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Task: {'id': task-1802840, 'name': Rename_Task, 'duration_secs': 1.206975} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1747.518170] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-22e1d356-1166-4a97-af20-48bea5d24d15 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] [instance: dd60afd6-2834-4fca-a846-e39d57aabd60] Powering on the VM {{(pid=62519) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1747.518464] env[62519]: DEBUG oslo_vmware.api [None req-86f29903-4db3-4b91-af52-9fc65de92246 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Task: {'id': task-1802843, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.081589} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1747.531038] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-28c400ab-1328-4fbc-a0ab-dac75cd6999a {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1747.532816] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-86f29903-4db3-4b91-af52-9fc65de92246 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] [instance: c8816718-0a35-4474-b162-c619b0acc154] Extended root virtual disk {{(pid=62519) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1747.536970] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14de94f2-1ee5-4607-a8ad-d34fa4a9d58a {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1747.539886] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f15b7138-475a-41c5-8155-7cecf88794c1 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1747.572204] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-86f29903-4db3-4b91-af52-9fc65de92246 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] [instance: c8816718-0a35-4474-b162-c619b0acc154] Reconfiguring VM instance instance-00000050 to attach disk [datastore1] c8816718-0a35-4474-b162-c619b0acc154/c8816718-0a35-4474-b162-c619b0acc154.vmdk or device None with type sparse {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1747.574491] env[62519]: DEBUG oslo_vmware.api [None req-22e1d356-1166-4a97-af20-48bea5d24d15 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Waiting for the task: (returnval){ [ 1747.574491] env[62519]: value = "task-1802844" [ 1747.574491] env[62519]: _type = "Task" [ 1747.574491] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1747.577784] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6f80e629-f860-4106-bee1-276031d75acb {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1747.606093] env[62519]: DEBUG oslo_vmware.api [None req-22e1d356-1166-4a97-af20-48bea5d24d15 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Task: {'id': task-1802844, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1747.607434] env[62519]: DEBUG oslo_vmware.api [None req-86f29903-4db3-4b91-af52-9fc65de92246 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Waiting for the task: (returnval){ [ 1747.607434] env[62519]: value = "task-1802845" [ 1747.607434] env[62519]: _type = "Task" [ 1747.607434] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1747.612448] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-660b8793-017b-4efd-8bfd-c060a9ba6127 tempest-ServerRescueTestJSONUnderV235-415838788 tempest-ServerRescueTestJSONUnderV235-415838788-project-member] [instance: 156ed02a-3365-4a4f-b4de-ea86920d3baf] Powering off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1747.615253] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a7834bc9-9b78-4a3d-99d2-45952d59499b {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1747.623232] env[62519]: DEBUG oslo_vmware.api [None req-86f29903-4db3-4b91-af52-9fc65de92246 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Task: {'id': task-1802845, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1747.624708] env[62519]: DEBUG oslo_vmware.api [None req-660b8793-017b-4efd-8bfd-c060a9ba6127 tempest-ServerRescueTestJSONUnderV235-415838788 tempest-ServerRescueTestJSONUnderV235-415838788-project-member] Waiting for the task: (returnval){ [ 1747.624708] env[62519]: value = "task-1802846" [ 1747.624708] env[62519]: _type = "Task" [ 1747.624708] env[62519]: } to complete. 
{{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1747.636943] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-660b8793-017b-4efd-8bfd-c060a9ba6127 tempest-ServerRescueTestJSONUnderV235-415838788 tempest-ServerRescueTestJSONUnderV235-415838788-project-member] [instance: 156ed02a-3365-4a4f-b4de-ea86920d3baf] VM already powered off {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1747.637196] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-660b8793-017b-4efd-8bfd-c060a9ba6127 tempest-ServerRescueTestJSONUnderV235-415838788 tempest-ServerRescueTestJSONUnderV235-415838788-project-member] [instance: 156ed02a-3365-4a4f-b4de-ea86920d3baf] Processing image 15793716-f1d9-4a86-9030-717adf498693 {{(pid=62519) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1747.637417] env[62519]: DEBUG oslo_concurrency.lockutils [None req-660b8793-017b-4efd-8bfd-c060a9ba6127 tempest-ServerRescueTestJSONUnderV235-415838788 tempest-ServerRescueTestJSONUnderV235-415838788-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1747.637567] env[62519]: DEBUG oslo_concurrency.lockutils [None req-660b8793-017b-4efd-8bfd-c060a9ba6127 tempest-ServerRescueTestJSONUnderV235-415838788 tempest-ServerRescueTestJSONUnderV235-415838788-project-member] Acquired lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1747.637742] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-660b8793-017b-4efd-8bfd-c060a9ba6127 tempest-ServerRescueTestJSONUnderV235-415838788 tempest-ServerRescueTestJSONUnderV235-415838788-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62519) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1747.638023] env[62519]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-1e918843-fadc-4fe9-982a-03b552ecfa98 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1747.646369] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-660b8793-017b-4efd-8bfd-c060a9ba6127 tempest-ServerRescueTestJSONUnderV235-415838788 tempest-ServerRescueTestJSONUnderV235-415838788-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62519) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1747.646563] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-660b8793-017b-4efd-8bfd-c060a9ba6127 tempest-ServerRescueTestJSONUnderV235-415838788 tempest-ServerRescueTestJSONUnderV235-415838788-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62519) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1747.647325] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7d5c026c-5b83-47e0-97c2-f9182693a827 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1747.659368] env[62519]: DEBUG oslo_concurrency.lockutils [None req-0ec5e37a-37cf-41c7-9c93-a04efad8446e tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Acquiring lock "4a0f7975-5a07-4593-ae71-cabebdefe0fe" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1747.659603] env[62519]: DEBUG oslo_concurrency.lockutils [None req-0ec5e37a-37cf-41c7-9c93-a04efad8446e tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Lock "4a0f7975-5a07-4593-ae71-cabebdefe0fe" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1747.661176] env[62519]: DEBUG oslo_vmware.api [None req-660b8793-017b-4efd-8bfd-c060a9ba6127 tempest-ServerRescueTestJSONUnderV235-415838788 tempest-ServerRescueTestJSONUnderV235-415838788-project-member] Waiting for the task: (returnval){ [ 1747.661176] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]522d5c87-75ac-d454-a681-9591220a407a" [ 1747.661176] env[62519]: _type = "Task" [ 1747.661176] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1747.670929] env[62519]: DEBUG oslo_vmware.api [None req-660b8793-017b-4efd-8bfd-c060a9ba6127 tempest-ServerRescueTestJSONUnderV235-415838788 tempest-ServerRescueTestJSONUnderV235-415838788-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]522d5c87-75ac-d454-a681-9591220a407a, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1747.935580] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea5dff75-471d-4d26-b504-cc52ce6fd412 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1747.945522] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e85b56e-cf46-44a0-9f48-783dfc018026 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1747.979638] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d341e706-0bfc-4963-a0fd-6feebadb907f {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1747.987999] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ff6317d-3867-4ce6-a7c7-bf1e39ebb763 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1748.008812] env[62519]: DEBUG nova.compute.provider_tree [None req-51df1d83-d7cf-4798-abd3-f19fe8d9b526 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] Inventory has not changed in ProviderTree for provider: f8ca0d98-9158-4b85-ae0e-b106f966dd44 {{(pid=62519) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1748.017781] env[62519]: DEBUG nova.compute.utils [None req-7972f1f6-95b2-485b-b78c-3e695e516e72 tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] Using /dev/sd instead of None {{(pid=62519) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1748.106956] env[62519]: DEBUG oslo_vmware.api [None req-22e1d356-1166-4a97-af20-48bea5d24d15 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Task: {'id': task-1802844, 'name': PowerOnVM_Task, 'duration_secs': 0.467381} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1748.107280] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-22e1d356-1166-4a97-af20-48bea5d24d15 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] [instance: dd60afd6-2834-4fca-a846-e39d57aabd60] Powered on the VM {{(pid=62519) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1748.107482] env[62519]: INFO nova.compute.manager [None req-22e1d356-1166-4a97-af20-48bea5d24d15 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] [instance: dd60afd6-2834-4fca-a846-e39d57aabd60] Took 9.26 seconds to spawn the instance on the hypervisor. 
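The "Waiting for the task: (returnval){ ... } to complete", "progress is N%" and "completed successfully" entries above are emitted by oslo.vmware's task polling (wait_for_task at api.py:397, _poll_task at api.py:434/444). A minimal sketch of that calling pattern, assuming an already established VMwareAPISession and an already resolved vm_ref; both names are placeholders for illustration, not values taken from this log:

    from oslo_vmware import api as vmware_api

    def power_on(session: vmware_api.VMwareAPISession, vm_ref) -> None:
        """Start an asynchronous vSphere task and wait for it to finish."""
        # A *_Task method returns immediately with a task reference; the
        # session's poll loop then reports progress until the task reaches a
        # terminal state, raising if the task ends in error.
        task_ref = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
        session.wait_for_task(task_ref)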
[ 1748.107660] env[62519]: DEBUG nova.compute.manager [None req-22e1d356-1166-4a97-af20-48bea5d24d15 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] [instance: dd60afd6-2834-4fca-a846-e39d57aabd60] Checking state {{(pid=62519) _get_power_state /opt/stack/nova/nova/compute/manager.py:1799}} [ 1748.108596] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-332a9a1b-e6af-44e3-a4ba-82423f868ced {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1748.123449] env[62519]: DEBUG oslo_vmware.api [None req-86f29903-4db3-4b91-af52-9fc65de92246 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Task: {'id': task-1802845, 'name': ReconfigVM_Task, 'duration_secs': 0.304837} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1748.123859] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-86f29903-4db3-4b91-af52-9fc65de92246 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] [instance: c8816718-0a35-4474-b162-c619b0acc154] Reconfigured VM instance instance-00000050 to attach disk [datastore1] c8816718-0a35-4474-b162-c619b0acc154/c8816718-0a35-4474-b162-c619b0acc154.vmdk or device None with type sparse {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1748.124414] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-8893b8d7-039b-434c-b025-b709001f1244 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1748.132055] env[62519]: DEBUG oslo_vmware.api [None req-86f29903-4db3-4b91-af52-9fc65de92246 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Waiting for the task: (returnval){ [ 1748.132055] env[62519]: value = "task-1802847" [ 1748.132055] env[62519]: _type = "Task" [ 1748.132055] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1748.140546] env[62519]: DEBUG oslo_vmware.api [None req-86f29903-4db3-4b91-af52-9fc65de92246 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Task: {'id': task-1802847, 'name': Rename_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1748.165110] env[62519]: DEBUG nova.compute.manager [None req-0ec5e37a-37cf-41c7-9c93-a04efad8446e tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] [instance: 4a0f7975-5a07-4593-ae71-cabebdefe0fe] Starting instance... {{(pid=62519) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2441}} [ 1748.177210] env[62519]: DEBUG oslo_vmware.api [None req-660b8793-017b-4efd-8bfd-c060a9ba6127 tempest-ServerRescueTestJSONUnderV235-415838788 tempest-ServerRescueTestJSONUnderV235-415838788-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]522d5c87-75ac-d454-a681-9591220a407a, 'name': SearchDatastore_Task, 'duration_secs': 0.010612} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1748.178026] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-917a226d-87d1-4d9b-8fee-1aa9f1b526bc {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1748.183963] env[62519]: DEBUG oslo_vmware.api [None req-660b8793-017b-4efd-8bfd-c060a9ba6127 tempest-ServerRescueTestJSONUnderV235-415838788 tempest-ServerRescueTestJSONUnderV235-415838788-project-member] Waiting for the task: (returnval){ [ 1748.183963] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]52894ded-77aa-a8be-451e-fd17d348f22e" [ 1748.183963] env[62519]: _type = "Task" [ 1748.183963] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1748.196245] env[62519]: DEBUG oslo_vmware.api [None req-660b8793-017b-4efd-8bfd-c060a9ba6127 tempest-ServerRescueTestJSONUnderV235-415838788 tempest-ServerRescueTestJSONUnderV235-415838788-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52894ded-77aa-a8be-451e-fd17d348f22e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1748.513100] env[62519]: DEBUG nova.compute.manager [None req-51df1d83-d7cf-4798-abd3-f19fe8d9b526 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] [instance: 76786353-f93f-4e7e-b3f7-7f22ae4b7b41] Start spawning the instance on the hypervisor. {{(pid=62519) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2647}} [ 1748.516036] env[62519]: DEBUG nova.scheduler.client.report [None req-51df1d83-d7cf-4798-abd3-f19fe8d9b526 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] Inventory has not changed for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1748.521377] env[62519]: DEBUG oslo_concurrency.lockutils [None req-7972f1f6-95b2-485b-b78c-3e695e516e72 tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] Lock "99f22198-1a65-4d0d-b665-90c7063dbdb9" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.011s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1748.547061] env[62519]: DEBUG nova.virt.hardware [None req-51df1d83-d7cf-4798-abd3-f19fe8d9b526 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T07:56:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta 
ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T07:55:55Z,direct_url=,disk_format='vmdk',id=15793716-f1d9-4a86-9030-717adf498693,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4069337684d348e0a1ab4eb6a1a2b14d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T07:55:56Z,virtual_size=,visibility=), allow threads: False {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1748.547374] env[62519]: DEBUG nova.virt.hardware [None req-51df1d83-d7cf-4798-abd3-f19fe8d9b526 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] Flavor limits 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1748.547464] env[62519]: DEBUG nova.virt.hardware [None req-51df1d83-d7cf-4798-abd3-f19fe8d9b526 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] Image limits 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1748.547622] env[62519]: DEBUG nova.virt.hardware [None req-51df1d83-d7cf-4798-abd3-f19fe8d9b526 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] Flavor pref 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1748.547777] env[62519]: DEBUG nova.virt.hardware [None req-51df1d83-d7cf-4798-abd3-f19fe8d9b526 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] Image pref 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1748.547925] env[62519]: DEBUG nova.virt.hardware [None req-51df1d83-d7cf-4798-abd3-f19fe8d9b526 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1748.548490] env[62519]: DEBUG nova.virt.hardware [None req-51df1d83-d7cf-4798-abd3-f19fe8d9b526 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1748.548634] env[62519]: DEBUG nova.virt.hardware [None req-51df1d83-d7cf-4798-abd3-f19fe8d9b526 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62519) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1748.548818] env[62519]: DEBUG nova.virt.hardware [None req-51df1d83-d7cf-4798-abd3-f19fe8d9b526 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] Got 1 possible topologies {{(pid=62519) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1748.549021] env[62519]: DEBUG nova.virt.hardware [None req-51df1d83-d7cf-4798-abd3-f19fe8d9b526 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1748.549160] env[62519]: DEBUG 
nova.virt.hardware [None req-51df1d83-d7cf-4798-abd3-f19fe8d9b526 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1748.550077] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-296155a7-41cf-4007-a803-4797f881db65 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1748.559251] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3972ae69-1866-4f1d-8352-d8073ec3805d {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1748.637121] env[62519]: INFO nova.compute.manager [None req-22e1d356-1166-4a97-af20-48bea5d24d15 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] [instance: dd60afd6-2834-4fca-a846-e39d57aabd60] Took 23.54 seconds to build instance. [ 1748.643464] env[62519]: DEBUG oslo_vmware.api [None req-86f29903-4db3-4b91-af52-9fc65de92246 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Task: {'id': task-1802847, 'name': Rename_Task, 'duration_secs': 0.149997} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1748.643740] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-86f29903-4db3-4b91-af52-9fc65de92246 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] [instance: c8816718-0a35-4474-b162-c619b0acc154] Powering on the VM {{(pid=62519) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1748.644038] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b1b490f2-85d4-450d-a6c2-0280024c3594 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1748.651342] env[62519]: DEBUG oslo_vmware.api [None req-86f29903-4db3-4b91-af52-9fc65de92246 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Waiting for the task: (returnval){ [ 1748.651342] env[62519]: value = "task-1802848" [ 1748.651342] env[62519]: _type = "Task" [ 1748.651342] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1748.660222] env[62519]: DEBUG oslo_vmware.api [None req-86f29903-4db3-4b91-af52-9fc65de92246 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Task: {'id': task-1802848, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1748.694378] env[62519]: DEBUG oslo_vmware.api [None req-660b8793-017b-4efd-8bfd-c060a9ba6127 tempest-ServerRescueTestJSONUnderV235-415838788 tempest-ServerRescueTestJSONUnderV235-415838788-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52894ded-77aa-a8be-451e-fd17d348f22e, 'name': SearchDatastore_Task, 'duration_secs': 0.009463} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1748.696822] env[62519]: DEBUG oslo_concurrency.lockutils [None req-0ec5e37a-37cf-41c7-9c93-a04efad8446e tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1748.697187] env[62519]: DEBUG oslo_concurrency.lockutils [None req-660b8793-017b-4efd-8bfd-c060a9ba6127 tempest-ServerRescueTestJSONUnderV235-415838788 tempest-ServerRescueTestJSONUnderV235-415838788-project-member] Releasing lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1748.697492] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-660b8793-017b-4efd-8bfd-c060a9ba6127 tempest-ServerRescueTestJSONUnderV235-415838788 tempest-ServerRescueTestJSONUnderV235-415838788-project-member] Copying virtual disk from [datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk to [datastore1] 156ed02a-3365-4a4f-b4de-ea86920d3baf/15793716-f1d9-4a86-9030-717adf498693-rescue.vmdk. {{(pid=62519) disk_copy /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:281}} [ 1748.697836] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d86b2fa9-8458-4f32-8942-6d32ccc70d3f {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1748.706458] env[62519]: DEBUG oslo_vmware.api [None req-660b8793-017b-4efd-8bfd-c060a9ba6127 tempest-ServerRescueTestJSONUnderV235-415838788 tempest-ServerRescueTestJSONUnderV235-415838788-project-member] Waiting for the task: (returnval){ [ 1748.706458] env[62519]: value = "task-1802849" [ 1748.706458] env[62519]: _type = "Task" [ 1748.706458] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1748.722414] env[62519]: DEBUG oslo_vmware.api [None req-660b8793-017b-4efd-8bfd-c060a9ba6127 tempest-ServerRescueTestJSONUnderV235-415838788 tempest-ServerRescueTestJSONUnderV235-415838788-project-member] Task: {'id': task-1802849, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1749.021424] env[62519]: DEBUG oslo_concurrency.lockutils [None req-51df1d83-d7cf-4798-abd3-f19fe8d9b526 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.109s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1749.021997] env[62519]: DEBUG nova.compute.manager [None req-51df1d83-d7cf-4798-abd3-f19fe8d9b526 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] [instance: af422ca1-7966-4bed-97bf-2b4c5285eaab] Start building networks asynchronously for instance. 
{{(pid=62519) _build_resources /opt/stack/nova/nova/compute/manager.py:2838}} [ 1749.029023] env[62519]: DEBUG oslo_concurrency.lockutils [None req-c9946012-073d-42c5-b066-93d94ca91d4d tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 16.406s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1749.029023] env[62519]: DEBUG nova.objects.instance [None req-c9946012-073d-42c5-b066-93d94ca91d4d tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Lazy-loading 'resources' on Instance uuid 34d2991e-b6df-473d-8994-e45ff57ef131 {{(pid=62519) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1749.164365] env[62519]: DEBUG oslo_vmware.api [None req-86f29903-4db3-4b91-af52-9fc65de92246 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Task: {'id': task-1802848, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1749.210091] env[62519]: DEBUG nova.compute.manager [req-195753d8-4b61-4cfe-88e0-37ac11931a82 req-52b9d9ce-5170-45b5-b410-b57d4484b645 service nova] [instance: 76786353-f93f-4e7e-b3f7-7f22ae4b7b41] Received event network-vif-plugged-5fcd6d6f-381e-473c-a8b8-40f2f0ec3276 {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1749.210247] env[62519]: DEBUG oslo_concurrency.lockutils [req-195753d8-4b61-4cfe-88e0-37ac11931a82 req-52b9d9ce-5170-45b5-b410-b57d4484b645 service nova] Acquiring lock "76786353-f93f-4e7e-b3f7-7f22ae4b7b41-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1749.210588] env[62519]: DEBUG oslo_concurrency.lockutils [req-195753d8-4b61-4cfe-88e0-37ac11931a82 req-52b9d9ce-5170-45b5-b410-b57d4484b645 service nova] Lock "76786353-f93f-4e7e-b3f7-7f22ae4b7b41-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.003s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1749.212958] env[62519]: DEBUG oslo_concurrency.lockutils [req-195753d8-4b61-4cfe-88e0-37ac11931a82 req-52b9d9ce-5170-45b5-b410-b57d4484b645 service nova] Lock "76786353-f93f-4e7e-b3f7-7f22ae4b7b41-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.002s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1749.213180] env[62519]: DEBUG nova.compute.manager [req-195753d8-4b61-4cfe-88e0-37ac11931a82 req-52b9d9ce-5170-45b5-b410-b57d4484b645 service nova] [instance: 76786353-f93f-4e7e-b3f7-7f22ae4b7b41] No waiting events found dispatching network-vif-plugged-5fcd6d6f-381e-473c-a8b8-40f2f0ec3276 {{(pid=62519) pop_instance_event /opt/stack/nova/nova/compute/manager.py:323}} [ 1749.213355] env[62519]: WARNING nova.compute.manager [req-195753d8-4b61-4cfe-88e0-37ac11931a82 req-52b9d9ce-5170-45b5-b410-b57d4484b645 service nova] [instance: 76786353-f93f-4e7e-b3f7-7f22ae4b7b41] Received unexpected event network-vif-plugged-5fcd6d6f-381e-473c-a8b8-40f2f0ec3276 for instance with vm_state building and task_state spawning. 
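The recurring "Acquiring lock ...", "Lock ... acquired ... :: waited N.NNNs" and "Lock ... "released" ... :: held N.NNNs" entries (lockutils.py:310/313/331 and 402/407/421) come from oslo.concurrency's locking helpers. A minimal sketch of the two usage patterns, with placeholder lock names and an illustrative function body rather than Nova's actual code:

    from oslo_concurrency import lockutils

    @lockutils.synchronized('compute_resources')
    def update_usage() -> None:
        # Runs only while holding the in-process "compute_resources" lock;
        # the decorator logs how long the caller waited for the lock and how
        # long the lock was held.
        pass

    # The same helper is also usable as a context manager, which produces the
    # Acquiring/Acquired/Releasing lock lines:
    with lockutils.lock('refresh_cache-<instance-uuid>'):
        pass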
[ 1749.227161] env[62519]: DEBUG oslo_vmware.api [None req-660b8793-017b-4efd-8bfd-c060a9ba6127 tempest-ServerRescueTestJSONUnderV235-415838788 tempest-ServerRescueTestJSONUnderV235-415838788-project-member] Task: {'id': task-1802849, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1749.359405] env[62519]: DEBUG nova.network.neutron [None req-51df1d83-d7cf-4798-abd3-f19fe8d9b526 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] [instance: 76786353-f93f-4e7e-b3f7-7f22ae4b7b41] Successfully updated port: 5fcd6d6f-381e-473c-a8b8-40f2f0ec3276 {{(pid=62519) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1749.477171] env[62519]: DEBUG oslo_concurrency.lockutils [None req-7972f1f6-95b2-485b-b78c-3e695e516e72 tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] Acquiring lock "99f22198-1a65-4d0d-b665-90c7063dbdb9" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1749.477171] env[62519]: DEBUG oslo_concurrency.lockutils [None req-7972f1f6-95b2-485b-b78c-3e695e516e72 tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] Lock "99f22198-1a65-4d0d-b665-90c7063dbdb9" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.001s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1749.477171] env[62519]: INFO nova.compute.manager [None req-7972f1f6-95b2-485b-b78c-3e695e516e72 tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] [instance: 99f22198-1a65-4d0d-b665-90c7063dbdb9] Attaching volume 38c2442b-9ab6-4c99-96ff-e9c0c22ab09d to /dev/sdb [ 1749.532325] env[62519]: DEBUG nova.compute.utils [None req-51df1d83-d7cf-4798-abd3-f19fe8d9b526 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] Using /dev/sd instead of None {{(pid=62519) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1749.542484] env[62519]: DEBUG nova.compute.manager [None req-51df1d83-d7cf-4798-abd3-f19fe8d9b526 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] [instance: af422ca1-7966-4bed-97bf-2b4c5285eaab] Allocating IP information in the background. 
{{(pid=62519) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1989}} [ 1749.542484] env[62519]: DEBUG nova.network.neutron [None req-51df1d83-d7cf-4798-abd3-f19fe8d9b526 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] [instance: af422ca1-7966-4bed-97bf-2b4c5285eaab] allocate_for_instance() {{(pid=62519) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1749.546019] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff5ce5b5-809e-44fe-8f15-feed09624c8a {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1749.556249] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e3f7e91-cef6-43df-9fa1-7cf61beb3235 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1749.572955] env[62519]: DEBUG nova.virt.block_device [None req-7972f1f6-95b2-485b-b78c-3e695e516e72 tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] [instance: 99f22198-1a65-4d0d-b665-90c7063dbdb9] Updating existing volume attachment record: 0a8c76f1-7141-43d3-aaf8-35d6b572b05c {{(pid=62519) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1749.685363] env[62519]: DEBUG oslo_vmware.api [None req-86f29903-4db3-4b91-af52-9fc65de92246 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Task: {'id': task-1802848, 'name': PowerOnVM_Task, 'duration_secs': 0.821282} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1749.690561] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-86f29903-4db3-4b91-af52-9fc65de92246 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] [instance: c8816718-0a35-4474-b162-c619b0acc154] Powered on the VM {{(pid=62519) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1749.690886] env[62519]: INFO nova.compute.manager [None req-86f29903-4db3-4b91-af52-9fc65de92246 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] [instance: c8816718-0a35-4474-b162-c619b0acc154] Took 8.27 seconds to spawn the instance on the hypervisor. [ 1749.691024] env[62519]: DEBUG nova.compute.manager [None req-86f29903-4db3-4b91-af52-9fc65de92246 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] [instance: c8816718-0a35-4474-b162-c619b0acc154] Checking state {{(pid=62519) _get_power_state /opt/stack/nova/nova/compute/manager.py:1799}} [ 1749.692848] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67c539f6-30e8-4675-8e0c-15c8cac2681b {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1749.726572] env[62519]: DEBUG oslo_vmware.api [None req-660b8793-017b-4efd-8bfd-c060a9ba6127 tempest-ServerRescueTestJSONUnderV235-415838788 tempest-ServerRescueTestJSONUnderV235-415838788-project-member] Task: {'id': task-1802849, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.609806} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1749.729801] env[62519]: INFO nova.virt.vmwareapi.ds_util [None req-660b8793-017b-4efd-8bfd-c060a9ba6127 tempest-ServerRescueTestJSONUnderV235-415838788 tempest-ServerRescueTestJSONUnderV235-415838788-project-member] Copied virtual disk from [datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk to [datastore1] 156ed02a-3365-4a4f-b4de-ea86920d3baf/15793716-f1d9-4a86-9030-717adf498693-rescue.vmdk. [ 1749.732957] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02efe961-e9d6-4c55-9001-75cb31b93cd4 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1749.768680] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-660b8793-017b-4efd-8bfd-c060a9ba6127 tempest-ServerRescueTestJSONUnderV235-415838788 tempest-ServerRescueTestJSONUnderV235-415838788-project-member] [instance: 156ed02a-3365-4a4f-b4de-ea86920d3baf] Reconfiguring VM instance instance-0000004e to attach disk [datastore1] 156ed02a-3365-4a4f-b4de-ea86920d3baf/15793716-f1d9-4a86-9030-717adf498693-rescue.vmdk or device None with type thin {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1749.772331] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d2e1e4dc-a268-4e94-a081-33ad3fa3b38c {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1749.792994] env[62519]: DEBUG oslo_vmware.api [None req-660b8793-017b-4efd-8bfd-c060a9ba6127 tempest-ServerRescueTestJSONUnderV235-415838788 tempest-ServerRescueTestJSONUnderV235-415838788-project-member] Waiting for the task: (returnval){ [ 1749.792994] env[62519]: value = "task-1802851" [ 1749.792994] env[62519]: _type = "Task" [ 1749.792994] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1749.803116] env[62519]: DEBUG oslo_vmware.api [None req-660b8793-017b-4efd-8bfd-c060a9ba6127 tempest-ServerRescueTestJSONUnderV235-415838788 tempest-ServerRescueTestJSONUnderV235-415838788-project-member] Task: {'id': task-1802851, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1749.813830] env[62519]: DEBUG nova.policy [None req-51df1d83-d7cf-4798-abd3-f19fe8d9b526 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '8e80a7ce112e4087af60e7de4d915118', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f1e17b34cbef497985ff79e77d5b0f89', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62519) authorize /opt/stack/nova/nova/policy.py:192}} [ 1749.865703] env[62519]: DEBUG oslo_concurrency.lockutils [None req-51df1d83-d7cf-4798-abd3-f19fe8d9b526 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] Acquiring lock "refresh_cache-76786353-f93f-4e7e-b3f7-7f22ae4b7b41" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1749.865703] env[62519]: DEBUG oslo_concurrency.lockutils [None req-51df1d83-d7cf-4798-abd3-f19fe8d9b526 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] Acquired lock "refresh_cache-76786353-f93f-4e7e-b3f7-7f22ae4b7b41" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1749.865703] env[62519]: DEBUG nova.network.neutron [None req-51df1d83-d7cf-4798-abd3-f19fe8d9b526 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] [instance: 76786353-f93f-4e7e-b3f7-7f22ae4b7b41] Building network info cache for instance {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1750.054195] env[62519]: DEBUG nova.compute.manager [None req-51df1d83-d7cf-4798-abd3-f19fe8d9b526 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] [instance: af422ca1-7966-4bed-97bf-2b4c5285eaab] Start building block device mappings for instance. 
{{(pid=62519) _build_resources /opt/stack/nova/nova/compute/manager.py:2873}} [ 1750.059450] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35ffd70d-d629-4e26-bb96-f62dda296cce {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1750.068285] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df57f8bf-7468-4f43-8c75-1be721da53a0 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1750.101293] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-114660cd-1945-4b5c-97ac-cb620971f5d7 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1750.109643] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39afe649-b00b-47c0-8d94-757cc99c25eb {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1750.128318] env[62519]: DEBUG nova.compute.provider_tree [None req-c9946012-073d-42c5-b066-93d94ca91d4d tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Inventory has not changed in ProviderTree for provider: f8ca0d98-9158-4b85-ae0e-b106f966dd44 {{(pid=62519) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1750.145501] env[62519]: DEBUG oslo_concurrency.lockutils [None req-22e1d356-1166-4a97-af20-48bea5d24d15 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Lock "dd60afd6-2834-4fca-a846-e39d57aabd60" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 26.059s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1750.174383] env[62519]: DEBUG nova.network.neutron [None req-51df1d83-d7cf-4798-abd3-f19fe8d9b526 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] [instance: af422ca1-7966-4bed-97bf-2b4c5285eaab] Successfully created port: fc6b82fe-716c-40e8-b027-ffa8ee2ea408 {{(pid=62519) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1750.220280] env[62519]: INFO nova.compute.manager [None req-86f29903-4db3-4b91-af52-9fc65de92246 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] [instance: c8816718-0a35-4474-b162-c619b0acc154] Took 24.06 seconds to build instance. [ 1750.304856] env[62519]: DEBUG oslo_vmware.api [None req-660b8793-017b-4efd-8bfd-c060a9ba6127 tempest-ServerRescueTestJSONUnderV235-415838788 tempest-ServerRescueTestJSONUnderV235-415838788-project-member] Task: {'id': task-1802851, 'name': ReconfigVM_Task, 'duration_secs': 0.314264} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1750.305191] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-660b8793-017b-4efd-8bfd-c060a9ba6127 tempest-ServerRescueTestJSONUnderV235-415838788 tempest-ServerRescueTestJSONUnderV235-415838788-project-member] [instance: 156ed02a-3365-4a4f-b4de-ea86920d3baf] Reconfigured VM instance instance-0000004e to attach disk [datastore1] 156ed02a-3365-4a4f-b4de-ea86920d3baf/15793716-f1d9-4a86-9030-717adf498693-rescue.vmdk or device None with type thin {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1750.309703] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38e8dcdf-4305-4650-8841-b5d57c44fb7f {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1750.342534] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-fef919c7-8dce-4770-9f5e-c7a7cf0a87e3 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1750.362252] env[62519]: DEBUG oslo_vmware.api [None req-660b8793-017b-4efd-8bfd-c060a9ba6127 tempest-ServerRescueTestJSONUnderV235-415838788 tempest-ServerRescueTestJSONUnderV235-415838788-project-member] Waiting for the task: (returnval){ [ 1750.362252] env[62519]: value = "task-1802854" [ 1750.362252] env[62519]: _type = "Task" [ 1750.362252] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1750.376861] env[62519]: DEBUG oslo_vmware.api [None req-660b8793-017b-4efd-8bfd-c060a9ba6127 tempest-ServerRescueTestJSONUnderV235-415838788 tempest-ServerRescueTestJSONUnderV235-415838788-project-member] Task: {'id': task-1802854, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1750.436389] env[62519]: DEBUG nova.network.neutron [None req-51df1d83-d7cf-4798-abd3-f19fe8d9b526 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] [instance: 76786353-f93f-4e7e-b3f7-7f22ae4b7b41] Instance cache missing network info. 
{{(pid=62519) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1750.633152] env[62519]: DEBUG nova.scheduler.client.report [None req-c9946012-073d-42c5-b066-93d94ca91d4d tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Inventory has not changed for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1750.665015] env[62519]: DEBUG nova.network.neutron [None req-51df1d83-d7cf-4798-abd3-f19fe8d9b526 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] [instance: 76786353-f93f-4e7e-b3f7-7f22ae4b7b41] Updating instance_info_cache with network_info: [{"id": "5fcd6d6f-381e-473c-a8b8-40f2f0ec3276", "address": "fa:16:3e:ca:6a:0f", "network": {"id": "df66fb3a-d33e-4ba9-a3b0-cea73cd95302", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-944542718-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f1e17b34cbef497985ff79e77d5b0f89", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dd7d0d95-6848-4e69-ac21-75f8db82a3b5", "external-id": "nsx-vlan-transportzone-272", "segmentation_id": 272, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5fcd6d6f-38", "ovs_interfaceid": "5fcd6d6f-381e-473c-a8b8-40f2f0ec3276", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1750.837582] env[62519]: DEBUG nova.compute.manager [None req-db7eaaae-d24f-47e5-8a15-07d26fb6316e tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] [instance: dd60afd6-2834-4fca-a846-e39d57aabd60] Checking state {{(pid=62519) _get_power_state /opt/stack/nova/nova/compute/manager.py:1799}} [ 1750.845260] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58c9f27c-201c-4a79-b72f-7211603fb7b8 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1750.876682] env[62519]: DEBUG oslo_vmware.api [None req-660b8793-017b-4efd-8bfd-c060a9ba6127 tempest-ServerRescueTestJSONUnderV235-415838788 tempest-ServerRescueTestJSONUnderV235-415838788-project-member] Task: {'id': task-1802854, 'name': ReconfigVM_Task, 'duration_secs': 0.18402} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1750.876682] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-660b8793-017b-4efd-8bfd-c060a9ba6127 tempest-ServerRescueTestJSONUnderV235-415838788 tempest-ServerRescueTestJSONUnderV235-415838788-project-member] [instance: 156ed02a-3365-4a4f-b4de-ea86920d3baf] Powering on the VM {{(pid=62519) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1750.877234] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-d9922d35-2da1-484b-af69-32001a1b0e8d {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1750.887035] env[62519]: DEBUG oslo_vmware.api [None req-660b8793-017b-4efd-8bfd-c060a9ba6127 tempest-ServerRescueTestJSONUnderV235-415838788 tempest-ServerRescueTestJSONUnderV235-415838788-project-member] Waiting for the task: (returnval){ [ 1750.887035] env[62519]: value = "task-1802855" [ 1750.887035] env[62519]: _type = "Task" [ 1750.887035] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1750.898824] env[62519]: DEBUG oslo_vmware.api [None req-660b8793-017b-4efd-8bfd-c060a9ba6127 tempest-ServerRescueTestJSONUnderV235-415838788 tempest-ServerRescueTestJSONUnderV235-415838788-project-member] Task: {'id': task-1802855, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1751.063322] env[62519]: DEBUG nova.compute.manager [None req-51df1d83-d7cf-4798-abd3-f19fe8d9b526 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] [instance: af422ca1-7966-4bed-97bf-2b4c5285eaab] Start spawning the instance on the hypervisor. 
{{(pid=62519) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2647}} [ 1751.091914] env[62519]: DEBUG nova.virt.hardware [None req-51df1d83-d7cf-4798-abd3-f19fe8d9b526 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T07:56:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T07:55:55Z,direct_url=,disk_format='vmdk',id=15793716-f1d9-4a86-9030-717adf498693,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4069337684d348e0a1ab4eb6a1a2b14d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T07:55:56Z,virtual_size=,visibility=), allow threads: False {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1751.092210] env[62519]: DEBUG nova.virt.hardware [None req-51df1d83-d7cf-4798-abd3-f19fe8d9b526 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] Flavor limits 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1751.092553] env[62519]: DEBUG nova.virt.hardware [None req-51df1d83-d7cf-4798-abd3-f19fe8d9b526 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] Image limits 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1751.092629] env[62519]: DEBUG nova.virt.hardware [None req-51df1d83-d7cf-4798-abd3-f19fe8d9b526 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] Flavor pref 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1751.092821] env[62519]: DEBUG nova.virt.hardware [None req-51df1d83-d7cf-4798-abd3-f19fe8d9b526 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] Image pref 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1751.092974] env[62519]: DEBUG nova.virt.hardware [None req-51df1d83-d7cf-4798-abd3-f19fe8d9b526 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1751.093258] env[62519]: DEBUG nova.virt.hardware [None req-51df1d83-d7cf-4798-abd3-f19fe8d9b526 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1751.093450] env[62519]: DEBUG nova.virt.hardware [None req-51df1d83-d7cf-4798-abd3-f19fe8d9b526 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62519) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1751.093652] env[62519]: DEBUG nova.virt.hardware [None 
req-51df1d83-d7cf-4798-abd3-f19fe8d9b526 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] Got 1 possible topologies {{(pid=62519) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1751.093849] env[62519]: DEBUG nova.virt.hardware [None req-51df1d83-d7cf-4798-abd3-f19fe8d9b526 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1751.094069] env[62519]: DEBUG nova.virt.hardware [None req-51df1d83-d7cf-4798-abd3-f19fe8d9b526 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1751.095048] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eae1e867-a02d-492a-8808-e3c79492484d {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1751.107883] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5199e074-ad60-4ec8-ba51-978bb65ca4ca {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1751.140423] env[62519]: DEBUG oslo_concurrency.lockutils [None req-c9946012-073d-42c5-b066-93d94ca91d4d tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.115s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1751.146463] env[62519]: DEBUG oslo_concurrency.lockutils [None req-84dc4058-6eb4-4380-b9f1-346dccf99218 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 16.157s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1751.147867] env[62519]: INFO nova.compute.claims [None req-84dc4058-6eb4-4380-b9f1-346dccf99218 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] [instance: ee4b10ba-1c56-47cf-a528-d6e65c286ddb] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1751.162216] env[62519]: INFO nova.scheduler.client.report [None req-c9946012-073d-42c5-b066-93d94ca91d4d tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Deleted allocations for instance 34d2991e-b6df-473d-8994-e45ff57ef131 [ 1751.168726] env[62519]: DEBUG oslo_concurrency.lockutils [None req-51df1d83-d7cf-4798-abd3-f19fe8d9b526 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] Releasing lock "refresh_cache-76786353-f93f-4e7e-b3f7-7f22ae4b7b41" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1751.169043] env[62519]: DEBUG nova.compute.manager [None req-51df1d83-d7cf-4798-abd3-f19fe8d9b526 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] 
[instance: 76786353-f93f-4e7e-b3f7-7f22ae4b7b41] Instance network_info: |[{"id": "5fcd6d6f-381e-473c-a8b8-40f2f0ec3276", "address": "fa:16:3e:ca:6a:0f", "network": {"id": "df66fb3a-d33e-4ba9-a3b0-cea73cd95302", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-944542718-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f1e17b34cbef497985ff79e77d5b0f89", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dd7d0d95-6848-4e69-ac21-75f8db82a3b5", "external-id": "nsx-vlan-transportzone-272", "segmentation_id": 272, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5fcd6d6f-38", "ovs_interfaceid": "5fcd6d6f-381e-473c-a8b8-40f2f0ec3276", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62519) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2004}} [ 1751.169512] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-51df1d83-d7cf-4798-abd3-f19fe8d9b526 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] [instance: 76786353-f93f-4e7e-b3f7-7f22ae4b7b41] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ca:6a:0f', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'dd7d0d95-6848-4e69-ac21-75f8db82a3b5', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '5fcd6d6f-381e-473c-a8b8-40f2f0ec3276', 'vif_model': 'vmxnet3'}] {{(pid=62519) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1751.179833] env[62519]: DEBUG oslo.service.loopingcall [None req-51df1d83-d7cf-4798-abd3-f19fe8d9b526 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62519) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1751.180526] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 76786353-f93f-4e7e-b3f7-7f22ae4b7b41] Creating VM on the ESX host {{(pid=62519) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1751.181589] env[62519]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d4a0f2d8-2290-449f-aa51-7951cd79976c {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1751.206580] env[62519]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1751.206580] env[62519]: value = "task-1802856" [ 1751.206580] env[62519]: _type = "Task" [ 1751.206580] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1751.221110] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1802856, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1751.234096] env[62519]: DEBUG nova.compute.manager [req-fb4bf245-0a20-4251-90ab-3eb80e36aea1 req-6cd6b660-a4db-40b4-8381-14744ce8fccf service nova] [instance: 76786353-f93f-4e7e-b3f7-7f22ae4b7b41] Received event network-changed-5fcd6d6f-381e-473c-a8b8-40f2f0ec3276 {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1751.234236] env[62519]: DEBUG nova.compute.manager [req-fb4bf245-0a20-4251-90ab-3eb80e36aea1 req-6cd6b660-a4db-40b4-8381-14744ce8fccf service nova] [instance: 76786353-f93f-4e7e-b3f7-7f22ae4b7b41] Refreshing instance network info cache due to event network-changed-5fcd6d6f-381e-473c-a8b8-40f2f0ec3276. {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11628}} [ 1751.234493] env[62519]: DEBUG oslo_concurrency.lockutils [req-fb4bf245-0a20-4251-90ab-3eb80e36aea1 req-6cd6b660-a4db-40b4-8381-14744ce8fccf service nova] Acquiring lock "refresh_cache-76786353-f93f-4e7e-b3f7-7f22ae4b7b41" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1751.234633] env[62519]: DEBUG oslo_concurrency.lockutils [req-fb4bf245-0a20-4251-90ab-3eb80e36aea1 req-6cd6b660-a4db-40b4-8381-14744ce8fccf service nova] Acquired lock "refresh_cache-76786353-f93f-4e7e-b3f7-7f22ae4b7b41" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1751.234896] env[62519]: DEBUG nova.network.neutron [req-fb4bf245-0a20-4251-90ab-3eb80e36aea1 req-6cd6b660-a4db-40b4-8381-14744ce8fccf service nova] [instance: 76786353-f93f-4e7e-b3f7-7f22ae4b7b41] Refreshing network info cache for port 5fcd6d6f-381e-473c-a8b8-40f2f0ec3276 {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1751.273741] env[62519]: DEBUG oslo_concurrency.lockutils [None req-8710f12d-0478-454f-8c54-66bd5d3dd37c tempest-ServerPasswordTestJSON-1830489623 tempest-ServerPasswordTestJSON-1830489623-project-member] Acquiring lock "4e7db12e-c7f7-4d2a-b797-1371fc839a9e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1751.273994] env[62519]: DEBUG oslo_concurrency.lockutils [None req-8710f12d-0478-454f-8c54-66bd5d3dd37c tempest-ServerPasswordTestJSON-1830489623 tempest-ServerPasswordTestJSON-1830489623-project-member] Lock "4e7db12e-c7f7-4d2a-b797-1371fc839a9e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1751.362495] env[62519]: INFO nova.compute.manager [None req-db7eaaae-d24f-47e5-8a15-07d26fb6316e tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] [instance: dd60afd6-2834-4fca-a846-e39d57aabd60] instance snapshotting [ 1751.365747] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-227ff44c-2aef-4b58-8baa-fbdd3d2da35c {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1751.387500] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b1b8148-1bd7-4540-8036-e5aee298b2d4 {{(pid=62519) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1751.398910] env[62519]: DEBUG oslo_vmware.api [None req-660b8793-017b-4efd-8bfd-c060a9ba6127 tempest-ServerRescueTestJSONUnderV235-415838788 tempest-ServerRescueTestJSONUnderV235-415838788-project-member] Task: {'id': task-1802855, 'name': PowerOnVM_Task, 'duration_secs': 0.445374} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1751.400893] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-660b8793-017b-4efd-8bfd-c060a9ba6127 tempest-ServerRescueTestJSONUnderV235-415838788 tempest-ServerRescueTestJSONUnderV235-415838788-project-member] [instance: 156ed02a-3365-4a4f-b4de-ea86920d3baf] Powered on the VM {{(pid=62519) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1751.405407] env[62519]: DEBUG nova.compute.manager [None req-660b8793-017b-4efd-8bfd-c060a9ba6127 tempest-ServerRescueTestJSONUnderV235-415838788 tempest-ServerRescueTestJSONUnderV235-415838788-project-member] [instance: 156ed02a-3365-4a4f-b4de-ea86920d3baf] Checking state {{(pid=62519) _get_power_state /opt/stack/nova/nova/compute/manager.py:1799}} [ 1751.408563] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-188d893b-22b5-4bb9-86cf-16c2890a1172 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1751.602914] env[62519]: DEBUG oslo_concurrency.lockutils [None req-56c7c5eb-0a55-489d-8cd1-41c009141c0e tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Acquiring lock "c8816718-0a35-4474-b162-c619b0acc154" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1751.673611] env[62519]: DEBUG oslo_concurrency.lockutils [None req-c9946012-073d-42c5-b066-93d94ca91d4d tempest-ServersAdminTestJSON-710761587 tempest-ServersAdminTestJSON-710761587-project-member] Lock "34d2991e-b6df-473d-8994-e45ff57ef131" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 23.181s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1751.723287] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1802856, 'name': CreateVM_Task} progress is 25%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1751.728689] env[62519]: DEBUG oslo_concurrency.lockutils [None req-86f29903-4db3-4b91-af52-9fc65de92246 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Lock "c8816718-0a35-4474-b162-c619b0acc154" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 26.576s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1751.728810] env[62519]: DEBUG oslo_concurrency.lockutils [None req-56c7c5eb-0a55-489d-8cd1-41c009141c0e tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Lock "c8816718-0a35-4474-b162-c619b0acc154" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.126s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1751.728998] env[62519]: DEBUG nova.compute.manager [None req-56c7c5eb-0a55-489d-8cd1-41c009141c0e tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] [instance: c8816718-0a35-4474-b162-c619b0acc154] Checking state {{(pid=62519) _get_power_state /opt/stack/nova/nova/compute/manager.py:1799}} [ 1751.729865] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea7bc1f9-9de4-4b59-aabc-a68f0655833f {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1751.744071] env[62519]: DEBUG nova.compute.manager [None req-56c7c5eb-0a55-489d-8cd1-41c009141c0e tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] [instance: c8816718-0a35-4474-b162-c619b0acc154] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=62519) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3408}} [ 1751.744605] env[62519]: DEBUG nova.objects.instance [None req-56c7c5eb-0a55-489d-8cd1-41c009141c0e tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Lazy-loading 'flavor' on Instance uuid c8816718-0a35-4474-b162-c619b0acc154 {{(pid=62519) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1751.760045] env[62519]: DEBUG oslo_concurrency.lockutils [None req-c39ff447-939e-4290-a599-7c5d8a75bafa tempest-ServersTestBootFromVolume-845595414 tempest-ServersTestBootFromVolume-845595414-project-member] Acquiring lock "765cf18e-53a0-4cc6-ad0e-337a6f68915c" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1751.760045] env[62519]: DEBUG oslo_concurrency.lockutils [None req-c39ff447-939e-4290-a599-7c5d8a75bafa tempest-ServersTestBootFromVolume-845595414 tempest-ServersTestBootFromVolume-845595414-project-member] Lock "765cf18e-53a0-4cc6-ad0e-337a6f68915c" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1751.760045] env[62519]: DEBUG oslo_concurrency.lockutils [None req-c39ff447-939e-4290-a599-7c5d8a75bafa tempest-ServersTestBootFromVolume-845595414 
tempest-ServersTestBootFromVolume-845595414-project-member] Acquiring lock "765cf18e-53a0-4cc6-ad0e-337a6f68915c-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1751.760045] env[62519]: DEBUG oslo_concurrency.lockutils [None req-c39ff447-939e-4290-a599-7c5d8a75bafa tempest-ServersTestBootFromVolume-845595414 tempest-ServersTestBootFromVolume-845595414-project-member] Lock "765cf18e-53a0-4cc6-ad0e-337a6f68915c-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1751.760045] env[62519]: DEBUG oslo_concurrency.lockutils [None req-c39ff447-939e-4290-a599-7c5d8a75bafa tempest-ServersTestBootFromVolume-845595414 tempest-ServersTestBootFromVolume-845595414-project-member] Lock "765cf18e-53a0-4cc6-ad0e-337a6f68915c-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1751.760498] env[62519]: INFO nova.compute.manager [None req-c39ff447-939e-4290-a599-7c5d8a75bafa tempest-ServersTestBootFromVolume-845595414 tempest-ServersTestBootFromVolume-845595414-project-member] [instance: 765cf18e-53a0-4cc6-ad0e-337a6f68915c] Terminating instance [ 1751.776548] env[62519]: DEBUG nova.compute.manager [None req-8710f12d-0478-454f-8c54-66bd5d3dd37c tempest-ServerPasswordTestJSON-1830489623 tempest-ServerPasswordTestJSON-1830489623-project-member] [instance: 4e7db12e-c7f7-4d2a-b797-1371fc839a9e] Starting instance... {{(pid=62519) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2441}} [ 1751.910960] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-db7eaaae-d24f-47e5-8a15-07d26fb6316e tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] [instance: dd60afd6-2834-4fca-a846-e39d57aabd60] Creating Snapshot of the VM instance {{(pid=62519) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1751.911307] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-3b78fd36-6f6b-4f2d-b66b-c3bd5ebce48a {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1751.921448] env[62519]: DEBUG oslo_vmware.api [None req-db7eaaae-d24f-47e5-8a15-07d26fb6316e tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Waiting for the task: (returnval){ [ 1751.921448] env[62519]: value = "task-1802857" [ 1751.921448] env[62519]: _type = "Task" [ 1751.921448] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1751.944852] env[62519]: DEBUG oslo_vmware.api [None req-db7eaaae-d24f-47e5-8a15-07d26fb6316e tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Task: {'id': task-1802857, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1752.027997] env[62519]: DEBUG nova.network.neutron [None req-51df1d83-d7cf-4798-abd3-f19fe8d9b526 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] [instance: af422ca1-7966-4bed-97bf-2b4c5285eaab] Successfully updated port: fc6b82fe-716c-40e8-b027-ffa8ee2ea408 {{(pid=62519) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1752.091669] env[62519]: DEBUG nova.network.neutron [req-fb4bf245-0a20-4251-90ab-3eb80e36aea1 req-6cd6b660-a4db-40b4-8381-14744ce8fccf service nova] [instance: 76786353-f93f-4e7e-b3f7-7f22ae4b7b41] Updated VIF entry in instance network info cache for port 5fcd6d6f-381e-473c-a8b8-40f2f0ec3276. {{(pid=62519) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1752.092110] env[62519]: DEBUG nova.network.neutron [req-fb4bf245-0a20-4251-90ab-3eb80e36aea1 req-6cd6b660-a4db-40b4-8381-14744ce8fccf service nova] [instance: 76786353-f93f-4e7e-b3f7-7f22ae4b7b41] Updating instance_info_cache with network_info: [{"id": "5fcd6d6f-381e-473c-a8b8-40f2f0ec3276", "address": "fa:16:3e:ca:6a:0f", "network": {"id": "df66fb3a-d33e-4ba9-a3b0-cea73cd95302", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-944542718-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f1e17b34cbef497985ff79e77d5b0f89", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dd7d0d95-6848-4e69-ac21-75f8db82a3b5", "external-id": "nsx-vlan-transportzone-272", "segmentation_id": 272, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5fcd6d6f-38", "ovs_interfaceid": "5fcd6d6f-381e-473c-a8b8-40f2f0ec3276", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1752.218342] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1802856, 'name': CreateVM_Task, 'duration_secs': 0.788284} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1752.220910] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 76786353-f93f-4e7e-b3f7-7f22ae4b7b41] Created VM on the ESX host {{(pid=62519) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1752.221835] env[62519]: DEBUG oslo_concurrency.lockutils [None req-51df1d83-d7cf-4798-abd3-f19fe8d9b526 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1752.222015] env[62519]: DEBUG oslo_concurrency.lockutils [None req-51df1d83-d7cf-4798-abd3-f19fe8d9b526 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] Acquired lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1752.222379] env[62519]: DEBUG oslo_concurrency.lockutils [None req-51df1d83-d7cf-4798-abd3-f19fe8d9b526 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1752.222651] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-12d5ade9-d0ae-444f-91c0-7889a2fb7e8d {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1752.235308] env[62519]: DEBUG oslo_vmware.api [None req-51df1d83-d7cf-4798-abd3-f19fe8d9b526 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] Waiting for the task: (returnval){ [ 1752.235308] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]52be7c1e-dafb-9eab-9822-3f8b4bec49b5" [ 1752.235308] env[62519]: _type = "Task" [ 1752.235308] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1752.244767] env[62519]: DEBUG oslo_vmware.api [None req-51df1d83-d7cf-4798-abd3-f19fe8d9b526 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52be7c1e-dafb-9eab-9822-3f8b4bec49b5, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1752.247313] env[62519]: DEBUG oslo_concurrency.lockutils [None req-3fcdc708-adeb-4150-ac24-b9b0d07e602e tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Acquiring lock "88f9351c-253b-49dd-a88e-b8585ea742ac" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1752.247559] env[62519]: DEBUG oslo_concurrency.lockutils [None req-3fcdc708-adeb-4150-ac24-b9b0d07e602e tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Lock "88f9351c-253b-49dd-a88e-b8585ea742ac" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1752.269851] env[62519]: DEBUG nova.compute.manager [None req-c39ff447-939e-4290-a599-7c5d8a75bafa tempest-ServersTestBootFromVolume-845595414 tempest-ServersTestBootFromVolume-845595414-project-member] [instance: 765cf18e-53a0-4cc6-ad0e-337a6f68915c] Start destroying the instance on the hypervisor. {{(pid=62519) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3166}} [ 1752.270205] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-c39ff447-939e-4290-a599-7c5d8a75bafa tempest-ServersTestBootFromVolume-845595414 tempest-ServersTestBootFromVolume-845595414-project-member] [instance: 765cf18e-53a0-4cc6-ad0e-337a6f68915c] Powering off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1752.270487] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-0968663f-b67f-4adf-bf7f-7ee520c554ef {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1752.277991] env[62519]: DEBUG oslo_vmware.api [None req-c39ff447-939e-4290-a599-7c5d8a75bafa tempest-ServersTestBootFromVolume-845595414 tempest-ServersTestBootFromVolume-845595414-project-member] Waiting for the task: (returnval){ [ 1752.277991] env[62519]: value = "task-1802859" [ 1752.277991] env[62519]: _type = "Task" [ 1752.277991] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1752.297758] env[62519]: DEBUG oslo_vmware.api [None req-c39ff447-939e-4290-a599-7c5d8a75bafa tempest-ServersTestBootFromVolume-845595414 tempest-ServersTestBootFromVolume-845595414-project-member] Task: {'id': task-1802859, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1752.305943] env[62519]: DEBUG oslo_concurrency.lockutils [None req-8710f12d-0478-454f-8c54-66bd5d3dd37c tempest-ServerPasswordTestJSON-1830489623 tempest-ServerPasswordTestJSON-1830489623-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1752.434879] env[62519]: DEBUG oslo_vmware.api [None req-db7eaaae-d24f-47e5-8a15-07d26fb6316e tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Task: {'id': task-1802857, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1752.534874] env[62519]: DEBUG oslo_concurrency.lockutils [None req-51df1d83-d7cf-4798-abd3-f19fe8d9b526 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] Acquiring lock "refresh_cache-af422ca1-7966-4bed-97bf-2b4c5285eaab" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1752.534874] env[62519]: DEBUG oslo_concurrency.lockutils [None req-51df1d83-d7cf-4798-abd3-f19fe8d9b526 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] Acquired lock "refresh_cache-af422ca1-7966-4bed-97bf-2b4c5285eaab" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1752.534874] env[62519]: DEBUG nova.network.neutron [None req-51df1d83-d7cf-4798-abd3-f19fe8d9b526 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] [instance: af422ca1-7966-4bed-97bf-2b4c5285eaab] Building network info cache for instance {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1752.563055] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c23b5067-f942-42a8-968f-1a24b4154a96 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1752.571216] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d559d7b0-c836-434c-b66d-7b67de3f03d8 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1752.612301] env[62519]: DEBUG oslo_concurrency.lockutils [req-fb4bf245-0a20-4251-90ab-3eb80e36aea1 req-6cd6b660-a4db-40b4-8381-14744ce8fccf service nova] Releasing lock "refresh_cache-76786353-f93f-4e7e-b3f7-7f22ae4b7b41" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1752.614775] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31b25553-86e8-45df-ab92-2ca49464a2d8 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1752.624485] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ced1a288-ae8f-403d-bfd3-adcd60050755 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1752.640951] env[62519]: DEBUG nova.compute.provider_tree [None req-84dc4058-6eb4-4380-b9f1-346dccf99218 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Inventory has not changed in ProviderTree for provider: f8ca0d98-9158-4b85-ae0e-b106f966dd44 {{(pid=62519) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1752.744148] env[62519]: DEBUG oslo_vmware.api [None req-51df1d83-d7cf-4798-abd3-f19fe8d9b526 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52be7c1e-dafb-9eab-9822-3f8b4bec49b5, 'name': SearchDatastore_Task, 'duration_secs': 0.011979} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1752.744440] env[62519]: DEBUG oslo_concurrency.lockutils [None req-51df1d83-d7cf-4798-abd3-f19fe8d9b526 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] Releasing lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1752.744682] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-51df1d83-d7cf-4798-abd3-f19fe8d9b526 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] [instance: 76786353-f93f-4e7e-b3f7-7f22ae4b7b41] Processing image 15793716-f1d9-4a86-9030-717adf498693 {{(pid=62519) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1752.744920] env[62519]: DEBUG oslo_concurrency.lockutils [None req-51df1d83-d7cf-4798-abd3-f19fe8d9b526 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1752.745078] env[62519]: DEBUG oslo_concurrency.lockutils [None req-51df1d83-d7cf-4798-abd3-f19fe8d9b526 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] Acquired lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1752.745265] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-51df1d83-d7cf-4798-abd3-f19fe8d9b526 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62519) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1752.745580] env[62519]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-411a43f0-c76e-4f07-a42d-0113bcf1c919 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1752.750832] env[62519]: DEBUG nova.compute.utils [None req-3fcdc708-adeb-4150-ac24-b9b0d07e602e tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Using /dev/sd instead of None {{(pid=62519) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1752.754904] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-56c7c5eb-0a55-489d-8cd1-41c009141c0e tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] [instance: c8816718-0a35-4474-b162-c619b0acc154] Powering off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1752.755192] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-546f6702-0728-4cb1-8b42-7bba416ff61d {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1752.757999] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-51df1d83-d7cf-4798-abd3-f19fe8d9b526 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] Created directory 
with path [datastore1] devstack-image-cache_base {{(pid=62519) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1752.758209] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-51df1d83-d7cf-4798-abd3-f19fe8d9b526 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62519) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1752.759274] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c01b2f46-9394-4b0d-bb3b-f68bbcd9177e {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1752.762431] env[62519]: DEBUG oslo_service.periodic_task [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62519) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1752.762630] env[62519]: DEBUG oslo_service.periodic_task [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62519) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1752.762787] env[62519]: DEBUG nova.compute.manager [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Starting heal instance info cache {{(pid=62519) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10418}} [ 1752.768400] env[62519]: DEBUG oslo_vmware.api [None req-56c7c5eb-0a55-489d-8cd1-41c009141c0e tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Waiting for the task: (returnval){ [ 1752.768400] env[62519]: value = "task-1802860" [ 1752.768400] env[62519]: _type = "Task" [ 1752.768400] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1752.770351] env[62519]: DEBUG oslo_vmware.api [None req-51df1d83-d7cf-4798-abd3-f19fe8d9b526 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] Waiting for the task: (returnval){ [ 1752.770351] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]52c32aff-db2b-ae7e-08b4-bfe84cefd934" [ 1752.770351] env[62519]: _type = "Task" [ 1752.770351] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1752.778712] env[62519]: DEBUG oslo_vmware.api [None req-56c7c5eb-0a55-489d-8cd1-41c009141c0e tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Task: {'id': task-1802860, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1752.788771] env[62519]: DEBUG oslo_vmware.api [None req-51df1d83-d7cf-4798-abd3-f19fe8d9b526 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52c32aff-db2b-ae7e-08b4-bfe84cefd934, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1752.794358] env[62519]: DEBUG oslo_vmware.api [None req-c39ff447-939e-4290-a599-7c5d8a75bafa tempest-ServersTestBootFromVolume-845595414 tempest-ServersTestBootFromVolume-845595414-project-member] Task: {'id': task-1802859, 'name': PowerOffVM_Task, 'duration_secs': 0.309656} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1752.794682] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-c39ff447-939e-4290-a599-7c5d8a75bafa tempest-ServersTestBootFromVolume-845595414 tempest-ServersTestBootFromVolume-845595414-project-member] [instance: 765cf18e-53a0-4cc6-ad0e-337a6f68915c] Powered off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1752.794908] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-c39ff447-939e-4290-a599-7c5d8a75bafa tempest-ServersTestBootFromVolume-845595414 tempest-ServersTestBootFromVolume-845595414-project-member] [instance: 765cf18e-53a0-4cc6-ad0e-337a6f68915c] Volume detach. Driver type: vmdk {{(pid=62519) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1752.795133] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-c39ff447-939e-4290-a599-7c5d8a75bafa tempest-ServersTestBootFromVolume-845595414 tempest-ServersTestBootFromVolume-845595414-project-member] [instance: 765cf18e-53a0-4cc6-ad0e-337a6f68915c] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-373638', 'volume_id': '9692c2cd-3d11-41b6-88ba-eae1158b8c33', 'name': 'volume-9692c2cd-3d11-41b6-88ba-eae1158b8c33', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '765cf18e-53a0-4cc6-ad0e-337a6f68915c', 'attached_at': '', 'detached_at': '', 'volume_id': '9692c2cd-3d11-41b6-88ba-eae1158b8c33', 'serial': '9692c2cd-3d11-41b6-88ba-eae1158b8c33'} {{(pid=62519) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1752.795920] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66e64165-a9f3-4b0a-803d-bebbe76e36cf {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1752.817059] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca01a944-138b-44c9-b8eb-a959def8798a {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1752.823941] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06ce5e1a-c7a1-42ce-a447-c77bf6f38466 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1752.847025] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9eceb4a4-8eac-47d2-bf55-4b18e038d5ea {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1752.863660] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-c39ff447-939e-4290-a599-7c5d8a75bafa tempest-ServersTestBootFromVolume-845595414 tempest-ServersTestBootFromVolume-845595414-project-member] The volume has not been displaced from its original location: [datastore1] 
volume-9692c2cd-3d11-41b6-88ba-eae1158b8c33/volume-9692c2cd-3d11-41b6-88ba-eae1158b8c33.vmdk. No consolidation needed. {{(pid=62519) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1752.869359] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-c39ff447-939e-4290-a599-7c5d8a75bafa tempest-ServersTestBootFromVolume-845595414 tempest-ServersTestBootFromVolume-845595414-project-member] [instance: 765cf18e-53a0-4cc6-ad0e-337a6f68915c] Reconfiguring VM instance instance-0000002e to detach disk 2000 {{(pid=62519) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1752.873092] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-06ee6374-df3f-49ae-b51e-fdc8b1ebb547 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1752.890419] env[62519]: DEBUG oslo_vmware.api [None req-c39ff447-939e-4290-a599-7c5d8a75bafa tempest-ServersTestBootFromVolume-845595414 tempest-ServersTestBootFromVolume-845595414-project-member] Waiting for the task: (returnval){ [ 1752.890419] env[62519]: value = "task-1802861" [ 1752.890419] env[62519]: _type = "Task" [ 1752.890419] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1752.902780] env[62519]: DEBUG oslo_vmware.api [None req-c39ff447-939e-4290-a599-7c5d8a75bafa tempest-ServersTestBootFromVolume-845595414 tempest-ServersTestBootFromVolume-845595414-project-member] Task: {'id': task-1802861, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1752.934465] env[62519]: DEBUG oslo_vmware.api [None req-db7eaaae-d24f-47e5-8a15-07d26fb6316e tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Task: {'id': task-1802857, 'name': CreateSnapshot_Task, 'duration_secs': 0.669055} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1752.934985] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-db7eaaae-d24f-47e5-8a15-07d26fb6316e tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] [instance: dd60afd6-2834-4fca-a846-e39d57aabd60] Created Snapshot of the VM instance {{(pid=62519) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1752.935846] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9c14ad9-cef1-4dc4-b23d-d8dbb661fd2a {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1753.079147] env[62519]: DEBUG nova.network.neutron [None req-51df1d83-d7cf-4798-abd3-f19fe8d9b526 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] [instance: af422ca1-7966-4bed-97bf-2b4c5285eaab] Instance cache missing network info. 
{{(pid=62519) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1753.148131] env[62519]: DEBUG nova.scheduler.client.report [None req-84dc4058-6eb4-4380-b9f1-346dccf99218 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Inventory has not changed for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1753.253780] env[62519]: DEBUG oslo_concurrency.lockutils [None req-3fcdc708-adeb-4150-ac24-b9b0d07e602e tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Lock "88f9351c-253b-49dd-a88e-b8585ea742ac" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.006s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1753.291170] env[62519]: DEBUG oslo_vmware.api [None req-56c7c5eb-0a55-489d-8cd1-41c009141c0e tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Task: {'id': task-1802860, 'name': PowerOffVM_Task, 'duration_secs': 0.192075} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1753.292243] env[62519]: DEBUG oslo_vmware.api [None req-51df1d83-d7cf-4798-abd3-f19fe8d9b526 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52c32aff-db2b-ae7e-08b4-bfe84cefd934, 'name': SearchDatastore_Task, 'duration_secs': 0.017119} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1753.292494] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-56c7c5eb-0a55-489d-8cd1-41c009141c0e tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] [instance: c8816718-0a35-4474-b162-c619b0acc154] Powered off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1753.292689] env[62519]: DEBUG nova.compute.manager [None req-56c7c5eb-0a55-489d-8cd1-41c009141c0e tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] [instance: c8816718-0a35-4474-b162-c619b0acc154] Checking state {{(pid=62519) _get_power_state /opt/stack/nova/nova/compute/manager.py:1799}} [ 1753.293919] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aef66b13-cd26-494b-abed-8977d7f4ac16 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1753.296535] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-87008a8a-7575-4e3e-913e-79140774f9d3 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1753.303315] env[62519]: DEBUG oslo_vmware.api [None req-51df1d83-d7cf-4798-abd3-f19fe8d9b526 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] Waiting for the task: (returnval){ [ 1753.303315] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]5216658e-810f-9ea7-af84-9d0b3fb5a434" [ 1753.303315] env[62519]: _type = "Task" [ 1753.303315] env[62519]: } to complete. 
{{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1753.308564] env[62519]: DEBUG nova.network.neutron [None req-51df1d83-d7cf-4798-abd3-f19fe8d9b526 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] [instance: af422ca1-7966-4bed-97bf-2b4c5285eaab] Updating instance_info_cache with network_info: [{"id": "fc6b82fe-716c-40e8-b027-ffa8ee2ea408", "address": "fa:16:3e:a4:f6:94", "network": {"id": "df66fb3a-d33e-4ba9-a3b0-cea73cd95302", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-944542718-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f1e17b34cbef497985ff79e77d5b0f89", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dd7d0d95-6848-4e69-ac21-75f8db82a3b5", "external-id": "nsx-vlan-transportzone-272", "segmentation_id": 272, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfc6b82fe-71", "ovs_interfaceid": "fc6b82fe-716c-40e8-b027-ffa8ee2ea408", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1753.323500] env[62519]: DEBUG oslo_vmware.api [None req-51df1d83-d7cf-4798-abd3-f19fe8d9b526 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]5216658e-810f-9ea7-af84-9d0b3fb5a434, 'name': SearchDatastore_Task, 'duration_secs': 0.010635} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1753.323815] env[62519]: DEBUG oslo_concurrency.lockutils [None req-51df1d83-d7cf-4798-abd3-f19fe8d9b526 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] Releasing lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1753.324830] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-51df1d83-d7cf-4798-abd3-f19fe8d9b526 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk to [datastore1] 76786353-f93f-4e7e-b3f7-7f22ae4b7b41/76786353-f93f-4e7e-b3f7-7f22ae4b7b41.vmdk {{(pid=62519) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1753.325426] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-aa028d08-49d6-4e28-95cb-f70913b60e9f {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1753.336792] env[62519]: DEBUG oslo_vmware.api [None req-51df1d83-d7cf-4798-abd3-f19fe8d9b526 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] Waiting for the task: (returnval){ [ 1753.336792] env[62519]: value = "task-1802862" [ 1753.336792] env[62519]: _type = "Task" [ 1753.336792] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1753.347806] env[62519]: DEBUG oslo_vmware.api [None req-51df1d83-d7cf-4798-abd3-f19fe8d9b526 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] Task: {'id': task-1802862, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1753.402753] env[62519]: DEBUG oslo_vmware.api [None req-c39ff447-939e-4290-a599-7c5d8a75bafa tempest-ServersTestBootFromVolume-845595414 tempest-ServersTestBootFromVolume-845595414-project-member] Task: {'id': task-1802861, 'name': ReconfigVM_Task, 'duration_secs': 0.384182} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1753.403571] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-c39ff447-939e-4290-a599-7c5d8a75bafa tempest-ServersTestBootFromVolume-845595414 tempest-ServersTestBootFromVolume-845595414-project-member] [instance: 765cf18e-53a0-4cc6-ad0e-337a6f68915c] Reconfigured VM instance instance-0000002e to detach disk 2000 {{(pid=62519) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1753.408028] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4e18f9b7-792a-4962-82a1-2d7dc0c59126 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1753.424910] env[62519]: DEBUG oslo_vmware.api [None req-c39ff447-939e-4290-a599-7c5d8a75bafa tempest-ServersTestBootFromVolume-845595414 tempest-ServersTestBootFromVolume-845595414-project-member] Waiting for the task: (returnval){ [ 1753.424910] env[62519]: value = "task-1802863" [ 1753.424910] env[62519]: _type = "Task" [ 1753.424910] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1753.437112] env[62519]: DEBUG oslo_vmware.api [None req-c39ff447-939e-4290-a599-7c5d8a75bafa tempest-ServersTestBootFromVolume-845595414 tempest-ServersTestBootFromVolume-845595414-project-member] Task: {'id': task-1802863, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1753.464721] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-db7eaaae-d24f-47e5-8a15-07d26fb6316e tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] [instance: dd60afd6-2834-4fca-a846-e39d57aabd60] Creating linked-clone VM from snapshot {{(pid=62519) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1753.466305] env[62519]: DEBUG nova.compute.manager [req-11c9c0fb-3956-402c-a92e-082c20cdd1f7 req-4a9f1b7f-eb12-4fa9-9fbb-bc13cbd8ef9b service nova] [instance: af422ca1-7966-4bed-97bf-2b4c5285eaab] Received event network-vif-plugged-fc6b82fe-716c-40e8-b027-ffa8ee2ea408 {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1753.466629] env[62519]: DEBUG oslo_concurrency.lockutils [req-11c9c0fb-3956-402c-a92e-082c20cdd1f7 req-4a9f1b7f-eb12-4fa9-9fbb-bc13cbd8ef9b service nova] Acquiring lock "af422ca1-7966-4bed-97bf-2b4c5285eaab-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1753.466965] env[62519]: DEBUG oslo_concurrency.lockutils [req-11c9c0fb-3956-402c-a92e-082c20cdd1f7 req-4a9f1b7f-eb12-4fa9-9fbb-bc13cbd8ef9b service nova] Lock "af422ca1-7966-4bed-97bf-2b4c5285eaab-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1753.467424] env[62519]: DEBUG oslo_concurrency.lockutils [req-11c9c0fb-3956-402c-a92e-082c20cdd1f7 req-4a9f1b7f-eb12-4fa9-9fbb-bc13cbd8ef9b service nova] Lock "af422ca1-7966-4bed-97bf-2b4c5285eaab-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62519) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1753.467768] env[62519]: DEBUG nova.compute.manager [req-11c9c0fb-3956-402c-a92e-082c20cdd1f7 req-4a9f1b7f-eb12-4fa9-9fbb-bc13cbd8ef9b service nova] [instance: af422ca1-7966-4bed-97bf-2b4c5285eaab] No waiting events found dispatching network-vif-plugged-fc6b82fe-716c-40e8-b027-ffa8ee2ea408 {{(pid=62519) pop_instance_event /opt/stack/nova/nova/compute/manager.py:323}} [ 1753.468121] env[62519]: WARNING nova.compute.manager [req-11c9c0fb-3956-402c-a92e-082c20cdd1f7 req-4a9f1b7f-eb12-4fa9-9fbb-bc13cbd8ef9b service nova] [instance: af422ca1-7966-4bed-97bf-2b4c5285eaab] Received unexpected event network-vif-plugged-fc6b82fe-716c-40e8-b027-ffa8ee2ea408 for instance with vm_state building and task_state spawning. [ 1753.468557] env[62519]: DEBUG nova.compute.manager [req-11c9c0fb-3956-402c-a92e-082c20cdd1f7 req-4a9f1b7f-eb12-4fa9-9fbb-bc13cbd8ef9b service nova] [instance: af422ca1-7966-4bed-97bf-2b4c5285eaab] Received event network-changed-fc6b82fe-716c-40e8-b027-ffa8ee2ea408 {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1753.470261] env[62519]: DEBUG nova.compute.manager [req-11c9c0fb-3956-402c-a92e-082c20cdd1f7 req-4a9f1b7f-eb12-4fa9-9fbb-bc13cbd8ef9b service nova] [instance: af422ca1-7966-4bed-97bf-2b4c5285eaab] Refreshing instance network info cache due to event network-changed-fc6b82fe-716c-40e8-b027-ffa8ee2ea408. {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11628}} [ 1753.470261] env[62519]: DEBUG oslo_concurrency.lockutils [req-11c9c0fb-3956-402c-a92e-082c20cdd1f7 req-4a9f1b7f-eb12-4fa9-9fbb-bc13cbd8ef9b service nova] Acquiring lock "refresh_cache-af422ca1-7966-4bed-97bf-2b4c5285eaab" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1753.470261] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-c44390c5-e420-45ed-b79b-2f5fcddced84 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1753.483667] env[62519]: DEBUG oslo_vmware.api [None req-db7eaaae-d24f-47e5-8a15-07d26fb6316e tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Waiting for the task: (returnval){ [ 1753.483667] env[62519]: value = "task-1802864" [ 1753.483667] env[62519]: _type = "Task" [ 1753.483667] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1753.494898] env[62519]: DEBUG oslo_vmware.api [None req-db7eaaae-d24f-47e5-8a15-07d26fb6316e tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Task: {'id': task-1802864, 'name': CloneVM_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1753.650774] env[62519]: DEBUG oslo_concurrency.lockutils [None req-84dc4058-6eb4-4380-b9f1-346dccf99218 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.507s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1753.651835] env[62519]: DEBUG nova.compute.manager [None req-84dc4058-6eb4-4380-b9f1-346dccf99218 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] [instance: ee4b10ba-1c56-47cf-a528-d6e65c286ddb] Start building networks asynchronously for instance. {{(pid=62519) _build_resources /opt/stack/nova/nova/compute/manager.py:2838}} [ 1753.655036] env[62519]: DEBUG oslo_concurrency.lockutils [None req-12f12914-bee9-4598-82bd-c3b629c1401f tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 17.602s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1753.656700] env[62519]: INFO nova.compute.claims [None req-12f12914-bee9-4598-82bd-c3b629c1401f tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] [instance: 2ea8304e-5b91-4908-a876-6e2c780b1da9] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1753.816852] env[62519]: DEBUG oslo_concurrency.lockutils [None req-51df1d83-d7cf-4798-abd3-f19fe8d9b526 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] Releasing lock "refresh_cache-af422ca1-7966-4bed-97bf-2b4c5285eaab" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1753.817247] env[62519]: DEBUG nova.compute.manager [None req-51df1d83-d7cf-4798-abd3-f19fe8d9b526 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] [instance: af422ca1-7966-4bed-97bf-2b4c5285eaab] Instance network_info: |[{"id": "fc6b82fe-716c-40e8-b027-ffa8ee2ea408", "address": "fa:16:3e:a4:f6:94", "network": {"id": "df66fb3a-d33e-4ba9-a3b0-cea73cd95302", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-944542718-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f1e17b34cbef497985ff79e77d5b0f89", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dd7d0d95-6848-4e69-ac21-75f8db82a3b5", "external-id": "nsx-vlan-transportzone-272", "segmentation_id": 272, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfc6b82fe-71", "ovs_interfaceid": "fc6b82fe-716c-40e8-b027-ffa8ee2ea408", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62519) 
_allocate_network_async /opt/stack/nova/nova/compute/manager.py:2004}} [ 1753.818594] env[62519]: DEBUG oslo_concurrency.lockutils [req-11c9c0fb-3956-402c-a92e-082c20cdd1f7 req-4a9f1b7f-eb12-4fa9-9fbb-bc13cbd8ef9b service nova] Acquired lock "refresh_cache-af422ca1-7966-4bed-97bf-2b4c5285eaab" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1753.818594] env[62519]: DEBUG nova.network.neutron [req-11c9c0fb-3956-402c-a92e-082c20cdd1f7 req-4a9f1b7f-eb12-4fa9-9fbb-bc13cbd8ef9b service nova] [instance: af422ca1-7966-4bed-97bf-2b4c5285eaab] Refreshing network info cache for port fc6b82fe-716c-40e8-b027-ffa8ee2ea408 {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1753.818986] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-51df1d83-d7cf-4798-abd3-f19fe8d9b526 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] [instance: af422ca1-7966-4bed-97bf-2b4c5285eaab] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:a4:f6:94', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'dd7d0d95-6848-4e69-ac21-75f8db82a3b5', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'fc6b82fe-716c-40e8-b027-ffa8ee2ea408', 'vif_model': 'vmxnet3'}] {{(pid=62519) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1753.828188] env[62519]: DEBUG oslo.service.loopingcall [None req-51df1d83-d7cf-4798-abd3-f19fe8d9b526 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62519) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1753.828817] env[62519]: DEBUG oslo_concurrency.lockutils [None req-56c7c5eb-0a55-489d-8cd1-41c009141c0e tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Lock "c8816718-0a35-4474-b162-c619b0acc154" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 2.100s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1753.831632] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: af422ca1-7966-4bed-97bf-2b4c5285eaab] Creating VM on the ESX host {{(pid=62519) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1753.832173] env[62519]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-545a86da-141b-4081-a7ae-6d9f4ef6e4f6 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1753.877805] env[62519]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1753.877805] env[62519]: value = "task-1802865" [ 1753.877805] env[62519]: _type = "Task" [ 1753.877805] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1753.877805] env[62519]: DEBUG oslo_vmware.api [None req-51df1d83-d7cf-4798-abd3-f19fe8d9b526 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] Task: {'id': task-1802862, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1753.890392] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1802865, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1753.936474] env[62519]: DEBUG oslo_vmware.api [None req-c39ff447-939e-4290-a599-7c5d8a75bafa tempest-ServersTestBootFromVolume-845595414 tempest-ServersTestBootFromVolume-845595414-project-member] Task: {'id': task-1802863, 'name': ReconfigVM_Task, 'duration_secs': 0.17031} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1753.936800] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-c39ff447-939e-4290-a599-7c5d8a75bafa tempest-ServersTestBootFromVolume-845595414 tempest-ServersTestBootFromVolume-845595414-project-member] [instance: 765cf18e-53a0-4cc6-ad0e-337a6f68915c] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-373638', 'volume_id': '9692c2cd-3d11-41b6-88ba-eae1158b8c33', 'name': 'volume-9692c2cd-3d11-41b6-88ba-eae1158b8c33', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '765cf18e-53a0-4cc6-ad0e-337a6f68915c', 'attached_at': '', 'detached_at': '', 'volume_id': '9692c2cd-3d11-41b6-88ba-eae1158b8c33', 'serial': '9692c2cd-3d11-41b6-88ba-eae1158b8c33'} {{(pid=62519) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1753.937370] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-c39ff447-939e-4290-a599-7c5d8a75bafa tempest-ServersTestBootFromVolume-845595414 tempest-ServersTestBootFromVolume-845595414-project-member] [instance: 765cf18e-53a0-4cc6-ad0e-337a6f68915c] Destroying instance {{(pid=62519) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1753.939152] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-539a5c40-221c-4b0e-be72-8496c1e57610 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1753.949166] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-c39ff447-939e-4290-a599-7c5d8a75bafa tempest-ServersTestBootFromVolume-845595414 tempest-ServersTestBootFromVolume-845595414-project-member] [instance: 765cf18e-53a0-4cc6-ad0e-337a6f68915c] Unregistering the VM {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1753.949445] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-a318d144-a447-4362-bef6-6bb421c5a6de {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1753.994558] env[62519]: DEBUG oslo_vmware.api [None req-db7eaaae-d24f-47e5-8a15-07d26fb6316e tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Task: {'id': task-1802864, 'name': CloneVM_Task} progress is 94%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1754.128392] env[62519]: DEBUG nova.compute.manager [req-855d4742-e669-4082-ab31-427eaaab338b req-bdf00d04-7221-45fe-82ea-b8efa8156288 service nova] [instance: 156ed02a-3365-4a4f-b4de-ea86920d3baf] Received event network-changed-af20c500-c1f3-427c-a59d-e8a17b0a6945 {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1754.128756] env[62519]: DEBUG nova.compute.manager [req-855d4742-e669-4082-ab31-427eaaab338b req-bdf00d04-7221-45fe-82ea-b8efa8156288 service nova] [instance: 156ed02a-3365-4a4f-b4de-ea86920d3baf] Refreshing instance network info cache due to event network-changed-af20c500-c1f3-427c-a59d-e8a17b0a6945. {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11628}} [ 1754.128894] env[62519]: DEBUG oslo_concurrency.lockutils [req-855d4742-e669-4082-ab31-427eaaab338b req-bdf00d04-7221-45fe-82ea-b8efa8156288 service nova] Acquiring lock "refresh_cache-156ed02a-3365-4a4f-b4de-ea86920d3baf" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1754.129283] env[62519]: DEBUG oslo_concurrency.lockutils [req-855d4742-e669-4082-ab31-427eaaab338b req-bdf00d04-7221-45fe-82ea-b8efa8156288 service nova] Acquired lock "refresh_cache-156ed02a-3365-4a4f-b4de-ea86920d3baf" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1754.129283] env[62519]: DEBUG nova.network.neutron [req-855d4742-e669-4082-ab31-427eaaab338b req-bdf00d04-7221-45fe-82ea-b8efa8156288 service nova] [instance: 156ed02a-3365-4a4f-b4de-ea86920d3baf] Refreshing network info cache for port af20c500-c1f3-427c-a59d-e8a17b0a6945 {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1754.158048] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-c39ff447-939e-4290-a599-7c5d8a75bafa tempest-ServersTestBootFromVolume-845595414 tempest-ServersTestBootFromVolume-845595414-project-member] [instance: 765cf18e-53a0-4cc6-ad0e-337a6f68915c] Unregistered the VM {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1754.158410] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-c39ff447-939e-4290-a599-7c5d8a75bafa tempest-ServersTestBootFromVolume-845595414 tempest-ServersTestBootFromVolume-845595414-project-member] [instance: 765cf18e-53a0-4cc6-ad0e-337a6f68915c] Deleting contents of the VM from datastore datastore1 {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1754.158617] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-c39ff447-939e-4290-a599-7c5d8a75bafa tempest-ServersTestBootFromVolume-845595414 tempest-ServersTestBootFromVolume-845595414-project-member] Deleting the datastore file [datastore1] 765cf18e-53a0-4cc6-ad0e-337a6f68915c {{(pid=62519) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1754.158944] env[62519]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-0d9ce2f0-438c-4015-a2e1-cf65fbd4c9fc {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1754.162482] env[62519]: DEBUG nova.compute.utils [None req-84dc4058-6eb4-4380-b9f1-346dccf99218 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Using /dev/sd instead of None {{(pid=62519) get_next_device_name 
/opt/stack/nova/nova/compute/utils.py:238}} [ 1754.165846] env[62519]: DEBUG nova.compute.manager [None req-84dc4058-6eb4-4380-b9f1-346dccf99218 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] [instance: ee4b10ba-1c56-47cf-a528-d6e65c286ddb] Allocating IP information in the background. {{(pid=62519) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1989}} [ 1754.166693] env[62519]: DEBUG nova.network.neutron [None req-84dc4058-6eb4-4380-b9f1-346dccf99218 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] [instance: ee4b10ba-1c56-47cf-a528-d6e65c286ddb] allocate_for_instance() {{(pid=62519) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1754.175332] env[62519]: DEBUG oslo_vmware.api [None req-c39ff447-939e-4290-a599-7c5d8a75bafa tempest-ServersTestBootFromVolume-845595414 tempest-ServersTestBootFromVolume-845595414-project-member] Waiting for the task: (returnval){ [ 1754.175332] env[62519]: value = "task-1802867" [ 1754.175332] env[62519]: _type = "Task" [ 1754.175332] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1754.186602] env[62519]: DEBUG oslo_vmware.api [None req-c39ff447-939e-4290-a599-7c5d8a75bafa tempest-ServersTestBootFromVolume-845595414 tempest-ServersTestBootFromVolume-845595414-project-member] Task: {'id': task-1802867, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1754.240559] env[62519]: DEBUG nova.policy [None req-84dc4058-6eb4-4380-b9f1-346dccf99218 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '710cbaa277014d8389c2c24cd739dbde', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '448555031bb64aefafd0fcc67f4df10a', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62519) authorize /opt/stack/nova/nova/policy.py:192}} [ 1754.368915] env[62519]: DEBUG oslo_concurrency.lockutils [None req-3fcdc708-adeb-4150-ac24-b9b0d07e602e tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Acquiring lock "88f9351c-253b-49dd-a88e-b8585ea742ac" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1754.369425] env[62519]: DEBUG oslo_concurrency.lockutils [None req-3fcdc708-adeb-4150-ac24-b9b0d07e602e tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Lock "88f9351c-253b-49dd-a88e-b8585ea742ac" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.001s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1754.370590] env[62519]: INFO nova.compute.manager [None req-3fcdc708-adeb-4150-ac24-b9b0d07e602e tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] [instance: 88f9351c-253b-49dd-a88e-b8585ea742ac] 
Attaching volume 5bd7c4a6-d552-4040-aefb-7d5577b89149 to /dev/sdb [ 1754.382089] env[62519]: DEBUG oslo_vmware.api [None req-51df1d83-d7cf-4798-abd3-f19fe8d9b526 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] Task: {'id': task-1802862, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1754.394205] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1802865, 'name': CreateVM_Task} progress is 25%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1754.429923] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6cc04247-d2b4-47be-841b-d5ad65eb085d {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1754.437989] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1223a01-6240-41a0-934f-474749d819fa {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1754.452536] env[62519]: DEBUG nova.virt.block_device [None req-3fcdc708-adeb-4150-ac24-b9b0d07e602e tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] [instance: 88f9351c-253b-49dd-a88e-b8585ea742ac] Updating existing volume attachment record: d9f1cb74-1130-4437-a80b-d1383fa7f854 {{(pid=62519) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1754.495302] env[62519]: DEBUG oslo_vmware.api [None req-db7eaaae-d24f-47e5-8a15-07d26fb6316e tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Task: {'id': task-1802864, 'name': CloneVM_Task} progress is 94%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1754.637671] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-7972f1f6-95b2-485b-b78c-3e695e516e72 tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] [instance: 99f22198-1a65-4d0d-b665-90c7063dbdb9] Volume attach. 
Driver type: vmdk {{(pid=62519) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1754.638069] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-7972f1f6-95b2-485b-b78c-3e695e516e72 tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] [instance: 99f22198-1a65-4d0d-b665-90c7063dbdb9] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-373782', 'volume_id': '38c2442b-9ab6-4c99-96ff-e9c0c22ab09d', 'name': 'volume-38c2442b-9ab6-4c99-96ff-e9c0c22ab09d', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '99f22198-1a65-4d0d-b665-90c7063dbdb9', 'attached_at': '', 'detached_at': '', 'volume_id': '38c2442b-9ab6-4c99-96ff-e9c0c22ab09d', 'serial': '38c2442b-9ab6-4c99-96ff-e9c0c22ab09d'} {{(pid=62519) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1754.640846] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d1cdc77-c7ea-44d5-b8d8-2f7391fe7c72 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1754.668410] env[62519]: DEBUG nova.compute.manager [None req-84dc4058-6eb4-4380-b9f1-346dccf99218 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] [instance: ee4b10ba-1c56-47cf-a528-d6e65c286ddb] Start building block device mappings for instance. {{(pid=62519) _build_resources /opt/stack/nova/nova/compute/manager.py:2873}} [ 1754.678528] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73c34230-0e7f-4efc-9b06-99e3c877b133 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1754.741626] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-7972f1f6-95b2-485b-b78c-3e695e516e72 tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] [instance: 99f22198-1a65-4d0d-b665-90c7063dbdb9] Reconfiguring VM instance instance-0000003e to attach disk [datastore1] volume-38c2442b-9ab6-4c99-96ff-e9c0c22ab09d/volume-38c2442b-9ab6-4c99-96ff-e9c0c22ab09d.vmdk or device None with type thin {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1754.741626] env[62519]: DEBUG oslo_vmware.api [None req-c39ff447-939e-4290-a599-7c5d8a75bafa tempest-ServersTestBootFromVolume-845595414 tempest-ServersTestBootFromVolume-845595414-project-member] Task: {'id': task-1802867, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.082968} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1754.746725] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-506b4c81-9e3e-443e-a18e-293cf703cbe1 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1754.772015] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-c39ff447-939e-4290-a599-7c5d8a75bafa tempest-ServersTestBootFromVolume-845595414 tempest-ServersTestBootFromVolume-845595414-project-member] Deleted the datastore file {{(pid=62519) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1754.772257] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-c39ff447-939e-4290-a599-7c5d8a75bafa tempest-ServersTestBootFromVolume-845595414 tempest-ServersTestBootFromVolume-845595414-project-member] [instance: 765cf18e-53a0-4cc6-ad0e-337a6f68915c] Deleted contents of the VM from datastore datastore1 {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1754.772543] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-c39ff447-939e-4290-a599-7c5d8a75bafa tempest-ServersTestBootFromVolume-845595414 tempest-ServersTestBootFromVolume-845595414-project-member] [instance: 765cf18e-53a0-4cc6-ad0e-337a6f68915c] Instance destroyed {{(pid=62519) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1754.774185] env[62519]: INFO nova.compute.manager [None req-c39ff447-939e-4290-a599-7c5d8a75bafa tempest-ServersTestBootFromVolume-845595414 tempest-ServersTestBootFromVolume-845595414-project-member] [instance: 765cf18e-53a0-4cc6-ad0e-337a6f68915c] Took 2.50 seconds to destroy the instance on the hypervisor. [ 1754.774185] env[62519]: DEBUG oslo.service.loopingcall [None req-c39ff447-939e-4290-a599-7c5d8a75bafa tempest-ServersTestBootFromVolume-845595414 tempest-ServersTestBootFromVolume-845595414-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62519) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1754.777492] env[62519]: DEBUG nova.compute.manager [-] [instance: 765cf18e-53a0-4cc6-ad0e-337a6f68915c] Deallocating network for instance {{(pid=62519) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2297}} [ 1754.777763] env[62519]: DEBUG nova.network.neutron [-] [instance: 765cf18e-53a0-4cc6-ad0e-337a6f68915c] deallocate_for_instance() {{(pid=62519) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1754.789627] env[62519]: DEBUG oslo_vmware.api [None req-7972f1f6-95b2-485b-b78c-3e695e516e72 tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] Waiting for the task: (returnval){ [ 1754.789627] env[62519]: value = "task-1802871" [ 1754.789627] env[62519]: _type = "Task" [ 1754.789627] env[62519]: } to complete. 
{{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1754.795157] env[62519]: DEBUG oslo_concurrency.lockutils [None req-7937b940-c110-4d34-9726-761103aa71cf tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Acquiring lock "c8816718-0a35-4474-b162-c619b0acc154" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1754.795409] env[62519]: DEBUG oslo_concurrency.lockutils [None req-7937b940-c110-4d34-9726-761103aa71cf tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Lock "c8816718-0a35-4474-b162-c619b0acc154" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1754.795609] env[62519]: DEBUG oslo_concurrency.lockutils [None req-7937b940-c110-4d34-9726-761103aa71cf tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Acquiring lock "c8816718-0a35-4474-b162-c619b0acc154-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1754.797036] env[62519]: DEBUG oslo_concurrency.lockutils [None req-7937b940-c110-4d34-9726-761103aa71cf tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Lock "c8816718-0a35-4474-b162-c619b0acc154-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1754.797036] env[62519]: DEBUG oslo_concurrency.lockutils [None req-7937b940-c110-4d34-9726-761103aa71cf tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Lock "c8816718-0a35-4474-b162-c619b0acc154-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1754.799453] env[62519]: INFO nova.compute.manager [None req-7937b940-c110-4d34-9726-761103aa71cf tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] [instance: c8816718-0a35-4474-b162-c619b0acc154] Terminating instance [ 1754.804348] env[62519]: DEBUG oslo_vmware.api [None req-7972f1f6-95b2-485b-b78c-3e695e516e72 tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] Task: {'id': task-1802871, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1754.873961] env[62519]: DEBUG oslo_vmware.api [None req-51df1d83-d7cf-4798-abd3-f19fe8d9b526 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] Task: {'id': task-1802862, 'name': CopyVirtualDisk_Task} progress is 100%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1754.891233] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1802865, 'name': CreateVM_Task, 'duration_secs': 0.809395} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1754.892031] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: af422ca1-7966-4bed-97bf-2b4c5285eaab] Created VM on the ESX host {{(pid=62519) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1754.892186] env[62519]: DEBUG oslo_concurrency.lockutils [None req-51df1d83-d7cf-4798-abd3-f19fe8d9b526 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1754.892337] env[62519]: DEBUG oslo_concurrency.lockutils [None req-51df1d83-d7cf-4798-abd3-f19fe8d9b526 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] Acquired lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1754.892693] env[62519]: DEBUG oslo_concurrency.lockutils [None req-51df1d83-d7cf-4798-abd3-f19fe8d9b526 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1754.892961] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-227becea-51c8-4031-b1aa-ff1c17cd9938 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1754.901117] env[62519]: DEBUG oslo_vmware.api [None req-51df1d83-d7cf-4798-abd3-f19fe8d9b526 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] Waiting for the task: (returnval){ [ 1754.901117] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]52eeb94b-75a9-5ec0-06de-4df3f7ccd6ff" [ 1754.901117] env[62519]: _type = "Task" [ 1754.901117] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1754.911610] env[62519]: DEBUG oslo_vmware.api [None req-51df1d83-d7cf-4798-abd3-f19fe8d9b526 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52eeb94b-75a9-5ec0-06de-4df3f7ccd6ff, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1754.940313] env[62519]: DEBUG nova.network.neutron [req-11c9c0fb-3956-402c-a92e-082c20cdd1f7 req-4a9f1b7f-eb12-4fa9-9fbb-bc13cbd8ef9b service nova] [instance: af422ca1-7966-4bed-97bf-2b4c5285eaab] Updated VIF entry in instance network info cache for port fc6b82fe-716c-40e8-b027-ffa8ee2ea408. 
{{(pid=62519) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1754.940778] env[62519]: DEBUG nova.network.neutron [req-11c9c0fb-3956-402c-a92e-082c20cdd1f7 req-4a9f1b7f-eb12-4fa9-9fbb-bc13cbd8ef9b service nova] [instance: af422ca1-7966-4bed-97bf-2b4c5285eaab] Updating instance_info_cache with network_info: [{"id": "fc6b82fe-716c-40e8-b027-ffa8ee2ea408", "address": "fa:16:3e:a4:f6:94", "network": {"id": "df66fb3a-d33e-4ba9-a3b0-cea73cd95302", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-944542718-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f1e17b34cbef497985ff79e77d5b0f89", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dd7d0d95-6848-4e69-ac21-75f8db82a3b5", "external-id": "nsx-vlan-transportzone-272", "segmentation_id": 272, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfc6b82fe-71", "ovs_interfaceid": "fc6b82fe-716c-40e8-b027-ffa8ee2ea408", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1754.998664] env[62519]: DEBUG oslo_vmware.api [None req-db7eaaae-d24f-47e5-8a15-07d26fb6316e tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Task: {'id': task-1802864, 'name': CloneVM_Task} progress is 94%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1755.111218] env[62519]: DEBUG nova.network.neutron [None req-84dc4058-6eb4-4380-b9f1-346dccf99218 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] [instance: ee4b10ba-1c56-47cf-a528-d6e65c286ddb] Successfully created port: 99b49b4e-aba5-450d-a2db-3b35a0313a5b {{(pid=62519) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1755.160906] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8a7d0b9-2ebe-4ac7-861b-91d5afe0f12e {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1755.169781] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f3bcacd-e3d0-4827-91ec-054146742f5c {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1755.207155] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a00c8274-d06e-47b4-b6fc-ec31a6955e29 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1755.219020] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a5742ffb-3f3d-4c5e-8b9f-f4041a1c715a {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1755.239238] env[62519]: DEBUG nova.compute.provider_tree [None req-12f12914-bee9-4598-82bd-c3b629c1401f tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Inventory has not changed in ProviderTree for provider: f8ca0d98-9158-4b85-ae0e-b106f966dd44 {{(pid=62519) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1755.284544] env[62519]: DEBUG nova.compute.manager [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Didn't find any instances for network info cache update. 
{{(pid=62519) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10504}} [ 1755.284773] env[62519]: DEBUG oslo_service.periodic_task [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62519) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1755.284931] env[62519]: DEBUG oslo_service.periodic_task [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62519) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1755.285093] env[62519]: DEBUG oslo_service.periodic_task [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62519) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1755.285244] env[62519]: DEBUG oslo_service.periodic_task [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62519) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1755.285383] env[62519]: DEBUG oslo_service.periodic_task [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62519) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1755.285525] env[62519]: DEBUG oslo_service.periodic_task [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62519) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1755.285652] env[62519]: DEBUG nova.compute.manager [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=62519) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11037}} [ 1755.285788] env[62519]: DEBUG oslo_service.periodic_task [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Running periodic task ComputeManager.update_available_resource {{(pid=62519) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1755.303293] env[62519]: DEBUG oslo_vmware.api [None req-7972f1f6-95b2-485b-b78c-3e695e516e72 tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] Task: {'id': task-1802871, 'name': ReconfigVM_Task, 'duration_secs': 0.451172} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1755.303600] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-7972f1f6-95b2-485b-b78c-3e695e516e72 tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] [instance: 99f22198-1a65-4d0d-b665-90c7063dbdb9] Reconfigured VM instance instance-0000003e to attach disk [datastore1] volume-38c2442b-9ab6-4c99-96ff-e9c0c22ab09d/volume-38c2442b-9ab6-4c99-96ff-e9c0c22ab09d.vmdk or device None with type thin {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1755.310028] env[62519]: DEBUG nova.network.neutron [req-855d4742-e669-4082-ab31-427eaaab338b req-bdf00d04-7221-45fe-82ea-b8efa8156288 service nova] [instance: 156ed02a-3365-4a4f-b4de-ea86920d3baf] Updated VIF entry in instance network info cache for port af20c500-c1f3-427c-a59d-e8a17b0a6945. {{(pid=62519) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1755.311014] env[62519]: DEBUG nova.network.neutron [req-855d4742-e669-4082-ab31-427eaaab338b req-bdf00d04-7221-45fe-82ea-b8efa8156288 service nova] [instance: 156ed02a-3365-4a4f-b4de-ea86920d3baf] Updating instance_info_cache with network_info: [{"id": "af20c500-c1f3-427c-a59d-e8a17b0a6945", "address": "fa:16:3e:2a:c1:c6", "network": {"id": "e0faae63-5ec4-4eef-a323-174dc4b623b5", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-1005479736-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "7336e5aa2f9b42789cec48f07e586876", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4055505f-97ab-400b-969c-43d99b38fd48", "external-id": "nsx-vlan-transportzone-952", "segmentation_id": 952, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapaf20c500-c1", "ovs_interfaceid": "af20c500-c1f3-427c-a59d-e8a17b0a6945", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1755.315478] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e63bb21f-72ad-4450-a815-7c8307a7412c {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1755.330037] env[62519]: DEBUG nova.compute.manager [None req-7937b940-c110-4d34-9726-761103aa71cf tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] [instance: c8816718-0a35-4474-b162-c619b0acc154] Start destroying the instance on the hypervisor. 
{{(pid=62519) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3166}} [ 1755.330262] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-7937b940-c110-4d34-9726-761103aa71cf tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] [instance: c8816718-0a35-4474-b162-c619b0acc154] Destroying instance {{(pid=62519) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1755.332225] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d3c1f42a-cd8c-4801-ba3a-6a3280dd929a {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1755.341390] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-7937b940-c110-4d34-9726-761103aa71cf tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] [instance: c8816718-0a35-4474-b162-c619b0acc154] Unregistering the VM {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1755.342783] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-9438bc24-b6e9-4b3d-bcb6-b91735f4e4b9 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1755.344749] env[62519]: DEBUG oslo_vmware.api [None req-7972f1f6-95b2-485b-b78c-3e695e516e72 tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] Waiting for the task: (returnval){ [ 1755.344749] env[62519]: value = "task-1802872" [ 1755.344749] env[62519]: _type = "Task" [ 1755.344749] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1755.354283] env[62519]: DEBUG oslo_vmware.api [None req-7972f1f6-95b2-485b-b78c-3e695e516e72 tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] Task: {'id': task-1802872, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1755.371118] env[62519]: DEBUG oslo_vmware.api [None req-51df1d83-d7cf-4798-abd3-f19fe8d9b526 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] Task: {'id': task-1802862, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.553901} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1755.371489] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-51df1d83-d7cf-4798-abd3-f19fe8d9b526 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk to [datastore1] 76786353-f93f-4e7e-b3f7-7f22ae4b7b41/76786353-f93f-4e7e-b3f7-7f22ae4b7b41.vmdk {{(pid=62519) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1755.371738] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-51df1d83-d7cf-4798-abd3-f19fe8d9b526 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] [instance: 76786353-f93f-4e7e-b3f7-7f22ae4b7b41] Extending root virtual disk to 1048576 {{(pid=62519) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1755.372039] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-0c0b0287-cf5e-416d-87a9-5a90b6bf4108 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1755.380557] env[62519]: DEBUG oslo_vmware.api [None req-51df1d83-d7cf-4798-abd3-f19fe8d9b526 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] Waiting for the task: (returnval){ [ 1755.380557] env[62519]: value = "task-1802874" [ 1755.380557] env[62519]: _type = "Task" [ 1755.380557] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1755.389951] env[62519]: DEBUG oslo_vmware.api [None req-51df1d83-d7cf-4798-abd3-f19fe8d9b526 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] Task: {'id': task-1802874, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1755.414586] env[62519]: DEBUG oslo_vmware.api [None req-51df1d83-d7cf-4798-abd3-f19fe8d9b526 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52eeb94b-75a9-5ec0-06de-4df3f7ccd6ff, 'name': SearchDatastore_Task, 'duration_secs': 0.013433} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1755.414908] env[62519]: DEBUG oslo_concurrency.lockutils [None req-51df1d83-d7cf-4798-abd3-f19fe8d9b526 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] Releasing lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1755.415261] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-51df1d83-d7cf-4798-abd3-f19fe8d9b526 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] [instance: af422ca1-7966-4bed-97bf-2b4c5285eaab] Processing image 15793716-f1d9-4a86-9030-717adf498693 {{(pid=62519) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1755.415803] env[62519]: DEBUG oslo_concurrency.lockutils [None req-51df1d83-d7cf-4798-abd3-f19fe8d9b526 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1755.415803] env[62519]: DEBUG oslo_concurrency.lockutils [None req-51df1d83-d7cf-4798-abd3-f19fe8d9b526 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] Acquired lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1755.415803] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-51df1d83-d7cf-4798-abd3-f19fe8d9b526 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62519) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1755.415959] env[62519]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d2cfe4d4-c59c-4f3e-87e0-3d16482da04c {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1755.426422] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-51df1d83-d7cf-4798-abd3-f19fe8d9b526 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62519) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1755.426602] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-51df1d83-d7cf-4798-abd3-f19fe8d9b526 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62519) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1755.427498] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6cc630b8-dc3d-4de2-b0a3-0aa62645f373 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1755.433797] env[62519]: DEBUG oslo_vmware.api [None req-51df1d83-d7cf-4798-abd3-f19fe8d9b526 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] Waiting for the task: (returnval){ [ 1755.433797] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]52a27961-8218-52cd-82ca-545f9d29f8b3" [ 1755.433797] env[62519]: _type = "Task" [ 1755.433797] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1755.445401] env[62519]: DEBUG oslo_concurrency.lockutils [req-11c9c0fb-3956-402c-a92e-082c20cdd1f7 req-4a9f1b7f-eb12-4fa9-9fbb-bc13cbd8ef9b service nova] Releasing lock "refresh_cache-af422ca1-7966-4bed-97bf-2b4c5285eaab" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1755.445401] env[62519]: DEBUG oslo_vmware.api [None req-51df1d83-d7cf-4798-abd3-f19fe8d9b526 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52a27961-8218-52cd-82ca-545f9d29f8b3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1755.492859] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-7937b940-c110-4d34-9726-761103aa71cf tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] [instance: c8816718-0a35-4474-b162-c619b0acc154] Unregistered the VM {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1755.492859] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-7937b940-c110-4d34-9726-761103aa71cf tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] [instance: c8816718-0a35-4474-b162-c619b0acc154] Deleting contents of the VM from datastore datastore1 {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1755.492991] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-7937b940-c110-4d34-9726-761103aa71cf tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Deleting the datastore file [datastore1] c8816718-0a35-4474-b162-c619b0acc154 {{(pid=62519) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1755.493258] env[62519]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ff52416f-fc2d-43c2-a6b3-bf2fb97cacc0 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1755.498698] env[62519]: DEBUG oslo_vmware.api [None req-db7eaaae-d24f-47e5-8a15-07d26fb6316e tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Task: {'id': task-1802864, 'name': CloneVM_Task, 'duration_secs': 1.936592} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1755.499443] env[62519]: INFO nova.virt.vmwareapi.vmops [None req-db7eaaae-d24f-47e5-8a15-07d26fb6316e tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] [instance: dd60afd6-2834-4fca-a846-e39d57aabd60] Created linked-clone VM from snapshot [ 1755.500283] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-372363ac-341d-4935-8c92-b16c9cccff1f {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1755.509397] env[62519]: DEBUG oslo_vmware.api [None req-7937b940-c110-4d34-9726-761103aa71cf tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Waiting for the task: (returnval){ [ 1755.509397] env[62519]: value = "task-1802875" [ 1755.509397] env[62519]: _type = "Task" [ 1755.509397] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1755.515274] env[62519]: DEBUG nova.virt.vmwareapi.images [None req-db7eaaae-d24f-47e5-8a15-07d26fb6316e tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] [instance: dd60afd6-2834-4fca-a846-e39d57aabd60] Uploading image 8e3b2975-9973-4787-adba-5db18d1935c6 {{(pid=62519) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1755.524408] env[62519]: DEBUG oslo_vmware.api [None req-7937b940-c110-4d34-9726-761103aa71cf tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Task: {'id': task-1802875, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1755.530230] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-db7eaaae-d24f-47e5-8a15-07d26fb6316e tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] [instance: dd60afd6-2834-4fca-a846-e39d57aabd60] Destroying the VM {{(pid=62519) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1755.530538] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-c1e2655f-b63e-469d-9c95-c6abe858edd4 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1755.539085] env[62519]: DEBUG oslo_vmware.api [None req-db7eaaae-d24f-47e5-8a15-07d26fb6316e tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Waiting for the task: (returnval){ [ 1755.539085] env[62519]: value = "task-1802876" [ 1755.539085] env[62519]: _type = "Task" [ 1755.539085] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1755.549099] env[62519]: DEBUG oslo_vmware.api [None req-db7eaaae-d24f-47e5-8a15-07d26fb6316e tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Task: {'id': task-1802876, 'name': Destroy_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1755.707965] env[62519]: DEBUG nova.compute.manager [None req-84dc4058-6eb4-4380-b9f1-346dccf99218 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] [instance: ee4b10ba-1c56-47cf-a528-d6e65c286ddb] Start spawning the instance on the hypervisor. {{(pid=62519) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2647}} [ 1755.740321] env[62519]: DEBUG nova.virt.hardware [None req-84dc4058-6eb4-4380-b9f1-346dccf99218 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T07:56:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T07:55:55Z,direct_url=,disk_format='vmdk',id=15793716-f1d9-4a86-9030-717adf498693,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4069337684d348e0a1ab4eb6a1a2b14d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T07:55:56Z,virtual_size=,visibility=), allow threads: False {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1755.741187] env[62519]: DEBUG nova.virt.hardware [None req-84dc4058-6eb4-4380-b9f1-346dccf99218 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Flavor limits 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1755.741413] env[62519]: DEBUG nova.virt.hardware [None req-84dc4058-6eb4-4380-b9f1-346dccf99218 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Image limits 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1755.741577] env[62519]: DEBUG nova.virt.hardware [None req-84dc4058-6eb4-4380-b9f1-346dccf99218 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Flavor pref 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1755.741738] env[62519]: DEBUG nova.virt.hardware [None req-84dc4058-6eb4-4380-b9f1-346dccf99218 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Image pref 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1755.741886] env[62519]: DEBUG nova.virt.hardware [None req-84dc4058-6eb4-4380-b9f1-346dccf99218 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1755.742110] env[62519]: DEBUG nova.virt.hardware [None req-84dc4058-6eb4-4380-b9f1-346dccf99218 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum 
VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1755.742273] env[62519]: DEBUG nova.virt.hardware [None req-84dc4058-6eb4-4380-b9f1-346dccf99218 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62519) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1755.742440] env[62519]: DEBUG nova.virt.hardware [None req-84dc4058-6eb4-4380-b9f1-346dccf99218 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Got 1 possible topologies {{(pid=62519) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1755.742599] env[62519]: DEBUG nova.virt.hardware [None req-84dc4058-6eb4-4380-b9f1-346dccf99218 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1755.742786] env[62519]: DEBUG nova.virt.hardware [None req-84dc4058-6eb4-4380-b9f1-346dccf99218 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1755.743824] env[62519]: DEBUG nova.scheduler.client.report [None req-12f12914-bee9-4598-82bd-c3b629c1401f tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Inventory has not changed for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1755.747327] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14758797-9536-46ef-aeee-b81528db7b30 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1755.756516] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03eb8d4f-8ad9-44b6-87ea-145b092af151 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1755.788419] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1755.834712] env[62519]: DEBUG oslo_concurrency.lockutils [req-855d4742-e669-4082-ab31-427eaaab338b req-bdf00d04-7221-45fe-82ea-b8efa8156288 service nova] Releasing lock "refresh_cache-156ed02a-3365-4a4f-b4de-ea86920d3baf" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1755.857445] env[62519]: DEBUG 
oslo_vmware.api [None req-7972f1f6-95b2-485b-b78c-3e695e516e72 tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] Task: {'id': task-1802872, 'name': ReconfigVM_Task, 'duration_secs': 0.173739} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1755.857643] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-7972f1f6-95b2-485b-b78c-3e695e516e72 tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] [instance: 99f22198-1a65-4d0d-b665-90c7063dbdb9] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-373782', 'volume_id': '38c2442b-9ab6-4c99-96ff-e9c0c22ab09d', 'name': 'volume-38c2442b-9ab6-4c99-96ff-e9c0c22ab09d', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '99f22198-1a65-4d0d-b665-90c7063dbdb9', 'attached_at': '', 'detached_at': '', 'volume_id': '38c2442b-9ab6-4c99-96ff-e9c0c22ab09d', 'serial': '38c2442b-9ab6-4c99-96ff-e9c0c22ab09d'} {{(pid=62519) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1755.893424] env[62519]: DEBUG oslo_vmware.api [None req-51df1d83-d7cf-4798-abd3-f19fe8d9b526 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] Task: {'id': task-1802874, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.070014} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1755.893753] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-51df1d83-d7cf-4798-abd3-f19fe8d9b526 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] [instance: 76786353-f93f-4e7e-b3f7-7f22ae4b7b41] Extended root virtual disk {{(pid=62519) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1755.894495] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65f594fc-c32b-4dae-ae6e-0de22530144d {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1755.926072] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-51df1d83-d7cf-4798-abd3-f19fe8d9b526 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] [instance: 76786353-f93f-4e7e-b3f7-7f22ae4b7b41] Reconfiguring VM instance instance-00000051 to attach disk [datastore1] 76786353-f93f-4e7e-b3f7-7f22ae4b7b41/76786353-f93f-4e7e-b3f7-7f22ae4b7b41.vmdk or device None with type sparse {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1755.927956] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-459a0bc9-c473-46e1-9eb6-b24da25e85d7 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1755.954781] env[62519]: DEBUG oslo_vmware.api [None req-51df1d83-d7cf-4798-abd3-f19fe8d9b526 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52a27961-8218-52cd-82ca-545f9d29f8b3, 'name': SearchDatastore_Task, 'duration_secs': 0.012135} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1755.959198] env[62519]: DEBUG oslo_vmware.api [None req-51df1d83-d7cf-4798-abd3-f19fe8d9b526 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] Waiting for the task: (returnval){ [ 1755.959198] env[62519]: value = "task-1802877" [ 1755.959198] env[62519]: _type = "Task" [ 1755.959198] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1755.959198] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-19f49326-2a7b-4629-8b88-bad0c636a400 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1755.970517] env[62519]: DEBUG oslo_vmware.api [None req-51df1d83-d7cf-4798-abd3-f19fe8d9b526 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] Waiting for the task: (returnval){ [ 1755.970517] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]52d008ab-7021-79b9-1ee2-782c018ea700" [ 1755.970517] env[62519]: _type = "Task" [ 1755.970517] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1755.971591] env[62519]: DEBUG oslo_vmware.api [None req-51df1d83-d7cf-4798-abd3-f19fe8d9b526 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] Task: {'id': task-1802877, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1755.983506] env[62519]: DEBUG oslo_vmware.api [None req-51df1d83-d7cf-4798-abd3-f19fe8d9b526 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52d008ab-7021-79b9-1ee2-782c018ea700, 'name': SearchDatastore_Task, 'duration_secs': 0.011242} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1755.983506] env[62519]: DEBUG oslo_concurrency.lockutils [None req-51df1d83-d7cf-4798-abd3-f19fe8d9b526 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] Releasing lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1755.983506] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-51df1d83-d7cf-4798-abd3-f19fe8d9b526 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk to [datastore1] af422ca1-7966-4bed-97bf-2b4c5285eaab/af422ca1-7966-4bed-97bf-2b4c5285eaab.vmdk {{(pid=62519) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1755.983506] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-3b9d7d96-5f1c-4242-afe1-95a22390476e {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1755.991683] env[62519]: DEBUG oslo_vmware.api [None req-51df1d83-d7cf-4798-abd3-f19fe8d9b526 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] Waiting for the task: (returnval){ [ 1755.991683] env[62519]: value = "task-1802878" [ 1755.991683] env[62519]: _type = "Task" [ 1755.991683] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1756.001465] env[62519]: DEBUG oslo_vmware.api [None req-51df1d83-d7cf-4798-abd3-f19fe8d9b526 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] Task: {'id': task-1802878, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1756.025166] env[62519]: DEBUG oslo_vmware.api [None req-7937b940-c110-4d34-9726-761103aa71cf tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Task: {'id': task-1802875, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.156061} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1756.025478] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-7937b940-c110-4d34-9726-761103aa71cf tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Deleted the datastore file {{(pid=62519) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1756.025697] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-7937b940-c110-4d34-9726-761103aa71cf tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] [instance: c8816718-0a35-4474-b162-c619b0acc154] Deleted contents of the VM from datastore datastore1 {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1756.025895] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-7937b940-c110-4d34-9726-761103aa71cf tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] [instance: c8816718-0a35-4474-b162-c619b0acc154] Instance destroyed {{(pid=62519) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1756.026102] env[62519]: INFO nova.compute.manager [None req-7937b940-c110-4d34-9726-761103aa71cf tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] [instance: c8816718-0a35-4474-b162-c619b0acc154] Took 0.70 seconds to destroy the instance on the hypervisor. [ 1756.026409] env[62519]: DEBUG oslo.service.loopingcall [None req-7937b940-c110-4d34-9726-761103aa71cf tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62519) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1756.026637] env[62519]: DEBUG nova.compute.manager [-] [instance: c8816718-0a35-4474-b162-c619b0acc154] Deallocating network for instance {{(pid=62519) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2297}} [ 1756.026743] env[62519]: DEBUG nova.network.neutron [-] [instance: c8816718-0a35-4474-b162-c619b0acc154] deallocate_for_instance() {{(pid=62519) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1756.050178] env[62519]: DEBUG oslo_vmware.api [None req-db7eaaae-d24f-47e5-8a15-07d26fb6316e tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Task: {'id': task-1802876, 'name': Destroy_Task} progress is 33%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1756.251823] env[62519]: DEBUG oslo_concurrency.lockutils [None req-12f12914-bee9-4598-82bd-c3b629c1401f tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.597s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1756.252488] env[62519]: DEBUG nova.compute.manager [None req-12f12914-bee9-4598-82bd-c3b629c1401f tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] [instance: 2ea8304e-5b91-4908-a876-6e2c780b1da9] Start building networks asynchronously for instance. 
{{(pid=62519) _build_resources /opt/stack/nova/nova/compute/manager.py:2838}} [ 1756.255612] env[62519]: DEBUG oslo_concurrency.lockutils [None req-caa9ca9c-57be-478a-8782-107ca9749996 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 16.867s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1756.257198] env[62519]: INFO nova.compute.claims [None req-caa9ca9c-57be-478a-8782-107ca9749996 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] [instance: 5da884af-d8d2-409b-99bd-e5370e44e9f0] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1756.445090] env[62519]: DEBUG nova.network.neutron [-] [instance: 765cf18e-53a0-4cc6-ad0e-337a6f68915c] Updating instance_info_cache with network_info: [] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1756.472371] env[62519]: DEBUG oslo_vmware.api [None req-51df1d83-d7cf-4798-abd3-f19fe8d9b526 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] Task: {'id': task-1802877, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1756.511956] env[62519]: DEBUG oslo_vmware.api [None req-51df1d83-d7cf-4798-abd3-f19fe8d9b526 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] Task: {'id': task-1802878, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.496047} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1756.512303] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-51df1d83-d7cf-4798-abd3-f19fe8d9b526 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk to [datastore1] af422ca1-7966-4bed-97bf-2b4c5285eaab/af422ca1-7966-4bed-97bf-2b4c5285eaab.vmdk {{(pid=62519) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1756.512557] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-51df1d83-d7cf-4798-abd3-f19fe8d9b526 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] [instance: af422ca1-7966-4bed-97bf-2b4c5285eaab] Extending root virtual disk to 1048576 {{(pid=62519) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1756.512814] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-46df7c50-9823-4ede-babd-0447f39cb5cb {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1756.525014] env[62519]: DEBUG oslo_vmware.api [None req-51df1d83-d7cf-4798-abd3-f19fe8d9b526 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] Waiting for the task: (returnval){ [ 1756.525014] env[62519]: value = "task-1802879" [ 1756.525014] env[62519]: _type = "Task" [ 1756.525014] env[62519]: } to complete. 
{{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1756.534654] env[62519]: DEBUG oslo_vmware.api [None req-51df1d83-d7cf-4798-abd3-f19fe8d9b526 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] Task: {'id': task-1802879, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1756.551860] env[62519]: DEBUG oslo_vmware.api [None req-db7eaaae-d24f-47e5-8a15-07d26fb6316e tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Task: {'id': task-1802876, 'name': Destroy_Task, 'duration_secs': 0.703072} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1756.552381] env[62519]: INFO nova.virt.vmwareapi.vm_util [None req-db7eaaae-d24f-47e5-8a15-07d26fb6316e tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] [instance: dd60afd6-2834-4fca-a846-e39d57aabd60] Destroyed the VM [ 1756.552733] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-db7eaaae-d24f-47e5-8a15-07d26fb6316e tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] [instance: dd60afd6-2834-4fca-a846-e39d57aabd60] Deleting Snapshot of the VM instance {{(pid=62519) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1756.555344] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-d792f17a-cc80-40c1-b67d-f0f967c22f4a {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1756.562027] env[62519]: DEBUG oslo_vmware.api [None req-db7eaaae-d24f-47e5-8a15-07d26fb6316e tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Waiting for the task: (returnval){ [ 1756.562027] env[62519]: value = "task-1802880" [ 1756.562027] env[62519]: _type = "Task" [ 1756.562027] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1756.573870] env[62519]: DEBUG oslo_vmware.api [None req-db7eaaae-d24f-47e5-8a15-07d26fb6316e tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Task: {'id': task-1802880, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1756.587296] env[62519]: DEBUG nova.compute.manager [req-2e678d5d-759b-4c0e-a1b2-463ad0aa9c61 req-7afa66ff-9164-4516-95ee-cd4d419ba0e1 service nova] [instance: 156ed02a-3365-4a4f-b4de-ea86920d3baf] Received event network-changed-af20c500-c1f3-427c-a59d-e8a17b0a6945 {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1756.587582] env[62519]: DEBUG nova.compute.manager [req-2e678d5d-759b-4c0e-a1b2-463ad0aa9c61 req-7afa66ff-9164-4516-95ee-cd4d419ba0e1 service nova] [instance: 156ed02a-3365-4a4f-b4de-ea86920d3baf] Refreshing instance network info cache due to event network-changed-af20c500-c1f3-427c-a59d-e8a17b0a6945. 
{{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11628}} [ 1756.587817] env[62519]: DEBUG oslo_concurrency.lockutils [req-2e678d5d-759b-4c0e-a1b2-463ad0aa9c61 req-7afa66ff-9164-4516-95ee-cd4d419ba0e1 service nova] Acquiring lock "refresh_cache-156ed02a-3365-4a4f-b4de-ea86920d3baf" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1756.587966] env[62519]: DEBUG oslo_concurrency.lockutils [req-2e678d5d-759b-4c0e-a1b2-463ad0aa9c61 req-7afa66ff-9164-4516-95ee-cd4d419ba0e1 service nova] Acquired lock "refresh_cache-156ed02a-3365-4a4f-b4de-ea86920d3baf" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1756.588151] env[62519]: DEBUG nova.network.neutron [req-2e678d5d-759b-4c0e-a1b2-463ad0aa9c61 req-7afa66ff-9164-4516-95ee-cd4d419ba0e1 service nova] [instance: 156ed02a-3365-4a4f-b4de-ea86920d3baf] Refreshing network info cache for port af20c500-c1f3-427c-a59d-e8a17b0a6945 {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1756.762704] env[62519]: DEBUG nova.compute.utils [None req-12f12914-bee9-4598-82bd-c3b629c1401f tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Using /dev/sd instead of None {{(pid=62519) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1756.772566] env[62519]: DEBUG nova.compute.manager [None req-12f12914-bee9-4598-82bd-c3b629c1401f tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] [instance: 2ea8304e-5b91-4908-a876-6e2c780b1da9] Allocating IP information in the background. {{(pid=62519) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1989}} [ 1756.772566] env[62519]: DEBUG nova.network.neutron [None req-12f12914-bee9-4598-82bd-c3b629c1401f tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] [instance: 2ea8304e-5b91-4908-a876-6e2c780b1da9] allocate_for_instance() {{(pid=62519) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1756.887732] env[62519]: DEBUG nova.policy [None req-12f12914-bee9-4598-82bd-c3b629c1401f tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '710cbaa277014d8389c2c24cd739dbde', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '448555031bb64aefafd0fcc67f4df10a', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62519) authorize /opt/stack/nova/nova/policy.py:192}} [ 1756.922799] env[62519]: DEBUG nova.objects.instance [None req-7972f1f6-95b2-485b-b78c-3e695e516e72 tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] Lazy-loading 'flavor' on Instance uuid 99f22198-1a65-4d0d-b665-90c7063dbdb9 {{(pid=62519) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1756.952545] env[62519]: INFO nova.compute.manager [-] [instance: 765cf18e-53a0-4cc6-ad0e-337a6f68915c] Took 2.17 seconds to deallocate network for instance. 
[ 1756.976018] env[62519]: DEBUG oslo_vmware.api [None req-51df1d83-d7cf-4798-abd3-f19fe8d9b526 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] Task: {'id': task-1802877, 'name': ReconfigVM_Task, 'duration_secs': 0.577548} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1756.976018] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-51df1d83-d7cf-4798-abd3-f19fe8d9b526 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] [instance: 76786353-f93f-4e7e-b3f7-7f22ae4b7b41] Reconfigured VM instance instance-00000051 to attach disk [datastore1] 76786353-f93f-4e7e-b3f7-7f22ae4b7b41/76786353-f93f-4e7e-b3f7-7f22ae4b7b41.vmdk or device None with type sparse {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1756.976018] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-293252f4-5e1f-47a2-80e2-120d0d9e028a {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1756.985124] env[62519]: DEBUG oslo_vmware.api [None req-51df1d83-d7cf-4798-abd3-f19fe8d9b526 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] Waiting for the task: (returnval){ [ 1756.985124] env[62519]: value = "task-1802882" [ 1756.985124] env[62519]: _type = "Task" [ 1756.985124] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1756.998498] env[62519]: DEBUG oslo_vmware.api [None req-51df1d83-d7cf-4798-abd3-f19fe8d9b526 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] Task: {'id': task-1802882, 'name': Rename_Task} progress is 5%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1757.037237] env[62519]: DEBUG oslo_vmware.api [None req-51df1d83-d7cf-4798-abd3-f19fe8d9b526 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] Task: {'id': task-1802879, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.104411} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1757.037544] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-51df1d83-d7cf-4798-abd3-f19fe8d9b526 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] [instance: af422ca1-7966-4bed-97bf-2b4c5285eaab] Extended root virtual disk {{(pid=62519) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1757.039326] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39645836-0be9-4c18-bf74-6feef4da98a3 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1757.063772] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-51df1d83-d7cf-4798-abd3-f19fe8d9b526 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] [instance: af422ca1-7966-4bed-97bf-2b4c5285eaab] Reconfiguring VM instance instance-00000052 to attach disk [datastore1] af422ca1-7966-4bed-97bf-2b4c5285eaab/af422ca1-7966-4bed-97bf-2b4c5285eaab.vmdk or device None with type sparse {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1757.064158] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6f3f7a18-5b4d-412d-aebe-d51e300ddc4f {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1757.091059] env[62519]: DEBUG oslo_vmware.api [None req-db7eaaae-d24f-47e5-8a15-07d26fb6316e tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Task: {'id': task-1802880, 'name': RemoveSnapshot_Task} progress is 97%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1757.092838] env[62519]: DEBUG oslo_vmware.api [None req-51df1d83-d7cf-4798-abd3-f19fe8d9b526 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] Waiting for the task: (returnval){ [ 1757.092838] env[62519]: value = "task-1802883" [ 1757.092838] env[62519]: _type = "Task" [ 1757.092838] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1757.108736] env[62519]: DEBUG oslo_vmware.api [None req-51df1d83-d7cf-4798-abd3-f19fe8d9b526 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] Task: {'id': task-1802883, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1757.119233] env[62519]: DEBUG nova.network.neutron [-] [instance: c8816718-0a35-4474-b162-c619b0acc154] Updating instance_info_cache with network_info: [] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1757.121479] env[62519]: DEBUG nova.compute.manager [req-6be4c689-8397-4cf4-a686-e72387bcccf2 req-313c9ec0-16fb-42e8-9479-dc45de6ee35a service nova] [instance: ee4b10ba-1c56-47cf-a528-d6e65c286ddb] Received event network-vif-plugged-99b49b4e-aba5-450d-a2db-3b35a0313a5b {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1757.121785] env[62519]: DEBUG oslo_concurrency.lockutils [req-6be4c689-8397-4cf4-a686-e72387bcccf2 req-313c9ec0-16fb-42e8-9479-dc45de6ee35a service nova] Acquiring lock "ee4b10ba-1c56-47cf-a528-d6e65c286ddb-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1757.122099] env[62519]: DEBUG oslo_concurrency.lockutils [req-6be4c689-8397-4cf4-a686-e72387bcccf2 req-313c9ec0-16fb-42e8-9479-dc45de6ee35a service nova] Lock "ee4b10ba-1c56-47cf-a528-d6e65c286ddb-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1757.122379] env[62519]: DEBUG oslo_concurrency.lockutils [req-6be4c689-8397-4cf4-a686-e72387bcccf2 req-313c9ec0-16fb-42e8-9479-dc45de6ee35a service nova] Lock "ee4b10ba-1c56-47cf-a528-d6e65c286ddb-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1757.122646] env[62519]: DEBUG nova.compute.manager [req-6be4c689-8397-4cf4-a686-e72387bcccf2 req-313c9ec0-16fb-42e8-9479-dc45de6ee35a service nova] [instance: ee4b10ba-1c56-47cf-a528-d6e65c286ddb] No waiting events found dispatching network-vif-plugged-99b49b4e-aba5-450d-a2db-3b35a0313a5b {{(pid=62519) pop_instance_event /opt/stack/nova/nova/compute/manager.py:323}} [ 1757.123031] env[62519]: WARNING nova.compute.manager [req-6be4c689-8397-4cf4-a686-e72387bcccf2 req-313c9ec0-16fb-42e8-9479-dc45de6ee35a service nova] [instance: ee4b10ba-1c56-47cf-a528-d6e65c286ddb] Received unexpected event network-vif-plugged-99b49b4e-aba5-450d-a2db-3b35a0313a5b for instance with vm_state building and task_state spawning. [ 1757.273474] env[62519]: DEBUG nova.compute.manager [None req-12f12914-bee9-4598-82bd-c3b629c1401f tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] [instance: 2ea8304e-5b91-4908-a876-6e2c780b1da9] Start building block device mappings for instance. 
{{(pid=62519) _build_resources /opt/stack/nova/nova/compute/manager.py:2873}} [ 1757.301745] env[62519]: DEBUG nova.network.neutron [None req-84dc4058-6eb4-4380-b9f1-346dccf99218 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] [instance: ee4b10ba-1c56-47cf-a528-d6e65c286ddb] Successfully updated port: 99b49b4e-aba5-450d-a2db-3b35a0313a5b {{(pid=62519) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1757.432802] env[62519]: DEBUG oslo_concurrency.lockutils [None req-7972f1f6-95b2-485b-b78c-3e695e516e72 tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] Lock "99f22198-1a65-4d0d-b665-90c7063dbdb9" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.956s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1757.443319] env[62519]: DEBUG nova.network.neutron [req-2e678d5d-759b-4c0e-a1b2-463ad0aa9c61 req-7afa66ff-9164-4516-95ee-cd4d419ba0e1 service nova] [instance: 156ed02a-3365-4a4f-b4de-ea86920d3baf] Updated VIF entry in instance network info cache for port af20c500-c1f3-427c-a59d-e8a17b0a6945. {{(pid=62519) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1757.443319] env[62519]: DEBUG nova.network.neutron [req-2e678d5d-759b-4c0e-a1b2-463ad0aa9c61 req-7afa66ff-9164-4516-95ee-cd4d419ba0e1 service nova] [instance: 156ed02a-3365-4a4f-b4de-ea86920d3baf] Updating instance_info_cache with network_info: [{"id": "af20c500-c1f3-427c-a59d-e8a17b0a6945", "address": "fa:16:3e:2a:c1:c6", "network": {"id": "e0faae63-5ec4-4eef-a323-174dc4b623b5", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-1005479736-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.150", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "7336e5aa2f9b42789cec48f07e586876", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4055505f-97ab-400b-969c-43d99b38fd48", "external-id": "nsx-vlan-transportzone-952", "segmentation_id": 952, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapaf20c500-c1", "ovs_interfaceid": "af20c500-c1f3-427c-a59d-e8a17b0a6945", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1757.502162] env[62519]: DEBUG oslo_vmware.api [None req-51df1d83-d7cf-4798-abd3-f19fe8d9b526 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] Task: {'id': task-1802882, 'name': Rename_Task, 'duration_secs': 0.192878} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1757.502162] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-51df1d83-d7cf-4798-abd3-f19fe8d9b526 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] [instance: 76786353-f93f-4e7e-b3f7-7f22ae4b7b41] Powering on the VM {{(pid=62519) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1757.504380] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-af86f19c-c0cd-4ea2-b443-f9f157477caf {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1757.512458] env[62519]: DEBUG oslo_vmware.api [None req-51df1d83-d7cf-4798-abd3-f19fe8d9b526 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] Waiting for the task: (returnval){ [ 1757.512458] env[62519]: value = "task-1802884" [ 1757.512458] env[62519]: _type = "Task" [ 1757.512458] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1757.519687] env[62519]: INFO nova.compute.manager [None req-c39ff447-939e-4290-a599-7c5d8a75bafa tempest-ServersTestBootFromVolume-845595414 tempest-ServersTestBootFromVolume-845595414-project-member] [instance: 765cf18e-53a0-4cc6-ad0e-337a6f68915c] Took 0.57 seconds to detach 1 volumes for instance. [ 1757.522366] env[62519]: DEBUG nova.network.neutron [None req-12f12914-bee9-4598-82bd-c3b629c1401f tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] [instance: 2ea8304e-5b91-4908-a876-6e2c780b1da9] Successfully created port: 224051cd-32a2-49fc-b4a3-523f444e85a2 {{(pid=62519) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1757.525170] env[62519]: DEBUG nova.compute.manager [None req-c39ff447-939e-4290-a599-7c5d8a75bafa tempest-ServersTestBootFromVolume-845595414 tempest-ServersTestBootFromVolume-845595414-project-member] [instance: 765cf18e-53a0-4cc6-ad0e-337a6f68915c] Deleting volume: 9692c2cd-3d11-41b6-88ba-eae1158b8c33 {{(pid=62519) _cleanup_volumes /opt/stack/nova/nova/compute/manager.py:3284}} [ 1757.532280] env[62519]: DEBUG oslo_vmware.api [None req-51df1d83-d7cf-4798-abd3-f19fe8d9b526 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] Task: {'id': task-1802884, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1757.582770] env[62519]: DEBUG oslo_vmware.api [None req-db7eaaae-d24f-47e5-8a15-07d26fb6316e tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Task: {'id': task-1802880, 'name': RemoveSnapshot_Task, 'duration_secs': 0.712502} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1757.582841] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-db7eaaae-d24f-47e5-8a15-07d26fb6316e tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] [instance: dd60afd6-2834-4fca-a846-e39d57aabd60] Deleted Snapshot of the VM instance {{(pid=62519) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1757.605783] env[62519]: DEBUG oslo_vmware.api [None req-51df1d83-d7cf-4798-abd3-f19fe8d9b526 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] Task: {'id': task-1802883, 'name': ReconfigVM_Task, 'duration_secs': 0.350258} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1757.606181] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-51df1d83-d7cf-4798-abd3-f19fe8d9b526 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] [instance: af422ca1-7966-4bed-97bf-2b4c5285eaab] Reconfigured VM instance instance-00000052 to attach disk [datastore1] af422ca1-7966-4bed-97bf-2b4c5285eaab/af422ca1-7966-4bed-97bf-2b4c5285eaab.vmdk or device None with type sparse {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1757.606971] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-7f9797db-1ab6-442e-9679-dc55705d52bd {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1757.616790] env[62519]: DEBUG oslo_vmware.api [None req-51df1d83-d7cf-4798-abd3-f19fe8d9b526 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] Waiting for the task: (returnval){ [ 1757.616790] env[62519]: value = "task-1802885" [ 1757.616790] env[62519]: _type = "Task" [ 1757.616790] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1757.625207] env[62519]: INFO nova.compute.manager [-] [instance: c8816718-0a35-4474-b162-c619b0acc154] Took 1.60 seconds to deallocate network for instance. [ 1757.631304] env[62519]: DEBUG oslo_vmware.api [None req-51df1d83-d7cf-4798-abd3-f19fe8d9b526 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] Task: {'id': task-1802885, 'name': Rename_Task} progress is 5%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1757.742017] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f3cd67c-21d9-4de1-aa6b-bb1be0718eb6 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1757.749461] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95030e70-a0bb-49fb-af07-2ff22890065d {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1757.786109] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d381fc4-7190-462f-8c54-d49fe60a515c {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1757.795622] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05b66a02-cb62-4b92-9c40-bcd803505566 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1757.813181] env[62519]: DEBUG oslo_concurrency.lockutils [None req-84dc4058-6eb4-4380-b9f1-346dccf99218 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Acquiring lock "refresh_cache-ee4b10ba-1c56-47cf-a528-d6e65c286ddb" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1757.813502] env[62519]: DEBUG oslo_concurrency.lockutils [None req-84dc4058-6eb4-4380-b9f1-346dccf99218 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Acquired lock "refresh_cache-ee4b10ba-1c56-47cf-a528-d6e65c286ddb" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1757.813755] env[62519]: DEBUG nova.network.neutron [None req-84dc4058-6eb4-4380-b9f1-346dccf99218 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] [instance: ee4b10ba-1c56-47cf-a528-d6e65c286ddb] Building network info cache for instance {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1757.815335] env[62519]: DEBUG nova.compute.provider_tree [None req-caa9ca9c-57be-478a-8782-107ca9749996 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Updating inventory in ProviderTree for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 157, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1757.946349] env[62519]: DEBUG oslo_concurrency.lockutils [req-2e678d5d-759b-4c0e-a1b2-463ad0aa9c61 req-7afa66ff-9164-4516-95ee-cd4d419ba0e1 service nova] Releasing lock "refresh_cache-156ed02a-3365-4a4f-b4de-ea86920d3baf" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1757.946925] env[62519]: DEBUG nova.compute.manager [req-2e678d5d-759b-4c0e-a1b2-463ad0aa9c61 
req-7afa66ff-9164-4516-95ee-cd4d419ba0e1 service nova] [instance: 765cf18e-53a0-4cc6-ad0e-337a6f68915c] Received event network-vif-deleted-53564e7d-5a36-43d6-a79a-d13c12623d01 {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1758.030070] env[62519]: DEBUG oslo_vmware.api [None req-51df1d83-d7cf-4798-abd3-f19fe8d9b526 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] Task: {'id': task-1802884, 'name': PowerOnVM_Task} progress is 71%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1758.093251] env[62519]: WARNING nova.compute.manager [None req-db7eaaae-d24f-47e5-8a15-07d26fb6316e tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] [instance: dd60afd6-2834-4fca-a846-e39d57aabd60] Image not found during snapshot: nova.exception.ImageNotFound: Image 8e3b2975-9973-4787-adba-5db18d1935c6 could not be found. [ 1758.128183] env[62519]: DEBUG oslo_vmware.api [None req-51df1d83-d7cf-4798-abd3-f19fe8d9b526 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] Task: {'id': task-1802885, 'name': Rename_Task, 'duration_secs': 0.412083} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1758.128736] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-51df1d83-d7cf-4798-abd3-f19fe8d9b526 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] [instance: af422ca1-7966-4bed-97bf-2b4c5285eaab] Powering on the VM {{(pid=62519) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1758.129163] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-fe30799a-0f73-4485-a06e-5ea298a05e6a {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1758.136766] env[62519]: DEBUG oslo_concurrency.lockutils [None req-7937b940-c110-4d34-9726-761103aa71cf tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1758.140122] env[62519]: DEBUG oslo_vmware.api [None req-51df1d83-d7cf-4798-abd3-f19fe8d9b526 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] Waiting for the task: (returnval){ [ 1758.140122] env[62519]: value = "task-1802887" [ 1758.140122] env[62519]: _type = "Task" [ 1758.140122] env[62519]: } to complete. 
{{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1758.155281] env[62519]: DEBUG oslo_concurrency.lockutils [None req-c39ff447-939e-4290-a599-7c5d8a75bafa tempest-ServersTestBootFromVolume-845595414 tempest-ServersTestBootFromVolume-845595414-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1758.155641] env[62519]: DEBUG oslo_vmware.api [None req-51df1d83-d7cf-4798-abd3-f19fe8d9b526 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] Task: {'id': task-1802887, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1758.292021] env[62519]: DEBUG nova.compute.manager [None req-12f12914-bee9-4598-82bd-c3b629c1401f tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] [instance: 2ea8304e-5b91-4908-a876-6e2c780b1da9] Start spawning the instance on the hypervisor. {{(pid=62519) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2647}} [ 1758.321141] env[62519]: DEBUG nova.virt.hardware [None req-12f12914-bee9-4598-82bd-c3b629c1401f tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T07:56:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T07:55:55Z,direct_url=,disk_format='vmdk',id=15793716-f1d9-4a86-9030-717adf498693,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4069337684d348e0a1ab4eb6a1a2b14d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T07:55:56Z,virtual_size=,visibility=), allow threads: False {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1758.321141] env[62519]: DEBUG nova.virt.hardware [None req-12f12914-bee9-4598-82bd-c3b629c1401f tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Flavor limits 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1758.321141] env[62519]: DEBUG nova.virt.hardware [None req-12f12914-bee9-4598-82bd-c3b629c1401f tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Image limits 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1758.321141] env[62519]: DEBUG nova.virt.hardware [None req-12f12914-bee9-4598-82bd-c3b629c1401f tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Flavor pref 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1758.321747] env[62519]: DEBUG nova.virt.hardware [None req-12f12914-bee9-4598-82bd-c3b629c1401f tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Image pref 0:0:0 {{(pid=62519) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1758.322105] env[62519]: DEBUG nova.virt.hardware [None req-12f12914-bee9-4598-82bd-c3b629c1401f tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1758.322517] env[62519]: DEBUG nova.virt.hardware [None req-12f12914-bee9-4598-82bd-c3b629c1401f tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1758.322847] env[62519]: DEBUG nova.virt.hardware [None req-12f12914-bee9-4598-82bd-c3b629c1401f tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62519) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1758.323257] env[62519]: DEBUG nova.virt.hardware [None req-12f12914-bee9-4598-82bd-c3b629c1401f tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Got 1 possible topologies {{(pid=62519) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1758.323595] env[62519]: DEBUG nova.virt.hardware [None req-12f12914-bee9-4598-82bd-c3b629c1401f tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1758.324290] env[62519]: DEBUG nova.virt.hardware [None req-12f12914-bee9-4598-82bd-c3b629c1401f tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1758.333754] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d86c6327-8a8d-4c09-9fe4-66142e4edad1 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1758.345341] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f66b3205-5b5d-4040-85ee-bc5ffceca1c2 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1758.363977] env[62519]: ERROR nova.scheduler.client.report [None req-caa9ca9c-57be-478a-8782-107ca9749996 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] [req-5a51ab26-bb11-4710-9ddc-f35663b68c77] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 157, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID f8ca0d98-9158-4b85-ae0e-b106f966dd44. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-5a51ab26-bb11-4710-9ddc-f35663b68c77"}]} [ 1758.372642] env[62519]: DEBUG nova.network.neutron [None req-84dc4058-6eb4-4380-b9f1-346dccf99218 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] [instance: ee4b10ba-1c56-47cf-a528-d6e65c286ddb] Instance cache missing network info. {{(pid=62519) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1758.382728] env[62519]: DEBUG nova.scheduler.client.report [None req-caa9ca9c-57be-478a-8782-107ca9749996 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Refreshing inventories for resource provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 {{(pid=62519) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 1758.408351] env[62519]: DEBUG nova.scheduler.client.report [None req-caa9ca9c-57be-478a-8782-107ca9749996 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Updating ProviderTree inventory for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 1758.408718] env[62519]: DEBUG nova.compute.provider_tree [None req-caa9ca9c-57be-478a-8782-107ca9749996 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Updating inventory in ProviderTree for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1758.424603] env[62519]: DEBUG nova.scheduler.client.report [None req-caa9ca9c-57be-478a-8782-107ca9749996 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Refreshing aggregate associations for resource provider f8ca0d98-9158-4b85-ae0e-b106f966dd44, aggregates: None {{(pid=62519) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 1758.443817] env[62519]: DEBUG nova.scheduler.client.report [None req-caa9ca9c-57be-478a-8782-107ca9749996 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Refreshing trait associations for resource provider f8ca0d98-9158-4b85-ae0e-b106f966dd44, traits: COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NODE,COMPUTE_SAME_HOST_COLD_MIGRATE,HW_ARCH_X86_64,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_VMDK {{(pid=62519) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 1758.525525] env[62519]: DEBUG 
oslo_vmware.api [None req-51df1d83-d7cf-4798-abd3-f19fe8d9b526 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] Task: {'id': task-1802884, 'name': PowerOnVM_Task, 'duration_secs': 0.7457} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1758.525814] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-51df1d83-d7cf-4798-abd3-f19fe8d9b526 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] [instance: 76786353-f93f-4e7e-b3f7-7f22ae4b7b41] Powered on the VM {{(pid=62519) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1758.526023] env[62519]: INFO nova.compute.manager [None req-51df1d83-d7cf-4798-abd3-f19fe8d9b526 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] [instance: 76786353-f93f-4e7e-b3f7-7f22ae4b7b41] Took 10.01 seconds to spawn the instance on the hypervisor. [ 1758.526219] env[62519]: DEBUG nova.compute.manager [None req-51df1d83-d7cf-4798-abd3-f19fe8d9b526 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] [instance: 76786353-f93f-4e7e-b3f7-7f22ae4b7b41] Checking state {{(pid=62519) _get_power_state /opt/stack/nova/nova/compute/manager.py:1799}} [ 1758.527141] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b824339-4341-408f-99dc-be10d87006c4 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1758.542715] env[62519]: DEBUG nova.network.neutron [None req-84dc4058-6eb4-4380-b9f1-346dccf99218 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] [instance: ee4b10ba-1c56-47cf-a528-d6e65c286ddb] Updating instance_info_cache with network_info: [{"id": "99b49b4e-aba5-450d-a2db-3b35a0313a5b", "address": "fa:16:3e:1f:a9:ee", "network": {"id": "8d760b11-0e90-40f8-a7a6-509485520bf3", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-619921944-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "448555031bb64aefafd0fcc67f4df10a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4225eb1f-0af4-4ed4-8e3d-de822eb6d4ea", "external-id": "nsx-vlan-transportzone-40", "segmentation_id": 40, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap99b49b4e-ab", "ovs_interfaceid": "99b49b4e-aba5-450d-a2db-3b35a0313a5b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1758.652086] env[62519]: DEBUG oslo_vmware.api [None req-51df1d83-d7cf-4798-abd3-f19fe8d9b526 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] Task: {'id': task-1802887, 'name': PowerOnVM_Task} progress is 94%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1758.781410] env[62519]: DEBUG oslo_concurrency.lockutils [None req-dfc779c2-2320-4981-a07b-ca26adeabb8d tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] Acquiring lock "99f22198-1a65-4d0d-b665-90c7063dbdb9" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1758.781668] env[62519]: DEBUG oslo_concurrency.lockutils [None req-dfc779c2-2320-4981-a07b-ca26adeabb8d tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] Lock "99f22198-1a65-4d0d-b665-90c7063dbdb9" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1758.781856] env[62519]: DEBUG nova.compute.manager [None req-dfc779c2-2320-4981-a07b-ca26adeabb8d tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] [instance: 99f22198-1a65-4d0d-b665-90c7063dbdb9] Checking state {{(pid=62519) _get_power_state /opt/stack/nova/nova/compute/manager.py:1799}} [ 1758.783118] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d35b879-fe8a-45c6-9705-ec5afd73fa35 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1758.792522] env[62519]: DEBUG nova.compute.manager [None req-dfc779c2-2320-4981-a07b-ca26adeabb8d tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] [instance: 99f22198-1a65-4d0d-b665-90c7063dbdb9] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=62519) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3408}} [ 1758.793386] env[62519]: DEBUG nova.objects.instance [None req-dfc779c2-2320-4981-a07b-ca26adeabb8d tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] Lazy-loading 'flavor' on Instance uuid 99f22198-1a65-4d0d-b665-90c7063dbdb9 {{(pid=62519) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1758.821925] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03b73d68-e997-4f4e-b7e2-2bf91744c305 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1758.833722] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a735fee1-04bf-4197-9c87-418acacf82fd {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1758.873325] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53d858ce-14f8-4d3b-90c9-37288a05f855 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1758.884021] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2732351-2bcb-4ae5-82bc-b988a52ebda6 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1758.903761] env[62519]: DEBUG 
nova.compute.provider_tree [None req-caa9ca9c-57be-478a-8782-107ca9749996 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Updating inventory in ProviderTree for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 157, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1759.007522] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-3fcdc708-adeb-4150-ac24-b9b0d07e602e tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] [instance: 88f9351c-253b-49dd-a88e-b8585ea742ac] Volume attach. Driver type: vmdk {{(pid=62519) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1759.007761] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-3fcdc708-adeb-4150-ac24-b9b0d07e602e tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] [instance: 88f9351c-253b-49dd-a88e-b8585ea742ac] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-373788', 'volume_id': '5bd7c4a6-d552-4040-aefb-7d5577b89149', 'name': 'volume-5bd7c4a6-d552-4040-aefb-7d5577b89149', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '88f9351c-253b-49dd-a88e-b8585ea742ac', 'attached_at': '', 'detached_at': '', 'volume_id': '5bd7c4a6-d552-4040-aefb-7d5577b89149', 'serial': '5bd7c4a6-d552-4040-aefb-7d5577b89149'} {{(pid=62519) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1759.008663] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb72451a-57e9-40f9-875d-df13b4be20ba {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1759.026421] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28c86fa5-70ac-42eb-8b03-d3943f609509 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1759.045801] env[62519]: DEBUG oslo_concurrency.lockutils [None req-84dc4058-6eb4-4380-b9f1-346dccf99218 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Releasing lock "refresh_cache-ee4b10ba-1c56-47cf-a528-d6e65c286ddb" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1759.046278] env[62519]: DEBUG nova.compute.manager [None req-84dc4058-6eb4-4380-b9f1-346dccf99218 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] [instance: ee4b10ba-1c56-47cf-a528-d6e65c286ddb] Instance network_info: |[{"id": "99b49b4e-aba5-450d-a2db-3b35a0313a5b", "address": "fa:16:3e:1f:a9:ee", "network": {"id": "8d760b11-0e90-40f8-a7a6-509485520bf3", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-619921944-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": 
"192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "448555031bb64aefafd0fcc67f4df10a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4225eb1f-0af4-4ed4-8e3d-de822eb6d4ea", "external-id": "nsx-vlan-transportzone-40", "segmentation_id": 40, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap99b49b4e-ab", "ovs_interfaceid": "99b49b4e-aba5-450d-a2db-3b35a0313a5b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62519) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2004}} [ 1759.060188] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-3fcdc708-adeb-4150-ac24-b9b0d07e602e tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] [instance: 88f9351c-253b-49dd-a88e-b8585ea742ac] Reconfiguring VM instance instance-00000047 to attach disk [datastore1] volume-5bd7c4a6-d552-4040-aefb-7d5577b89149/volume-5bd7c4a6-d552-4040-aefb-7d5577b89149.vmdk or device None with type thin {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1759.060834] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-84dc4058-6eb4-4380-b9f1-346dccf99218 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] [instance: ee4b10ba-1c56-47cf-a528-d6e65c286ddb] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:1f:a9:ee', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '4225eb1f-0af4-4ed4-8e3d-de822eb6d4ea', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '99b49b4e-aba5-450d-a2db-3b35a0313a5b', 'vif_model': 'vmxnet3'}] {{(pid=62519) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1759.074350] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-84dc4058-6eb4-4380-b9f1-346dccf99218 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Creating folder: Project (448555031bb64aefafd0fcc67f4df10a). Parent ref: group-v373567. {{(pid=62519) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1759.082316] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2d468678-8b7a-4731-afd5-b4525df3ec7f {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1759.098696] env[62519]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-494b00ac-73ea-4c50-a10d-43608bd1c2c4 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1759.101803] env[62519]: INFO nova.compute.manager [None req-51df1d83-d7cf-4798-abd3-f19fe8d9b526 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] [instance: 76786353-f93f-4e7e-b3f7-7f22ae4b7b41] Took 29.89 seconds to build instance. 
[ 1759.109205] env[62519]: DEBUG oslo_vmware.api [None req-3fcdc708-adeb-4150-ac24-b9b0d07e602e tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Waiting for the task: (returnval){ [ 1759.109205] env[62519]: value = "task-1802889" [ 1759.109205] env[62519]: _type = "Task" [ 1759.109205] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1759.114489] env[62519]: INFO nova.virt.vmwareapi.vm_util [None req-84dc4058-6eb4-4380-b9f1-346dccf99218 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Created folder: Project (448555031bb64aefafd0fcc67f4df10a) in parent group-v373567. [ 1759.114653] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-84dc4058-6eb4-4380-b9f1-346dccf99218 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Creating folder: Instances. Parent ref: group-v373789. {{(pid=62519) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1759.115250] env[62519]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-819504b3-348b-4275-b66b-c0403c47069e {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1759.126082] env[62519]: DEBUG oslo_vmware.api [None req-3fcdc708-adeb-4150-ac24-b9b0d07e602e tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Task: {'id': task-1802889, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1759.130190] env[62519]: INFO nova.virt.vmwareapi.vm_util [None req-84dc4058-6eb4-4380-b9f1-346dccf99218 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Created folder: Instances in parent group-v373789. [ 1759.130443] env[62519]: DEBUG oslo.service.loopingcall [None req-84dc4058-6eb4-4380-b9f1-346dccf99218 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62519) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1759.130655] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ee4b10ba-1c56-47cf-a528-d6e65c286ddb] Creating VM on the ESX host {{(pid=62519) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1759.130883] env[62519]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-db88120c-77ec-4b53-b8d9-6f9109e7722d {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1759.155623] env[62519]: DEBUG nova.compute.manager [req-0fc77b8e-c4ba-42ff-8b4a-db3285e3fc10 req-8a8a08a3-ebdb-46e5-b05e-066f408f15ea service nova] [instance: c8816718-0a35-4474-b162-c619b0acc154] Received event network-vif-deleted-3a9391b3-bc92-4564-b965-73ca1d19957d {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1759.155623] env[62519]: DEBUG nova.compute.manager [req-0fc77b8e-c4ba-42ff-8b4a-db3285e3fc10 req-8a8a08a3-ebdb-46e5-b05e-066f408f15ea service nova] [instance: 156ed02a-3365-4a4f-b4de-ea86920d3baf] Received event network-changed-af20c500-c1f3-427c-a59d-e8a17b0a6945 {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1759.155623] env[62519]: DEBUG nova.compute.manager [req-0fc77b8e-c4ba-42ff-8b4a-db3285e3fc10 req-8a8a08a3-ebdb-46e5-b05e-066f408f15ea service nova] [instance: 156ed02a-3365-4a4f-b4de-ea86920d3baf] Refreshing instance network info cache due to event network-changed-af20c500-c1f3-427c-a59d-e8a17b0a6945. {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11628}} [ 1759.155623] env[62519]: DEBUG oslo_concurrency.lockutils [req-0fc77b8e-c4ba-42ff-8b4a-db3285e3fc10 req-8a8a08a3-ebdb-46e5-b05e-066f408f15ea service nova] Acquiring lock "refresh_cache-156ed02a-3365-4a4f-b4de-ea86920d3baf" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1759.155623] env[62519]: DEBUG oslo_concurrency.lockutils [req-0fc77b8e-c4ba-42ff-8b4a-db3285e3fc10 req-8a8a08a3-ebdb-46e5-b05e-066f408f15ea service nova] Acquired lock "refresh_cache-156ed02a-3365-4a4f-b4de-ea86920d3baf" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1759.155623] env[62519]: DEBUG nova.network.neutron [req-0fc77b8e-c4ba-42ff-8b4a-db3285e3fc10 req-8a8a08a3-ebdb-46e5-b05e-066f408f15ea service nova] [instance: 156ed02a-3365-4a4f-b4de-ea86920d3baf] Refreshing network info cache for port af20c500-c1f3-427c-a59d-e8a17b0a6945 {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1759.157809] env[62519]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1759.157809] env[62519]: value = "task-1802891" [ 1759.157809] env[62519]: _type = "Task" [ 1759.157809] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1759.165664] env[62519]: DEBUG oslo_vmware.api [None req-51df1d83-d7cf-4798-abd3-f19fe8d9b526 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] Task: {'id': task-1802887, 'name': PowerOnVM_Task, 'duration_secs': 0.64514} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1759.166415] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-51df1d83-d7cf-4798-abd3-f19fe8d9b526 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] [instance: af422ca1-7966-4bed-97bf-2b4c5285eaab] Powered on the VM {{(pid=62519) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1759.166633] env[62519]: INFO nova.compute.manager [None req-51df1d83-d7cf-4798-abd3-f19fe8d9b526 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] [instance: af422ca1-7966-4bed-97bf-2b4c5285eaab] Took 8.10 seconds to spawn the instance on the hypervisor. [ 1759.170017] env[62519]: DEBUG nova.compute.manager [None req-51df1d83-d7cf-4798-abd3-f19fe8d9b526 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] [instance: af422ca1-7966-4bed-97bf-2b4c5285eaab] Checking state {{(pid=62519) _get_power_state /opt/stack/nova/nova/compute/manager.py:1799}} [ 1759.170017] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f16dc7ed-cf8d-4e7e-a8a9-61d77ed56c69 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1759.174246] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1802891, 'name': CreateVM_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1759.398200] env[62519]: DEBUG nova.compute.manager [req-91ec5088-6891-4462-8b46-03a380baa7ca req-6fd82bc3-314e-4e76-9e9e-dca1b8058164 service nova] [instance: ee4b10ba-1c56-47cf-a528-d6e65c286ddb] Received event network-changed-99b49b4e-aba5-450d-a2db-3b35a0313a5b {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1759.398200] env[62519]: DEBUG nova.compute.manager [req-91ec5088-6891-4462-8b46-03a380baa7ca req-6fd82bc3-314e-4e76-9e9e-dca1b8058164 service nova] [instance: ee4b10ba-1c56-47cf-a528-d6e65c286ddb] Refreshing instance network info cache due to event network-changed-99b49b4e-aba5-450d-a2db-3b35a0313a5b. 
{{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11628}} [ 1759.398686] env[62519]: DEBUG oslo_concurrency.lockutils [req-91ec5088-6891-4462-8b46-03a380baa7ca req-6fd82bc3-314e-4e76-9e9e-dca1b8058164 service nova] Acquiring lock "refresh_cache-ee4b10ba-1c56-47cf-a528-d6e65c286ddb" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1759.399028] env[62519]: DEBUG oslo_concurrency.lockutils [req-91ec5088-6891-4462-8b46-03a380baa7ca req-6fd82bc3-314e-4e76-9e9e-dca1b8058164 service nova] Acquired lock "refresh_cache-ee4b10ba-1c56-47cf-a528-d6e65c286ddb" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1759.399403] env[62519]: DEBUG nova.network.neutron [req-91ec5088-6891-4462-8b46-03a380baa7ca req-6fd82bc3-314e-4e76-9e9e-dca1b8058164 service nova] [instance: ee4b10ba-1c56-47cf-a528-d6e65c286ddb] Refreshing network info cache for port 99b49b4e-aba5-450d-a2db-3b35a0313a5b {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1759.461031] env[62519]: DEBUG nova.scheduler.client.report [None req-caa9ca9c-57be-478a-8782-107ca9749996 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Updated inventory for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 with generation 116 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 157, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 1759.461031] env[62519]: DEBUG nova.compute.provider_tree [None req-caa9ca9c-57be-478a-8782-107ca9749996 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Updating resource provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 generation from 116 to 117 during operation: update_inventory {{(pid=62519) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1759.461031] env[62519]: DEBUG nova.compute.provider_tree [None req-caa9ca9c-57be-478a-8782-107ca9749996 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Updating inventory in ProviderTree for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 157, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1759.464247] env[62519]: DEBUG oslo_concurrency.lockutils [None req-f01efc4a-1a23-4e62-a702-1f856670c4f6 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Acquiring lock "dd60afd6-2834-4fca-a846-e39d57aabd60" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1759.464787] env[62519]: DEBUG 
oslo_concurrency.lockutils [None req-f01efc4a-1a23-4e62-a702-1f856670c4f6 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Lock "dd60afd6-2834-4fca-a846-e39d57aabd60" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1759.465394] env[62519]: DEBUG oslo_concurrency.lockutils [None req-f01efc4a-1a23-4e62-a702-1f856670c4f6 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Acquiring lock "dd60afd6-2834-4fca-a846-e39d57aabd60-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1759.465792] env[62519]: DEBUG oslo_concurrency.lockutils [None req-f01efc4a-1a23-4e62-a702-1f856670c4f6 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Lock "dd60afd6-2834-4fca-a846-e39d57aabd60-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1759.466243] env[62519]: DEBUG oslo_concurrency.lockutils [None req-f01efc4a-1a23-4e62-a702-1f856670c4f6 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Lock "dd60afd6-2834-4fca-a846-e39d57aabd60-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1759.470518] env[62519]: INFO nova.compute.manager [None req-f01efc4a-1a23-4e62-a702-1f856670c4f6 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] [instance: dd60afd6-2834-4fca-a846-e39d57aabd60] Terminating instance [ 1759.627432] env[62519]: DEBUG oslo_vmware.api [None req-3fcdc708-adeb-4150-ac24-b9b0d07e602e tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Task: {'id': task-1802889, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1759.671026] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1802891, 'name': CreateVM_Task} progress is 99%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1759.697972] env[62519]: INFO nova.compute.manager [None req-51df1d83-d7cf-4798-abd3-f19fe8d9b526 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] [instance: af422ca1-7966-4bed-97bf-2b4c5285eaab] Took 30.44 seconds to build instance. 
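The 409 "placement.concurrent_update" error earlier in this section, followed by the inventory/aggregate/trait refresh and the provider generation moving from 116 to 117, reflects Placement's optimistic-concurrency protocol: every inventory PUT carries the provider generation the caller last read, and a stale generation is rejected so the caller must re-read and retry. A minimal sketch of that read-modify-write loop against the Placement HTTP API follows; the endpoint URL, token, and microversion header value are assumptions for illustration, not values from this deployment.

```python
# Illustrative only: retrying an inventory update on a Placement generation
# conflict (HTTP 409, code "placement.concurrent_update"). PLACEMENT_URL and
# TOKEN are assumed placeholders; request/response shapes follow the public
# Placement API, where resource_provider_generation must match the server's.
import requests

PLACEMENT_URL = "http://placement.example:8778"   # assumption
TOKEN = "<auth-token>"                            # assumption
HEADERS = {"x-auth-token": TOKEN,
           "OpenStack-API-Version": "placement 1.26"}


def set_inventory_with_retry(rp_uuid: str, inventories: dict,
                             attempts: int = 3) -> dict:
    """PUT the desired inventories, re-reading the generation on each 409."""
    for _ in range(attempts):
        # Read the provider's current generation before writing.
        current = requests.get(
            f"{PLACEMENT_URL}/resource_providers/{rp_uuid}/inventories",
            headers=HEADERS).json()
        payload = {
            "resource_provider_generation":
                current["resource_provider_generation"],
            "inventories": inventories,
        }
        resp = requests.put(
            f"{PLACEMENT_URL}/resource_providers/{rp_uuid}/inventories",
            json=payload, headers=HEADERS)
        if resp.status_code == 200:
            return resp.json()          # includes the bumped generation
        if resp.status_code == 409:     # concurrent update: re-read and retry
            continue
        resp.raise_for_status()
    raise RuntimeError(f"Could not update inventories for {rp_uuid}")
```

This mirrors what the report client does in the log: the failed update is not fatal, it simply triggers a refresh of the provider's inventories before the next write succeeds under the new generation.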
[ 1759.804016] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-dfc779c2-2320-4981-a07b-ca26adeabb8d tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] [instance: 99f22198-1a65-4d0d-b665-90c7063dbdb9] Powering off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1759.804016] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e6865e59-e0f4-4abe-a504-947e6ed41ce8 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1759.811955] env[62519]: DEBUG oslo_vmware.api [None req-dfc779c2-2320-4981-a07b-ca26adeabb8d tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] Waiting for the task: (returnval){ [ 1759.811955] env[62519]: value = "task-1802892" [ 1759.811955] env[62519]: _type = "Task" [ 1759.811955] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1759.825597] env[62519]: DEBUG oslo_vmware.api [None req-dfc779c2-2320-4981-a07b-ca26adeabb8d tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] Task: {'id': task-1802892, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1759.969704] env[62519]: DEBUG oslo_concurrency.lockutils [None req-caa9ca9c-57be-478a-8782-107ca9749996 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.714s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1759.970465] env[62519]: DEBUG nova.compute.manager [None req-caa9ca9c-57be-478a-8782-107ca9749996 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] [instance: 5da884af-d8d2-409b-99bd-e5370e44e9f0] Start building networks asynchronously for instance. {{(pid=62519) _build_resources /opt/stack/nova/nova/compute/manager.py:2838}} [ 1759.974458] env[62519]: DEBUG oslo_concurrency.lockutils [None req-fceb78c5-e39c-4dee-861a-4c99ebc86aec tempest-FloatingIPsAssociationNegativeTestJSON-1468566888 tempest-FloatingIPsAssociationNegativeTestJSON-1468566888-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 20.484s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1759.975085] env[62519]: DEBUG nova.objects.instance [None req-fceb78c5-e39c-4dee-861a-4c99ebc86aec tempest-FloatingIPsAssociationNegativeTestJSON-1468566888 tempest-FloatingIPsAssociationNegativeTestJSON-1468566888-project-member] Lazy-loading 'resources' on Instance uuid d710c97b-a2fd-4a54-baaa-ec7664895ce7 {{(pid=62519) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1759.977698] env[62519]: DEBUG nova.compute.manager [None req-f01efc4a-1a23-4e62-a702-1f856670c4f6 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] [instance: dd60afd6-2834-4fca-a846-e39d57aabd60] Start destroying the instance on the hypervisor. 
{{(pid=62519) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3166}} [ 1759.978162] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-f01efc4a-1a23-4e62-a702-1f856670c4f6 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] [instance: dd60afd6-2834-4fca-a846-e39d57aabd60] Destroying instance {{(pid=62519) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1759.979349] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57cbd18b-4d73-45f1-b224-5ab770c8ff09 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1759.991997] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-f01efc4a-1a23-4e62-a702-1f856670c4f6 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] [instance: dd60afd6-2834-4fca-a846-e39d57aabd60] Powering off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1759.992759] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-238c00e4-c3e3-4892-9477-8d811adbcc71 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1760.005392] env[62519]: DEBUG oslo_vmware.api [None req-f01efc4a-1a23-4e62-a702-1f856670c4f6 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Waiting for the task: (returnval){ [ 1760.005392] env[62519]: value = "task-1802893" [ 1760.005392] env[62519]: _type = "Task" [ 1760.005392] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1760.014321] env[62519]: DEBUG nova.network.neutron [None req-12f12914-bee9-4598-82bd-c3b629c1401f tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] [instance: 2ea8304e-5b91-4908-a876-6e2c780b1da9] Successfully updated port: 224051cd-32a2-49fc-b4a3-523f444e85a2 {{(pid=62519) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1760.019748] env[62519]: DEBUG nova.network.neutron [req-0fc77b8e-c4ba-42ff-8b4a-db3285e3fc10 req-8a8a08a3-ebdb-46e5-b05e-066f408f15ea service nova] [instance: 156ed02a-3365-4a4f-b4de-ea86920d3baf] Updated VIF entry in instance network info cache for port af20c500-c1f3-427c-a59d-e8a17b0a6945. 
{{(pid=62519) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1760.020152] env[62519]: DEBUG nova.network.neutron [req-0fc77b8e-c4ba-42ff-8b4a-db3285e3fc10 req-8a8a08a3-ebdb-46e5-b05e-066f408f15ea service nova] [instance: 156ed02a-3365-4a4f-b4de-ea86920d3baf] Updating instance_info_cache with network_info: [{"id": "af20c500-c1f3-427c-a59d-e8a17b0a6945", "address": "fa:16:3e:2a:c1:c6", "network": {"id": "e0faae63-5ec4-4eef-a323-174dc4b623b5", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-1005479736-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "7336e5aa2f9b42789cec48f07e586876", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4055505f-97ab-400b-969c-43d99b38fd48", "external-id": "nsx-vlan-transportzone-952", "segmentation_id": 952, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapaf20c500-c1", "ovs_interfaceid": "af20c500-c1f3-427c-a59d-e8a17b0a6945", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1760.028120] env[62519]: DEBUG oslo_vmware.api [None req-f01efc4a-1a23-4e62-a702-1f856670c4f6 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Task: {'id': task-1802893, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1760.123124] env[62519]: DEBUG oslo_vmware.api [None req-3fcdc708-adeb-4150-ac24-b9b0d07e602e tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Task: {'id': task-1802889, 'name': ReconfigVM_Task, 'duration_secs': 0.703154} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1760.123537] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-3fcdc708-adeb-4150-ac24-b9b0d07e602e tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] [instance: 88f9351c-253b-49dd-a88e-b8585ea742ac] Reconfigured VM instance instance-00000047 to attach disk [datastore1] volume-5bd7c4a6-d552-4040-aefb-7d5577b89149/volume-5bd7c4a6-d552-4040-aefb-7d5577b89149.vmdk or device None with type thin {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1760.129444] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c282fe84-738c-469f-8c82-35b56af96505 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1760.155688] env[62519]: DEBUG oslo_vmware.api [None req-3fcdc708-adeb-4150-ac24-b9b0d07e602e tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Waiting for the task: (returnval){ [ 1760.155688] env[62519]: value = "task-1802894" [ 1760.155688] env[62519]: _type = "Task" [ 1760.155688] env[62519]: } to complete. 
{{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1760.173347] env[62519]: DEBUG oslo_vmware.api [None req-3fcdc708-adeb-4150-ac24-b9b0d07e602e tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Task: {'id': task-1802894, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1760.177381] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1802891, 'name': CreateVM_Task, 'duration_secs': 0.537797} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1760.177593] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ee4b10ba-1c56-47cf-a528-d6e65c286ddb] Created VM on the ESX host {{(pid=62519) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1760.178545] env[62519]: DEBUG oslo_concurrency.lockutils [None req-84dc4058-6eb4-4380-b9f1-346dccf99218 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1760.178749] env[62519]: DEBUG oslo_concurrency.lockutils [None req-84dc4058-6eb4-4380-b9f1-346dccf99218 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Acquired lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1760.179197] env[62519]: DEBUG oslo_concurrency.lockutils [None req-84dc4058-6eb4-4380-b9f1-346dccf99218 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1760.179447] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cbd71edc-2aa1-41be-a298-fba1ee3e6d09 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1760.190840] env[62519]: DEBUG oslo_vmware.api [None req-84dc4058-6eb4-4380-b9f1-346dccf99218 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Waiting for the task: (returnval){ [ 1760.190840] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]5271f66c-6e56-b02d-ac59-72ca7fa2e74e" [ 1760.190840] env[62519]: _type = "Task" [ 1760.190840] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1760.203433] env[62519]: DEBUG oslo_vmware.api [None req-84dc4058-6eb4-4380-b9f1-346dccf99218 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]5271f66c-6e56-b02d-ac59-72ca7fa2e74e, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1760.326144] env[62519]: DEBUG oslo_vmware.api [None req-dfc779c2-2320-4981-a07b-ca26adeabb8d tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] Task: {'id': task-1802892, 'name': PowerOffVM_Task, 'duration_secs': 0.354665} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1760.326683] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-dfc779c2-2320-4981-a07b-ca26adeabb8d tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] [instance: 99f22198-1a65-4d0d-b665-90c7063dbdb9] Powered off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1760.327063] env[62519]: DEBUG nova.compute.manager [None req-dfc779c2-2320-4981-a07b-ca26adeabb8d tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] [instance: 99f22198-1a65-4d0d-b665-90c7063dbdb9] Checking state {{(pid=62519) _get_power_state /opt/stack/nova/nova/compute/manager.py:1799}} [ 1760.328358] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c62a65ca-94ad-44e6-8947-98fe757e706d {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1760.336279] env[62519]: DEBUG nova.network.neutron [req-91ec5088-6891-4462-8b46-03a380baa7ca req-6fd82bc3-314e-4e76-9e9e-dca1b8058164 service nova] [instance: ee4b10ba-1c56-47cf-a528-d6e65c286ddb] Updated VIF entry in instance network info cache for port 99b49b4e-aba5-450d-a2db-3b35a0313a5b. {{(pid=62519) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1760.336809] env[62519]: DEBUG nova.network.neutron [req-91ec5088-6891-4462-8b46-03a380baa7ca req-6fd82bc3-314e-4e76-9e9e-dca1b8058164 service nova] [instance: ee4b10ba-1c56-47cf-a528-d6e65c286ddb] Updating instance_info_cache with network_info: [{"id": "99b49b4e-aba5-450d-a2db-3b35a0313a5b", "address": "fa:16:3e:1f:a9:ee", "network": {"id": "8d760b11-0e90-40f8-a7a6-509485520bf3", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-619921944-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "448555031bb64aefafd0fcc67f4df10a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4225eb1f-0af4-4ed4-8e3d-de822eb6d4ea", "external-id": "nsx-vlan-transportzone-40", "segmentation_id": 40, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap99b49b4e-ab", "ovs_interfaceid": "99b49b4e-aba5-450d-a2db-3b35a0313a5b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1760.479433] env[62519]: DEBUG nova.compute.utils [None req-caa9ca9c-57be-478a-8782-107ca9749996 tempest-ServerDiskConfigTestJSON-1314025576 
tempest-ServerDiskConfigTestJSON-1314025576-project-member] Using /dev/sd instead of None {{(pid=62519) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1760.483830] env[62519]: DEBUG nova.compute.manager [None req-caa9ca9c-57be-478a-8782-107ca9749996 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] [instance: 5da884af-d8d2-409b-99bd-e5370e44e9f0] Allocating IP information in the background. {{(pid=62519) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1989}} [ 1760.483830] env[62519]: DEBUG nova.network.neutron [None req-caa9ca9c-57be-478a-8782-107ca9749996 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] [instance: 5da884af-d8d2-409b-99bd-e5370e44e9f0] allocate_for_instance() {{(pid=62519) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1760.521949] env[62519]: DEBUG oslo_vmware.api [None req-f01efc4a-1a23-4e62-a702-1f856670c4f6 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Task: {'id': task-1802893, 'name': PowerOffVM_Task, 'duration_secs': 0.269959} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1760.522116] env[62519]: DEBUG oslo_concurrency.lockutils [None req-12f12914-bee9-4598-82bd-c3b629c1401f tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Acquiring lock "refresh_cache-2ea8304e-5b91-4908-a876-6e2c780b1da9" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1760.522196] env[62519]: DEBUG oslo_concurrency.lockutils [None req-12f12914-bee9-4598-82bd-c3b629c1401f tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Acquired lock "refresh_cache-2ea8304e-5b91-4908-a876-6e2c780b1da9" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1760.522336] env[62519]: DEBUG nova.network.neutron [None req-12f12914-bee9-4598-82bd-c3b629c1401f tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] [instance: 2ea8304e-5b91-4908-a876-6e2c780b1da9] Building network info cache for instance {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1760.523535] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-f01efc4a-1a23-4e62-a702-1f856670c4f6 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] [instance: dd60afd6-2834-4fca-a846-e39d57aabd60] Powered off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1760.523739] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-f01efc4a-1a23-4e62-a702-1f856670c4f6 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] [instance: dd60afd6-2834-4fca-a846-e39d57aabd60] Unregistering the VM {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1760.524103] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e778ee49-027c-4459-a706-b9fc6a68f482 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1760.532333] env[62519]: DEBUG oslo_concurrency.lockutils [req-0fc77b8e-c4ba-42ff-8b4a-db3285e3fc10 
req-8a8a08a3-ebdb-46e5-b05e-066f408f15ea service nova] Releasing lock "refresh_cache-156ed02a-3365-4a4f-b4de-ea86920d3baf" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1760.559734] env[62519]: DEBUG nova.policy [None req-caa9ca9c-57be-478a-8782-107ca9749996 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '12d88579b24941a0be744afe44126360', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'df6727c290724a8ebef5188c77e91399', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62519) authorize /opt/stack/nova/nova/policy.py:192}} [ 1760.611790] env[62519]: DEBUG oslo_concurrency.lockutils [None req-51df1d83-d7cf-4798-abd3-f19fe8d9b526 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] Lock "76786353-f93f-4e7e-b3f7-7f22ae4b7b41" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 32.405s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1760.635506] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-f01efc4a-1a23-4e62-a702-1f856670c4f6 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] [instance: dd60afd6-2834-4fca-a846-e39d57aabd60] Unregistered the VM {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1760.635734] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-f01efc4a-1a23-4e62-a702-1f856670c4f6 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] [instance: dd60afd6-2834-4fca-a846-e39d57aabd60] Deleting contents of the VM from datastore datastore1 {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1760.635734] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-f01efc4a-1a23-4e62-a702-1f856670c4f6 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Deleting the datastore file [datastore1] dd60afd6-2834-4fca-a846-e39d57aabd60 {{(pid=62519) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1760.636015] env[62519]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-86a3f038-1f2a-4531-87b8-2219b4535184 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1760.646842] env[62519]: DEBUG oslo_vmware.api [None req-f01efc4a-1a23-4e62-a702-1f856670c4f6 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Waiting for the task: (returnval){ [ 1760.646842] env[62519]: value = "task-1802896" [ 1760.646842] env[62519]: _type = "Task" [ 1760.646842] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1760.663258] env[62519]: DEBUG oslo_vmware.api [None req-f01efc4a-1a23-4e62-a702-1f856670c4f6 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Task: {'id': task-1802896, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1760.670580] env[62519]: DEBUG oslo_vmware.api [None req-3fcdc708-adeb-4150-ac24-b9b0d07e602e tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Task: {'id': task-1802894, 'name': ReconfigVM_Task, 'duration_secs': 0.211474} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1760.671096] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-3fcdc708-adeb-4150-ac24-b9b0d07e602e tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] [instance: 88f9351c-253b-49dd-a88e-b8585ea742ac] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-373788', 'volume_id': '5bd7c4a6-d552-4040-aefb-7d5577b89149', 'name': 'volume-5bd7c4a6-d552-4040-aefb-7d5577b89149', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '88f9351c-253b-49dd-a88e-b8585ea742ac', 'attached_at': '', 'detached_at': '', 'volume_id': '5bd7c4a6-d552-4040-aefb-7d5577b89149', 'serial': '5bd7c4a6-d552-4040-aefb-7d5577b89149'} {{(pid=62519) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1760.703043] env[62519]: DEBUG oslo_vmware.api [None req-84dc4058-6eb4-4380-b9f1-346dccf99218 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]5271f66c-6e56-b02d-ac59-72ca7fa2e74e, 'name': SearchDatastore_Task, 'duration_secs': 0.015426} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1760.703043] env[62519]: DEBUG oslo_concurrency.lockutils [None req-84dc4058-6eb4-4380-b9f1-346dccf99218 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Releasing lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1760.703043] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-84dc4058-6eb4-4380-b9f1-346dccf99218 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] [instance: ee4b10ba-1c56-47cf-a528-d6e65c286ddb] Processing image 15793716-f1d9-4a86-9030-717adf498693 {{(pid=62519) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1760.703263] env[62519]: DEBUG oslo_concurrency.lockutils [None req-84dc4058-6eb4-4380-b9f1-346dccf99218 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1760.703302] env[62519]: DEBUG oslo_concurrency.lockutils [None req-84dc4058-6eb4-4380-b9f1-346dccf99218 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Acquired lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1760.703469] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-84dc4058-6eb4-4380-b9f1-346dccf99218 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62519) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1760.703726] env[62519]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-49e6706f-526c-4832-a246-6bf822041824 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1760.715963] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-84dc4058-6eb4-4380-b9f1-346dccf99218 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62519) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1760.716134] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-84dc4058-6eb4-4380-b9f1-346dccf99218 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62519) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1760.717285] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4186289a-4fe9-4b95-add5-55d940b9df7a {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1760.730363] env[62519]: DEBUG oslo_vmware.api [None req-84dc4058-6eb4-4380-b9f1-346dccf99218 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Waiting for the task: (returnval){ [ 1760.730363] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]52b057d0-deec-a159-adb4-be965354832a" [ 1760.730363] env[62519]: _type = "Task" [ 1760.730363] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1760.745373] env[62519]: DEBUG oslo_vmware.api [None req-84dc4058-6eb4-4380-b9f1-346dccf99218 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52b057d0-deec-a159-adb4-be965354832a, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1760.839597] env[62519]: DEBUG oslo_concurrency.lockutils [req-91ec5088-6891-4462-8b46-03a380baa7ca req-6fd82bc3-314e-4e76-9e9e-dca1b8058164 service nova] Releasing lock "refresh_cache-ee4b10ba-1c56-47cf-a528-d6e65c286ddb" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1760.847792] env[62519]: DEBUG oslo_concurrency.lockutils [None req-dfc779c2-2320-4981-a07b-ca26adeabb8d tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] Lock "99f22198-1a65-4d0d-b665-90c7063dbdb9" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 2.065s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1760.919197] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6218e4c2-ee73-4d77-8754-b710468975c4 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1760.931695] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c06fc61-dd59-413a-8174-5d37f05742c8 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1760.971158] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-423ef2aa-29c2-4def-8fa0-a6fd4b24ee48 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1760.981973] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-145f4979-90df-4c68-a3d2-0577f07102b9 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1760.988218] env[62519]: DEBUG nova.compute.manager [None req-caa9ca9c-57be-478a-8782-107ca9749996 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] [instance: 5da884af-d8d2-409b-99bd-e5370e44e9f0] Start building block device mappings for instance. {{(pid=62519) _build_resources /opt/stack/nova/nova/compute/manager.py:2873}} [ 1761.004211] env[62519]: DEBUG nova.compute.provider_tree [None req-fceb78c5-e39c-4dee-861a-4c99ebc86aec tempest-FloatingIPsAssociationNegativeTestJSON-1468566888 tempest-FloatingIPsAssociationNegativeTestJSON-1468566888-project-member] Updating inventory in ProviderTree for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1761.103423] env[62519]: DEBUG nova.network.neutron [None req-12f12914-bee9-4598-82bd-c3b629c1401f tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] [instance: 2ea8304e-5b91-4908-a876-6e2c780b1da9] Instance cache missing network info. 
{{(pid=62519) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1761.107584] env[62519]: DEBUG nova.network.neutron [None req-caa9ca9c-57be-478a-8782-107ca9749996 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] [instance: 5da884af-d8d2-409b-99bd-e5370e44e9f0] Successfully created port: f8e70085-af0c-4731-8b61-cf5cd2460344 {{(pid=62519) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1761.161945] env[62519]: DEBUG oslo_vmware.api [None req-f01efc4a-1a23-4e62-a702-1f856670c4f6 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Task: {'id': task-1802896, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.297117} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1761.162261] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-f01efc4a-1a23-4e62-a702-1f856670c4f6 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Deleted the datastore file {{(pid=62519) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1761.162448] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-f01efc4a-1a23-4e62-a702-1f856670c4f6 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] [instance: dd60afd6-2834-4fca-a846-e39d57aabd60] Deleted contents of the VM from datastore datastore1 {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1761.162653] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-f01efc4a-1a23-4e62-a702-1f856670c4f6 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] [instance: dd60afd6-2834-4fca-a846-e39d57aabd60] Instance destroyed {{(pid=62519) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1761.162787] env[62519]: INFO nova.compute.manager [None req-f01efc4a-1a23-4e62-a702-1f856670c4f6 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] [instance: dd60afd6-2834-4fca-a846-e39d57aabd60] Took 1.18 seconds to destroy the instance on the hypervisor. [ 1761.163062] env[62519]: DEBUG oslo.service.loopingcall [None req-f01efc4a-1a23-4e62-a702-1f856670c4f6 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62519) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1761.163268] env[62519]: DEBUG nova.compute.manager [-] [instance: dd60afd6-2834-4fca-a846-e39d57aabd60] Deallocating network for instance {{(pid=62519) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2297}} [ 1761.164337] env[62519]: DEBUG nova.network.neutron [-] [instance: dd60afd6-2834-4fca-a846-e39d57aabd60] deallocate_for_instance() {{(pid=62519) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1761.208582] env[62519]: DEBUG oslo_concurrency.lockutils [None req-51df1d83-d7cf-4798-abd3-f19fe8d9b526 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] Lock "af422ca1-7966-4bed-97bf-2b4c5285eaab" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 32.973s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1761.247070] env[62519]: DEBUG oslo_vmware.api [None req-84dc4058-6eb4-4380-b9f1-346dccf99218 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52b057d0-deec-a159-adb4-be965354832a, 'name': SearchDatastore_Task, 'duration_secs': 0.015057} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1761.248494] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-34253b41-d81b-49be-bf16-8f5336d50ebe {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1761.257849] env[62519]: DEBUG oslo_vmware.api [None req-84dc4058-6eb4-4380-b9f1-346dccf99218 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Waiting for the task: (returnval){ [ 1761.257849] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]52354579-8c31-5be9-4691-996047dd7bac" [ 1761.257849] env[62519]: _type = "Task" [ 1761.257849] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1761.274483] env[62519]: DEBUG oslo_vmware.api [None req-84dc4058-6eb4-4380-b9f1-346dccf99218 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52354579-8c31-5be9-4691-996047dd7bac, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1761.298341] env[62519]: DEBUG nova.compute.manager [req-0177f4f4-8629-426e-a0ad-6f7c2c74d801 req-e80c75b6-68fa-406d-bc49-5528c89fcc08 service nova] [instance: 156ed02a-3365-4a4f-b4de-ea86920d3baf] Received event network-changed-af20c500-c1f3-427c-a59d-e8a17b0a6945 {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1761.298754] env[62519]: DEBUG nova.compute.manager [req-0177f4f4-8629-426e-a0ad-6f7c2c74d801 req-e80c75b6-68fa-406d-bc49-5528c89fcc08 service nova] [instance: 156ed02a-3365-4a4f-b4de-ea86920d3baf] Refreshing instance network info cache due to event network-changed-af20c500-c1f3-427c-a59d-e8a17b0a6945. 
{{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11628}} [ 1761.299368] env[62519]: DEBUG oslo_concurrency.lockutils [req-0177f4f4-8629-426e-a0ad-6f7c2c74d801 req-e80c75b6-68fa-406d-bc49-5528c89fcc08 service nova] Acquiring lock "refresh_cache-156ed02a-3365-4a4f-b4de-ea86920d3baf" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1761.299458] env[62519]: DEBUG oslo_concurrency.lockutils [req-0177f4f4-8629-426e-a0ad-6f7c2c74d801 req-e80c75b6-68fa-406d-bc49-5528c89fcc08 service nova] Acquired lock "refresh_cache-156ed02a-3365-4a4f-b4de-ea86920d3baf" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1761.299734] env[62519]: DEBUG nova.network.neutron [req-0177f4f4-8629-426e-a0ad-6f7c2c74d801 req-e80c75b6-68fa-406d-bc49-5528c89fcc08 service nova] [instance: 156ed02a-3365-4a4f-b4de-ea86920d3baf] Refreshing network info cache for port af20c500-c1f3-427c-a59d-e8a17b0a6945 {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1761.380151] env[62519]: DEBUG oslo_concurrency.lockutils [None req-de6056f6-9d9f-42ab-913c-cfc39f5eba39 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] Acquiring lock "76786353-f93f-4e7e-b3f7-7f22ae4b7b41" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1761.380151] env[62519]: DEBUG oslo_concurrency.lockutils [None req-de6056f6-9d9f-42ab-913c-cfc39f5eba39 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] Lock "76786353-f93f-4e7e-b3f7-7f22ae4b7b41" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1761.380151] env[62519]: DEBUG oslo_concurrency.lockutils [None req-de6056f6-9d9f-42ab-913c-cfc39f5eba39 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] Acquiring lock "76786353-f93f-4e7e-b3f7-7f22ae4b7b41-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1761.380151] env[62519]: DEBUG oslo_concurrency.lockutils [None req-de6056f6-9d9f-42ab-913c-cfc39f5eba39 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] Lock "76786353-f93f-4e7e-b3f7-7f22ae4b7b41-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1761.380151] env[62519]: DEBUG oslo_concurrency.lockutils [None req-de6056f6-9d9f-42ab-913c-cfc39f5eba39 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] Lock "76786353-f93f-4e7e-b3f7-7f22ae4b7b41-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1761.383432] env[62519]: INFO nova.compute.manager [None req-de6056f6-9d9f-42ab-913c-cfc39f5eba39 tempest-MultipleCreateTestJSON-462787329 
tempest-MultipleCreateTestJSON-462787329-project-member] [instance: 76786353-f93f-4e7e-b3f7-7f22ae4b7b41] Terminating instance [ 1761.424424] env[62519]: DEBUG oslo_concurrency.lockutils [None req-7e02c46c-e5c8-46a7-b4cc-fe4ab917acc4 tempest-ServerRescueTestJSONUnderV235-415838788 tempest-ServerRescueTestJSONUnderV235-415838788-project-member] Acquiring lock "156ed02a-3365-4a4f-b4de-ea86920d3baf" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1761.424741] env[62519]: DEBUG oslo_concurrency.lockutils [None req-7e02c46c-e5c8-46a7-b4cc-fe4ab917acc4 tempest-ServerRescueTestJSONUnderV235-415838788 tempest-ServerRescueTestJSONUnderV235-415838788-project-member] Lock "156ed02a-3365-4a4f-b4de-ea86920d3baf" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1761.424957] env[62519]: DEBUG oslo_concurrency.lockutils [None req-7e02c46c-e5c8-46a7-b4cc-fe4ab917acc4 tempest-ServerRescueTestJSONUnderV235-415838788 tempest-ServerRescueTestJSONUnderV235-415838788-project-member] Acquiring lock "156ed02a-3365-4a4f-b4de-ea86920d3baf-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1761.425506] env[62519]: DEBUG oslo_concurrency.lockutils [None req-7e02c46c-e5c8-46a7-b4cc-fe4ab917acc4 tempest-ServerRescueTestJSONUnderV235-415838788 tempest-ServerRescueTestJSONUnderV235-415838788-project-member] Lock "156ed02a-3365-4a4f-b4de-ea86920d3baf-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1761.425506] env[62519]: DEBUG oslo_concurrency.lockutils [None req-7e02c46c-e5c8-46a7-b4cc-fe4ab917acc4 tempest-ServerRescueTestJSONUnderV235-415838788 tempest-ServerRescueTestJSONUnderV235-415838788-project-member] Lock "156ed02a-3365-4a4f-b4de-ea86920d3baf-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1761.428943] env[62519]: INFO nova.compute.manager [None req-7e02c46c-e5c8-46a7-b4cc-fe4ab917acc4 tempest-ServerRescueTestJSONUnderV235-415838788 tempest-ServerRescueTestJSONUnderV235-415838788-project-member] [instance: 156ed02a-3365-4a4f-b4de-ea86920d3baf] Terminating instance [ 1761.487925] env[62519]: DEBUG nova.network.neutron [None req-12f12914-bee9-4598-82bd-c3b629c1401f tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] [instance: 2ea8304e-5b91-4908-a876-6e2c780b1da9] Updating instance_info_cache with network_info: [{"id": "224051cd-32a2-49fc-b4a3-523f444e85a2", "address": "fa:16:3e:85:a2:ec", "network": {"id": "8d760b11-0e90-40f8-a7a6-509485520bf3", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-619921944-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": 
[]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "448555031bb64aefafd0fcc67f4df10a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4225eb1f-0af4-4ed4-8e3d-de822eb6d4ea", "external-id": "nsx-vlan-transportzone-40", "segmentation_id": 40, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap224051cd-32", "ovs_interfaceid": "224051cd-32a2-49fc-b4a3-523f444e85a2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1761.521716] env[62519]: DEBUG oslo_concurrency.lockutils [None req-01d21426-eca2-4ae1-921c-e2c4a96f9de6 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] Acquiring lock "af422ca1-7966-4bed-97bf-2b4c5285eaab" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1761.521716] env[62519]: DEBUG oslo_concurrency.lockutils [None req-01d21426-eca2-4ae1-921c-e2c4a96f9de6 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] Lock "af422ca1-7966-4bed-97bf-2b4c5285eaab" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1761.522818] env[62519]: DEBUG oslo_concurrency.lockutils [None req-01d21426-eca2-4ae1-921c-e2c4a96f9de6 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] Acquiring lock "af422ca1-7966-4bed-97bf-2b4c5285eaab-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1761.522818] env[62519]: DEBUG oslo_concurrency.lockutils [None req-01d21426-eca2-4ae1-921c-e2c4a96f9de6 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] Lock "af422ca1-7966-4bed-97bf-2b4c5285eaab-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1761.522818] env[62519]: DEBUG oslo_concurrency.lockutils [None req-01d21426-eca2-4ae1-921c-e2c4a96f9de6 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] Lock "af422ca1-7966-4bed-97bf-2b4c5285eaab-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1761.525755] env[62519]: INFO nova.compute.manager [None req-01d21426-eca2-4ae1-921c-e2c4a96f9de6 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] [instance: af422ca1-7966-4bed-97bf-2b4c5285eaab] Terminating instance [ 1761.555355] env[62519]: DEBUG nova.scheduler.client.report [None req-fceb78c5-e39c-4dee-861a-4c99ebc86aec 
tempest-FloatingIPsAssociationNegativeTestJSON-1468566888 tempest-FloatingIPsAssociationNegativeTestJSON-1468566888-project-member] Updated inventory for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 with generation 117 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 1761.555679] env[62519]: DEBUG nova.compute.provider_tree [None req-fceb78c5-e39c-4dee-861a-4c99ebc86aec tempest-FloatingIPsAssociationNegativeTestJSON-1468566888 tempest-FloatingIPsAssociationNegativeTestJSON-1468566888-project-member] Updating resource provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 generation from 117 to 118 during operation: update_inventory {{(pid=62519) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1761.555835] env[62519]: DEBUG nova.compute.provider_tree [None req-fceb78c5-e39c-4dee-861a-4c99ebc86aec tempest-FloatingIPsAssociationNegativeTestJSON-1468566888 tempest-FloatingIPsAssociationNegativeTestJSON-1468566888-project-member] Updating inventory in ProviderTree for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1761.716654] env[62519]: DEBUG nova.objects.instance [None req-3fcdc708-adeb-4150-ac24-b9b0d07e602e tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Lazy-loading 'flavor' on Instance uuid 88f9351c-253b-49dd-a88e-b8585ea742ac {{(pid=62519) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1761.769785] env[62519]: DEBUG oslo_vmware.api [None req-84dc4058-6eb4-4380-b9f1-346dccf99218 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52354579-8c31-5be9-4691-996047dd7bac, 'name': SearchDatastore_Task, 'duration_secs': 0.040844} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1761.770081] env[62519]: DEBUG oslo_concurrency.lockutils [None req-84dc4058-6eb4-4380-b9f1-346dccf99218 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Releasing lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1761.770344] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-84dc4058-6eb4-4380-b9f1-346dccf99218 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk to [datastore1] ee4b10ba-1c56-47cf-a528-d6e65c286ddb/ee4b10ba-1c56-47cf-a528-d6e65c286ddb.vmdk {{(pid=62519) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1761.770604] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-66f47e46-b396-4c53-a0eb-c6d3fa3e1cf0 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1761.782495] env[62519]: DEBUG oslo_vmware.api [None req-84dc4058-6eb4-4380-b9f1-346dccf99218 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Waiting for the task: (returnval){ [ 1761.782495] env[62519]: value = "task-1802897" [ 1761.782495] env[62519]: _type = "Task" [ 1761.782495] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1761.795280] env[62519]: DEBUG oslo_vmware.api [None req-84dc4058-6eb4-4380-b9f1-346dccf99218 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Task: {'id': task-1802897, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1761.889891] env[62519]: DEBUG nova.compute.manager [None req-de6056f6-9d9f-42ab-913c-cfc39f5eba39 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] [instance: 76786353-f93f-4e7e-b3f7-7f22ae4b7b41] Start destroying the instance on the hypervisor. 
{{(pid=62519) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3166}} [ 1761.890157] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-de6056f6-9d9f-42ab-913c-cfc39f5eba39 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] [instance: 76786353-f93f-4e7e-b3f7-7f22ae4b7b41] Destroying instance {{(pid=62519) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1761.891523] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10ddc04b-9e54-4c80-96f2-2395c199b773 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1761.901146] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-de6056f6-9d9f-42ab-913c-cfc39f5eba39 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] [instance: 76786353-f93f-4e7e-b3f7-7f22ae4b7b41] Powering off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1761.901440] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-1203d1f3-26d5-4dd0-8391-244376684444 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1761.910024] env[62519]: DEBUG oslo_vmware.api [None req-de6056f6-9d9f-42ab-913c-cfc39f5eba39 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] Waiting for the task: (returnval){ [ 1761.910024] env[62519]: value = "task-1802898" [ 1761.910024] env[62519]: _type = "Task" [ 1761.910024] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1761.922114] env[62519]: DEBUG oslo_vmware.api [None req-de6056f6-9d9f-42ab-913c-cfc39f5eba39 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] Task: {'id': task-1802898, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1761.933305] env[62519]: DEBUG nova.compute.manager [None req-7e02c46c-e5c8-46a7-b4cc-fe4ab917acc4 tempest-ServerRescueTestJSONUnderV235-415838788 tempest-ServerRescueTestJSONUnderV235-415838788-project-member] [instance: 156ed02a-3365-4a4f-b4de-ea86920d3baf] Start destroying the instance on the hypervisor. 
{{(pid=62519) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3166}} [ 1761.933574] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-7e02c46c-e5c8-46a7-b4cc-fe4ab917acc4 tempest-ServerRescueTestJSONUnderV235-415838788 tempest-ServerRescueTestJSONUnderV235-415838788-project-member] [instance: 156ed02a-3365-4a4f-b4de-ea86920d3baf] Destroying instance {{(pid=62519) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1761.934655] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6906972c-a909-40d6-a6cf-9cdff05d9908 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1761.945194] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-7e02c46c-e5c8-46a7-b4cc-fe4ab917acc4 tempest-ServerRescueTestJSONUnderV235-415838788 tempest-ServerRescueTestJSONUnderV235-415838788-project-member] [instance: 156ed02a-3365-4a4f-b4de-ea86920d3baf] Powering off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1761.945526] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-0a25d952-d5f3-4e8f-95f5-6244bd490165 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1761.953626] env[62519]: DEBUG oslo_vmware.api [None req-7e02c46c-e5c8-46a7-b4cc-fe4ab917acc4 tempest-ServerRescueTestJSONUnderV235-415838788 tempest-ServerRescueTestJSONUnderV235-415838788-project-member] Waiting for the task: (returnval){ [ 1761.953626] env[62519]: value = "task-1802899" [ 1761.953626] env[62519]: _type = "Task" [ 1761.953626] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1761.965038] env[62519]: DEBUG oslo_vmware.api [None req-7e02c46c-e5c8-46a7-b4cc-fe4ab917acc4 tempest-ServerRescueTestJSONUnderV235-415838788 tempest-ServerRescueTestJSONUnderV235-415838788-project-member] Task: {'id': task-1802899, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1761.990955] env[62519]: DEBUG oslo_concurrency.lockutils [None req-12f12914-bee9-4598-82bd-c3b629c1401f tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Releasing lock "refresh_cache-2ea8304e-5b91-4908-a876-6e2c780b1da9" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1761.991386] env[62519]: DEBUG nova.compute.manager [None req-12f12914-bee9-4598-82bd-c3b629c1401f tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] [instance: 2ea8304e-5b91-4908-a876-6e2c780b1da9] Instance network_info: |[{"id": "224051cd-32a2-49fc-b4a3-523f444e85a2", "address": "fa:16:3e:85:a2:ec", "network": {"id": "8d760b11-0e90-40f8-a7a6-509485520bf3", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-619921944-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "448555031bb64aefafd0fcc67f4df10a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4225eb1f-0af4-4ed4-8e3d-de822eb6d4ea", "external-id": "nsx-vlan-transportzone-40", "segmentation_id": 40, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap224051cd-32", "ovs_interfaceid": "224051cd-32a2-49fc-b4a3-523f444e85a2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62519) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2004}} [ 1761.991842] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-12f12914-bee9-4598-82bd-c3b629c1401f tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] [instance: 2ea8304e-5b91-4908-a876-6e2c780b1da9] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:85:a2:ec', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '4225eb1f-0af4-4ed4-8e3d-de822eb6d4ea', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '224051cd-32a2-49fc-b4a3-523f444e85a2', 'vif_model': 'vmxnet3'}] {{(pid=62519) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1762.000603] env[62519]: DEBUG oslo.service.loopingcall [None req-12f12914-bee9-4598-82bd-c3b629c1401f tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62519) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1762.001820] env[62519]: DEBUG nova.compute.manager [None req-caa9ca9c-57be-478a-8782-107ca9749996 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] [instance: 5da884af-d8d2-409b-99bd-e5370e44e9f0] Start spawning the instance on the hypervisor. 
{{(pid=62519) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2647}} [ 1762.004582] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2ea8304e-5b91-4908-a876-6e2c780b1da9] Creating VM on the ESX host {{(pid=62519) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1762.007641] env[62519]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-987fc458-a24c-464a-abd7-671caa788d03 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1762.030022] env[62519]: DEBUG nova.compute.manager [None req-01d21426-eca2-4ae1-921c-e2c4a96f9de6 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] [instance: af422ca1-7966-4bed-97bf-2b4c5285eaab] Start destroying the instance on the hypervisor. {{(pid=62519) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3166}} [ 1762.030296] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-01d21426-eca2-4ae1-921c-e2c4a96f9de6 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] [instance: af422ca1-7966-4bed-97bf-2b4c5285eaab] Destroying instance {{(pid=62519) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1762.034023] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09e567e7-d7b1-4d48-bcfc-507631f27cc2 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1762.037463] env[62519]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1762.037463] env[62519]: value = "task-1802900" [ 1762.037463] env[62519]: _type = "Task" [ 1762.037463] env[62519]: } to complete. 
{{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1762.043823] env[62519]: DEBUG nova.objects.instance [None req-323daec2-ec4b-4288-a31b-a050d5222299 tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] Lazy-loading 'flavor' on Instance uuid 99f22198-1a65-4d0d-b665-90c7063dbdb9 {{(pid=62519) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1762.048956] env[62519]: DEBUG nova.virt.hardware [None req-caa9ca9c-57be-478a-8782-107ca9749996 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T07:56:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T07:55:55Z,direct_url=,disk_format='vmdk',id=15793716-f1d9-4a86-9030-717adf498693,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4069337684d348e0a1ab4eb6a1a2b14d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T07:55:56Z,virtual_size=,visibility=), allow threads: False {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1762.049267] env[62519]: DEBUG nova.virt.hardware [None req-caa9ca9c-57be-478a-8782-107ca9749996 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Flavor limits 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1762.049462] env[62519]: DEBUG nova.virt.hardware [None req-caa9ca9c-57be-478a-8782-107ca9749996 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Image limits 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1762.049664] env[62519]: DEBUG nova.virt.hardware [None req-caa9ca9c-57be-478a-8782-107ca9749996 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Flavor pref 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1762.049837] env[62519]: DEBUG nova.virt.hardware [None req-caa9ca9c-57be-478a-8782-107ca9749996 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Image pref 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1762.049993] env[62519]: DEBUG nova.virt.hardware [None req-caa9ca9c-57be-478a-8782-107ca9749996 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1762.050249] env[62519]: DEBUG nova.virt.hardware [None req-caa9ca9c-57be-478a-8782-107ca9749996 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62519) _get_desirable_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:573}} [ 1762.050423] env[62519]: DEBUG nova.virt.hardware [None req-caa9ca9c-57be-478a-8782-107ca9749996 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62519) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1762.050609] env[62519]: DEBUG nova.virt.hardware [None req-caa9ca9c-57be-478a-8782-107ca9749996 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Got 1 possible topologies {{(pid=62519) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1762.050794] env[62519]: DEBUG nova.virt.hardware [None req-caa9ca9c-57be-478a-8782-107ca9749996 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1762.050976] env[62519]: DEBUG nova.virt.hardware [None req-caa9ca9c-57be-478a-8782-107ca9749996 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1762.052954] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-966235d5-96da-45d5-adc2-ca0155b11301 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1762.062810] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-01d21426-eca2-4ae1-921c-e2c4a96f9de6 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] [instance: af422ca1-7966-4bed-97bf-2b4c5285eaab] Powering off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1762.062810] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1802900, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1762.064107] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-84fe2df2-badc-4660-89b3-827bf79c7307 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1762.070645] env[62519]: DEBUG oslo_concurrency.lockutils [None req-fceb78c5-e39c-4dee-861a-4c99ebc86aec tempest-FloatingIPsAssociationNegativeTestJSON-1468566888 tempest-FloatingIPsAssociationNegativeTestJSON-1468566888-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.096s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1762.080268] env[62519]: DEBUG oslo_concurrency.lockutils [None req-c0150d6a-b718-4a52-8600-3865c31af3f8 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 20.887s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1762.080603] env[62519]: DEBUG oslo_concurrency.lockutils [None req-c0150d6a-b718-4a52-8600-3865c31af3f8 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.001s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1762.083655] env[62519]: DEBUG oslo_concurrency.lockutils [None req-3222e7b5-02b9-4fbc-bf91-05dacee08ed0 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 16.349s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1762.084032] env[62519]: DEBUG nova.objects.instance [None req-3222e7b5-02b9-4fbc-bf91-05dacee08ed0 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Lazy-loading 'resources' on Instance uuid e3a19583-b332-40e3-bdd0-d254f7a78b0a {{(pid=62519) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1762.090963] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b026705f-c06a-4ac4-8586-dfae41b3a557 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1762.102381] env[62519]: DEBUG oslo_vmware.api [None req-01d21426-eca2-4ae1-921c-e2c4a96f9de6 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] Waiting for the task: (returnval){ [ 1762.102381] env[62519]: value = "task-1802901" [ 1762.102381] env[62519]: _type = "Task" [ 1762.102381] env[62519]: } to complete. 
{{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1762.124450] env[62519]: INFO nova.scheduler.client.report [None req-fceb78c5-e39c-4dee-861a-4c99ebc86aec tempest-FloatingIPsAssociationNegativeTestJSON-1468566888 tempest-FloatingIPsAssociationNegativeTestJSON-1468566888-project-member] Deleted allocations for instance d710c97b-a2fd-4a54-baaa-ec7664895ce7 [ 1762.131258] env[62519]: DEBUG oslo_vmware.api [None req-01d21426-eca2-4ae1-921c-e2c4a96f9de6 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] Task: {'id': task-1802901, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1762.132870] env[62519]: INFO nova.scheduler.client.report [None req-c0150d6a-b718-4a52-8600-3865c31af3f8 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Deleted allocations for instance ad0af10d-5063-4344-b12f-1d3ee9ea1090 [ 1762.198545] env[62519]: DEBUG nova.network.neutron [-] [instance: dd60afd6-2834-4fca-a846-e39d57aabd60] Updating instance_info_cache with network_info: [] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1762.222722] env[62519]: DEBUG oslo_concurrency.lockutils [None req-3fcdc708-adeb-4150-ac24-b9b0d07e602e tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Lock "88f9351c-253b-49dd-a88e-b8585ea742ac" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.853s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1762.297277] env[62519]: DEBUG oslo_vmware.api [None req-84dc4058-6eb4-4380-b9f1-346dccf99218 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Task: {'id': task-1802897, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1762.425841] env[62519]: DEBUG oslo_vmware.api [None req-de6056f6-9d9f-42ab-913c-cfc39f5eba39 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] Task: {'id': task-1802898, 'name': PowerOffVM_Task, 'duration_secs': 0.235873} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1762.426096] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-de6056f6-9d9f-42ab-913c-cfc39f5eba39 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] [instance: 76786353-f93f-4e7e-b3f7-7f22ae4b7b41] Powered off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1762.426291] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-de6056f6-9d9f-42ab-913c-cfc39f5eba39 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] [instance: 76786353-f93f-4e7e-b3f7-7f22ae4b7b41] Unregistering the VM {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1762.426741] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-282db43c-cf88-444d-87eb-bf61497de975 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1762.466116] env[62519]: DEBUG oslo_vmware.api [None req-7e02c46c-e5c8-46a7-b4cc-fe4ab917acc4 tempest-ServerRescueTestJSONUnderV235-415838788 tempest-ServerRescueTestJSONUnderV235-415838788-project-member] Task: {'id': task-1802899, 'name': PowerOffVM_Task, 'duration_secs': 0.35669} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1762.466431] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-7e02c46c-e5c8-46a7-b4cc-fe4ab917acc4 tempest-ServerRescueTestJSONUnderV235-415838788 tempest-ServerRescueTestJSONUnderV235-415838788-project-member] [instance: 156ed02a-3365-4a4f-b4de-ea86920d3baf] Powered off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1762.466606] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-7e02c46c-e5c8-46a7-b4cc-fe4ab917acc4 tempest-ServerRescueTestJSONUnderV235-415838788 tempest-ServerRescueTestJSONUnderV235-415838788-project-member] [instance: 156ed02a-3365-4a4f-b4de-ea86920d3baf] Unregistering the VM {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1762.466957] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-484cf5e9-cff0-4d53-8e85-b45e841e9564 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1762.494933] env[62519]: DEBUG nova.network.neutron [req-0177f4f4-8629-426e-a0ad-6f7c2c74d801 req-e80c75b6-68fa-406d-bc49-5528c89fcc08 service nova] [instance: 156ed02a-3365-4a4f-b4de-ea86920d3baf] Updated VIF entry in instance network info cache for port af20c500-c1f3-427c-a59d-e8a17b0a6945. 
{{(pid=62519) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1762.495362] env[62519]: DEBUG nova.network.neutron [req-0177f4f4-8629-426e-a0ad-6f7c2c74d801 req-e80c75b6-68fa-406d-bc49-5528c89fcc08 service nova] [instance: 156ed02a-3365-4a4f-b4de-ea86920d3baf] Updating instance_info_cache with network_info: [{"id": "af20c500-c1f3-427c-a59d-e8a17b0a6945", "address": "fa:16:3e:2a:c1:c6", "network": {"id": "e0faae63-5ec4-4eef-a323-174dc4b623b5", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-1005479736-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "7336e5aa2f9b42789cec48f07e586876", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4055505f-97ab-400b-969c-43d99b38fd48", "external-id": "nsx-vlan-transportzone-952", "segmentation_id": 952, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapaf20c500-c1", "ovs_interfaceid": "af20c500-c1f3-427c-a59d-e8a17b0a6945", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1762.552538] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1802900, 'name': CreateVM_Task} progress is 99%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1762.553425] env[62519]: DEBUG oslo_concurrency.lockutils [None req-323daec2-ec4b-4288-a31b-a050d5222299 tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] Acquiring lock "refresh_cache-99f22198-1a65-4d0d-b665-90c7063dbdb9" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1762.553699] env[62519]: DEBUG oslo_concurrency.lockutils [None req-323daec2-ec4b-4288-a31b-a050d5222299 tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] Acquired lock "refresh_cache-99f22198-1a65-4d0d-b665-90c7063dbdb9" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1762.553908] env[62519]: DEBUG nova.network.neutron [None req-323daec2-ec4b-4288-a31b-a050d5222299 tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] [instance: 99f22198-1a65-4d0d-b665-90c7063dbdb9] Building network info cache for instance {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1762.554099] env[62519]: DEBUG nova.objects.instance [None req-323daec2-ec4b-4288-a31b-a050d5222299 tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] Lazy-loading 'info_cache' on Instance uuid 99f22198-1a65-4d0d-b665-90c7063dbdb9 {{(pid=62519) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1762.628131] env[62519]: DEBUG oslo_vmware.api [None req-01d21426-eca2-4ae1-921c-e2c4a96f9de6 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] Task: {'id': task-1802901, 'name': PowerOffVM_Task, 'duration_secs': 
0.381961} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1762.628497] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-01d21426-eca2-4ae1-921c-e2c4a96f9de6 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] [instance: af422ca1-7966-4bed-97bf-2b4c5285eaab] Powered off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1762.628906] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-01d21426-eca2-4ae1-921c-e2c4a96f9de6 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] [instance: af422ca1-7966-4bed-97bf-2b4c5285eaab] Unregistering the VM {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1762.629216] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c2f1e03b-1481-4b99-9482-aa9704095c3c {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1762.641042] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-de6056f6-9d9f-42ab-913c-cfc39f5eba39 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] [instance: 76786353-f93f-4e7e-b3f7-7f22ae4b7b41] Unregistered the VM {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1762.641042] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-de6056f6-9d9f-42ab-913c-cfc39f5eba39 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] [instance: 76786353-f93f-4e7e-b3f7-7f22ae4b7b41] Deleting contents of the VM from datastore datastore1 {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1762.641042] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-de6056f6-9d9f-42ab-913c-cfc39f5eba39 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] Deleting the datastore file [datastore1] 76786353-f93f-4e7e-b3f7-7f22ae4b7b41 {{(pid=62519) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1762.641042] env[62519]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-bf9c9303-df90-4cab-8b92-7b70a6af8eac {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1762.644149] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-7e02c46c-e5c8-46a7-b4cc-fe4ab917acc4 tempest-ServerRescueTestJSONUnderV235-415838788 tempest-ServerRescueTestJSONUnderV235-415838788-project-member] [instance: 156ed02a-3365-4a4f-b4de-ea86920d3baf] Unregistered the VM {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1762.644149] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-7e02c46c-e5c8-46a7-b4cc-fe4ab917acc4 tempest-ServerRescueTestJSONUnderV235-415838788 tempest-ServerRescueTestJSONUnderV235-415838788-project-member] [instance: 156ed02a-3365-4a4f-b4de-ea86920d3baf] Deleting contents of the VM from datastore datastore1 {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1762.644348] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-7e02c46c-e5c8-46a7-b4cc-fe4ab917acc4 tempest-ServerRescueTestJSONUnderV235-415838788 tempest-ServerRescueTestJSONUnderV235-415838788-project-member] Deleting the datastore file [datastore1] 
156ed02a-3365-4a4f-b4de-ea86920d3baf {{(pid=62519) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1762.644727] env[62519]: DEBUG oslo_concurrency.lockutils [None req-c0150d6a-b718-4a52-8600-3865c31af3f8 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Lock "ad0af10d-5063-4344-b12f-1d3ee9ea1090" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 25.078s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1762.647573] env[62519]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-cc4ed66f-2092-410e-8a05-9474d044490e {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1762.652729] env[62519]: DEBUG oslo_concurrency.lockutils [None req-fceb78c5-e39c-4dee-861a-4c99ebc86aec tempest-FloatingIPsAssociationNegativeTestJSON-1468566888 tempest-FloatingIPsAssociationNegativeTestJSON-1468566888-project-member] Lock "d710c97b-a2fd-4a54-baaa-ec7664895ce7" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 26.908s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1762.658177] env[62519]: DEBUG oslo_vmware.api [None req-de6056f6-9d9f-42ab-913c-cfc39f5eba39 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] Waiting for the task: (returnval){ [ 1762.658177] env[62519]: value = "task-1802905" [ 1762.658177] env[62519]: _type = "Task" [ 1762.658177] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1762.664625] env[62519]: DEBUG oslo_vmware.api [None req-7e02c46c-e5c8-46a7-b4cc-fe4ab917acc4 tempest-ServerRescueTestJSONUnderV235-415838788 tempest-ServerRescueTestJSONUnderV235-415838788-project-member] Waiting for the task: (returnval){ [ 1762.664625] env[62519]: value = "task-1802906" [ 1762.664625] env[62519]: _type = "Task" [ 1762.664625] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1762.674849] env[62519]: DEBUG oslo_vmware.api [None req-de6056f6-9d9f-42ab-913c-cfc39f5eba39 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] Task: {'id': task-1802905, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1762.682504] env[62519]: DEBUG oslo_vmware.api [None req-7e02c46c-e5c8-46a7-b4cc-fe4ab917acc4 tempest-ServerRescueTestJSONUnderV235-415838788 tempest-ServerRescueTestJSONUnderV235-415838788-project-member] Task: {'id': task-1802906, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1762.708494] env[62519]: INFO nova.compute.manager [-] [instance: dd60afd6-2834-4fca-a846-e39d57aabd60] Took 1.54 seconds to deallocate network for instance. [ 1762.796378] env[62519]: DEBUG oslo_vmware.api [None req-84dc4058-6eb4-4380-b9f1-346dccf99218 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Task: {'id': task-1802897, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.724783} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1762.796683] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-84dc4058-6eb4-4380-b9f1-346dccf99218 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk to [datastore1] ee4b10ba-1c56-47cf-a528-d6e65c286ddb/ee4b10ba-1c56-47cf-a528-d6e65c286ddb.vmdk {{(pid=62519) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1762.796948] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-84dc4058-6eb4-4380-b9f1-346dccf99218 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] [instance: ee4b10ba-1c56-47cf-a528-d6e65c286ddb] Extending root virtual disk to 1048576 {{(pid=62519) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1762.797163] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-846d036a-4e1f-4059-9e8e-a65c480f5ff8 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1762.805877] env[62519]: DEBUG oslo_vmware.api [None req-84dc4058-6eb4-4380-b9f1-346dccf99218 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Waiting for the task: (returnval){ [ 1762.805877] env[62519]: value = "task-1802907" [ 1762.805877] env[62519]: _type = "Task" [ 1762.805877] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1762.831664] env[62519]: DEBUG oslo_vmware.api [None req-84dc4058-6eb4-4380-b9f1-346dccf99218 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Task: {'id': task-1802907, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1762.870498] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-01d21426-eca2-4ae1-921c-e2c4a96f9de6 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] [instance: af422ca1-7966-4bed-97bf-2b4c5285eaab] Unregistered the VM {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1762.870498] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-01d21426-eca2-4ae1-921c-e2c4a96f9de6 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] [instance: af422ca1-7966-4bed-97bf-2b4c5285eaab] Deleting contents of the VM from datastore datastore1 {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1762.870498] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-01d21426-eca2-4ae1-921c-e2c4a96f9de6 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] Deleting the datastore file [datastore1] af422ca1-7966-4bed-97bf-2b4c5285eaab {{(pid=62519) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1762.870498] env[62519]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-7c7b44ea-fddb-405a-8b22-86c2d2e6204b {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1762.881562] env[62519]: DEBUG oslo_vmware.api [None req-01d21426-eca2-4ae1-921c-e2c4a96f9de6 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] Waiting for the task: (returnval){ [ 1762.881562] env[62519]: value = "task-1802908" [ 1762.881562] env[62519]: _type = "Task" [ 1762.881562] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1762.892121] env[62519]: DEBUG oslo_vmware.api [None req-01d21426-eca2-4ae1-921c-e2c4a96f9de6 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] Task: {'id': task-1802908, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1762.983024] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9dd53793-e804-4cda-b9d6-2773ba582a22 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1762.992203] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e395c645-de95-4b26-9708-c09ec9db18b1 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1762.998952] env[62519]: DEBUG oslo_concurrency.lockutils [req-0177f4f4-8629-426e-a0ad-6f7c2c74d801 req-e80c75b6-68fa-406d-bc49-5528c89fcc08 service nova] Releasing lock "refresh_cache-156ed02a-3365-4a4f-b4de-ea86920d3baf" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1762.999224] env[62519]: DEBUG nova.compute.manager [req-0177f4f4-8629-426e-a0ad-6f7c2c74d801 req-e80c75b6-68fa-406d-bc49-5528c89fcc08 service nova] [instance: 2ea8304e-5b91-4908-a876-6e2c780b1da9] Received event network-vif-plugged-224051cd-32a2-49fc-b4a3-523f444e85a2 {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1762.999418] env[62519]: DEBUG oslo_concurrency.lockutils [req-0177f4f4-8629-426e-a0ad-6f7c2c74d801 req-e80c75b6-68fa-406d-bc49-5528c89fcc08 service nova] Acquiring lock "2ea8304e-5b91-4908-a876-6e2c780b1da9-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1762.999658] env[62519]: DEBUG oslo_concurrency.lockutils [req-0177f4f4-8629-426e-a0ad-6f7c2c74d801 req-e80c75b6-68fa-406d-bc49-5528c89fcc08 service nova] Lock "2ea8304e-5b91-4908-a876-6e2c780b1da9-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1762.999892] env[62519]: DEBUG oslo_concurrency.lockutils [req-0177f4f4-8629-426e-a0ad-6f7c2c74d801 req-e80c75b6-68fa-406d-bc49-5528c89fcc08 service nova] Lock "2ea8304e-5b91-4908-a876-6e2c780b1da9-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1763.000021] env[62519]: DEBUG nova.compute.manager [req-0177f4f4-8629-426e-a0ad-6f7c2c74d801 req-e80c75b6-68fa-406d-bc49-5528c89fcc08 service nova] [instance: 2ea8304e-5b91-4908-a876-6e2c780b1da9] No waiting events found dispatching network-vif-plugged-224051cd-32a2-49fc-b4a3-523f444e85a2 {{(pid=62519) pop_instance_event /opt/stack/nova/nova/compute/manager.py:323}} [ 1763.000218] env[62519]: WARNING nova.compute.manager [req-0177f4f4-8629-426e-a0ad-6f7c2c74d801 req-e80c75b6-68fa-406d-bc49-5528c89fcc08 service nova] [instance: 2ea8304e-5b91-4908-a876-6e2c780b1da9] Received unexpected event network-vif-plugged-224051cd-32a2-49fc-b4a3-523f444e85a2 for instance with vm_state building and task_state spawning. 
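
[Editor's note] The entries above trace Neutron's external-event path for port 224051cd-32a2-49fc-b4a3-523f444e85a2: the service user delivers network-vif-plugged and network-changed events, the compute manager takes the per-instance "-events" lock, pops any matching waiter (logging the "Received unexpected event" WARNING when nothing is waiting), and then refreshes the instance network info cache under the "refresh_cache-<uuid>" lock. The following is a minimal, hypothetical sketch of that dispatch pattern only; it is not the actual nova.compute.manager code, and every class, method, and argument name below is illustrative.

    # Illustrative sketch of the external-event dispatch pattern seen in the log.
    # Not Nova's real implementation; names are hypothetical.
    import logging
    import threading

    LOG = logging.getLogger(__name__)

    class ExternalEventDispatcher:
        def __init__(self):
            self._lock = threading.Lock()   # stands in for the "<uuid>-events" lock
            self._waiters = {}              # (instance_uuid, event_tag) -> threading.Event
            self._nw_cache = {}             # instance_uuid -> cached network_info

        def prepare_for_event(self, instance_uuid, event_tag):
            """Registered by the code path that expects the event (e.g. while spawning)."""
            waiter = threading.Event()
            with self._lock:
                self._waiters[(instance_uuid, event_tag)] = waiter
            return waiter

        def external_instance_event(self, instance_uuid, event_tag, refresh_cb):
            """Called when the network service reports e.g. network-vif-plugged-<port-id>."""
            with self._lock:
                waiter = self._waiters.pop((instance_uuid, event_tag), None)
            if waiter is not None:
                waiter.set()   # unblock whoever was waiting on this event
            else:
                # Corresponds to the WARNING above: no one was waiting for this event.
                LOG.warning("Received unexpected event %s for instance %s",
                            event_tag, instance_uuid)
            if event_tag.startswith("network-changed"):
                # Refresh the cached network_info, as the log does after the event.
                self._nw_cache[instance_uuid] = refresh_cb(instance_uuid)

A spawn path would call prepare_for_event() before plugging the VIF and then wait on the returned Event; events that arrive with no registered waiter (as happens here because the instance is still building) only trigger the warning and the cache refresh.
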
[ 1763.000418] env[62519]: DEBUG nova.compute.manager [req-0177f4f4-8629-426e-a0ad-6f7c2c74d801 req-e80c75b6-68fa-406d-bc49-5528c89fcc08 service nova] [instance: 2ea8304e-5b91-4908-a876-6e2c780b1da9] Received event network-changed-224051cd-32a2-49fc-b4a3-523f444e85a2 {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1763.000580] env[62519]: DEBUG nova.compute.manager [req-0177f4f4-8629-426e-a0ad-6f7c2c74d801 req-e80c75b6-68fa-406d-bc49-5528c89fcc08 service nova] [instance: 2ea8304e-5b91-4908-a876-6e2c780b1da9] Refreshing instance network info cache due to event network-changed-224051cd-32a2-49fc-b4a3-523f444e85a2. {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11628}} [ 1763.000769] env[62519]: DEBUG oslo_concurrency.lockutils [req-0177f4f4-8629-426e-a0ad-6f7c2c74d801 req-e80c75b6-68fa-406d-bc49-5528c89fcc08 service nova] Acquiring lock "refresh_cache-2ea8304e-5b91-4908-a876-6e2c780b1da9" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1763.000967] env[62519]: DEBUG oslo_concurrency.lockutils [req-0177f4f4-8629-426e-a0ad-6f7c2c74d801 req-e80c75b6-68fa-406d-bc49-5528c89fcc08 service nova] Acquired lock "refresh_cache-2ea8304e-5b91-4908-a876-6e2c780b1da9" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1763.001122] env[62519]: DEBUG nova.network.neutron [req-0177f4f4-8629-426e-a0ad-6f7c2c74d801 req-e80c75b6-68fa-406d-bc49-5528c89fcc08 service nova] [instance: 2ea8304e-5b91-4908-a876-6e2c780b1da9] Refreshing network info cache for port 224051cd-32a2-49fc-b4a3-523f444e85a2 {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1763.033434] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8035a87c-81dc-47f6-bd7a-5fab49502050 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1763.045527] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8db45d5-47ee-4076-9fdd-5c46fb9356ec {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1763.055855] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1802900, 'name': CreateVM_Task, 'duration_secs': 0.581081} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1763.064872] env[62519]: DEBUG nova.objects.base [None req-323daec2-ec4b-4288-a31b-a050d5222299 tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] Object Instance<99f22198-1a65-4d0d-b665-90c7063dbdb9> lazy-loaded attributes: flavor,info_cache {{(pid=62519) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1763.065997] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2ea8304e-5b91-4908-a876-6e2c780b1da9] Created VM on the ESX host {{(pid=62519) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1763.066776] env[62519]: DEBUG nova.compute.provider_tree [None req-3222e7b5-02b9-4fbc-bf91-05dacee08ed0 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Inventory has not changed in ProviderTree for provider: f8ca0d98-9158-4b85-ae0e-b106f966dd44 {{(pid=62519) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1763.068515] env[62519]: DEBUG oslo_concurrency.lockutils [None req-12f12914-bee9-4598-82bd-c3b629c1401f tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1763.068719] env[62519]: DEBUG oslo_concurrency.lockutils [None req-12f12914-bee9-4598-82bd-c3b629c1401f tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Acquired lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1763.069011] env[62519]: DEBUG oslo_concurrency.lockutils [None req-12f12914-bee9-4598-82bd-c3b629c1401f tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1763.069271] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2a50cd72-ee15-4dca-9ac2-1485ac9c1417 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1763.075361] env[62519]: DEBUG oslo_vmware.api [None req-12f12914-bee9-4598-82bd-c3b629c1401f tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Waiting for the task: (returnval){ [ 1763.075361] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]52b7e7db-94d7-1f1d-5a4e-9ea8a2ae2da5" [ 1763.075361] env[62519]: _type = "Task" [ 1763.075361] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1763.085617] env[62519]: DEBUG oslo_vmware.api [None req-12f12914-bee9-4598-82bd-c3b629c1401f tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52b7e7db-94d7-1f1d-5a4e-9ea8a2ae2da5, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1763.176737] env[62519]: DEBUG oslo_vmware.api [None req-de6056f6-9d9f-42ab-913c-cfc39f5eba39 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] Task: {'id': task-1802905, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.435555} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1763.177479] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-de6056f6-9d9f-42ab-913c-cfc39f5eba39 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] Deleted the datastore file {{(pid=62519) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1763.177837] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-de6056f6-9d9f-42ab-913c-cfc39f5eba39 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] [instance: 76786353-f93f-4e7e-b3f7-7f22ae4b7b41] Deleted contents of the VM from datastore datastore1 {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1763.177948] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-de6056f6-9d9f-42ab-913c-cfc39f5eba39 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] [instance: 76786353-f93f-4e7e-b3f7-7f22ae4b7b41] Instance destroyed {{(pid=62519) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1763.178065] env[62519]: INFO nova.compute.manager [None req-de6056f6-9d9f-42ab-913c-cfc39f5eba39 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] [instance: 76786353-f93f-4e7e-b3f7-7f22ae4b7b41] Took 1.29 seconds to destroy the instance on the hypervisor. [ 1763.178311] env[62519]: DEBUG oslo.service.loopingcall [None req-de6056f6-9d9f-42ab-913c-cfc39f5eba39 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62519) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1763.178820] env[62519]: DEBUG nova.compute.manager [-] [instance: 76786353-f93f-4e7e-b3f7-7f22ae4b7b41] Deallocating network for instance {{(pid=62519) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2297}} [ 1763.178950] env[62519]: DEBUG nova.network.neutron [-] [instance: 76786353-f93f-4e7e-b3f7-7f22ae4b7b41] deallocate_for_instance() {{(pid=62519) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1763.185807] env[62519]: DEBUG oslo_vmware.api [None req-7e02c46c-e5c8-46a7-b4cc-fe4ab917acc4 tempest-ServerRescueTestJSONUnderV235-415838788 tempest-ServerRescueTestJSONUnderV235-415838788-project-member] Task: {'id': task-1802906, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1763.210438] env[62519]: INFO nova.compute.manager [None req-087464bc-7aa6-42f5-9e15-0aed3a0c7295 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] [instance: 88f9351c-253b-49dd-a88e-b8585ea742ac] Rebuilding instance [ 1763.217425] env[62519]: DEBUG oslo_concurrency.lockutils [None req-f01efc4a-1a23-4e62-a702-1f856670c4f6 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1763.260879] env[62519]: DEBUG nova.compute.manager [None req-087464bc-7aa6-42f5-9e15-0aed3a0c7295 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] [instance: 88f9351c-253b-49dd-a88e-b8585ea742ac] Checking state {{(pid=62519) _get_power_state /opt/stack/nova/nova/compute/manager.py:1799}} [ 1763.262053] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f22c33b-3390-4077-837d-310a5852c0b3 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1763.321982] env[62519]: DEBUG oslo_vmware.api [None req-84dc4058-6eb4-4380-b9f1-346dccf99218 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Task: {'id': task-1802907, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.226479} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1763.322256] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-84dc4058-6eb4-4380-b9f1-346dccf99218 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] [instance: ee4b10ba-1c56-47cf-a528-d6e65c286ddb] Extended root virtual disk {{(pid=62519) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1763.323024] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-091979d0-d1e1-4ed5-bc85-be959517532a {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1763.353583] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-84dc4058-6eb4-4380-b9f1-346dccf99218 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] [instance: ee4b10ba-1c56-47cf-a528-d6e65c286ddb] Reconfiguring VM instance instance-00000053 to attach disk [datastore1] ee4b10ba-1c56-47cf-a528-d6e65c286ddb/ee4b10ba-1c56-47cf-a528-d6e65c286ddb.vmdk or device None with type sparse {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1763.356845] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-97cfbdb3-b4a1-42ae-aff2-e3cf09b31043 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1763.381369] env[62519]: DEBUG oslo_vmware.api [None req-84dc4058-6eb4-4380-b9f1-346dccf99218 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Waiting for the task: (returnval){ [ 1763.381369] env[62519]: value = "task-1802909" [ 
1763.381369] env[62519]: _type = "Task" [ 1763.381369] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1763.397021] env[62519]: DEBUG oslo_vmware.api [None req-84dc4058-6eb4-4380-b9f1-346dccf99218 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Task: {'id': task-1802909, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1763.400605] env[62519]: DEBUG oslo_vmware.api [None req-01d21426-eca2-4ae1-921c-e2c4a96f9de6 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] Task: {'id': task-1802908, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.305119} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1763.400893] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-01d21426-eca2-4ae1-921c-e2c4a96f9de6 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] Deleted the datastore file {{(pid=62519) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1763.401082] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-01d21426-eca2-4ae1-921c-e2c4a96f9de6 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] [instance: af422ca1-7966-4bed-97bf-2b4c5285eaab] Deleted contents of the VM from datastore datastore1 {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1763.401274] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-01d21426-eca2-4ae1-921c-e2c4a96f9de6 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] [instance: af422ca1-7966-4bed-97bf-2b4c5285eaab] Instance destroyed {{(pid=62519) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1763.401432] env[62519]: INFO nova.compute.manager [None req-01d21426-eca2-4ae1-921c-e2c4a96f9de6 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] [instance: af422ca1-7966-4bed-97bf-2b4c5285eaab] Took 1.37 seconds to destroy the instance on the hypervisor. [ 1763.401667] env[62519]: DEBUG oslo.service.loopingcall [None req-01d21426-eca2-4ae1-921c-e2c4a96f9de6 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62519) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1763.401937] env[62519]: DEBUG nova.compute.manager [-] [instance: af422ca1-7966-4bed-97bf-2b4c5285eaab] Deallocating network for instance {{(pid=62519) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2297}} [ 1763.401937] env[62519]: DEBUG nova.network.neutron [-] [instance: af422ca1-7966-4bed-97bf-2b4c5285eaab] deallocate_for_instance() {{(pid=62519) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1763.504813] env[62519]: DEBUG nova.network.neutron [None req-caa9ca9c-57be-478a-8782-107ca9749996 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] [instance: 5da884af-d8d2-409b-99bd-e5370e44e9f0] Successfully updated port: f8e70085-af0c-4731-8b61-cf5cd2460344 {{(pid=62519) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1763.575909] env[62519]: DEBUG nova.scheduler.client.report [None req-3222e7b5-02b9-4fbc-bf91-05dacee08ed0 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Inventory has not changed for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1763.589904] env[62519]: DEBUG nova.compute.manager [req-921f10ce-c061-4125-b108-b223f1964f65 req-d80d34fe-4358-44cb-9351-dd693ea8c1bd service nova] [instance: 5da884af-d8d2-409b-99bd-e5370e44e9f0] Received event network-vif-plugged-f8e70085-af0c-4731-8b61-cf5cd2460344 {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1763.590516] env[62519]: DEBUG oslo_concurrency.lockutils [req-921f10ce-c061-4125-b108-b223f1964f65 req-d80d34fe-4358-44cb-9351-dd693ea8c1bd service nova] Acquiring lock "5da884af-d8d2-409b-99bd-e5370e44e9f0-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1763.590861] env[62519]: DEBUG oslo_concurrency.lockutils [req-921f10ce-c061-4125-b108-b223f1964f65 req-d80d34fe-4358-44cb-9351-dd693ea8c1bd service nova] Lock "5da884af-d8d2-409b-99bd-e5370e44e9f0-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1763.591244] env[62519]: DEBUG oslo_concurrency.lockutils [req-921f10ce-c061-4125-b108-b223f1964f65 req-d80d34fe-4358-44cb-9351-dd693ea8c1bd service nova] Lock "5da884af-d8d2-409b-99bd-e5370e44e9f0-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1763.591535] env[62519]: DEBUG nova.compute.manager [req-921f10ce-c061-4125-b108-b223f1964f65 req-d80d34fe-4358-44cb-9351-dd693ea8c1bd service nova] [instance: 5da884af-d8d2-409b-99bd-e5370e44e9f0] No waiting events found dispatching network-vif-plugged-f8e70085-af0c-4731-8b61-cf5cd2460344 
{{(pid=62519) pop_instance_event /opt/stack/nova/nova/compute/manager.py:323}} [ 1763.592258] env[62519]: WARNING nova.compute.manager [req-921f10ce-c061-4125-b108-b223f1964f65 req-d80d34fe-4358-44cb-9351-dd693ea8c1bd service nova] [instance: 5da884af-d8d2-409b-99bd-e5370e44e9f0] Received unexpected event network-vif-plugged-f8e70085-af0c-4731-8b61-cf5cd2460344 for instance with vm_state building and task_state spawning. [ 1763.600862] env[62519]: DEBUG oslo_vmware.api [None req-12f12914-bee9-4598-82bd-c3b629c1401f tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52b7e7db-94d7-1f1d-5a4e-9ea8a2ae2da5, 'name': SearchDatastore_Task, 'duration_secs': 0.029838} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1763.601308] env[62519]: DEBUG oslo_concurrency.lockutils [None req-12f12914-bee9-4598-82bd-c3b629c1401f tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Releasing lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1763.602114] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-12f12914-bee9-4598-82bd-c3b629c1401f tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] [instance: 2ea8304e-5b91-4908-a876-6e2c780b1da9] Processing image 15793716-f1d9-4a86-9030-717adf498693 {{(pid=62519) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1763.602495] env[62519]: DEBUG oslo_concurrency.lockutils [None req-12f12914-bee9-4598-82bd-c3b629c1401f tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1763.602762] env[62519]: DEBUG oslo_concurrency.lockutils [None req-12f12914-bee9-4598-82bd-c3b629c1401f tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Acquired lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1763.603170] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-12f12914-bee9-4598-82bd-c3b629c1401f tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62519) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1763.604427] env[62519]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-dc99a871-056e-4b0f-909b-efdbc769279c {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1763.607781] env[62519]: DEBUG nova.compute.manager [req-eef043dd-6b19-4611-be2a-cddb04e9151a req-6677d5a6-3427-423f-be2c-41a3fdf29bbf service nova] [instance: dd60afd6-2834-4fca-a846-e39d57aabd60] Received event network-vif-deleted-6040576e-e4b2-41ff-bd84-514c3ff9090c {{(pid=62519) 
external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1763.618883] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-12f12914-bee9-4598-82bd-c3b629c1401f tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62519) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1763.618883] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-12f12914-bee9-4598-82bd-c3b629c1401f tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62519) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1763.619780] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1e0621ab-c6ac-4e4d-b600-79cfe5c88de3 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1763.628022] env[62519]: DEBUG oslo_vmware.api [None req-12f12914-bee9-4598-82bd-c3b629c1401f tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Waiting for the task: (returnval){ [ 1763.628022] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]52dd4a4c-4b29-d86b-2eeb-41c55a3b3672" [ 1763.628022] env[62519]: _type = "Task" [ 1763.628022] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1763.636583] env[62519]: DEBUG oslo_vmware.api [None req-12f12914-bee9-4598-82bd-c3b629c1401f tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52dd4a4c-4b29-d86b-2eeb-41c55a3b3672, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1763.680216] env[62519]: DEBUG oslo_vmware.api [None req-7e02c46c-e5c8-46a7-b4cc-fe4ab917acc4 tempest-ServerRescueTestJSONUnderV235-415838788 tempest-ServerRescueTestJSONUnderV235-415838788-project-member] Task: {'id': task-1802906, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.580986} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1763.683701] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-7e02c46c-e5c8-46a7-b4cc-fe4ab917acc4 tempest-ServerRescueTestJSONUnderV235-415838788 tempest-ServerRescueTestJSONUnderV235-415838788-project-member] Deleted the datastore file {{(pid=62519) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1763.683901] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-7e02c46c-e5c8-46a7-b4cc-fe4ab917acc4 tempest-ServerRescueTestJSONUnderV235-415838788 tempest-ServerRescueTestJSONUnderV235-415838788-project-member] [instance: 156ed02a-3365-4a4f-b4de-ea86920d3baf] Deleted contents of the VM from datastore datastore1 {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1763.684093] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-7e02c46c-e5c8-46a7-b4cc-fe4ab917acc4 tempest-ServerRescueTestJSONUnderV235-415838788 tempest-ServerRescueTestJSONUnderV235-415838788-project-member] [instance: 156ed02a-3365-4a4f-b4de-ea86920d3baf] Instance destroyed {{(pid=62519) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1763.684271] env[62519]: INFO nova.compute.manager [None req-7e02c46c-e5c8-46a7-b4cc-fe4ab917acc4 tempest-ServerRescueTestJSONUnderV235-415838788 tempest-ServerRescueTestJSONUnderV235-415838788-project-member] [instance: 156ed02a-3365-4a4f-b4de-ea86920d3baf] Took 1.75 seconds to destroy the instance on the hypervisor. [ 1763.684511] env[62519]: DEBUG oslo.service.loopingcall [None req-7e02c46c-e5c8-46a7-b4cc-fe4ab917acc4 tempest-ServerRescueTestJSONUnderV235-415838788 tempest-ServerRescueTestJSONUnderV235-415838788-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=62519) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1763.684710] env[62519]: DEBUG nova.compute.manager [-] [instance: 156ed02a-3365-4a4f-b4de-ea86920d3baf] Deallocating network for instance {{(pid=62519) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2297}} [ 1763.684803] env[62519]: DEBUG nova.network.neutron [-] [instance: 156ed02a-3365-4a4f-b4de-ea86920d3baf] deallocate_for_instance() {{(pid=62519) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1763.707959] env[62519]: DEBUG nova.network.neutron [req-0177f4f4-8629-426e-a0ad-6f7c2c74d801 req-e80c75b6-68fa-406d-bc49-5528c89fcc08 service nova] [instance: 2ea8304e-5b91-4908-a876-6e2c780b1da9] Updated VIF entry in instance network info cache for port 224051cd-32a2-49fc-b4a3-523f444e85a2.
{{(pid=62519) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1763.708381] env[62519]: DEBUG nova.network.neutron [req-0177f4f4-8629-426e-a0ad-6f7c2c74d801 req-e80c75b6-68fa-406d-bc49-5528c89fcc08 service nova] [instance: 2ea8304e-5b91-4908-a876-6e2c780b1da9] Updating instance_info_cache with network_info: [{"id": "224051cd-32a2-49fc-b4a3-523f444e85a2", "address": "fa:16:3e:85:a2:ec", "network": {"id": "8d760b11-0e90-40f8-a7a6-509485520bf3", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-619921944-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "448555031bb64aefafd0fcc67f4df10a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4225eb1f-0af4-4ed4-8e3d-de822eb6d4ea", "external-id": "nsx-vlan-transportzone-40", "segmentation_id": 40, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap224051cd-32", "ovs_interfaceid": "224051cd-32a2-49fc-b4a3-523f444e85a2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1763.894317] env[62519]: DEBUG oslo_vmware.api [None req-84dc4058-6eb4-4380-b9f1-346dccf99218 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Task: {'id': task-1802909, 'name': ReconfigVM_Task, 'duration_secs': 0.344452} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1763.894633] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-84dc4058-6eb4-4380-b9f1-346dccf99218 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] [instance: ee4b10ba-1c56-47cf-a528-d6e65c286ddb] Reconfigured VM instance instance-00000053 to attach disk [datastore1] ee4b10ba-1c56-47cf-a528-d6e65c286ddb/ee4b10ba-1c56-47cf-a528-d6e65c286ddb.vmdk or device None with type sparse {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1763.895271] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-d8e79a9a-1c57-45f8-80e5-72bd8a4ae0ae {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1763.903341] env[62519]: DEBUG oslo_vmware.api [None req-84dc4058-6eb4-4380-b9f1-346dccf99218 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Waiting for the task: (returnval){ [ 1763.903341] env[62519]: value = "task-1802910" [ 1763.903341] env[62519]: _type = "Task" [ 1763.903341] env[62519]: } to complete. 
{{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1763.912475] env[62519]: DEBUG oslo_vmware.api [None req-84dc4058-6eb4-4380-b9f1-346dccf99218 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Task: {'id': task-1802910, 'name': Rename_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1764.012905] env[62519]: DEBUG oslo_concurrency.lockutils [None req-caa9ca9c-57be-478a-8782-107ca9749996 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Acquiring lock "refresh_cache-5da884af-d8d2-409b-99bd-e5370e44e9f0" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1764.013091] env[62519]: DEBUG oslo_concurrency.lockutils [None req-caa9ca9c-57be-478a-8782-107ca9749996 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Acquired lock "refresh_cache-5da884af-d8d2-409b-99bd-e5370e44e9f0" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1764.013252] env[62519]: DEBUG nova.network.neutron [None req-caa9ca9c-57be-478a-8782-107ca9749996 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] [instance: 5da884af-d8d2-409b-99bd-e5370e44e9f0] Building network info cache for instance {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1764.082907] env[62519]: DEBUG oslo_concurrency.lockutils [None req-3222e7b5-02b9-4fbc-bf91-05dacee08ed0 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.999s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1764.088023] env[62519]: DEBUG oslo_concurrency.lockutils [None req-0ec5e37a-37cf-41c7-9c93-a04efad8446e tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 15.390s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1764.088526] env[62519]: INFO nova.compute.claims [None req-0ec5e37a-37cf-41c7-9c93-a04efad8446e tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] [instance: 4a0f7975-5a07-4593-ae71-cabebdefe0fe] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1764.117100] env[62519]: INFO nova.scheduler.client.report [None req-3222e7b5-02b9-4fbc-bf91-05dacee08ed0 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Deleted allocations for instance e3a19583-b332-40e3-bdd0-d254f7a78b0a [ 1764.150022] env[62519]: DEBUG oslo_vmware.api [None req-12f12914-bee9-4598-82bd-c3b629c1401f tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52dd4a4c-4b29-d86b-2eeb-41c55a3b3672, 'name': SearchDatastore_Task, 'duration_secs': 0.012671} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1764.150821] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-dabf2064-8476-4fcb-b62c-77dfbab5e7f5 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1764.161511] env[62519]: DEBUG oslo_vmware.api [None req-12f12914-bee9-4598-82bd-c3b629c1401f tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Waiting for the task: (returnval){ [ 1764.161511] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]524c9b65-8596-3efa-3823-9b6687a9381f" [ 1764.161511] env[62519]: _type = "Task" [ 1764.161511] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1764.170932] env[62519]: DEBUG oslo_vmware.api [None req-12f12914-bee9-4598-82bd-c3b629c1401f tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]524c9b65-8596-3efa-3823-9b6687a9381f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1764.212397] env[62519]: DEBUG oslo_concurrency.lockutils [req-0177f4f4-8629-426e-a0ad-6f7c2c74d801 req-e80c75b6-68fa-406d-bc49-5528c89fcc08 service nova] Releasing lock "refresh_cache-2ea8304e-5b91-4908-a876-6e2c780b1da9" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1764.281351] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-087464bc-7aa6-42f5-9e15-0aed3a0c7295 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] [instance: 88f9351c-253b-49dd-a88e-b8585ea742ac] Powering off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1764.281693] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-03bbe9a6-4b51-4c9c-8214-7a4dcde8bbe2 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1764.292067] env[62519]: DEBUG oslo_vmware.api [None req-087464bc-7aa6-42f5-9e15-0aed3a0c7295 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Waiting for the task: (returnval){ [ 1764.292067] env[62519]: value = "task-1802911" [ 1764.292067] env[62519]: _type = "Task" [ 1764.292067] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1764.304244] env[62519]: DEBUG oslo_vmware.api [None req-087464bc-7aa6-42f5-9e15-0aed3a0c7295 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Task: {'id': task-1802911, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1764.411783] env[62519]: DEBUG nova.network.neutron [None req-323daec2-ec4b-4288-a31b-a050d5222299 tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] [instance: 99f22198-1a65-4d0d-b665-90c7063dbdb9] Updating instance_info_cache with network_info: [{"id": "55cb0499-3e6e-42ef-bd75-edafccb32e03", "address": "fa:16:3e:cb:3e:93", "network": {"id": "227244cd-a495-40a7-b8e1-1fb111ecd3ef", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-482185283-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.253", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "91c1732890db42f98f538f7a5ac0d542", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2bf99f85-3a5c-47c6-a603-e215be6ab0bd", "external-id": "nsx-vlan-transportzone-855", "segmentation_id": 855, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap55cb0499-3e", "ovs_interfaceid": "55cb0499-3e6e-42ef-bd75-edafccb32e03", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1764.417345] env[62519]: DEBUG oslo_vmware.api [None req-84dc4058-6eb4-4380-b9f1-346dccf99218 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Task: {'id': task-1802910, 'name': Rename_Task, 'duration_secs': 0.170008} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1764.417937] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-84dc4058-6eb4-4380-b9f1-346dccf99218 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] [instance: ee4b10ba-1c56-47cf-a528-d6e65c286ddb] Powering on the VM {{(pid=62519) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1764.418884] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-9eb4f512-9406-4343-82df-4375cb4770e2 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1764.430129] env[62519]: DEBUG oslo_vmware.api [None req-84dc4058-6eb4-4380-b9f1-346dccf99218 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Waiting for the task: (returnval){ [ 1764.430129] env[62519]: value = "task-1802912" [ 1764.430129] env[62519]: _type = "Task" [ 1764.430129] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1764.450368] env[62519]: DEBUG oslo_vmware.api [None req-84dc4058-6eb4-4380-b9f1-346dccf99218 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Task: {'id': task-1802912, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1764.538813] env[62519]: DEBUG nova.network.neutron [-] [instance: 76786353-f93f-4e7e-b3f7-7f22ae4b7b41] Updating instance_info_cache with network_info: [] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1764.558598] env[62519]: DEBUG nova.network.neutron [None req-caa9ca9c-57be-478a-8782-107ca9749996 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] [instance: 5da884af-d8d2-409b-99bd-e5370e44e9f0] Instance cache missing network info. {{(pid=62519) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1764.625434] env[62519]: DEBUG oslo_concurrency.lockutils [None req-3222e7b5-02b9-4fbc-bf91-05dacee08ed0 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Lock "e3a19583-b332-40e3-bdd0-d254f7a78b0a" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 22.540s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1764.673092] env[62519]: DEBUG oslo_vmware.api [None req-12f12914-bee9-4598-82bd-c3b629c1401f tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]524c9b65-8596-3efa-3823-9b6687a9381f, 'name': SearchDatastore_Task, 'duration_secs': 0.022913} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1764.681020] env[62519]: DEBUG oslo_concurrency.lockutils [None req-12f12914-bee9-4598-82bd-c3b629c1401f tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Releasing lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1764.681020] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-12f12914-bee9-4598-82bd-c3b629c1401f tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk to [datastore1] 2ea8304e-5b91-4908-a876-6e2c780b1da9/2ea8304e-5b91-4908-a876-6e2c780b1da9.vmdk {{(pid=62519) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1764.681020] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ed4393c2-eb72-4e31-b468-b6ee84e2e65b {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1764.689549] env[62519]: DEBUG oslo_vmware.api [None req-12f12914-bee9-4598-82bd-c3b629c1401f tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Waiting for the task: (returnval){ [ 1764.689549] env[62519]: value = "task-1802913" [ 1764.689549] env[62519]: _type = "Task" [ 1764.689549] env[62519]: } to complete.
{{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1764.700851] env[62519]: DEBUG oslo_vmware.api [None req-12f12914-bee9-4598-82bd-c3b629c1401f tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Task: {'id': task-1802913, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1764.705219] env[62519]: DEBUG nova.network.neutron [-] [instance: af422ca1-7966-4bed-97bf-2b4c5285eaab] Updating instance_info_cache with network_info: [] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1764.806821] env[62519]: DEBUG oslo_vmware.api [None req-087464bc-7aa6-42f5-9e15-0aed3a0c7295 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Task: {'id': task-1802911, 'name': PowerOffVM_Task, 'duration_secs': 0.221259} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1764.807202] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-087464bc-7aa6-42f5-9e15-0aed3a0c7295 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] [instance: 88f9351c-253b-49dd-a88e-b8585ea742ac] Powered off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1764.852161] env[62519]: DEBUG nova.network.neutron [None req-caa9ca9c-57be-478a-8782-107ca9749996 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] [instance: 5da884af-d8d2-409b-99bd-e5370e44e9f0] Updating instance_info_cache with network_info: [{"id": "f8e70085-af0c-4731-8b61-cf5cd2460344", "address": "fa:16:3e:4f:98:8a", "network": {"id": "14e63db8-c42b-42e5-bd96-cab60c163758", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1328688204-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "df6727c290724a8ebef5188c77e91399", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2be3fdb5-359e-43bd-8c20-2ff00e81db55", "external-id": "nsx-vlan-transportzone-986", "segmentation_id": 986, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf8e70085-af", "ovs_interfaceid": "f8e70085-af0c-4731-8b61-cf5cd2460344", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1764.919843] env[62519]: DEBUG oslo_concurrency.lockutils [None req-323daec2-ec4b-4288-a31b-a050d5222299 tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] Releasing lock "refresh_cache-99f22198-1a65-4d0d-b665-90c7063dbdb9" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1764.929917] env[62519]: INFO nova.compute.manager [None req-087464bc-7aa6-42f5-9e15-0aed3a0c7295 
tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] [instance: 88f9351c-253b-49dd-a88e-b8585ea742ac] Detaching volume 5bd7c4a6-d552-4040-aefb-7d5577b89149 [ 1764.946244] env[62519]: DEBUG oslo_vmware.api [None req-84dc4058-6eb4-4380-b9f1-346dccf99218 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Task: {'id': task-1802912, 'name': PowerOnVM_Task} progress is 88%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1764.982905] env[62519]: INFO nova.virt.block_device [None req-087464bc-7aa6-42f5-9e15-0aed3a0c7295 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] [instance: 88f9351c-253b-49dd-a88e-b8585ea742ac] Attempting to driver detach volume 5bd7c4a6-d552-4040-aefb-7d5577b89149 from mountpoint /dev/sdb [ 1764.983191] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-087464bc-7aa6-42f5-9e15-0aed3a0c7295 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] [instance: 88f9351c-253b-49dd-a88e-b8585ea742ac] Volume detach. Driver type: vmdk {{(pid=62519) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1764.983400] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-087464bc-7aa6-42f5-9e15-0aed3a0c7295 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] [instance: 88f9351c-253b-49dd-a88e-b8585ea742ac] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-373788', 'volume_id': '5bd7c4a6-d552-4040-aefb-7d5577b89149', 'name': 'volume-5bd7c4a6-d552-4040-aefb-7d5577b89149', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '88f9351c-253b-49dd-a88e-b8585ea742ac', 'attached_at': '', 'detached_at': '', 'volume_id': '5bd7c4a6-d552-4040-aefb-7d5577b89149', 'serial': '5bd7c4a6-d552-4040-aefb-7d5577b89149'} {{(pid=62519) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1764.985962] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-950ab2f8-6a43-4679-ae91-462b98282b56 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1765.014164] env[62519]: DEBUG nova.network.neutron [-] [instance: 156ed02a-3365-4a4f-b4de-ea86920d3baf] Updating instance_info_cache with network_info: [] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1765.017331] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-157d6e89-24e6-419a-b666-1d91ac9f99e4 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1765.039726] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe8198be-7c1d-4191-9b03-358dd39cef3c {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1765.043526] env[62519]: INFO nova.compute.manager [-] [instance: 76786353-f93f-4e7e-b3f7-7f22ae4b7b41] Took 1.86 seconds to deallocate network for instance. 
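
The recurring "Waiting for the task ... to complete", "progress is N%", and "completed successfully" entries in this trace come from oslo.vmware's task polling (wait_for_task / _poll_task in oslo_vmware/api.py). A minimal sketch of that calling pattern, assuming a reachable vCenter; the host, credentials, poll interval, and the vm-373788 managed-object id below are placeholders, and the exact constructor arguments can differ between oslo.vmware releases:

# Sketch of the oslo.vmware task-polling pattern; all values are placeholders.
from oslo_vmware import api as vmware_api
from oslo_vmware import vim_util

# task_poll_interval controls how often the "progress is N%" DEBUG lines
# are emitted while an asynchronous vCenter task is running.
session = vmware_api.VMwareAPISession(
    'vc.example.test',
    'administrator@vsphere.local',
    'secret',
    api_retry_count=10,
    task_poll_interval=0.5)

# Build a managed-object reference for a VM (moref value is illustrative).
vm_ref = vim_util.get_moref('vm-373788', 'VirtualMachine')

# Start an asynchronous task, then block until it finishes; wait_for_task()
# polls it and logs the "Task: {...} completed successfully." entry.
task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)
session.wait_for_task(task)
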
[ 1765.073143] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-879bc307-1086-481d-8fec-fb86157ac519 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1765.102033] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-087464bc-7aa6-42f5-9e15-0aed3a0c7295 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] The volume has not been displaced from its original location: [datastore1] volume-5bd7c4a6-d552-4040-aefb-7d5577b89149/volume-5bd7c4a6-d552-4040-aefb-7d5577b89149.vmdk. No consolidation needed. {{(pid=62519) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1765.106585] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-087464bc-7aa6-42f5-9e15-0aed3a0c7295 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] [instance: 88f9351c-253b-49dd-a88e-b8585ea742ac] Reconfiguring VM instance instance-00000047 to detach disk 2001 {{(pid=62519) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1765.108318] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1cb4c07b-ede7-44e1-8e08-9aa31a3eea3a {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1765.143364] env[62519]: DEBUG oslo_vmware.api [None req-087464bc-7aa6-42f5-9e15-0aed3a0c7295 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Waiting for the task: (returnval){ [ 1765.143364] env[62519]: value = "task-1802914" [ 1765.143364] env[62519]: _type = "Task" [ 1765.143364] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1765.156511] env[62519]: DEBUG oslo_vmware.api [None req-087464bc-7aa6-42f5-9e15-0aed3a0c7295 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Task: {'id': task-1802914, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1765.204213] env[62519]: DEBUG oslo_vmware.api [None req-12f12914-bee9-4598-82bd-c3b629c1401f tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Task: {'id': task-1802913, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1765.206911] env[62519]: INFO nova.compute.manager [-] [instance: af422ca1-7966-4bed-97bf-2b4c5285eaab] Took 1.80 seconds to deallocate network for instance. 
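
The "Acquiring lock" / "Acquired lock" / "Releasing lock" lines and the '... acquired by "..." :: waited' / '"released" by "..." :: held' lines that bracket the operations above are emitted by oslo.concurrency's lockutils. A minimal sketch of the two call forms behind them; the lock names mirror ones seen in this trace and the function bodies are placeholders:

# Sketch of the oslo.concurrency locking patterns behind these DEBUG lines.
from oslo_concurrency import lockutils

def refresh_instance_network_cache():
    """Placeholder for the work done while the cache lock is held."""
    pass

# Context-manager form: logs the plain Acquiring/Acquired/Releasing lock
# lines (the `lock` function in lockutils.py).
with lockutils.lock('refresh_cache-5da884af-d8d2-409b-99bd-e5370e44e9f0'):
    refresh_instance_network_cache()

# Decorator form: logs 'acquired by "<qualname>" :: waited Ns' on entry and
# '"released" by "<qualname>" :: held Ns' on exit (the `inner` wrapper).
@lockutils.synchronized('compute_resources')
def update_usage():
    """Placeholder standing in for the resource tracker's critical section."""
    pass

update_usage()
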
[ 1765.356670] env[62519]: DEBUG oslo_concurrency.lockutils [None req-caa9ca9c-57be-478a-8782-107ca9749996 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Releasing lock "refresh_cache-5da884af-d8d2-409b-99bd-e5370e44e9f0" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1765.357224] env[62519]: DEBUG nova.compute.manager [None req-caa9ca9c-57be-478a-8782-107ca9749996 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] [instance: 5da884af-d8d2-409b-99bd-e5370e44e9f0] Instance network_info: |[{"id": "f8e70085-af0c-4731-8b61-cf5cd2460344", "address": "fa:16:3e:4f:98:8a", "network": {"id": "14e63db8-c42b-42e5-bd96-cab60c163758", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1328688204-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "df6727c290724a8ebef5188c77e91399", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2be3fdb5-359e-43bd-8c20-2ff00e81db55", "external-id": "nsx-vlan-transportzone-986", "segmentation_id": 986, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf8e70085-af", "ovs_interfaceid": "f8e70085-af0c-4731-8b61-cf5cd2460344", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62519) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2004}} [ 1765.359169] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-caa9ca9c-57be-478a-8782-107ca9749996 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] [instance: 5da884af-d8d2-409b-99bd-e5370e44e9f0] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:4f:98:8a', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '2be3fdb5-359e-43bd-8c20-2ff00e81db55', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'f8e70085-af0c-4731-8b61-cf5cd2460344', 'vif_model': 'vmxnet3'}] {{(pid=62519) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1765.370938] env[62519]: DEBUG oslo.service.loopingcall [None req-caa9ca9c-57be-478a-8782-107ca9749996 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62519) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1765.374324] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5da884af-d8d2-409b-99bd-e5370e44e9f0] Creating VM on the ESX host {{(pid=62519) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1765.375262] env[62519]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-22341228-921f-4efc-93b5-a6b14a0e2280 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1765.408199] env[62519]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1765.408199] env[62519]: value = "task-1802915" [ 1765.408199] env[62519]: _type = "Task" [ 1765.408199] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1765.420696] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1802915, 'name': CreateVM_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1765.443634] env[62519]: DEBUG oslo_vmware.api [None req-84dc4058-6eb4-4380-b9f1-346dccf99218 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Task: {'id': task-1802912, 'name': PowerOnVM_Task} progress is 100%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1765.524165] env[62519]: INFO nova.compute.manager [-] [instance: 156ed02a-3365-4a4f-b4de-ea86920d3baf] Took 1.84 seconds to deallocate network for instance. [ 1765.531291] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b0c0dd7-d046-4919-9701-2654c367efbb {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1765.540487] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ce1bf32-974c-43d3-b3fb-a8050039bf74 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1765.580724] env[62519]: DEBUG oslo_concurrency.lockutils [None req-de6056f6-9d9f-42ab-913c-cfc39f5eba39 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1765.581996] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-324814cd-f5cb-420f-b72c-a143196c28d5 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1765.592577] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0e15fa9-5e33-4d15-9c3a-b70e5d165e58 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1765.611190] env[62519]: DEBUG nova.compute.provider_tree [None req-0ec5e37a-37cf-41c7-9c93-a04efad8446e tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Inventory has not changed in ProviderTree for provider: f8ca0d98-9158-4b85-ae0e-b106f966dd44 {{(pid=62519) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 
1765.622732] env[62519]: DEBUG nova.compute.manager [req-c1ea23b5-0847-4aa1-9bd5-c92d46115c77 req-f951cfec-f788-40f9-845b-25c75ab91123 service nova] [instance: 5da884af-d8d2-409b-99bd-e5370e44e9f0] Received event network-changed-f8e70085-af0c-4731-8b61-cf5cd2460344 {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1765.623318] env[62519]: DEBUG nova.compute.manager [req-c1ea23b5-0847-4aa1-9bd5-c92d46115c77 req-f951cfec-f788-40f9-845b-25c75ab91123 service nova] [instance: 5da884af-d8d2-409b-99bd-e5370e44e9f0] Refreshing instance network info cache due to event network-changed-f8e70085-af0c-4731-8b61-cf5cd2460344. {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11628}} [ 1765.623318] env[62519]: DEBUG oslo_concurrency.lockutils [req-c1ea23b5-0847-4aa1-9bd5-c92d46115c77 req-f951cfec-f788-40f9-845b-25c75ab91123 service nova] Acquiring lock "refresh_cache-5da884af-d8d2-409b-99bd-e5370e44e9f0" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1765.623482] env[62519]: DEBUG oslo_concurrency.lockutils [req-c1ea23b5-0847-4aa1-9bd5-c92d46115c77 req-f951cfec-f788-40f9-845b-25c75ab91123 service nova] Acquired lock "refresh_cache-5da884af-d8d2-409b-99bd-e5370e44e9f0" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1765.623482] env[62519]: DEBUG nova.network.neutron [req-c1ea23b5-0847-4aa1-9bd5-c92d46115c77 req-f951cfec-f788-40f9-845b-25c75ab91123 service nova] [instance: 5da884af-d8d2-409b-99bd-e5370e44e9f0] Refreshing network info cache for port f8e70085-af0c-4731-8b61-cf5cd2460344 {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1765.660173] env[62519]: DEBUG oslo_vmware.api [None req-087464bc-7aa6-42f5-9e15-0aed3a0c7295 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Task: {'id': task-1802914, 'name': ReconfigVM_Task, 'duration_secs': 0.417421} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1765.660524] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-087464bc-7aa6-42f5-9e15-0aed3a0c7295 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] [instance: 88f9351c-253b-49dd-a88e-b8585ea742ac] Reconfigured VM instance instance-00000047 to detach disk 2001 {{(pid=62519) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1765.665562] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ec20d420-ad52-4745-b48f-5e0cb72337dd {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1765.684605] env[62519]: DEBUG oslo_vmware.api [None req-087464bc-7aa6-42f5-9e15-0aed3a0c7295 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Waiting for the task: (returnval){ [ 1765.684605] env[62519]: value = "task-1802916" [ 1765.684605] env[62519]: _type = "Task" [ 1765.684605] env[62519]: } to complete. 
{{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1765.695582] env[62519]: DEBUG oslo_vmware.api [None req-087464bc-7aa6-42f5-9e15-0aed3a0c7295 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Task: {'id': task-1802916, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1765.705108] env[62519]: DEBUG oslo_vmware.api [None req-12f12914-bee9-4598-82bd-c3b629c1401f tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Task: {'id': task-1802913, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.668611} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1765.705407] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-12f12914-bee9-4598-82bd-c3b629c1401f tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk to [datastore1] 2ea8304e-5b91-4908-a876-6e2c780b1da9/2ea8304e-5b91-4908-a876-6e2c780b1da9.vmdk {{(pid=62519) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1765.705634] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-12f12914-bee9-4598-82bd-c3b629c1401f tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] [instance: 2ea8304e-5b91-4908-a876-6e2c780b1da9] Extending root virtual disk to 1048576 {{(pid=62519) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1765.706540] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-2393a8e9-82ea-45d9-8cf3-ab40df258b44 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1765.714144] env[62519]: DEBUG oslo_vmware.api [None req-12f12914-bee9-4598-82bd-c3b629c1401f tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Waiting for the task: (returnval){ [ 1765.714144] env[62519]: value = "task-1802917" [ 1765.714144] env[62519]: _type = "Task" [ 1765.714144] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1765.719035] env[62519]: DEBUG oslo_concurrency.lockutils [None req-01d21426-eca2-4ae1-921c-e2c4a96f9de6 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1765.727968] env[62519]: DEBUG oslo_vmware.api [None req-12f12914-bee9-4598-82bd-c3b629c1401f tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Task: {'id': task-1802917, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1765.918938] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1802915, 'name': CreateVM_Task} progress is 99%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1765.932150] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-323daec2-ec4b-4288-a31b-a050d5222299 tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] [instance: 99f22198-1a65-4d0d-b665-90c7063dbdb9] Powering on the VM {{(pid=62519) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1765.932549] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-38281f10-544e-41c7-a18e-d8bc8a09a919 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1765.944960] env[62519]: DEBUG oslo_vmware.api [None req-84dc4058-6eb4-4380-b9f1-346dccf99218 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Task: {'id': task-1802912, 'name': PowerOnVM_Task, 'duration_secs': 1.059841} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1765.946422] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-84dc4058-6eb4-4380-b9f1-346dccf99218 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] [instance: ee4b10ba-1c56-47cf-a528-d6e65c286ddb] Powered on the VM {{(pid=62519) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1765.947092] env[62519]: INFO nova.compute.manager [None req-84dc4058-6eb4-4380-b9f1-346dccf99218 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] [instance: ee4b10ba-1c56-47cf-a528-d6e65c286ddb] Took 10.24 seconds to spawn the instance on the hypervisor. [ 1765.947092] env[62519]: DEBUG nova.compute.manager [None req-84dc4058-6eb4-4380-b9f1-346dccf99218 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] [instance: ee4b10ba-1c56-47cf-a528-d6e65c286ddb] Checking state {{(pid=62519) _get_power_state /opt/stack/nova/nova/compute/manager.py:1799}} [ 1765.947288] env[62519]: DEBUG oslo_vmware.api [None req-323daec2-ec4b-4288-a31b-a050d5222299 tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] Waiting for the task: (returnval){ [ 1765.947288] env[62519]: value = "task-1802918" [ 1765.947288] env[62519]: _type = "Task" [ 1765.947288] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1765.947875] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ff2f241-3c5f-48bc-914e-ab558b49af70 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1765.961925] env[62519]: DEBUG oslo_vmware.api [None req-323daec2-ec4b-4288-a31b-a050d5222299 tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] Task: {'id': task-1802918, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1766.036688] env[62519]: DEBUG oslo_concurrency.lockutils [None req-7e02c46c-e5c8-46a7-b4cc-fe4ab917acc4 tempest-ServerRescueTestJSONUnderV235-415838788 tempest-ServerRescueTestJSONUnderV235-415838788-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1766.069340] env[62519]: DEBUG oslo_concurrency.lockutils [None req-13b523f6-ff81-4d94-906d-b58b94706344 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Acquiring lock "358ee402-1112-4eea-a518-a45a6bf92c31" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1766.069612] env[62519]: DEBUG oslo_concurrency.lockutils [None req-13b523f6-ff81-4d94-906d-b58b94706344 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Lock "358ee402-1112-4eea-a518-a45a6bf92c31" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1766.114374] env[62519]: DEBUG nova.scheduler.client.report [None req-0ec5e37a-37cf-41c7-9c93-a04efad8446e tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Inventory has not changed for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1766.195725] env[62519]: DEBUG oslo_vmware.api [None req-087464bc-7aa6-42f5-9e15-0aed3a0c7295 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Task: {'id': task-1802916, 'name': ReconfigVM_Task, 'duration_secs': 0.366386} completed successfully.
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1766.196020] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-087464bc-7aa6-42f5-9e15-0aed3a0c7295 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] [instance: 88f9351c-253b-49dd-a88e-b8585ea742ac] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-373788', 'volume_id': '5bd7c4a6-d552-4040-aefb-7d5577b89149', 'name': 'volume-5bd7c4a6-d552-4040-aefb-7d5577b89149', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '88f9351c-253b-49dd-a88e-b8585ea742ac', 'attached_at': '', 'detached_at': '', 'volume_id': '5bd7c4a6-d552-4040-aefb-7d5577b89149', 'serial': '5bd7c4a6-d552-4040-aefb-7d5577b89149'} {{(pid=62519) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1766.224724] env[62519]: DEBUG oslo_vmware.api [None req-12f12914-bee9-4598-82bd-c3b629c1401f tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Task: {'id': task-1802917, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.094883} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1766.225595] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-12f12914-bee9-4598-82bd-c3b629c1401f tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] [instance: 2ea8304e-5b91-4908-a876-6e2c780b1da9] Extended root virtual disk {{(pid=62519) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1766.226037] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5659b7c3-c668-4d21-bf62-b4e9c91d8dd3 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1766.248874] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-12f12914-bee9-4598-82bd-c3b629c1401f tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] [instance: 2ea8304e-5b91-4908-a876-6e2c780b1da9] Reconfiguring VM instance instance-00000054 to attach disk [datastore1] 2ea8304e-5b91-4908-a876-6e2c780b1da9/2ea8304e-5b91-4908-a876-6e2c780b1da9.vmdk or device None with type sparse {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1766.251466] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2aa076c3-4f89-43d5-9c6d-ec4e2a640acf {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1766.276093] env[62519]: DEBUG oslo_vmware.api [None req-12f12914-bee9-4598-82bd-c3b629c1401f tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Waiting for the task: (returnval){ [ 1766.276093] env[62519]: value = "task-1802919" [ 1766.276093] env[62519]: _type = "Task" [ 1766.276093] env[62519]: } to complete. 
{{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1766.284443] env[62519]: DEBUG oslo_vmware.api [None req-12f12914-bee9-4598-82bd-c3b629c1401f tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Task: {'id': task-1802919, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1766.420985] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1802915, 'name': CreateVM_Task, 'duration_secs': 0.51869} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1766.421527] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5da884af-d8d2-409b-99bd-e5370e44e9f0] Created VM on the ESX host {{(pid=62519) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1766.422550] env[62519]: DEBUG oslo_concurrency.lockutils [None req-caa9ca9c-57be-478a-8782-107ca9749996 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1766.422735] env[62519]: DEBUG oslo_concurrency.lockutils [None req-caa9ca9c-57be-478a-8782-107ca9749996 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Acquired lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1766.423104] env[62519]: DEBUG oslo_concurrency.lockutils [None req-caa9ca9c-57be-478a-8782-107ca9749996 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1766.423406] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fbec7e1f-bd3f-446f-935e-a045471a11e4 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1766.432988] env[62519]: DEBUG oslo_vmware.api [None req-caa9ca9c-57be-478a-8782-107ca9749996 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Waiting for the task: (returnval){ [ 1766.432988] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]52b01ee2-a11d-0f8a-d24f-e747f176c1b5" [ 1766.432988] env[62519]: _type = "Task" [ 1766.432988] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1766.443988] env[62519]: DEBUG oslo_vmware.api [None req-caa9ca9c-57be-478a-8782-107ca9749996 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52b01ee2-a11d-0f8a-d24f-e747f176c1b5, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1766.462280] env[62519]: DEBUG oslo_vmware.api [None req-323daec2-ec4b-4288-a31b-a050d5222299 tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] Task: {'id': task-1802918, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1766.475813] env[62519]: INFO nova.compute.manager [None req-84dc4058-6eb4-4380-b9f1-346dccf99218 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] [instance: ee4b10ba-1c56-47cf-a528-d6e65c286ddb] Took 31.51 seconds to build instance. [ 1766.489746] env[62519]: DEBUG nova.network.neutron [req-c1ea23b5-0847-4aa1-9bd5-c92d46115c77 req-f951cfec-f788-40f9-845b-25c75ab91123 service nova] [instance: 5da884af-d8d2-409b-99bd-e5370e44e9f0] Updated VIF entry in instance network info cache for port f8e70085-af0c-4731-8b61-cf5cd2460344. {{(pid=62519) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1766.490014] env[62519]: DEBUG nova.network.neutron [req-c1ea23b5-0847-4aa1-9bd5-c92d46115c77 req-f951cfec-f788-40f9-845b-25c75ab91123 service nova] [instance: 5da884af-d8d2-409b-99bd-e5370e44e9f0] Updating instance_info_cache with network_info: [{"id": "f8e70085-af0c-4731-8b61-cf5cd2460344", "address": "fa:16:3e:4f:98:8a", "network": {"id": "14e63db8-c42b-42e5-bd96-cab60c163758", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1328688204-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "df6727c290724a8ebef5188c77e91399", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2be3fdb5-359e-43bd-8c20-2ff00e81db55", "external-id": "nsx-vlan-transportzone-986", "segmentation_id": 986, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf8e70085-af", "ovs_interfaceid": "f8e70085-af0c-4731-8b61-cf5cd2460344", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1766.573456] env[62519]: DEBUG nova.compute.manager [None req-13b523f6-ff81-4d94-906d-b58b94706344 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] [instance: 358ee402-1112-4eea-a518-a45a6bf92c31] Starting instance... 
{{(pid=62519) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2441}} [ 1766.619252] env[62519]: DEBUG oslo_concurrency.lockutils [None req-0ec5e37a-37cf-41c7-9c93-a04efad8446e tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.533s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1766.619729] env[62519]: DEBUG nova.compute.manager [None req-0ec5e37a-37cf-41c7-9c93-a04efad8446e tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] [instance: 4a0f7975-5a07-4593-ae71-cabebdefe0fe] Start building networks asynchronously for instance. {{(pid=62519) _build_resources /opt/stack/nova/nova/compute/manager.py:2838}} [ 1766.624324] env[62519]: DEBUG oslo_concurrency.lockutils [None req-8710f12d-0478-454f-8c54-66bd5d3dd37c tempest-ServerPasswordTestJSON-1830489623 tempest-ServerPasswordTestJSON-1830489623-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 14.317s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1766.624404] env[62519]: INFO nova.compute.claims [None req-8710f12d-0478-454f-8c54-66bd5d3dd37c tempest-ServerPasswordTestJSON-1830489623 tempest-ServerPasswordTestJSON-1830489623-project-member] [instance: 4e7db12e-c7f7-4d2a-b797-1371fc839a9e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1766.794686] env[62519]: DEBUG oslo_vmware.api [None req-12f12914-bee9-4598-82bd-c3b629c1401f tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Task: {'id': task-1802919, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1766.954562] env[62519]: DEBUG oslo_vmware.api [None req-caa9ca9c-57be-478a-8782-107ca9749996 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52b01ee2-a11d-0f8a-d24f-e747f176c1b5, 'name': SearchDatastore_Task, 'duration_secs': 0.013352} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1766.958587] env[62519]: DEBUG oslo_concurrency.lockutils [None req-caa9ca9c-57be-478a-8782-107ca9749996 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Releasing lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1766.959053] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-caa9ca9c-57be-478a-8782-107ca9749996 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] [instance: 5da884af-d8d2-409b-99bd-e5370e44e9f0] Processing image 15793716-f1d9-4a86-9030-717adf498693 {{(pid=62519) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1766.959345] env[62519]: DEBUG oslo_concurrency.lockutils [None req-caa9ca9c-57be-478a-8782-107ca9749996 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1766.959541] env[62519]: DEBUG oslo_concurrency.lockutils [None req-caa9ca9c-57be-478a-8782-107ca9749996 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Acquired lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1766.959763] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-caa9ca9c-57be-478a-8782-107ca9749996 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62519) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1766.960086] env[62519]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-440c4cbd-7ba3-40f4-b360-a50ea8e5deb5 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1766.968752] env[62519]: DEBUG oslo_vmware.api [None req-323daec2-ec4b-4288-a31b-a050d5222299 tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] Task: {'id': task-1802918, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1766.975847] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-caa9ca9c-57be-478a-8782-107ca9749996 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62519) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1766.976091] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-caa9ca9c-57be-478a-8782-107ca9749996 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62519) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1766.976916] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b6fca26c-96c7-4802-8ce8-8b84df78ba7b {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1766.985046] env[62519]: DEBUG oslo_vmware.api [None req-caa9ca9c-57be-478a-8782-107ca9749996 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Waiting for the task: (returnval){ [ 1766.985046] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]52d76356-2a24-4c54-d4af-a350910d55df" [ 1766.985046] env[62519]: _type = "Task" [ 1766.985046] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1766.997374] env[62519]: DEBUG oslo_concurrency.lockutils [req-c1ea23b5-0847-4aa1-9bd5-c92d46115c77 req-f951cfec-f788-40f9-845b-25c75ab91123 service nova] Releasing lock "refresh_cache-5da884af-d8d2-409b-99bd-e5370e44e9f0" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1766.997675] env[62519]: DEBUG nova.compute.manager [req-c1ea23b5-0847-4aa1-9bd5-c92d46115c77 req-f951cfec-f788-40f9-845b-25c75ab91123 service nova] [instance: 76786353-f93f-4e7e-b3f7-7f22ae4b7b41] Received event network-vif-deleted-5fcd6d6f-381e-473c-a8b8-40f2f0ec3276 {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1766.997861] env[62519]: DEBUG nova.compute.manager [req-c1ea23b5-0847-4aa1-9bd5-c92d46115c77 req-f951cfec-f788-40f9-845b-25c75ab91123 service nova] [instance: af422ca1-7966-4bed-97bf-2b4c5285eaab] Received event network-vif-deleted-fc6b82fe-716c-40e8-b027-ffa8ee2ea408 {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1766.998041] env[62519]: DEBUG nova.compute.manager [req-c1ea23b5-0847-4aa1-9bd5-c92d46115c77 req-f951cfec-f788-40f9-845b-25c75ab91123 service nova] [instance: 156ed02a-3365-4a4f-b4de-ea86920d3baf] Received event network-vif-deleted-af20c500-c1f3-427c-a59d-e8a17b0a6945 {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1767.001487] env[62519]: DEBUG oslo_vmware.api [None req-caa9ca9c-57be-478a-8782-107ca9749996 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52d76356-2a24-4c54-d4af-a350910d55df, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1767.098776] env[62519]: DEBUG oslo_concurrency.lockutils [None req-13b523f6-ff81-4d94-906d-b58b94706344 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1767.125086] env[62519]: DEBUG nova.compute.utils [None req-0ec5e37a-37cf-41c7-9c93-a04efad8446e tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Using /dev/sd instead of None {{(pid=62519) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1767.126592] env[62519]: DEBUG nova.compute.manager [None req-0ec5e37a-37cf-41c7-9c93-a04efad8446e tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] [instance: 4a0f7975-5a07-4593-ae71-cabebdefe0fe] Allocating IP information in the background. {{(pid=62519) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1989}} [ 1767.126838] env[62519]: DEBUG nova.network.neutron [None req-0ec5e37a-37cf-41c7-9c93-a04efad8446e tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] [instance: 4a0f7975-5a07-4593-ae71-cabebdefe0fe] allocate_for_instance() {{(pid=62519) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1767.194893] env[62519]: DEBUG nova.policy [None req-0ec5e37a-37cf-41c7-9c93-a04efad8446e tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '158d93cf743840a8be4e192e6bd4b8bf', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'd73e71476254453fb23164dce09c6d41', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62519) authorize /opt/stack/nova/nova/policy.py:192}} [ 1767.252118] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-087464bc-7aa6-42f5-9e15-0aed3a0c7295 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] [instance: 88f9351c-253b-49dd-a88e-b8585ea742ac] Powering off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1767.252118] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-7bfee335-9f94-4917-93cb-8927348d903a {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1767.263295] env[62519]: DEBUG oslo_vmware.api [None req-087464bc-7aa6-42f5-9e15-0aed3a0c7295 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Waiting for the task: (returnval){ [ 1767.263295] env[62519]: value = "task-1802920" [ 1767.263295] env[62519]: _type = "Task" [ 1767.263295] env[62519]: } to complete. 
{{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1767.275915] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-087464bc-7aa6-42f5-9e15-0aed3a0c7295 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] [instance: 88f9351c-253b-49dd-a88e-b8585ea742ac] VM already powered off {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1767.276192] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-087464bc-7aa6-42f5-9e15-0aed3a0c7295 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] [instance: 88f9351c-253b-49dd-a88e-b8585ea742ac] Volume detach. Driver type: vmdk {{(pid=62519) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1767.276389] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-087464bc-7aa6-42f5-9e15-0aed3a0c7295 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] [instance: 88f9351c-253b-49dd-a88e-b8585ea742ac] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-373788', 'volume_id': '5bd7c4a6-d552-4040-aefb-7d5577b89149', 'name': 'volume-5bd7c4a6-d552-4040-aefb-7d5577b89149', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '88f9351c-253b-49dd-a88e-b8585ea742ac', 'attached_at': '', 'detached_at': '', 'volume_id': '5bd7c4a6-d552-4040-aefb-7d5577b89149', 'serial': '5bd7c4a6-d552-4040-aefb-7d5577b89149'} {{(pid=62519) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1767.277274] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a934d88-3d23-4045-af9f-2b9b09a6d344 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1767.292842] env[62519]: DEBUG oslo_vmware.api [None req-12f12914-bee9-4598-82bd-c3b629c1401f tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Task: {'id': task-1802919, 'name': ReconfigVM_Task, 'duration_secs': 0.576662} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1767.308446] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-12f12914-bee9-4598-82bd-c3b629c1401f tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] [instance: 2ea8304e-5b91-4908-a876-6e2c780b1da9] Reconfigured VM instance instance-00000054 to attach disk [datastore1] 2ea8304e-5b91-4908-a876-6e2c780b1da9/2ea8304e-5b91-4908-a876-6e2c780b1da9.vmdk or device None with type sparse {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1767.309539] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-7455e770-fa19-4a1f-b546-afced9cf93d1 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1767.312230] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59d3baa0-31c6-457e-ad1d-894cf90f8725 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1767.325270] env[62519]: WARNING nova.virt.vmwareapi.driver [None req-087464bc-7aa6-42f5-9e15-0aed3a0c7295 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] [instance: 88f9351c-253b-49dd-a88e-b8585ea742ac] The volume None does not exist!: nova.exception.DiskNotFound: Unable to find volume [ 1767.325715] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-087464bc-7aa6-42f5-9e15-0aed3a0c7295 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] [instance: 88f9351c-253b-49dd-a88e-b8585ea742ac] Destroying instance {{(pid=62519) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1767.328605] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74fc7ff7-6a55-4401-bfaf-2239c0fe5606 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1767.333980] env[62519]: DEBUG oslo_vmware.api [None req-12f12914-bee9-4598-82bd-c3b629c1401f tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Waiting for the task: (returnval){ [ 1767.333980] env[62519]: value = "task-1802921" [ 1767.333980] env[62519]: _type = "Task" [ 1767.333980] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1767.344214] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-087464bc-7aa6-42f5-9e15-0aed3a0c7295 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] [instance: 88f9351c-253b-49dd-a88e-b8585ea742ac] Unregistering the VM {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1767.345398] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-2da2e3f8-724e-440e-977e-3b247d798269 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1767.353260] env[62519]: DEBUG oslo_vmware.api [None req-12f12914-bee9-4598-82bd-c3b629c1401f tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Task: {'id': task-1802921, 'name': Rename_Task} progress is 14%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1767.470304] env[62519]: DEBUG oslo_vmware.api [None req-323daec2-ec4b-4288-a31b-a050d5222299 tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] Task: {'id': task-1802918, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1767.500313] env[62519]: DEBUG oslo_vmware.api [None req-caa9ca9c-57be-478a-8782-107ca9749996 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52d76356-2a24-4c54-d4af-a350910d55df, 'name': SearchDatastore_Task, 'duration_secs': 0.070587} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1767.501785] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-57d750b0-d082-486d-9922-1116e3ce20a9 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1767.511556] env[62519]: DEBUG oslo_vmware.api [None req-caa9ca9c-57be-478a-8782-107ca9749996 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Waiting for the task: (returnval){ [ 1767.511556] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]52a0f483-f3f5-c3d0-c716-61d0d8468550" [ 1767.511556] env[62519]: _type = "Task" [ 1767.511556] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1767.530038] env[62519]: DEBUG oslo_vmware.api [None req-caa9ca9c-57be-478a-8782-107ca9749996 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52a0f483-f3f5-c3d0-c716-61d0d8468550, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1767.540915] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-087464bc-7aa6-42f5-9e15-0aed3a0c7295 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] [instance: 88f9351c-253b-49dd-a88e-b8585ea742ac] Unregistered the VM {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1767.544879] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-087464bc-7aa6-42f5-9e15-0aed3a0c7295 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] [instance: 88f9351c-253b-49dd-a88e-b8585ea742ac] Deleting contents of the VM from datastore datastore1 {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1767.544879] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-087464bc-7aa6-42f5-9e15-0aed3a0c7295 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Deleting the datastore file [datastore1] 88f9351c-253b-49dd-a88e-b8585ea742ac {{(pid=62519) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1767.544879] env[62519]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-fb975937-3a2a-440c-86d8-87a543021d9b {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1767.551525] env[62519]: DEBUG oslo_vmware.api [None req-087464bc-7aa6-42f5-9e15-0aed3a0c7295 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Waiting for the task: (returnval){ [ 1767.551525] env[62519]: value = "task-1802923" [ 1767.551525] env[62519]: _type = "Task" [ 1767.551525] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1767.562374] env[62519]: DEBUG oslo_vmware.api [None req-087464bc-7aa6-42f5-9e15-0aed3a0c7295 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Task: {'id': task-1802923, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1767.631142] env[62519]: DEBUG nova.compute.manager [None req-0ec5e37a-37cf-41c7-9c93-a04efad8446e tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] [instance: 4a0f7975-5a07-4593-ae71-cabebdefe0fe] Start building block device mappings for instance. {{(pid=62519) _build_resources /opt/stack/nova/nova/compute/manager.py:2873}} [ 1767.769808] env[62519]: DEBUG nova.network.neutron [None req-0ec5e37a-37cf-41c7-9c93-a04efad8446e tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] [instance: 4a0f7975-5a07-4593-ae71-cabebdefe0fe] Successfully created port: f8b7229e-2b40-4a1b-9e16-0d01dbaa52f8 {{(pid=62519) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1767.854679] env[62519]: DEBUG oslo_vmware.api [None req-12f12914-bee9-4598-82bd-c3b629c1401f tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Task: {'id': task-1802921, 'name': Rename_Task, 'duration_secs': 0.247119} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1767.855129] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-12f12914-bee9-4598-82bd-c3b629c1401f tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] [instance: 2ea8304e-5b91-4908-a876-6e2c780b1da9] Powering on the VM {{(pid=62519) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1767.855483] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-d7ef5922-eb8e-4168-9919-15cd5fd268d6 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1767.868695] env[62519]: DEBUG oslo_vmware.api [None req-12f12914-bee9-4598-82bd-c3b629c1401f tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Waiting for the task: (returnval){ [ 1767.868695] env[62519]: value = "task-1802924" [ 1767.868695] env[62519]: _type = "Task" [ 1767.868695] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1767.882231] env[62519]: DEBUG oslo_vmware.api [None req-12f12914-bee9-4598-82bd-c3b629c1401f tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Task: {'id': task-1802924, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1767.968113] env[62519]: DEBUG oslo_vmware.api [None req-323daec2-ec4b-4288-a31b-a050d5222299 tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] Task: {'id': task-1802918, 'name': PowerOnVM_Task, 'duration_secs': 1.580725} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1767.971451] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-323daec2-ec4b-4288-a31b-a050d5222299 tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] [instance: 99f22198-1a65-4d0d-b665-90c7063dbdb9] Powered on the VM {{(pid=62519) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1767.971701] env[62519]: DEBUG nova.compute.manager [None req-323daec2-ec4b-4288-a31b-a050d5222299 tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] [instance: 99f22198-1a65-4d0d-b665-90c7063dbdb9] Checking state {{(pid=62519) _get_power_state /opt/stack/nova/nova/compute/manager.py:1799}} [ 1767.973613] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27b158c9-e748-4567-8dc6-ea155818b051 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1767.986814] env[62519]: DEBUG oslo_concurrency.lockutils [None req-84dc4058-6eb4-4380-b9f1-346dccf99218 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Lock "ee4b10ba-1c56-47cf-a528-d6e65c286ddb" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 34.027s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1768.030888] env[62519]: DEBUG oslo_vmware.api [None req-caa9ca9c-57be-478a-8782-107ca9749996 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52a0f483-f3f5-c3d0-c716-61d0d8468550, 'name': SearchDatastore_Task, 'duration_secs': 0.016935} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1768.030888] env[62519]: DEBUG oslo_concurrency.lockutils [None req-caa9ca9c-57be-478a-8782-107ca9749996 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Releasing lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1768.030888] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-caa9ca9c-57be-478a-8782-107ca9749996 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk to [datastore1] 5da884af-d8d2-409b-99bd-e5370e44e9f0/5da884af-d8d2-409b-99bd-e5370e44e9f0.vmdk {{(pid=62519) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1768.030888] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-095d0f7e-9f31-4c47-8a98-f6fc382dc626 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1768.043299] env[62519]: DEBUG oslo_vmware.api [None req-caa9ca9c-57be-478a-8782-107ca9749996 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Waiting for the task: (returnval){ [ 1768.043299] env[62519]: value = "task-1802925" [ 1768.043299] env[62519]: _type = "Task" [ 1768.043299] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1768.064838] env[62519]: DEBUG oslo_vmware.api [None req-caa9ca9c-57be-478a-8782-107ca9749996 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Task: {'id': task-1802925, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1768.067597] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9dc772ee-37d4-4806-85a1-9edd0304d35c {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1768.077832] env[62519]: DEBUG oslo_vmware.api [None req-087464bc-7aa6-42f5-9e15-0aed3a0c7295 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Task: {'id': task-1802923, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.437299} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1768.078298] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-087464bc-7aa6-42f5-9e15-0aed3a0c7295 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Deleted the datastore file {{(pid=62519) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1768.078683] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-087464bc-7aa6-42f5-9e15-0aed3a0c7295 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] [instance: 88f9351c-253b-49dd-a88e-b8585ea742ac] Deleted contents of the VM from datastore datastore1 {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1768.078928] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-087464bc-7aa6-42f5-9e15-0aed3a0c7295 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] [instance: 88f9351c-253b-49dd-a88e-b8585ea742ac] Instance destroyed {{(pid=62519) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1768.082856] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa9f9ba3-07bc-4c5d-935b-d249fc6d5109 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1768.118395] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd9c1261-55e1-4237-ace2-e01bca222c2c {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1768.129070] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc39bbad-cb28-40a6-bac0-fdea9bce89a5 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1768.149950] env[62519]: DEBUG nova.compute.provider_tree [None req-8710f12d-0478-454f-8c54-66bd5d3dd37c tempest-ServerPasswordTestJSON-1830489623 tempest-ServerPasswordTestJSON-1830489623-project-member] Inventory has not changed in ProviderTree for provider: f8ca0d98-9158-4b85-ae0e-b106f966dd44 {{(pid=62519) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1768.382587] env[62519]: DEBUG oslo_vmware.api [None req-12f12914-bee9-4598-82bd-c3b629c1401f tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Task: {'id': task-1802924, 'name': PowerOnVM_Task} progress is 94%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1768.556269] env[62519]: DEBUG oslo_vmware.api [None req-caa9ca9c-57be-478a-8782-107ca9749996 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Task: {'id': task-1802925, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1768.589944] env[62519]: INFO nova.virt.block_device [None req-087464bc-7aa6-42f5-9e15-0aed3a0c7295 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] [instance: 88f9351c-253b-49dd-a88e-b8585ea742ac] Booting with volume 5bd7c4a6-d552-4040-aefb-7d5577b89149 at /dev/sdb [ 1768.647666] env[62519]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-b39b4e87-3498-4975-b816-cb4428febc16 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1768.656796] env[62519]: DEBUG nova.scheduler.client.report [None req-8710f12d-0478-454f-8c54-66bd5d3dd37c tempest-ServerPasswordTestJSON-1830489623 tempest-ServerPasswordTestJSON-1830489623-project-member] Inventory has not changed for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1768.661599] env[62519]: DEBUG nova.compute.manager [None req-0ec5e37a-37cf-41c7-9c93-a04efad8446e tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] [instance: 4a0f7975-5a07-4593-ae71-cabebdefe0fe] Start spawning the instance on the hypervisor. {{(pid=62519) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2647}} [ 1768.670103] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e018d91-1939-4eea-ba38-ccd51e47438c {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1768.696432] env[62519]: DEBUG nova.virt.hardware [None req-0ec5e37a-37cf-41c7-9c93-a04efad8446e tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T07:56:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T07:55:55Z,direct_url=,disk_format='vmdk',id=15793716-f1d9-4a86-9030-717adf498693,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4069337684d348e0a1ab4eb6a1a2b14d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T07:55:56Z,virtual_size=,visibility=), allow threads: False {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1768.696844] env[62519]: DEBUG nova.virt.hardware [None req-0ec5e37a-37cf-41c7-9c93-a04efad8446e tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Flavor limits 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1768.696927] env[62519]: DEBUG nova.virt.hardware [None 
req-0ec5e37a-37cf-41c7-9c93-a04efad8446e tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Image limits 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1768.697147] env[62519]: DEBUG nova.virt.hardware [None req-0ec5e37a-37cf-41c7-9c93-a04efad8446e tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Flavor pref 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1768.697406] env[62519]: DEBUG nova.virt.hardware [None req-0ec5e37a-37cf-41c7-9c93-a04efad8446e tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Image pref 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1768.697605] env[62519]: DEBUG nova.virt.hardware [None req-0ec5e37a-37cf-41c7-9c93-a04efad8446e tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1768.697940] env[62519]: DEBUG nova.virt.hardware [None req-0ec5e37a-37cf-41c7-9c93-a04efad8446e tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1768.698174] env[62519]: DEBUG nova.virt.hardware [None req-0ec5e37a-37cf-41c7-9c93-a04efad8446e tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62519) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1768.698385] env[62519]: DEBUG nova.virt.hardware [None req-0ec5e37a-37cf-41c7-9c93-a04efad8446e tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Got 1 possible topologies {{(pid=62519) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1768.698621] env[62519]: DEBUG nova.virt.hardware [None req-0ec5e37a-37cf-41c7-9c93-a04efad8446e tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1768.698886] env[62519]: DEBUG nova.virt.hardware [None req-0ec5e37a-37cf-41c7-9c93-a04efad8446e tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1768.701011] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-783119b1-7282-417d-b42f-93fb2705d56c {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1768.718044] env[62519]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-05d5367f-bd83-4bb0-a1ec-3486c5507668 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 
1768.726944] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd542a73-490f-4252-9565-328fb5f2f2e1 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1768.737445] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87371530-7dc2-4036-b9ee-b00ed25131d3 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1768.782535] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-728e9f58-d1d2-49f4-b33b-d5e43bc645bf {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1768.793848] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6520863a-8967-45d3-a144-8cf19c7db61e {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1768.811115] env[62519]: DEBUG nova.virt.block_device [None req-087464bc-7aa6-42f5-9e15-0aed3a0c7295 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] [instance: 88f9351c-253b-49dd-a88e-b8585ea742ac] Updating existing volume attachment record: 4a3a274a-374f-454a-8765-be71583331a1 {{(pid=62519) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1768.882520] env[62519]: DEBUG oslo_vmware.api [None req-12f12914-bee9-4598-82bd-c3b629c1401f tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Task: {'id': task-1802924, 'name': PowerOnVM_Task, 'duration_secs': 0.58181} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1768.882520] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-12f12914-bee9-4598-82bd-c3b629c1401f tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] [instance: 2ea8304e-5b91-4908-a876-6e2c780b1da9] Powered on the VM {{(pid=62519) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1768.882672] env[62519]: INFO nova.compute.manager [None req-12f12914-bee9-4598-82bd-c3b629c1401f tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] [instance: 2ea8304e-5b91-4908-a876-6e2c780b1da9] Took 10.59 seconds to spawn the instance on the hypervisor. [ 1768.882771] env[62519]: DEBUG nova.compute.manager [None req-12f12914-bee9-4598-82bd-c3b629c1401f tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] [instance: 2ea8304e-5b91-4908-a876-6e2c780b1da9] Checking state {{(pid=62519) _get_power_state /opt/stack/nova/nova/compute/manager.py:1799}} [ 1768.883580] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ede4a1d0-8e03-4cfe-82b6-796c7d355d3b {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1769.057401] env[62519]: DEBUG oslo_vmware.api [None req-caa9ca9c-57be-478a-8782-107ca9749996 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Task: {'id': task-1802925, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.689577} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1769.058245] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-caa9ca9c-57be-478a-8782-107ca9749996 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk to [datastore1] 5da884af-d8d2-409b-99bd-e5370e44e9f0/5da884af-d8d2-409b-99bd-e5370e44e9f0.vmdk {{(pid=62519) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1769.058691] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-caa9ca9c-57be-478a-8782-107ca9749996 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] [instance: 5da884af-d8d2-409b-99bd-e5370e44e9f0] Extending root virtual disk to 1048576 {{(pid=62519) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1769.059093] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-9bada136-b947-44e7-ae59-1ac8354ba20d {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1769.071205] env[62519]: DEBUG oslo_vmware.api [None req-caa9ca9c-57be-478a-8782-107ca9749996 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Waiting for the task: (returnval){ [ 1769.071205] env[62519]: value = "task-1802926" [ 1769.071205] env[62519]: _type = "Task" [ 1769.071205] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1769.082416] env[62519]: DEBUG oslo_vmware.api [None req-caa9ca9c-57be-478a-8782-107ca9749996 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Task: {'id': task-1802926, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1769.168649] env[62519]: DEBUG oslo_concurrency.lockutils [None req-8710f12d-0478-454f-8c54-66bd5d3dd37c tempest-ServerPasswordTestJSON-1830489623 tempest-ServerPasswordTestJSON-1830489623-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.544s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1769.168649] env[62519]: DEBUG nova.compute.manager [None req-8710f12d-0478-454f-8c54-66bd5d3dd37c tempest-ServerPasswordTestJSON-1830489623 tempest-ServerPasswordTestJSON-1830489623-project-member] [instance: 4e7db12e-c7f7-4d2a-b797-1371fc839a9e] Start building networks asynchronously for instance. 
{{(pid=62519) _build_resources /opt/stack/nova/nova/compute/manager.py:2838}} [ 1769.170463] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 13.382s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1769.170463] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1769.170463] env[62519]: DEBUG nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62519) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1769.170668] env[62519]: DEBUG oslo_concurrency.lockutils [None req-7937b940-c110-4d34-9726-761103aa71cf tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 11.034s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1769.170793] env[62519]: DEBUG nova.objects.instance [None req-7937b940-c110-4d34-9726-761103aa71cf tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Lazy-loading 'resources' on Instance uuid c8816718-0a35-4474-b162-c619b0acc154 {{(pid=62519) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1769.172808] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cac94c4b-f11a-4599-9d4c-cab38b171632 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1769.188856] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06b0da14-46b3-4d86-b9cd-12cb431d2e71 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1769.205389] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a1b032d-1c37-45fe-a29f-f87df50a77e4 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1769.217315] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d80aaca7-8547-4b8d-b68d-67c20fa7e4d3 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1769.253527] env[62519]: DEBUG nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=178963MB free_disk=158GB free_vcpus=48 pci_devices=None {{(pid=62519) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1769.253694] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Acquiring lock "compute_resources" by 
"nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1769.404836] env[62519]: INFO nova.compute.manager [None req-12f12914-bee9-4598-82bd-c3b629c1401f tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] [instance: 2ea8304e-5b91-4908-a876-6e2c780b1da9] Took 33.37 seconds to build instance. [ 1769.584512] env[62519]: DEBUG oslo_vmware.api [None req-caa9ca9c-57be-478a-8782-107ca9749996 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Task: {'id': task-1802926, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.079042} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1769.584893] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-caa9ca9c-57be-478a-8782-107ca9749996 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] [instance: 5da884af-d8d2-409b-99bd-e5370e44e9f0] Extended root virtual disk {{(pid=62519) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1769.585934] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96f3dcfc-885e-419b-9f2c-af06d6f69726 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1769.615456] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-caa9ca9c-57be-478a-8782-107ca9749996 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] [instance: 5da884af-d8d2-409b-99bd-e5370e44e9f0] Reconfiguring VM instance instance-00000055 to attach disk [datastore1] 5da884af-d8d2-409b-99bd-e5370e44e9f0/5da884af-d8d2-409b-99bd-e5370e44e9f0.vmdk or device None with type sparse {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1769.615725] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ddc10938-bf3c-4904-ad40-3f762d557418 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1769.632534] env[62519]: DEBUG nova.network.neutron [None req-0ec5e37a-37cf-41c7-9c93-a04efad8446e tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] [instance: 4a0f7975-5a07-4593-ae71-cabebdefe0fe] Successfully updated port: f8b7229e-2b40-4a1b-9e16-0d01dbaa52f8 {{(pid=62519) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1769.639665] env[62519]: DEBUG oslo_vmware.api [None req-caa9ca9c-57be-478a-8782-107ca9749996 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Waiting for the task: (returnval){ [ 1769.639665] env[62519]: value = "task-1802927" [ 1769.639665] env[62519]: _type = "Task" [ 1769.639665] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1769.650711] env[62519]: DEBUG oslo_vmware.api [None req-caa9ca9c-57be-478a-8782-107ca9749996 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Task: {'id': task-1802927, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1769.670548] env[62519]: DEBUG nova.compute.manager [req-3a813dfd-78df-49d6-801a-33fb173e96f4 req-2e688840-c2a7-4c22-a84a-968ab6d07990 service nova] [instance: 4a0f7975-5a07-4593-ae71-cabebdefe0fe] Received event network-vif-plugged-f8b7229e-2b40-4a1b-9e16-0d01dbaa52f8 {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1769.670828] env[62519]: DEBUG oslo_concurrency.lockutils [req-3a813dfd-78df-49d6-801a-33fb173e96f4 req-2e688840-c2a7-4c22-a84a-968ab6d07990 service nova] Acquiring lock "4a0f7975-5a07-4593-ae71-cabebdefe0fe-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1769.671102] env[62519]: DEBUG oslo_concurrency.lockutils [req-3a813dfd-78df-49d6-801a-33fb173e96f4 req-2e688840-c2a7-4c22-a84a-968ab6d07990 service nova] Lock "4a0f7975-5a07-4593-ae71-cabebdefe0fe-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1769.671381] env[62519]: DEBUG oslo_concurrency.lockutils [req-3a813dfd-78df-49d6-801a-33fb173e96f4 req-2e688840-c2a7-4c22-a84a-968ab6d07990 service nova] Lock "4a0f7975-5a07-4593-ae71-cabebdefe0fe-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1769.671628] env[62519]: DEBUG nova.compute.manager [req-3a813dfd-78df-49d6-801a-33fb173e96f4 req-2e688840-c2a7-4c22-a84a-968ab6d07990 service nova] [instance: 4a0f7975-5a07-4593-ae71-cabebdefe0fe] No waiting events found dispatching network-vif-plugged-f8b7229e-2b40-4a1b-9e16-0d01dbaa52f8 {{(pid=62519) pop_instance_event /opt/stack/nova/nova/compute/manager.py:323}} [ 1769.671829] env[62519]: WARNING nova.compute.manager [req-3a813dfd-78df-49d6-801a-33fb173e96f4 req-2e688840-c2a7-4c22-a84a-968ab6d07990 service nova] [instance: 4a0f7975-5a07-4593-ae71-cabebdefe0fe] Received unexpected event network-vif-plugged-f8b7229e-2b40-4a1b-9e16-0d01dbaa52f8 for instance with vm_state building and task_state spawning. [ 1769.673417] env[62519]: DEBUG nova.compute.utils [None req-8710f12d-0478-454f-8c54-66bd5d3dd37c tempest-ServerPasswordTestJSON-1830489623 tempest-ServerPasswordTestJSON-1830489623-project-member] Using /dev/sd instead of None {{(pid=62519) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1769.675022] env[62519]: DEBUG nova.compute.manager [None req-8710f12d-0478-454f-8c54-66bd5d3dd37c tempest-ServerPasswordTestJSON-1830489623 tempest-ServerPasswordTestJSON-1830489623-project-member] [instance: 4e7db12e-c7f7-4d2a-b797-1371fc839a9e] Allocating IP information in the background. 
{{(pid=62519) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1989}} [ 1769.675149] env[62519]: DEBUG nova.network.neutron [None req-8710f12d-0478-454f-8c54-66bd5d3dd37c tempest-ServerPasswordTestJSON-1830489623 tempest-ServerPasswordTestJSON-1830489623-project-member] [instance: 4e7db12e-c7f7-4d2a-b797-1371fc839a9e] allocate_for_instance() {{(pid=62519) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1769.762523] env[62519]: DEBUG nova.policy [None req-8710f12d-0478-454f-8c54-66bd5d3dd37c tempest-ServerPasswordTestJSON-1830489623 tempest-ServerPasswordTestJSON-1830489623-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '40345f676433487596a5d1ca0612155b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f19ee01b59dc4df2889524f41f227f56', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62519) authorize /opt/stack/nova/nova/policy.py:192}} [ 1769.929135] env[62519]: INFO nova.compute.manager [None req-42d8ba5a-d3d3-4638-a7a7-fe3fd6d26413 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] [instance: 2ea8304e-5b91-4908-a876-6e2c780b1da9] Rescuing [ 1769.929651] env[62519]: DEBUG oslo_concurrency.lockutils [None req-42d8ba5a-d3d3-4638-a7a7-fe3fd6d26413 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Acquiring lock "refresh_cache-2ea8304e-5b91-4908-a876-6e2c780b1da9" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1769.930049] env[62519]: DEBUG oslo_concurrency.lockutils [None req-42d8ba5a-d3d3-4638-a7a7-fe3fd6d26413 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Acquired lock "refresh_cache-2ea8304e-5b91-4908-a876-6e2c780b1da9" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1769.932051] env[62519]: DEBUG nova.network.neutron [None req-42d8ba5a-d3d3-4638-a7a7-fe3fd6d26413 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] [instance: 2ea8304e-5b91-4908-a876-6e2c780b1da9] Building network info cache for instance {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1770.067423] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf202d80-0adf-45c7-82d1-f01da9ae26ce {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1770.077071] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-242acd65-df91-4d87-8b2f-809ee78c3961 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1770.113492] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-420d4cf4-e45b-44f8-875d-b0bdbe848588 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1770.119556] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-6c259193-9bff-42c1-a84f-1ebf5027a3ce {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1770.140468] env[62519]: DEBUG nova.compute.provider_tree [None req-7937b940-c110-4d34-9726-761103aa71cf tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Inventory has not changed in ProviderTree for provider: f8ca0d98-9158-4b85-ae0e-b106f966dd44 {{(pid=62519) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1770.140468] env[62519]: DEBUG oslo_concurrency.lockutils [None req-0ec5e37a-37cf-41c7-9c93-a04efad8446e tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Acquiring lock "refresh_cache-4a0f7975-5a07-4593-ae71-cabebdefe0fe" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1770.140468] env[62519]: DEBUG oslo_concurrency.lockutils [None req-0ec5e37a-37cf-41c7-9c93-a04efad8446e tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Acquired lock "refresh_cache-4a0f7975-5a07-4593-ae71-cabebdefe0fe" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1770.140468] env[62519]: DEBUG nova.network.neutron [None req-0ec5e37a-37cf-41c7-9c93-a04efad8446e tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] [instance: 4a0f7975-5a07-4593-ae71-cabebdefe0fe] Building network info cache for instance {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1770.150697] env[62519]: DEBUG oslo_vmware.api [None req-caa9ca9c-57be-478a-8782-107ca9749996 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Task: {'id': task-1802927, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1770.178737] env[62519]: DEBUG nova.compute.manager [None req-8710f12d-0478-454f-8c54-66bd5d3dd37c tempest-ServerPasswordTestJSON-1830489623 tempest-ServerPasswordTestJSON-1830489623-project-member] [instance: 4e7db12e-c7f7-4d2a-b797-1371fc839a9e] Start building block device mappings for instance. 
{{(pid=62519) _build_resources /opt/stack/nova/nova/compute/manager.py:2873}} [ 1770.257067] env[62519]: DEBUG nova.network.neutron [None req-8710f12d-0478-454f-8c54-66bd5d3dd37c tempest-ServerPasswordTestJSON-1830489623 tempest-ServerPasswordTestJSON-1830489623-project-member] [instance: 4e7db12e-c7f7-4d2a-b797-1371fc839a9e] Successfully created port: 76d28a50-96fe-4a32-b58f-7f17862055da {{(pid=62519) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1770.642293] env[62519]: DEBUG nova.scheduler.client.report [None req-7937b940-c110-4d34-9726-761103aa71cf tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Inventory has not changed for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1770.662544] env[62519]: DEBUG oslo_vmware.api [None req-caa9ca9c-57be-478a-8782-107ca9749996 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Task: {'id': task-1802927, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1770.728638] env[62519]: DEBUG nova.network.neutron [None req-0ec5e37a-37cf-41c7-9c93-a04efad8446e tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] [instance: 4a0f7975-5a07-4593-ae71-cabebdefe0fe] Instance cache missing network info. 
{{(pid=62519) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1770.912413] env[62519]: DEBUG nova.network.neutron [None req-42d8ba5a-d3d3-4638-a7a7-fe3fd6d26413 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] [instance: 2ea8304e-5b91-4908-a876-6e2c780b1da9] Updating instance_info_cache with network_info: [{"id": "224051cd-32a2-49fc-b4a3-523f444e85a2", "address": "fa:16:3e:85:a2:ec", "network": {"id": "8d760b11-0e90-40f8-a7a6-509485520bf3", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-619921944-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "448555031bb64aefafd0fcc67f4df10a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4225eb1f-0af4-4ed4-8e3d-de822eb6d4ea", "external-id": "nsx-vlan-transportzone-40", "segmentation_id": 40, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap224051cd-32", "ovs_interfaceid": "224051cd-32a2-49fc-b4a3-523f444e85a2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1770.919662] env[62519]: DEBUG oslo_concurrency.lockutils [None req-12f12914-bee9-4598-82bd-c3b629c1401f tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Lock "2ea8304e-5b91-4908-a876-6e2c780b1da9" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 35.897s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1770.954546] env[62519]: DEBUG nova.network.neutron [None req-0ec5e37a-37cf-41c7-9c93-a04efad8446e tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] [instance: 4a0f7975-5a07-4593-ae71-cabebdefe0fe] Updating instance_info_cache with network_info: [{"id": "f8b7229e-2b40-4a1b-9e16-0d01dbaa52f8", "address": "fa:16:3e:6c:b5:c8", "network": {"id": "32c7792c-9705-4830-bdbc-4c4159d566f0", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-374750788-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d73e71476254453fb23164dce09c6d41", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a0a76279-3c11-4bef-b124-2a2ee13fa377", "external-id": "nsx-vlan-transportzone-738", "segmentation_id": 738, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf8b7229e-2b", "ovs_interfaceid": "f8b7229e-2b40-4a1b-9e16-0d01dbaa52f8", "qbh_params": null, "qbg_params": null, "active": true, 
"vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1770.986280] env[62519]: DEBUG nova.virt.hardware [None req-087464bc-7aa6-42f5-9e15-0aed3a0c7295 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T07:56:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T07:55:55Z,direct_url=,disk_format='vmdk',id=15793716-f1d9-4a86-9030-717adf498693,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4069337684d348e0a1ab4eb6a1a2b14d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T07:55:56Z,virtual_size=,visibility=), allow threads: False {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1770.986569] env[62519]: DEBUG nova.virt.hardware [None req-087464bc-7aa6-42f5-9e15-0aed3a0c7295 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Flavor limits 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1770.986791] env[62519]: DEBUG nova.virt.hardware [None req-087464bc-7aa6-42f5-9e15-0aed3a0c7295 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Image limits 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1770.987067] env[62519]: DEBUG nova.virt.hardware [None req-087464bc-7aa6-42f5-9e15-0aed3a0c7295 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Flavor pref 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1770.987284] env[62519]: DEBUG nova.virt.hardware [None req-087464bc-7aa6-42f5-9e15-0aed3a0c7295 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Image pref 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1770.987521] env[62519]: DEBUG nova.virt.hardware [None req-087464bc-7aa6-42f5-9e15-0aed3a0c7295 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1770.987905] env[62519]: DEBUG nova.virt.hardware [None req-087464bc-7aa6-42f5-9e15-0aed3a0c7295 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1770.988213] env[62519]: DEBUG nova.virt.hardware [None req-087464bc-7aa6-42f5-9e15-0aed3a0c7295 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62519) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1770.988561] env[62519]: DEBUG nova.virt.hardware [None req-087464bc-7aa6-42f5-9e15-0aed3a0c7295 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Got 1 possible topologies {{(pid=62519) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1770.988816] env[62519]: DEBUG nova.virt.hardware [None req-087464bc-7aa6-42f5-9e15-0aed3a0c7295 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1770.989130] env[62519]: DEBUG nova.virt.hardware [None req-087464bc-7aa6-42f5-9e15-0aed3a0c7295 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1770.990509] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b8aa1dc-9efd-4bd8-98bf-b0215b216690 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1771.001574] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90b48c2a-792e-451d-b66b-121f432b2b1f {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1771.019237] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-087464bc-7aa6-42f5-9e15-0aed3a0c7295 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] [instance: 88f9351c-253b-49dd-a88e-b8585ea742ac] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:f5:9b:e3', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'bc1e16db-ad3b-4b7f-ab64-4609c87abac0', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'f0eeb5fa-bf84-45a4-a90f-85d593feed7e', 'vif_model': 'vmxnet3'}] {{(pid=62519) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1771.027708] env[62519]: DEBUG oslo.service.loopingcall [None req-087464bc-7aa6-42f5-9e15-0aed3a0c7295 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62519) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1771.028606] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 88f9351c-253b-49dd-a88e-b8585ea742ac] Creating VM on the ESX host {{(pid=62519) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1771.028868] env[62519]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-420f1259-15d8-4362-95d7-4b615fd57a69 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1771.052240] env[62519]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1771.052240] env[62519]: value = "task-1802928" [ 1771.052240] env[62519]: _type = "Task" [ 1771.052240] env[62519]: } to complete. 
{{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1771.062810] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1802928, 'name': CreateVM_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1771.152598] env[62519]: DEBUG oslo_concurrency.lockutils [None req-7937b940-c110-4d34-9726-761103aa71cf tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.982s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1771.155457] env[62519]: DEBUG oslo_concurrency.lockutils [None req-c39ff447-939e-4290-a599-7c5d8a75bafa tempest-ServersTestBootFromVolume-845595414 tempest-ServersTestBootFromVolume-845595414-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 13.001s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1771.155796] env[62519]: DEBUG nova.objects.instance [None req-c39ff447-939e-4290-a599-7c5d8a75bafa tempest-ServersTestBootFromVolume-845595414 tempest-ServersTestBootFromVolume-845595414-project-member] Lazy-loading 'resources' on Instance uuid 765cf18e-53a0-4cc6-ad0e-337a6f68915c {{(pid=62519) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1771.166123] env[62519]: DEBUG oslo_vmware.api [None req-caa9ca9c-57be-478a-8782-107ca9749996 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Task: {'id': task-1802927, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1771.175571] env[62519]: INFO nova.scheduler.client.report [None req-7937b940-c110-4d34-9726-761103aa71cf tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Deleted allocations for instance c8816718-0a35-4474-b162-c619b0acc154 [ 1771.193158] env[62519]: DEBUG nova.compute.manager [None req-8710f12d-0478-454f-8c54-66bd5d3dd37c tempest-ServerPasswordTestJSON-1830489623 tempest-ServerPasswordTestJSON-1830489623-project-member] [instance: 4e7db12e-c7f7-4d2a-b797-1371fc839a9e] Start spawning the instance on the hypervisor. 
{{(pid=62519) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2647}} [ 1771.234480] env[62519]: DEBUG nova.virt.hardware [None req-8710f12d-0478-454f-8c54-66bd5d3dd37c tempest-ServerPasswordTestJSON-1830489623 tempest-ServerPasswordTestJSON-1830489623-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T07:56:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T07:55:55Z,direct_url=,disk_format='vmdk',id=15793716-f1d9-4a86-9030-717adf498693,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4069337684d348e0a1ab4eb6a1a2b14d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T07:55:56Z,virtual_size=,visibility=), allow threads: False {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1771.234929] env[62519]: DEBUG nova.virt.hardware [None req-8710f12d-0478-454f-8c54-66bd5d3dd37c tempest-ServerPasswordTestJSON-1830489623 tempest-ServerPasswordTestJSON-1830489623-project-member] Flavor limits 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1771.235014] env[62519]: DEBUG nova.virt.hardware [None req-8710f12d-0478-454f-8c54-66bd5d3dd37c tempest-ServerPasswordTestJSON-1830489623 tempest-ServerPasswordTestJSON-1830489623-project-member] Image limits 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1771.235186] env[62519]: DEBUG nova.virt.hardware [None req-8710f12d-0478-454f-8c54-66bd5d3dd37c tempest-ServerPasswordTestJSON-1830489623 tempest-ServerPasswordTestJSON-1830489623-project-member] Flavor pref 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1771.235358] env[62519]: DEBUG nova.virt.hardware [None req-8710f12d-0478-454f-8c54-66bd5d3dd37c tempest-ServerPasswordTestJSON-1830489623 tempest-ServerPasswordTestJSON-1830489623-project-member] Image pref 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1771.235535] env[62519]: DEBUG nova.virt.hardware [None req-8710f12d-0478-454f-8c54-66bd5d3dd37c tempest-ServerPasswordTestJSON-1830489623 tempest-ServerPasswordTestJSON-1830489623-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1771.235777] env[62519]: DEBUG nova.virt.hardware [None req-8710f12d-0478-454f-8c54-66bd5d3dd37c tempest-ServerPasswordTestJSON-1830489623 tempest-ServerPasswordTestJSON-1830489623-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1771.236018] env[62519]: DEBUG nova.virt.hardware [None req-8710f12d-0478-454f-8c54-66bd5d3dd37c tempest-ServerPasswordTestJSON-1830489623 tempest-ServerPasswordTestJSON-1830489623-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62519) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1771.236289] env[62519]: DEBUG 
nova.virt.hardware [None req-8710f12d-0478-454f-8c54-66bd5d3dd37c tempest-ServerPasswordTestJSON-1830489623 tempest-ServerPasswordTestJSON-1830489623-project-member] Got 1 possible topologies {{(pid=62519) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1771.236490] env[62519]: DEBUG nova.virt.hardware [None req-8710f12d-0478-454f-8c54-66bd5d3dd37c tempest-ServerPasswordTestJSON-1830489623 tempest-ServerPasswordTestJSON-1830489623-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1771.236701] env[62519]: DEBUG nova.virt.hardware [None req-8710f12d-0478-454f-8c54-66bd5d3dd37c tempest-ServerPasswordTestJSON-1830489623 tempest-ServerPasswordTestJSON-1830489623-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1771.237655] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c098b19b-e8c0-4769-8ef7-b40ce8c01367 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1771.248862] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-235d4de2-b4c7-4258-a55b-e24553851308 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1771.416316] env[62519]: DEBUG oslo_concurrency.lockutils [None req-42d8ba5a-d3d3-4638-a7a7-fe3fd6d26413 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Releasing lock "refresh_cache-2ea8304e-5b91-4908-a876-6e2c780b1da9" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1771.461350] env[62519]: DEBUG oslo_concurrency.lockutils [None req-0ec5e37a-37cf-41c7-9c93-a04efad8446e tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Releasing lock "refresh_cache-4a0f7975-5a07-4593-ae71-cabebdefe0fe" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1771.461350] env[62519]: DEBUG nova.compute.manager [None req-0ec5e37a-37cf-41c7-9c93-a04efad8446e tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] [instance: 4a0f7975-5a07-4593-ae71-cabebdefe0fe] Instance network_info: |[{"id": "f8b7229e-2b40-4a1b-9e16-0d01dbaa52f8", "address": "fa:16:3e:6c:b5:c8", "network": {"id": "32c7792c-9705-4830-bdbc-4c4159d566f0", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-374750788-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d73e71476254453fb23164dce09c6d41", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a0a76279-3c11-4bef-b124-2a2ee13fa377", "external-id": "nsx-vlan-transportzone-738", "segmentation_id": 738, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf8b7229e-2b", 
"ovs_interfaceid": "f8b7229e-2b40-4a1b-9e16-0d01dbaa52f8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62519) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2004}} [ 1771.461350] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-0ec5e37a-37cf-41c7-9c93-a04efad8446e tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] [instance: 4a0f7975-5a07-4593-ae71-cabebdefe0fe] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:6c:b5:c8', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a0a76279-3c11-4bef-b124-2a2ee13fa377', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'f8b7229e-2b40-4a1b-9e16-0d01dbaa52f8', 'vif_model': 'vmxnet3'}] {{(pid=62519) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1771.471243] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-0ec5e37a-37cf-41c7-9c93-a04efad8446e tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Creating folder: Project (d73e71476254453fb23164dce09c6d41). Parent ref: group-v373567. {{(pid=62519) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1771.471602] env[62519]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-8361a9c1-0f8d-49d4-bf69-965bed028eaa {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1771.486272] env[62519]: INFO nova.virt.vmwareapi.vm_util [None req-0ec5e37a-37cf-41c7-9c93-a04efad8446e tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Created folder: Project (d73e71476254453fb23164dce09c6d41) in parent group-v373567. [ 1771.486551] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-0ec5e37a-37cf-41c7-9c93-a04efad8446e tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Creating folder: Instances. Parent ref: group-v373795. {{(pid=62519) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1771.487148] env[62519]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-a05eae6a-fb89-41b2-8632-b1e70719d117 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1771.501457] env[62519]: INFO nova.virt.vmwareapi.vm_util [None req-0ec5e37a-37cf-41c7-9c93-a04efad8446e tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Created folder: Instances in parent group-v373795. [ 1771.501747] env[62519]: DEBUG oslo.service.loopingcall [None req-0ec5e37a-37cf-41c7-9c93-a04efad8446e tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62519) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1771.501959] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4a0f7975-5a07-4593-ae71-cabebdefe0fe] Creating VM on the ESX host {{(pid=62519) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1771.502199] env[62519]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-90335959-e8a2-42cc-a527-9ed9c5f473ee {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1771.525671] env[62519]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1771.525671] env[62519]: value = "task-1802931" [ 1771.525671] env[62519]: _type = "Task" [ 1771.525671] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1771.535401] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1802931, 'name': CreateVM_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1771.563988] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1802928, 'name': CreateVM_Task} progress is 25%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1771.598282] env[62519]: DEBUG oslo_concurrency.lockutils [None req-aadb616d-bf15-4f6c-b422-c092eb205601 tempest-ServersTestManualDisk-231377638 tempest-ServersTestManualDisk-231377638-project-member] Acquiring lock "0dc95d5e-270c-44cf-b13a-d9ecd0cc3b17" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1771.598562] env[62519]: DEBUG oslo_concurrency.lockutils [None req-aadb616d-bf15-4f6c-b422-c092eb205601 tempest-ServersTestManualDisk-231377638 tempest-ServersTestManualDisk-231377638-project-member] Lock "0dc95d5e-270c-44cf-b13a-d9ecd0cc3b17" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1771.661626] env[62519]: DEBUG oslo_vmware.api [None req-caa9ca9c-57be-478a-8782-107ca9749996 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Task: {'id': task-1802927, 'name': ReconfigVM_Task, 'duration_secs': 1.831227} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1771.662164] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-caa9ca9c-57be-478a-8782-107ca9749996 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] [instance: 5da884af-d8d2-409b-99bd-e5370e44e9f0] Reconfigured VM instance instance-00000055 to attach disk [datastore1] 5da884af-d8d2-409b-99bd-e5370e44e9f0/5da884af-d8d2-409b-99bd-e5370e44e9f0.vmdk or device None with type sparse {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1771.663155] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-518a11fe-1e6b-4d31-b605-56acc26cebc3 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1771.671670] env[62519]: DEBUG oslo_vmware.api [None req-caa9ca9c-57be-478a-8782-107ca9749996 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Waiting for the task: (returnval){ [ 1771.671670] env[62519]: value = "task-1802932" [ 1771.671670] env[62519]: _type = "Task" [ 1771.671670] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1771.686549] env[62519]: DEBUG oslo_vmware.api [None req-caa9ca9c-57be-478a-8782-107ca9749996 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Task: {'id': task-1802932, 'name': Rename_Task} progress is 5%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1771.687321] env[62519]: DEBUG oslo_concurrency.lockutils [None req-7937b940-c110-4d34-9726-761103aa71cf tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Lock "c8816718-0a35-4474-b162-c619b0acc154" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 16.892s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1771.761696] env[62519]: DEBUG nova.compute.manager [req-a4b58859-4ad8-454e-83ae-1bec1b817a4f req-af70bd3c-be8e-45d4-8eb9-21efc47228a0 service nova] [instance: 4a0f7975-5a07-4593-ae71-cabebdefe0fe] Received event network-changed-f8b7229e-2b40-4a1b-9e16-0d01dbaa52f8 {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1771.762159] env[62519]: DEBUG nova.compute.manager [req-a4b58859-4ad8-454e-83ae-1bec1b817a4f req-af70bd3c-be8e-45d4-8eb9-21efc47228a0 service nova] [instance: 4a0f7975-5a07-4593-ae71-cabebdefe0fe] Refreshing instance network info cache due to event network-changed-f8b7229e-2b40-4a1b-9e16-0d01dbaa52f8. 
{{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11628}} [ 1771.762543] env[62519]: DEBUG oslo_concurrency.lockutils [req-a4b58859-4ad8-454e-83ae-1bec1b817a4f req-af70bd3c-be8e-45d4-8eb9-21efc47228a0 service nova] Acquiring lock "refresh_cache-4a0f7975-5a07-4593-ae71-cabebdefe0fe" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1771.762830] env[62519]: DEBUG oslo_concurrency.lockutils [req-a4b58859-4ad8-454e-83ae-1bec1b817a4f req-af70bd3c-be8e-45d4-8eb9-21efc47228a0 service nova] Acquired lock "refresh_cache-4a0f7975-5a07-4593-ae71-cabebdefe0fe" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1771.763151] env[62519]: DEBUG nova.network.neutron [req-a4b58859-4ad8-454e-83ae-1bec1b817a4f req-af70bd3c-be8e-45d4-8eb9-21efc47228a0 service nova] [instance: 4a0f7975-5a07-4593-ae71-cabebdefe0fe] Refreshing network info cache for port f8b7229e-2b40-4a1b-9e16-0d01dbaa52f8 {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1771.907673] env[62519]: DEBUG oslo_concurrency.lockutils [None req-093eb8f0-6a4b-4135-a0a0-fa19e327e07c tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Acquiring lock "10bfd4ac-6f11-4c96-87a0-ce74bc1193c4" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1771.907898] env[62519]: DEBUG oslo_concurrency.lockutils [None req-093eb8f0-6a4b-4135-a0a0-fa19e327e07c tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Lock "10bfd4ac-6f11-4c96-87a0-ce74bc1193c4" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1772.023823] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe87d8e3-89b6-4247-a4d5-265b8fc9c88a {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1772.038151] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-223c12e4-a809-4a84-a0c1-912664f008a7 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1772.042306] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1802931, 'name': CreateVM_Task} progress is 99%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1772.081667] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bae4d95d-b3da-4bfd-a029-fcadfa2e4dcb {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1772.092975] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c490d821-7fd0-44fc-9061-ad001eea65ac {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1772.097281] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1802928, 'name': CreateVM_Task, 'duration_secs': 0.767172} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1772.097554] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 88f9351c-253b-49dd-a88e-b8585ea742ac] Created VM on the ESX host {{(pid=62519) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1772.099499] env[62519]: DEBUG oslo_concurrency.lockutils [None req-087464bc-7aa6-42f5-9e15-0aed3a0c7295 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1772.099499] env[62519]: DEBUG oslo_concurrency.lockutils [None req-087464bc-7aa6-42f5-9e15-0aed3a0c7295 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Acquired lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1772.099499] env[62519]: DEBUG oslo_concurrency.lockutils [None req-087464bc-7aa6-42f5-9e15-0aed3a0c7295 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1772.099499] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-98868da7-b2b1-44c1-a63c-e953f376088b {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1772.110413] env[62519]: DEBUG nova.compute.manager [None req-aadb616d-bf15-4f6c-b422-c092eb205601 tempest-ServersTestManualDisk-231377638 tempest-ServersTestManualDisk-231377638-project-member] [instance: 0dc95d5e-270c-44cf-b13a-d9ecd0cc3b17] Starting instance... {{(pid=62519) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2441}} [ 1772.114107] env[62519]: DEBUG nova.compute.provider_tree [None req-c39ff447-939e-4290-a599-7c5d8a75bafa tempest-ServersTestBootFromVolume-845595414 tempest-ServersTestBootFromVolume-845595414-project-member] Inventory has not changed in ProviderTree for provider: f8ca0d98-9158-4b85-ae0e-b106f966dd44 {{(pid=62519) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1772.118978] env[62519]: DEBUG oslo_vmware.api [None req-087464bc-7aa6-42f5-9e15-0aed3a0c7295 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Waiting for the task: (returnval){ [ 1772.118978] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]523c6cf6-7065-fa85-6495-111fb412e18a" [ 1772.118978] env[62519]: _type = "Task" [ 1772.118978] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1772.130115] env[62519]: DEBUG oslo_vmware.api [None req-087464bc-7aa6-42f5-9e15-0aed3a0c7295 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]523c6cf6-7065-fa85-6495-111fb412e18a, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1772.168315] env[62519]: DEBUG nova.network.neutron [None req-8710f12d-0478-454f-8c54-66bd5d3dd37c tempest-ServerPasswordTestJSON-1830489623 tempest-ServerPasswordTestJSON-1830489623-project-member] [instance: 4e7db12e-c7f7-4d2a-b797-1371fc839a9e] Successfully updated port: 76d28a50-96fe-4a32-b58f-7f17862055da {{(pid=62519) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1772.185189] env[62519]: DEBUG oslo_vmware.api [None req-caa9ca9c-57be-478a-8782-107ca9749996 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Task: {'id': task-1802932, 'name': Rename_Task, 'duration_secs': 0.192795} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1772.185451] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-caa9ca9c-57be-478a-8782-107ca9749996 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] [instance: 5da884af-d8d2-409b-99bd-e5370e44e9f0] Powering on the VM {{(pid=62519) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1772.185614] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b39fb033-8b73-4cb6-a707-484294180df2 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1772.194927] env[62519]: DEBUG oslo_vmware.api [None req-caa9ca9c-57be-478a-8782-107ca9749996 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Waiting for the task: (returnval){ [ 1772.194927] env[62519]: value = "task-1802933" [ 1772.194927] env[62519]: _type = "Task" [ 1772.194927] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1772.208599] env[62519]: DEBUG oslo_vmware.api [None req-caa9ca9c-57be-478a-8782-107ca9749996 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Task: {'id': task-1802933, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1772.410606] env[62519]: DEBUG nova.compute.manager [None req-093eb8f0-6a4b-4135-a0a0-fa19e327e07c tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] [instance: 10bfd4ac-6f11-4c96-87a0-ce74bc1193c4] Starting instance... 
{{(pid=62519) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2441}} [ 1772.495502] env[62519]: DEBUG oslo_concurrency.lockutils [None req-664ac020-915d-43b3-aaa3-507984d945e1 tempest-ServersWithSpecificFlavorTestJSON-2102225660 tempest-ServersWithSpecificFlavorTestJSON-2102225660-project-member] Acquiring lock "f0925a44-c15b-4415-99bc-1b2366292fe4" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1772.496151] env[62519]: DEBUG oslo_concurrency.lockutils [None req-664ac020-915d-43b3-aaa3-507984d945e1 tempest-ServersWithSpecificFlavorTestJSON-2102225660 tempest-ServersWithSpecificFlavorTestJSON-2102225660-project-member] Lock "f0925a44-c15b-4415-99bc-1b2366292fe4" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1772.496151] env[62519]: DEBUG oslo_concurrency.lockutils [None req-664ac020-915d-43b3-aaa3-507984d945e1 tempest-ServersWithSpecificFlavorTestJSON-2102225660 tempest-ServersWithSpecificFlavorTestJSON-2102225660-project-member] Acquiring lock "f0925a44-c15b-4415-99bc-1b2366292fe4-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1772.496386] env[62519]: DEBUG oslo_concurrency.lockutils [None req-664ac020-915d-43b3-aaa3-507984d945e1 tempest-ServersWithSpecificFlavorTestJSON-2102225660 tempest-ServersWithSpecificFlavorTestJSON-2102225660-project-member] Lock "f0925a44-c15b-4415-99bc-1b2366292fe4-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1772.496386] env[62519]: DEBUG oslo_concurrency.lockutils [None req-664ac020-915d-43b3-aaa3-507984d945e1 tempest-ServersWithSpecificFlavorTestJSON-2102225660 tempest-ServersWithSpecificFlavorTestJSON-2102225660-project-member] Lock "f0925a44-c15b-4415-99bc-1b2366292fe4-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1772.500439] env[62519]: INFO nova.compute.manager [None req-664ac020-915d-43b3-aaa3-507984d945e1 tempest-ServersWithSpecificFlavorTestJSON-2102225660 tempest-ServersWithSpecificFlavorTestJSON-2102225660-project-member] [instance: f0925a44-c15b-4415-99bc-1b2366292fe4] Terminating instance [ 1772.506942] env[62519]: DEBUG nova.network.neutron [req-a4b58859-4ad8-454e-83ae-1bec1b817a4f req-af70bd3c-be8e-45d4-8eb9-21efc47228a0 service nova] [instance: 4a0f7975-5a07-4593-ae71-cabebdefe0fe] Updated VIF entry in instance network info cache for port f8b7229e-2b40-4a1b-9e16-0d01dbaa52f8. 
{{(pid=62519) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1772.507293] env[62519]: DEBUG nova.network.neutron [req-a4b58859-4ad8-454e-83ae-1bec1b817a4f req-af70bd3c-be8e-45d4-8eb9-21efc47228a0 service nova] [instance: 4a0f7975-5a07-4593-ae71-cabebdefe0fe] Updating instance_info_cache with network_info: [{"id": "f8b7229e-2b40-4a1b-9e16-0d01dbaa52f8", "address": "fa:16:3e:6c:b5:c8", "network": {"id": "32c7792c-9705-4830-bdbc-4c4159d566f0", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-374750788-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d73e71476254453fb23164dce09c6d41", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a0a76279-3c11-4bef-b124-2a2ee13fa377", "external-id": "nsx-vlan-transportzone-738", "segmentation_id": 738, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf8b7229e-2b", "ovs_interfaceid": "f8b7229e-2b40-4a1b-9e16-0d01dbaa52f8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1772.538425] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1802931, 'name': CreateVM_Task, 'duration_secs': 0.571208} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1772.538647] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4a0f7975-5a07-4593-ae71-cabebdefe0fe] Created VM on the ESX host {{(pid=62519) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1772.539402] env[62519]: DEBUG oslo_concurrency.lockutils [None req-0ec5e37a-37cf-41c7-9c93-a04efad8446e tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1772.608663] env[62519]: DEBUG oslo_concurrency.lockutils [None req-58d8e09b-2ec4-4296-bd95-201b039f8f45 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Acquiring lock "2bc8f11e-82fc-4acb-945e-15327c133920" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1772.608969] env[62519]: DEBUG oslo_concurrency.lockutils [None req-58d8e09b-2ec4-4296-bd95-201b039f8f45 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Lock "2bc8f11e-82fc-4acb-945e-15327c133920" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1772.618585] env[62519]: DEBUG nova.scheduler.client.report [None req-c39ff447-939e-4290-a599-7c5d8a75bafa 
tempest-ServersTestBootFromVolume-845595414 tempest-ServersTestBootFromVolume-845595414-project-member] Inventory has not changed for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1772.635784] env[62519]: DEBUG oslo_vmware.api [None req-087464bc-7aa6-42f5-9e15-0aed3a0c7295 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]523c6cf6-7065-fa85-6495-111fb412e18a, 'name': SearchDatastore_Task, 'duration_secs': 0.021767} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1772.636116] env[62519]: DEBUG oslo_concurrency.lockutils [None req-087464bc-7aa6-42f5-9e15-0aed3a0c7295 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Releasing lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1772.636461] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-087464bc-7aa6-42f5-9e15-0aed3a0c7295 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] [instance: 88f9351c-253b-49dd-a88e-b8585ea742ac] Processing image 15793716-f1d9-4a86-9030-717adf498693 {{(pid=62519) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1772.636571] env[62519]: DEBUG oslo_concurrency.lockutils [None req-087464bc-7aa6-42f5-9e15-0aed3a0c7295 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1772.636713] env[62519]: DEBUG oslo_concurrency.lockutils [None req-087464bc-7aa6-42f5-9e15-0aed3a0c7295 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Acquired lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1772.636890] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-087464bc-7aa6-42f5-9e15-0aed3a0c7295 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62519) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1772.637183] env[62519]: DEBUG oslo_concurrency.lockutils [None req-0ec5e37a-37cf-41c7-9c93-a04efad8446e tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Acquired lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1772.637482] env[62519]: DEBUG oslo_concurrency.lockutils [None req-0ec5e37a-37cf-41c7-9c93-a04efad8446e tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1772.637703] env[62519]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-8e9659ca-c5e7-4c69-9a22-3ff6d50b6ec3 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1772.640683] env[62519]: DEBUG oslo_concurrency.lockutils [None req-aadb616d-bf15-4f6c-b422-c092eb205601 tempest-ServersTestManualDisk-231377638 tempest-ServersTestManualDisk-231377638-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1772.641427] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ffd2f705-cd3c-42b1-821c-20d1b9cc414b {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1772.647659] env[62519]: DEBUG oslo_vmware.api [None req-0ec5e37a-37cf-41c7-9c93-a04efad8446e tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Waiting for the task: (returnval){ [ 1772.647659] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]52d7f814-0cfe-8ee9-e492-a8b4bab39552" [ 1772.647659] env[62519]: _type = "Task" [ 1772.647659] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1772.652114] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-087464bc-7aa6-42f5-9e15-0aed3a0c7295 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62519) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1772.652294] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-087464bc-7aa6-42f5-9e15-0aed3a0c7295 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62519) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1772.653400] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e96a53ea-cbbc-44f7-b914-59477c5f9c3b {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1772.658273] env[62519]: DEBUG oslo_vmware.api [None req-0ec5e37a-37cf-41c7-9c93-a04efad8446e tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52d7f814-0cfe-8ee9-e492-a8b4bab39552, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1772.661715] env[62519]: DEBUG oslo_vmware.api [None req-087464bc-7aa6-42f5-9e15-0aed3a0c7295 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Waiting for the task: (returnval){ [ 1772.661715] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]5291fd05-8155-b6ec-8916-7cdb2f9d90a0" [ 1772.661715] env[62519]: _type = "Task" [ 1772.661715] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1772.671291] env[62519]: DEBUG oslo_concurrency.lockutils [None req-8710f12d-0478-454f-8c54-66bd5d3dd37c tempest-ServerPasswordTestJSON-1830489623 tempest-ServerPasswordTestJSON-1830489623-project-member] Acquiring lock "refresh_cache-4e7db12e-c7f7-4d2a-b797-1371fc839a9e" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1772.671428] env[62519]: DEBUG oslo_concurrency.lockutils [None req-8710f12d-0478-454f-8c54-66bd5d3dd37c tempest-ServerPasswordTestJSON-1830489623 tempest-ServerPasswordTestJSON-1830489623-project-member] Acquired lock "refresh_cache-4e7db12e-c7f7-4d2a-b797-1371fc839a9e" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1772.671572] env[62519]: DEBUG nova.network.neutron [None req-8710f12d-0478-454f-8c54-66bd5d3dd37c tempest-ServerPasswordTestJSON-1830489623 tempest-ServerPasswordTestJSON-1830489623-project-member] [instance: 4e7db12e-c7f7-4d2a-b797-1371fc839a9e] Building network info cache for instance {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1772.672672] env[62519]: DEBUG oslo_vmware.api [None req-087464bc-7aa6-42f5-9e15-0aed3a0c7295 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]5291fd05-8155-b6ec-8916-7cdb2f9d90a0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1772.706635] env[62519]: DEBUG oslo_vmware.api [None req-caa9ca9c-57be-478a-8782-107ca9749996 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Task: {'id': task-1802933, 'name': PowerOnVM_Task} progress is 87%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1772.932377] env[62519]: DEBUG oslo_concurrency.lockutils [None req-093eb8f0-6a4b-4135-a0a0-fa19e327e07c tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1772.969320] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-42d8ba5a-d3d3-4638-a7a7-fe3fd6d26413 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] [instance: 2ea8304e-5b91-4908-a876-6e2c780b1da9] Powering off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1772.970148] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e6a02a40-bd37-42e7-911d-12700a812c59 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1772.979142] env[62519]: DEBUG oslo_vmware.api [None req-42d8ba5a-d3d3-4638-a7a7-fe3fd6d26413 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Waiting for the task: (returnval){ [ 1772.979142] env[62519]: value = "task-1802934" [ 1772.979142] env[62519]: _type = "Task" [ 1772.979142] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1772.987463] env[62519]: DEBUG oslo_vmware.api [None req-42d8ba5a-d3d3-4638-a7a7-fe3fd6d26413 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Task: {'id': task-1802934, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1773.004457] env[62519]: DEBUG nova.compute.manager [None req-664ac020-915d-43b3-aaa3-507984d945e1 tempest-ServersWithSpecificFlavorTestJSON-2102225660 tempest-ServersWithSpecificFlavorTestJSON-2102225660-project-member] [instance: f0925a44-c15b-4415-99bc-1b2366292fe4] Start destroying the instance on the hypervisor. 
{{(pid=62519) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3166}} [ 1773.004757] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-664ac020-915d-43b3-aaa3-507984d945e1 tempest-ServersWithSpecificFlavorTestJSON-2102225660 tempest-ServersWithSpecificFlavorTestJSON-2102225660-project-member] [instance: f0925a44-c15b-4415-99bc-1b2366292fe4] Destroying instance {{(pid=62519) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1773.005560] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7de66f78-bbb1-48b3-83d8-52af3935a4ad {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1773.010179] env[62519]: DEBUG oslo_concurrency.lockutils [req-a4b58859-4ad8-454e-83ae-1bec1b817a4f req-af70bd3c-be8e-45d4-8eb9-21efc47228a0 service nova] Releasing lock "refresh_cache-4a0f7975-5a07-4593-ae71-cabebdefe0fe" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1773.013660] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-664ac020-915d-43b3-aaa3-507984d945e1 tempest-ServersWithSpecificFlavorTestJSON-2102225660 tempest-ServersWithSpecificFlavorTestJSON-2102225660-project-member] [instance: f0925a44-c15b-4415-99bc-1b2366292fe4] Powering off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1773.013901] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-57eb9d01-b233-42c4-9a9f-39d1d3aa9949 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1773.034701] env[62519]: DEBUG oslo_vmware.api [None req-664ac020-915d-43b3-aaa3-507984d945e1 tempest-ServersWithSpecificFlavorTestJSON-2102225660 tempest-ServersWithSpecificFlavorTestJSON-2102225660-project-member] Waiting for the task: (returnval){ [ 1773.034701] env[62519]: value = "task-1802935" [ 1773.034701] env[62519]: _type = "Task" [ 1773.034701] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1773.043528] env[62519]: DEBUG oslo_vmware.api [None req-664ac020-915d-43b3-aaa3-507984d945e1 tempest-ServersWithSpecificFlavorTestJSON-2102225660 tempest-ServersWithSpecificFlavorTestJSON-2102225660-project-member] Task: {'id': task-1802935, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1773.111346] env[62519]: DEBUG nova.compute.manager [None req-58d8e09b-2ec4-4296-bd95-201b039f8f45 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] [instance: 2bc8f11e-82fc-4acb-945e-15327c133920] Starting instance... 
{{(pid=62519) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2441}} [ 1773.125975] env[62519]: DEBUG oslo_concurrency.lockutils [None req-c39ff447-939e-4290-a599-7c5d8a75bafa tempest-ServersTestBootFromVolume-845595414 tempest-ServersTestBootFromVolume-845595414-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.970s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1773.128653] env[62519]: DEBUG oslo_concurrency.lockutils [None req-f01efc4a-1a23-4e62-a702-1f856670c4f6 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 9.911s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1773.128995] env[62519]: DEBUG nova.objects.instance [None req-f01efc4a-1a23-4e62-a702-1f856670c4f6 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Lazy-loading 'resources' on Instance uuid dd60afd6-2834-4fca-a846-e39d57aabd60 {{(pid=62519) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1773.150212] env[62519]: INFO nova.scheduler.client.report [None req-c39ff447-939e-4290-a599-7c5d8a75bafa tempest-ServersTestBootFromVolume-845595414 tempest-ServersTestBootFromVolume-845595414-project-member] Deleted allocations for instance 765cf18e-53a0-4cc6-ad0e-337a6f68915c [ 1773.168038] env[62519]: DEBUG oslo_vmware.api [None req-0ec5e37a-37cf-41c7-9c93-a04efad8446e tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52d7f814-0cfe-8ee9-e492-a8b4bab39552, 'name': SearchDatastore_Task, 'duration_secs': 0.01105} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1773.169518] env[62519]: DEBUG oslo_concurrency.lockutils [None req-0ec5e37a-37cf-41c7-9c93-a04efad8446e tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Releasing lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1773.170032] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-0ec5e37a-37cf-41c7-9c93-a04efad8446e tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] [instance: 4a0f7975-5a07-4593-ae71-cabebdefe0fe] Processing image 15793716-f1d9-4a86-9030-717adf498693 {{(pid=62519) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1773.170299] env[62519]: DEBUG oslo_concurrency.lockutils [None req-0ec5e37a-37cf-41c7-9c93-a04efad8446e tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1773.179331] env[62519]: DEBUG oslo_vmware.api [None req-087464bc-7aa6-42f5-9e15-0aed3a0c7295 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]5291fd05-8155-b6ec-8916-7cdb2f9d90a0, 'name': SearchDatastore_Task, 'duration_secs': 0.014277} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1773.180842] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-76bb9118-d47a-42c0-aeae-a42ad81594d3 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1773.190717] env[62519]: DEBUG oslo_vmware.api [None req-087464bc-7aa6-42f5-9e15-0aed3a0c7295 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Waiting for the task: (returnval){ [ 1773.190717] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]52a6325d-be03-af32-631d-6b6f725a3d96" [ 1773.190717] env[62519]: _type = "Task" [ 1773.190717] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1773.198290] env[62519]: DEBUG oslo_vmware.api [None req-087464bc-7aa6-42f5-9e15-0aed3a0c7295 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52a6325d-be03-af32-631d-6b6f725a3d96, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1773.207915] env[62519]: DEBUG oslo_vmware.api [None req-caa9ca9c-57be-478a-8782-107ca9749996 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Task: {'id': task-1802933, 'name': PowerOnVM_Task, 'duration_secs': 0.993211} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1773.208226] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-caa9ca9c-57be-478a-8782-107ca9749996 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] [instance: 5da884af-d8d2-409b-99bd-e5370e44e9f0] Powered on the VM {{(pid=62519) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1773.208435] env[62519]: INFO nova.compute.manager [None req-caa9ca9c-57be-478a-8782-107ca9749996 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] [instance: 5da884af-d8d2-409b-99bd-e5370e44e9f0] Took 11.21 seconds to spawn the instance on the hypervisor. [ 1773.208673] env[62519]: DEBUG nova.compute.manager [None req-caa9ca9c-57be-478a-8782-107ca9749996 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] [instance: 5da884af-d8d2-409b-99bd-e5370e44e9f0] Checking state {{(pid=62519) _get_power_state /opt/stack/nova/nova/compute/manager.py:1799}} [ 1773.209492] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9dc53610-8aae-4f34-9492-69ad74cb7a85 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1773.231282] env[62519]: DEBUG nova.network.neutron [None req-8710f12d-0478-454f-8c54-66bd5d3dd37c tempest-ServerPasswordTestJSON-1830489623 tempest-ServerPasswordTestJSON-1830489623-project-member] [instance: 4e7db12e-c7f7-4d2a-b797-1371fc839a9e] Instance cache missing network info. {{(pid=62519) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1773.414799] env[62519]: DEBUG nova.network.neutron [None req-8710f12d-0478-454f-8c54-66bd5d3dd37c tempest-ServerPasswordTestJSON-1830489623 tempest-ServerPasswordTestJSON-1830489623-project-member] [instance: 4e7db12e-c7f7-4d2a-b797-1371fc839a9e] Updating instance_info_cache with network_info: [{"id": "76d28a50-96fe-4a32-b58f-7f17862055da", "address": "fa:16:3e:20:70:ee", "network": {"id": "87f9dfbc-4e97-4fda-85bf-650a9b1d4370", "bridge": "br-int", "label": "tempest-ServerPasswordTestJSON-169651517-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f19ee01b59dc4df2889524f41f227f56", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cae1d6a8-cbba-4bbf-af10-ba5467340475", "external-id": "nsx-vlan-transportzone-271", "segmentation_id": 271, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap76d28a50-96", "ovs_interfaceid": "76d28a50-96fe-4a32-b58f-7f17862055da", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1773.490788] env[62519]: DEBUG oslo_vmware.api [None req-42d8ba5a-d3d3-4638-a7a7-fe3fd6d26413 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] 
Task: {'id': task-1802934, 'name': PowerOffVM_Task, 'duration_secs': 0.284781} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1773.490788] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-42d8ba5a-d3d3-4638-a7a7-fe3fd6d26413 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] [instance: 2ea8304e-5b91-4908-a876-6e2c780b1da9] Powered off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1773.492230] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1eddb80-0286-49ed-98dc-cf4846c9df3b {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1773.515774] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d6c95cc-33df-4c55-b444-273a7c74f497 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1773.544137] env[62519]: DEBUG oslo_vmware.api [None req-664ac020-915d-43b3-aaa3-507984d945e1 tempest-ServersWithSpecificFlavorTestJSON-2102225660 tempest-ServersWithSpecificFlavorTestJSON-2102225660-project-member] Task: {'id': task-1802935, 'name': PowerOffVM_Task, 'duration_secs': 0.247141} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1773.546128] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-664ac020-915d-43b3-aaa3-507984d945e1 tempest-ServersWithSpecificFlavorTestJSON-2102225660 tempest-ServersWithSpecificFlavorTestJSON-2102225660-project-member] [instance: f0925a44-c15b-4415-99bc-1b2366292fe4] Powered off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1773.546344] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-664ac020-915d-43b3-aaa3-507984d945e1 tempest-ServersWithSpecificFlavorTestJSON-2102225660 tempest-ServersWithSpecificFlavorTestJSON-2102225660-project-member] [instance: f0925a44-c15b-4415-99bc-1b2366292fe4] Unregistering the VM {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1773.546791] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b08e3124-6182-4718-ab7e-7ba8a8c8f1f3 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1773.552881] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-42d8ba5a-d3d3-4638-a7a7-fe3fd6d26413 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] [instance: 2ea8304e-5b91-4908-a876-6e2c780b1da9] Powering off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1773.553147] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b6dc8dad-7d4a-4ae5-b658-d118dd42f4e2 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1773.561186] env[62519]: DEBUG oslo_vmware.api [None req-42d8ba5a-d3d3-4638-a7a7-fe3fd6d26413 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Waiting for the task: (returnval){ [ 1773.561186] env[62519]: value = "task-1802937" [ 1773.561186] env[62519]: _type = "Task" [ 
1773.561186] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1773.569493] env[62519]: DEBUG oslo_vmware.api [None req-42d8ba5a-d3d3-4638-a7a7-fe3fd6d26413 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Task: {'id': task-1802937, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1773.639530] env[62519]: DEBUG oslo_concurrency.lockutils [None req-58d8e09b-2ec4-4296-bd95-201b039f8f45 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1773.662775] env[62519]: DEBUG oslo_concurrency.lockutils [None req-c39ff447-939e-4290-a599-7c5d8a75bafa tempest-ServersTestBootFromVolume-845595414 tempest-ServersTestBootFromVolume-845595414-project-member] Lock "765cf18e-53a0-4cc6-ad0e-337a6f68915c" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 21.904s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1773.701411] env[62519]: DEBUG oslo_vmware.api [None req-087464bc-7aa6-42f5-9e15-0aed3a0c7295 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52a6325d-be03-af32-631d-6b6f725a3d96, 'name': SearchDatastore_Task, 'duration_secs': 0.013064} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1773.701815] env[62519]: DEBUG oslo_concurrency.lockutils [None req-087464bc-7aa6-42f5-9e15-0aed3a0c7295 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Releasing lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1773.702495] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-087464bc-7aa6-42f5-9e15-0aed3a0c7295 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk to [datastore1] 88f9351c-253b-49dd-a88e-b8585ea742ac/88f9351c-253b-49dd-a88e-b8585ea742ac.vmdk {{(pid=62519) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1773.702867] env[62519]: DEBUG oslo_concurrency.lockutils [None req-0ec5e37a-37cf-41c7-9c93-a04efad8446e tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Acquired lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1773.703113] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-0ec5e37a-37cf-41c7-9c93-a04efad8446e tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Creating directory with path [datastore1] 
devstack-image-cache_base {{(pid=62519) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1773.703400] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-3c0e9112-5ef7-42a8-9099-83d43ec4f2e3 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1773.705763] env[62519]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e160fae2-0394-45a6-bf56-6c2a0b579b97 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1773.717007] env[62519]: DEBUG oslo_vmware.api [None req-087464bc-7aa6-42f5-9e15-0aed3a0c7295 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Waiting for the task: (returnval){ [ 1773.717007] env[62519]: value = "task-1802938" [ 1773.717007] env[62519]: _type = "Task" [ 1773.717007] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1773.727318] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-0ec5e37a-37cf-41c7-9c93-a04efad8446e tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62519) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1773.727499] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-0ec5e37a-37cf-41c7-9c93-a04efad8446e tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62519) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1773.731098] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-29352316-77a8-4792-9234-f7da373d6ce6 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1773.732943] env[62519]: INFO nova.compute.manager [None req-caa9ca9c-57be-478a-8782-107ca9749996 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] [instance: 5da884af-d8d2-409b-99bd-e5370e44e9f0] Took 34.37 seconds to build instance. [ 1773.736926] env[62519]: DEBUG oslo_vmware.api [None req-087464bc-7aa6-42f5-9e15-0aed3a0c7295 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Task: {'id': task-1802938, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1773.742426] env[62519]: DEBUG oslo_vmware.api [None req-0ec5e37a-37cf-41c7-9c93-a04efad8446e tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Waiting for the task: (returnval){ [ 1773.742426] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]52d8ebc2-41ab-84cc-6902-fab6cf2cd9cf" [ 1773.742426] env[62519]: _type = "Task" [ 1773.742426] env[62519]: } to complete. 
{{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1773.752888] env[62519]: DEBUG oslo_vmware.api [None req-0ec5e37a-37cf-41c7-9c93-a04efad8446e tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52d8ebc2-41ab-84cc-6902-fab6cf2cd9cf, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1773.858695] env[62519]: DEBUG nova.compute.manager [req-394ead6c-a796-4077-9d9b-d4bf141d83ac req-2a9a4b0a-927c-4d02-8730-3656121b172d service nova] [instance: 4e7db12e-c7f7-4d2a-b797-1371fc839a9e] Received event network-vif-plugged-76d28a50-96fe-4a32-b58f-7f17862055da {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1773.858992] env[62519]: DEBUG oslo_concurrency.lockutils [req-394ead6c-a796-4077-9d9b-d4bf141d83ac req-2a9a4b0a-927c-4d02-8730-3656121b172d service nova] Acquiring lock "4e7db12e-c7f7-4d2a-b797-1371fc839a9e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1773.859325] env[62519]: DEBUG oslo_concurrency.lockutils [req-394ead6c-a796-4077-9d9b-d4bf141d83ac req-2a9a4b0a-927c-4d02-8730-3656121b172d service nova] Lock "4e7db12e-c7f7-4d2a-b797-1371fc839a9e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1773.859325] env[62519]: DEBUG oslo_concurrency.lockutils [req-394ead6c-a796-4077-9d9b-d4bf141d83ac req-2a9a4b0a-927c-4d02-8730-3656121b172d service nova] Lock "4e7db12e-c7f7-4d2a-b797-1371fc839a9e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1773.859583] env[62519]: DEBUG nova.compute.manager [req-394ead6c-a796-4077-9d9b-d4bf141d83ac req-2a9a4b0a-927c-4d02-8730-3656121b172d service nova] [instance: 4e7db12e-c7f7-4d2a-b797-1371fc839a9e] No waiting events found dispatching network-vif-plugged-76d28a50-96fe-4a32-b58f-7f17862055da {{(pid=62519) pop_instance_event /opt/stack/nova/nova/compute/manager.py:323}} [ 1773.859879] env[62519]: WARNING nova.compute.manager [req-394ead6c-a796-4077-9d9b-d4bf141d83ac req-2a9a4b0a-927c-4d02-8730-3656121b172d service nova] [instance: 4e7db12e-c7f7-4d2a-b797-1371fc839a9e] Received unexpected event network-vif-plugged-76d28a50-96fe-4a32-b58f-7f17862055da for instance with vm_state building and task_state spawning. [ 1773.859879] env[62519]: DEBUG nova.compute.manager [req-394ead6c-a796-4077-9d9b-d4bf141d83ac req-2a9a4b0a-927c-4d02-8730-3656121b172d service nova] [instance: 4e7db12e-c7f7-4d2a-b797-1371fc839a9e] Received event network-changed-76d28a50-96fe-4a32-b58f-7f17862055da {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1773.860025] env[62519]: DEBUG nova.compute.manager [req-394ead6c-a796-4077-9d9b-d4bf141d83ac req-2a9a4b0a-927c-4d02-8730-3656121b172d service nova] [instance: 4e7db12e-c7f7-4d2a-b797-1371fc839a9e] Refreshing instance network info cache due to event network-changed-76d28a50-96fe-4a32-b58f-7f17862055da. 
{{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11628}} [ 1773.860101] env[62519]: DEBUG oslo_concurrency.lockutils [req-394ead6c-a796-4077-9d9b-d4bf141d83ac req-2a9a4b0a-927c-4d02-8730-3656121b172d service nova] Acquiring lock "refresh_cache-4e7db12e-c7f7-4d2a-b797-1371fc839a9e" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1773.870246] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-664ac020-915d-43b3-aaa3-507984d945e1 tempest-ServersWithSpecificFlavorTestJSON-2102225660 tempest-ServersWithSpecificFlavorTestJSON-2102225660-project-member] [instance: f0925a44-c15b-4415-99bc-1b2366292fe4] Unregistered the VM {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1773.871444] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-664ac020-915d-43b3-aaa3-507984d945e1 tempest-ServersWithSpecificFlavorTestJSON-2102225660 tempest-ServersWithSpecificFlavorTestJSON-2102225660-project-member] [instance: f0925a44-c15b-4415-99bc-1b2366292fe4] Deleting contents of the VM from datastore datastore1 {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1773.871444] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-664ac020-915d-43b3-aaa3-507984d945e1 tempest-ServersWithSpecificFlavorTestJSON-2102225660 tempest-ServersWithSpecificFlavorTestJSON-2102225660-project-member] Deleting the datastore file [datastore1] f0925a44-c15b-4415-99bc-1b2366292fe4 {{(pid=62519) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1773.872583] env[62519]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-44c7edc0-a88c-4c77-9f7f-10c077ef4277 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1773.882258] env[62519]: DEBUG oslo_vmware.api [None req-664ac020-915d-43b3-aaa3-507984d945e1 tempest-ServersWithSpecificFlavorTestJSON-2102225660 tempest-ServersWithSpecificFlavorTestJSON-2102225660-project-member] Waiting for the task: (returnval){ [ 1773.882258] env[62519]: value = "task-1802939" [ 1773.882258] env[62519]: _type = "Task" [ 1773.882258] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1773.892250] env[62519]: DEBUG oslo_vmware.api [None req-664ac020-915d-43b3-aaa3-507984d945e1 tempest-ServersWithSpecificFlavorTestJSON-2102225660 tempest-ServersWithSpecificFlavorTestJSON-2102225660-project-member] Task: {'id': task-1802939, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1773.919546] env[62519]: DEBUG oslo_concurrency.lockutils [None req-8710f12d-0478-454f-8c54-66bd5d3dd37c tempest-ServerPasswordTestJSON-1830489623 tempest-ServerPasswordTestJSON-1830489623-project-member] Releasing lock "refresh_cache-4e7db12e-c7f7-4d2a-b797-1371fc839a9e" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1773.919928] env[62519]: DEBUG nova.compute.manager [None req-8710f12d-0478-454f-8c54-66bd5d3dd37c tempest-ServerPasswordTestJSON-1830489623 tempest-ServerPasswordTestJSON-1830489623-project-member] [instance: 4e7db12e-c7f7-4d2a-b797-1371fc839a9e] Instance network_info: |[{"id": "76d28a50-96fe-4a32-b58f-7f17862055da", "address": "fa:16:3e:20:70:ee", "network": {"id": "87f9dfbc-4e97-4fda-85bf-650a9b1d4370", "bridge": "br-int", "label": "tempest-ServerPasswordTestJSON-169651517-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f19ee01b59dc4df2889524f41f227f56", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cae1d6a8-cbba-4bbf-af10-ba5467340475", "external-id": "nsx-vlan-transportzone-271", "segmentation_id": 271, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap76d28a50-96", "ovs_interfaceid": "76d28a50-96fe-4a32-b58f-7f17862055da", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62519) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2004}} [ 1773.920567] env[62519]: DEBUG oslo_concurrency.lockutils [req-394ead6c-a796-4077-9d9b-d4bf141d83ac req-2a9a4b0a-927c-4d02-8730-3656121b172d service nova] Acquired lock "refresh_cache-4e7db12e-c7f7-4d2a-b797-1371fc839a9e" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1773.920701] env[62519]: DEBUG nova.network.neutron [req-394ead6c-a796-4077-9d9b-d4bf141d83ac req-2a9a4b0a-927c-4d02-8730-3656121b172d service nova] [instance: 4e7db12e-c7f7-4d2a-b797-1371fc839a9e] Refreshing network info cache for port 76d28a50-96fe-4a32-b58f-7f17862055da {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1773.922082] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-8710f12d-0478-454f-8c54-66bd5d3dd37c tempest-ServerPasswordTestJSON-1830489623 tempest-ServerPasswordTestJSON-1830489623-project-member] [instance: 4e7db12e-c7f7-4d2a-b797-1371fc839a9e] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:20:70:ee', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'cae1d6a8-cbba-4bbf-af10-ba5467340475', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '76d28a50-96fe-4a32-b58f-7f17862055da', 'vif_model': 'vmxnet3'}] {{(pid=62519) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1773.930302] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-8710f12d-0478-454f-8c54-66bd5d3dd37c tempest-ServerPasswordTestJSON-1830489623 
tempest-ServerPasswordTestJSON-1830489623-project-member] Creating folder: Project (f19ee01b59dc4df2889524f41f227f56). Parent ref: group-v373567. {{(pid=62519) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1773.936083] env[62519]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-8518942d-156c-47c6-9acd-baacc8af6cad {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1773.953574] env[62519]: INFO nova.virt.vmwareapi.vm_util [None req-8710f12d-0478-454f-8c54-66bd5d3dd37c tempest-ServerPasswordTestJSON-1830489623 tempest-ServerPasswordTestJSON-1830489623-project-member] Created folder: Project (f19ee01b59dc4df2889524f41f227f56) in parent group-v373567. [ 1773.953574] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-8710f12d-0478-454f-8c54-66bd5d3dd37c tempest-ServerPasswordTestJSON-1830489623 tempest-ServerPasswordTestJSON-1830489623-project-member] Creating folder: Instances. Parent ref: group-v373798. {{(pid=62519) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1773.953574] env[62519]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-24fbdc64-f963-4e36-aa81-fadbbf0bc873 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1773.965708] env[62519]: INFO nova.virt.vmwareapi.vm_util [None req-8710f12d-0478-454f-8c54-66bd5d3dd37c tempest-ServerPasswordTestJSON-1830489623 tempest-ServerPasswordTestJSON-1830489623-project-member] Created folder: Instances in parent group-v373798. [ 1773.965960] env[62519]: DEBUG oslo.service.loopingcall [None req-8710f12d-0478-454f-8c54-66bd5d3dd37c tempest-ServerPasswordTestJSON-1830489623 tempest-ServerPasswordTestJSON-1830489623-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62519) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1773.968860] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4e7db12e-c7f7-4d2a-b797-1371fc839a9e] Creating VM on the ESX host {{(pid=62519) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1773.969307] env[62519]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-738f4cb3-bd1b-44a4-bb98-9f788b37dfbc {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1773.991572] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c04cd993-9090-4954-8ea9-2703cdaec6cf {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1774.001876] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a76b780-4154-486a-ac93-41ced2a1cd3d {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1774.006360] env[62519]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1774.006360] env[62519]: value = "task-1802942" [ 1774.006360] env[62519]: _type = "Task" [ 1774.006360] env[62519]: } to complete. 
{{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1774.044189] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17aba37c-2a45-415f-92cd-b03d761a7eac {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1774.051171] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1802942, 'name': CreateVM_Task} progress is 15%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1774.057464] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-509cfcf5-d8ff-46ff-b5d6-c93008134a51 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1774.077206] env[62519]: DEBUG nova.compute.provider_tree [None req-f01efc4a-1a23-4e62-a702-1f856670c4f6 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Inventory has not changed in ProviderTree for provider: f8ca0d98-9158-4b85-ae0e-b106f966dd44 {{(pid=62519) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1774.082177] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-42d8ba5a-d3d3-4638-a7a7-fe3fd6d26413 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] [instance: 2ea8304e-5b91-4908-a876-6e2c780b1da9] VM already powered off {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1774.082412] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-42d8ba5a-d3d3-4638-a7a7-fe3fd6d26413 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] [instance: 2ea8304e-5b91-4908-a876-6e2c780b1da9] Processing image 15793716-f1d9-4a86-9030-717adf498693 {{(pid=62519) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1774.082633] env[62519]: DEBUG oslo_concurrency.lockutils [None req-42d8ba5a-d3d3-4638-a7a7-fe3fd6d26413 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1774.229882] env[62519]: DEBUG oslo_vmware.api [None req-087464bc-7aa6-42f5-9e15-0aed3a0c7295 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Task: {'id': task-1802938, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1774.255376] env[62519]: DEBUG oslo_vmware.api [None req-0ec5e37a-37cf-41c7-9c93-a04efad8446e tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52d8ebc2-41ab-84cc-6902-fab6cf2cd9cf, 'name': SearchDatastore_Task, 'duration_secs': 0.011693} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1774.257208] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1ed0ca92-114c-47e7-8700-e1f65f53b5c1 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1774.264680] env[62519]: DEBUG oslo_vmware.api [None req-0ec5e37a-37cf-41c7-9c93-a04efad8446e tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Waiting for the task: (returnval){ [ 1774.264680] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]52e2be27-64ac-51b9-57bb-f8d2646d9465" [ 1774.264680] env[62519]: _type = "Task" [ 1774.264680] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1774.276166] env[62519]: DEBUG oslo_vmware.api [None req-0ec5e37a-37cf-41c7-9c93-a04efad8446e tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52e2be27-64ac-51b9-57bb-f8d2646d9465, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1774.322105] env[62519]: DEBUG nova.network.neutron [req-394ead6c-a796-4077-9d9b-d4bf141d83ac req-2a9a4b0a-927c-4d02-8730-3656121b172d service nova] [instance: 4e7db12e-c7f7-4d2a-b797-1371fc839a9e] Updated VIF entry in instance network info cache for port 76d28a50-96fe-4a32-b58f-7f17862055da. {{(pid=62519) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1774.322561] env[62519]: DEBUG nova.network.neutron [req-394ead6c-a796-4077-9d9b-d4bf141d83ac req-2a9a4b0a-927c-4d02-8730-3656121b172d service nova] [instance: 4e7db12e-c7f7-4d2a-b797-1371fc839a9e] Updating instance_info_cache with network_info: [{"id": "76d28a50-96fe-4a32-b58f-7f17862055da", "address": "fa:16:3e:20:70:ee", "network": {"id": "87f9dfbc-4e97-4fda-85bf-650a9b1d4370", "bridge": "br-int", "label": "tempest-ServerPasswordTestJSON-169651517-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f19ee01b59dc4df2889524f41f227f56", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cae1d6a8-cbba-4bbf-af10-ba5467340475", "external-id": "nsx-vlan-transportzone-271", "segmentation_id": 271, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap76d28a50-96", "ovs_interfaceid": "76d28a50-96fe-4a32-b58f-7f17862055da", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1774.395741] env[62519]: DEBUG oslo_vmware.api [None req-664ac020-915d-43b3-aaa3-507984d945e1 tempest-ServersWithSpecificFlavorTestJSON-2102225660 tempest-ServersWithSpecificFlavorTestJSON-2102225660-project-member] Task: {'id': task-1802939, 'name': DeleteDatastoreFile_Task} progress is 
0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1774.516822] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1802942, 'name': CreateVM_Task} progress is 99%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1774.582691] env[62519]: DEBUG nova.scheduler.client.report [None req-f01efc4a-1a23-4e62-a702-1f856670c4f6 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Inventory has not changed for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1774.730865] env[62519]: DEBUG oslo_vmware.api [None req-087464bc-7aa6-42f5-9e15-0aed3a0c7295 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Task: {'id': task-1802938, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.680325} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1774.731182] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-087464bc-7aa6-42f5-9e15-0aed3a0c7295 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk to [datastore1] 88f9351c-253b-49dd-a88e-b8585ea742ac/88f9351c-253b-49dd-a88e-b8585ea742ac.vmdk {{(pid=62519) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1774.731379] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-087464bc-7aa6-42f5-9e15-0aed3a0c7295 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] [instance: 88f9351c-253b-49dd-a88e-b8585ea742ac] Extending root virtual disk to 1048576 {{(pid=62519) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1774.731640] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-cdbd2ab5-4c01-4d65-975e-9891387c1ae7 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1774.739976] env[62519]: DEBUG oslo_vmware.api [None req-087464bc-7aa6-42f5-9e15-0aed3a0c7295 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Waiting for the task: (returnval){ [ 1774.739976] env[62519]: value = "task-1802943" [ 1774.739976] env[62519]: _type = "Task" [ 1774.739976] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1774.753345] env[62519]: DEBUG oslo_vmware.api [None req-087464bc-7aa6-42f5-9e15-0aed3a0c7295 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Task: {'id': task-1802943, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1774.778982] env[62519]: DEBUG oslo_vmware.api [None req-0ec5e37a-37cf-41c7-9c93-a04efad8446e tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52e2be27-64ac-51b9-57bb-f8d2646d9465, 'name': SearchDatastore_Task, 'duration_secs': 0.060022} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1774.779275] env[62519]: DEBUG oslo_concurrency.lockutils [None req-0ec5e37a-37cf-41c7-9c93-a04efad8446e tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Releasing lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1774.779535] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-0ec5e37a-37cf-41c7-9c93-a04efad8446e tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk to [datastore1] 4a0f7975-5a07-4593-ae71-cabebdefe0fe/4a0f7975-5a07-4593-ae71-cabebdefe0fe.vmdk {{(pid=62519) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1774.779817] env[62519]: DEBUG oslo_concurrency.lockutils [None req-42d8ba5a-d3d3-4638-a7a7-fe3fd6d26413 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Acquired lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1774.780007] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-42d8ba5a-d3d3-4638-a7a7-fe3fd6d26413 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62519) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1774.780471] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c5b24f95-7d35-4a01-90d9-260e9609f21f {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1774.782309] env[62519]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c69a0b0e-6993-4de5-8acf-ed71d036d777 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1774.792046] env[62519]: DEBUG oslo_vmware.api [None req-0ec5e37a-37cf-41c7-9c93-a04efad8446e tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Waiting for the task: (returnval){ [ 1774.792046] env[62519]: value = "task-1802944" [ 1774.792046] env[62519]: _type = "Task" [ 1774.792046] env[62519]: } to complete. 
{{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1774.801352] env[62519]: DEBUG oslo_vmware.api [None req-0ec5e37a-37cf-41c7-9c93-a04efad8446e tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Task: {'id': task-1802944, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1774.806378] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-42d8ba5a-d3d3-4638-a7a7-fe3fd6d26413 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62519) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1774.806589] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-42d8ba5a-d3d3-4638-a7a7-fe3fd6d26413 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62519) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1774.807427] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-97da106d-f5b9-4df2-b399-1616404143b0 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1774.813298] env[62519]: DEBUG oslo_vmware.api [None req-42d8ba5a-d3d3-4638-a7a7-fe3fd6d26413 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Waiting for the task: (returnval){ [ 1774.813298] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]523303fc-b2ef-6849-9881-4142e524daeb" [ 1774.813298] env[62519]: _type = "Task" [ 1774.813298] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1774.823136] env[62519]: DEBUG oslo_vmware.api [None req-42d8ba5a-d3d3-4638-a7a7-fe3fd6d26413 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]523303fc-b2ef-6849-9881-4142e524daeb, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1774.825764] env[62519]: DEBUG oslo_concurrency.lockutils [req-394ead6c-a796-4077-9d9b-d4bf141d83ac req-2a9a4b0a-927c-4d02-8730-3656121b172d service nova] Releasing lock "refresh_cache-4e7db12e-c7f7-4d2a-b797-1371fc839a9e" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1774.894052] env[62519]: DEBUG oslo_vmware.api [None req-664ac020-915d-43b3-aaa3-507984d945e1 tempest-ServersWithSpecificFlavorTestJSON-2102225660 tempest-ServersWithSpecificFlavorTestJSON-2102225660-project-member] Task: {'id': task-1802939, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.588849} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1774.894385] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-664ac020-915d-43b3-aaa3-507984d945e1 tempest-ServersWithSpecificFlavorTestJSON-2102225660 tempest-ServersWithSpecificFlavorTestJSON-2102225660-project-member] Deleted the datastore file {{(pid=62519) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1774.894602] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-664ac020-915d-43b3-aaa3-507984d945e1 tempest-ServersWithSpecificFlavorTestJSON-2102225660 tempest-ServersWithSpecificFlavorTestJSON-2102225660-project-member] [instance: f0925a44-c15b-4415-99bc-1b2366292fe4] Deleted contents of the VM from datastore datastore1 {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1774.894789] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-664ac020-915d-43b3-aaa3-507984d945e1 tempest-ServersWithSpecificFlavorTestJSON-2102225660 tempest-ServersWithSpecificFlavorTestJSON-2102225660-project-member] [instance: f0925a44-c15b-4415-99bc-1b2366292fe4] Instance destroyed {{(pid=62519) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1774.894963] env[62519]: INFO nova.compute.manager [None req-664ac020-915d-43b3-aaa3-507984d945e1 tempest-ServersWithSpecificFlavorTestJSON-2102225660 tempest-ServersWithSpecificFlavorTestJSON-2102225660-project-member] [instance: f0925a44-c15b-4415-99bc-1b2366292fe4] Took 1.89 seconds to destroy the instance on the hypervisor. [ 1774.895216] env[62519]: DEBUG oslo.service.loopingcall [None req-664ac020-915d-43b3-aaa3-507984d945e1 tempest-ServersWithSpecificFlavorTestJSON-2102225660 tempest-ServersWithSpecificFlavorTestJSON-2102225660-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62519) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1774.895407] env[62519]: DEBUG nova.compute.manager [-] [instance: f0925a44-c15b-4415-99bc-1b2366292fe4] Deallocating network for instance {{(pid=62519) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2297}} [ 1774.895503] env[62519]: DEBUG nova.network.neutron [-] [instance: f0925a44-c15b-4415-99bc-1b2366292fe4] deallocate_for_instance() {{(pid=62519) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1775.020502] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1802942, 'name': CreateVM_Task, 'duration_secs': 0.583079} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1775.020704] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4e7db12e-c7f7-4d2a-b797-1371fc839a9e] Created VM on the ESX host {{(pid=62519) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1775.021546] env[62519]: DEBUG oslo_concurrency.lockutils [None req-8710f12d-0478-454f-8c54-66bd5d3dd37c tempest-ServerPasswordTestJSON-1830489623 tempest-ServerPasswordTestJSON-1830489623-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1775.021738] env[62519]: DEBUG oslo_concurrency.lockutils [None req-8710f12d-0478-454f-8c54-66bd5d3dd37c tempest-ServerPasswordTestJSON-1830489623 tempest-ServerPasswordTestJSON-1830489623-project-member] Acquired lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1775.022077] env[62519]: DEBUG oslo_concurrency.lockutils [None req-8710f12d-0478-454f-8c54-66bd5d3dd37c tempest-ServerPasswordTestJSON-1830489623 tempest-ServerPasswordTestJSON-1830489623-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1775.022335] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-394b3bd3-d9fa-4a49-addc-9cda933ac28d {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1775.028238] env[62519]: DEBUG oslo_vmware.api [None req-8710f12d-0478-454f-8c54-66bd5d3dd37c tempest-ServerPasswordTestJSON-1830489623 tempest-ServerPasswordTestJSON-1830489623-project-member] Waiting for the task: (returnval){ [ 1775.028238] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]521d0a3c-656c-7a61-ed38-e80bdd3ed4a0" [ 1775.028238] env[62519]: _type = "Task" [ 1775.028238] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1775.040312] env[62519]: DEBUG oslo_vmware.api [None req-8710f12d-0478-454f-8c54-66bd5d3dd37c tempest-ServerPasswordTestJSON-1830489623 tempest-ServerPasswordTestJSON-1830489623-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]521d0a3c-656c-7a61-ed38-e80bdd3ed4a0, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1775.087028] env[62519]: DEBUG oslo_concurrency.lockutils [None req-f01efc4a-1a23-4e62-a702-1f856670c4f6 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.958s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1775.091242] env[62519]: DEBUG nova.compute.manager [None req-d15fc234-4631-453a-b364-87a19d8e4ec4 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] [instance: 5da884af-d8d2-409b-99bd-e5370e44e9f0] Stashing vm_state: active {{(pid=62519) _prep_resize /opt/stack/nova/nova/compute/manager.py:6081}} [ 1775.095253] env[62519]: DEBUG oslo_concurrency.lockutils [None req-de6056f6-9d9f-42ab-913c-cfc39f5eba39 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 9.515s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1775.098023] env[62519]: DEBUG nova.objects.instance [None req-de6056f6-9d9f-42ab-913c-cfc39f5eba39 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] Lazy-loading 'resources' on Instance uuid 76786353-f93f-4e7e-b3f7-7f22ae4b7b41 {{(pid=62519) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1775.124036] env[62519]: INFO nova.scheduler.client.report [None req-f01efc4a-1a23-4e62-a702-1f856670c4f6 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Deleted allocations for instance dd60afd6-2834-4fca-a846-e39d57aabd60 [ 1775.245450] env[62519]: DEBUG oslo_concurrency.lockutils [None req-caa9ca9c-57be-478a-8782-107ca9749996 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Lock "5da884af-d8d2-409b-99bd-e5370e44e9f0" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 36.898s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1775.251729] env[62519]: DEBUG oslo_vmware.api [None req-087464bc-7aa6-42f5-9e15-0aed3a0c7295 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Task: {'id': task-1802943, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.234469} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1775.252012] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-087464bc-7aa6-42f5-9e15-0aed3a0c7295 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] [instance: 88f9351c-253b-49dd-a88e-b8585ea742ac] Extended root virtual disk {{(pid=62519) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1775.253605] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f6dfb3f-78ef-4525-9752-9bf4f2b805d1 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1775.284208] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-087464bc-7aa6-42f5-9e15-0aed3a0c7295 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] [instance: 88f9351c-253b-49dd-a88e-b8585ea742ac] Reconfiguring VM instance instance-00000047 to attach disk [datastore1] 88f9351c-253b-49dd-a88e-b8585ea742ac/88f9351c-253b-49dd-a88e-b8585ea742ac.vmdk or device None with type sparse {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1775.284546] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f38cffd4-730b-4f3e-a5bc-d5e2a9c82cd2 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1775.313344] env[62519]: DEBUG oslo_vmware.api [None req-0ec5e37a-37cf-41c7-9c93-a04efad8446e tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Task: {'id': task-1802944, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1775.316083] env[62519]: DEBUG oslo_vmware.api [None req-087464bc-7aa6-42f5-9e15-0aed3a0c7295 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Waiting for the task: (returnval){ [ 1775.316083] env[62519]: value = "task-1802945" [ 1775.316083] env[62519]: _type = "Task" [ 1775.316083] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1775.335052] env[62519]: DEBUG oslo_vmware.api [None req-42d8ba5a-d3d3-4638-a7a7-fe3fd6d26413 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]523303fc-b2ef-6849-9881-4142e524daeb, 'name': SearchDatastore_Task, 'duration_secs': 0.01321} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1775.335319] env[62519]: DEBUG oslo_vmware.api [None req-087464bc-7aa6-42f5-9e15-0aed3a0c7295 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Task: {'id': task-1802945, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1775.336432] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d7a17a6d-662e-4ae0-bad5-2c5d61f50fe9 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1775.344509] env[62519]: DEBUG oslo_vmware.api [None req-42d8ba5a-d3d3-4638-a7a7-fe3fd6d26413 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Waiting for the task: (returnval){ [ 1775.344509] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]52ad8a4f-2567-fdb8-44e6-ab5289604eb8" [ 1775.344509] env[62519]: _type = "Task" [ 1775.344509] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1775.359024] env[62519]: DEBUG oslo_vmware.api [None req-42d8ba5a-d3d3-4638-a7a7-fe3fd6d26413 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52ad8a4f-2567-fdb8-44e6-ab5289604eb8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1775.540302] env[62519]: DEBUG oslo_vmware.api [None req-8710f12d-0478-454f-8c54-66bd5d3dd37c tempest-ServerPasswordTestJSON-1830489623 tempest-ServerPasswordTestJSON-1830489623-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]521d0a3c-656c-7a61-ed38-e80bdd3ed4a0, 'name': SearchDatastore_Task, 'duration_secs': 0.100091} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1775.540597] env[62519]: DEBUG oslo_concurrency.lockutils [None req-8710f12d-0478-454f-8c54-66bd5d3dd37c tempest-ServerPasswordTestJSON-1830489623 tempest-ServerPasswordTestJSON-1830489623-project-member] Releasing lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1775.540890] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-8710f12d-0478-454f-8c54-66bd5d3dd37c tempest-ServerPasswordTestJSON-1830489623 tempest-ServerPasswordTestJSON-1830489623-project-member] [instance: 4e7db12e-c7f7-4d2a-b797-1371fc839a9e] Processing image 15793716-f1d9-4a86-9030-717adf498693 {{(pid=62519) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1775.541152] env[62519]: DEBUG oslo_concurrency.lockutils [None req-8710f12d-0478-454f-8c54-66bd5d3dd37c tempest-ServerPasswordTestJSON-1830489623 tempest-ServerPasswordTestJSON-1830489623-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1775.618399] env[62519]: DEBUG oslo_concurrency.lockutils [None req-d15fc234-4631-453a-b364-87a19d8e4ec4 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1775.636978] env[62519]: DEBUG 
oslo_concurrency.lockutils [None req-f01efc4a-1a23-4e62-a702-1f856670c4f6 tempest-ImagesTestJSON-1541855264 tempest-ImagesTestJSON-1541855264-project-member] Lock "dd60afd6-2834-4fca-a846-e39d57aabd60" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 16.170s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1775.815265] env[62519]: DEBUG oslo_vmware.api [None req-0ec5e37a-37cf-41c7-9c93-a04efad8446e tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Task: {'id': task-1802944, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.93103} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1775.815599] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-0ec5e37a-37cf-41c7-9c93-a04efad8446e tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk to [datastore1] 4a0f7975-5a07-4593-ae71-cabebdefe0fe/4a0f7975-5a07-4593-ae71-cabebdefe0fe.vmdk {{(pid=62519) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1775.815742] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-0ec5e37a-37cf-41c7-9c93-a04efad8446e tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] [instance: 4a0f7975-5a07-4593-ae71-cabebdefe0fe] Extending root virtual disk to 1048576 {{(pid=62519) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1775.815986] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-03a393a2-8bcd-4ee5-9c0e-268c0b23a6dd {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1775.827801] env[62519]: DEBUG oslo_vmware.api [None req-0ec5e37a-37cf-41c7-9c93-a04efad8446e tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Waiting for the task: (returnval){ [ 1775.827801] env[62519]: value = "task-1802946" [ 1775.827801] env[62519]: _type = "Task" [ 1775.827801] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1775.837663] env[62519]: DEBUG oslo_vmware.api [None req-087464bc-7aa6-42f5-9e15-0aed3a0c7295 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Task: {'id': task-1802945, 'name': ReconfigVM_Task, 'duration_secs': 0.480886} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1775.838602] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-087464bc-7aa6-42f5-9e15-0aed3a0c7295 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] [instance: 88f9351c-253b-49dd-a88e-b8585ea742ac] Reconfigured VM instance instance-00000047 to attach disk [datastore1] 88f9351c-253b-49dd-a88e-b8585ea742ac/88f9351c-253b-49dd-a88e-b8585ea742ac.vmdk or device None with type sparse {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1775.840201] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-087464bc-7aa6-42f5-9e15-0aed3a0c7295 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] [instance: 88f9351c-253b-49dd-a88e-b8585ea742ac] Block device information present: {'root_device_name': '/dev/sda', 'image': [{'size': 0, 'device_name': '/dev/sda', 'encryption_secret_uuid': None, 'guest_format': None, 'boot_index': 0, 'encryption_format': None, 'encrypted': False, 'encryption_options': None, 'device_type': 'disk', 'disk_bus': None, 'image_id': '15793716-f1d9-4a86-9030-717adf498693'}], 'ephemerals': [], 'block_device_mapping': [{'connection_info': {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-373788', 'volume_id': '5bd7c4a6-d552-4040-aefb-7d5577b89149', 'name': 'volume-5bd7c4a6-d552-4040-aefb-7d5577b89149', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '88f9351c-253b-49dd-a88e-b8585ea742ac', 'attached_at': '', 'detached_at': '', 'volume_id': '5bd7c4a6-d552-4040-aefb-7d5577b89149', 'serial': '5bd7c4a6-d552-4040-aefb-7d5577b89149'}, 'guest_format': None, 'boot_index': None, 'delete_on_termination': False, 'device_type': None, 'disk_bus': None, 'mount_device': '/dev/sdb', 'attachment_id': '4a3a274a-374f-454a-8765-be71583331a1', 'volume_type': None}], 'swap': None} {{(pid=62519) spawn /opt/stack/nova/nova/virt/vmwareapi/vmops.py:799}} [ 1775.840439] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-087464bc-7aa6-42f5-9e15-0aed3a0c7295 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] [instance: 88f9351c-253b-49dd-a88e-b8585ea742ac] Volume attach. 
Driver type: vmdk {{(pid=62519) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1775.840659] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-087464bc-7aa6-42f5-9e15-0aed3a0c7295 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] [instance: 88f9351c-253b-49dd-a88e-b8585ea742ac] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-373788', 'volume_id': '5bd7c4a6-d552-4040-aefb-7d5577b89149', 'name': 'volume-5bd7c4a6-d552-4040-aefb-7d5577b89149', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '88f9351c-253b-49dd-a88e-b8585ea742ac', 'attached_at': '', 'detached_at': '', 'volume_id': '5bd7c4a6-d552-4040-aefb-7d5577b89149', 'serial': '5bd7c4a6-d552-4040-aefb-7d5577b89149'} {{(pid=62519) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1775.844456] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2fe04a3e-ab8a-4732-9885-a3825d5e6b76 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1775.847149] env[62519]: DEBUG oslo_vmware.api [None req-0ec5e37a-37cf-41c7-9c93-a04efad8446e tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Task: {'id': task-1802946, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1775.873173] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d8d6671-8d98-4b93-9911-b61b225de085 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1775.880124] env[62519]: DEBUG oslo_vmware.api [None req-42d8ba5a-d3d3-4638-a7a7-fe3fd6d26413 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52ad8a4f-2567-fdb8-44e6-ab5289604eb8, 'name': SearchDatastore_Task, 'duration_secs': 0.01253} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1775.880836] env[62519]: DEBUG oslo_concurrency.lockutils [None req-42d8ba5a-d3d3-4638-a7a7-fe3fd6d26413 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Releasing lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1775.881124] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-42d8ba5a-d3d3-4638-a7a7-fe3fd6d26413 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Copying virtual disk from [datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk to [datastore1] 2ea8304e-5b91-4908-a876-6e2c780b1da9/15793716-f1d9-4a86-9030-717adf498693-rescue.vmdk. 
{{(pid=62519) disk_copy /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:281}} [ 1775.881433] env[62519]: DEBUG oslo_concurrency.lockutils [None req-8710f12d-0478-454f-8c54-66bd5d3dd37c tempest-ServerPasswordTestJSON-1830489623 tempest-ServerPasswordTestJSON-1830489623-project-member] Acquired lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1775.881672] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-8710f12d-0478-454f-8c54-66bd5d3dd37c tempest-ServerPasswordTestJSON-1830489623 tempest-ServerPasswordTestJSON-1830489623-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62519) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1775.881898] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a7512547-9caf-4231-b883-9e7920ef8b65 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1775.897253] env[62519]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-6d6bb7fc-9055-4866-a9d8-f2ca2389524b {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1775.907567] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-087464bc-7aa6-42f5-9e15-0aed3a0c7295 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] [instance: 88f9351c-253b-49dd-a88e-b8585ea742ac] Reconfiguring VM instance instance-00000047 to attach disk [datastore1] volume-5bd7c4a6-d552-4040-aefb-7d5577b89149/volume-5bd7c4a6-d552-4040-aefb-7d5577b89149.vmdk or device None with type thin {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1775.910981] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-48754120-02ce-40f2-969d-d4855bbe6c1f {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1775.926873] env[62519]: DEBUG oslo_vmware.api [None req-42d8ba5a-d3d3-4638-a7a7-fe3fd6d26413 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Waiting for the task: (returnval){ [ 1775.926873] env[62519]: value = "task-1802947" [ 1775.926873] env[62519]: _type = "Task" [ 1775.926873] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1775.933197] env[62519]: DEBUG oslo_vmware.api [None req-087464bc-7aa6-42f5-9e15-0aed3a0c7295 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Waiting for the task: (returnval){ [ 1775.933197] env[62519]: value = "task-1802948" [ 1775.933197] env[62519]: _type = "Task" [ 1775.933197] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1775.936730] env[62519]: DEBUG oslo_vmware.api [None req-42d8ba5a-d3d3-4638-a7a7-fe3fd6d26413 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Task: {'id': task-1802947, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1775.941211] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-8710f12d-0478-454f-8c54-66bd5d3dd37c tempest-ServerPasswordTestJSON-1830489623 tempest-ServerPasswordTestJSON-1830489623-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62519) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1775.941412] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-8710f12d-0478-454f-8c54-66bd5d3dd37c tempest-ServerPasswordTestJSON-1830489623 tempest-ServerPasswordTestJSON-1830489623-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62519) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1775.945397] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7605547f-f272-4208-a31e-040725f589f9 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1775.955022] env[62519]: DEBUG oslo_vmware.api [None req-087464bc-7aa6-42f5-9e15-0aed3a0c7295 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Task: {'id': task-1802948, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1775.956157] env[62519]: DEBUG oslo_vmware.api [None req-8710f12d-0478-454f-8c54-66bd5d3dd37c tempest-ServerPasswordTestJSON-1830489623 tempest-ServerPasswordTestJSON-1830489623-project-member] Waiting for the task: (returnval){ [ 1775.956157] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]521fbe23-3619-6f63-4e8f-6610813c1fa2" [ 1775.956157] env[62519]: _type = "Task" [ 1775.956157] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1775.965771] env[62519]: DEBUG oslo_vmware.api [None req-8710f12d-0478-454f-8c54-66bd5d3dd37c tempest-ServerPasswordTestJSON-1830489623 tempest-ServerPasswordTestJSON-1830489623-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]521fbe23-3619-6f63-4e8f-6610813c1fa2, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1775.971432] env[62519]: DEBUG nova.network.neutron [-] [instance: f0925a44-c15b-4415-99bc-1b2366292fe4] Updating instance_info_cache with network_info: [] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1776.007493] env[62519]: DEBUG nova.compute.manager [req-1fe6e65f-e72b-49fd-84cb-de299c8c7b1d req-10ef5193-63f5-4893-960b-292ca94ac456 service nova] [instance: f0925a44-c15b-4415-99bc-1b2366292fe4] Received event network-vif-deleted-4406b619-4b6f-453e-a6e6-7f9f83d1b8ff {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1776.049687] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89e8c018-e3ee-4412-ab3a-6592a249c737 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1776.058869] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1659c2c-05e6-4e62-8ef3-c637210c8372 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1776.093322] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6ba4cb9-f72c-47e0-8211-f54c04c53c53 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1776.101821] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cab6a0f6-30fe-442d-8dc0-17bb8a910d59 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1776.117261] env[62519]: DEBUG nova.compute.provider_tree [None req-de6056f6-9d9f-42ab-913c-cfc39f5eba39 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] Inventory has not changed in ProviderTree for provider: f8ca0d98-9158-4b85-ae0e-b106f966dd44 {{(pid=62519) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1776.343121] env[62519]: DEBUG oslo_vmware.api [None req-0ec5e37a-37cf-41c7-9c93-a04efad8446e tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Task: {'id': task-1802946, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.073217} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1776.343121] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-0ec5e37a-37cf-41c7-9c93-a04efad8446e tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] [instance: 4a0f7975-5a07-4593-ae71-cabebdefe0fe] Extended root virtual disk {{(pid=62519) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1776.343924] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1fd58e14-ee06-4932-bab8-d408c8411bf5 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1776.378975] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-0ec5e37a-37cf-41c7-9c93-a04efad8446e tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] [instance: 4a0f7975-5a07-4593-ae71-cabebdefe0fe] Reconfiguring VM instance instance-00000056 to attach disk [datastore1] 4a0f7975-5a07-4593-ae71-cabebdefe0fe/4a0f7975-5a07-4593-ae71-cabebdefe0fe.vmdk or device None with type sparse {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1776.379405] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d0a51560-a5f3-4443-bc9e-68a271695463 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1776.403313] env[62519]: DEBUG oslo_vmware.api [None req-0ec5e37a-37cf-41c7-9c93-a04efad8446e tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Waiting for the task: (returnval){ [ 1776.403313] env[62519]: value = "task-1802949" [ 1776.403313] env[62519]: _type = "Task" [ 1776.403313] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1776.414077] env[62519]: DEBUG oslo_vmware.api [None req-0ec5e37a-37cf-41c7-9c93-a04efad8446e tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Task: {'id': task-1802949, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1776.437102] env[62519]: DEBUG oslo_vmware.api [None req-42d8ba5a-d3d3-4638-a7a7-fe3fd6d26413 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Task: {'id': task-1802947, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1776.446314] env[62519]: DEBUG oslo_vmware.api [None req-087464bc-7aa6-42f5-9e15-0aed3a0c7295 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Task: {'id': task-1802948, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1776.470955] env[62519]: DEBUG oslo_vmware.api [None req-8710f12d-0478-454f-8c54-66bd5d3dd37c tempest-ServerPasswordTestJSON-1830489623 tempest-ServerPasswordTestJSON-1830489623-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]521fbe23-3619-6f63-4e8f-6610813c1fa2, 'name': SearchDatastore_Task, 'duration_secs': 0.012431} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1776.471797] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2c3cdf59-9761-467f-85ab-3f7bb8f252b0 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1776.474559] env[62519]: INFO nova.compute.manager [-] [instance: f0925a44-c15b-4415-99bc-1b2366292fe4] Took 1.58 seconds to deallocate network for instance. [ 1776.481674] env[62519]: DEBUG oslo_vmware.api [None req-8710f12d-0478-454f-8c54-66bd5d3dd37c tempest-ServerPasswordTestJSON-1830489623 tempest-ServerPasswordTestJSON-1830489623-project-member] Waiting for the task: (returnval){ [ 1776.481674] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]52a45e11-9850-a3da-8e31-784fb95e6c69" [ 1776.481674] env[62519]: _type = "Task" [ 1776.481674] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1776.492098] env[62519]: DEBUG oslo_vmware.api [None req-8710f12d-0478-454f-8c54-66bd5d3dd37c tempest-ServerPasswordTestJSON-1830489623 tempest-ServerPasswordTestJSON-1830489623-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52a45e11-9850-a3da-8e31-784fb95e6c69, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1776.620916] env[62519]: DEBUG nova.scheduler.client.report [None req-de6056f6-9d9f-42ab-913c-cfc39f5eba39 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] Inventory has not changed for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1776.917523] env[62519]: DEBUG oslo_vmware.api [None req-0ec5e37a-37cf-41c7-9c93-a04efad8446e tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Task: {'id': task-1802949, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1776.942543] env[62519]: DEBUG oslo_vmware.api [None req-42d8ba5a-d3d3-4638-a7a7-fe3fd6d26413 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Task: {'id': task-1802947, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.536004} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1776.947089] env[62519]: INFO nova.virt.vmwareapi.ds_util [None req-42d8ba5a-d3d3-4638-a7a7-fe3fd6d26413 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Copied virtual disk from [datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk to [datastore1] 2ea8304e-5b91-4908-a876-6e2c780b1da9/15793716-f1d9-4a86-9030-717adf498693-rescue.vmdk. 
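The "Waiting for the task ... to complete", "progress is N%", and "completed successfully" records that dominate this stretch of the log all come from one pattern: a vSphere task (CopyVirtualDisk_Task, ExtendVirtualDisk_Task, ReconfigVM_Task, Rename_Task, PowerOnVM_Task, ...) is started, and the caller then polls the task's state until it succeeds or errors, logging progress on each poll. The sketch below is only a minimal illustration of that poll loop, assuming a caller-supplied accessor for the task state; it is not oslo.vmware's actual implementation, and the names in it (get_task_info, TaskFailed, the wait_for_task signature) are hypothetical.

    # Minimal sketch of the submit-then-poll pattern visible in the log.
    # Hypothetical names throughout; not oslo.vmware's real code.
    import logging
    import time

    LOG = logging.getLogger(__name__)


    class TaskFailed(Exception):
        """Raised when a polled task finishes in the error state."""


    def wait_for_task(get_task_info, poll_interval=0.5, timeout=300.0):
        """Poll a vSphere-style task until it completes.

        get_task_info is a caller-supplied callable returning an object
        with .name, .state ('running', 'queued', 'success' or 'error'),
        .progress and .error attributes -- a stand-in for reading the
        Task managed object via the property-retrieval calls in the log.
        """
        deadline = time.monotonic() + timeout
        while time.monotonic() < deadline:
            info = get_task_info()
            if info.state == 'success':
                LOG.debug("Task %s completed successfully.", info.name)
                return info
            if info.state == 'error':
                raise TaskFailed(info.error)
            LOG.debug("Task %s progress is %s%%.", info.name, info.progress)
            time.sleep(poll_interval)
        raise TaskFailed("task did not complete within %ss" % timeout)

Against a real vCenter the per-poll read would go through the API session (the repeated PropertyCollector.RetrievePropertiesEx invocations in the log); the callable here just keeps the sketch self-contained.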
[ 1776.947089] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9247b198-5fbd-4986-a422-30288b27d547 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1776.955280] env[62519]: DEBUG oslo_vmware.api [None req-087464bc-7aa6-42f5-9e15-0aed3a0c7295 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Task: {'id': task-1802948, 'name': ReconfigVM_Task, 'duration_secs': 0.938823} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1776.968531] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-087464bc-7aa6-42f5-9e15-0aed3a0c7295 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] [instance: 88f9351c-253b-49dd-a88e-b8585ea742ac] Reconfigured VM instance instance-00000047 to attach disk [datastore1] volume-5bd7c4a6-d552-4040-aefb-7d5577b89149/volume-5bd7c4a6-d552-4040-aefb-7d5577b89149.vmdk or device None with type thin {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1776.981812] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-42d8ba5a-d3d3-4638-a7a7-fe3fd6d26413 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] [instance: 2ea8304e-5b91-4908-a876-6e2c780b1da9] Reconfiguring VM instance instance-00000054 to attach disk [datastore1] 2ea8304e-5b91-4908-a876-6e2c780b1da9/15793716-f1d9-4a86-9030-717adf498693-rescue.vmdk or device None with type thin {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1776.982149] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-cbb8de41-8ed7-4a9f-9670-1b8b6a89618b {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1776.993529] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c8ead12a-d96b-4e8d-b829-c49f596b4149 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1777.008709] env[62519]: DEBUG oslo_concurrency.lockutils [None req-664ac020-915d-43b3-aaa3-507984d945e1 tempest-ServersWithSpecificFlavorTestJSON-2102225660 tempest-ServersWithSpecificFlavorTestJSON-2102225660-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1777.021263] env[62519]: DEBUG oslo_vmware.api [None req-8710f12d-0478-454f-8c54-66bd5d3dd37c tempest-ServerPasswordTestJSON-1830489623 tempest-ServerPasswordTestJSON-1830489623-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52a45e11-9850-a3da-8e31-784fb95e6c69, 'name': SearchDatastore_Task, 'duration_secs': 0.029644} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1777.023981] env[62519]: DEBUG oslo_concurrency.lockutils [None req-8710f12d-0478-454f-8c54-66bd5d3dd37c tempest-ServerPasswordTestJSON-1830489623 tempest-ServerPasswordTestJSON-1830489623-project-member] Releasing lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1777.023981] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-8710f12d-0478-454f-8c54-66bd5d3dd37c tempest-ServerPasswordTestJSON-1830489623 tempest-ServerPasswordTestJSON-1830489623-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk to [datastore1] 4e7db12e-c7f7-4d2a-b797-1371fc839a9e/4e7db12e-c7f7-4d2a-b797-1371fc839a9e.vmdk {{(pid=62519) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1777.024227] env[62519]: DEBUG oslo_vmware.api [None req-42d8ba5a-d3d3-4638-a7a7-fe3fd6d26413 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Waiting for the task: (returnval){ [ 1777.024227] env[62519]: value = "task-1802951" [ 1777.024227] env[62519]: _type = "Task" [ 1777.024227] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1777.024487] env[62519]: DEBUG oslo_vmware.api [None req-087464bc-7aa6-42f5-9e15-0aed3a0c7295 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Waiting for the task: (returnval){ [ 1777.024487] env[62519]: value = "task-1802950" [ 1777.024487] env[62519]: _type = "Task" [ 1777.024487] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1777.024737] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-53d6d790-7b92-4c34-b6a8-0393661c71f9 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1777.039026] env[62519]: DEBUG oslo_vmware.api [None req-8710f12d-0478-454f-8c54-66bd5d3dd37c tempest-ServerPasswordTestJSON-1830489623 tempest-ServerPasswordTestJSON-1830489623-project-member] Waiting for the task: (returnval){ [ 1777.039026] env[62519]: value = "task-1802952" [ 1777.039026] env[62519]: _type = "Task" [ 1777.039026] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1777.045070] env[62519]: DEBUG oslo_vmware.api [None req-42d8ba5a-d3d3-4638-a7a7-fe3fd6d26413 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Task: {'id': task-1802951, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1777.045550] env[62519]: DEBUG oslo_vmware.api [None req-087464bc-7aa6-42f5-9e15-0aed3a0c7295 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Task: {'id': task-1802950, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1777.055502] env[62519]: DEBUG oslo_vmware.api [None req-8710f12d-0478-454f-8c54-66bd5d3dd37c tempest-ServerPasswordTestJSON-1830489623 tempest-ServerPasswordTestJSON-1830489623-project-member] Task: {'id': task-1802952, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1777.129766] env[62519]: DEBUG oslo_concurrency.lockutils [None req-de6056f6-9d9f-42ab-913c-cfc39f5eba39 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.034s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1777.132590] env[62519]: DEBUG oslo_concurrency.lockutils [None req-7460c943-528c-4fdf-b5c5-931a75f3518f tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Acquiring lock "8b20b91d-d4e7-4ec2-88ec-76b444a8d8a8" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1777.132772] env[62519]: DEBUG oslo_concurrency.lockutils [None req-7460c943-528c-4fdf-b5c5-931a75f3518f tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Lock "8b20b91d-d4e7-4ec2-88ec-76b444a8d8a8" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1777.132946] env[62519]: DEBUG nova.compute.manager [None req-7460c943-528c-4fdf-b5c5-931a75f3518f tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] [instance: 8b20b91d-d4e7-4ec2-88ec-76b444a8d8a8] Checking state {{(pid=62519) _get_power_state /opt/stack/nova/nova/compute/manager.py:1799}} [ 1777.133250] env[62519]: DEBUG oslo_concurrency.lockutils [None req-01d21426-eca2-4ae1-921c-e2c4a96f9de6 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 11.415s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1777.133575] env[62519]: DEBUG nova.objects.instance [None req-01d21426-eca2-4ae1-921c-e2c4a96f9de6 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] Lazy-loading 'resources' on Instance uuid af422ca1-7966-4bed-97bf-2b4c5285eaab {{(pid=62519) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1777.135288] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af571fe8-2938-4644-b89b-152fb7fc3b5c {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1777.147186] env[62519]: DEBUG nova.compute.manager [None req-7460c943-528c-4fdf-b5c5-931a75f3518f tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] [instance: 8b20b91d-d4e7-4ec2-88ec-76b444a8d8a8] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 
{{(pid=62519) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3408}} [ 1777.150018] env[62519]: DEBUG nova.objects.instance [None req-7460c943-528c-4fdf-b5c5-931a75f3518f tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Lazy-loading 'flavor' on Instance uuid 8b20b91d-d4e7-4ec2-88ec-76b444a8d8a8 {{(pid=62519) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1777.177318] env[62519]: INFO nova.scheduler.client.report [None req-de6056f6-9d9f-42ab-913c-cfc39f5eba39 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] Deleted allocations for instance 76786353-f93f-4e7e-b3f7-7f22ae4b7b41 [ 1777.425259] env[62519]: DEBUG oslo_vmware.api [None req-0ec5e37a-37cf-41c7-9c93-a04efad8446e tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Task: {'id': task-1802949, 'name': ReconfigVM_Task, 'duration_secs': 0.941047} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1777.425611] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-0ec5e37a-37cf-41c7-9c93-a04efad8446e tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] [instance: 4a0f7975-5a07-4593-ae71-cabebdefe0fe] Reconfigured VM instance instance-00000056 to attach disk [datastore1] 4a0f7975-5a07-4593-ae71-cabebdefe0fe/4a0f7975-5a07-4593-ae71-cabebdefe0fe.vmdk or device None with type sparse {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1777.427309] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-9ee3c956-0b4e-4e00-a961-3ba66b4da76f {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1777.436063] env[62519]: DEBUG oslo_vmware.api [None req-0ec5e37a-37cf-41c7-9c93-a04efad8446e tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Waiting for the task: (returnval){ [ 1777.436063] env[62519]: value = "task-1802953" [ 1777.436063] env[62519]: _type = "Task" [ 1777.436063] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1777.449588] env[62519]: DEBUG oslo_vmware.api [None req-0ec5e37a-37cf-41c7-9c93-a04efad8446e tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Task: {'id': task-1802953, 'name': Rename_Task} progress is 5%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1777.545539] env[62519]: DEBUG oslo_vmware.api [None req-087464bc-7aa6-42f5-9e15-0aed3a0c7295 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Task: {'id': task-1802950, 'name': ReconfigVM_Task, 'duration_secs': 0.299597} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1777.548848] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-087464bc-7aa6-42f5-9e15-0aed3a0c7295 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] [instance: 88f9351c-253b-49dd-a88e-b8585ea742ac] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-373788', 'volume_id': '5bd7c4a6-d552-4040-aefb-7d5577b89149', 'name': 'volume-5bd7c4a6-d552-4040-aefb-7d5577b89149', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '88f9351c-253b-49dd-a88e-b8585ea742ac', 'attached_at': '', 'detached_at': '', 'volume_id': '5bd7c4a6-d552-4040-aefb-7d5577b89149', 'serial': '5bd7c4a6-d552-4040-aefb-7d5577b89149'} {{(pid=62519) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1777.549726] env[62519]: DEBUG oslo_vmware.api [None req-42d8ba5a-d3d3-4638-a7a7-fe3fd6d26413 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Task: {'id': task-1802951, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1777.550127] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-c61ad874-15e9-4417-9e5f-be86981aa247 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1777.565404] env[62519]: DEBUG oslo_vmware.api [None req-8710f12d-0478-454f-8c54-66bd5d3dd37c tempest-ServerPasswordTestJSON-1830489623 tempest-ServerPasswordTestJSON-1830489623-project-member] Task: {'id': task-1802952, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1777.567439] env[62519]: DEBUG oslo_vmware.api [None req-087464bc-7aa6-42f5-9e15-0aed3a0c7295 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Waiting for the task: (returnval){ [ 1777.567439] env[62519]: value = "task-1802954" [ 1777.567439] env[62519]: _type = "Task" [ 1777.567439] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1777.583013] env[62519]: DEBUG oslo_vmware.api [None req-087464bc-7aa6-42f5-9e15-0aed3a0c7295 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Task: {'id': task-1802954, 'name': Rename_Task} progress is 6%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1777.689312] env[62519]: DEBUG oslo_concurrency.lockutils [None req-de6056f6-9d9f-42ab-913c-cfc39f5eba39 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] Lock "76786353-f93f-4e7e-b3f7-7f22ae4b7b41" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 16.310s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1777.941958] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee1ce99f-2f63-4a1f-92f5-f62d1f31a3a0 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1777.951151] env[62519]: DEBUG oslo_vmware.api [None req-0ec5e37a-37cf-41c7-9c93-a04efad8446e tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Task: {'id': task-1802953, 'name': Rename_Task, 'duration_secs': 0.256329} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1777.953126] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-0ec5e37a-37cf-41c7-9c93-a04efad8446e tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] [instance: 4a0f7975-5a07-4593-ae71-cabebdefe0fe] Powering on the VM {{(pid=62519) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1777.953459] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a6375c94-8d1a-43eb-9638-07209f410ad6 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1777.955781] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5120191-1bcf-44cd-a8e4-2ddefcdc9b9b {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1777.991104] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-483c9fbe-bc12-44d8-8956-161012ee56f6 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1777.994081] env[62519]: DEBUG oslo_vmware.api [None req-0ec5e37a-37cf-41c7-9c93-a04efad8446e tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Waiting for the task: (returnval){ [ 1777.994081] env[62519]: value = "task-1802955" [ 1777.994081] env[62519]: _type = "Task" [ 1777.994081] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1778.002946] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6d680d5-c250-49bf-a103-25fb1cd04e7a {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1778.010706] env[62519]: DEBUG oslo_vmware.api [None req-0ec5e37a-37cf-41c7-9c93-a04efad8446e tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Task: {'id': task-1802955, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1778.024151] env[62519]: DEBUG nova.compute.provider_tree [None req-01d21426-eca2-4ae1-921c-e2c4a96f9de6 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] Inventory has not changed in ProviderTree for provider: f8ca0d98-9158-4b85-ae0e-b106f966dd44 {{(pid=62519) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1778.038383] env[62519]: DEBUG oslo_vmware.api [None req-42d8ba5a-d3d3-4638-a7a7-fe3fd6d26413 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Task: {'id': task-1802951, 'name': ReconfigVM_Task, 'duration_secs': 0.574386} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1778.039405] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-42d8ba5a-d3d3-4638-a7a7-fe3fd6d26413 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] [instance: 2ea8304e-5b91-4908-a876-6e2c780b1da9] Reconfigured VM instance instance-00000054 to attach disk [datastore1] 2ea8304e-5b91-4908-a876-6e2c780b1da9/15793716-f1d9-4a86-9030-717adf498693-rescue.vmdk or device None with type thin {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1778.040330] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36430a63-c702-49d3-9f16-cdbda53b7f0b {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1778.069913] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1d31da04-fab4-418a-a46e-98b9ffe9482e {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1778.089757] env[62519]: DEBUG oslo_vmware.api [None req-087464bc-7aa6-42f5-9e15-0aed3a0c7295 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Task: {'id': task-1802954, 'name': Rename_Task, 'duration_secs': 0.489423} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1778.094139] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-087464bc-7aa6-42f5-9e15-0aed3a0c7295 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] [instance: 88f9351c-253b-49dd-a88e-b8585ea742ac] Powering on the VM {{(pid=62519) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1778.094360] env[62519]: DEBUG oslo_vmware.api [None req-8710f12d-0478-454f-8c54-66bd5d3dd37c tempest-ServerPasswordTestJSON-1830489623 tempest-ServerPasswordTestJSON-1830489623-project-member] Task: {'id': task-1802952, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.577748} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1778.094611] env[62519]: DEBUG oslo_vmware.api [None req-42d8ba5a-d3d3-4638-a7a7-fe3fd6d26413 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Waiting for the task: (returnval){ [ 1778.094611] env[62519]: value = "task-1802956" [ 1778.094611] env[62519]: _type = "Task" [ 1778.094611] env[62519]: } to complete. 
{{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1778.095989] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-68c39ba4-6884-490a-83a3-5e641cc42135 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1778.096544] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-8710f12d-0478-454f-8c54-66bd5d3dd37c tempest-ServerPasswordTestJSON-1830489623 tempest-ServerPasswordTestJSON-1830489623-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk to [datastore1] 4e7db12e-c7f7-4d2a-b797-1371fc839a9e/4e7db12e-c7f7-4d2a-b797-1371fc839a9e.vmdk {{(pid=62519) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1778.096751] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-8710f12d-0478-454f-8c54-66bd5d3dd37c tempest-ServerPasswordTestJSON-1830489623 tempest-ServerPasswordTestJSON-1830489623-project-member] [instance: 4e7db12e-c7f7-4d2a-b797-1371fc839a9e] Extending root virtual disk to 1048576 {{(pid=62519) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1778.097372] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-2f1bac50-243c-4772-984a-60ddb6fafb3b {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1778.108348] env[62519]: DEBUG oslo_vmware.api [None req-42d8ba5a-d3d3-4638-a7a7-fe3fd6d26413 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Task: {'id': task-1802956, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1778.109814] env[62519]: DEBUG oslo_vmware.api [None req-087464bc-7aa6-42f5-9e15-0aed3a0c7295 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Waiting for the task: (returnval){ [ 1778.109814] env[62519]: value = "task-1802957" [ 1778.109814] env[62519]: _type = "Task" [ 1778.109814] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1778.111076] env[62519]: DEBUG oslo_vmware.api [None req-8710f12d-0478-454f-8c54-66bd5d3dd37c tempest-ServerPasswordTestJSON-1830489623 tempest-ServerPasswordTestJSON-1830489623-project-member] Waiting for the task: (returnval){ [ 1778.111076] env[62519]: value = "task-1802958" [ 1778.111076] env[62519]: _type = "Task" [ 1778.111076] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1778.123807] env[62519]: DEBUG oslo_vmware.api [None req-087464bc-7aa6-42f5-9e15-0aed3a0c7295 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Task: {'id': task-1802957, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1778.126785] env[62519]: DEBUG oslo_vmware.api [None req-8710f12d-0478-454f-8c54-66bd5d3dd37c tempest-ServerPasswordTestJSON-1830489623 tempest-ServerPasswordTestJSON-1830489623-project-member] Task: {'id': task-1802958, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1778.155293] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-7460c943-528c-4fdf-b5c5-931a75f3518f tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] [instance: 8b20b91d-d4e7-4ec2-88ec-76b444a8d8a8] Powering off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1778.155705] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f372d100-70df-4432-b593-b47a736f125b {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1778.164794] env[62519]: DEBUG oslo_vmware.api [None req-7460c943-528c-4fdf-b5c5-931a75f3518f tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Waiting for the task: (returnval){ [ 1778.164794] env[62519]: value = "task-1802959" [ 1778.164794] env[62519]: _type = "Task" [ 1778.164794] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1778.174759] env[62519]: DEBUG oslo_vmware.api [None req-7460c943-528c-4fdf-b5c5-931a75f3518f tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Task: {'id': task-1802959, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1778.506529] env[62519]: DEBUG oslo_vmware.api [None req-0ec5e37a-37cf-41c7-9c93-a04efad8446e tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Task: {'id': task-1802955, 'name': PowerOnVM_Task} progress is 88%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1778.529740] env[62519]: DEBUG nova.scheduler.client.report [None req-01d21426-eca2-4ae1-921c-e2c4a96f9de6 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] Inventory has not changed for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1778.611156] env[62519]: DEBUG oslo_vmware.api [None req-42d8ba5a-d3d3-4638-a7a7-fe3fd6d26413 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Task: {'id': task-1802956, 'name': ReconfigVM_Task, 'duration_secs': 0.358908} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1778.611849] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-42d8ba5a-d3d3-4638-a7a7-fe3fd6d26413 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] [instance: 2ea8304e-5b91-4908-a876-6e2c780b1da9] Powering on the VM {{(pid=62519) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1778.625023] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-64bfc4a4-396e-4ff2-a946-d84bc2574cf4 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1778.635343] env[62519]: DEBUG oslo_vmware.api [None req-8710f12d-0478-454f-8c54-66bd5d3dd37c tempest-ServerPasswordTestJSON-1830489623 tempest-ServerPasswordTestJSON-1830489623-project-member] Task: {'id': task-1802958, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.080519} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1778.639893] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-8710f12d-0478-454f-8c54-66bd5d3dd37c tempest-ServerPasswordTestJSON-1830489623 tempest-ServerPasswordTestJSON-1830489623-project-member] [instance: 4e7db12e-c7f7-4d2a-b797-1371fc839a9e] Extended root virtual disk {{(pid=62519) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1778.640284] env[62519]: DEBUG oslo_vmware.api [None req-42d8ba5a-d3d3-4638-a7a7-fe3fd6d26413 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Waiting for the task: (returnval){ [ 1778.640284] env[62519]: value = "task-1802960" [ 1778.640284] env[62519]: _type = "Task" [ 1778.640284] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1778.641835] env[62519]: DEBUG oslo_vmware.api [None req-087464bc-7aa6-42f5-9e15-0aed3a0c7295 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Task: {'id': task-1802957, 'name': PowerOnVM_Task} progress is 88%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1778.641835] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d298c4f3-2fc9-4ef3-9917-d41119438aa0 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1778.656132] env[62519]: DEBUG oslo_vmware.api [None req-42d8ba5a-d3d3-4638-a7a7-fe3fd6d26413 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Task: {'id': task-1802960, 'name': PowerOnVM_Task} progress is 33%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1778.675072] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-8710f12d-0478-454f-8c54-66bd5d3dd37c tempest-ServerPasswordTestJSON-1830489623 tempest-ServerPasswordTestJSON-1830489623-project-member] [instance: 4e7db12e-c7f7-4d2a-b797-1371fc839a9e] Reconfiguring VM instance instance-00000057 to attach disk [datastore1] 4e7db12e-c7f7-4d2a-b797-1371fc839a9e/4e7db12e-c7f7-4d2a-b797-1371fc839a9e.vmdk or device None with type sparse {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1778.679867] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e7dff733-2d32-46f4-8477-c6b6920a5f47 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1778.703106] env[62519]: DEBUG oslo_vmware.api [None req-7460c943-528c-4fdf-b5c5-931a75f3518f tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Task: {'id': task-1802959, 'name': PowerOffVM_Task, 'duration_secs': 0.288033} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1778.705099] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-7460c943-528c-4fdf-b5c5-931a75f3518f tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] [instance: 8b20b91d-d4e7-4ec2-88ec-76b444a8d8a8] Powered off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1778.705200] env[62519]: DEBUG nova.compute.manager [None req-7460c943-528c-4fdf-b5c5-931a75f3518f tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] [instance: 8b20b91d-d4e7-4ec2-88ec-76b444a8d8a8] Checking state {{(pid=62519) _get_power_state /opt/stack/nova/nova/compute/manager.py:1799}} [ 1778.705586] env[62519]: DEBUG oslo_vmware.api [None req-8710f12d-0478-454f-8c54-66bd5d3dd37c tempest-ServerPasswordTestJSON-1830489623 tempest-ServerPasswordTestJSON-1830489623-project-member] Waiting for the task: (returnval){ [ 1778.705586] env[62519]: value = "task-1802961" [ 1778.705586] env[62519]: _type = "Task" [ 1778.705586] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1778.706581] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8bc12ba-3976-482d-9007-3c32e648186f {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1778.721171] env[62519]: DEBUG oslo_vmware.api [None req-8710f12d-0478-454f-8c54-66bd5d3dd37c tempest-ServerPasswordTestJSON-1830489623 tempest-ServerPasswordTestJSON-1830489623-project-member] Task: {'id': task-1802961, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1779.007545] env[62519]: DEBUG oslo_vmware.api [None req-0ec5e37a-37cf-41c7-9c93-a04efad8446e tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Task: {'id': task-1802955, 'name': PowerOnVM_Task, 'duration_secs': 0.87077} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1779.008109] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-0ec5e37a-37cf-41c7-9c93-a04efad8446e tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] [instance: 4a0f7975-5a07-4593-ae71-cabebdefe0fe] Powered on the VM {{(pid=62519) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1779.008109] env[62519]: INFO nova.compute.manager [None req-0ec5e37a-37cf-41c7-9c93-a04efad8446e tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] [instance: 4a0f7975-5a07-4593-ae71-cabebdefe0fe] Took 10.35 seconds to spawn the instance on the hypervisor. [ 1779.008343] env[62519]: DEBUG nova.compute.manager [None req-0ec5e37a-37cf-41c7-9c93-a04efad8446e tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] [instance: 4a0f7975-5a07-4593-ae71-cabebdefe0fe] Checking state {{(pid=62519) _get_power_state /opt/stack/nova/nova/compute/manager.py:1799}} [ 1779.009429] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b57df48f-278e-4013-8112-5b38545a674d {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1779.035980] env[62519]: DEBUG oslo_concurrency.lockutils [None req-01d21426-eca2-4ae1-921c-e2c4a96f9de6 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.903s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1779.039254] env[62519]: DEBUG oslo_concurrency.lockutils [None req-7e02c46c-e5c8-46a7-b4cc-fe4ab917acc4 tempest-ServerRescueTestJSONUnderV235-415838788 tempest-ServerRescueTestJSONUnderV235-415838788-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 13.002s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1779.039868] env[62519]: DEBUG nova.objects.instance [None req-7e02c46c-e5c8-46a7-b4cc-fe4ab917acc4 tempest-ServerRescueTestJSONUnderV235-415838788 tempest-ServerRescueTestJSONUnderV235-415838788-project-member] Lazy-loading 'resources' on Instance uuid 156ed02a-3365-4a4f-b4de-ea86920d3baf {{(pid=62519) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1779.071019] env[62519]: INFO nova.scheduler.client.report [None req-01d21426-eca2-4ae1-921c-e2c4a96f9de6 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] Deleted allocations for instance af422ca1-7966-4bed-97bf-2b4c5285eaab [ 1779.124590] env[62519]: DEBUG oslo_vmware.api [None req-087464bc-7aa6-42f5-9e15-0aed3a0c7295 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Task: {'id': task-1802957, 'name': PowerOnVM_Task, 'duration_secs': 0.825925} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1779.124955] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-087464bc-7aa6-42f5-9e15-0aed3a0c7295 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] [instance: 88f9351c-253b-49dd-a88e-b8585ea742ac] Powered on the VM {{(pid=62519) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1779.125205] env[62519]: DEBUG nova.compute.manager [None req-087464bc-7aa6-42f5-9e15-0aed3a0c7295 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] [instance: 88f9351c-253b-49dd-a88e-b8585ea742ac] Checking state {{(pid=62519) _get_power_state /opt/stack/nova/nova/compute/manager.py:1799}} [ 1779.126792] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c052d9e2-2e98-45ff-b2dd-7429c93fa4fa {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1779.160834] env[62519]: DEBUG oslo_vmware.api [None req-42d8ba5a-d3d3-4638-a7a7-fe3fd6d26413 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Task: {'id': task-1802960, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1779.223655] env[62519]: DEBUG oslo_vmware.api [None req-8710f12d-0478-454f-8c54-66bd5d3dd37c tempest-ServerPasswordTestJSON-1830489623 tempest-ServerPasswordTestJSON-1830489623-project-member] Task: {'id': task-1802961, 'name': ReconfigVM_Task, 'duration_secs': 0.513252} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1779.224810] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-8710f12d-0478-454f-8c54-66bd5d3dd37c tempest-ServerPasswordTestJSON-1830489623 tempest-ServerPasswordTestJSON-1830489623-project-member] [instance: 4e7db12e-c7f7-4d2a-b797-1371fc839a9e] Reconfigured VM instance instance-00000057 to attach disk [datastore1] 4e7db12e-c7f7-4d2a-b797-1371fc839a9e/4e7db12e-c7f7-4d2a-b797-1371fc839a9e.vmdk or device None with type sparse {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1779.225340] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-cc0ad3e2-f2f5-44f7-bfd8-98a8eceb3b17 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1779.231301] env[62519]: DEBUG oslo_concurrency.lockutils [None req-7460c943-528c-4fdf-b5c5-931a75f3518f tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Lock "8b20b91d-d4e7-4ec2-88ec-76b444a8d8a8" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 2.098s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1779.241599] env[62519]: DEBUG oslo_vmware.api [None req-8710f12d-0478-454f-8c54-66bd5d3dd37c tempest-ServerPasswordTestJSON-1830489623 tempest-ServerPasswordTestJSON-1830489623-project-member] Waiting for the task: (returnval){ [ 1779.241599] env[62519]: value = "task-1802962" [ 1779.241599] env[62519]: _type = "Task" [ 1779.241599] env[62519]: } to complete. 
{{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1779.257434] env[62519]: DEBUG oslo_vmware.api [None req-8710f12d-0478-454f-8c54-66bd5d3dd37c tempest-ServerPasswordTestJSON-1830489623 tempest-ServerPasswordTestJSON-1830489623-project-member] Task: {'id': task-1802962, 'name': Rename_Task} progress is 10%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1779.535815] env[62519]: INFO nova.compute.manager [None req-0ec5e37a-37cf-41c7-9c93-a04efad8446e tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] [instance: 4a0f7975-5a07-4593-ae71-cabebdefe0fe] Took 30.86 seconds to build instance. [ 1779.581197] env[62519]: DEBUG oslo_concurrency.lockutils [None req-01d21426-eca2-4ae1-921c-e2c4a96f9de6 tempest-MultipleCreateTestJSON-462787329 tempest-MultipleCreateTestJSON-462787329-project-member] Lock "af422ca1-7966-4bed-97bf-2b4c5285eaab" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 18.059s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1779.650865] env[62519]: DEBUG oslo_concurrency.lockutils [None req-087464bc-7aa6-42f5-9e15-0aed3a0c7295 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1779.663244] env[62519]: DEBUG oslo_vmware.api [None req-42d8ba5a-d3d3-4638-a7a7-fe3fd6d26413 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Task: {'id': task-1802960, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1779.744264] env[62519]: DEBUG nova.objects.instance [None req-e6f5d5ed-1e0c-44aa-9e18-eebaba20c50d tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Lazy-loading 'flavor' on Instance uuid 8b20b91d-d4e7-4ec2-88ec-76b444a8d8a8 {{(pid=62519) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1779.761268] env[62519]: DEBUG oslo_vmware.api [None req-8710f12d-0478-454f-8c54-66bd5d3dd37c tempest-ServerPasswordTestJSON-1830489623 tempest-ServerPasswordTestJSON-1830489623-project-member] Task: {'id': task-1802962, 'name': Rename_Task, 'duration_secs': 0.225561} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1779.761529] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-8710f12d-0478-454f-8c54-66bd5d3dd37c tempest-ServerPasswordTestJSON-1830489623 tempest-ServerPasswordTestJSON-1830489623-project-member] [instance: 4e7db12e-c7f7-4d2a-b797-1371fc839a9e] Powering on the VM {{(pid=62519) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1779.761529] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-233b1019-4b2a-40aa-b084-8fa5b4acd658 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1779.773411] env[62519]: DEBUG oslo_vmware.api [None req-8710f12d-0478-454f-8c54-66bd5d3dd37c tempest-ServerPasswordTestJSON-1830489623 tempest-ServerPasswordTestJSON-1830489623-project-member] Waiting for the task: (returnval){ [ 1779.773411] env[62519]: value = "task-1802963" [ 1779.773411] env[62519]: _type = "Task" [ 1779.773411] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1779.784021] env[62519]: DEBUG oslo_vmware.api [None req-8710f12d-0478-454f-8c54-66bd5d3dd37c tempest-ServerPasswordTestJSON-1830489623 tempest-ServerPasswordTestJSON-1830489623-project-member] Task: {'id': task-1802963, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1779.885877] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41e1a7b0-558f-49b3-a666-2c75bc7b24b7 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1779.898537] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a77b949-5a46-4d42-ae9e-44ee6d16bd64 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1779.948984] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22bf593f-6e01-4885-ba98-eb1f91e36d8e {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1779.958869] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c1deddc-a06c-44f1-901b-6d8786cba8bb {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1779.982475] env[62519]: DEBUG nova.compute.provider_tree [None req-7e02c46c-e5c8-46a7-b4cc-fe4ab917acc4 tempest-ServerRescueTestJSONUnderV235-415838788 tempest-ServerRescueTestJSONUnderV235-415838788-project-member] Inventory has not changed in ProviderTree for provider: f8ca0d98-9158-4b85-ae0e-b106f966dd44 {{(pid=62519) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1780.161890] env[62519]: DEBUG oslo_vmware.api [None req-42d8ba5a-d3d3-4638-a7a7-fe3fd6d26413 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Task: {'id': task-1802960, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1780.254177] env[62519]: DEBUG oslo_concurrency.lockutils [None req-e6f5d5ed-1e0c-44aa-9e18-eebaba20c50d tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Acquiring lock "refresh_cache-8b20b91d-d4e7-4ec2-88ec-76b444a8d8a8" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1780.254344] env[62519]: DEBUG oslo_concurrency.lockutils [None req-e6f5d5ed-1e0c-44aa-9e18-eebaba20c50d tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Acquired lock "refresh_cache-8b20b91d-d4e7-4ec2-88ec-76b444a8d8a8" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1780.254514] env[62519]: DEBUG nova.network.neutron [None req-e6f5d5ed-1e0c-44aa-9e18-eebaba20c50d tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] [instance: 8b20b91d-d4e7-4ec2-88ec-76b444a8d8a8] Building network info cache for instance {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1780.254689] env[62519]: DEBUG nova.objects.instance [None req-e6f5d5ed-1e0c-44aa-9e18-eebaba20c50d tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Lazy-loading 'info_cache' on Instance uuid 8b20b91d-d4e7-4ec2-88ec-76b444a8d8a8 {{(pid=62519) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1780.283418] env[62519]: DEBUG oslo_vmware.api [None req-8710f12d-0478-454f-8c54-66bd5d3dd37c tempest-ServerPasswordTestJSON-1830489623 tempest-ServerPasswordTestJSON-1830489623-project-member] Task: {'id': task-1802963, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1780.485350] env[62519]: DEBUG nova.scheduler.client.report [None req-7e02c46c-e5c8-46a7-b4cc-fe4ab917acc4 tempest-ServerRescueTestJSONUnderV235-415838788 tempest-ServerRescueTestJSONUnderV235-415838788-project-member] Inventory has not changed for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1780.663721] env[62519]: DEBUG oslo_vmware.api [None req-42d8ba5a-d3d3-4638-a7a7-fe3fd6d26413 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Task: {'id': task-1802960, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1780.759259] env[62519]: DEBUG nova.objects.base [None req-e6f5d5ed-1e0c-44aa-9e18-eebaba20c50d tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Object Instance<8b20b91d-d4e7-4ec2-88ec-76b444a8d8a8> lazy-loaded attributes: flavor,info_cache {{(pid=62519) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1780.785847] env[62519]: DEBUG oslo_vmware.api [None req-8710f12d-0478-454f-8c54-66bd5d3dd37c tempest-ServerPasswordTestJSON-1830489623 tempest-ServerPasswordTestJSON-1830489623-project-member] Task: {'id': task-1802963, 'name': PowerOnVM_Task, 'duration_secs': 0.883536} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1780.786505] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-8710f12d-0478-454f-8c54-66bd5d3dd37c tempest-ServerPasswordTestJSON-1830489623 tempest-ServerPasswordTestJSON-1830489623-project-member] [instance: 4e7db12e-c7f7-4d2a-b797-1371fc839a9e] Powered on the VM {{(pid=62519) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1780.787027] env[62519]: INFO nova.compute.manager [None req-8710f12d-0478-454f-8c54-66bd5d3dd37c tempest-ServerPasswordTestJSON-1830489623 tempest-ServerPasswordTestJSON-1830489623-project-member] [instance: 4e7db12e-c7f7-4d2a-b797-1371fc839a9e] Took 9.59 seconds to spawn the instance on the hypervisor. [ 1780.787505] env[62519]: DEBUG nova.compute.manager [None req-8710f12d-0478-454f-8c54-66bd5d3dd37c tempest-ServerPasswordTestJSON-1830489623 tempest-ServerPasswordTestJSON-1830489623-project-member] [instance: 4e7db12e-c7f7-4d2a-b797-1371fc839a9e] Checking state {{(pid=62519) _get_power_state /opt/stack/nova/nova/compute/manager.py:1799}} [ 1780.793626] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1dab1f45-cde4-4806-9e99-e211dfbe71cc {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1780.992131] env[62519]: DEBUG oslo_concurrency.lockutils [None req-7e02c46c-e5c8-46a7-b4cc-fe4ab917acc4 tempest-ServerRescueTestJSONUnderV235-415838788 tempest-ServerRescueTestJSONUnderV235-415838788-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.954s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1780.995013] env[62519]: DEBUG oslo_concurrency.lockutils [None req-13b523f6-ff81-4d94-906d-b58b94706344 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 13.896s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1780.999390] env[62519]: INFO nova.compute.claims [None req-13b523f6-ff81-4d94-906d-b58b94706344 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] [instance: 358ee402-1112-4eea-a518-a45a6bf92c31] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1781.023044] env[62519]: INFO nova.scheduler.client.report [None req-7e02c46c-e5c8-46a7-b4cc-fe4ab917acc4 tempest-ServerRescueTestJSONUnderV235-415838788 tempest-ServerRescueTestJSONUnderV235-415838788-project-member] Deleted allocations for instance 
156ed02a-3365-4a4f-b4de-ea86920d3baf [ 1781.044287] env[62519]: DEBUG oslo_concurrency.lockutils [None req-0ec5e37a-37cf-41c7-9c93-a04efad8446e tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Lock "4a0f7975-5a07-4593-ae71-cabebdefe0fe" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 33.384s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1781.162770] env[62519]: DEBUG oslo_vmware.api [None req-42d8ba5a-d3d3-4638-a7a7-fe3fd6d26413 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Task: {'id': task-1802960, 'name': PowerOnVM_Task, 'duration_secs': 2.32316} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1781.163643] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-42d8ba5a-d3d3-4638-a7a7-fe3fd6d26413 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] [instance: 2ea8304e-5b91-4908-a876-6e2c780b1da9] Powered on the VM {{(pid=62519) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1781.166880] env[62519]: DEBUG nova.compute.manager [None req-42d8ba5a-d3d3-4638-a7a7-fe3fd6d26413 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] [instance: 2ea8304e-5b91-4908-a876-6e2c780b1da9] Checking state {{(pid=62519) _get_power_state /opt/stack/nova/nova/compute/manager.py:1799}} [ 1781.167837] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93f1b161-3c40-45d2-b615-5a6c8bf8cf13 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1781.261459] env[62519]: DEBUG nova.compute.manager [req-24340626-0b4a-43c5-94f5-b8ffb5a1b4f7 req-62096cb0-c3a6-4968-9a3a-9b9f4bc14fca service nova] [instance: 4a0f7975-5a07-4593-ae71-cabebdefe0fe] Received event network-changed-f8b7229e-2b40-4a1b-9e16-0d01dbaa52f8 {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1781.261650] env[62519]: DEBUG nova.compute.manager [req-24340626-0b4a-43c5-94f5-b8ffb5a1b4f7 req-62096cb0-c3a6-4968-9a3a-9b9f4bc14fca service nova] [instance: 4a0f7975-5a07-4593-ae71-cabebdefe0fe] Refreshing instance network info cache due to event network-changed-f8b7229e-2b40-4a1b-9e16-0d01dbaa52f8. 
{{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11628}} [ 1781.261869] env[62519]: DEBUG oslo_concurrency.lockutils [req-24340626-0b4a-43c5-94f5-b8ffb5a1b4f7 req-62096cb0-c3a6-4968-9a3a-9b9f4bc14fca service nova] Acquiring lock "refresh_cache-4a0f7975-5a07-4593-ae71-cabebdefe0fe" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1781.262028] env[62519]: DEBUG oslo_concurrency.lockutils [req-24340626-0b4a-43c5-94f5-b8ffb5a1b4f7 req-62096cb0-c3a6-4968-9a3a-9b9f4bc14fca service nova] Acquired lock "refresh_cache-4a0f7975-5a07-4593-ae71-cabebdefe0fe" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1781.263120] env[62519]: DEBUG nova.network.neutron [req-24340626-0b4a-43c5-94f5-b8ffb5a1b4f7 req-62096cb0-c3a6-4968-9a3a-9b9f4bc14fca service nova] [instance: 4a0f7975-5a07-4593-ae71-cabebdefe0fe] Refreshing network info cache for port f8b7229e-2b40-4a1b-9e16-0d01dbaa52f8 {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1781.315584] env[62519]: INFO nova.compute.manager [None req-8710f12d-0478-454f-8c54-66bd5d3dd37c tempest-ServerPasswordTestJSON-1830489623 tempest-ServerPasswordTestJSON-1830489623-project-member] [instance: 4e7db12e-c7f7-4d2a-b797-1371fc839a9e] Took 29.03 seconds to build instance. [ 1781.541491] env[62519]: DEBUG oslo_concurrency.lockutils [None req-7e02c46c-e5c8-46a7-b4cc-fe4ab917acc4 tempest-ServerRescueTestJSONUnderV235-415838788 tempest-ServerRescueTestJSONUnderV235-415838788-project-member] Lock "156ed02a-3365-4a4f-b4de-ea86920d3baf" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 20.111s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1781.579613] env[62519]: DEBUG oslo_concurrency.lockutils [None req-233e6b89-3144-41a2-9f7d-54ed63bc311d tempest-ServerAddressesTestJSON-575205737 tempest-ServerAddressesTestJSON-575205737-project-member] Acquiring lock "1c4615c7-d145-4529-98bd-1ae3ed51e1b5" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1781.579613] env[62519]: DEBUG oslo_concurrency.lockutils [None req-233e6b89-3144-41a2-9f7d-54ed63bc311d tempest-ServerAddressesTestJSON-575205737 tempest-ServerAddressesTestJSON-575205737-project-member] Lock "1c4615c7-d145-4529-98bd-1ae3ed51e1b5" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1782.084633] env[62519]: DEBUG nova.compute.manager [None req-233e6b89-3144-41a2-9f7d-54ed63bc311d tempest-ServerAddressesTestJSON-575205737 tempest-ServerAddressesTestJSON-575205737-project-member] [instance: 1c4615c7-d145-4529-98bd-1ae3ed51e1b5] Starting instance... 
{{(pid=62519) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2441}} [ 1782.121833] env[62519]: DEBUG nova.network.neutron [None req-e6f5d5ed-1e0c-44aa-9e18-eebaba20c50d tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] [instance: 8b20b91d-d4e7-4ec2-88ec-76b444a8d8a8] Updating instance_info_cache with network_info: [{"id": "c3e2a054-4826-4bd6-8c9e-74005e7912e4", "address": "fa:16:3e:5b:29:03", "network": {"id": "40fc0e4f-fe1a-4b8c-ace4-4612cfc1a8fc", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-364463489-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.170", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "12977ed65a1b410a987b049e9d1dce3e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8e028024-a9c1-4cae-8849-ea770a7ae0e4", "external-id": "nsx-vlan-transportzone-919", "segmentation_id": 919, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc3e2a054-48", "ovs_interfaceid": "c3e2a054-4826-4bd6-8c9e-74005e7912e4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1782.184247] env[62519]: DEBUG oslo_concurrency.lockutils [None req-a98c3ed1-4d3d-4762-b887-532cdd3075e3 tempest-ServerPasswordTestJSON-1830489623 tempest-ServerPasswordTestJSON-1830489623-project-member] Acquiring lock "4e7db12e-c7f7-4d2a-b797-1371fc839a9e" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1782.385463] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-938ab5df-5f5c-47d5-8f76-d68504ac4ca5 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1782.402495] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cfb625dc-0eac-4bcf-b901-29400403af84 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1782.440384] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d7860fa-17f4-495f-afc7-dbf1def1d036 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1782.451177] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75669276-152a-46f1-9014-4eebfaae2c81 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1782.467068] env[62519]: DEBUG nova.compute.provider_tree [None req-13b523f6-ff81-4d94-906d-b58b94706344 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Inventory has not changed in ProviderTree for provider: 
f8ca0d98-9158-4b85-ae0e-b106f966dd44 {{(pid=62519) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1782.554128] env[62519]: DEBUG nova.network.neutron [req-24340626-0b4a-43c5-94f5-b8ffb5a1b4f7 req-62096cb0-c3a6-4968-9a3a-9b9f4bc14fca service nova] [instance: 4a0f7975-5a07-4593-ae71-cabebdefe0fe] Updated VIF entry in instance network info cache for port f8b7229e-2b40-4a1b-9e16-0d01dbaa52f8. {{(pid=62519) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1782.554761] env[62519]: DEBUG nova.network.neutron [req-24340626-0b4a-43c5-94f5-b8ffb5a1b4f7 req-62096cb0-c3a6-4968-9a3a-9b9f4bc14fca service nova] [instance: 4a0f7975-5a07-4593-ae71-cabebdefe0fe] Updating instance_info_cache with network_info: [{"id": "f8b7229e-2b40-4a1b-9e16-0d01dbaa52f8", "address": "fa:16:3e:6c:b5:c8", "network": {"id": "32c7792c-9705-4830-bdbc-4c4159d566f0", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-374750788-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.241", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d73e71476254453fb23164dce09c6d41", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a0a76279-3c11-4bef-b124-2a2ee13fa377", "external-id": "nsx-vlan-transportzone-738", "segmentation_id": 738, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf8b7229e-2b", "ovs_interfaceid": "f8b7229e-2b40-4a1b-9e16-0d01dbaa52f8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1782.612045] env[62519]: DEBUG oslo_concurrency.lockutils [None req-233e6b89-3144-41a2-9f7d-54ed63bc311d tempest-ServerAddressesTestJSON-575205737 tempest-ServerAddressesTestJSON-575205737-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1782.632129] env[62519]: DEBUG oslo_concurrency.lockutils [None req-e6f5d5ed-1e0c-44aa-9e18-eebaba20c50d tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Releasing lock "refresh_cache-8b20b91d-d4e7-4ec2-88ec-76b444a8d8a8" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1782.824850] env[62519]: DEBUG oslo_concurrency.lockutils [None req-8710f12d-0478-454f-8c54-66bd5d3dd37c tempest-ServerPasswordTestJSON-1830489623 tempest-ServerPasswordTestJSON-1830489623-project-member] Lock "4e7db12e-c7f7-4d2a-b797-1371fc839a9e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 31.550s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1782.825115] env[62519]: DEBUG oslo_concurrency.lockutils [None req-a98c3ed1-4d3d-4762-b887-532cdd3075e3 tempest-ServerPasswordTestJSON-1830489623 
tempest-ServerPasswordTestJSON-1830489623-project-member] Lock "4e7db12e-c7f7-4d2a-b797-1371fc839a9e" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.641s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1782.825302] env[62519]: DEBUG oslo_concurrency.lockutils [None req-a98c3ed1-4d3d-4762-b887-532cdd3075e3 tempest-ServerPasswordTestJSON-1830489623 tempest-ServerPasswordTestJSON-1830489623-project-member] Acquiring lock "4e7db12e-c7f7-4d2a-b797-1371fc839a9e-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1782.825524] env[62519]: DEBUG oslo_concurrency.lockutils [None req-a98c3ed1-4d3d-4762-b887-532cdd3075e3 tempest-ServerPasswordTestJSON-1830489623 tempest-ServerPasswordTestJSON-1830489623-project-member] Lock "4e7db12e-c7f7-4d2a-b797-1371fc839a9e-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1782.825725] env[62519]: DEBUG oslo_concurrency.lockutils [None req-a98c3ed1-4d3d-4762-b887-532cdd3075e3 tempest-ServerPasswordTestJSON-1830489623 tempest-ServerPasswordTestJSON-1830489623-project-member] Lock "4e7db12e-c7f7-4d2a-b797-1371fc839a9e-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1782.828278] env[62519]: INFO nova.compute.manager [None req-a98c3ed1-4d3d-4762-b887-532cdd3075e3 tempest-ServerPasswordTestJSON-1830489623 tempest-ServerPasswordTestJSON-1830489623-project-member] [instance: 4e7db12e-c7f7-4d2a-b797-1371fc839a9e] Terminating instance [ 1782.973463] env[62519]: DEBUG nova.scheduler.client.report [None req-13b523f6-ff81-4d94-906d-b58b94706344 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Inventory has not changed for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1783.057844] env[62519]: DEBUG oslo_concurrency.lockutils [req-24340626-0b4a-43c5-94f5-b8ffb5a1b4f7 req-62096cb0-c3a6-4968-9a3a-9b9f4bc14fca service nova] Releasing lock "refresh_cache-4a0f7975-5a07-4593-ae71-cabebdefe0fe" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1783.333274] env[62519]: DEBUG nova.compute.manager [None req-a98c3ed1-4d3d-4762-b887-532cdd3075e3 tempest-ServerPasswordTestJSON-1830489623 tempest-ServerPasswordTestJSON-1830489623-project-member] [instance: 4e7db12e-c7f7-4d2a-b797-1371fc839a9e] Start destroying the instance on the hypervisor. 
{{(pid=62519) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3166}} [ 1783.333630] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-a98c3ed1-4d3d-4762-b887-532cdd3075e3 tempest-ServerPasswordTestJSON-1830489623 tempest-ServerPasswordTestJSON-1830489623-project-member] [instance: 4e7db12e-c7f7-4d2a-b797-1371fc839a9e] Destroying instance {{(pid=62519) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1783.335075] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26299189-d953-4381-95c2-2658bb0a23e2 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1783.351586] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-a98c3ed1-4d3d-4762-b887-532cdd3075e3 tempest-ServerPasswordTestJSON-1830489623 tempest-ServerPasswordTestJSON-1830489623-project-member] [instance: 4e7db12e-c7f7-4d2a-b797-1371fc839a9e] Powering off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1783.351892] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ff5465b5-7d59-445e-aab6-45b82a3b8d46 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1783.363265] env[62519]: DEBUG oslo_vmware.api [None req-a98c3ed1-4d3d-4762-b887-532cdd3075e3 tempest-ServerPasswordTestJSON-1830489623 tempest-ServerPasswordTestJSON-1830489623-project-member] Waiting for the task: (returnval){ [ 1783.363265] env[62519]: value = "task-1802964" [ 1783.363265] env[62519]: _type = "Task" [ 1783.363265] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1783.379147] env[62519]: DEBUG oslo_vmware.api [None req-a98c3ed1-4d3d-4762-b887-532cdd3075e3 tempest-ServerPasswordTestJSON-1830489623 tempest-ServerPasswordTestJSON-1830489623-project-member] Task: {'id': task-1802964, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1783.479377] env[62519]: DEBUG oslo_concurrency.lockutils [None req-13b523f6-ff81-4d94-906d-b58b94706344 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.483s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1783.479377] env[62519]: DEBUG nova.compute.manager [None req-13b523f6-ff81-4d94-906d-b58b94706344 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] [instance: 358ee402-1112-4eea-a518-a45a6bf92c31] Start building networks asynchronously for instance. 
{{(pid=62519) _build_resources /opt/stack/nova/nova/compute/manager.py:2838}} [ 1783.481756] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 14.228s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1783.645176] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-e6f5d5ed-1e0c-44aa-9e18-eebaba20c50d tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] [instance: 8b20b91d-d4e7-4ec2-88ec-76b444a8d8a8] Powering on the VM {{(pid=62519) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1783.645666] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-27ed96f6-5bdd-4745-9256-2a2762348603 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1783.655778] env[62519]: DEBUG oslo_vmware.api [None req-e6f5d5ed-1e0c-44aa-9e18-eebaba20c50d tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Waiting for the task: (returnval){ [ 1783.655778] env[62519]: value = "task-1802965" [ 1783.655778] env[62519]: _type = "Task" [ 1783.655778] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1783.670477] env[62519]: DEBUG oslo_vmware.api [None req-e6f5d5ed-1e0c-44aa-9e18-eebaba20c50d tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Task: {'id': task-1802965, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1783.872978] env[62519]: DEBUG oslo_vmware.api [None req-a98c3ed1-4d3d-4762-b887-532cdd3075e3 tempest-ServerPasswordTestJSON-1830489623 tempest-ServerPasswordTestJSON-1830489623-project-member] Task: {'id': task-1802964, 'name': PowerOffVM_Task, 'duration_secs': 0.310904} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1783.873365] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-a98c3ed1-4d3d-4762-b887-532cdd3075e3 tempest-ServerPasswordTestJSON-1830489623 tempest-ServerPasswordTestJSON-1830489623-project-member] [instance: 4e7db12e-c7f7-4d2a-b797-1371fc839a9e] Powered off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1783.873581] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-a98c3ed1-4d3d-4762-b887-532cdd3075e3 tempest-ServerPasswordTestJSON-1830489623 tempest-ServerPasswordTestJSON-1830489623-project-member] [instance: 4e7db12e-c7f7-4d2a-b797-1371fc839a9e] Unregistering the VM {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1783.873862] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-5a944ae7-f22d-4ba2-9832-7edc0d73799f {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1783.984028] env[62519]: DEBUG nova.compute.utils [None req-13b523f6-ff81-4d94-906d-b58b94706344 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Using /dev/sd instead of None {{(pid=62519) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1783.985084] env[62519]: DEBUG nova.compute.manager [None req-13b523f6-ff81-4d94-906d-b58b94706344 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] [instance: 358ee402-1112-4eea-a518-a45a6bf92c31] Allocating IP information in the background. {{(pid=62519) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1989}} [ 1783.985246] env[62519]: DEBUG nova.network.neutron [None req-13b523f6-ff81-4d94-906d-b58b94706344 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] [instance: 358ee402-1112-4eea-a518-a45a6bf92c31] allocate_for_instance() {{(pid=62519) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1784.008862] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-a98c3ed1-4d3d-4762-b887-532cdd3075e3 tempest-ServerPasswordTestJSON-1830489623 tempest-ServerPasswordTestJSON-1830489623-project-member] [instance: 4e7db12e-c7f7-4d2a-b797-1371fc839a9e] Unregistered the VM {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1784.009120] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-a98c3ed1-4d3d-4762-b887-532cdd3075e3 tempest-ServerPasswordTestJSON-1830489623 tempest-ServerPasswordTestJSON-1830489623-project-member] [instance: 4e7db12e-c7f7-4d2a-b797-1371fc839a9e] Deleting contents of the VM from datastore datastore1 {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1784.009302] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-a98c3ed1-4d3d-4762-b887-532cdd3075e3 tempest-ServerPasswordTestJSON-1830489623 tempest-ServerPasswordTestJSON-1830489623-project-member] Deleting the datastore file [datastore1] 4e7db12e-c7f7-4d2a-b797-1371fc839a9e {{(pid=62519) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1784.009880] env[62519]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-2b165ef8-5734-423e-8bf4-68aaeb2bf935 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1784.036190] env[62519]: DEBUG oslo_vmware.api [None 
req-a98c3ed1-4d3d-4762-b887-532cdd3075e3 tempest-ServerPasswordTestJSON-1830489623 tempest-ServerPasswordTestJSON-1830489623-project-member] Waiting for the task: (returnval){ [ 1784.036190] env[62519]: value = "task-1802967" [ 1784.036190] env[62519]: _type = "Task" [ 1784.036190] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1784.040523] env[62519]: DEBUG nova.policy [None req-13b523f6-ff81-4d94-906d-b58b94706344 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '053a30aaf81b4cbd8ced7018ebfe1f40', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '0e755fb5a6e94068b6c99b1638081f5f', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62519) authorize /opt/stack/nova/nova/policy.py:192}} [ 1784.048384] env[62519]: DEBUG oslo_vmware.api [None req-a98c3ed1-4d3d-4762-b887-532cdd3075e3 tempest-ServerPasswordTestJSON-1830489623 tempest-ServerPasswordTestJSON-1830489623-project-member] Task: {'id': task-1802967, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1784.169768] env[62519]: DEBUG oslo_vmware.api [None req-e6f5d5ed-1e0c-44aa-9e18-eebaba20c50d tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Task: {'id': task-1802965, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1784.332693] env[62519]: DEBUG nova.network.neutron [None req-13b523f6-ff81-4d94-906d-b58b94706344 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] [instance: 358ee402-1112-4eea-a518-a45a6bf92c31] Successfully created port: 87d0022f-848b-4f80-b7c3-e234bf681457 {{(pid=62519) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1784.356622] env[62519]: INFO nova.compute.manager [None req-c521a60d-baf9-42df-b80a-31718554c485 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] [instance: ee4b10ba-1c56-47cf-a528-d6e65c286ddb] Rescuing [ 1784.356622] env[62519]: DEBUG oslo_concurrency.lockutils [None req-c521a60d-baf9-42df-b80a-31718554c485 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Acquiring lock "refresh_cache-ee4b10ba-1c56-47cf-a528-d6e65c286ddb" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1784.357202] env[62519]: DEBUG oslo_concurrency.lockutils [None req-c521a60d-baf9-42df-b80a-31718554c485 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Acquired lock "refresh_cache-ee4b10ba-1c56-47cf-a528-d6e65c286ddb" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1784.357202] env[62519]: DEBUG nova.network.neutron [None req-c521a60d-baf9-42df-b80a-31718554c485 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] [instance: 
ee4b10ba-1c56-47cf-a528-d6e65c286ddb] Building network info cache for instance {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1784.497280] env[62519]: DEBUG nova.compute.manager [None req-13b523f6-ff81-4d94-906d-b58b94706344 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] [instance: 358ee402-1112-4eea-a518-a45a6bf92c31] Start building block device mappings for instance. {{(pid=62519) _build_resources /opt/stack/nova/nova/compute/manager.py:2873}} [ 1784.506793] env[62519]: INFO nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] [instance: 5da884af-d8d2-409b-99bd-e5370e44e9f0] Updating resource usage from migration 2ff5e489-a567-4b94-8bed-d596558da36f [ 1784.545055] env[62519]: DEBUG nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Instance 11d4a010-959f-4f53-94dc-7499007612ad actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62519) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1784.545055] env[62519]: WARNING nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Instance f0925a44-c15b-4415-99bc-1b2366292fe4 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 2, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1784.545055] env[62519]: DEBUG nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Instance 8b20b91d-d4e7-4ec2-88ec-76b444a8d8a8 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62519) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1784.545055] env[62519]: DEBUG nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Instance 46b3a0fb-29f6-4b66-a091-2d125b69d109 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62519) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1784.545312] env[62519]: DEBUG nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Instance bace23b3-b7f4-4f3b-8986-0076440d096d actively managed on this compute host and has allocations in placement: {'resources': {'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62519) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1784.545312] env[62519]: DEBUG nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Instance 417588f8-6288-4ecd-9764-dbc923549c5d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62519) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1784.545382] env[62519]: DEBUG nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Instance 99f22198-1a65-4d0d-b665-90c7063dbdb9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62519) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1784.545433] env[62519]: DEBUG nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Instance 9f71845a-e80c-4822-b3de-717f1d83bc49 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62519) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1784.546049] env[62519]: DEBUG nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Instance 88f9351c-253b-49dd-a88e-b8585ea742ac actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62519) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1784.546049] env[62519]: DEBUG nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Instance ee4b10ba-1c56-47cf-a528-d6e65c286ddb actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62519) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1784.546049] env[62519]: DEBUG nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Instance 2ea8304e-5b91-4908-a876-6e2c780b1da9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62519) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1784.546049] env[62519]: DEBUG nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Instance 4a0f7975-5a07-4593-ae71-cabebdefe0fe actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62519) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1784.546049] env[62519]: DEBUG nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Instance 4e7db12e-c7f7-4d2a-b797-1371fc839a9e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62519) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1784.547457] env[62519]: DEBUG nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Instance 358ee402-1112-4eea-a518-a45a6bf92c31 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62519) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1784.552666] env[62519]: DEBUG oslo_vmware.api [None req-a98c3ed1-4d3d-4762-b887-532cdd3075e3 tempest-ServerPasswordTestJSON-1830489623 tempest-ServerPasswordTestJSON-1830489623-project-member] Task: {'id': task-1802967, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.173417} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1784.552666] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-a98c3ed1-4d3d-4762-b887-532cdd3075e3 tempest-ServerPasswordTestJSON-1830489623 tempest-ServerPasswordTestJSON-1830489623-project-member] Deleted the datastore file {{(pid=62519) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1784.552666] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-a98c3ed1-4d3d-4762-b887-532cdd3075e3 tempest-ServerPasswordTestJSON-1830489623 tempest-ServerPasswordTestJSON-1830489623-project-member] [instance: 4e7db12e-c7f7-4d2a-b797-1371fc839a9e] Deleted contents of the VM from datastore datastore1 {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1784.552666] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-a98c3ed1-4d3d-4762-b887-532cdd3075e3 tempest-ServerPasswordTestJSON-1830489623 tempest-ServerPasswordTestJSON-1830489623-project-member] [instance: 4e7db12e-c7f7-4d2a-b797-1371fc839a9e] Instance destroyed {{(pid=62519) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1784.552666] env[62519]: INFO nova.compute.manager [None req-a98c3ed1-4d3d-4762-b887-532cdd3075e3 tempest-ServerPasswordTestJSON-1830489623 tempest-ServerPasswordTestJSON-1830489623-project-member] [instance: 4e7db12e-c7f7-4d2a-b797-1371fc839a9e] Took 1.22 seconds to destroy the instance on the hypervisor. [ 1784.553015] env[62519]: DEBUG oslo.service.loopingcall [None req-a98c3ed1-4d3d-4762-b887-532cdd3075e3 tempest-ServerPasswordTestJSON-1830489623 tempest-ServerPasswordTestJSON-1830489623-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62519) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1784.553015] env[62519]: DEBUG nova.compute.manager [-] [instance: 4e7db12e-c7f7-4d2a-b797-1371fc839a9e] Deallocating network for instance {{(pid=62519) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2297}} [ 1784.553093] env[62519]: DEBUG nova.network.neutron [-] [instance: 4e7db12e-c7f7-4d2a-b797-1371fc839a9e] deallocate_for_instance() {{(pid=62519) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1784.672371] env[62519]: DEBUG oslo_vmware.api [None req-e6f5d5ed-1e0c-44aa-9e18-eebaba20c50d tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Task: {'id': task-1802965, 'name': PowerOnVM_Task, 'duration_secs': 0.928667} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1784.672595] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-e6f5d5ed-1e0c-44aa-9e18-eebaba20c50d tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] [instance: 8b20b91d-d4e7-4ec2-88ec-76b444a8d8a8] Powered on the VM {{(pid=62519) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1784.672795] env[62519]: DEBUG nova.compute.manager [None req-e6f5d5ed-1e0c-44aa-9e18-eebaba20c50d tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] [instance: 8b20b91d-d4e7-4ec2-88ec-76b444a8d8a8] Checking state {{(pid=62519) _get_power_state /opt/stack/nova/nova/compute/manager.py:1799}} [ 1784.673639] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e063fddf-2140-494b-87ec-fa7a62d041d2 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1785.054451] env[62519]: DEBUG nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Instance 0dc95d5e-270c-44cf-b13a-d9ecd0cc3b17 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62519) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1785.195474] env[62519]: DEBUG nova.network.neutron [None req-c521a60d-baf9-42df-b80a-31718554c485 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] [instance: ee4b10ba-1c56-47cf-a528-d6e65c286ddb] Updating instance_info_cache with network_info: [{"id": "99b49b4e-aba5-450d-a2db-3b35a0313a5b", "address": "fa:16:3e:1f:a9:ee", "network": {"id": "8d760b11-0e90-40f8-a7a6-509485520bf3", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-619921944-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "448555031bb64aefafd0fcc67f4df10a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4225eb1f-0af4-4ed4-8e3d-de822eb6d4ea", "external-id": "nsx-vlan-transportzone-40", "segmentation_id": 40, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap99b49b4e-ab", "ovs_interfaceid": "99b49b4e-aba5-450d-a2db-3b35a0313a5b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1785.281337] env[62519]: DEBUG nova.compute.manager [req-ff8e5411-1837-4456-b00a-5f6b50604e61 req-99d92c2b-2f71-45a4-9790-efb135b75df2 service nova] [instance: 4e7db12e-c7f7-4d2a-b797-1371fc839a9e] Received event network-vif-deleted-76d28a50-96fe-4a32-b58f-7f17862055da {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1785.281536] env[62519]: INFO 
nova.compute.manager [req-ff8e5411-1837-4456-b00a-5f6b50604e61 req-99d92c2b-2f71-45a4-9790-efb135b75df2 service nova] [instance: 4e7db12e-c7f7-4d2a-b797-1371fc839a9e] Neutron deleted interface 76d28a50-96fe-4a32-b58f-7f17862055da; detaching it from the instance and deleting it from the info cache [ 1785.281747] env[62519]: DEBUG nova.network.neutron [req-ff8e5411-1837-4456-b00a-5f6b50604e61 req-99d92c2b-2f71-45a4-9790-efb135b75df2 service nova] [instance: 4e7db12e-c7f7-4d2a-b797-1371fc839a9e] Updating instance_info_cache with network_info: [] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1785.444010] env[62519]: DEBUG oslo_concurrency.lockutils [None req-34394a41-2f5a-4a60-841d-89a226332d22 tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] Acquiring lock "71c6bd0c-40da-44eb-8f37-4f1b7fb0f0a2" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1785.444341] env[62519]: DEBUG oslo_concurrency.lockutils [None req-34394a41-2f5a-4a60-841d-89a226332d22 tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] Lock "71c6bd0c-40da-44eb-8f37-4f1b7fb0f0a2" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1785.521014] env[62519]: DEBUG nova.compute.manager [None req-13b523f6-ff81-4d94-906d-b58b94706344 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] [instance: 358ee402-1112-4eea-a518-a45a6bf92c31] Start spawning the instance on the hypervisor. {{(pid=62519) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2647}} [ 1785.528032] env[62519]: DEBUG nova.network.neutron [-] [instance: 4e7db12e-c7f7-4d2a-b797-1371fc839a9e] Updating instance_info_cache with network_info: [] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1785.558258] env[62519]: DEBUG nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Instance 10bfd4ac-6f11-4c96-87a0-ce74bc1193c4 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62519) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1785.560569] env[62519]: DEBUG nova.virt.hardware [None req-13b523f6-ff81-4d94-906d-b58b94706344 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T07:56:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T07:55:55Z,direct_url=,disk_format='vmdk',id=15793716-f1d9-4a86-9030-717adf498693,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4069337684d348e0a1ab4eb6a1a2b14d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T07:55:56Z,virtual_size=,visibility=), allow threads: False {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1785.560811] env[62519]: DEBUG nova.virt.hardware [None req-13b523f6-ff81-4d94-906d-b58b94706344 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Flavor limits 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1785.560966] env[62519]: DEBUG nova.virt.hardware [None req-13b523f6-ff81-4d94-906d-b58b94706344 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Image limits 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1785.561187] env[62519]: DEBUG nova.virt.hardware [None req-13b523f6-ff81-4d94-906d-b58b94706344 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Flavor pref 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1785.561341] env[62519]: DEBUG nova.virt.hardware [None req-13b523f6-ff81-4d94-906d-b58b94706344 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Image pref 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1785.561481] env[62519]: DEBUG nova.virt.hardware [None req-13b523f6-ff81-4d94-906d-b58b94706344 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1785.561706] env[62519]: DEBUG nova.virt.hardware [None req-13b523f6-ff81-4d94-906d-b58b94706344 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1785.561832] env[62519]: DEBUG nova.virt.hardware [None req-13b523f6-ff81-4d94-906d-b58b94706344 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62519) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1785.561995] env[62519]: DEBUG nova.virt.hardware [None req-13b523f6-ff81-4d94-906d-b58b94706344 tempest-ServersTestJSON-810763036 
tempest-ServersTestJSON-810763036-project-member] Got 1 possible topologies {{(pid=62519) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1785.562848] env[62519]: DEBUG nova.virt.hardware [None req-13b523f6-ff81-4d94-906d-b58b94706344 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1785.563951] env[62519]: DEBUG nova.virt.hardware [None req-13b523f6-ff81-4d94-906d-b58b94706344 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1785.564256] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52c046bc-80ad-41c3-87db-a2b1128d10a5 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1785.577242] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34b899e6-9912-4824-b4d3-8286f92c8916 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1785.697064] env[62519]: DEBUG oslo_concurrency.lockutils [None req-c521a60d-baf9-42df-b80a-31718554c485 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Releasing lock "refresh_cache-ee4b10ba-1c56-47cf-a528-d6e65c286ddb" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1785.787023] env[62519]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-8d02fe28-6d31-4f2f-9285-68f3b4cf8a13 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1785.796497] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1badd82-08a2-4369-90c0-6ccac005ed6a {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1785.833744] env[62519]: DEBUG nova.compute.manager [req-ff8e5411-1837-4456-b00a-5f6b50604e61 req-99d92c2b-2f71-45a4-9790-efb135b75df2 service nova] [instance: 4e7db12e-c7f7-4d2a-b797-1371fc839a9e] Detach interface failed, port_id=76d28a50-96fe-4a32-b58f-7f17862055da, reason: Instance 4e7db12e-c7f7-4d2a-b797-1371fc839a9e could not be found. {{(pid=62519) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11455}} [ 1785.947807] env[62519]: DEBUG nova.compute.manager [None req-34394a41-2f5a-4a60-841d-89a226332d22 tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] [instance: 71c6bd0c-40da-44eb-8f37-4f1b7fb0f0a2] Starting instance... {{(pid=62519) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2441}} [ 1786.034683] env[62519]: INFO nova.compute.manager [-] [instance: 4e7db12e-c7f7-4d2a-b797-1371fc839a9e] Took 1.48 seconds to deallocate network for instance. 
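The req-2e346fd1 resource-tracker entries threaded through this section (walking every instance and deciding whether its placement allocation needs healing) are driven by Nova's periodic task machinery, which sits on top of oslo.service's periodic_task module. The following minimal sketch, assuming a hypothetical DemoManager class with invented _heal_allocations/_run_pending_deletes placeholders, shows how that decorator-driven pattern is typically wired up; it is illustrative only, not Nova's actual ComputeManager code.

from oslo_config import cfg
from oslo_service import periodic_task

CONF = cfg.CONF


class DemoManager(periodic_task.PeriodicTasks):
    """Hypothetical manager illustrating the oslo.service periodic-task pattern."""

    def __init__(self):
        super().__init__(CONF)

    @periodic_task.periodic_task(spacing=60)
    def _heal_allocations(self, context):
        # Placeholder for work like the allocation-heal pass seen in this log.
        pass

    @periodic_task.periodic_task(spacing=600, run_immediately=False)
    def _run_pending_deletes(self, context):
        # Placeholder for periodic cleanup of soft-deleted instances.
        pass


# A service loop normally calls run_periodic_tasks() on an interval; each due
# task produces a "Running periodic task ..." DEBUG line like the ones in this log.
manager = DemoManager()
manager.run_periodic_tasks(context=None, raise_on_error=False)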
[ 1786.070381] env[62519]: DEBUG nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Instance 2bc8f11e-82fc-4acb-945e-15327c133920 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62519) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1786.070381] env[62519]: DEBUG nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Migration 2ff5e489-a567-4b94-8bed-d596558da36f is active on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62519) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1712}} [ 1786.070381] env[62519]: DEBUG nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Instance 5da884af-d8d2-409b-99bd-e5370e44e9f0 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=62519) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1786.290663] env[62519]: DEBUG nova.network.neutron [None req-13b523f6-ff81-4d94-906d-b58b94706344 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] [instance: 358ee402-1112-4eea-a518-a45a6bf92c31] Successfully updated port: 87d0022f-848b-4f80-b7c3-e234bf681457 {{(pid=62519) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1786.475388] env[62519]: DEBUG oslo_concurrency.lockutils [None req-34394a41-2f5a-4a60-841d-89a226332d22 tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1786.541167] env[62519]: DEBUG oslo_concurrency.lockutils [None req-a98c3ed1-4d3d-4762-b887-532cdd3075e3 tempest-ServerPasswordTestJSON-1830489623 tempest-ServerPasswordTestJSON-1830489623-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1786.574759] env[62519]: DEBUG nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Instance 1c4615c7-d145-4529-98bd-1ae3ed51e1b5 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62519) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1786.575126] env[62519]: DEBUG nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Total usable vcpus: 48, total allocated vcpus: 14 {{(pid=62519) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1786.575286] env[62519]: DEBUG nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=3200MB phys_disk=200GB used_disk=13GB total_vcpus=48 used_vcpus=14 pci_stats=[] {{(pid=62519) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1786.793710] env[62519]: DEBUG oslo_concurrency.lockutils [None req-13b523f6-ff81-4d94-906d-b58b94706344 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Acquiring lock "refresh_cache-358ee402-1112-4eea-a518-a45a6bf92c31" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1786.793710] env[62519]: DEBUG oslo_concurrency.lockutils [None req-13b523f6-ff81-4d94-906d-b58b94706344 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Acquired lock "refresh_cache-358ee402-1112-4eea-a518-a45a6bf92c31" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1786.794411] env[62519]: DEBUG nova.network.neutron [None req-13b523f6-ff81-4d94-906d-b58b94706344 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] [instance: 358ee402-1112-4eea-a518-a45a6bf92c31] Building network info cache for instance {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1786.862308] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f09c9783-c228-4478-b0cd-6626be1ac08f {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1786.870053] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd9b5c14-587f-4664-ae41-c1acee46419a {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1786.904704] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4dc2da5-1142-4418-ba10-da6ed7442d39 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1786.913518] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41093207-b35e-4ef1-9d6d-b5aacba19593 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1786.928242] env[62519]: DEBUG nova.compute.provider_tree [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Inventory has not changed in ProviderTree for provider: f8ca0d98-9158-4b85-ae0e-b106f966dd44 {{(pid=62519) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1787.238175] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-c521a60d-baf9-42df-b80a-31718554c485 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] [instance: ee4b10ba-1c56-47cf-a528-d6e65c286ddb] Powering off the VM {{(pid=62519) 
power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1787.238579] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c2fa48fd-8fb2-4233-ba7d-1547e3481ee1 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1787.247275] env[62519]: DEBUG oslo_vmware.api [None req-c521a60d-baf9-42df-b80a-31718554c485 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Waiting for the task: (returnval){ [ 1787.247275] env[62519]: value = "task-1802968" [ 1787.247275] env[62519]: _type = "Task" [ 1787.247275] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1787.256159] env[62519]: DEBUG oslo_vmware.api [None req-c521a60d-baf9-42df-b80a-31718554c485 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Task: {'id': task-1802968, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1787.327131] env[62519]: DEBUG nova.compute.manager [req-3d95e042-fb84-4878-ae52-22ef673b6d33 req-574cd40c-b796-4657-b6f3-aad5b657d166 service nova] [instance: 358ee402-1112-4eea-a518-a45a6bf92c31] Received event network-vif-plugged-87d0022f-848b-4f80-b7c3-e234bf681457 {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1787.327412] env[62519]: DEBUG oslo_concurrency.lockutils [req-3d95e042-fb84-4878-ae52-22ef673b6d33 req-574cd40c-b796-4657-b6f3-aad5b657d166 service nova] Acquiring lock "358ee402-1112-4eea-a518-a45a6bf92c31-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1787.327600] env[62519]: DEBUG oslo_concurrency.lockutils [req-3d95e042-fb84-4878-ae52-22ef673b6d33 req-574cd40c-b796-4657-b6f3-aad5b657d166 service nova] Lock "358ee402-1112-4eea-a518-a45a6bf92c31-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1787.327777] env[62519]: DEBUG oslo_concurrency.lockutils [req-3d95e042-fb84-4878-ae52-22ef673b6d33 req-574cd40c-b796-4657-b6f3-aad5b657d166 service nova] Lock "358ee402-1112-4eea-a518-a45a6bf92c31-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1787.328264] env[62519]: DEBUG nova.compute.manager [req-3d95e042-fb84-4878-ae52-22ef673b6d33 req-574cd40c-b796-4657-b6f3-aad5b657d166 service nova] [instance: 358ee402-1112-4eea-a518-a45a6bf92c31] No waiting events found dispatching network-vif-plugged-87d0022f-848b-4f80-b7c3-e234bf681457 {{(pid=62519) pop_instance_event /opt/stack/nova/nova/compute/manager.py:323}} [ 1787.328264] env[62519]: WARNING nova.compute.manager [req-3d95e042-fb84-4878-ae52-22ef673b6d33 req-574cd40c-b796-4657-b6f3-aad5b657d166 service nova] [instance: 358ee402-1112-4eea-a518-a45a6bf92c31] Received unexpected event network-vif-plugged-87d0022f-848b-4f80-b7c3-e234bf681457 for instance with vm_state building and task_state spawning. 
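The "Acquiring lock" / "acquired ... waited" / "released ... held" lines, such as the 358ee402-...-events lock taken by _pop_event just above, come from oslo.concurrency's lockutils wrappers. The sketch below, with invented claim_resources/refresh_network_cache functions and lock names borrowed from the log purely as examples, shows the two common forms (decorator and context manager); it is a minimal illustration under those assumptions, not the actual Nova code paths.

from oslo_concurrency import lockutils


@lockutils.synchronized('compute_resources')
def claim_resources(instance_uuid):
    # Serialized against every other caller using the same lock name; the
    # decorator's wrapper logs "acquired ... waited" / "released ... held" lines.
    print('claiming resources for %s' % instance_uuid)


def refresh_network_cache(instance_uuid):
    # Context-manager form; entering and leaving the block logs the
    # "Acquiring lock ..." / "Releasing lock ..." style messages.
    with lockutils.lock('refresh_cache-%s' % instance_uuid):
        print('rebuilding network info cache for %s' % instance_uuid)


claim_resources('358ee402-1112-4eea-a518-a45a6bf92c31')
refresh_network_cache('358ee402-1112-4eea-a518-a45a6bf92c31')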
[ 1787.328264] env[62519]: DEBUG nova.compute.manager [req-3d95e042-fb84-4878-ae52-22ef673b6d33 req-574cd40c-b796-4657-b6f3-aad5b657d166 service nova] [instance: 358ee402-1112-4eea-a518-a45a6bf92c31] Received event network-changed-87d0022f-848b-4f80-b7c3-e234bf681457 {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1787.328449] env[62519]: DEBUG nova.compute.manager [req-3d95e042-fb84-4878-ae52-22ef673b6d33 req-574cd40c-b796-4657-b6f3-aad5b657d166 service nova] [instance: 358ee402-1112-4eea-a518-a45a6bf92c31] Refreshing instance network info cache due to event network-changed-87d0022f-848b-4f80-b7c3-e234bf681457. {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11628}} [ 1787.328564] env[62519]: DEBUG oslo_concurrency.lockutils [req-3d95e042-fb84-4878-ae52-22ef673b6d33 req-574cd40c-b796-4657-b6f3-aad5b657d166 service nova] Acquiring lock "refresh_cache-358ee402-1112-4eea-a518-a45a6bf92c31" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1787.333761] env[62519]: DEBUG nova.network.neutron [None req-13b523f6-ff81-4d94-906d-b58b94706344 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] [instance: 358ee402-1112-4eea-a518-a45a6bf92c31] Instance cache missing network info. {{(pid=62519) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1787.431386] env[62519]: DEBUG nova.scheduler.client.report [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Inventory has not changed for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1787.474648] env[62519]: DEBUG nova.network.neutron [None req-13b523f6-ff81-4d94-906d-b58b94706344 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] [instance: 358ee402-1112-4eea-a518-a45a6bf92c31] Updating instance_info_cache with network_info: [{"id": "87d0022f-848b-4f80-b7c3-e234bf681457", "address": "fa:16:3e:11:a1:ae", "network": {"id": "8af40fa9-af04-4837-8508-5d0e5b7f1d9c", "bridge": "br-int", "label": "tempest-ServersTestJSON-1824239670-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0e755fb5a6e94068b6c99b1638081f5f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7ab8d568-adb0-4f3b-b6cc-68413e6546ae", "external-id": "nsx-vlan-transportzone-86", "segmentation_id": 86, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap87d0022f-84", "ovs_interfaceid": "87d0022f-848b-4f80-b7c3-e234bf681457", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] 
{{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1787.758085] env[62519]: DEBUG oslo_vmware.api [None req-c521a60d-baf9-42df-b80a-31718554c485 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Task: {'id': task-1802968, 'name': PowerOffVM_Task, 'duration_secs': 0.249451} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1787.758430] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-c521a60d-baf9-42df-b80a-31718554c485 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] [instance: ee4b10ba-1c56-47cf-a528-d6e65c286ddb] Powered off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1787.759212] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6208d33-ddbb-4f1a-a31c-b1ae72b3b086 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1787.776521] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-424f5dc2-0773-4c64-8c65-fb9e998a8d24 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1787.809781] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-c521a60d-baf9-42df-b80a-31718554c485 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] [instance: ee4b10ba-1c56-47cf-a528-d6e65c286ddb] Powering off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1787.810090] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d05cc304-f7fb-4484-98b3-ceb09ad5ba20 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1787.817563] env[62519]: DEBUG oslo_vmware.api [None req-c521a60d-baf9-42df-b80a-31718554c485 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Waiting for the task: (returnval){ [ 1787.817563] env[62519]: value = "task-1802969" [ 1787.817563] env[62519]: _type = "Task" [ 1787.817563] env[62519]: } to complete. 
{{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1787.828236] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-c521a60d-baf9-42df-b80a-31718554c485 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] [instance: ee4b10ba-1c56-47cf-a528-d6e65c286ddb] VM already powered off {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1787.828433] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-c521a60d-baf9-42df-b80a-31718554c485 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] [instance: ee4b10ba-1c56-47cf-a528-d6e65c286ddb] Processing image 15793716-f1d9-4a86-9030-717adf498693 {{(pid=62519) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1787.828694] env[62519]: DEBUG oslo_concurrency.lockutils [None req-c521a60d-baf9-42df-b80a-31718554c485 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1787.828886] env[62519]: DEBUG oslo_concurrency.lockutils [None req-c521a60d-baf9-42df-b80a-31718554c485 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Acquired lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1787.829082] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-c521a60d-baf9-42df-b80a-31718554c485 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62519) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1787.829319] env[62519]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-981e742d-046d-4c78-8cfe-204e9bd4b4e5 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1787.837855] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-c521a60d-baf9-42df-b80a-31718554c485 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62519) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1787.838036] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-c521a60d-baf9-42df-b80a-31718554c485 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62519) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1787.838728] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ed78a19e-c066-4f45-b633-2204637747ad {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1787.843951] env[62519]: DEBUG oslo_vmware.api [None req-c521a60d-baf9-42df-b80a-31718554c485 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Waiting for the task: (returnval){ [ 1787.843951] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]52cd1218-c8c4-b60e-b8c3-a157f694c1db" [ 1787.843951] env[62519]: _type = "Task" [ 1787.843951] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1787.852150] env[62519]: DEBUG oslo_vmware.api [None req-c521a60d-baf9-42df-b80a-31718554c485 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52cd1218-c8c4-b60e-b8c3-a157f694c1db, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1787.936639] env[62519]: DEBUG nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62519) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1787.937007] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 4.455s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1787.937306] env[62519]: DEBUG oslo_concurrency.lockutils [None req-aadb616d-bf15-4f6c-b422-c092eb205601 tempest-ServersTestManualDisk-231377638 tempest-ServersTestManualDisk-231377638-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 15.297s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1787.940034] env[62519]: INFO nova.compute.claims [None req-aadb616d-bf15-4f6c-b422-c092eb205601 tempest-ServersTestManualDisk-231377638 tempest-ServersTestManualDisk-231377638-project-member] [instance: 0dc95d5e-270c-44cf-b13a-d9ecd0cc3b17] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1787.942855] env[62519]: DEBUG oslo_service.periodic_task [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=62519) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1787.942998] env[62519]: DEBUG nova.compute.manager [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Cleaning up deleted instances {{(pid=62519) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11720}} [ 1787.977870] env[62519]: DEBUG oslo_concurrency.lockutils [None req-13b523f6-ff81-4d94-906d-b58b94706344 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Releasing lock "refresh_cache-358ee402-1112-4eea-a518-a45a6bf92c31" 
{{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1787.978257] env[62519]: DEBUG nova.compute.manager [None req-13b523f6-ff81-4d94-906d-b58b94706344 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] [instance: 358ee402-1112-4eea-a518-a45a6bf92c31] Instance network_info: |[{"id": "87d0022f-848b-4f80-b7c3-e234bf681457", "address": "fa:16:3e:11:a1:ae", "network": {"id": "8af40fa9-af04-4837-8508-5d0e5b7f1d9c", "bridge": "br-int", "label": "tempest-ServersTestJSON-1824239670-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0e755fb5a6e94068b6c99b1638081f5f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7ab8d568-adb0-4f3b-b6cc-68413e6546ae", "external-id": "nsx-vlan-transportzone-86", "segmentation_id": 86, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap87d0022f-84", "ovs_interfaceid": "87d0022f-848b-4f80-b7c3-e234bf681457", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62519) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2004}} [ 1787.978587] env[62519]: DEBUG oslo_concurrency.lockutils [req-3d95e042-fb84-4878-ae52-22ef673b6d33 req-574cd40c-b796-4657-b6f3-aad5b657d166 service nova] Acquired lock "refresh_cache-358ee402-1112-4eea-a518-a45a6bf92c31" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1787.978806] env[62519]: DEBUG nova.network.neutron [req-3d95e042-fb84-4878-ae52-22ef673b6d33 req-574cd40c-b796-4657-b6f3-aad5b657d166 service nova] [instance: 358ee402-1112-4eea-a518-a45a6bf92c31] Refreshing network info cache for port 87d0022f-848b-4f80-b7c3-e234bf681457 {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1787.980142] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-13b523f6-ff81-4d94-906d-b58b94706344 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] [instance: 358ee402-1112-4eea-a518-a45a6bf92c31] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:11:a1:ae', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '7ab8d568-adb0-4f3b-b6cc-68413e6546ae', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '87d0022f-848b-4f80-b7c3-e234bf681457', 'vif_model': 'vmxnet3'}] {{(pid=62519) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1787.987459] env[62519]: DEBUG oslo.service.loopingcall [None req-13b523f6-ff81-4d94-906d-b58b94706344 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62519) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1787.989243] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 358ee402-1112-4eea-a518-a45a6bf92c31] Creating VM on the ESX host {{(pid=62519) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1787.989468] env[62519]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a7665b1c-b27e-4ac4-9ad0-5aec94cba6ce {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1788.010500] env[62519]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1788.010500] env[62519]: value = "task-1802970" [ 1788.010500] env[62519]: _type = "Task" [ 1788.010500] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1788.019708] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1802970, 'name': CreateVM_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1788.356941] env[62519]: DEBUG oslo_vmware.api [None req-c521a60d-baf9-42df-b80a-31718554c485 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52cd1218-c8c4-b60e-b8c3-a157f694c1db, 'name': SearchDatastore_Task, 'duration_secs': 0.009563} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1788.357959] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-db6f11ca-6905-4b9c-b6c3-aa8b9117ebcc {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1788.365643] env[62519]: DEBUG oslo_vmware.api [None req-c521a60d-baf9-42df-b80a-31718554c485 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Waiting for the task: (returnval){ [ 1788.365643] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]528a59f4-d786-6bbd-1b3c-657f6b7b6e50" [ 1788.365643] env[62519]: _type = "Task" [ 1788.365643] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1788.375514] env[62519]: DEBUG oslo_vmware.api [None req-c521a60d-baf9-42df-b80a-31718554c485 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]528a59f4-d786-6bbd-1b3c-657f6b7b6e50, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1788.462437] env[62519]: DEBUG nova.compute.manager [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] There are 59 instances to clean {{(pid=62519) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11729}} [ 1788.462646] env[62519]: DEBUG nova.compute.manager [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] [instance: af422ca1-7966-4bed-97bf-2b4c5285eaab] Instance has had 0 of 5 cleanup attempts {{(pid=62519) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11733}} [ 1788.521899] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1802970, 'name': CreateVM_Task, 'duration_secs': 0.471369} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1788.522133] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 358ee402-1112-4eea-a518-a45a6bf92c31] Created VM on the ESX host {{(pid=62519) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1788.522891] env[62519]: DEBUG oslo_concurrency.lockutils [None req-13b523f6-ff81-4d94-906d-b58b94706344 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1788.523111] env[62519]: DEBUG oslo_concurrency.lockutils [None req-13b523f6-ff81-4d94-906d-b58b94706344 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Acquired lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1788.523452] env[62519]: DEBUG oslo_concurrency.lockutils [None req-13b523f6-ff81-4d94-906d-b58b94706344 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1788.526210] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b5e52d0c-3782-418e-b0ea-345f2b723a63 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1788.533704] env[62519]: DEBUG oslo_vmware.api [None req-13b523f6-ff81-4d94-906d-b58b94706344 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Waiting for the task: (returnval){ [ 1788.533704] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]52a23693-b79e-1752-7223-15c90bc27790" [ 1788.533704] env[62519]: _type = "Task" [ 1788.533704] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1788.546951] env[62519]: DEBUG oslo_vmware.api [None req-13b523f6-ff81-4d94-906d-b58b94706344 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52a23693-b79e-1752-7223-15c90bc27790, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1788.733564] env[62519]: DEBUG nova.network.neutron [req-3d95e042-fb84-4878-ae52-22ef673b6d33 req-574cd40c-b796-4657-b6f3-aad5b657d166 service nova] [instance: 358ee402-1112-4eea-a518-a45a6bf92c31] Updated VIF entry in instance network info cache for port 87d0022f-848b-4f80-b7c3-e234bf681457. 
{{(pid=62519) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1788.734296] env[62519]: DEBUG nova.network.neutron [req-3d95e042-fb84-4878-ae52-22ef673b6d33 req-574cd40c-b796-4657-b6f3-aad5b657d166 service nova] [instance: 358ee402-1112-4eea-a518-a45a6bf92c31] Updating instance_info_cache with network_info: [{"id": "87d0022f-848b-4f80-b7c3-e234bf681457", "address": "fa:16:3e:11:a1:ae", "network": {"id": "8af40fa9-af04-4837-8508-5d0e5b7f1d9c", "bridge": "br-int", "label": "tempest-ServersTestJSON-1824239670-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0e755fb5a6e94068b6c99b1638081f5f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7ab8d568-adb0-4f3b-b6cc-68413e6546ae", "external-id": "nsx-vlan-transportzone-86", "segmentation_id": 86, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap87d0022f-84", "ovs_interfaceid": "87d0022f-848b-4f80-b7c3-e234bf681457", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1788.754697] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4af64bb9-c917-427d-90fd-7e5aa2b0eb19 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1788.763780] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df7769cc-b642-4f57-896a-1bf50a9bddf3 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1788.795733] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75df052f-2c3e-4524-ad34-17915bf07127 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1788.803608] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88fdf311-cb90-48fc-afa1-7a2ec63cdd38 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1788.816841] env[62519]: DEBUG nova.compute.provider_tree [None req-aadb616d-bf15-4f6c-b422-c092eb205601 tempest-ServersTestManualDisk-231377638 tempest-ServersTestManualDisk-231377638-project-member] Inventory has not changed in ProviderTree for provider: f8ca0d98-9158-4b85-ae0e-b106f966dd44 {{(pid=62519) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1788.876845] env[62519]: DEBUG oslo_vmware.api [None req-c521a60d-baf9-42df-b80a-31718554c485 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]528a59f4-d786-6bbd-1b3c-657f6b7b6e50, 'name': SearchDatastore_Task, 'duration_secs': 0.027887} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1788.877139] env[62519]: DEBUG oslo_concurrency.lockutils [None req-c521a60d-baf9-42df-b80a-31718554c485 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Releasing lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1788.877401] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-c521a60d-baf9-42df-b80a-31718554c485 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Copying virtual disk from [datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk to [datastore1] ee4b10ba-1c56-47cf-a528-d6e65c286ddb/15793716-f1d9-4a86-9030-717adf498693-rescue.vmdk. {{(pid=62519) disk_copy /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:281}} [ 1788.877655] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-7fb72e41-caf4-466f-b254-a5ff445fa310 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1788.885212] env[62519]: DEBUG oslo_vmware.api [None req-c521a60d-baf9-42df-b80a-31718554c485 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Waiting for the task: (returnval){ [ 1788.885212] env[62519]: value = "task-1802971" [ 1788.885212] env[62519]: _type = "Task" [ 1788.885212] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1788.894272] env[62519]: DEBUG oslo_vmware.api [None req-c521a60d-baf9-42df-b80a-31718554c485 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Task: {'id': task-1802971, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1788.971348] env[62519]: DEBUG nova.compute.manager [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] [instance: 76786353-f93f-4e7e-b3f7-7f22ae4b7b41] Instance has had 0 of 5 cleanup attempts {{(pid=62519) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11733}} [ 1789.044706] env[62519]: DEBUG oslo_vmware.api [None req-13b523f6-ff81-4d94-906d-b58b94706344 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52a23693-b79e-1752-7223-15c90bc27790, 'name': SearchDatastore_Task, 'duration_secs': 0.015463} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1789.045019] env[62519]: DEBUG oslo_concurrency.lockutils [None req-13b523f6-ff81-4d94-906d-b58b94706344 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Releasing lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1789.045288] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-13b523f6-ff81-4d94-906d-b58b94706344 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] [instance: 358ee402-1112-4eea-a518-a45a6bf92c31] Processing image 15793716-f1d9-4a86-9030-717adf498693 {{(pid=62519) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1789.045542] env[62519]: DEBUG oslo_concurrency.lockutils [None req-13b523f6-ff81-4d94-906d-b58b94706344 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1789.045696] env[62519]: DEBUG oslo_concurrency.lockutils [None req-13b523f6-ff81-4d94-906d-b58b94706344 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Acquired lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1789.045875] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-13b523f6-ff81-4d94-906d-b58b94706344 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62519) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1789.046159] env[62519]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-66efa140-3505-4e14-8ad4-7e96eb333edd {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1789.055078] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-13b523f6-ff81-4d94-906d-b58b94706344 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62519) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1789.055387] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-13b523f6-ff81-4d94-906d-b58b94706344 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62519) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1789.056384] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-433bddb7-315f-4b68-925d-ef451402f961 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1789.064701] env[62519]: DEBUG oslo_vmware.api [None req-13b523f6-ff81-4d94-906d-b58b94706344 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Waiting for the task: (returnval){ [ 1789.064701] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]529c1f47-a566-7e0d-6ca6-afc87917b16b" [ 1789.064701] env[62519]: _type = "Task" [ 1789.064701] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1789.074064] env[62519]: DEBUG oslo_vmware.api [None req-13b523f6-ff81-4d94-906d-b58b94706344 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]529c1f47-a566-7e0d-6ca6-afc87917b16b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1789.237970] env[62519]: DEBUG oslo_concurrency.lockutils [req-3d95e042-fb84-4878-ae52-22ef673b6d33 req-574cd40c-b796-4657-b6f3-aad5b657d166 service nova] Releasing lock "refresh_cache-358ee402-1112-4eea-a518-a45a6bf92c31" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1789.320532] env[62519]: DEBUG nova.scheduler.client.report [None req-aadb616d-bf15-4f6c-b422-c092eb205601 tempest-ServersTestManualDisk-231377638 tempest-ServersTestManualDisk-231377638-project-member] Inventory has not changed for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1789.399070] env[62519]: DEBUG oslo_vmware.api [None req-c521a60d-baf9-42df-b80a-31718554c485 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Task: {'id': task-1802971, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.513066} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1789.399554] env[62519]: INFO nova.virt.vmwareapi.ds_util [None req-c521a60d-baf9-42df-b80a-31718554c485 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Copied virtual disk from [datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk to [datastore1] ee4b10ba-1c56-47cf-a528-d6e65c286ddb/15793716-f1d9-4a86-9030-717adf498693-rescue.vmdk. 
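The CopyVirtualDisk_Task entries above follow the same shape as every vCenter task in this log: submit the task, log "Waiting for the task ... to complete", poll progress, then record "completed successfully" with a duration. Below is a minimal, self-contained sketch of that poll-until-done pattern. TaskStub and wait_for_task are hypothetical stand-ins written for illustration only; the real polling is done by oslo_vmware.api against vCenter task objects, not by this code.

# Illustrative only: a stand-in for the "Waiting for the task ... to complete" /
# "progress is N%" / "completed successfully" pattern seen in the log above.
# TaskStub and wait_for_task are hypothetical, not the oslo.vmware API.
import time


class TaskStub:
    """Fake long-running task that advances a little on every poll."""

    def __init__(self, name, steps=5):
        self.name = name
        self._steps = steps
        self._done = 0

    def poll(self):
        self._done = min(self._done + 1, self._steps)
        progress = int(100 * self._done / self._steps)
        state = "success" if self._done == self._steps else "running"
        return state, progress


def wait_for_task(task, interval=0.5):
    """Poll until the task reports success, logging progress along the way."""
    start = time.monotonic()
    while True:
        state, progress = task.poll()
        print(f"Task {task.name}: {state}, progress {progress}%")
        if state == "success":
            return time.monotonic() - start
        time.sleep(interval)


if __name__ == "__main__":
    duration = wait_for_task(TaskStub("CopyVirtualDisk_Task"), interval=0.1)
    print(f"completed successfully in {duration:.3f}s")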
[ 1789.400576] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87ca8c00-6784-4ef8-8904-a3f96f99a7f2 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1789.428543] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-c521a60d-baf9-42df-b80a-31718554c485 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] [instance: ee4b10ba-1c56-47cf-a528-d6e65c286ddb] Reconfiguring VM instance instance-00000053 to attach disk [datastore1] ee4b10ba-1c56-47cf-a528-d6e65c286ddb/15793716-f1d9-4a86-9030-717adf498693-rescue.vmdk or device None with type thin {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1789.428763] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2da22afe-a614-41be-ad8c-ceb5c6076d84 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1789.447825] env[62519]: DEBUG oslo_vmware.api [None req-c521a60d-baf9-42df-b80a-31718554c485 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Waiting for the task: (returnval){ [ 1789.447825] env[62519]: value = "task-1802972" [ 1789.447825] env[62519]: _type = "Task" [ 1789.447825] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1789.457180] env[62519]: DEBUG oslo_vmware.api [None req-c521a60d-baf9-42df-b80a-31718554c485 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Task: {'id': task-1802972, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1789.474599] env[62519]: DEBUG nova.compute.manager [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] [instance: c8816718-0a35-4474-b162-c619b0acc154] Instance has had 0 of 5 cleanup attempts {{(pid=62519) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11733}} [ 1789.576204] env[62519]: DEBUG oslo_vmware.api [None req-13b523f6-ff81-4d94-906d-b58b94706344 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]529c1f47-a566-7e0d-6ca6-afc87917b16b, 'name': SearchDatastore_Task, 'duration_secs': 0.072415} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1789.577065] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ad037e0b-05a6-45f6-a129-6772fa258468 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1789.582782] env[62519]: DEBUG oslo_vmware.api [None req-13b523f6-ff81-4d94-906d-b58b94706344 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Waiting for the task: (returnval){ [ 1789.582782] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]52df11b5-bad5-c3f0-9b14-7d5795da236e" [ 1789.582782] env[62519]: _type = "Task" [ 1789.582782] env[62519]: } to complete. 
{{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1789.591462] env[62519]: DEBUG oslo_vmware.api [None req-13b523f6-ff81-4d94-906d-b58b94706344 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52df11b5-bad5-c3f0-9b14-7d5795da236e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1789.826035] env[62519]: DEBUG oslo_concurrency.lockutils [None req-aadb616d-bf15-4f6c-b422-c092eb205601 tempest-ServersTestManualDisk-231377638 tempest-ServersTestManualDisk-231377638-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 1.889s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1789.826608] env[62519]: DEBUG nova.compute.manager [None req-aadb616d-bf15-4f6c-b422-c092eb205601 tempest-ServersTestManualDisk-231377638 tempest-ServersTestManualDisk-231377638-project-member] [instance: 0dc95d5e-270c-44cf-b13a-d9ecd0cc3b17] Start building networks asynchronously for instance. {{(pid=62519) _build_resources /opt/stack/nova/nova/compute/manager.py:2838}} [ 1789.829441] env[62519]: DEBUG oslo_concurrency.lockutils [None req-093eb8f0-6a4b-4135-a0a0-fa19e327e07c tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 16.897s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1789.830878] env[62519]: INFO nova.compute.claims [None req-093eb8f0-6a4b-4135-a0a0-fa19e327e07c tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] [instance: 10bfd4ac-6f11-4c96-87a0-ce74bc1193c4] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1789.959031] env[62519]: DEBUG oslo_vmware.api [None req-c521a60d-baf9-42df-b80a-31718554c485 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Task: {'id': task-1802972, 'name': ReconfigVM_Task, 'duration_secs': 0.329566} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1789.959437] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-c521a60d-baf9-42df-b80a-31718554c485 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] [instance: ee4b10ba-1c56-47cf-a528-d6e65c286ddb] Reconfigured VM instance instance-00000053 to attach disk [datastore1] ee4b10ba-1c56-47cf-a528-d6e65c286ddb/15793716-f1d9-4a86-9030-717adf498693-rescue.vmdk or device None with type thin {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1789.960284] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7b611a7-3465-41a4-9ce4-d657014799f0 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1789.981413] env[62519]: DEBUG nova.compute.manager [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] [instance: dd60afd6-2834-4fca-a846-e39d57aabd60] Instance has had 0 of 5 cleanup attempts {{(pid=62519) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11733}} [ 1789.989610] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4d535523-f4c2-46bf-9ba1-19699a37807a {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1790.008122] env[62519]: DEBUG oslo_vmware.api [None req-c521a60d-baf9-42df-b80a-31718554c485 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Waiting for the task: (returnval){ [ 1790.008122] env[62519]: value = "task-1802973" [ 1790.008122] env[62519]: _type = "Task" [ 1790.008122] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1790.016984] env[62519]: DEBUG oslo_vmware.api [None req-c521a60d-baf9-42df-b80a-31718554c485 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Task: {'id': task-1802973, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1790.094134] env[62519]: DEBUG oslo_vmware.api [None req-13b523f6-ff81-4d94-906d-b58b94706344 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52df11b5-bad5-c3f0-9b14-7d5795da236e, 'name': SearchDatastore_Task, 'duration_secs': 0.010418} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1790.094418] env[62519]: DEBUG oslo_concurrency.lockutils [None req-13b523f6-ff81-4d94-906d-b58b94706344 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Releasing lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1790.094698] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-13b523f6-ff81-4d94-906d-b58b94706344 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk to [datastore1] 358ee402-1112-4eea-a518-a45a6bf92c31/358ee402-1112-4eea-a518-a45a6bf92c31.vmdk {{(pid=62519) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1790.094978] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-2cafd540-f181-44eb-99b4-2bd876c0c9d5 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1790.101982] env[62519]: DEBUG oslo_vmware.api [None req-13b523f6-ff81-4d94-906d-b58b94706344 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Waiting for the task: (returnval){ [ 1790.101982] env[62519]: value = "task-1802974" [ 1790.101982] env[62519]: _type = "Task" [ 1790.101982] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1790.111926] env[62519]: DEBUG oslo_vmware.api [None req-13b523f6-ff81-4d94-906d-b58b94706344 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Task: {'id': task-1802974, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1790.337192] env[62519]: DEBUG nova.compute.utils [None req-aadb616d-bf15-4f6c-b422-c092eb205601 tempest-ServersTestManualDisk-231377638 tempest-ServersTestManualDisk-231377638-project-member] Using /dev/sd instead of None {{(pid=62519) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1790.343892] env[62519]: DEBUG nova.compute.manager [None req-aadb616d-bf15-4f6c-b422-c092eb205601 tempest-ServersTestManualDisk-231377638 tempest-ServersTestManualDisk-231377638-project-member] [instance: 0dc95d5e-270c-44cf-b13a-d9ecd0cc3b17] Allocating IP information in the background. 
{{(pid=62519) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1989}} [ 1790.344365] env[62519]: DEBUG nova.network.neutron [None req-aadb616d-bf15-4f6c-b422-c092eb205601 tempest-ServersTestManualDisk-231377638 tempest-ServersTestManualDisk-231377638-project-member] [instance: 0dc95d5e-270c-44cf-b13a-d9ecd0cc3b17] allocate_for_instance() {{(pid=62519) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1790.389315] env[62519]: DEBUG nova.policy [None req-aadb616d-bf15-4f6c-b422-c092eb205601 tempest-ServersTestManualDisk-231377638 tempest-ServersTestManualDisk-231377638-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '320ab8e6ce2e49fd9d5ca96fb2297209', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '1ce3ea0129bd4299bec8bdb2bf2bd096', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62519) authorize /opt/stack/nova/nova/policy.py:192}} [ 1790.485310] env[62519]: DEBUG nova.compute.manager [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] [instance: 156ed02a-3365-4a4f-b4de-ea86920d3baf] Instance has had 0 of 5 cleanup attempts {{(pid=62519) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11733}} [ 1790.518948] env[62519]: DEBUG oslo_vmware.api [None req-c521a60d-baf9-42df-b80a-31718554c485 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Task: {'id': task-1802973, 'name': ReconfigVM_Task, 'duration_secs': 0.171278} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1790.519285] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-c521a60d-baf9-42df-b80a-31718554c485 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] [instance: ee4b10ba-1c56-47cf-a528-d6e65c286ddb] Powering on the VM {{(pid=62519) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1790.519551] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-673e6252-fe6f-4ede-afbe-ac0200d33b39 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1790.528334] env[62519]: DEBUG oslo_vmware.api [None req-c521a60d-baf9-42df-b80a-31718554c485 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Waiting for the task: (returnval){ [ 1790.528334] env[62519]: value = "task-1802975" [ 1790.528334] env[62519]: _type = "Task" [ 1790.528334] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1790.538021] env[62519]: DEBUG oslo_vmware.api [None req-c521a60d-baf9-42df-b80a-31718554c485 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Task: {'id': task-1802975, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1790.615065] env[62519]: DEBUG oslo_vmware.api [None req-13b523f6-ff81-4d94-906d-b58b94706344 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Task: {'id': task-1802974, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.509804} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1790.615737] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-13b523f6-ff81-4d94-906d-b58b94706344 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk to [datastore1] 358ee402-1112-4eea-a518-a45a6bf92c31/358ee402-1112-4eea-a518-a45a6bf92c31.vmdk {{(pid=62519) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1790.616063] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-13b523f6-ff81-4d94-906d-b58b94706344 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] [instance: 358ee402-1112-4eea-a518-a45a6bf92c31] Extending root virtual disk to 1048576 {{(pid=62519) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1790.616258] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-9e4fab23-5b3a-447c-ae23-0201e1f54e5f {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1790.624193] env[62519]: DEBUG oslo_vmware.api [None req-13b523f6-ff81-4d94-906d-b58b94706344 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Waiting for the task: (returnval){ [ 1790.624193] env[62519]: value = "task-1802976" [ 1790.624193] env[62519]: _type = "Task" [ 1790.624193] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1790.635492] env[62519]: DEBUG oslo_vmware.api [None req-13b523f6-ff81-4d94-906d-b58b94706344 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Task: {'id': task-1802976, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1790.708159] env[62519]: DEBUG nova.network.neutron [None req-aadb616d-bf15-4f6c-b422-c092eb205601 tempest-ServersTestManualDisk-231377638 tempest-ServersTestManualDisk-231377638-project-member] [instance: 0dc95d5e-270c-44cf-b13a-d9ecd0cc3b17] Successfully created port: b2132242-f6ec-4f0b-a6ef-d49e2d0db056 {{(pid=62519) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1790.845067] env[62519]: DEBUG nova.compute.manager [None req-aadb616d-bf15-4f6c-b422-c092eb205601 tempest-ServersTestManualDisk-231377638 tempest-ServersTestManualDisk-231377638-project-member] [instance: 0dc95d5e-270c-44cf-b13a-d9ecd0cc3b17] Start building block device mappings for instance. 
{{(pid=62519) _build_resources /opt/stack/nova/nova/compute/manager.py:2873}} [ 1790.988510] env[62519]: DEBUG nova.compute.manager [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] [instance: e3a19583-b332-40e3-bdd0-d254f7a78b0a] Instance has had 0 of 5 cleanup attempts {{(pid=62519) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11733}} [ 1791.038971] env[62519]: DEBUG oslo_vmware.api [None req-c521a60d-baf9-42df-b80a-31718554c485 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Task: {'id': task-1802975, 'name': PowerOnVM_Task, 'duration_secs': 0.468725} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1791.041345] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-c521a60d-baf9-42df-b80a-31718554c485 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] [instance: ee4b10ba-1c56-47cf-a528-d6e65c286ddb] Powered on the VM {{(pid=62519) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1791.043884] env[62519]: DEBUG nova.compute.manager [None req-c521a60d-baf9-42df-b80a-31718554c485 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] [instance: ee4b10ba-1c56-47cf-a528-d6e65c286ddb] Checking state {{(pid=62519) _get_power_state /opt/stack/nova/nova/compute/manager.py:1799}} [ 1791.044660] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7ca5e49-4c81-41c8-beb7-3e864e944edc {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1791.135730] env[62519]: DEBUG oslo_vmware.api [None req-13b523f6-ff81-4d94-906d-b58b94706344 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Task: {'id': task-1802976, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.073507} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1791.136242] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-13b523f6-ff81-4d94-906d-b58b94706344 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] [instance: 358ee402-1112-4eea-a518-a45a6bf92c31] Extended root virtual disk {{(pid=62519) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1791.137569] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b48e6e85-5297-401a-97ab-bcb03beab2e3 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1791.140849] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1815eb6c-ae45-4c38-8533-6f0f335ded5f {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1791.157983] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e5c4a5a8-7783-4a4a-9f40-c8dfcb91c2da {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1791.170309] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-13b523f6-ff81-4d94-906d-b58b94706344 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] [instance: 358ee402-1112-4eea-a518-a45a6bf92c31] Reconfiguring VM instance instance-00000058 to attach disk [datastore1] 358ee402-1112-4eea-a518-a45a6bf92c31/358ee402-1112-4eea-a518-a45a6bf92c31.vmdk or device None with type sparse {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1791.170644] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-835f1e9e-3cc6-44b4-b3b0-cb31cdf377ba {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1791.193420] env[62519]: DEBUG oslo_vmware.api [None req-13b523f6-ff81-4d94-906d-b58b94706344 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Waiting for the task: (returnval){ [ 1791.193420] env[62519]: value = "task-1802977" [ 1791.193420] env[62519]: _type = "Task" [ 1791.193420] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1791.221776] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54859b55-e66d-4663-a510-fe8a7afb6410 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1791.233106] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dabab5a7-6f92-427f-8d76-2945ec6fa4ca {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1791.237251] env[62519]: DEBUG oslo_vmware.api [None req-13b523f6-ff81-4d94-906d-b58b94706344 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Task: {'id': task-1802977, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1791.248150] env[62519]: DEBUG nova.compute.provider_tree [None req-093eb8f0-6a4b-4135-a0a0-fa19e327e07c tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Updating inventory in ProviderTree for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 157, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1791.492585] env[62519]: DEBUG nova.compute.manager [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] [instance: d710c97b-a2fd-4a54-baaa-ec7664895ce7] Instance has had 0 of 5 cleanup attempts {{(pid=62519) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11733}} [ 1791.728056] env[62519]: DEBUG oslo_vmware.api [None req-13b523f6-ff81-4d94-906d-b58b94706344 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Task: {'id': task-1802977, 'name': ReconfigVM_Task, 'duration_secs': 0.333975} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1791.728277] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-13b523f6-ff81-4d94-906d-b58b94706344 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] [instance: 358ee402-1112-4eea-a518-a45a6bf92c31] Reconfigured VM instance instance-00000058 to attach disk [datastore1] 358ee402-1112-4eea-a518-a45a6bf92c31/358ee402-1112-4eea-a518-a45a6bf92c31.vmdk or device None with type sparse {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1791.728945] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-0b162a34-2734-454f-9eb8-dd1c108838af {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1791.736228] env[62519]: DEBUG oslo_vmware.api [None req-13b523f6-ff81-4d94-906d-b58b94706344 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Waiting for the task: (returnval){ [ 1791.736228] env[62519]: value = "task-1802978" [ 1791.736228] env[62519]: _type = "Task" [ 1791.736228] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1791.744456] env[62519]: DEBUG oslo_vmware.api [None req-13b523f6-ff81-4d94-906d-b58b94706344 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Task: {'id': task-1802978, 'name': Rename_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1791.768782] env[62519]: ERROR nova.scheduler.client.report [None req-093eb8f0-6a4b-4135-a0a0-fa19e327e07c tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] [req-2297dcc5-f3d9-4a2b-8870-363a487a01d1] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 157, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID f8ca0d98-9158-4b85-ae0e-b106f966dd44. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-2297dcc5-f3d9-4a2b-8870-363a487a01d1"}]} [ 1791.785065] env[62519]: DEBUG nova.scheduler.client.report [None req-093eb8f0-6a4b-4135-a0a0-fa19e327e07c tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Refreshing inventories for resource provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 {{(pid=62519) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 1791.799235] env[62519]: DEBUG nova.scheduler.client.report [None req-093eb8f0-6a4b-4135-a0a0-fa19e327e07c tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Updating ProviderTree inventory for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 1791.799531] env[62519]: DEBUG nova.compute.provider_tree [None req-093eb8f0-6a4b-4135-a0a0-fa19e327e07c tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Updating inventory in ProviderTree for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1791.810684] env[62519]: DEBUG nova.scheduler.client.report [None req-093eb8f0-6a4b-4135-a0a0-fa19e327e07c tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Refreshing aggregate associations for resource provider f8ca0d98-9158-4b85-ae0e-b106f966dd44, aggregates: None {{(pid=62519) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 1791.829855] env[62519]: DEBUG nova.scheduler.client.report [None req-093eb8f0-6a4b-4135-a0a0-fa19e327e07c 
tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Refreshing trait associations for resource provider f8ca0d98-9158-4b85-ae0e-b106f966dd44, traits: COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NODE,COMPUTE_SAME_HOST_COLD_MIGRATE,HW_ARCH_X86_64,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_VMDK {{(pid=62519) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 1791.858463] env[62519]: DEBUG nova.compute.manager [None req-aadb616d-bf15-4f6c-b422-c092eb205601 tempest-ServersTestManualDisk-231377638 tempest-ServersTestManualDisk-231377638-project-member] [instance: 0dc95d5e-270c-44cf-b13a-d9ecd0cc3b17] Start spawning the instance on the hypervisor. {{(pid=62519) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2647}} [ 1791.886652] env[62519]: DEBUG nova.virt.hardware [None req-aadb616d-bf15-4f6c-b422-c092eb205601 tempest-ServersTestManualDisk-231377638 tempest-ServersTestManualDisk-231377638-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T07:56:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T07:55:55Z,direct_url=,disk_format='vmdk',id=15793716-f1d9-4a86-9030-717adf498693,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4069337684d348e0a1ab4eb6a1a2b14d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T07:55:56Z,virtual_size=,visibility=), allow threads: False {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1791.886796] env[62519]: DEBUG nova.virt.hardware [None req-aadb616d-bf15-4f6c-b422-c092eb205601 tempest-ServersTestManualDisk-231377638 tempest-ServersTestManualDisk-231377638-project-member] Flavor limits 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1791.886899] env[62519]: DEBUG nova.virt.hardware [None req-aadb616d-bf15-4f6c-b422-c092eb205601 tempest-ServersTestManualDisk-231377638 tempest-ServersTestManualDisk-231377638-project-member] Image limits 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1791.887096] env[62519]: DEBUG nova.virt.hardware [None req-aadb616d-bf15-4f6c-b422-c092eb205601 tempest-ServersTestManualDisk-231377638 tempest-ServersTestManualDisk-231377638-project-member] Flavor pref 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1791.887246] env[62519]: DEBUG nova.virt.hardware [None req-aadb616d-bf15-4f6c-b422-c092eb205601 tempest-ServersTestManualDisk-231377638 tempest-ServersTestManualDisk-231377638-project-member] Image pref 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1791.887391] env[62519]: DEBUG nova.virt.hardware [None req-aadb616d-bf15-4f6c-b422-c092eb205601 tempest-ServersTestManualDisk-231377638 tempest-ServersTestManualDisk-231377638-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1791.887822] env[62519]: DEBUG nova.virt.hardware [None 
req-aadb616d-bf15-4f6c-b422-c092eb205601 tempest-ServersTestManualDisk-231377638 tempest-ServersTestManualDisk-231377638-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1791.887822] env[62519]: DEBUG nova.virt.hardware [None req-aadb616d-bf15-4f6c-b422-c092eb205601 tempest-ServersTestManualDisk-231377638 tempest-ServersTestManualDisk-231377638-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62519) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1791.888082] env[62519]: DEBUG nova.virt.hardware [None req-aadb616d-bf15-4f6c-b422-c092eb205601 tempest-ServersTestManualDisk-231377638 tempest-ServersTestManualDisk-231377638-project-member] Got 1 possible topologies {{(pid=62519) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1791.888082] env[62519]: DEBUG nova.virt.hardware [None req-aadb616d-bf15-4f6c-b422-c092eb205601 tempest-ServersTestManualDisk-231377638 tempest-ServersTestManualDisk-231377638-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1791.888247] env[62519]: DEBUG nova.virt.hardware [None req-aadb616d-bf15-4f6c-b422-c092eb205601 tempest-ServersTestManualDisk-231377638 tempest-ServersTestManualDisk-231377638-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1791.889182] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80ef07e9-33c6-43a3-80b6-83b4d356676f {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1791.901027] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2515002-1d6b-4d6c-8d43-ce8ddde85516 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1791.995858] env[62519]: DEBUG nova.compute.manager [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] [instance: 0789b142-4712-4b7a-9197-c3689f24df7c] Instance has had 0 of 5 cleanup attempts {{(pid=62519) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11733}} [ 1792.103161] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-defee139-2a8f-44a9-be2d-9accc47de111 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1792.112335] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32f08a0d-2dbe-454c-a0f4-b5494bc88d94 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1792.144555] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4899ce20-0167-4ff1-b43d-a4516c6ff13c {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1792.154239] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87d87a9d-8cca-48cb-b276-642dba35afb5 {{(pid=62519) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1792.171991] env[62519]: DEBUG nova.compute.provider_tree [None req-093eb8f0-6a4b-4135-a0a0-fa19e327e07c tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Updating inventory in ProviderTree for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 157, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1792.248588] env[62519]: DEBUG oslo_vmware.api [None req-13b523f6-ff81-4d94-906d-b58b94706344 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Task: {'id': task-1802978, 'name': Rename_Task, 'duration_secs': 0.153479} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1792.248948] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-13b523f6-ff81-4d94-906d-b58b94706344 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] [instance: 358ee402-1112-4eea-a518-a45a6bf92c31] Powering on the VM {{(pid=62519) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1792.249219] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-36740ca3-5738-4dc6-a8d7-7784a3ef74bb {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1792.256445] env[62519]: DEBUG oslo_vmware.api [None req-13b523f6-ff81-4d94-906d-b58b94706344 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Waiting for the task: (returnval){ [ 1792.256445] env[62519]: value = "task-1802979" [ 1792.256445] env[62519]: _type = "Task" [ 1792.256445] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1792.264391] env[62519]: DEBUG oslo_vmware.api [None req-13b523f6-ff81-4d94-906d-b58b94706344 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Task: {'id': task-1802979, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1792.318038] env[62519]: DEBUG nova.compute.manager [req-7f06648a-7b75-4ff5-9137-efd5873854e2 req-61997dfc-0086-4ce8-9ee5-d1211e7a65c8 service nova] [instance: 0dc95d5e-270c-44cf-b13a-d9ecd0cc3b17] Received event network-vif-plugged-b2132242-f6ec-4f0b-a6ef-d49e2d0db056 {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1792.318242] env[62519]: DEBUG oslo_concurrency.lockutils [req-7f06648a-7b75-4ff5-9137-efd5873854e2 req-61997dfc-0086-4ce8-9ee5-d1211e7a65c8 service nova] Acquiring lock "0dc95d5e-270c-44cf-b13a-d9ecd0cc3b17-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1792.318450] env[62519]: DEBUG oslo_concurrency.lockutils [req-7f06648a-7b75-4ff5-9137-efd5873854e2 req-61997dfc-0086-4ce8-9ee5-d1211e7a65c8 service nova] Lock "0dc95d5e-270c-44cf-b13a-d9ecd0cc3b17-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1792.318614] env[62519]: DEBUG oslo_concurrency.lockutils [req-7f06648a-7b75-4ff5-9137-efd5873854e2 req-61997dfc-0086-4ce8-9ee5-d1211e7a65c8 service nova] Lock "0dc95d5e-270c-44cf-b13a-d9ecd0cc3b17-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1792.318845] env[62519]: DEBUG nova.compute.manager [req-7f06648a-7b75-4ff5-9137-efd5873854e2 req-61997dfc-0086-4ce8-9ee5-d1211e7a65c8 service nova] [instance: 0dc95d5e-270c-44cf-b13a-d9ecd0cc3b17] No waiting events found dispatching network-vif-plugged-b2132242-f6ec-4f0b-a6ef-d49e2d0db056 {{(pid=62519) pop_instance_event /opt/stack/nova/nova/compute/manager.py:323}} [ 1792.319173] env[62519]: WARNING nova.compute.manager [req-7f06648a-7b75-4ff5-9137-efd5873854e2 req-61997dfc-0086-4ce8-9ee5-d1211e7a65c8 service nova] [instance: 0dc95d5e-270c-44cf-b13a-d9ecd0cc3b17] Received unexpected event network-vif-plugged-b2132242-f6ec-4f0b-a6ef-d49e2d0db056 for instance with vm_state building and task_state spawning. 
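The WARNING just above ("Received unexpected event network-vif-plugged-... No waiting events found") reflects Nova's external-event handshake: the compute manager registers a waiter for an expected event (here, the VIF plug from Neutron) and pops it when the event arrives; if the event lands before any waiter is registered, it is dispatched with a warning instead. The snippet below is a toy registry illustrating that pattern only. InstanceEventsStub is hypothetical; the real coordination lives in nova.compute.manager.InstanceEvents.

# Illustrative only: a toy waiter registry showing the pattern behind
# "No waiting events found dispatching network-vif-plugged-...".
# InstanceEventsStub is hypothetical, not Nova's implementation.
import threading


class InstanceEventsStub:
    def __init__(self):
        self._lock = threading.Lock()
        self._waiters = {}  # (instance_uuid, event_name) -> threading.Event

    def prepare_for(self, instance_uuid, event_name):
        """Register interest in an event before triggering the external action."""
        waiter = threading.Event()
        with self._lock:
            self._waiters[(instance_uuid, event_name)] = waiter
        return waiter

    def pop_event(self, instance_uuid, event_name):
        """Deliver an external event; warn when nobody is waiting for it."""
        with self._lock:
            waiter = self._waiters.pop((instance_uuid, event_name), None)
        if waiter is None:
            print(f"WARNING: unexpected event {event_name} for {instance_uuid}")
            return False
        waiter.set()
        return True


if __name__ == "__main__":
    events = InstanceEventsStub()
    uuid = "0dc95d5e-270c-44cf-b13a-d9ecd0cc3b17"
    # Event arrives before a waiter is registered -> logged as unexpected.
    events.pop_event(uuid, "network-vif-plugged-b2132242")
    # Normal flow: register first, then the event releases the waiter.
    waiter = events.prepare_for(uuid, "network-vif-plugged-b2132242")
    events.pop_event(uuid, "network-vif-plugged-b2132242")
    print("waiter released:", waiter.is_set())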
[ 1792.424383] env[62519]: DEBUG nova.network.neutron [None req-aadb616d-bf15-4f6c-b422-c092eb205601 tempest-ServersTestManualDisk-231377638 tempest-ServersTestManualDisk-231377638-project-member] [instance: 0dc95d5e-270c-44cf-b13a-d9ecd0cc3b17] Successfully updated port: b2132242-f6ec-4f0b-a6ef-d49e2d0db056 {{(pid=62519) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1792.500966] env[62519]: DEBUG nova.compute.manager [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] [instance: 37689ec5-4bc8-4361-9e43-00529e2b5538] Instance has had 0 of 5 cleanup attempts {{(pid=62519) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11733}} [ 1792.523888] env[62519]: INFO nova.compute.manager [None req-f108ee23-58f0-4a51-812f-2782fcb74054 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] [instance: ee4b10ba-1c56-47cf-a528-d6e65c286ddb] Unrescuing [ 1792.524015] env[62519]: DEBUG oslo_concurrency.lockutils [None req-f108ee23-58f0-4a51-812f-2782fcb74054 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Acquiring lock "refresh_cache-ee4b10ba-1c56-47cf-a528-d6e65c286ddb" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1792.524189] env[62519]: DEBUG oslo_concurrency.lockutils [None req-f108ee23-58f0-4a51-812f-2782fcb74054 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Acquired lock "refresh_cache-ee4b10ba-1c56-47cf-a528-d6e65c286ddb" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1792.524359] env[62519]: DEBUG nova.network.neutron [None req-f108ee23-58f0-4a51-812f-2782fcb74054 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] [instance: ee4b10ba-1c56-47cf-a528-d6e65c286ddb] Building network info cache for instance {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1792.716769] env[62519]: DEBUG nova.scheduler.client.report [None req-093eb8f0-6a4b-4135-a0a0-fa19e327e07c tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Updated inventory for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 with generation 126 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 157, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 1792.717221] env[62519]: DEBUG nova.compute.provider_tree [None req-093eb8f0-6a4b-4135-a0a0-fa19e327e07c tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Updating resource provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 generation from 126 to 127 during operation: update_inventory {{(pid=62519) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1792.717548] env[62519]: DEBUG nova.compute.provider_tree [None req-093eb8f0-6a4b-4135-a0a0-fa19e327e07c tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Updating 
inventory in ProviderTree for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 157, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1792.766581] env[62519]: DEBUG oslo_vmware.api [None req-13b523f6-ff81-4d94-906d-b58b94706344 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Task: {'id': task-1802979, 'name': PowerOnVM_Task, 'duration_secs': 0.47046} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1792.766916] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-13b523f6-ff81-4d94-906d-b58b94706344 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] [instance: 358ee402-1112-4eea-a518-a45a6bf92c31] Powered on the VM {{(pid=62519) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1792.767147] env[62519]: INFO nova.compute.manager [None req-13b523f6-ff81-4d94-906d-b58b94706344 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] [instance: 358ee402-1112-4eea-a518-a45a6bf92c31] Took 7.25 seconds to spawn the instance on the hypervisor. [ 1792.767318] env[62519]: DEBUG nova.compute.manager [None req-13b523f6-ff81-4d94-906d-b58b94706344 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] [instance: 358ee402-1112-4eea-a518-a45a6bf92c31] Checking state {{(pid=62519) _get_power_state /opt/stack/nova/nova/compute/manager.py:1799}} [ 1792.768151] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8386280-9bce-490c-8cd3-fc23cd176ff3 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1792.927470] env[62519]: DEBUG oslo_concurrency.lockutils [None req-aadb616d-bf15-4f6c-b422-c092eb205601 tempest-ServersTestManualDisk-231377638 tempest-ServersTestManualDisk-231377638-project-member] Acquiring lock "refresh_cache-0dc95d5e-270c-44cf-b13a-d9ecd0cc3b17" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1792.927697] env[62519]: DEBUG oslo_concurrency.lockutils [None req-aadb616d-bf15-4f6c-b422-c092eb205601 tempest-ServersTestManualDisk-231377638 tempest-ServersTestManualDisk-231377638-project-member] Acquired lock "refresh_cache-0dc95d5e-270c-44cf-b13a-d9ecd0cc3b17" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1792.927847] env[62519]: DEBUG nova.network.neutron [None req-aadb616d-bf15-4f6c-b422-c092eb205601 tempest-ServersTestManualDisk-231377638 tempest-ServersTestManualDisk-231377638-project-member] [instance: 0dc95d5e-270c-44cf-b13a-d9ecd0cc3b17] Building network info cache for instance {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1793.004169] env[62519]: DEBUG nova.compute.manager [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] [instance: 83a680b9-0c2d-4231-9ddf-9aa90209c620] Instance has had 0 of 5 cleanup attempts {{(pid=62519) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11733}} [ 
1793.223687] env[62519]: DEBUG oslo_concurrency.lockutils [None req-093eb8f0-6a4b-4135-a0a0-fa19e327e07c tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.394s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1793.224236] env[62519]: DEBUG nova.compute.manager [None req-093eb8f0-6a4b-4135-a0a0-fa19e327e07c tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] [instance: 10bfd4ac-6f11-4c96-87a0-ce74bc1193c4] Start building networks asynchronously for instance. {{(pid=62519) _build_resources /opt/stack/nova/nova/compute/manager.py:2838}} [ 1793.228205] env[62519]: DEBUG nova.network.neutron [None req-f108ee23-58f0-4a51-812f-2782fcb74054 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] [instance: ee4b10ba-1c56-47cf-a528-d6e65c286ddb] Updating instance_info_cache with network_info: [{"id": "99b49b4e-aba5-450d-a2db-3b35a0313a5b", "address": "fa:16:3e:1f:a9:ee", "network": {"id": "8d760b11-0e90-40f8-a7a6-509485520bf3", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-619921944-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "448555031bb64aefafd0fcc67f4df10a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4225eb1f-0af4-4ed4-8e3d-de822eb6d4ea", "external-id": "nsx-vlan-transportzone-40", "segmentation_id": 40, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap99b49b4e-ab", "ovs_interfaceid": "99b49b4e-aba5-450d-a2db-3b35a0313a5b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1793.229773] env[62519]: DEBUG oslo_concurrency.lockutils [None req-58d8e09b-2ec4-4296-bd95-201b039f8f45 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 19.590s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1793.231669] env[62519]: INFO nova.compute.claims [None req-58d8e09b-2ec4-4296-bd95-201b039f8f45 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] [instance: 2bc8f11e-82fc-4acb-945e-15327c133920] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1793.286945] env[62519]: INFO nova.compute.manager [None req-13b523f6-ff81-4d94-906d-b58b94706344 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] [instance: 358ee402-1112-4eea-a518-a45a6bf92c31] Took 26.21 seconds to build instance. 
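The inventory dicts repeated above (and the 409 placement.concurrent_update that was resolved by refreshing the provider generation from 126 to 127 and retrying) are easier to read once translated into usable capacity, which Placement computes as (total - reserved) * allocation_ratio per resource class. The short sketch below does that arithmetic with the values copied from the log for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44; the capacity() helper is an illustration written here, not a Placement API.

# Illustrative only: usable capacity implied by the logged inventory,
# using capacity = (total - reserved) * allocation_ratio.
inventory = {
    "VCPU": {"total": 48, "reserved": 0, "allocation_ratio": 4.0},
    "MEMORY_MB": {"total": 196590, "reserved": 512, "allocation_ratio": 1.0},
    "DISK_GB": {"total": 400, "reserved": 0, "allocation_ratio": 1.0},
}


def capacity(inv):
    return {
        rc: (v["total"] - v["reserved"]) * v["allocation_ratio"]
        for rc, v in inv.items()
    }


if __name__ == "__main__":
    # VCPU: (48 - 0) * 4.0 = 192.0
    # MEMORY_MB: (196590 - 512) * 1.0 = 196078.0
    # DISK_GB: (400 - 0) * 1.0 = 400.0
    print(capacity(inventory))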
[ 1793.472819] env[62519]: DEBUG nova.network.neutron [None req-aadb616d-bf15-4f6c-b422-c092eb205601 tempest-ServersTestManualDisk-231377638 tempest-ServersTestManualDisk-231377638-project-member] [instance: 0dc95d5e-270c-44cf-b13a-d9ecd0cc3b17] Instance cache missing network info. {{(pid=62519) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1793.507349] env[62519]: DEBUG nova.compute.manager [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] [instance: 67424299-f100-49a1-ab73-0407b60a2d9f] Instance has had 0 of 5 cleanup attempts {{(pid=62519) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11733}} [ 1793.623261] env[62519]: DEBUG nova.network.neutron [None req-aadb616d-bf15-4f6c-b422-c092eb205601 tempest-ServersTestManualDisk-231377638 tempest-ServersTestManualDisk-231377638-project-member] [instance: 0dc95d5e-270c-44cf-b13a-d9ecd0cc3b17] Updating instance_info_cache with network_info: [{"id": "b2132242-f6ec-4f0b-a6ef-d49e2d0db056", "address": "fa:16:3e:8a:6a:7a", "network": {"id": "0f8b42bc-c22c-4b20-98b0-587026f270d6", "bridge": "br-int", "label": "tempest-ServersTestManualDisk-440100259-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1ce3ea0129bd4299bec8bdb2bf2bd096", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e7a0d5af-5be9-477a-837c-58ef55c717f4", "external-id": "nsx-vlan-transportzone-598", "segmentation_id": 598, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb2132242-f6", "ovs_interfaceid": "b2132242-f6ec-4f0b-a6ef-d49e2d0db056", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1793.736469] env[62519]: DEBUG nova.compute.utils [None req-093eb8f0-6a4b-4135-a0a0-fa19e327e07c tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Using /dev/sd instead of None {{(pid=62519) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1793.738497] env[62519]: DEBUG oslo_concurrency.lockutils [None req-f108ee23-58f0-4a51-812f-2782fcb74054 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Releasing lock "refresh_cache-ee4b10ba-1c56-47cf-a528-d6e65c286ddb" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1793.739288] env[62519]: DEBUG nova.objects.instance [None req-f108ee23-58f0-4a51-812f-2782fcb74054 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Lazy-loading 'flavor' on Instance uuid ee4b10ba-1c56-47cf-a528-d6e65c286ddb {{(pid=62519) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1793.742887] env[62519]: DEBUG nova.compute.manager [None req-093eb8f0-6a4b-4135-a0a0-fa19e327e07c tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] [instance: 10bfd4ac-6f11-4c96-87a0-ce74bc1193c4] 
Allocating IP information in the background. {{(pid=62519) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1989}} [ 1793.743019] env[62519]: DEBUG nova.network.neutron [None req-093eb8f0-6a4b-4135-a0a0-fa19e327e07c tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] [instance: 10bfd4ac-6f11-4c96-87a0-ce74bc1193c4] allocate_for_instance() {{(pid=62519) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1793.781712] env[62519]: DEBUG nova.policy [None req-093eb8f0-6a4b-4135-a0a0-fa19e327e07c tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '7eca5c7b079d4785941d68d7c51df5e0', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '63a46158057949478e5c79fbe0d4d5d4', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62519) authorize /opt/stack/nova/nova/policy.py:192}} [ 1793.911021] env[62519]: DEBUG oslo_concurrency.lockutils [None req-04c01760-a718-4b80-a003-ee7594a61827 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Acquiring lock "21daee7a-1788-4a1c-969e-65b696702bf2" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1793.911319] env[62519]: DEBUG oslo_concurrency.lockutils [None req-04c01760-a718-4b80-a003-ee7594a61827 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Lock "21daee7a-1788-4a1c-969e-65b696702bf2" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1794.013071] env[62519]: DEBUG nova.compute.manager [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] [instance: 1cf285cf-8b4c-4872-b179-72e38c0143e0] Instance has had 0 of 5 cleanup attempts {{(pid=62519) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11733}} [ 1794.071262] env[62519]: DEBUG nova.network.neutron [None req-093eb8f0-6a4b-4135-a0a0-fa19e327e07c tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] [instance: 10bfd4ac-6f11-4c96-87a0-ce74bc1193c4] Successfully created port: 037e7926-5eac-487b-a2fa-4124ead561df {{(pid=62519) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1794.126066] env[62519]: DEBUG oslo_concurrency.lockutils [None req-aadb616d-bf15-4f6c-b422-c092eb205601 tempest-ServersTestManualDisk-231377638 tempest-ServersTestManualDisk-231377638-project-member] Releasing lock "refresh_cache-0dc95d5e-270c-44cf-b13a-d9ecd0cc3b17" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1794.126422] env[62519]: DEBUG nova.compute.manager [None req-aadb616d-bf15-4f6c-b422-c092eb205601 tempest-ServersTestManualDisk-231377638 tempest-ServersTestManualDisk-231377638-project-member] [instance: 0dc95d5e-270c-44cf-b13a-d9ecd0cc3b17] Instance network_info: |[{"id": "b2132242-f6ec-4f0b-a6ef-d49e2d0db056", "address": 
"fa:16:3e:8a:6a:7a", "network": {"id": "0f8b42bc-c22c-4b20-98b0-587026f270d6", "bridge": "br-int", "label": "tempest-ServersTestManualDisk-440100259-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1ce3ea0129bd4299bec8bdb2bf2bd096", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e7a0d5af-5be9-477a-837c-58ef55c717f4", "external-id": "nsx-vlan-transportzone-598", "segmentation_id": 598, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb2132242-f6", "ovs_interfaceid": "b2132242-f6ec-4f0b-a6ef-d49e2d0db056", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62519) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2004}} [ 1794.126848] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-aadb616d-bf15-4f6c-b422-c092eb205601 tempest-ServersTestManualDisk-231377638 tempest-ServersTestManualDisk-231377638-project-member] [instance: 0dc95d5e-270c-44cf-b13a-d9ecd0cc3b17] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:8a:6a:7a', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'e7a0d5af-5be9-477a-837c-58ef55c717f4', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'b2132242-f6ec-4f0b-a6ef-d49e2d0db056', 'vif_model': 'vmxnet3'}] {{(pid=62519) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1794.134584] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-aadb616d-bf15-4f6c-b422-c092eb205601 tempest-ServersTestManualDisk-231377638 tempest-ServersTestManualDisk-231377638-project-member] Creating folder: Project (1ce3ea0129bd4299bec8bdb2bf2bd096). Parent ref: group-v373567. {{(pid=62519) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1794.134904] env[62519]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-649d4da3-76d3-4511-9aa3-1440c8903c59 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1794.148610] env[62519]: INFO nova.virt.vmwareapi.vm_util [None req-aadb616d-bf15-4f6c-b422-c092eb205601 tempest-ServersTestManualDisk-231377638 tempest-ServersTestManualDisk-231377638-project-member] Created folder: Project (1ce3ea0129bd4299bec8bdb2bf2bd096) in parent group-v373567. [ 1794.148885] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-aadb616d-bf15-4f6c-b422-c092eb205601 tempest-ServersTestManualDisk-231377638 tempest-ServersTestManualDisk-231377638-project-member] Creating folder: Instances. Parent ref: group-v373802. 
{{(pid=62519) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1794.149172] env[62519]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ecd57731-389d-492d-b0e3-1eb2b7c04f06 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1794.161184] env[62519]: INFO nova.virt.vmwareapi.vm_util [None req-aadb616d-bf15-4f6c-b422-c092eb205601 tempest-ServersTestManualDisk-231377638 tempest-ServersTestManualDisk-231377638-project-member] Created folder: Instances in parent group-v373802. [ 1794.161466] env[62519]: DEBUG oslo.service.loopingcall [None req-aadb616d-bf15-4f6c-b422-c092eb205601 tempest-ServersTestManualDisk-231377638 tempest-ServersTestManualDisk-231377638-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62519) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1794.161666] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0dc95d5e-270c-44cf-b13a-d9ecd0cc3b17] Creating VM on the ESX host {{(pid=62519) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1794.161874] env[62519]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-dd8a197d-18a1-4e33-9706-6bb6168c6b0a {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1794.184617] env[62519]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1794.184617] env[62519]: value = "task-1802982" [ 1794.184617] env[62519]: _type = "Task" [ 1794.184617] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1794.205215] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1802982, 'name': CreateVM_Task} progress is 6%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1794.244509] env[62519]: DEBUG nova.compute.manager [None req-093eb8f0-6a4b-4135-a0a0-fa19e327e07c tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] [instance: 10bfd4ac-6f11-4c96-87a0-ce74bc1193c4] Start building block device mappings for instance. 
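The "Instance VIF info" entry above shows the driver reducing a Neutron port from the network_info cache to the few fields the VMware layer needs: bridge name, MAC, an NSX OpaqueNetwork reference keyed by the logical-switch ID, the Neutron port UUID as iface_id, and the vmxnet3 model. A rough sketch of that mapping over a trimmed-down VIF like the ones logged above; build_vif_info is a made-up helper for illustration, not the vmops code:

    # Illustrative only: reduce one logged network_info VIF to the dict
    # shape shown in the "Instance VIF info" log line.
    def build_vif_info(vif, vif_model='vmxnet3'):
        details = vif['details']
        return {
            'network_name': vif['network']['bridge'],      # e.g. 'br-int'
            'mac_address': vif['address'],
            'network_ref': {
                'type': 'OpaqueNetwork',
                'network-id': details['nsx-logical-switch-id'],
                'network-type': 'nsx.LogicalSwitch',
                'use-external-id': True,
            },
            'iface_id': vif['id'],                          # Neutron port UUID
            'vif_model': vif_model,
        }

    vif = {
        'id': 'b2132242-f6ec-4f0b-a6ef-d49e2d0db056',
        'address': 'fa:16:3e:8a:6a:7a',
        'network': {'bridge': 'br-int'},
        'details': {'nsx-logical-switch-id': 'e7a0d5af-5be9-477a-837c-58ef55c717f4'},
    }
    print(build_vif_info(vif))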
{{(pid=62519) _build_resources /opt/stack/nova/nova/compute/manager.py:2873}} [ 1794.256699] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ed4911a-f47b-4b22-90bd-376a6334e59b {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1794.283368] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-f108ee23-58f0-4a51-812f-2782fcb74054 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] [instance: ee4b10ba-1c56-47cf-a528-d6e65c286ddb] Powering off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1794.283694] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-7cf3242b-7433-4e49-82c5-c1c5604a0f81 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1794.294254] env[62519]: DEBUG oslo_vmware.api [None req-f108ee23-58f0-4a51-812f-2782fcb74054 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Waiting for the task: (returnval){ [ 1794.294254] env[62519]: value = "task-1802983" [ 1794.294254] env[62519]: _type = "Task" [ 1794.294254] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1794.306505] env[62519]: DEBUG oslo_vmware.api [None req-f108ee23-58f0-4a51-812f-2782fcb74054 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Task: {'id': task-1802983, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1794.354033] env[62519]: DEBUG nova.compute.manager [req-33a21314-35b3-4e43-90e3-835903213e08 req-45229490-92c8-4cf3-b79e-b62a982cb065 service nova] [instance: 0dc95d5e-270c-44cf-b13a-d9ecd0cc3b17] Received event network-changed-b2132242-f6ec-4f0b-a6ef-d49e2d0db056 {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1794.354033] env[62519]: DEBUG nova.compute.manager [req-33a21314-35b3-4e43-90e3-835903213e08 req-45229490-92c8-4cf3-b79e-b62a982cb065 service nova] [instance: 0dc95d5e-270c-44cf-b13a-d9ecd0cc3b17] Refreshing instance network info cache due to event network-changed-b2132242-f6ec-4f0b-a6ef-d49e2d0db056. 
{{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11628}} [ 1794.354033] env[62519]: DEBUG oslo_concurrency.lockutils [req-33a21314-35b3-4e43-90e3-835903213e08 req-45229490-92c8-4cf3-b79e-b62a982cb065 service nova] Acquiring lock "refresh_cache-0dc95d5e-270c-44cf-b13a-d9ecd0cc3b17" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1794.354033] env[62519]: DEBUG oslo_concurrency.lockutils [req-33a21314-35b3-4e43-90e3-835903213e08 req-45229490-92c8-4cf3-b79e-b62a982cb065 service nova] Acquired lock "refresh_cache-0dc95d5e-270c-44cf-b13a-d9ecd0cc3b17" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1794.354352] env[62519]: DEBUG nova.network.neutron [req-33a21314-35b3-4e43-90e3-835903213e08 req-45229490-92c8-4cf3-b79e-b62a982cb065 service nova] [instance: 0dc95d5e-270c-44cf-b13a-d9ecd0cc3b17] Refreshing network info cache for port b2132242-f6ec-4f0b-a6ef-d49e2d0db056 {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1794.413856] env[62519]: DEBUG nova.compute.manager [None req-04c01760-a718-4b80-a003-ee7594a61827 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] [instance: 21daee7a-1788-4a1c-969e-65b696702bf2] Starting instance... {{(pid=62519) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2441}} [ 1794.516947] env[62519]: DEBUG nova.compute.manager [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] [instance: 54a8aa34-1595-4494-ba68-6915611631ce] Instance has had 0 of 5 cleanup attempts {{(pid=62519) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11733}} [ 1794.664596] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cfb4ebfc-e4af-400b-8255-ac84c02948a7 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1794.673112] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-efaa4ea9-88a6-4efb-9ee0-ff51b1bbccfc {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1794.710901] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1022d95-913b-4ba0-bc80-c6349b834722 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1794.722332] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-704d5600-79a8-415e-ade6-4643a104cd9f {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1794.726044] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1802982, 'name': CreateVM_Task, 'duration_secs': 0.421041} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1794.726218] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0dc95d5e-270c-44cf-b13a-d9ecd0cc3b17] Created VM on the ESX host {{(pid=62519) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1794.727261] env[62519]: DEBUG oslo_concurrency.lockutils [None req-aadb616d-bf15-4f6c-b422-c092eb205601 tempest-ServersTestManualDisk-231377638 tempest-ServersTestManualDisk-231377638-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1794.727794] env[62519]: DEBUG oslo_concurrency.lockutils [None req-aadb616d-bf15-4f6c-b422-c092eb205601 tempest-ServersTestManualDisk-231377638 tempest-ServersTestManualDisk-231377638-project-member] Acquired lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1794.727794] env[62519]: DEBUG oslo_concurrency.lockutils [None req-aadb616d-bf15-4f6c-b422-c092eb205601 tempest-ServersTestManualDisk-231377638 tempest-ServersTestManualDisk-231377638-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1794.727978] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0eeb2404-1898-468a-9ebc-f9b1a99ded55 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1794.737320] env[62519]: DEBUG nova.compute.provider_tree [None req-58d8e09b-2ec4-4296-bd95-201b039f8f45 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Inventory has not changed in ProviderTree for provider: f8ca0d98-9158-4b85-ae0e-b106f966dd44 {{(pid=62519) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1794.742505] env[62519]: DEBUG oslo_vmware.api [None req-aadb616d-bf15-4f6c-b422-c092eb205601 tempest-ServersTestManualDisk-231377638 tempest-ServersTestManualDisk-231377638-project-member] Waiting for the task: (returnval){ [ 1794.742505] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]524c859f-c352-fb44-98d6-d3beaf2defc7" [ 1794.742505] env[62519]: _type = "Task" [ 1794.742505] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1794.750780] env[62519]: DEBUG oslo_vmware.api [None req-aadb616d-bf15-4f6c-b422-c092eb205601 tempest-ServersTestManualDisk-231377638 tempest-ServersTestManualDisk-231377638-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]524c859f-c352-fb44-98d6-d3beaf2defc7, 'name': SearchDatastore_Task} progress is 0%. 
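The recurring "Waiting for the task ... to complete", "progress is N%.", and "completed successfully" lines (CreateVM_Task, SearchDatastore_Task, and so on) come from polling a vCenter task reference until it reaches a terminal state. A schematic version of that loop; get_task_state and the poll interval are assumptions for illustration and not the oslo.vmware API itself:

    import time

    POLL_INTERVAL = 0.5  # seconds; illustrative default

    def wait_for_task(get_task_state, task_ref, timeout=300):
        # get_task_state is a stand-in for whatever reads TaskInfo from
        # vCenter; assume it returns (state, progress, error).
        deadline = time.monotonic() + timeout
        while time.monotonic() < deadline:
            state, progress, error = get_task_state(task_ref)
            if state == 'success':
                return
            if state == 'error':
                raise RuntimeError('task %s failed: %s' % (task_ref, error))
            # 'queued' or 'running': report progress and poll again, which
            # is what produces the "progress is N%" lines above.
            print('Task %s progress is %s%%' % (task_ref, progress or 0))
            time.sleep(POLL_INTERVAL)
        raise TimeoutError('task %s did not complete in %ss' % (task_ref, timeout))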
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1794.794951] env[62519]: DEBUG oslo_concurrency.lockutils [None req-13b523f6-ff81-4d94-906d-b58b94706344 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Lock "358ee402-1112-4eea-a518-a45a6bf92c31" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 28.725s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1794.807630] env[62519]: DEBUG oslo_vmware.api [None req-f108ee23-58f0-4a51-812f-2782fcb74054 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Task: {'id': task-1802983, 'name': PowerOffVM_Task, 'duration_secs': 0.260933} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1794.807914] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-f108ee23-58f0-4a51-812f-2782fcb74054 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] [instance: ee4b10ba-1c56-47cf-a528-d6e65c286ddb] Powered off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1794.813188] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-f108ee23-58f0-4a51-812f-2782fcb74054 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] [instance: ee4b10ba-1c56-47cf-a528-d6e65c286ddb] Reconfiguring VM instance instance-00000053 to detach disk 2001 {{(pid=62519) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1794.813487] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-962678f5-ff53-43b3-a55c-128d7dcab3ac {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1794.833856] env[62519]: DEBUG oslo_vmware.api [None req-f108ee23-58f0-4a51-812f-2782fcb74054 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Waiting for the task: (returnval){ [ 1794.833856] env[62519]: value = "task-1802984" [ 1794.833856] env[62519]: _type = "Task" [ 1794.833856] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1794.842699] env[62519]: DEBUG oslo_vmware.api [None req-f108ee23-58f0-4a51-812f-2782fcb74054 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Task: {'id': task-1802984, 'name': ReconfigVM_Task} progress is 5%. 
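The Acquiring/acquired/"released" lock lines (the per-instance lock around _locked_do_build_and_run_instance held 28.725s above, or the "compute_resources" lock held by the resource tracker) are produced by oslo.concurrency's lockutils whenever a critical section is wrapped in one of its locks; the library logs the wait and hold times at DEBUG. The usual usage patterns look roughly like this, with the lock names taken from the log and the bodies left as placeholders:

    from oslo_concurrency import lockutils

    # Context-manager form: serialize resource-tracker bookkeeping, as with
    # the "compute_resources" lock in the log above.
    def claim_resources(do_claim):
        with lockutils.lock('compute_resources'):
            do_claim()   # placeholder for the actual claim bookkeeping

    # Decorator form: one lock per instance UUID so only one build or delete
    # runs against a given instance at a time.
    def locked_per_instance(instance_uuid):
        @lockutils.synchronized(instance_uuid)
        def _locked_do_build_and_run_instance():
            pass         # placeholder for the build steps
        return _locked_do_build_and_run_instance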
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1794.938645] env[62519]: DEBUG oslo_concurrency.lockutils [None req-04c01760-a718-4b80-a003-ee7594a61827 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1795.022902] env[62519]: DEBUG nova.compute.manager [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] [instance: ad0af10d-5063-4344-b12f-1d3ee9ea1090] Instance has had 0 of 5 cleanup attempts {{(pid=62519) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11733}} [ 1795.062358] env[62519]: DEBUG nova.network.neutron [req-33a21314-35b3-4e43-90e3-835903213e08 req-45229490-92c8-4cf3-b79e-b62a982cb065 service nova] [instance: 0dc95d5e-270c-44cf-b13a-d9ecd0cc3b17] Updated VIF entry in instance network info cache for port b2132242-f6ec-4f0b-a6ef-d49e2d0db056. {{(pid=62519) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1795.062731] env[62519]: DEBUG nova.network.neutron [req-33a21314-35b3-4e43-90e3-835903213e08 req-45229490-92c8-4cf3-b79e-b62a982cb065 service nova] [instance: 0dc95d5e-270c-44cf-b13a-d9ecd0cc3b17] Updating instance_info_cache with network_info: [{"id": "b2132242-f6ec-4f0b-a6ef-d49e2d0db056", "address": "fa:16:3e:8a:6a:7a", "network": {"id": "0f8b42bc-c22c-4b20-98b0-587026f270d6", "bridge": "br-int", "label": "tempest-ServersTestManualDisk-440100259-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1ce3ea0129bd4299bec8bdb2bf2bd096", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e7a0d5af-5be9-477a-837c-58ef55c717f4", "external-id": "nsx-vlan-transportzone-598", "segmentation_id": 598, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb2132242-f6", "ovs_interfaceid": "b2132242-f6ec-4f0b-a6ef-d49e2d0db056", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1795.240303] env[62519]: DEBUG nova.scheduler.client.report [None req-58d8e09b-2ec4-4296-bd95-201b039f8f45 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Inventory has not changed for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 157, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1795.257631] env[62519]: DEBUG oslo_vmware.api [None req-aadb616d-bf15-4f6c-b422-c092eb205601 
tempest-ServersTestManualDisk-231377638 tempest-ServersTestManualDisk-231377638-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]524c859f-c352-fb44-98d6-d3beaf2defc7, 'name': SearchDatastore_Task, 'duration_secs': 0.034965} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1795.258108] env[62519]: DEBUG oslo_concurrency.lockutils [None req-aadb616d-bf15-4f6c-b422-c092eb205601 tempest-ServersTestManualDisk-231377638 tempest-ServersTestManualDisk-231377638-project-member] Releasing lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1795.258468] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-aadb616d-bf15-4f6c-b422-c092eb205601 tempest-ServersTestManualDisk-231377638 tempest-ServersTestManualDisk-231377638-project-member] [instance: 0dc95d5e-270c-44cf-b13a-d9ecd0cc3b17] Processing image 15793716-f1d9-4a86-9030-717adf498693 {{(pid=62519) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1795.258836] env[62519]: DEBUG oslo_concurrency.lockutils [None req-aadb616d-bf15-4f6c-b422-c092eb205601 tempest-ServersTestManualDisk-231377638 tempest-ServersTestManualDisk-231377638-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1795.259114] env[62519]: DEBUG oslo_concurrency.lockutils [None req-aadb616d-bf15-4f6c-b422-c092eb205601 tempest-ServersTestManualDisk-231377638 tempest-ServersTestManualDisk-231377638-project-member] Acquired lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1795.259430] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-aadb616d-bf15-4f6c-b422-c092eb205601 tempest-ServersTestManualDisk-231377638 tempest-ServersTestManualDisk-231377638-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62519) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1795.259732] env[62519]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-521fa4d0-4f5c-4539-b96f-aea7ea4813e7 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1795.263495] env[62519]: DEBUG nova.compute.manager [None req-093eb8f0-6a4b-4135-a0a0-fa19e327e07c tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] [instance: 10bfd4ac-6f11-4c96-87a0-ce74bc1193c4] Start spawning the instance on the hypervisor. 
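The image-cache handling above serializes on locks named after datastore paths: the cached image lives at "[datastore1] devstack-image-cache_base/<image-id>/<image-id>.vmdk" and is copied into the instance directory before first boot. A small sketch of composing such path strings from the values in the log; ds_path is an illustrative helper, not Nova's ds_util:

    # Illustrative datastore-path helper mirroring the "[datastore1] ..."
    # strings in the log.
    def ds_path(datastore, *parts):
        return '[%s] %s' % (datastore, '/'.join(parts))

    image_id = '15793716-f1d9-4a86-9030-717adf498693'
    instance_uuid = '0dc95d5e-270c-44cf-b13a-d9ecd0cc3b17'

    cached_vmdk = ds_path('datastore1', 'devstack-image-cache_base',
                          image_id, image_id + '.vmdk')
    instance_vmdk = ds_path('datastore1', instance_uuid,
                            instance_uuid + '.vmdk')

    # The copy step in the log is CopyVirtualDisk_Task from cached_vmdk to
    # instance_vmdk, guarded by a lock named after cached_vmdk.
    print(cached_vmdk)
    print(instance_vmdk)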
{{(pid=62519) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2647}} [ 1795.274471] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-aadb616d-bf15-4f6c-b422-c092eb205601 tempest-ServersTestManualDisk-231377638 tempest-ServersTestManualDisk-231377638-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62519) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1795.274679] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-aadb616d-bf15-4f6c-b422-c092eb205601 tempest-ServersTestManualDisk-231377638 tempest-ServersTestManualDisk-231377638-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62519) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1795.275673] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-616614dd-9e2d-403b-acef-d5aa8bbdf1ad {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1795.281975] env[62519]: DEBUG oslo_vmware.api [None req-aadb616d-bf15-4f6c-b422-c092eb205601 tempest-ServersTestManualDisk-231377638 tempest-ServersTestManualDisk-231377638-project-member] Waiting for the task: (returnval){ [ 1795.281975] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]520b2c64-3b7f-3232-4f4a-da22e1735c13" [ 1795.281975] env[62519]: _type = "Task" [ 1795.281975] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1795.294015] env[62519]: DEBUG oslo_vmware.api [None req-aadb616d-bf15-4f6c-b422-c092eb205601 tempest-ServersTestManualDisk-231377638 tempest-ServersTestManualDisk-231377638-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]520b2c64-3b7f-3232-4f4a-da22e1735c13, 'name': SearchDatastore_Task, 'duration_secs': 0.009407} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1795.296190] env[62519]: DEBUG nova.virt.hardware [None req-093eb8f0-6a4b-4135-a0a0-fa19e327e07c tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T07:56:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T07:55:55Z,direct_url=,disk_format='vmdk',id=15793716-f1d9-4a86-9030-717adf498693,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4069337684d348e0a1ab4eb6a1a2b14d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T07:55:56Z,virtual_size=,visibility=), allow threads: False {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1795.296558] env[62519]: DEBUG nova.virt.hardware [None req-093eb8f0-6a4b-4135-a0a0-fa19e327e07c tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Flavor limits 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1795.296635] env[62519]: DEBUG nova.virt.hardware [None req-093eb8f0-6a4b-4135-a0a0-fa19e327e07c tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Image limits 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1795.296758] env[62519]: DEBUG nova.virt.hardware [None req-093eb8f0-6a4b-4135-a0a0-fa19e327e07c tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Flavor pref 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1795.296953] env[62519]: DEBUG nova.virt.hardware [None req-093eb8f0-6a4b-4135-a0a0-fa19e327e07c tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Image pref 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1795.297133] env[62519]: DEBUG nova.virt.hardware [None req-093eb8f0-6a4b-4135-a0a0-fa19e327e07c tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1795.297343] env[62519]: DEBUG nova.virt.hardware [None req-093eb8f0-6a4b-4135-a0a0-fa19e327e07c tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1795.297504] env[62519]: DEBUG nova.virt.hardware [None req-093eb8f0-6a4b-4135-a0a0-fa19e327e07c tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62519) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 
1795.297661] env[62519]: DEBUG nova.virt.hardware [None req-093eb8f0-6a4b-4135-a0a0-fa19e327e07c tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Got 1 possible topologies {{(pid=62519) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1795.297817] env[62519]: DEBUG nova.virt.hardware [None req-093eb8f0-6a4b-4135-a0a0-fa19e327e07c tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1795.297986] env[62519]: DEBUG nova.virt.hardware [None req-093eb8f0-6a4b-4135-a0a0-fa19e327e07c tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1795.299479] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37d65ba7-dbab-4267-856d-10928189d5ed {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1795.301988] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-46803e6f-f567-48b3-ba09-259c5033fb3e {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1795.308434] env[62519]: DEBUG oslo_vmware.api [None req-aadb616d-bf15-4f6c-b422-c092eb205601 tempest-ServersTestManualDisk-231377638 tempest-ServersTestManualDisk-231377638-project-member] Waiting for the task: (returnval){ [ 1795.308434] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]522dab0d-c407-dbdf-4492-dfee4a8fe355" [ 1795.308434] env[62519]: _type = "Task" [ 1795.308434] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1795.314464] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8118aca2-e4dc-418b-8876-b5cbac70f2c2 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1795.334356] env[62519]: DEBUG oslo_vmware.api [None req-aadb616d-bf15-4f6c-b422-c092eb205601 tempest-ServersTestManualDisk-231377638 tempest-ServersTestManualDisk-231377638-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]522dab0d-c407-dbdf-4492-dfee4a8fe355, 'name': SearchDatastore_Task, 'duration_secs': 0.009567} completed successfully. 
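The nova.virt.hardware lines above walk through CPU topology selection for the m1.nano flavor: "0:0:0" means no flavor or image limits were set, so the limits default to 65536 sockets, cores, and threads, and for a single vCPU the only factorization is 1x1x1, hence VirtCPUTopology(cores=1,sockets=1,threads=1). A compact sketch of that enumeration in plain Python, not the hardware.py implementation:

    from itertools import product

    def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
        # Every (sockets, cores, threads) triple whose product equals the
        # vCPU count and respects the per-dimension limits.
        topologies = []
        for sockets, cores, threads in product(range(1, min(vcpus, max_sockets) + 1),
                                               range(1, min(vcpus, max_cores) + 1),
                                               range(1, min(vcpus, max_threads) + 1)):
            if sockets * cores * threads == vcpus:
                topologies.append((sockets, cores, threads))
        return topologies

    print(possible_topologies(1))   # [(1, 1, 1)] -- the single topology logged above
    print(possible_topologies(4))   # e.g. (1, 4, 1), (2, 2, 1), (4, 1, 1), ...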
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1795.335018] env[62519]: DEBUG oslo_concurrency.lockutils [None req-aadb616d-bf15-4f6c-b422-c092eb205601 tempest-ServersTestManualDisk-231377638 tempest-ServersTestManualDisk-231377638-project-member] Releasing lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1795.335433] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-aadb616d-bf15-4f6c-b422-c092eb205601 tempest-ServersTestManualDisk-231377638 tempest-ServersTestManualDisk-231377638-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk to [datastore1] 0dc95d5e-270c-44cf-b13a-d9ecd0cc3b17/0dc95d5e-270c-44cf-b13a-d9ecd0cc3b17.vmdk {{(pid=62519) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1795.339107] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-af3bbcd9-08a1-49e9-8642-b88887e8e989 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1795.347782] env[62519]: DEBUG oslo_vmware.api [None req-f108ee23-58f0-4a51-812f-2782fcb74054 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Task: {'id': task-1802984, 'name': ReconfigVM_Task, 'duration_secs': 0.411594} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1795.349438] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-f108ee23-58f0-4a51-812f-2782fcb74054 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] [instance: ee4b10ba-1c56-47cf-a528-d6e65c286ddb] Reconfigured VM instance instance-00000053 to detach disk 2001 {{(pid=62519) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1795.349695] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-f108ee23-58f0-4a51-812f-2782fcb74054 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] [instance: ee4b10ba-1c56-47cf-a528-d6e65c286ddb] Powering on the VM {{(pid=62519) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1795.350072] env[62519]: DEBUG oslo_vmware.api [None req-aadb616d-bf15-4f6c-b422-c092eb205601 tempest-ServersTestManualDisk-231377638 tempest-ServersTestManualDisk-231377638-project-member] Waiting for the task: (returnval){ [ 1795.350072] env[62519]: value = "task-1802985" [ 1795.350072] env[62519]: _type = "Task" [ 1795.350072] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1795.350315] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-3e9e30e4-601a-497a-8d8d-c57067a6d22d {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1795.362620] env[62519]: DEBUG oslo_vmware.api [None req-aadb616d-bf15-4f6c-b422-c092eb205601 tempest-ServersTestManualDisk-231377638 tempest-ServersTestManualDisk-231377638-project-member] Task: {'id': task-1802985, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1795.364026] env[62519]: DEBUG oslo_vmware.api [None req-f108ee23-58f0-4a51-812f-2782fcb74054 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Waiting for the task: (returnval){ [ 1795.364026] env[62519]: value = "task-1802986" [ 1795.364026] env[62519]: _type = "Task" [ 1795.364026] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1795.371941] env[62519]: DEBUG oslo_vmware.api [None req-f108ee23-58f0-4a51-812f-2782fcb74054 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Task: {'id': task-1802986, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1795.526838] env[62519]: DEBUG nova.compute.manager [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] [instance: 45d1aa86-a5c8-4e75-a6c8-5f55461702f8] Instance has had 0 of 5 cleanup attempts {{(pid=62519) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11733}} [ 1795.565614] env[62519]: DEBUG oslo_concurrency.lockutils [req-33a21314-35b3-4e43-90e3-835903213e08 req-45229490-92c8-4cf3-b79e-b62a982cb065 service nova] Releasing lock "refresh_cache-0dc95d5e-270c-44cf-b13a-d9ecd0cc3b17" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1795.752833] env[62519]: DEBUG oslo_concurrency.lockutils [None req-58d8e09b-2ec4-4296-bd95-201b039f8f45 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.521s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1795.752833] env[62519]: DEBUG nova.compute.manager [None req-58d8e09b-2ec4-4296-bd95-201b039f8f45 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] [instance: 2bc8f11e-82fc-4acb-945e-15327c133920] Start building networks asynchronously for instance. {{(pid=62519) _build_resources /opt/stack/nova/nova/compute/manager.py:2838}} [ 1795.756240] env[62519]: DEBUG oslo_concurrency.lockutils [None req-d15fc234-4631-453a-b364-87a19d8e4ec4 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 20.138s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1795.807769] env[62519]: DEBUG nova.network.neutron [None req-093eb8f0-6a4b-4135-a0a0-fa19e327e07c tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] [instance: 10bfd4ac-6f11-4c96-87a0-ce74bc1193c4] Successfully updated port: 037e7926-5eac-487b-a2fa-4124ead561df {{(pid=62519) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1795.864991] env[62519]: DEBUG oslo_vmware.api [None req-aadb616d-bf15-4f6c-b422-c092eb205601 tempest-ServersTestManualDisk-231377638 tempest-ServersTestManualDisk-231377638-project-member] Task: {'id': task-1802985, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.514524} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1795.868609] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-aadb616d-bf15-4f6c-b422-c092eb205601 tempest-ServersTestManualDisk-231377638 tempest-ServersTestManualDisk-231377638-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk to [datastore1] 0dc95d5e-270c-44cf-b13a-d9ecd0cc3b17/0dc95d5e-270c-44cf-b13a-d9ecd0cc3b17.vmdk {{(pid=62519) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1795.869735] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-aadb616d-bf15-4f6c-b422-c092eb205601 tempest-ServersTestManualDisk-231377638 tempest-ServersTestManualDisk-231377638-project-member] [instance: 0dc95d5e-270c-44cf-b13a-d9ecd0cc3b17] Extending root virtual disk to 1048576 {{(pid=62519) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1795.869735] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-4e7198a5-a6a9-49f0-a7f0-1534ca561e16 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1795.877614] env[62519]: DEBUG oslo_vmware.api [None req-f108ee23-58f0-4a51-812f-2782fcb74054 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Task: {'id': task-1802986, 'name': PowerOnVM_Task} progress is 88%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1795.879164] env[62519]: DEBUG oslo_vmware.api [None req-aadb616d-bf15-4f6c-b422-c092eb205601 tempest-ServersTestManualDisk-231377638 tempest-ServersTestManualDisk-231377638-project-member] Waiting for the task: (returnval){ [ 1795.879164] env[62519]: value = "task-1802987" [ 1795.879164] env[62519]: _type = "Task" [ 1795.879164] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1795.888783] env[62519]: DEBUG oslo_vmware.api [None req-aadb616d-bf15-4f6c-b422-c092eb205601 tempest-ServersTestManualDisk-231377638 tempest-ServersTestManualDisk-231377638-project-member] Task: {'id': task-1802987, 'name': ExtendVirtualDisk_Task} progress is 0%. 
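"Extending root virtual disk to 1048576" is the flavor's 1 GB root disk expressed in KB, the unit the extend-disk task works in: after the sparse image copy, the root VMDK is grown to root_gb * 1024 * 1024 KB. A one-line conversion matching the logged value:

    # The flavor above has root_gb=1; the VMDK is extended to that size in KB,
    # matching the "Extending root virtual disk to 1048576" line.
    def root_disk_kb(root_gb):
        return root_gb * 1024 * 1024   # GB -> KB

    assert root_disk_kb(1) == 1048576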
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1796.030988] env[62519]: DEBUG nova.compute.manager [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] [instance: 8659f63a-5df9-4ff8-84dd-0722026dc820] Instance has had 0 of 5 cleanup attempts {{(pid=62519) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11733}} [ 1796.259968] env[62519]: DEBUG nova.compute.utils [None req-58d8e09b-2ec4-4296-bd95-201b039f8f45 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Using /dev/sd instead of None {{(pid=62519) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1796.264017] env[62519]: INFO nova.compute.claims [None req-d15fc234-4631-453a-b364-87a19d8e4ec4 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] [instance: 5da884af-d8d2-409b-99bd-e5370e44e9f0] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1796.268383] env[62519]: DEBUG nova.compute.manager [None req-58d8e09b-2ec4-4296-bd95-201b039f8f45 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] [instance: 2bc8f11e-82fc-4acb-945e-15327c133920] Allocating IP information in the background. {{(pid=62519) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1989}} [ 1796.268553] env[62519]: DEBUG nova.network.neutron [None req-58d8e09b-2ec4-4296-bd95-201b039f8f45 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] [instance: 2bc8f11e-82fc-4acb-945e-15327c133920] allocate_for_instance() {{(pid=62519) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1796.311606] env[62519]: DEBUG oslo_concurrency.lockutils [None req-093eb8f0-6a4b-4135-a0a0-fa19e327e07c tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Acquiring lock "refresh_cache-10bfd4ac-6f11-4c96-87a0-ce74bc1193c4" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1796.311780] env[62519]: DEBUG oslo_concurrency.lockutils [None req-093eb8f0-6a4b-4135-a0a0-fa19e327e07c tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Acquired lock "refresh_cache-10bfd4ac-6f11-4c96-87a0-ce74bc1193c4" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1796.311896] env[62519]: DEBUG nova.network.neutron [None req-093eb8f0-6a4b-4135-a0a0-fa19e327e07c tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] [instance: 10bfd4ac-6f11-4c96-87a0-ce74bc1193c4] Building network info cache for instance {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1796.319770] env[62519]: DEBUG nova.policy [None req-58d8e09b-2ec4-4296-bd95-201b039f8f45 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b04fa80750184b97a16ec1880e0a585c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '683d60927fdf424386ffcfaa344a7af6', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 
'service_roles': []} {{(pid=62519) authorize /opt/stack/nova/nova/policy.py:192}} [ 1796.375606] env[62519]: DEBUG oslo_vmware.api [None req-f108ee23-58f0-4a51-812f-2782fcb74054 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Task: {'id': task-1802986, 'name': PowerOnVM_Task, 'duration_secs': 0.631454} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1796.375912] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-f108ee23-58f0-4a51-812f-2782fcb74054 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] [instance: ee4b10ba-1c56-47cf-a528-d6e65c286ddb] Powered on the VM {{(pid=62519) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1796.376200] env[62519]: DEBUG nova.compute.manager [None req-f108ee23-58f0-4a51-812f-2782fcb74054 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] [instance: ee4b10ba-1c56-47cf-a528-d6e65c286ddb] Checking state {{(pid=62519) _get_power_state /opt/stack/nova/nova/compute/manager.py:1799}} [ 1796.377016] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ad22308-b616-434b-9c8a-d70c65f0dad7 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1796.385859] env[62519]: DEBUG nova.compute.manager [req-65465c23-2126-40cb-b106-0dd9f159f752 req-32a55578-6b6b-4f69-a8e6-6360c85f3375 service nova] [instance: 10bfd4ac-6f11-4c96-87a0-ce74bc1193c4] Received event network-vif-plugged-037e7926-5eac-487b-a2fa-4124ead561df {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1796.386153] env[62519]: DEBUG oslo_concurrency.lockutils [req-65465c23-2126-40cb-b106-0dd9f159f752 req-32a55578-6b6b-4f69-a8e6-6360c85f3375 service nova] Acquiring lock "10bfd4ac-6f11-4c96-87a0-ce74bc1193c4-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1796.386282] env[62519]: DEBUG oslo_concurrency.lockutils [req-65465c23-2126-40cb-b106-0dd9f159f752 req-32a55578-6b6b-4f69-a8e6-6360c85f3375 service nova] Lock "10bfd4ac-6f11-4c96-87a0-ce74bc1193c4-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1796.386447] env[62519]: DEBUG oslo_concurrency.lockutils [req-65465c23-2126-40cb-b106-0dd9f159f752 req-32a55578-6b6b-4f69-a8e6-6360c85f3375 service nova] Lock "10bfd4ac-6f11-4c96-87a0-ce74bc1193c4-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1796.386609] env[62519]: DEBUG nova.compute.manager [req-65465c23-2126-40cb-b106-0dd9f159f752 req-32a55578-6b6b-4f69-a8e6-6360c85f3375 service nova] [instance: 10bfd4ac-6f11-4c96-87a0-ce74bc1193c4] No waiting events found dispatching network-vif-plugged-037e7926-5eac-487b-a2fa-4124ead561df {{(pid=62519) pop_instance_event /opt/stack/nova/nova/compute/manager.py:323}} [ 1796.386774] env[62519]: WARNING nova.compute.manager [req-65465c23-2126-40cb-b106-0dd9f159f752 
req-32a55578-6b6b-4f69-a8e6-6360c85f3375 service nova] [instance: 10bfd4ac-6f11-4c96-87a0-ce74bc1193c4] Received unexpected event network-vif-plugged-037e7926-5eac-487b-a2fa-4124ead561df for instance with vm_state building and task_state spawning. [ 1796.386928] env[62519]: DEBUG nova.compute.manager [req-65465c23-2126-40cb-b106-0dd9f159f752 req-32a55578-6b6b-4f69-a8e6-6360c85f3375 service nova] [instance: 10bfd4ac-6f11-4c96-87a0-ce74bc1193c4] Received event network-changed-037e7926-5eac-487b-a2fa-4124ead561df {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1796.387089] env[62519]: DEBUG nova.compute.manager [req-65465c23-2126-40cb-b106-0dd9f159f752 req-32a55578-6b6b-4f69-a8e6-6360c85f3375 service nova] [instance: 10bfd4ac-6f11-4c96-87a0-ce74bc1193c4] Refreshing instance network info cache due to event network-changed-037e7926-5eac-487b-a2fa-4124ead561df. {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11628}} [ 1796.387274] env[62519]: DEBUG oslo_concurrency.lockutils [req-65465c23-2126-40cb-b106-0dd9f159f752 req-32a55578-6b6b-4f69-a8e6-6360c85f3375 service nova] Acquiring lock "refresh_cache-10bfd4ac-6f11-4c96-87a0-ce74bc1193c4" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1796.394051] env[62519]: DEBUG oslo_vmware.api [None req-aadb616d-bf15-4f6c-b422-c092eb205601 tempest-ServersTestManualDisk-231377638 tempest-ServersTestManualDisk-231377638-project-member] Task: {'id': task-1802987, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.070311} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1796.394983] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-aadb616d-bf15-4f6c-b422-c092eb205601 tempest-ServersTestManualDisk-231377638 tempest-ServersTestManualDisk-231377638-project-member] [instance: 0dc95d5e-270c-44cf-b13a-d9ecd0cc3b17] Extended root virtual disk {{(pid=62519) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1796.395750] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1da6271d-5433-4965-ab2d-628ebd944318 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1796.418810] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-aadb616d-bf15-4f6c-b422-c092eb205601 tempest-ServersTestManualDisk-231377638 tempest-ServersTestManualDisk-231377638-project-member] [instance: 0dc95d5e-270c-44cf-b13a-d9ecd0cc3b17] Reconfiguring VM instance instance-00000059 to attach disk [datastore1] 0dc95d5e-270c-44cf-b13a-d9ecd0cc3b17/0dc95d5e-270c-44cf-b13a-d9ecd0cc3b17.vmdk or device None with type sparse {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1796.419178] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-235f71c3-37b8-4421-a7e8-c401362fffcd {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1796.440947] env[62519]: DEBUG oslo_vmware.api [None req-aadb616d-bf15-4f6c-b422-c092eb205601 tempest-ServersTestManualDisk-231377638 tempest-ServersTestManualDisk-231377638-project-member] Waiting for the task: (returnval){ [ 1796.440947] env[62519]: value = "task-1802988" [ 1796.440947] env[62519]: _type = "Task" [ 1796.440947] env[62519]: } to complete. 
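The network-vif-plugged / network-changed handling above follows the external-event pattern: the compute manager keeps a registry of events a build is prepared to wait for, keyed per instance; when Neutron delivers an event nobody registered for, it is logged as unexpected (the WARNING above) and the network info cache is simply refreshed. A toy registry showing the shape of that pattern, built on threading primitives purely for illustration and not resembling the manager's actual implementation:

    import threading
    from collections import defaultdict

    class InstanceEvents:
        # Toy registry: waiters block until an external event arrives.
        def __init__(self):
            self._lock = threading.Lock()
            self._events = defaultdict(dict)   # instance_uuid -> {event_name: Event}

        def prepare(self, instance_uuid, event_name):
            with self._lock:
                ev = threading.Event()
                self._events[instance_uuid][event_name] = ev
                return ev

        def pop(self, instance_uuid, event_name):
            # Mirrors "No waiting events found dispatching ..." when nothing
            # was registered for this (instance, event) pair.
            with self._lock:
                return self._events[instance_uuid].pop(event_name, None)

        def dispatch(self, instance_uuid, event_name):
            ev = self.pop(instance_uuid, event_name)
            if ev is None:
                print('Received unexpected event %s for instance %s'
                      % (event_name, instance_uuid))
            else:
                ev.set()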
{{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1796.451354] env[62519]: DEBUG oslo_vmware.api [None req-aadb616d-bf15-4f6c-b422-c092eb205601 tempest-ServersTestManualDisk-231377638 tempest-ServersTestManualDisk-231377638-project-member] Task: {'id': task-1802988, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1796.535178] env[62519]: DEBUG nova.compute.manager [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] [instance: ed716912-752e-4c6d-b6c6-fb349668fa93] Instance has had 0 of 5 cleanup attempts {{(pid=62519) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11733}} [ 1796.598031] env[62519]: DEBUG nova.network.neutron [None req-58d8e09b-2ec4-4296-bd95-201b039f8f45 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] [instance: 2bc8f11e-82fc-4acb-945e-15327c133920] Successfully created port: bc9c80f3-eef8-4cad-b111-3766f01949f7 {{(pid=62519) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1796.773059] env[62519]: INFO nova.compute.resource_tracker [None req-d15fc234-4631-453a-b364-87a19d8e4ec4 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] [instance: 5da884af-d8d2-409b-99bd-e5370e44e9f0] Updating resource usage from migration 2ff5e489-a567-4b94-8bed-d596558da36f [ 1796.776740] env[62519]: DEBUG nova.compute.manager [None req-58d8e09b-2ec4-4296-bd95-201b039f8f45 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] [instance: 2bc8f11e-82fc-4acb-945e-15327c133920] Start building block device mappings for instance. {{(pid=62519) _build_resources /opt/stack/nova/nova/compute/manager.py:2873}} [ 1796.860114] env[62519]: DEBUG nova.network.neutron [None req-093eb8f0-6a4b-4135-a0a0-fa19e327e07c tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] [instance: 10bfd4ac-6f11-4c96-87a0-ce74bc1193c4] Instance cache missing network info. {{(pid=62519) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1796.959547] env[62519]: DEBUG oslo_vmware.api [None req-aadb616d-bf15-4f6c-b422-c092eb205601 tempest-ServersTestManualDisk-231377638 tempest-ServersTestManualDisk-231377638-project-member] Task: {'id': task-1802988, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1797.024987] env[62519]: DEBUG nova.network.neutron [None req-093eb8f0-6a4b-4135-a0a0-fa19e327e07c tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] [instance: 10bfd4ac-6f11-4c96-87a0-ce74bc1193c4] Updating instance_info_cache with network_info: [{"id": "037e7926-5eac-487b-a2fa-4124ead561df", "address": "fa:16:3e:f0:b9:0e", "network": {"id": "3996e7f6-f093-4152-af91-6fb77f32a1c5", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-2013009403-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "63a46158057949478e5c79fbe0d4d5d4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3093647a-bab7-4562-ada0-428725e8c0fc", "external-id": "nsx-vlan-transportzone-660", "segmentation_id": 660, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap037e7926-5e", "ovs_interfaceid": "037e7926-5eac-487b-a2fa-4124ead561df", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1797.039089] env[62519]: DEBUG nova.compute.manager [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] [instance: 540167be-5295-4e28-9b25-16317746dd0e] Instance has had 0 of 5 cleanup attempts {{(pid=62519) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11733}} [ 1797.128602] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3170e895-be85-47cb-b249-fc4b18707455 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1797.136639] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8135863c-5e48-4505-bee3-9ac883b6a7c2 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1797.168346] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-061ef950-e7d7-4f85-a446-188aa73f7a17 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1797.177088] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd77ac95-f7d2-483c-883a-812ed63546f8 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1797.193076] env[62519]: DEBUG nova.compute.provider_tree [None req-d15fc234-4631-453a-b364-87a19d8e4ec4 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Inventory has not changed in ProviderTree for provider: f8ca0d98-9158-4b85-ae0e-b106f966dd44 {{(pid=62519) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1797.451626] env[62519]: DEBUG oslo_vmware.api [None req-aadb616d-bf15-4f6c-b422-c092eb205601 
tempest-ServersTestManualDisk-231377638 tempest-ServersTestManualDisk-231377638-project-member] Task: {'id': task-1802988, 'name': ReconfigVM_Task, 'duration_secs': 0.999503} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1797.451930] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-aadb616d-bf15-4f6c-b422-c092eb205601 tempest-ServersTestManualDisk-231377638 tempest-ServersTestManualDisk-231377638-project-member] [instance: 0dc95d5e-270c-44cf-b13a-d9ecd0cc3b17] Reconfigured VM instance instance-00000059 to attach disk [datastore1] 0dc95d5e-270c-44cf-b13a-d9ecd0cc3b17/0dc95d5e-270c-44cf-b13a-d9ecd0cc3b17.vmdk or device None with type sparse {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1797.452579] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-d341bef5-ab19-4635-9a0e-fd532eac9bbd {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1797.458743] env[62519]: DEBUG oslo_vmware.api [None req-aadb616d-bf15-4f6c-b422-c092eb205601 tempest-ServersTestManualDisk-231377638 tempest-ServersTestManualDisk-231377638-project-member] Waiting for the task: (returnval){ [ 1797.458743] env[62519]: value = "task-1802989" [ 1797.458743] env[62519]: _type = "Task" [ 1797.458743] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1797.466637] env[62519]: DEBUG oslo_vmware.api [None req-aadb616d-bf15-4f6c-b422-c092eb205601 tempest-ServersTestManualDisk-231377638 tempest-ServersTestManualDisk-231377638-project-member] Task: {'id': task-1802989, 'name': Rename_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1797.527733] env[62519]: DEBUG oslo_concurrency.lockutils [None req-093eb8f0-6a4b-4135-a0a0-fa19e327e07c tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Releasing lock "refresh_cache-10bfd4ac-6f11-4c96-87a0-ce74bc1193c4" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1797.528097] env[62519]: DEBUG nova.compute.manager [None req-093eb8f0-6a4b-4135-a0a0-fa19e327e07c tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] [instance: 10bfd4ac-6f11-4c96-87a0-ce74bc1193c4] Instance network_info: |[{"id": "037e7926-5eac-487b-a2fa-4124ead561df", "address": "fa:16:3e:f0:b9:0e", "network": {"id": "3996e7f6-f093-4152-af91-6fb77f32a1c5", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-2013009403-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "63a46158057949478e5c79fbe0d4d5d4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3093647a-bab7-4562-ada0-428725e8c0fc", "external-id": "nsx-vlan-transportzone-660", "segmentation_id": 660, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap037e7926-5e", "ovs_interfaceid": "037e7926-5eac-487b-a2fa-4124ead561df", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62519) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2004}} [ 1797.528429] env[62519]: DEBUG oslo_concurrency.lockutils [req-65465c23-2126-40cb-b106-0dd9f159f752 req-32a55578-6b6b-4f69-a8e6-6360c85f3375 service nova] Acquired lock "refresh_cache-10bfd4ac-6f11-4c96-87a0-ce74bc1193c4" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1797.528592] env[62519]: DEBUG nova.network.neutron [req-65465c23-2126-40cb-b106-0dd9f159f752 req-32a55578-6b6b-4f69-a8e6-6360c85f3375 service nova] [instance: 10bfd4ac-6f11-4c96-87a0-ce74bc1193c4] Refreshing network info cache for port 037e7926-5eac-487b-a2fa-4124ead561df {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1797.529875] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-093eb8f0-6a4b-4135-a0a0-fa19e327e07c tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] [instance: 10bfd4ac-6f11-4c96-87a0-ce74bc1193c4] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:f0:b9:0e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '3093647a-bab7-4562-ada0-428725e8c0fc', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '037e7926-5eac-487b-a2fa-4124ead561df', 'vif_model': 'vmxnet3'}] {{(pid=62519) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1797.537253] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-093eb8f0-6a4b-4135-a0a0-fa19e327e07c tempest-AttachInterfacesTestJSON-1996275057 
tempest-AttachInterfacesTestJSON-1996275057-project-member] Creating folder: Project (63a46158057949478e5c79fbe0d4d5d4). Parent ref: group-v373567. {{(pid=62519) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1797.540385] env[62519]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-62598d41-c187-42f6-b82d-1cb1f0e29cba {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1797.546528] env[62519]: DEBUG nova.compute.manager [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] [instance: d5b6b7b4-ebc4-4d3d-8e16-bf0a4e974d67] Instance has had 0 of 5 cleanup attempts {{(pid=62519) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11733}} [ 1797.552854] env[62519]: INFO nova.virt.vmwareapi.vm_util [None req-093eb8f0-6a4b-4135-a0a0-fa19e327e07c tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Created folder: Project (63a46158057949478e5c79fbe0d4d5d4) in parent group-v373567. [ 1797.552854] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-093eb8f0-6a4b-4135-a0a0-fa19e327e07c tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Creating folder: Instances. Parent ref: group-v373805. {{(pid=62519) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1797.553059] env[62519]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-47b12415-f451-4a5a-a2d3-266800f7c18b {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1797.563726] env[62519]: INFO nova.virt.vmwareapi.vm_util [None req-093eb8f0-6a4b-4135-a0a0-fa19e327e07c tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Created folder: Instances in parent group-v373805. [ 1797.563974] env[62519]: DEBUG oslo.service.loopingcall [None req-093eb8f0-6a4b-4135-a0a0-fa19e327e07c tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62519) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1797.564183] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 10bfd4ac-6f11-4c96-87a0-ce74bc1193c4] Creating VM on the ESX host {{(pid=62519) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1797.564386] env[62519]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d12990c6-61d2-4602-b2f2-c50a31ea1222 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1797.586948] env[62519]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1797.586948] env[62519]: value = "task-1802992" [ 1797.586948] env[62519]: _type = "Task" [ 1797.586948] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1797.596343] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1802992, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1797.698519] env[62519]: DEBUG nova.scheduler.client.report [None req-d15fc234-4631-453a-b364-87a19d8e4ec4 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Inventory has not changed for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 157, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1797.775031] env[62519]: DEBUG nova.network.neutron [req-65465c23-2126-40cb-b106-0dd9f159f752 req-32a55578-6b6b-4f69-a8e6-6360c85f3375 service nova] [instance: 10bfd4ac-6f11-4c96-87a0-ce74bc1193c4] Updated VIF entry in instance network info cache for port 037e7926-5eac-487b-a2fa-4124ead561df. {{(pid=62519) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1797.775031] env[62519]: DEBUG nova.network.neutron [req-65465c23-2126-40cb-b106-0dd9f159f752 req-32a55578-6b6b-4f69-a8e6-6360c85f3375 service nova] [instance: 10bfd4ac-6f11-4c96-87a0-ce74bc1193c4] Updating instance_info_cache with network_info: [{"id": "037e7926-5eac-487b-a2fa-4124ead561df", "address": "fa:16:3e:f0:b9:0e", "network": {"id": "3996e7f6-f093-4152-af91-6fb77f32a1c5", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-2013009403-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "63a46158057949478e5c79fbe0d4d5d4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3093647a-bab7-4562-ada0-428725e8c0fc", "external-id": "nsx-vlan-transportzone-660", "segmentation_id": 660, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap037e7926-5e", "ovs_interfaceid": "037e7926-5eac-487b-a2fa-4124ead561df", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1797.786307] env[62519]: DEBUG nova.compute.manager [None req-58d8e09b-2ec4-4296-bd95-201b039f8f45 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] [instance: 2bc8f11e-82fc-4acb-945e-15327c133920] Start spawning the instance on the hypervisor. 
{{(pid=62519) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2647}} [ 1797.816792] env[62519]: DEBUG nova.virt.hardware [None req-58d8e09b-2ec4-4296-bd95-201b039f8f45 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T07:56:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T07:55:55Z,direct_url=,disk_format='vmdk',id=15793716-f1d9-4a86-9030-717adf498693,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4069337684d348e0a1ab4eb6a1a2b14d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T07:55:56Z,virtual_size=,visibility=), allow threads: False {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1797.816792] env[62519]: DEBUG nova.virt.hardware [None req-58d8e09b-2ec4-4296-bd95-201b039f8f45 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Flavor limits 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1797.817018] env[62519]: DEBUG nova.virt.hardware [None req-58d8e09b-2ec4-4296-bd95-201b039f8f45 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Image limits 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1797.817121] env[62519]: DEBUG nova.virt.hardware [None req-58d8e09b-2ec4-4296-bd95-201b039f8f45 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Flavor pref 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1797.817271] env[62519]: DEBUG nova.virt.hardware [None req-58d8e09b-2ec4-4296-bd95-201b039f8f45 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Image pref 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1797.817416] env[62519]: DEBUG nova.virt.hardware [None req-58d8e09b-2ec4-4296-bd95-201b039f8f45 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1797.817635] env[62519]: DEBUG nova.virt.hardware [None req-58d8e09b-2ec4-4296-bd95-201b039f8f45 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1797.817794] env[62519]: DEBUG nova.virt.hardware [None req-58d8e09b-2ec4-4296-bd95-201b039f8f45 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62519) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1797.818027] env[62519]: DEBUG nova.virt.hardware [None 
req-58d8e09b-2ec4-4296-bd95-201b039f8f45 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Got 1 possible topologies {{(pid=62519) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1797.818129] env[62519]: DEBUG nova.virt.hardware [None req-58d8e09b-2ec4-4296-bd95-201b039f8f45 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1797.818347] env[62519]: DEBUG nova.virt.hardware [None req-58d8e09b-2ec4-4296-bd95-201b039f8f45 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1797.819294] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f33db023-a7e7-4a4c-80a9-8b92d65d0297 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1797.828406] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-893f501e-25c2-4b2c-b73e-1290cb15a6e7 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1797.969875] env[62519]: DEBUG oslo_vmware.api [None req-aadb616d-bf15-4f6c-b422-c092eb205601 tempest-ServersTestManualDisk-231377638 tempest-ServersTestManualDisk-231377638-project-member] Task: {'id': task-1802989, 'name': Rename_Task, 'duration_secs': 0.182324} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1797.970249] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-aadb616d-bf15-4f6c-b422-c092eb205601 tempest-ServersTestManualDisk-231377638 tempest-ServersTestManualDisk-231377638-project-member] [instance: 0dc95d5e-270c-44cf-b13a-d9ecd0cc3b17] Powering on the VM {{(pid=62519) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1797.970513] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-7ffbfcb8-cfc9-440e-a43a-371e6749c4eb {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1797.977471] env[62519]: DEBUG oslo_vmware.api [None req-aadb616d-bf15-4f6c-b422-c092eb205601 tempest-ServersTestManualDisk-231377638 tempest-ServersTestManualDisk-231377638-project-member] Waiting for the task: (returnval){ [ 1797.977471] env[62519]: value = "task-1802993" [ 1797.977471] env[62519]: _type = "Task" [ 1797.977471] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1797.987369] env[62519]: DEBUG oslo_vmware.api [None req-aadb616d-bf15-4f6c-b422-c092eb205601 tempest-ServersTestManualDisk-231377638 tempest-ServersTestManualDisk-231377638-project-member] Task: {'id': task-1802993, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1798.050094] env[62519]: DEBUG nova.compute.manager [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] [instance: c60f5d73-9d6d-4b5f-b71b-00b6b787d482] Instance has had 0 of 5 cleanup attempts {{(pid=62519) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11733}} [ 1798.099488] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1802992, 'name': CreateVM_Task} progress is 99%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1798.172915] env[62519]: DEBUG nova.network.neutron [None req-58d8e09b-2ec4-4296-bd95-201b039f8f45 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] [instance: 2bc8f11e-82fc-4acb-945e-15327c133920] Successfully updated port: bc9c80f3-eef8-4cad-b111-3766f01949f7 {{(pid=62519) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1798.203792] env[62519]: DEBUG oslo_concurrency.lockutils [None req-d15fc234-4631-453a-b364-87a19d8e4ec4 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.447s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1798.204099] env[62519]: INFO nova.compute.manager [None req-d15fc234-4631-453a-b364-87a19d8e4ec4 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] [instance: 5da884af-d8d2-409b-99bd-e5370e44e9f0] Migrating [ 1798.212351] env[62519]: DEBUG oslo_concurrency.lockutils [None req-664ac020-915d-43b3-aaa3-507984d945e1 tempest-ServersWithSpecificFlavorTestJSON-2102225660 tempest-ServersWithSpecificFlavorTestJSON-2102225660-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 21.204s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1798.212532] env[62519]: DEBUG oslo_concurrency.lockutils [None req-664ac020-915d-43b3-aaa3-507984d945e1 tempest-ServersWithSpecificFlavorTestJSON-2102225660 tempest-ServersWithSpecificFlavorTestJSON-2102225660-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1798.215261] env[62519]: DEBUG oslo_concurrency.lockutils [None req-087464bc-7aa6-42f5-9e15-0aed3a0c7295 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 18.564s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1798.215261] env[62519]: DEBUG nova.objects.instance [None req-087464bc-7aa6-42f5-9e15-0aed3a0c7295 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] [instance: 88f9351c-253b-49dd-a88e-b8585ea742ac] Trying to apply a migration context that does not seem to be set for this instance {{(pid=62519) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1798.243317] env[62519]: INFO nova.scheduler.client.report [None req-664ac020-915d-43b3-aaa3-507984d945e1 
tempest-ServersWithSpecificFlavorTestJSON-2102225660 tempest-ServersWithSpecificFlavorTestJSON-2102225660-project-member] Deleted allocations for instance f0925a44-c15b-4415-99bc-1b2366292fe4 [ 1798.277136] env[62519]: DEBUG oslo_concurrency.lockutils [req-65465c23-2126-40cb-b106-0dd9f159f752 req-32a55578-6b6b-4f69-a8e6-6360c85f3375 service nova] Releasing lock "refresh_cache-10bfd4ac-6f11-4c96-87a0-ce74bc1193c4" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1798.429193] env[62519]: DEBUG nova.compute.manager [req-ddf0cdbf-8610-46b0-a2c2-6917719abe9f req-4677d994-6403-4dae-9215-abba9ea6e53f service nova] [instance: 2bc8f11e-82fc-4acb-945e-15327c133920] Received event network-vif-plugged-bc9c80f3-eef8-4cad-b111-3766f01949f7 {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1798.429408] env[62519]: DEBUG oslo_concurrency.lockutils [req-ddf0cdbf-8610-46b0-a2c2-6917719abe9f req-4677d994-6403-4dae-9215-abba9ea6e53f service nova] Acquiring lock "2bc8f11e-82fc-4acb-945e-15327c133920-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1798.429612] env[62519]: DEBUG oslo_concurrency.lockutils [req-ddf0cdbf-8610-46b0-a2c2-6917719abe9f req-4677d994-6403-4dae-9215-abba9ea6e53f service nova] Lock "2bc8f11e-82fc-4acb-945e-15327c133920-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1798.429858] env[62519]: DEBUG oslo_concurrency.lockutils [req-ddf0cdbf-8610-46b0-a2c2-6917719abe9f req-4677d994-6403-4dae-9215-abba9ea6e53f service nova] Lock "2bc8f11e-82fc-4acb-945e-15327c133920-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1798.429962] env[62519]: DEBUG nova.compute.manager [req-ddf0cdbf-8610-46b0-a2c2-6917719abe9f req-4677d994-6403-4dae-9215-abba9ea6e53f service nova] [instance: 2bc8f11e-82fc-4acb-945e-15327c133920] No waiting events found dispatching network-vif-plugged-bc9c80f3-eef8-4cad-b111-3766f01949f7 {{(pid=62519) pop_instance_event /opt/stack/nova/nova/compute/manager.py:323}} [ 1798.430172] env[62519]: WARNING nova.compute.manager [req-ddf0cdbf-8610-46b0-a2c2-6917719abe9f req-4677d994-6403-4dae-9215-abba9ea6e53f service nova] [instance: 2bc8f11e-82fc-4acb-945e-15327c133920] Received unexpected event network-vif-plugged-bc9c80f3-eef8-4cad-b111-3766f01949f7 for instance with vm_state building and task_state spawning. [ 1798.430317] env[62519]: DEBUG nova.compute.manager [req-ddf0cdbf-8610-46b0-a2c2-6917719abe9f req-4677d994-6403-4dae-9215-abba9ea6e53f service nova] [instance: 2bc8f11e-82fc-4acb-945e-15327c133920] Received event network-changed-bc9c80f3-eef8-4cad-b111-3766f01949f7 {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1798.430437] env[62519]: DEBUG nova.compute.manager [req-ddf0cdbf-8610-46b0-a2c2-6917719abe9f req-4677d994-6403-4dae-9215-abba9ea6e53f service nova] [instance: 2bc8f11e-82fc-4acb-945e-15327c133920] Refreshing instance network info cache due to event network-changed-bc9c80f3-eef8-4cad-b111-3766f01949f7. 
{{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11628}} [ 1798.430616] env[62519]: DEBUG oslo_concurrency.lockutils [req-ddf0cdbf-8610-46b0-a2c2-6917719abe9f req-4677d994-6403-4dae-9215-abba9ea6e53f service nova] Acquiring lock "refresh_cache-2bc8f11e-82fc-4acb-945e-15327c133920" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1798.430784] env[62519]: DEBUG oslo_concurrency.lockutils [req-ddf0cdbf-8610-46b0-a2c2-6917719abe9f req-4677d994-6403-4dae-9215-abba9ea6e53f service nova] Acquired lock "refresh_cache-2bc8f11e-82fc-4acb-945e-15327c133920" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1798.430902] env[62519]: DEBUG nova.network.neutron [req-ddf0cdbf-8610-46b0-a2c2-6917719abe9f req-4677d994-6403-4dae-9215-abba9ea6e53f service nova] [instance: 2bc8f11e-82fc-4acb-945e-15327c133920] Refreshing network info cache for port bc9c80f3-eef8-4cad-b111-3766f01949f7 {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1798.488385] env[62519]: DEBUG oslo_vmware.api [None req-aadb616d-bf15-4f6c-b422-c092eb205601 tempest-ServersTestManualDisk-231377638 tempest-ServersTestManualDisk-231377638-project-member] Task: {'id': task-1802993, 'name': PowerOnVM_Task} progress is 90%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1798.554260] env[62519]: DEBUG nova.compute.manager [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] [instance: 9ac3344d-219a-487f-b83f-96c17cd86dad] Instance has had 0 of 5 cleanup attempts {{(pid=62519) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11733}} [ 1798.600123] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1802992, 'name': CreateVM_Task, 'duration_secs': 0.593616} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1798.600326] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 10bfd4ac-6f11-4c96-87a0-ce74bc1193c4] Created VM on the ESX host {{(pid=62519) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1798.601292] env[62519]: DEBUG oslo_concurrency.lockutils [None req-093eb8f0-6a4b-4135-a0a0-fa19e327e07c tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1798.601474] env[62519]: DEBUG oslo_concurrency.lockutils [None req-093eb8f0-6a4b-4135-a0a0-fa19e327e07c tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Acquired lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1798.601782] env[62519]: DEBUG oslo_concurrency.lockutils [None req-093eb8f0-6a4b-4135-a0a0-fa19e327e07c tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1798.602059] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b1e7b886-c01f-42be-bb8f-b8c89b3cdb6b {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1798.607481] env[62519]: DEBUG oslo_vmware.api [None req-093eb8f0-6a4b-4135-a0a0-fa19e327e07c tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Waiting for the task: (returnval){ [ 1798.607481] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]52ec651e-198f-c5a7-b9e6-b4388e929126" [ 1798.607481] env[62519]: _type = "Task" [ 1798.607481] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1798.618072] env[62519]: DEBUG oslo_vmware.api [None req-093eb8f0-6a4b-4135-a0a0-fa19e327e07c tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52ec651e-198f-c5a7-b9e6-b4388e929126, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1798.675126] env[62519]: DEBUG oslo_concurrency.lockutils [None req-58d8e09b-2ec4-4296-bd95-201b039f8f45 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Acquiring lock "refresh_cache-2bc8f11e-82fc-4acb-945e-15327c133920" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1798.728337] env[62519]: DEBUG oslo_concurrency.lockutils [None req-d15fc234-4631-453a-b364-87a19d8e4ec4 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Acquiring lock "refresh_cache-5da884af-d8d2-409b-99bd-e5370e44e9f0" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1798.728516] env[62519]: DEBUG oslo_concurrency.lockutils [None req-d15fc234-4631-453a-b364-87a19d8e4ec4 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Acquired lock "refresh_cache-5da884af-d8d2-409b-99bd-e5370e44e9f0" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1798.728748] env[62519]: DEBUG nova.network.neutron [None req-d15fc234-4631-453a-b364-87a19d8e4ec4 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] [instance: 5da884af-d8d2-409b-99bd-e5370e44e9f0] Building network info cache for instance {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1798.753021] env[62519]: DEBUG oslo_concurrency.lockutils [None req-664ac020-915d-43b3-aaa3-507984d945e1 tempest-ServersWithSpecificFlavorTestJSON-2102225660 tempest-ServersWithSpecificFlavorTestJSON-2102225660-project-member] Lock "f0925a44-c15b-4415-99bc-1b2366292fe4" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 26.257s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1798.968480] env[62519]: DEBUG nova.network.neutron [req-ddf0cdbf-8610-46b0-a2c2-6917719abe9f req-4677d994-6403-4dae-9215-abba9ea6e53f service nova] [instance: 2bc8f11e-82fc-4acb-945e-15327c133920] Instance cache missing network info. {{(pid=62519) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1798.991434] env[62519]: DEBUG oslo_vmware.api [None req-aadb616d-bf15-4f6c-b422-c092eb205601 tempest-ServersTestManualDisk-231377638 tempest-ServersTestManualDisk-231377638-project-member] Task: {'id': task-1802993, 'name': PowerOnVM_Task, 'duration_secs': 0.599039} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1798.991609] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-aadb616d-bf15-4f6c-b422-c092eb205601 tempest-ServersTestManualDisk-231377638 tempest-ServersTestManualDisk-231377638-project-member] [instance: 0dc95d5e-270c-44cf-b13a-d9ecd0cc3b17] Powered on the VM {{(pid=62519) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1798.991755] env[62519]: INFO nova.compute.manager [None req-aadb616d-bf15-4f6c-b422-c092eb205601 tempest-ServersTestManualDisk-231377638 tempest-ServersTestManualDisk-231377638-project-member] [instance: 0dc95d5e-270c-44cf-b13a-d9ecd0cc3b17] Took 7.13 seconds to spawn the instance on the hypervisor. 
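The entries above all follow the same shape for every vCenter operation: submit a task (ReconfigVM_Task, CreateVM_Task, SearchDatastore_Task, PowerOnVM_Task), poll it until it reports completion, then record its duration_secs. As an illustration only — a minimal sketch assuming a hypothetical poll() callable that returns simplified task info, not oslo.vmware's real API — that polling loop could look like:

import time

class TaskTimeout(Exception):
    pass

def wait_for_task(poll, interval=0.5, timeout=300.0):
    # poll() is assumed to return a dict such as
    # {'state': 'running', 'progress': 14} or {'state': 'success'},
    # a simplified stand-in for the vSphere task info polled in the log above.
    start = time.monotonic()
    while True:
        info = poll()
        if info['state'] == 'success':
            # analogous to the duration_secs value reported on completion
            return time.monotonic() - start
        if info['state'] == 'error':
            raise RuntimeError('task failed: %s' % info.get('error'))
        if time.monotonic() - start > timeout:
            raise TaskTimeout('task still running after %.0fs' % timeout)
        time.sleep(interval)

The interval and timeout values here are arbitrary; the point is only the poll-until-done loop that produces the repeated "progress is N%" lines followed by a single "completed successfully" entry.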
[ 1798.991995] env[62519]: DEBUG nova.compute.manager [None req-aadb616d-bf15-4f6c-b422-c092eb205601 tempest-ServersTestManualDisk-231377638 tempest-ServersTestManualDisk-231377638-project-member] [instance: 0dc95d5e-270c-44cf-b13a-d9ecd0cc3b17] Checking state {{(pid=62519) _get_power_state /opt/stack/nova/nova/compute/manager.py:1799}} [ 1798.992694] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fbc5bf92-b5e6-4b07-8cd8-34c970f0a17a {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1799.057865] env[62519]: DEBUG nova.compute.manager [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] [instance: 302edcd3-bd6e-41da-b731-4d4c1bb5c3c1] Instance has had 0 of 5 cleanup attempts {{(pid=62519) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11733}} [ 1799.060423] env[62519]: DEBUG nova.network.neutron [req-ddf0cdbf-8610-46b0-a2c2-6917719abe9f req-4677d994-6403-4dae-9215-abba9ea6e53f service nova] [instance: 2bc8f11e-82fc-4acb-945e-15327c133920] Updating instance_info_cache with network_info: [] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1799.118701] env[62519]: DEBUG oslo_vmware.api [None req-093eb8f0-6a4b-4135-a0a0-fa19e327e07c tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52ec651e-198f-c5a7-b9e6-b4388e929126, 'name': SearchDatastore_Task, 'duration_secs': 0.021585} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1799.118958] env[62519]: DEBUG oslo_concurrency.lockutils [None req-093eb8f0-6a4b-4135-a0a0-fa19e327e07c tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Releasing lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1799.119212] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-093eb8f0-6a4b-4135-a0a0-fa19e327e07c tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] [instance: 10bfd4ac-6f11-4c96-87a0-ce74bc1193c4] Processing image 15793716-f1d9-4a86-9030-717adf498693 {{(pid=62519) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1799.119442] env[62519]: DEBUG oslo_concurrency.lockutils [None req-093eb8f0-6a4b-4135-a0a0-fa19e327e07c tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1799.119586] env[62519]: DEBUG oslo_concurrency.lockutils [None req-093eb8f0-6a4b-4135-a0a0-fa19e327e07c tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Acquired lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1799.119760] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-093eb8f0-6a4b-4135-a0a0-fa19e327e07c 
tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62519) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1799.120057] env[62519]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-86ec1815-4c77-4d88-a479-fb912a4ec587 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1799.132467] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-093eb8f0-6a4b-4135-a0a0-fa19e327e07c tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62519) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1799.132624] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-093eb8f0-6a4b-4135-a0a0-fa19e327e07c tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62519) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1799.133337] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c7b0dc6a-3960-45fb-875a-784f9918afc1 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1799.139313] env[62519]: DEBUG oslo_vmware.api [None req-093eb8f0-6a4b-4135-a0a0-fa19e327e07c tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Waiting for the task: (returnval){ [ 1799.139313] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]522e80c8-47ab-b6fa-b722-e7be8a312732" [ 1799.139313] env[62519]: _type = "Task" [ 1799.139313] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1799.148208] env[62519]: DEBUG oslo_vmware.api [None req-093eb8f0-6a4b-4135-a0a0-fa19e327e07c tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]522e80c8-47ab-b6fa-b722-e7be8a312732, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1799.225425] env[62519]: DEBUG oslo_concurrency.lockutils [None req-087464bc-7aa6-42f5-9e15-0aed3a0c7295 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.010s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1799.226779] env[62519]: DEBUG oslo_concurrency.lockutils [None req-233e6b89-3144-41a2-9f7d-54ed63bc311d tempest-ServerAddressesTestJSON-575205737 tempest-ServerAddressesTestJSON-575205737-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 16.617s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1799.232968] env[62519]: INFO nova.compute.claims [None req-233e6b89-3144-41a2-9f7d-54ed63bc311d tempest-ServerAddressesTestJSON-575205737 tempest-ServerAddressesTestJSON-575205737-project-member] [instance: 1c4615c7-d145-4529-98bd-1ae3ed51e1b5] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1799.473588] env[62519]: DEBUG nova.network.neutron [None req-d15fc234-4631-453a-b364-87a19d8e4ec4 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] [instance: 5da884af-d8d2-409b-99bd-e5370e44e9f0] Updating instance_info_cache with network_info: [{"id": "f8e70085-af0c-4731-8b61-cf5cd2460344", "address": "fa:16:3e:4f:98:8a", "network": {"id": "14e63db8-c42b-42e5-bd96-cab60c163758", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1328688204-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "df6727c290724a8ebef5188c77e91399", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2be3fdb5-359e-43bd-8c20-2ff00e81db55", "external-id": "nsx-vlan-transportzone-986", "segmentation_id": 986, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf8e70085-af", "ovs_interfaceid": "f8e70085-af0c-4731-8b61-cf5cd2460344", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1799.516240] env[62519]: INFO nova.compute.manager [None req-aadb616d-bf15-4f6c-b422-c092eb205601 tempest-ServersTestManualDisk-231377638 tempest-ServersTestManualDisk-231377638-project-member] [instance: 0dc95d5e-270c-44cf-b13a-d9ecd0cc3b17] Took 26.90 seconds to build instance. 
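Throughout this stretch the lock messages record both how long a caller waited for a named lock and how long it held it (for example "waited 16.617s" and "held 1.010s" on "compute_resources"). A rough sketch of a helper producing that kind of accounting — assuming plain threading locks and a hypothetical timed_lock() name, not oslo.concurrency's fair or external locks — could be:

import threading
import time
from contextlib import contextmanager

_locks = {}
_registry_lock = threading.Lock()

@contextmanager
def timed_lock(name):
    # Look up (or create) the named lock, then report wait and hold times,
    # mirroring the 'acquired :: waited Xs' / '"released" :: held Ys' lines above.
    with _registry_lock:
        lock = _locks.setdefault(name, threading.Lock())
    t0 = time.monotonic()
    lock.acquire()
    acquired = time.monotonic()
    print('Lock "%s" acquired :: waited %.3fs' % (name, acquired - t0))
    try:
        yield
    finally:
        lock.release()
        print('Lock "%s" released :: held %.3fs' % (name, time.monotonic() - acquired))

Used as "with timed_lock('compute_resources'): ..." it prints one acquired/released pair per critical section, which is the same bookkeeping the entries above show around the resource tracker and the image-cache paths.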
[ 1799.562608] env[62519]: DEBUG nova.compute.manager [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] [instance: 80ef3fd4-b9ef-4fd2-a991-feec78a0c81d] Instance has had 0 of 5 cleanup attempts {{(pid=62519) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11733}} [ 1799.564978] env[62519]: DEBUG oslo_concurrency.lockutils [req-ddf0cdbf-8610-46b0-a2c2-6917719abe9f req-4677d994-6403-4dae-9215-abba9ea6e53f service nova] Releasing lock "refresh_cache-2bc8f11e-82fc-4acb-945e-15327c133920" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1799.565365] env[62519]: DEBUG oslo_concurrency.lockutils [None req-58d8e09b-2ec4-4296-bd95-201b039f8f45 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Acquired lock "refresh_cache-2bc8f11e-82fc-4acb-945e-15327c133920" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1799.565511] env[62519]: DEBUG nova.network.neutron [None req-58d8e09b-2ec4-4296-bd95-201b039f8f45 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] [instance: 2bc8f11e-82fc-4acb-945e-15327c133920] Building network info cache for instance {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1799.650882] env[62519]: DEBUG oslo_vmware.api [None req-093eb8f0-6a4b-4135-a0a0-fa19e327e07c tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]522e80c8-47ab-b6fa-b722-e7be8a312732, 'name': SearchDatastore_Task, 'duration_secs': 0.026214} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1799.651846] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f7f8f0c8-bda6-488a-860a-fa04a4e2f3f7 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1799.658439] env[62519]: DEBUG oslo_vmware.api [None req-093eb8f0-6a4b-4135-a0a0-fa19e327e07c tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Waiting for the task: (returnval){ [ 1799.658439] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]52fb9054-01d3-bc1e-53b9-92e6df6a52c5" [ 1799.658439] env[62519]: _type = "Task" [ 1799.658439] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1799.671855] env[62519]: DEBUG oslo_vmware.api [None req-093eb8f0-6a4b-4135-a0a0-fa19e327e07c tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52fb9054-01d3-bc1e-53b9-92e6df6a52c5, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1799.719653] env[62519]: INFO nova.compute.manager [None req-888de478-1e31-43ae-bdd7-d7e8bcd57639 tempest-ServerActionsV293TestJSON-760697799 tempest-ServerActionsV293TestJSON-760697799-project-member] [instance: bace23b3-b7f4-4f3b-8986-0076440d096d] Rebuilding instance [ 1799.776193] env[62519]: DEBUG nova.compute.manager [None req-888de478-1e31-43ae-bdd7-d7e8bcd57639 tempest-ServerActionsV293TestJSON-760697799 tempest-ServerActionsV293TestJSON-760697799-project-member] [instance: bace23b3-b7f4-4f3b-8986-0076440d096d] Checking state {{(pid=62519) _get_power_state /opt/stack/nova/nova/compute/manager.py:1799}} [ 1799.777179] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fca7f3f2-8d5c-4337-9b22-02081b8c3fff {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1799.820166] env[62519]: DEBUG oslo_concurrency.lockutils [None req-97cffebd-23d9-4e9a-84e7-8e33185957ef tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Acquiring lock "46b3a0fb-29f6-4b66-a091-2d125b69d109" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1799.820443] env[62519]: DEBUG oslo_concurrency.lockutils [None req-97cffebd-23d9-4e9a-84e7-8e33185957ef tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Lock "46b3a0fb-29f6-4b66-a091-2d125b69d109" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1799.977909] env[62519]: DEBUG oslo_concurrency.lockutils [None req-d15fc234-4631-453a-b364-87a19d8e4ec4 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Releasing lock "refresh_cache-5da884af-d8d2-409b-99bd-e5370e44e9f0" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1800.058579] env[62519]: DEBUG oslo_concurrency.lockutils [None req-3672f208-1af3-4e50-89f3-c43121d90870 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Acquiring lock "f3665f89-1747-4567-9e56-c937d4ac81da" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1800.058579] env[62519]: DEBUG oslo_concurrency.lockutils [None req-3672f208-1af3-4e50-89f3-c43121d90870 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Lock "f3665f89-1747-4567-9e56-c937d4ac81da" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1800.072372] env[62519]: DEBUG nova.compute.manager [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] [instance: dac173ff-1807-405f-a59c-bb2efef62a0c] Instance has had 0 of 5 cleanup attempts {{(pid=62519) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11733}} [ 
1800.112717] env[62519]: DEBUG nova.network.neutron [None req-58d8e09b-2ec4-4296-bd95-201b039f8f45 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] [instance: 2bc8f11e-82fc-4acb-945e-15327c133920] Instance cache missing network info. {{(pid=62519) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1800.173885] env[62519]: DEBUG oslo_vmware.api [None req-093eb8f0-6a4b-4135-a0a0-fa19e327e07c tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52fb9054-01d3-bc1e-53b9-92e6df6a52c5, 'name': SearchDatastore_Task, 'duration_secs': 0.026221} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1800.174167] env[62519]: DEBUG oslo_concurrency.lockutils [None req-093eb8f0-6a4b-4135-a0a0-fa19e327e07c tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Releasing lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1800.174426] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-093eb8f0-6a4b-4135-a0a0-fa19e327e07c tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk to [datastore1] 10bfd4ac-6f11-4c96-87a0-ce74bc1193c4/10bfd4ac-6f11-4c96-87a0-ce74bc1193c4.vmdk {{(pid=62519) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1800.174688] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-84caefc6-708e-45d5-bae3-d10d1b4d9046 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1800.183559] env[62519]: DEBUG oslo_vmware.api [None req-093eb8f0-6a4b-4135-a0a0-fa19e327e07c tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Waiting for the task: (returnval){ [ 1800.183559] env[62519]: value = "task-1802994" [ 1800.183559] env[62519]: _type = "Task" [ 1800.183559] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1800.195669] env[62519]: DEBUG oslo_vmware.api [None req-093eb8f0-6a4b-4135-a0a0-fa19e327e07c tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Task: {'id': task-1802994, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1800.324224] env[62519]: DEBUG nova.compute.utils [None req-97cffebd-23d9-4e9a-84e7-8e33185957ef tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Using /dev/sd instead of None {{(pid=62519) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1800.381267] env[62519]: DEBUG nova.network.neutron [None req-58d8e09b-2ec4-4296-bd95-201b039f8f45 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] [instance: 2bc8f11e-82fc-4acb-945e-15327c133920] Updating instance_info_cache with network_info: [{"id": "bc9c80f3-eef8-4cad-b111-3766f01949f7", "address": "fa:16:3e:4d:07:0b", "network": {"id": "f9454e9e-4102-407f-acc0-7afee44ec876", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-684678313-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "683d60927fdf424386ffcfaa344a7af6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "11b669be-fb26-4ef8-bdb6-c77ab9d06daf", "external-id": "nsx-vlan-transportzone-633", "segmentation_id": 633, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbc9c80f3-ee", "ovs_interfaceid": "bc9c80f3-eef8-4cad-b111-3766f01949f7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1800.561248] env[62519]: DEBUG nova.compute.manager [None req-3672f208-1af3-4e50-89f3-c43121d90870 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] [instance: f3665f89-1747-4567-9e56-c937d4ac81da] Starting instance... {{(pid=62519) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2441}} [ 1800.575513] env[62519]: DEBUG nova.compute.manager [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] [instance: ad374dd9-a92d-4b76-9609-7562346e05a8] Instance has had 0 of 5 cleanup attempts {{(pid=62519) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11733}} [ 1800.599199] env[62519]: DEBUG nova.compute.manager [req-88613202-e1b0-476d-abe8-0a6a10c8ede9 req-c2491ad4-a159-4f7f-a36f-7cd38a3ded28 service nova] [instance: 0dc95d5e-270c-44cf-b13a-d9ecd0cc3b17] Received event network-changed-b2132242-f6ec-4f0b-a6ef-d49e2d0db056 {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1800.599499] env[62519]: DEBUG nova.compute.manager [req-88613202-e1b0-476d-abe8-0a6a10c8ede9 req-c2491ad4-a159-4f7f-a36f-7cd38a3ded28 service nova] [instance: 0dc95d5e-270c-44cf-b13a-d9ecd0cc3b17] Refreshing instance network info cache due to event network-changed-b2132242-f6ec-4f0b-a6ef-d49e2d0db056. 
{{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11628}} [ 1800.599727] env[62519]: DEBUG oslo_concurrency.lockutils [req-88613202-e1b0-476d-abe8-0a6a10c8ede9 req-c2491ad4-a159-4f7f-a36f-7cd38a3ded28 service nova] Acquiring lock "refresh_cache-0dc95d5e-270c-44cf-b13a-d9ecd0cc3b17" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1800.599970] env[62519]: DEBUG oslo_concurrency.lockutils [req-88613202-e1b0-476d-abe8-0a6a10c8ede9 req-c2491ad4-a159-4f7f-a36f-7cd38a3ded28 service nova] Acquired lock "refresh_cache-0dc95d5e-270c-44cf-b13a-d9ecd0cc3b17" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1800.600229] env[62519]: DEBUG nova.network.neutron [req-88613202-e1b0-476d-abe8-0a6a10c8ede9 req-c2491ad4-a159-4f7f-a36f-7cd38a3ded28 service nova] [instance: 0dc95d5e-270c-44cf-b13a-d9ecd0cc3b17] Refreshing network info cache for port b2132242-f6ec-4f0b-a6ef-d49e2d0db056 {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1800.654572] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9752c0d8-4761-4b46-941d-d9f25ab32cfb {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1800.665298] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cee6fc51-ae2e-4212-aa41-d7fda3f6d7af {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1800.704213] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-053c614c-7cf6-40f5-9a7c-a4adef03b02a {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1800.719764] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a013d7c-51cb-410b-83c4-eb73fd2d5b68 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1800.724832] env[62519]: DEBUG oslo_vmware.api [None req-093eb8f0-6a4b-4135-a0a0-fa19e327e07c tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Task: {'id': task-1802994, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1800.740030] env[62519]: DEBUG nova.compute.provider_tree [None req-233e6b89-3144-41a2-9f7d-54ed63bc311d tempest-ServerAddressesTestJSON-575205737 tempest-ServerAddressesTestJSON-575205737-project-member] Inventory has not changed in ProviderTree for provider: f8ca0d98-9158-4b85-ae0e-b106f966dd44 {{(pid=62519) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1800.802332] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-888de478-1e31-43ae-bdd7-d7e8bcd57639 tempest-ServerActionsV293TestJSON-760697799 tempest-ServerActionsV293TestJSON-760697799-project-member] [instance: bace23b3-b7f4-4f3b-8986-0076440d096d] Powering off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1800.802728] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-31011119-298f-47d7-9b82-5dc8b2bd90e1 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1800.812551] env[62519]: DEBUG oslo_vmware.api [None req-888de478-1e31-43ae-bdd7-d7e8bcd57639 tempest-ServerActionsV293TestJSON-760697799 tempest-ServerActionsV293TestJSON-760697799-project-member] Waiting for the task: (returnval){ [ 1800.812551] env[62519]: value = "task-1802995" [ 1800.812551] env[62519]: _type = "Task" [ 1800.812551] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1800.824605] env[62519]: DEBUG oslo_vmware.api [None req-888de478-1e31-43ae-bdd7-d7e8bcd57639 tempest-ServerActionsV293TestJSON-760697799 tempest-ServerActionsV293TestJSON-760697799-project-member] Task: {'id': task-1802995, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1800.827511] env[62519]: DEBUG oslo_concurrency.lockutils [None req-97cffebd-23d9-4e9a-84e7-8e33185957ef tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Lock "46b3a0fb-29f6-4b66-a091-2d125b69d109" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.007s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1800.884031] env[62519]: DEBUG oslo_concurrency.lockutils [None req-58d8e09b-2ec4-4296-bd95-201b039f8f45 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Releasing lock "refresh_cache-2bc8f11e-82fc-4acb-945e-15327c133920" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1800.884340] env[62519]: DEBUG nova.compute.manager [None req-58d8e09b-2ec4-4296-bd95-201b039f8f45 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] [instance: 2bc8f11e-82fc-4acb-945e-15327c133920] Instance network_info: |[{"id": "bc9c80f3-eef8-4cad-b111-3766f01949f7", "address": "fa:16:3e:4d:07:0b", "network": {"id": "f9454e9e-4102-407f-acc0-7afee44ec876", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-684678313-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "683d60927fdf424386ffcfaa344a7af6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "11b669be-fb26-4ef8-bdb6-c77ab9d06daf", "external-id": "nsx-vlan-transportzone-633", "segmentation_id": 633, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbc9c80f3-ee", "ovs_interfaceid": "bc9c80f3-eef8-4cad-b111-3766f01949f7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62519) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2004}} [ 1800.886429] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-58d8e09b-2ec4-4296-bd95-201b039f8f45 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] [instance: 2bc8f11e-82fc-4acb-945e-15327c133920] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:4d:07:0b', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '11b669be-fb26-4ef8-bdb6-c77ab9d06daf', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'bc9c80f3-eef8-4cad-b111-3766f01949f7', 'vif_model': 'vmxnet3'}] {{(pid=62519) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1800.892884] env[62519]: DEBUG oslo.service.loopingcall [None req-58d8e09b-2ec4-4296-bd95-201b039f8f45 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62519) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1800.893247] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2bc8f11e-82fc-4acb-945e-15327c133920] Creating VM on the ESX host {{(pid=62519) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1800.893411] env[62519]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-9f85ca54-9b7c-484d-8f5c-56ce9f7d5871 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1800.918041] env[62519]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1800.918041] env[62519]: value = "task-1802996" [ 1800.918041] env[62519]: _type = "Task" [ 1800.918041] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1800.927139] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1802996, 'name': CreateVM_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1801.026576] env[62519]: DEBUG oslo_concurrency.lockutils [None req-aadb616d-bf15-4f6c-b422-c092eb205601 tempest-ServersTestManualDisk-231377638 tempest-ServersTestManualDisk-231377638-project-member] Lock "0dc95d5e-270c-44cf-b13a-d9ecd0cc3b17" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 29.425s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1801.078551] env[62519]: DEBUG nova.compute.manager [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] [instance: 765cf18e-53a0-4cc6-ad0e-337a6f68915c] Instance has had 0 of 5 cleanup attempts {{(pid=62519) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11733}} [ 1801.092340] env[62519]: DEBUG oslo_concurrency.lockutils [None req-3672f208-1af3-4e50-89f3-c43121d90870 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1801.212016] env[62519]: DEBUG oslo_vmware.api [None req-093eb8f0-6a4b-4135-a0a0-fa19e327e07c tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Task: {'id': task-1802994, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.671854} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1801.212280] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-093eb8f0-6a4b-4135-a0a0-fa19e327e07c tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk to [datastore1] 10bfd4ac-6f11-4c96-87a0-ce74bc1193c4/10bfd4ac-6f11-4c96-87a0-ce74bc1193c4.vmdk {{(pid=62519) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1801.212492] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-093eb8f0-6a4b-4135-a0a0-fa19e327e07c tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] [instance: 10bfd4ac-6f11-4c96-87a0-ce74bc1193c4] Extending root virtual disk to 1048576 {{(pid=62519) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1801.212743] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-f52bf9e5-fdc8-40e4-836b-ebad7528a609 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1801.220780] env[62519]: DEBUG oslo_vmware.api [None req-093eb8f0-6a4b-4135-a0a0-fa19e327e07c tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Waiting for the task: (returnval){ [ 1801.220780] env[62519]: value = "task-1802997" [ 1801.220780] env[62519]: _type = "Task" [ 1801.220780] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1801.229140] env[62519]: DEBUG oslo_vmware.api [None req-093eb8f0-6a4b-4135-a0a0-fa19e327e07c tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Task: {'id': task-1802997, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1801.244820] env[62519]: DEBUG nova.scheduler.client.report [None req-233e6b89-3144-41a2-9f7d-54ed63bc311d tempest-ServerAddressesTestJSON-575205737 tempest-ServerAddressesTestJSON-575205737-project-member] Inventory has not changed for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 157, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1801.322982] env[62519]: DEBUG oslo_vmware.api [None req-888de478-1e31-43ae-bdd7-d7e8bcd57639 tempest-ServerActionsV293TestJSON-760697799 tempest-ServerActionsV293TestJSON-760697799-project-member] Task: {'id': task-1802995, 'name': PowerOffVM_Task, 'duration_secs': 0.271975} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1801.323175] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-888de478-1e31-43ae-bdd7-d7e8bcd57639 tempest-ServerActionsV293TestJSON-760697799 tempest-ServerActionsV293TestJSON-760697799-project-member] [instance: bace23b3-b7f4-4f3b-8986-0076440d096d] Powered off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1801.326800] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-888de478-1e31-43ae-bdd7-d7e8bcd57639 tempest-ServerActionsV293TestJSON-760697799 tempest-ServerActionsV293TestJSON-760697799-project-member] [instance: bace23b3-b7f4-4f3b-8986-0076440d096d] Powering off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1801.326800] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-818c24ee-c43d-4529-82e2-01a835ec5177 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1801.332282] env[62519]: DEBUG oslo_vmware.api [None req-888de478-1e31-43ae-bdd7-d7e8bcd57639 tempest-ServerActionsV293TestJSON-760697799 tempest-ServerActionsV293TestJSON-760697799-project-member] Waiting for the task: (returnval){ [ 1801.332282] env[62519]: value = "task-1802998" [ 1801.332282] env[62519]: _type = "Task" [ 1801.332282] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1801.340287] env[62519]: DEBUG oslo_vmware.api [None req-888de478-1e31-43ae-bdd7-d7e8bcd57639 tempest-ServerActionsV293TestJSON-760697799 tempest-ServerActionsV293TestJSON-760697799-project-member] Task: {'id': task-1802998, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1801.434927] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1802996, 'name': CreateVM_Task} progress is 25%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1801.495205] env[62519]: DEBUG nova.network.neutron [req-88613202-e1b0-476d-abe8-0a6a10c8ede9 req-c2491ad4-a159-4f7f-a36f-7cd38a3ded28 service nova] [instance: 0dc95d5e-270c-44cf-b13a-d9ecd0cc3b17] Updated VIF entry in instance network info cache for port b2132242-f6ec-4f0b-a6ef-d49e2d0db056. 
{{(pid=62519) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1801.495548] env[62519]: DEBUG nova.network.neutron [req-88613202-e1b0-476d-abe8-0a6a10c8ede9 req-c2491ad4-a159-4f7f-a36f-7cd38a3ded28 service nova] [instance: 0dc95d5e-270c-44cf-b13a-d9ecd0cc3b17] Updating instance_info_cache with network_info: [{"id": "b2132242-f6ec-4f0b-a6ef-d49e2d0db056", "address": "fa:16:3e:8a:6a:7a", "network": {"id": "0f8b42bc-c22c-4b20-98b0-587026f270d6", "bridge": "br-int", "label": "tempest-ServersTestManualDisk-440100259-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.130", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1ce3ea0129bd4299bec8bdb2bf2bd096", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e7a0d5af-5be9-477a-837c-58ef55c717f4", "external-id": "nsx-vlan-transportzone-598", "segmentation_id": 598, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb2132242-f6", "ovs_interfaceid": "b2132242-f6ec-4f0b-a6ef-d49e2d0db056", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1801.500709] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-585684d2-2f1f-4b69-a367-8da5e32dccfc {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1801.523224] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-d15fc234-4631-453a-b364-87a19d8e4ec4 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] [instance: 5da884af-d8d2-409b-99bd-e5370e44e9f0] Updating instance '5da884af-d8d2-409b-99bd-e5370e44e9f0' progress to 0 {{(pid=62519) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1801.582452] env[62519]: DEBUG nova.compute.manager [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] [instance: 1b4efc0f-474f-4a32-b199-c14f27b183e2] Instance has had 0 of 5 cleanup attempts {{(pid=62519) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11733}} [ 1801.731581] env[62519]: DEBUG oslo_vmware.api [None req-093eb8f0-6a4b-4135-a0a0-fa19e327e07c tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Task: {'id': task-1802997, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.07456} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1801.731872] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-093eb8f0-6a4b-4135-a0a0-fa19e327e07c tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] [instance: 10bfd4ac-6f11-4c96-87a0-ce74bc1193c4] Extended root virtual disk {{(pid=62519) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1801.733612] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af071087-0a67-43b9-8275-413ddab8bf81 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1801.749711] env[62519]: DEBUG oslo_concurrency.lockutils [None req-233e6b89-3144-41a2-9f7d-54ed63bc311d tempest-ServerAddressesTestJSON-575205737 tempest-ServerAddressesTestJSON-575205737-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.523s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1801.750346] env[62519]: DEBUG nova.compute.manager [None req-233e6b89-3144-41a2-9f7d-54ed63bc311d tempest-ServerAddressesTestJSON-575205737 tempest-ServerAddressesTestJSON-575205737-project-member] [instance: 1c4615c7-d145-4529-98bd-1ae3ed51e1b5] Start building networks asynchronously for instance. {{(pid=62519) _build_resources /opt/stack/nova/nova/compute/manager.py:2838}} [ 1801.763760] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-093eb8f0-6a4b-4135-a0a0-fa19e327e07c tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] [instance: 10bfd4ac-6f11-4c96-87a0-ce74bc1193c4] Reconfiguring VM instance instance-0000005a to attach disk [datastore1] 10bfd4ac-6f11-4c96-87a0-ce74bc1193c4/10bfd4ac-6f11-4c96-87a0-ce74bc1193c4.vmdk or device None with type sparse {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1801.763760] env[62519]: DEBUG oslo_concurrency.lockutils [None req-34394a41-2f5a-4a60-841d-89a226332d22 tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 15.287s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1801.763760] env[62519]: INFO nova.compute.claims [None req-34394a41-2f5a-4a60-841d-89a226332d22 tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] [instance: 71c6bd0c-40da-44eb-8f37-4f1b7fb0f0a2] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1801.765861] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-52c67070-8c03-4d76-9b0d-8bd61596e3b5 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1801.794012] env[62519]: DEBUG oslo_vmware.api [None req-093eb8f0-6a4b-4135-a0a0-fa19e327e07c tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Waiting for the task: (returnval){ [ 1801.794012] env[62519]: value = "task-1802999" [ 1801.794012] env[62519]: _type = "Task" [ 1801.794012] env[62519]: } to complete. 
{{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1801.804955] env[62519]: DEBUG oslo_vmware.api [None req-093eb8f0-6a4b-4135-a0a0-fa19e327e07c tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Task: {'id': task-1802999, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1801.843922] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-888de478-1e31-43ae-bdd7-d7e8bcd57639 tempest-ServerActionsV293TestJSON-760697799 tempest-ServerActionsV293TestJSON-760697799-project-member] [instance: bace23b3-b7f4-4f3b-8986-0076440d096d] VM already powered off {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1801.844204] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-888de478-1e31-43ae-bdd7-d7e8bcd57639 tempest-ServerActionsV293TestJSON-760697799 tempest-ServerActionsV293TestJSON-760697799-project-member] [instance: bace23b3-b7f4-4f3b-8986-0076440d096d] Volume detach. Driver type: vmdk {{(pid=62519) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1801.844506] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-888de478-1e31-43ae-bdd7-d7e8bcd57639 tempest-ServerActionsV293TestJSON-760697799 tempest-ServerActionsV293TestJSON-760697799-project-member] [instance: bace23b3-b7f4-4f3b-8986-0076440d096d] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-373701', 'volume_id': 'f0e225ed-ae23-4580-9ecb-e74214791c63', 'name': 'volume-f0e225ed-ae23-4580-9ecb-e74214791c63', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'bace23b3-b7f4-4f3b-8986-0076440d096d', 'attached_at': '', 'detached_at': '', 'volume_id': 'f0e225ed-ae23-4580-9ecb-e74214791c63', 'serial': 'f0e225ed-ae23-4580-9ecb-e74214791c63'} {{(pid=62519) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1801.845283] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9909e133-857d-4908-83c2-d29ec2d8a9e4 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1801.866857] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09dfce7b-1f24-4275-a3fb-392fb5f38901 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1801.875359] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2aa8138f-75a4-4e98-ba14-23dfd50c6e36 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1801.901092] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc1a640b-7a90-44f1-8055-413a8b906193 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1801.918708] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-888de478-1e31-43ae-bdd7-d7e8bcd57639 tempest-ServerActionsV293TestJSON-760697799 tempest-ServerActionsV293TestJSON-760697799-project-member] The volume has not been displaced from its original location: [datastore1] 
volume-f0e225ed-ae23-4580-9ecb-e74214791c63/volume-f0e225ed-ae23-4580-9ecb-e74214791c63.vmdk. No consolidation needed. {{(pid=62519) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1801.924208] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-888de478-1e31-43ae-bdd7-d7e8bcd57639 tempest-ServerActionsV293TestJSON-760697799 tempest-ServerActionsV293TestJSON-760697799-project-member] [instance: bace23b3-b7f4-4f3b-8986-0076440d096d] Reconfiguring VM instance instance-0000003b to detach disk 2000 {{(pid=62519) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1801.924686] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-09527be0-c01b-4736-bd6a-c2226e5a7fcb {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1801.943460] env[62519]: DEBUG oslo_concurrency.lockutils [None req-97cffebd-23d9-4e9a-84e7-8e33185957ef tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Acquiring lock "46b3a0fb-29f6-4b66-a091-2d125b69d109" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1801.943720] env[62519]: DEBUG oslo_concurrency.lockutils [None req-97cffebd-23d9-4e9a-84e7-8e33185957ef tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Lock "46b3a0fb-29f6-4b66-a091-2d125b69d109" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1801.944041] env[62519]: INFO nova.compute.manager [None req-97cffebd-23d9-4e9a-84e7-8e33185957ef tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] [instance: 46b3a0fb-29f6-4b66-a091-2d125b69d109] Attaching volume 9c0a59d7-6968-4064-b0ab-53e4c74275f5 to /dev/sdb [ 1801.957887] env[62519]: DEBUG oslo_vmware.api [None req-888de478-1e31-43ae-bdd7-d7e8bcd57639 tempest-ServerActionsV293TestJSON-760697799 tempest-ServerActionsV293TestJSON-760697799-project-member] Waiting for the task: (returnval){ [ 1801.957887] env[62519]: value = "task-1803000" [ 1801.957887] env[62519]: _type = "Task" [ 1801.957887] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1801.958157] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1802996, 'name': CreateVM_Task} progress is 25%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1801.968777] env[62519]: DEBUG oslo_vmware.api [None req-888de478-1e31-43ae-bdd7-d7e8bcd57639 tempest-ServerActionsV293TestJSON-760697799 tempest-ServerActionsV293TestJSON-760697799-project-member] Task: {'id': task-1803000, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1801.989345] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38314dfe-a832-4f3a-9b16-936f123fc869 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1801.997141] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b6e17e5-968a-465f-ab7e-1f92dcff8493 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1802.000156] env[62519]: DEBUG oslo_concurrency.lockutils [req-88613202-e1b0-476d-abe8-0a6a10c8ede9 req-c2491ad4-a159-4f7f-a36f-7cd38a3ded28 service nova] Releasing lock "refresh_cache-0dc95d5e-270c-44cf-b13a-d9ecd0cc3b17" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1802.014718] env[62519]: DEBUG nova.virt.block_device [None req-97cffebd-23d9-4e9a-84e7-8e33185957ef tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] [instance: 46b3a0fb-29f6-4b66-a091-2d125b69d109] Updating existing volume attachment record: 7e829aa8-6a8d-4c49-8929-b546f35f0833 {{(pid=62519) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1802.029733] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-d15fc234-4631-453a-b364-87a19d8e4ec4 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] [instance: 5da884af-d8d2-409b-99bd-e5370e44e9f0] Powering off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1802.030095] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-34428a7f-f236-49cb-963e-2e6de43d80ec {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1802.041017] env[62519]: DEBUG oslo_vmware.api [None req-d15fc234-4631-453a-b364-87a19d8e4ec4 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Waiting for the task: (returnval){ [ 1802.041017] env[62519]: value = "task-1803001" [ 1802.041017] env[62519]: _type = "Task" [ 1802.041017] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1802.050805] env[62519]: DEBUG oslo_vmware.api [None req-d15fc234-4631-453a-b364-87a19d8e4ec4 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Task: {'id': task-1803001, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1802.087038] env[62519]: DEBUG nova.compute.manager [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] [instance: a1551278-a306-4534-8d8d-3b3a003dde04] Instance has had 0 of 5 cleanup attempts {{(pid=62519) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11733}} [ 1802.263843] env[62519]: DEBUG nova.compute.utils [None req-233e6b89-3144-41a2-9f7d-54ed63bc311d tempest-ServerAddressesTestJSON-575205737 tempest-ServerAddressesTestJSON-575205737-project-member] Using /dev/sd instead of None {{(pid=62519) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1802.265373] env[62519]: DEBUG nova.compute.manager [None req-233e6b89-3144-41a2-9f7d-54ed63bc311d tempest-ServerAddressesTestJSON-575205737 tempest-ServerAddressesTestJSON-575205737-project-member] [instance: 1c4615c7-d145-4529-98bd-1ae3ed51e1b5] Allocating IP information in the background. {{(pid=62519) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1989}} [ 1802.265649] env[62519]: DEBUG nova.network.neutron [None req-233e6b89-3144-41a2-9f7d-54ed63bc311d tempest-ServerAddressesTestJSON-575205737 tempest-ServerAddressesTestJSON-575205737-project-member] [instance: 1c4615c7-d145-4529-98bd-1ae3ed51e1b5] allocate_for_instance() {{(pid=62519) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1802.305777] env[62519]: DEBUG oslo_vmware.api [None req-093eb8f0-6a4b-4135-a0a0-fa19e327e07c tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Task: {'id': task-1802999, 'name': ReconfigVM_Task, 'duration_secs': 0.418651} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1802.306112] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-093eb8f0-6a4b-4135-a0a0-fa19e327e07c tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] [instance: 10bfd4ac-6f11-4c96-87a0-ce74bc1193c4] Reconfigured VM instance instance-0000005a to attach disk [datastore1] 10bfd4ac-6f11-4c96-87a0-ce74bc1193c4/10bfd4ac-6f11-4c96-87a0-ce74bc1193c4.vmdk or device None with type sparse {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1802.306837] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-94ab6a1f-0890-4a63-b8d9-02ce880a8eb1 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1802.313405] env[62519]: DEBUG oslo_vmware.api [None req-093eb8f0-6a4b-4135-a0a0-fa19e327e07c tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Waiting for the task: (returnval){ [ 1802.313405] env[62519]: value = "task-1803003" [ 1802.313405] env[62519]: _type = "Task" [ 1802.313405] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1802.322696] env[62519]: DEBUG oslo_vmware.api [None req-093eb8f0-6a4b-4135-a0a0-fa19e327e07c tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Task: {'id': task-1803003, 'name': Rename_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1802.324262] env[62519]: DEBUG nova.policy [None req-233e6b89-3144-41a2-9f7d-54ed63bc311d tempest-ServerAddressesTestJSON-575205737 tempest-ServerAddressesTestJSON-575205737-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '5678fd4d1dd54eb3b2552d7feb876082', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '1d6c914bac4e4293ade25fd3da4576e7', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62519) authorize /opt/stack/nova/nova/policy.py:192}} [ 1802.438941] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1802996, 'name': CreateVM_Task} progress is 25%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1802.469431] env[62519]: DEBUG oslo_vmware.api [None req-888de478-1e31-43ae-bdd7-d7e8bcd57639 tempest-ServerActionsV293TestJSON-760697799 tempest-ServerActionsV293TestJSON-760697799-project-member] Task: {'id': task-1803000, 'name': ReconfigVM_Task, 'duration_secs': 0.375887} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1802.470674] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-888de478-1e31-43ae-bdd7-d7e8bcd57639 tempest-ServerActionsV293TestJSON-760697799 tempest-ServerActionsV293TestJSON-760697799-project-member] [instance: bace23b3-b7f4-4f3b-8986-0076440d096d] Reconfigured VM instance instance-0000003b to detach disk 2000 {{(pid=62519) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1802.477673] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-575e78f7-b828-46ac-9c02-72a31b4563ae {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1802.492332] env[62519]: DEBUG oslo_vmware.api [None req-888de478-1e31-43ae-bdd7-d7e8bcd57639 tempest-ServerActionsV293TestJSON-760697799 tempest-ServerActionsV293TestJSON-760697799-project-member] Waiting for the task: (returnval){ [ 1802.492332] env[62519]: value = "task-1803005" [ 1802.492332] env[62519]: _type = "Task" [ 1802.492332] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1802.505110] env[62519]: DEBUG oslo_vmware.api [None req-888de478-1e31-43ae-bdd7-d7e8bcd57639 tempest-ServerActionsV293TestJSON-760697799 tempest-ServerActionsV293TestJSON-760697799-project-member] Task: {'id': task-1803005, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1802.554651] env[62519]: DEBUG oslo_vmware.api [None req-d15fc234-4631-453a-b364-87a19d8e4ec4 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Task: {'id': task-1803001, 'name': PowerOffVM_Task, 'duration_secs': 0.234242} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1802.554651] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-d15fc234-4631-453a-b364-87a19d8e4ec4 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] [instance: 5da884af-d8d2-409b-99bd-e5370e44e9f0] Powered off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1802.554755] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-d15fc234-4631-453a-b364-87a19d8e4ec4 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] [instance: 5da884af-d8d2-409b-99bd-e5370e44e9f0] Updating instance '5da884af-d8d2-409b-99bd-e5370e44e9f0' progress to 17 {{(pid=62519) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1802.590885] env[62519]: DEBUG nova.compute.manager [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] [instance: d8780c40-0099-4ccc-84ae-72fbb14fa1ee] Instance has had 0 of 5 cleanup attempts {{(pid=62519) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11733}} [ 1802.644481] env[62519]: DEBUG nova.network.neutron [None req-233e6b89-3144-41a2-9f7d-54ed63bc311d tempest-ServerAddressesTestJSON-575205737 tempest-ServerAddressesTestJSON-575205737-project-member] [instance: 1c4615c7-d145-4529-98bd-1ae3ed51e1b5] Successfully created port: 6541f904-9576-48c8-972b-84003d7a5315 {{(pid=62519) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1802.769660] env[62519]: DEBUG nova.compute.manager [None req-233e6b89-3144-41a2-9f7d-54ed63bc311d tempest-ServerAddressesTestJSON-575205737 tempest-ServerAddressesTestJSON-575205737-project-member] [instance: 1c4615c7-d145-4529-98bd-1ae3ed51e1b5] Start building block device mappings for instance. {{(pid=62519) _build_resources /opt/stack/nova/nova/compute/manager.py:2873}} [ 1802.827712] env[62519]: DEBUG oslo_vmware.api [None req-093eb8f0-6a4b-4135-a0a0-fa19e327e07c tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Task: {'id': task-1803003, 'name': Rename_Task, 'duration_secs': 0.170439} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1802.828000] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-093eb8f0-6a4b-4135-a0a0-fa19e327e07c tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] [instance: 10bfd4ac-6f11-4c96-87a0-ce74bc1193c4] Powering on the VM {{(pid=62519) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1802.828531] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-bab78048-a5c8-42a5-93b0-d233db806be7 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1802.840880] env[62519]: DEBUG oslo_vmware.api [None req-093eb8f0-6a4b-4135-a0a0-fa19e327e07c tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Waiting for the task: (returnval){ [ 1802.840880] env[62519]: value = "task-1803007" [ 1802.840880] env[62519]: _type = "Task" [ 1802.840880] env[62519]: } to complete. 
{{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1802.852961] env[62519]: DEBUG oslo_vmware.api [None req-093eb8f0-6a4b-4135-a0a0-fa19e327e07c tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Task: {'id': task-1803007, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1802.935656] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1802996, 'name': CreateVM_Task} progress is 25%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1803.003557] env[62519]: DEBUG oslo_vmware.api [None req-888de478-1e31-43ae-bdd7-d7e8bcd57639 tempest-ServerActionsV293TestJSON-760697799 tempest-ServerActionsV293TestJSON-760697799-project-member] Task: {'id': task-1803005, 'name': ReconfigVM_Task, 'duration_secs': 0.164804} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1803.004067] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-888de478-1e31-43ae-bdd7-d7e8bcd57639 tempest-ServerActionsV293TestJSON-760697799 tempest-ServerActionsV293TestJSON-760697799-project-member] [instance: bace23b3-b7f4-4f3b-8986-0076440d096d] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-373701', 'volume_id': 'f0e225ed-ae23-4580-9ecb-e74214791c63', 'name': 'volume-f0e225ed-ae23-4580-9ecb-e74214791c63', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'bace23b3-b7f4-4f3b-8986-0076440d096d', 'attached_at': '', 'detached_at': '', 'volume_id': 'f0e225ed-ae23-4580-9ecb-e74214791c63', 'serial': 'f0e225ed-ae23-4580-9ecb-e74214791c63'} {{(pid=62519) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1803.004312] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-888de478-1e31-43ae-bdd7-d7e8bcd57639 tempest-ServerActionsV293TestJSON-760697799 tempest-ServerActionsV293TestJSON-760697799-project-member] [instance: bace23b3-b7f4-4f3b-8986-0076440d096d] Destroying instance {{(pid=62519) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1803.005135] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1bb442a2-906b-4c75-8fa1-adceb1c8219c {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1803.014153] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-888de478-1e31-43ae-bdd7-d7e8bcd57639 tempest-ServerActionsV293TestJSON-760697799 tempest-ServerActionsV293TestJSON-760697799-project-member] [instance: bace23b3-b7f4-4f3b-8986-0076440d096d] Unregistering the VM {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1803.016859] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-3ed5606a-fa4a-4d13-8dc1-583ac5726c71 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1803.063720] env[62519]: DEBUG nova.virt.hardware [None req-d15fc234-4631-453a-b364-87a19d8e4ec4 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Getting desirable topologies for flavor 
Flavor(created_at=2024-12-12T07:56:15Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=15793716-f1d9-4a86-9030-717adf498693,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1803.064054] env[62519]: DEBUG nova.virt.hardware [None req-d15fc234-4631-453a-b364-87a19d8e4ec4 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Flavor limits 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1803.064229] env[62519]: DEBUG nova.virt.hardware [None req-d15fc234-4631-453a-b364-87a19d8e4ec4 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Image limits 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1803.064418] env[62519]: DEBUG nova.virt.hardware [None req-d15fc234-4631-453a-b364-87a19d8e4ec4 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Flavor pref 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1803.064564] env[62519]: DEBUG nova.virt.hardware [None req-d15fc234-4631-453a-b364-87a19d8e4ec4 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Image pref 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1803.064710] env[62519]: DEBUG nova.virt.hardware [None req-d15fc234-4631-453a-b364-87a19d8e4ec4 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1803.064920] env[62519]: DEBUG nova.virt.hardware [None req-d15fc234-4631-453a-b364-87a19d8e4ec4 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1803.065101] env[62519]: DEBUG nova.virt.hardware [None req-d15fc234-4631-453a-b364-87a19d8e4ec4 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62519) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1803.065270] env[62519]: DEBUG nova.virt.hardware [None req-d15fc234-4631-453a-b364-87a19d8e4ec4 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Got 1 possible topologies {{(pid=62519) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1803.065432] env[62519]: DEBUG nova.virt.hardware [None req-d15fc234-4631-453a-b364-87a19d8e4ec4 tempest-ServerDiskConfigTestJSON-1314025576 
tempest-ServerDiskConfigTestJSON-1314025576-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1803.065602] env[62519]: DEBUG nova.virt.hardware [None req-d15fc234-4631-453a-b364-87a19d8e4ec4 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1803.074032] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-da68b46e-9fc3-4f6c-a4d7-f07a13582952 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1803.092210] env[62519]: DEBUG oslo_vmware.api [None req-d15fc234-4631-453a-b364-87a19d8e4ec4 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Waiting for the task: (returnval){ [ 1803.092210] env[62519]: value = "task-1803009" [ 1803.092210] env[62519]: _type = "Task" [ 1803.092210] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1803.096210] env[62519]: DEBUG nova.compute.manager [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] [instance: 24cb49c8-b2ef-4ede-aea6-6e34081beca1] Instance has had 0 of 5 cleanup attempts {{(pid=62519) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11733}} [ 1803.106463] env[62519]: DEBUG oslo_vmware.api [None req-d15fc234-4631-453a-b364-87a19d8e4ec4 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Task: {'id': task-1803009, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1803.156218] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-309e92ec-e2b3-4878-a72f-d2d530438191 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1803.166068] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8e3495d-b54d-4c4b-9f78-cb9e7b037c8e {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1803.198660] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9da35f3f-beab-4ef7-be77-9cd130268a13 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1803.208414] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7ce8331-c88c-4cdc-b61d-c569c60a78a6 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1803.227615] env[62519]: DEBUG nova.compute.provider_tree [None req-34394a41-2f5a-4a60-841d-89a226332d22 tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] Inventory has not changed in ProviderTree for provider: f8ca0d98-9158-4b85-ae0e-b106f966dd44 {{(pid=62519) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1803.352392] env[62519]: DEBUG oslo_vmware.api [None req-093eb8f0-6a4b-4135-a0a0-fa19e327e07c tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Task: {'id': task-1803007, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1803.437538] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1802996, 'name': CreateVM_Task} progress is 25%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1803.603455] env[62519]: DEBUG nova.compute.manager [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] [instance: 4e3dee19-b99a-4257-88da-1b0531e2c0f9] Instance has had 0 of 5 cleanup attempts {{(pid=62519) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11733}} [ 1803.606167] env[62519]: DEBUG oslo_vmware.api [None req-d15fc234-4631-453a-b364-87a19d8e4ec4 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Task: {'id': task-1803009, 'name': ReconfigVM_Task, 'duration_secs': 0.222088} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1803.606167] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-d15fc234-4631-453a-b364-87a19d8e4ec4 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] [instance: 5da884af-d8d2-409b-99bd-e5370e44e9f0] Updating instance '5da884af-d8d2-409b-99bd-e5370e44e9f0' progress to 33 {{(pid=62519) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1803.731376] env[62519]: DEBUG nova.scheduler.client.report [None req-34394a41-2f5a-4a60-841d-89a226332d22 tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] Inventory has not changed for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 157, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1803.780359] env[62519]: DEBUG nova.compute.manager [None req-233e6b89-3144-41a2-9f7d-54ed63bc311d tempest-ServerAddressesTestJSON-575205737 tempest-ServerAddressesTestJSON-575205737-project-member] [instance: 1c4615c7-d145-4529-98bd-1ae3ed51e1b5] Start spawning the instance on the hypervisor. {{(pid=62519) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2647}} [ 1803.807982] env[62519]: DEBUG nova.virt.hardware [None req-233e6b89-3144-41a2-9f7d-54ed63bc311d tempest-ServerAddressesTestJSON-575205737 tempest-ServerAddressesTestJSON-575205737-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T07:56:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T07:55:55Z,direct_url=,disk_format='vmdk',id=15793716-f1d9-4a86-9030-717adf498693,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4069337684d348e0a1ab4eb6a1a2b14d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T07:55:56Z,virtual_size=,visibility=), allow threads: False {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1803.808239] env[62519]: DEBUG nova.virt.hardware [None req-233e6b89-3144-41a2-9f7d-54ed63bc311d tempest-ServerAddressesTestJSON-575205737 tempest-ServerAddressesTestJSON-575205737-project-member] Flavor limits 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1803.808392] env[62519]: DEBUG nova.virt.hardware [None req-233e6b89-3144-41a2-9f7d-54ed63bc311d tempest-ServerAddressesTestJSON-575205737 tempest-ServerAddressesTestJSON-575205737-project-member] Image limits 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1803.808574] env[62519]: DEBUG nova.virt.hardware [None req-233e6b89-3144-41a2-9f7d-54ed63bc311d tempest-ServerAddressesTestJSON-575205737 
tempest-ServerAddressesTestJSON-575205737-project-member] Flavor pref 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1803.808718] env[62519]: DEBUG nova.virt.hardware [None req-233e6b89-3144-41a2-9f7d-54ed63bc311d tempest-ServerAddressesTestJSON-575205737 tempest-ServerAddressesTestJSON-575205737-project-member] Image pref 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1803.808896] env[62519]: DEBUG nova.virt.hardware [None req-233e6b89-3144-41a2-9f7d-54ed63bc311d tempest-ServerAddressesTestJSON-575205737 tempest-ServerAddressesTestJSON-575205737-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1803.809160] env[62519]: DEBUG nova.virt.hardware [None req-233e6b89-3144-41a2-9f7d-54ed63bc311d tempest-ServerAddressesTestJSON-575205737 tempest-ServerAddressesTestJSON-575205737-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1803.809332] env[62519]: DEBUG nova.virt.hardware [None req-233e6b89-3144-41a2-9f7d-54ed63bc311d tempest-ServerAddressesTestJSON-575205737 tempest-ServerAddressesTestJSON-575205737-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62519) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1803.809894] env[62519]: DEBUG nova.virt.hardware [None req-233e6b89-3144-41a2-9f7d-54ed63bc311d tempest-ServerAddressesTestJSON-575205737 tempest-ServerAddressesTestJSON-575205737-project-member] Got 1 possible topologies {{(pid=62519) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1803.809894] env[62519]: DEBUG nova.virt.hardware [None req-233e6b89-3144-41a2-9f7d-54ed63bc311d tempest-ServerAddressesTestJSON-575205737 tempest-ServerAddressesTestJSON-575205737-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1803.809894] env[62519]: DEBUG nova.virt.hardware [None req-233e6b89-3144-41a2-9f7d-54ed63bc311d tempest-ServerAddressesTestJSON-575205737 tempest-ServerAddressesTestJSON-575205737-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1803.810976] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44c6a817-30d1-4b1a-84a7-21e5c8f870bd {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1803.820661] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1aa5b7ca-32dc-4d56-94ad-cc852f831f80 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1803.850869] env[62519]: DEBUG oslo_vmware.api [None req-093eb8f0-6a4b-4135-a0a0-fa19e327e07c tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Task: {'id': task-1803007, 'name': PowerOnVM_Task, 'duration_secs': 0.623912} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1803.850869] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-093eb8f0-6a4b-4135-a0a0-fa19e327e07c tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] [instance: 10bfd4ac-6f11-4c96-87a0-ce74bc1193c4] Powered on the VM {{(pid=62519) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1803.850869] env[62519]: INFO nova.compute.manager [None req-093eb8f0-6a4b-4135-a0a0-fa19e327e07c tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] [instance: 10bfd4ac-6f11-4c96-87a0-ce74bc1193c4] Took 8.59 seconds to spawn the instance on the hypervisor. [ 1803.851282] env[62519]: DEBUG nova.compute.manager [None req-093eb8f0-6a4b-4135-a0a0-fa19e327e07c tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] [instance: 10bfd4ac-6f11-4c96-87a0-ce74bc1193c4] Checking state {{(pid=62519) _get_power_state /opt/stack/nova/nova/compute/manager.py:1799}} [ 1803.851830] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6289f119-8fa7-4ab3-80ce-bbd682e3e12e {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1803.939125] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1802996, 'name': CreateVM_Task} progress is 25%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1804.106078] env[62519]: DEBUG nova.compute.manager [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] [instance: 27f9e890-4733-43aa-9bf1-351d42d75418] Instance has had 0 of 5 cleanup attempts {{(pid=62519) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11733}} [ 1804.115946] env[62519]: DEBUG nova.virt.hardware [None req-d15fc234-4631-453a-b364-87a19d8e4ec4 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T07:56:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=15793716-f1d9-4a86-9030-717adf498693,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1804.116192] env[62519]: DEBUG nova.virt.hardware [None req-d15fc234-4631-453a-b364-87a19d8e4ec4 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Flavor limits 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1804.116379] env[62519]: DEBUG nova.virt.hardware [None req-d15fc234-4631-453a-b364-87a19d8e4ec4 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Image limits 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1804.116533] env[62519]: DEBUG nova.virt.hardware [None req-d15fc234-4631-453a-b364-87a19d8e4ec4 
tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Flavor pref 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1804.116862] env[62519]: DEBUG nova.virt.hardware [None req-d15fc234-4631-453a-b364-87a19d8e4ec4 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Image pref 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1804.117107] env[62519]: DEBUG nova.virt.hardware [None req-d15fc234-4631-453a-b364-87a19d8e4ec4 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1804.117556] env[62519]: DEBUG nova.virt.hardware [None req-d15fc234-4631-453a-b364-87a19d8e4ec4 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1804.117556] env[62519]: DEBUG nova.virt.hardware [None req-d15fc234-4631-453a-b364-87a19d8e4ec4 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62519) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1804.117761] env[62519]: DEBUG nova.virt.hardware [None req-d15fc234-4631-453a-b364-87a19d8e4ec4 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Got 1 possible topologies {{(pid=62519) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1804.117931] env[62519]: DEBUG nova.virt.hardware [None req-d15fc234-4631-453a-b364-87a19d8e4ec4 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1804.118273] env[62519]: DEBUG nova.virt.hardware [None req-d15fc234-4631-453a-b364-87a19d8e4ec4 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1804.126756] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-d15fc234-4631-453a-b364-87a19d8e4ec4 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] [instance: 5da884af-d8d2-409b-99bd-e5370e44e9f0] Reconfiguring VM instance instance-00000055 to detach disk 2000 {{(pid=62519) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1804.127083] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-fa0f4ed5-a418-4806-8ea7-0ad4a2dd03b5 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1804.148887] env[62519]: DEBUG oslo_vmware.api [None req-d15fc234-4631-453a-b364-87a19d8e4ec4 tempest-ServerDiskConfigTestJSON-1314025576 
tempest-ServerDiskConfigTestJSON-1314025576-project-member] Waiting for the task: (returnval){ [ 1804.148887] env[62519]: value = "task-1803010" [ 1804.148887] env[62519]: _type = "Task" [ 1804.148887] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1804.163642] env[62519]: DEBUG oslo_vmware.api [None req-d15fc234-4631-453a-b364-87a19d8e4ec4 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Task: {'id': task-1803010, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1804.237101] env[62519]: DEBUG oslo_concurrency.lockutils [None req-34394a41-2f5a-4a60-841d-89a226332d22 tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.475s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1804.237608] env[62519]: DEBUG nova.compute.manager [None req-34394a41-2f5a-4a60-841d-89a226332d22 tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] [instance: 71c6bd0c-40da-44eb-8f37-4f1b7fb0f0a2] Start building networks asynchronously for instance. {{(pid=62519) _build_resources /opt/stack/nova/nova/compute/manager.py:2838}} [ 1804.240293] env[62519]: DEBUG oslo_concurrency.lockutils [None req-a98c3ed1-4d3d-4762-b887-532cdd3075e3 tempest-ServerPasswordTestJSON-1830489623 tempest-ServerPasswordTestJSON-1830489623-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 17.699s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1804.240693] env[62519]: DEBUG nova.objects.instance [None req-a98c3ed1-4d3d-4762-b887-532cdd3075e3 tempest-ServerPasswordTestJSON-1830489623 tempest-ServerPasswordTestJSON-1830489623-project-member] Lazy-loading 'resources' on Instance uuid 4e7db12e-c7f7-4d2a-b797-1371fc839a9e {{(pid=62519) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1804.373814] env[62519]: INFO nova.compute.manager [None req-093eb8f0-6a4b-4135-a0a0-fa19e327e07c tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] [instance: 10bfd4ac-6f11-4c96-87a0-ce74bc1193c4] Took 31.46 seconds to build instance. [ 1804.448027] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1802996, 'name': CreateVM_Task} progress is 25%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1804.611285] env[62519]: DEBUG nova.compute.manager [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] [instance: 8070aa59-3547-460a-b914-0e84620023d0] Instance has had 0 of 5 cleanup attempts {{(pid=62519) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11733}} [ 1804.668190] env[62519]: DEBUG oslo_vmware.api [None req-d15fc234-4631-453a-b364-87a19d8e4ec4 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Task: {'id': task-1803010, 'name': ReconfigVM_Task, 'duration_secs': 0.364045} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1804.669210] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-d15fc234-4631-453a-b364-87a19d8e4ec4 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] [instance: 5da884af-d8d2-409b-99bd-e5370e44e9f0] Reconfigured VM instance instance-00000055 to detach disk 2000 {{(pid=62519) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1804.670436] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0227bad8-2117-4b1c-a784-5108f4f6f447 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1804.698638] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-d15fc234-4631-453a-b364-87a19d8e4ec4 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] [instance: 5da884af-d8d2-409b-99bd-e5370e44e9f0] Reconfiguring VM instance instance-00000055 to attach disk [datastore1] 5da884af-d8d2-409b-99bd-e5370e44e9f0/5da884af-d8d2-409b-99bd-e5370e44e9f0.vmdk or device None with type thin {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1804.699175] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-27fde66f-6f08-4398-8579-2f51043a62b0 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1804.720547] env[62519]: DEBUG oslo_vmware.api [None req-d15fc234-4631-453a-b364-87a19d8e4ec4 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Waiting for the task: (returnval){ [ 1804.720547] env[62519]: value = "task-1803012" [ 1804.720547] env[62519]: _type = "Task" [ 1804.720547] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1804.732988] env[62519]: DEBUG oslo_vmware.api [None req-d15fc234-4631-453a-b364-87a19d8e4ec4 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Task: {'id': task-1803012, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1804.744619] env[62519]: DEBUG nova.compute.utils [None req-34394a41-2f5a-4a60-841d-89a226332d22 tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] Using /dev/sd instead of None {{(pid=62519) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1804.754056] env[62519]: DEBUG nova.compute.manager [None req-34394a41-2f5a-4a60-841d-89a226332d22 tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] [instance: 71c6bd0c-40da-44eb-8f37-4f1b7fb0f0a2] Allocating IP information in the background. 
{{(pid=62519) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1989}} [ 1804.754056] env[62519]: DEBUG nova.network.neutron [None req-34394a41-2f5a-4a60-841d-89a226332d22 tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] [instance: 71c6bd0c-40da-44eb-8f37-4f1b7fb0f0a2] allocate_for_instance() {{(pid=62519) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1804.825747] env[62519]: DEBUG nova.policy [None req-34394a41-2f5a-4a60-841d-89a226332d22 tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b94adf2a04874e489cdadc04a95ae3af', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '9808ad7e38e34658aac06ebc932b0e32', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62519) authorize /opt/stack/nova/nova/policy.py:192}} [ 1804.925786] env[62519]: DEBUG oslo_concurrency.lockutils [None req-28584ffa-62b0-4b86-a652-0c3e2c7943e5 tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] Acquiring lock "99f22198-1a65-4d0d-b665-90c7063dbdb9" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1804.926050] env[62519]: DEBUG oslo_concurrency.lockutils [None req-28584ffa-62b0-4b86-a652-0c3e2c7943e5 tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] Lock "99f22198-1a65-4d0d-b665-90c7063dbdb9" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1804.945006] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1802996, 'name': CreateVM_Task} progress is 25%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1805.112788] env[62519]: DEBUG nova.compute.manager [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] [instance: f19c860f-736a-4783-8ef5-8262040e53a3] Instance has had 0 of 5 cleanup attempts {{(pid=62519) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11733}} [ 1805.122087] env[62519]: DEBUG nova.compute.manager [req-7739ca9f-433d-4173-b273-a56f4de771e4 req-8342070c-5263-4cda-bf90-021a1dc5b873 service nova] [instance: 10bfd4ac-6f11-4c96-87a0-ce74bc1193c4] Received event network-changed-037e7926-5eac-487b-a2fa-4124ead561df {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1805.122300] env[62519]: DEBUG nova.compute.manager [req-7739ca9f-433d-4173-b273-a56f4de771e4 req-8342070c-5263-4cda-bf90-021a1dc5b873 service nova] [instance: 10bfd4ac-6f11-4c96-87a0-ce74bc1193c4] Refreshing instance network info cache due to event network-changed-037e7926-5eac-487b-a2fa-4124ead561df. 
{{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11628}} [ 1805.122594] env[62519]: DEBUG oslo_concurrency.lockutils [req-7739ca9f-433d-4173-b273-a56f4de771e4 req-8342070c-5263-4cda-bf90-021a1dc5b873 service nova] Acquiring lock "refresh_cache-10bfd4ac-6f11-4c96-87a0-ce74bc1193c4" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1805.122636] env[62519]: DEBUG oslo_concurrency.lockutils [req-7739ca9f-433d-4173-b273-a56f4de771e4 req-8342070c-5263-4cda-bf90-021a1dc5b873 service nova] Acquired lock "refresh_cache-10bfd4ac-6f11-4c96-87a0-ce74bc1193c4" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1805.122795] env[62519]: DEBUG nova.network.neutron [req-7739ca9f-433d-4173-b273-a56f4de771e4 req-8342070c-5263-4cda-bf90-021a1dc5b873 service nova] [instance: 10bfd4ac-6f11-4c96-87a0-ce74bc1193c4] Refreshing network info cache for port 037e7926-5eac-487b-a2fa-4124ead561df {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1805.129623] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86d51aba-1a56-455c-944a-004ee84d1c76 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1805.139260] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a397341e-240b-43eb-9172-a22845e8a020 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1805.174173] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0eabb9e9-ce55-407a-a076-3958f9cadc62 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1805.183209] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5da03cb3-a262-4e26-a29b-986ddf155a3c {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1805.199186] env[62519]: DEBUG nova.compute.provider_tree [None req-a98c3ed1-4d3d-4762-b887-532cdd3075e3 tempest-ServerPasswordTestJSON-1830489623 tempest-ServerPasswordTestJSON-1830489623-project-member] Inventory has not changed in ProviderTree for provider: f8ca0d98-9158-4b85-ae0e-b106f966dd44 {{(pid=62519) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1805.213220] env[62519]: DEBUG nova.network.neutron [None req-34394a41-2f5a-4a60-841d-89a226332d22 tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] [instance: 71c6bd0c-40da-44eb-8f37-4f1b7fb0f0a2] Successfully created port: 59a0726b-5696-46b1-88ef-7b165b586888 {{(pid=62519) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1805.231629] env[62519]: DEBUG oslo_vmware.api [None req-d15fc234-4631-453a-b364-87a19d8e4ec4 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Task: {'id': task-1803012, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1805.254370] env[62519]: DEBUG nova.compute.manager [None req-34394a41-2f5a-4a60-841d-89a226332d22 tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] [instance: 71c6bd0c-40da-44eb-8f37-4f1b7fb0f0a2] Start building block device mappings for instance. {{(pid=62519) _build_resources /opt/stack/nova/nova/compute/manager.py:2873}} [ 1805.429835] env[62519]: INFO nova.compute.manager [None req-28584ffa-62b0-4b86-a652-0c3e2c7943e5 tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] [instance: 99f22198-1a65-4d0d-b665-90c7063dbdb9] Detaching volume 38c2442b-9ab6-4c99-96ff-e9c0c22ab09d [ 1805.447024] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1802996, 'name': CreateVM_Task} progress is 25%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1805.477663] env[62519]: INFO nova.virt.block_device [None req-28584ffa-62b0-4b86-a652-0c3e2c7943e5 tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] [instance: 99f22198-1a65-4d0d-b665-90c7063dbdb9] Attempting to driver detach volume 38c2442b-9ab6-4c99-96ff-e9c0c22ab09d from mountpoint /dev/sdb [ 1805.477663] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-28584ffa-62b0-4b86-a652-0c3e2c7943e5 tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] [instance: 99f22198-1a65-4d0d-b665-90c7063dbdb9] Volume detach. Driver type: vmdk {{(pid=62519) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1805.477663] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-28584ffa-62b0-4b86-a652-0c3e2c7943e5 tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] [instance: 99f22198-1a65-4d0d-b665-90c7063dbdb9] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-373782', 'volume_id': '38c2442b-9ab6-4c99-96ff-e9c0c22ab09d', 'name': 'volume-38c2442b-9ab6-4c99-96ff-e9c0c22ab09d', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '99f22198-1a65-4d0d-b665-90c7063dbdb9', 'attached_at': '', 'detached_at': '', 'volume_id': '38c2442b-9ab6-4c99-96ff-e9c0c22ab09d', 'serial': '38c2442b-9ab6-4c99-96ff-e9c0c22ab09d'} {{(pid=62519) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1805.478718] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e440989-992f-49e8-b00b-ef9fa7f5f1be {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1805.505516] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6504b65-b1e6-4e53-ab44-8e1ec5051dee {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1805.513829] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55f2ea62-288c-4787-bfe3-bd2506c2aafd {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1805.542298] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-2b5c119b-f257-428c-b8f8-106b586d12dc {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1805.562474] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-28584ffa-62b0-4b86-a652-0c3e2c7943e5 tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] The volume has not been displaced from its original location: [datastore1] volume-38c2442b-9ab6-4c99-96ff-e9c0c22ab09d/volume-38c2442b-9ab6-4c99-96ff-e9c0c22ab09d.vmdk. No consolidation needed. {{(pid=62519) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1805.567679] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-28584ffa-62b0-4b86-a652-0c3e2c7943e5 tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] [instance: 99f22198-1a65-4d0d-b665-90c7063dbdb9] Reconfiguring VM instance instance-0000003e to detach disk 2001 {{(pid=62519) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1805.567934] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2618b8e2-28a5-4e71-9160-6d5a5f5a8a51 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1805.587331] env[62519]: DEBUG oslo_vmware.api [None req-28584ffa-62b0-4b86-a652-0c3e2c7943e5 tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] Waiting for the task: (returnval){ [ 1805.587331] env[62519]: value = "task-1803013" [ 1805.587331] env[62519]: _type = "Task" [ 1805.587331] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1805.596204] env[62519]: DEBUG oslo_vmware.api [None req-28584ffa-62b0-4b86-a652-0c3e2c7943e5 tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] Task: {'id': task-1803013, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1805.626942] env[62519]: DEBUG nova.compute.manager [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] [instance: 22380aef-c725-43a0-a957-06ced9518c21] Instance has had 0 of 5 cleanup attempts {{(pid=62519) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11733}} [ 1805.705112] env[62519]: DEBUG nova.scheduler.client.report [None req-a98c3ed1-4d3d-4762-b887-532cdd3075e3 tempest-ServerPasswordTestJSON-1830489623 tempest-ServerPasswordTestJSON-1830489623-project-member] Inventory has not changed for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 157, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1805.735649] env[62519]: DEBUG oslo_vmware.api [None req-d15fc234-4631-453a-b364-87a19d8e4ec4 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Task: {'id': task-1803012, 'name': ReconfigVM_Task, 'duration_secs': 0.707593} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1805.735649] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-d15fc234-4631-453a-b364-87a19d8e4ec4 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] [instance: 5da884af-d8d2-409b-99bd-e5370e44e9f0] Reconfigured VM instance instance-00000055 to attach disk [datastore1] 5da884af-d8d2-409b-99bd-e5370e44e9f0/5da884af-d8d2-409b-99bd-e5370e44e9f0.vmdk or device None with type thin {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1805.736049] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-d15fc234-4631-453a-b364-87a19d8e4ec4 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] [instance: 5da884af-d8d2-409b-99bd-e5370e44e9f0] Updating instance '5da884af-d8d2-409b-99bd-e5370e44e9f0' progress to 50 {{(pid=62519) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1805.867887] env[62519]: DEBUG nova.network.neutron [req-7739ca9f-433d-4173-b273-a56f4de771e4 req-8342070c-5263-4cda-bf90-021a1dc5b873 service nova] [instance: 10bfd4ac-6f11-4c96-87a0-ce74bc1193c4] Updated VIF entry in instance network info cache for port 037e7926-5eac-487b-a2fa-4124ead561df. {{(pid=62519) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1805.868255] env[62519]: DEBUG nova.network.neutron [req-7739ca9f-433d-4173-b273-a56f4de771e4 req-8342070c-5263-4cda-bf90-021a1dc5b873 service nova] [instance: 10bfd4ac-6f11-4c96-87a0-ce74bc1193c4] Updating instance_info_cache with network_info: [{"id": "037e7926-5eac-487b-a2fa-4124ead561df", "address": "fa:16:3e:f0:b9:0e", "network": {"id": "3996e7f6-f093-4152-af91-6fb77f32a1c5", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-2013009403-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.197", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "63a46158057949478e5c79fbe0d4d5d4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3093647a-bab7-4562-ada0-428725e8c0fc", "external-id": "nsx-vlan-transportzone-660", "segmentation_id": 660, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap037e7926-5e", "ovs_interfaceid": "037e7926-5eac-487b-a2fa-4124ead561df", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1805.886791] env[62519]: DEBUG oslo_concurrency.lockutils [None req-093eb8f0-6a4b-4135-a0a0-fa19e327e07c tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Lock "10bfd4ac-6f11-4c96-87a0-ce74bc1193c4" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 33.979s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1805.944250] env[62519]: 
DEBUG oslo_vmware.api [-] Task: {'id': task-1802996, 'name': CreateVM_Task} progress is 25%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1806.101039] env[62519]: DEBUG oslo_vmware.api [None req-28584ffa-62b0-4b86-a652-0c3e2c7943e5 tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] Task: {'id': task-1803013, 'name': ReconfigVM_Task, 'duration_secs': 0.394006} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1806.102318] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-28584ffa-62b0-4b86-a652-0c3e2c7943e5 tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] [instance: 99f22198-1a65-4d0d-b665-90c7063dbdb9] Reconfigured VM instance instance-0000003e to detach disk 2001 {{(pid=62519) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1806.106712] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-81f24b63-da9d-4f89-a4e1-7ba80c992111 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1806.123666] env[62519]: DEBUG oslo_vmware.api [None req-28584ffa-62b0-4b86-a652-0c3e2c7943e5 tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] Waiting for the task: (returnval){ [ 1806.123666] env[62519]: value = "task-1803014" [ 1806.123666] env[62519]: _type = "Task" [ 1806.123666] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1806.133069] env[62519]: DEBUG nova.compute.manager [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] [instance: 09eefc1a-011b-4d2c-ab75-a1fcee740907] Instance has had 0 of 5 cleanup attempts {{(pid=62519) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11733}} [ 1806.135757] env[62519]: DEBUG oslo_vmware.api [None req-28584ffa-62b0-4b86-a652-0c3e2c7943e5 tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] Task: {'id': task-1803014, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1806.210330] env[62519]: DEBUG oslo_concurrency.lockutils [None req-a98c3ed1-4d3d-4762-b887-532cdd3075e3 tempest-ServerPasswordTestJSON-1830489623 tempest-ServerPasswordTestJSON-1830489623-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.970s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1806.212653] env[62519]: DEBUG oslo_concurrency.lockutils [None req-04c01760-a718-4b80-a003-ee7594a61827 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 11.274s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1806.214165] env[62519]: INFO nova.compute.claims [None req-04c01760-a718-4b80-a003-ee7594a61827 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] [instance: 21daee7a-1788-4a1c-969e-65b696702bf2] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1806.242678] env[62519]: INFO nova.scheduler.client.report [None req-a98c3ed1-4d3d-4762-b887-532cdd3075e3 tempest-ServerPasswordTestJSON-1830489623 tempest-ServerPasswordTestJSON-1830489623-project-member] Deleted allocations for instance 4e7db12e-c7f7-4d2a-b797-1371fc839a9e [ 1806.249994] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8bdba644-d42c-4f4c-9fb3-a40e39914cff {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1806.271728] env[62519]: DEBUG nova.compute.manager [None req-34394a41-2f5a-4a60-841d-89a226332d22 tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] [instance: 71c6bd0c-40da-44eb-8f37-4f1b7fb0f0a2] Start spawning the instance on the hypervisor. 
{{(pid=62519) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2647}} [ 1806.275091] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-662c8480-e55a-44ed-b24d-f9d07564b7c9 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1806.295047] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-d15fc234-4631-453a-b364-87a19d8e4ec4 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] [instance: 5da884af-d8d2-409b-99bd-e5370e44e9f0] Updating instance '5da884af-d8d2-409b-99bd-e5370e44e9f0' progress to 67 {{(pid=62519) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1806.306539] env[62519]: DEBUG nova.virt.hardware [None req-34394a41-2f5a-4a60-841d-89a226332d22 tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T07:56:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T07:55:55Z,direct_url=,disk_format='vmdk',id=15793716-f1d9-4a86-9030-717adf498693,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4069337684d348e0a1ab4eb6a1a2b14d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T07:55:56Z,virtual_size=,visibility=), allow threads: False {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1806.306802] env[62519]: DEBUG nova.virt.hardware [None req-34394a41-2f5a-4a60-841d-89a226332d22 tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] Flavor limits 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1806.306980] env[62519]: DEBUG nova.virt.hardware [None req-34394a41-2f5a-4a60-841d-89a226332d22 tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] Image limits 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1806.307347] env[62519]: DEBUG nova.virt.hardware [None req-34394a41-2f5a-4a60-841d-89a226332d22 tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] Flavor pref 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1806.307347] env[62519]: DEBUG nova.virt.hardware [None req-34394a41-2f5a-4a60-841d-89a226332d22 tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] Image pref 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1806.307570] env[62519]: DEBUG nova.virt.hardware [None req-34394a41-2f5a-4a60-841d-89a226332d22 tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62519) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:434}} [ 1806.307707] env[62519]: DEBUG nova.virt.hardware [None req-34394a41-2f5a-4a60-841d-89a226332d22 tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1806.307838] env[62519]: DEBUG nova.virt.hardware [None req-34394a41-2f5a-4a60-841d-89a226332d22 tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62519) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1806.308013] env[62519]: DEBUG nova.virt.hardware [None req-34394a41-2f5a-4a60-841d-89a226332d22 tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] Got 1 possible topologies {{(pid=62519) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1806.308188] env[62519]: DEBUG nova.virt.hardware [None req-34394a41-2f5a-4a60-841d-89a226332d22 tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1806.308358] env[62519]: DEBUG nova.virt.hardware [None req-34394a41-2f5a-4a60-841d-89a226332d22 tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1806.310329] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe269b54-7cb9-4d46-9702-1c54407c5e53 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1806.320333] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23bb04e8-1315-4452-ae84-53f5782ce09d {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1806.370878] env[62519]: DEBUG oslo_concurrency.lockutils [req-7739ca9f-433d-4173-b273-a56f4de771e4 req-8342070c-5263-4cda-bf90-021a1dc5b873 service nova] Releasing lock "refresh_cache-10bfd4ac-6f11-4c96-87a0-ce74bc1193c4" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1806.444944] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1802996, 'name': CreateVM_Task} progress is 25%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1806.634374] env[62519]: DEBUG oslo_vmware.api [None req-28584ffa-62b0-4b86-a652-0c3e2c7943e5 tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] Task: {'id': task-1803014, 'name': ReconfigVM_Task, 'duration_secs': 0.162887} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1806.634567] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-28584ffa-62b0-4b86-a652-0c3e2c7943e5 tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] [instance: 99f22198-1a65-4d0d-b665-90c7063dbdb9] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-373782', 'volume_id': '38c2442b-9ab6-4c99-96ff-e9c0c22ab09d', 'name': 'volume-38c2442b-9ab6-4c99-96ff-e9c0c22ab09d', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '99f22198-1a65-4d0d-b665-90c7063dbdb9', 'attached_at': '', 'detached_at': '', 'volume_id': '38c2442b-9ab6-4c99-96ff-e9c0c22ab09d', 'serial': '38c2442b-9ab6-4c99-96ff-e9c0c22ab09d'} {{(pid=62519) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1806.637559] env[62519]: DEBUG nova.compute.manager [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] [instance: 3b506d10-a427-47b8-ab5f-c35e450b7eb1] Instance has had 0 of 5 cleanup attempts {{(pid=62519) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11733}} [ 1806.755649] env[62519]: DEBUG oslo_concurrency.lockutils [None req-a98c3ed1-4d3d-4762-b887-532cdd3075e3 tempest-ServerPasswordTestJSON-1830489623 tempest-ServerPasswordTestJSON-1830489623-project-member] Lock "4e7db12e-c7f7-4d2a-b797-1371fc839a9e" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 23.931s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1806.842858] env[62519]: DEBUG nova.network.neutron [None req-d15fc234-4631-453a-b364-87a19d8e4ec4 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] [instance: 5da884af-d8d2-409b-99bd-e5370e44e9f0] Port f8e70085-af0c-4731-8b61-cf5cd2460344 binding to destination host cpu-1 is already ACTIVE {{(pid=62519) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 1806.954184] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1802996, 'name': CreateVM_Task} progress is 25%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1806.986344] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-888de478-1e31-43ae-bdd7-d7e8bcd57639 tempest-ServerActionsV293TestJSON-760697799 tempest-ServerActionsV293TestJSON-760697799-project-member] [instance: bace23b3-b7f4-4f3b-8986-0076440d096d] Unregistered the VM {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1806.986623] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-888de478-1e31-43ae-bdd7-d7e8bcd57639 tempest-ServerActionsV293TestJSON-760697799 tempest-ServerActionsV293TestJSON-760697799-project-member] [instance: bace23b3-b7f4-4f3b-8986-0076440d096d] Deleting contents of the VM from datastore datastore1 {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1806.986791] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-888de478-1e31-43ae-bdd7-d7e8bcd57639 tempest-ServerActionsV293TestJSON-760697799 tempest-ServerActionsV293TestJSON-760697799-project-member] Deleting the datastore file [datastore1] bace23b3-b7f4-4f3b-8986-0076440d096d {{(pid=62519) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1806.987126] env[62519]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-cac22b72-6de3-40c1-8d44-eb2a44fcbce2 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1806.995023] env[62519]: DEBUG oslo_vmware.api [None req-888de478-1e31-43ae-bdd7-d7e8bcd57639 tempest-ServerActionsV293TestJSON-760697799 tempest-ServerActionsV293TestJSON-760697799-project-member] Waiting for the task: (returnval){ [ 1806.995023] env[62519]: value = "task-1803015" [ 1806.995023] env[62519]: _type = "Task" [ 1806.995023] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1807.005149] env[62519]: DEBUG oslo_vmware.api [None req-888de478-1e31-43ae-bdd7-d7e8bcd57639 tempest-ServerActionsV293TestJSON-760697799 tempest-ServerActionsV293TestJSON-760697799-project-member] Task: {'id': task-1803015, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1807.077139] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-97cffebd-23d9-4e9a-84e7-8e33185957ef tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] [instance: 46b3a0fb-29f6-4b66-a091-2d125b69d109] Volume attach. 
Driver type: vmdk {{(pid=62519) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1807.077399] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-97cffebd-23d9-4e9a-84e7-8e33185957ef tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] [instance: 46b3a0fb-29f6-4b66-a091-2d125b69d109] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-373810', 'volume_id': '9c0a59d7-6968-4064-b0ab-53e4c74275f5', 'name': 'volume-9c0a59d7-6968-4064-b0ab-53e4c74275f5', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '46b3a0fb-29f6-4b66-a091-2d125b69d109', 'attached_at': '', 'detached_at': '', 'volume_id': '9c0a59d7-6968-4064-b0ab-53e4c74275f5', 'serial': '9c0a59d7-6968-4064-b0ab-53e4c74275f5'} {{(pid=62519) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1807.078386] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41410ffc-6be3-4851-939a-d4a5aab3f7c8 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1807.108243] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ecbcad23-9406-4c8d-a599-0fa5efbd2e3d {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1807.136455] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-97cffebd-23d9-4e9a-84e7-8e33185957ef tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] [instance: 46b3a0fb-29f6-4b66-a091-2d125b69d109] Reconfiguring VM instance instance-0000003a to attach disk [datastore1] volume-9c0a59d7-6968-4064-b0ab-53e4c74275f5/volume-9c0a59d7-6968-4064-b0ab-53e4c74275f5.vmdk or device None with type thin {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1807.136942] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5c22dc05-ef4f-4910-9eb9-cd3aef0f7fe5 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1807.151121] env[62519]: DEBUG nova.compute.manager [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] [instance: a59be5e6-2316-4766-933a-4d01dfe4fec1] Instance has had 0 of 5 cleanup attempts {{(pid=62519) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11733}} [ 1807.158966] env[62519]: DEBUG oslo_vmware.api [None req-97cffebd-23d9-4e9a-84e7-8e33185957ef tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Waiting for the task: (returnval){ [ 1807.158966] env[62519]: value = "task-1803016" [ 1807.158966] env[62519]: _type = "Task" [ 1807.158966] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1807.168511] env[62519]: DEBUG oslo_vmware.api [None req-97cffebd-23d9-4e9a-84e7-8e33185957ef tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Task: {'id': task-1803016, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1807.200096] env[62519]: DEBUG nova.objects.instance [None req-28584ffa-62b0-4b86-a652-0c3e2c7943e5 tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] Lazy-loading 'flavor' on Instance uuid 99f22198-1a65-4d0d-b665-90c7063dbdb9 {{(pid=62519) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1807.447733] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1802996, 'name': CreateVM_Task, 'duration_secs': 6.290642} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1807.448395] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2bc8f11e-82fc-4acb-945e-15327c133920] Created VM on the ESX host {{(pid=62519) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1807.449541] env[62519]: DEBUG oslo_concurrency.lockutils [None req-58d8e09b-2ec4-4296-bd95-201b039f8f45 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1807.449740] env[62519]: DEBUG oslo_concurrency.lockutils [None req-58d8e09b-2ec4-4296-bd95-201b039f8f45 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Acquired lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1807.450183] env[62519]: DEBUG oslo_concurrency.lockutils [None req-58d8e09b-2ec4-4296-bd95-201b039f8f45 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1807.450535] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a5ebd336-00cc-4887-9f2c-1b3a836fdd79 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1807.459112] env[62519]: DEBUG oslo_vmware.api [None req-58d8e09b-2ec4-4296-bd95-201b039f8f45 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Waiting for the task: (returnval){ [ 1807.459112] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]5209cde0-fb75-9b76-f781-4d5581ad260d" [ 1807.459112] env[62519]: _type = "Task" [ 1807.459112] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1807.473053] env[62519]: DEBUG oslo_vmware.api [None req-58d8e09b-2ec4-4296-bd95-201b039f8f45 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]5209cde0-fb75-9b76-f781-4d5581ad260d, 'name': SearchDatastore_Task, 'duration_secs': 0.00993} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1807.473402] env[62519]: DEBUG oslo_concurrency.lockutils [None req-58d8e09b-2ec4-4296-bd95-201b039f8f45 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Releasing lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1807.473571] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-58d8e09b-2ec4-4296-bd95-201b039f8f45 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] [instance: 2bc8f11e-82fc-4acb-945e-15327c133920] Processing image 15793716-f1d9-4a86-9030-717adf498693 {{(pid=62519) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1807.474183] env[62519]: DEBUG oslo_concurrency.lockutils [None req-58d8e09b-2ec4-4296-bd95-201b039f8f45 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1807.474362] env[62519]: DEBUG oslo_concurrency.lockutils [None req-58d8e09b-2ec4-4296-bd95-201b039f8f45 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Acquired lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1807.475033] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-58d8e09b-2ec4-4296-bd95-201b039f8f45 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62519) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1807.475033] env[62519]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-53427c54-1011-4dc3-b8be-20d9458918f5 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1807.483864] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-58d8e09b-2ec4-4296-bd95-201b039f8f45 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62519) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1807.484059] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-58d8e09b-2ec4-4296-bd95-201b039f8f45 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62519) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1807.484779] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b87d9dcc-a8b7-4efb-ae44-71ce241ec268 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1807.491029] env[62519]: DEBUG oslo_vmware.api [None req-58d8e09b-2ec4-4296-bd95-201b039f8f45 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Waiting for the task: (returnval){ [ 1807.491029] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]52a74422-3832-a56f-8599-0f15d0fa377e" [ 1807.491029] env[62519]: _type = "Task" [ 1807.491029] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1807.503107] env[62519]: DEBUG oslo_vmware.api [None req-58d8e09b-2ec4-4296-bd95-201b039f8f45 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52a74422-3832-a56f-8599-0f15d0fa377e, 'name': SearchDatastore_Task, 'duration_secs': 0.009407} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1807.504304] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-efc108e6-716f-4235-89e0-49dd80e80eb8 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1807.509649] env[62519]: DEBUG oslo_vmware.api [None req-888de478-1e31-43ae-bdd7-d7e8bcd57639 tempest-ServerActionsV293TestJSON-760697799 tempest-ServerActionsV293TestJSON-760697799-project-member] Task: {'id': task-1803015, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.104231} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1807.512562] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-888de478-1e31-43ae-bdd7-d7e8bcd57639 tempest-ServerActionsV293TestJSON-760697799 tempest-ServerActionsV293TestJSON-760697799-project-member] Deleted the datastore file {{(pid=62519) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1807.512764] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-888de478-1e31-43ae-bdd7-d7e8bcd57639 tempest-ServerActionsV293TestJSON-760697799 tempest-ServerActionsV293TestJSON-760697799-project-member] [instance: bace23b3-b7f4-4f3b-8986-0076440d096d] Deleted contents of the VM from datastore datastore1 {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1807.512932] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-888de478-1e31-43ae-bdd7-d7e8bcd57639 tempest-ServerActionsV293TestJSON-760697799 tempest-ServerActionsV293TestJSON-760697799-project-member] [instance: bace23b3-b7f4-4f3b-8986-0076440d096d] Instance destroyed {{(pid=62519) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1807.515769] env[62519]: DEBUG oslo_vmware.api [None req-58d8e09b-2ec4-4296-bd95-201b039f8f45 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Waiting for the task: (returnval){ [ 1807.515769] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]521d186e-b4e4-c889-eafe-1ce3083a3c24" [ 1807.515769] env[62519]: _type = "Task" [ 1807.515769] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1807.526095] env[62519]: DEBUG oslo_vmware.api [None req-58d8e09b-2ec4-4296-bd95-201b039f8f45 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]521d186e-b4e4-c889-eafe-1ce3083a3c24, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1807.553629] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a334f94-66ae-49b5-a0ec-bf91f31d3312 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1807.561846] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9b93d21-ac86-4259-a332-f130fd2703fb {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1807.596718] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c23f0483-774e-4dd4-b308-3f75c9958ca9 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1807.600972] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-888de478-1e31-43ae-bdd7-d7e8bcd57639 tempest-ServerActionsV293TestJSON-760697799 tempest-ServerActionsV293TestJSON-760697799-project-member] [instance: bace23b3-b7f4-4f3b-8986-0076440d096d] Volume detach. 
Driver type: vmdk {{(pid=62519) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1807.600972] env[62519]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-b10ff9fd-c48c-44d9-a306-f12663a76dc2 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1807.609929] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55136ecd-65b5-42d7-9371-5372108f89d8 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1807.616326] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f98704d2-4e52-40dd-a0dc-10a7f340954e {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1807.639375] env[62519]: DEBUG nova.compute.provider_tree [None req-04c01760-a718-4b80-a003-ee7594a61827 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Inventory has not changed in ProviderTree for provider: f8ca0d98-9158-4b85-ae0e-b106f966dd44 {{(pid=62519) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1807.655238] env[62519]: DEBUG nova.compute.manager [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] [instance: 2b68e95a-1d93-43ee-b0a6-996c9042f5c7] Instance has had 0 of 5 cleanup attempts {{(pid=62519) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11733}} [ 1807.658702] env[62519]: ERROR nova.compute.manager [None req-888de478-1e31-43ae-bdd7-d7e8bcd57639 tempest-ServerActionsV293TestJSON-760697799 tempest-ServerActionsV293TestJSON-760697799-project-member] [instance: bace23b3-b7f4-4f3b-8986-0076440d096d] Failed to detach volume f0e225ed-ae23-4580-9ecb-e74214791c63 from /dev/sda: nova.exception.InstanceNotFound: Instance bace23b3-b7f4-4f3b-8986-0076440d096d could not be found. 
[ 1807.658702] env[62519]: ERROR nova.compute.manager [instance: bace23b3-b7f4-4f3b-8986-0076440d096d] Traceback (most recent call last): [ 1807.658702] env[62519]: ERROR nova.compute.manager [instance: bace23b3-b7f4-4f3b-8986-0076440d096d] File "/opt/stack/nova/nova/compute/manager.py", line 4194, in _do_rebuild_instance [ 1807.658702] env[62519]: ERROR nova.compute.manager [instance: bace23b3-b7f4-4f3b-8986-0076440d096d] self.driver.rebuild(**kwargs) [ 1807.658702] env[62519]: ERROR nova.compute.manager [instance: bace23b3-b7f4-4f3b-8986-0076440d096d] File "/opt/stack/nova/nova/virt/driver.py", line 497, in rebuild [ 1807.658702] env[62519]: ERROR nova.compute.manager [instance: bace23b3-b7f4-4f3b-8986-0076440d096d] raise NotImplementedError() [ 1807.658702] env[62519]: ERROR nova.compute.manager [instance: bace23b3-b7f4-4f3b-8986-0076440d096d] NotImplementedError [ 1807.658702] env[62519]: ERROR nova.compute.manager [instance: bace23b3-b7f4-4f3b-8986-0076440d096d] [ 1807.658702] env[62519]: ERROR nova.compute.manager [instance: bace23b3-b7f4-4f3b-8986-0076440d096d] During handling of the above exception, another exception occurred: [ 1807.658702] env[62519]: ERROR nova.compute.manager [instance: bace23b3-b7f4-4f3b-8986-0076440d096d] [ 1807.658702] env[62519]: ERROR nova.compute.manager [instance: bace23b3-b7f4-4f3b-8986-0076440d096d] Traceback (most recent call last): [ 1807.658702] env[62519]: ERROR nova.compute.manager [instance: bace23b3-b7f4-4f3b-8986-0076440d096d] File "/opt/stack/nova/nova/compute/manager.py", line 3616, in _detach_root_volume [ 1807.658702] env[62519]: ERROR nova.compute.manager [instance: bace23b3-b7f4-4f3b-8986-0076440d096d] self.driver.detach_volume(context, old_connection_info, [ 1807.658702] env[62519]: ERROR nova.compute.manager [instance: bace23b3-b7f4-4f3b-8986-0076440d096d] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 561, in detach_volume [ 1807.658702] env[62519]: ERROR nova.compute.manager [instance: bace23b3-b7f4-4f3b-8986-0076440d096d] return self._volumeops.detach_volume(connection_info, instance) [ 1807.658702] env[62519]: ERROR nova.compute.manager [instance: bace23b3-b7f4-4f3b-8986-0076440d096d] File "/opt/stack/nova/nova/virt/vmwareapi/volumeops.py", line 649, in detach_volume [ 1807.658702] env[62519]: ERROR nova.compute.manager [instance: bace23b3-b7f4-4f3b-8986-0076440d096d] self._detach_volume_vmdk(connection_info, instance) [ 1807.658702] env[62519]: ERROR nova.compute.manager [instance: bace23b3-b7f4-4f3b-8986-0076440d096d] File "/opt/stack/nova/nova/virt/vmwareapi/volumeops.py", line 569, in _detach_volume_vmdk [ 1807.658702] env[62519]: ERROR nova.compute.manager [instance: bace23b3-b7f4-4f3b-8986-0076440d096d] vm_ref = vm_util.get_vm_ref(self._session, instance) [ 1807.658702] env[62519]: ERROR nova.compute.manager [instance: bace23b3-b7f4-4f3b-8986-0076440d096d] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1145, in get_vm_ref [ 1807.658702] env[62519]: ERROR nova.compute.manager [instance: bace23b3-b7f4-4f3b-8986-0076440d096d] stable_ref.fetch_moref(session) [ 1807.658702] env[62519]: ERROR nova.compute.manager [instance: bace23b3-b7f4-4f3b-8986-0076440d096d] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1136, in fetch_moref [ 1807.658702] env[62519]: ERROR nova.compute.manager [instance: bace23b3-b7f4-4f3b-8986-0076440d096d] raise exception.InstanceNotFound(instance_id=self._uuid) [ 1807.658702] env[62519]: ERROR nova.compute.manager [instance: bace23b3-b7f4-4f3b-8986-0076440d096d] 
nova.exception.InstanceNotFound: Instance bace23b3-b7f4-4f3b-8986-0076440d096d could not be found. [ 1807.658702] env[62519]: ERROR nova.compute.manager [instance: bace23b3-b7f4-4f3b-8986-0076440d096d] [ 1807.663202] env[62519]: DEBUG nova.scheduler.client.report [None req-04c01760-a718-4b80-a003-ee7594a61827 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Inventory has not changed for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 157, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1807.679652] env[62519]: DEBUG oslo_vmware.api [None req-97cffebd-23d9-4e9a-84e7-8e33185957ef tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Task: {'id': task-1803016, 'name': ReconfigVM_Task, 'duration_secs': 0.404496} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1807.679955] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-97cffebd-23d9-4e9a-84e7-8e33185957ef tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] [instance: 46b3a0fb-29f6-4b66-a091-2d125b69d109] Reconfigured VM instance instance-0000003a to attach disk [datastore1] volume-9c0a59d7-6968-4064-b0ab-53e4c74275f5/volume-9c0a59d7-6968-4064-b0ab-53e4c74275f5.vmdk or device None with type thin {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1807.685289] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0fe17c94-aa06-4a9c-8339-822ff334105a {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1807.708268] env[62519]: DEBUG oslo_vmware.api [None req-97cffebd-23d9-4e9a-84e7-8e33185957ef tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Waiting for the task: (returnval){ [ 1807.708268] env[62519]: value = "task-1803017" [ 1807.708268] env[62519]: _type = "Task" [ 1807.708268] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1807.720805] env[62519]: DEBUG oslo_vmware.api [None req-97cffebd-23d9-4e9a-84e7-8e33185957ef tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Task: {'id': task-1803017, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1807.841641] env[62519]: DEBUG nova.compute.utils [None req-888de478-1e31-43ae-bdd7-d7e8bcd57639 tempest-ServerActionsV293TestJSON-760697799 tempest-ServerActionsV293TestJSON-760697799-project-member] [instance: bace23b3-b7f4-4f3b-8986-0076440d096d] Build of instance bace23b3-b7f4-4f3b-8986-0076440d096d aborted: Failed to rebuild volume backed instance. 
{{(pid=62519) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1807.843716] env[62519]: ERROR nova.compute.manager [None req-888de478-1e31-43ae-bdd7-d7e8bcd57639 tempest-ServerActionsV293TestJSON-760697799 tempest-ServerActionsV293TestJSON-760697799-project-member] [instance: bace23b3-b7f4-4f3b-8986-0076440d096d] Setting instance vm_state to ERROR: nova.exception.BuildAbortException: Build of instance bace23b3-b7f4-4f3b-8986-0076440d096d aborted: Failed to rebuild volume backed instance. [ 1807.843716] env[62519]: ERROR nova.compute.manager [instance: bace23b3-b7f4-4f3b-8986-0076440d096d] Traceback (most recent call last): [ 1807.843716] env[62519]: ERROR nova.compute.manager [instance: bace23b3-b7f4-4f3b-8986-0076440d096d] File "/opt/stack/nova/nova/compute/manager.py", line 4194, in _do_rebuild_instance [ 1807.843716] env[62519]: ERROR nova.compute.manager [instance: bace23b3-b7f4-4f3b-8986-0076440d096d] self.driver.rebuild(**kwargs) [ 1807.843716] env[62519]: ERROR nova.compute.manager [instance: bace23b3-b7f4-4f3b-8986-0076440d096d] File "/opt/stack/nova/nova/virt/driver.py", line 497, in rebuild [ 1807.843716] env[62519]: ERROR nova.compute.manager [instance: bace23b3-b7f4-4f3b-8986-0076440d096d] raise NotImplementedError() [ 1807.843716] env[62519]: ERROR nova.compute.manager [instance: bace23b3-b7f4-4f3b-8986-0076440d096d] NotImplementedError [ 1807.843716] env[62519]: ERROR nova.compute.manager [instance: bace23b3-b7f4-4f3b-8986-0076440d096d] [ 1807.843716] env[62519]: ERROR nova.compute.manager [instance: bace23b3-b7f4-4f3b-8986-0076440d096d] During handling of the above exception, another exception occurred: [ 1807.843716] env[62519]: ERROR nova.compute.manager [instance: bace23b3-b7f4-4f3b-8986-0076440d096d] [ 1807.843716] env[62519]: ERROR nova.compute.manager [instance: bace23b3-b7f4-4f3b-8986-0076440d096d] Traceback (most recent call last): [ 1807.843716] env[62519]: ERROR nova.compute.manager [instance: bace23b3-b7f4-4f3b-8986-0076440d096d] File "/opt/stack/nova/nova/compute/manager.py", line 3651, in _rebuild_volume_backed_instance [ 1807.843716] env[62519]: ERROR nova.compute.manager [instance: bace23b3-b7f4-4f3b-8986-0076440d096d] self._detach_root_volume(context, instance, root_bdm) [ 1807.843716] env[62519]: ERROR nova.compute.manager [instance: bace23b3-b7f4-4f3b-8986-0076440d096d] File "/opt/stack/nova/nova/compute/manager.py", line 3630, in _detach_root_volume [ 1807.843716] env[62519]: ERROR nova.compute.manager [instance: bace23b3-b7f4-4f3b-8986-0076440d096d] with excutils.save_and_reraise_exception(): [ 1807.843716] env[62519]: ERROR nova.compute.manager [instance: bace23b3-b7f4-4f3b-8986-0076440d096d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1807.843716] env[62519]: ERROR nova.compute.manager [instance: bace23b3-b7f4-4f3b-8986-0076440d096d] self.force_reraise() [ 1807.843716] env[62519]: ERROR nova.compute.manager [instance: bace23b3-b7f4-4f3b-8986-0076440d096d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1807.843716] env[62519]: ERROR nova.compute.manager [instance: bace23b3-b7f4-4f3b-8986-0076440d096d] raise self.value [ 1807.843716] env[62519]: ERROR nova.compute.manager [instance: bace23b3-b7f4-4f3b-8986-0076440d096d] File "/opt/stack/nova/nova/compute/manager.py", line 3616, in _detach_root_volume [ 1807.843716] env[62519]: ERROR nova.compute.manager [instance: bace23b3-b7f4-4f3b-8986-0076440d096d] 
self.driver.detach_volume(context, old_connection_info, [ 1807.843716] env[62519]: ERROR nova.compute.manager [instance: bace23b3-b7f4-4f3b-8986-0076440d096d] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 561, in detach_volume [ 1807.843716] env[62519]: ERROR nova.compute.manager [instance: bace23b3-b7f4-4f3b-8986-0076440d096d] return self._volumeops.detach_volume(connection_info, instance) [ 1807.843716] env[62519]: ERROR nova.compute.manager [instance: bace23b3-b7f4-4f3b-8986-0076440d096d] File "/opt/stack/nova/nova/virt/vmwareapi/volumeops.py", line 649, in detach_volume [ 1807.843716] env[62519]: ERROR nova.compute.manager [instance: bace23b3-b7f4-4f3b-8986-0076440d096d] self._detach_volume_vmdk(connection_info, instance) [ 1807.843716] env[62519]: ERROR nova.compute.manager [instance: bace23b3-b7f4-4f3b-8986-0076440d096d] File "/opt/stack/nova/nova/virt/vmwareapi/volumeops.py", line 569, in _detach_volume_vmdk [ 1807.843716] env[62519]: ERROR nova.compute.manager [instance: bace23b3-b7f4-4f3b-8986-0076440d096d] vm_ref = vm_util.get_vm_ref(self._session, instance) [ 1807.843716] env[62519]: ERROR nova.compute.manager [instance: bace23b3-b7f4-4f3b-8986-0076440d096d] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1145, in get_vm_ref [ 1807.843716] env[62519]: ERROR nova.compute.manager [instance: bace23b3-b7f4-4f3b-8986-0076440d096d] stable_ref.fetch_moref(session) [ 1807.843716] env[62519]: ERROR nova.compute.manager [instance: bace23b3-b7f4-4f3b-8986-0076440d096d] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1136, in fetch_moref [ 1807.843716] env[62519]: ERROR nova.compute.manager [instance: bace23b3-b7f4-4f3b-8986-0076440d096d] raise exception.InstanceNotFound(instance_id=self._uuid) [ 1807.843716] env[62519]: ERROR nova.compute.manager [instance: bace23b3-b7f4-4f3b-8986-0076440d096d] nova.exception.InstanceNotFound: Instance bace23b3-b7f4-4f3b-8986-0076440d096d could not be found. 
[ 1807.843716] env[62519]: ERROR nova.compute.manager [instance: bace23b3-b7f4-4f3b-8986-0076440d096d] [ 1807.843716] env[62519]: ERROR nova.compute.manager [instance: bace23b3-b7f4-4f3b-8986-0076440d096d] During handling of the above exception, another exception occurred: [ 1807.843716] env[62519]: ERROR nova.compute.manager [instance: bace23b3-b7f4-4f3b-8986-0076440d096d] [ 1807.843716] env[62519]: ERROR nova.compute.manager [instance: bace23b3-b7f4-4f3b-8986-0076440d096d] Traceback (most recent call last): [ 1807.843716] env[62519]: ERROR nova.compute.manager [instance: bace23b3-b7f4-4f3b-8986-0076440d096d] File "/opt/stack/nova/nova/compute/manager.py", line 11355, in _error_out_instance_on_exception [ 1807.843716] env[62519]: ERROR nova.compute.manager [instance: bace23b3-b7f4-4f3b-8986-0076440d096d] yield [ 1807.843716] env[62519]: ERROR nova.compute.manager [instance: bace23b3-b7f4-4f3b-8986-0076440d096d] File "/opt/stack/nova/nova/compute/manager.py", line 3920, in rebuild_instance [ 1807.843716] env[62519]: ERROR nova.compute.manager [instance: bace23b3-b7f4-4f3b-8986-0076440d096d] self._do_rebuild_instance_with_claim( [ 1807.845568] env[62519]: ERROR nova.compute.manager [instance: bace23b3-b7f4-4f3b-8986-0076440d096d] File "/opt/stack/nova/nova/compute/manager.py", line 4006, in _do_rebuild_instance_with_claim [ 1807.845568] env[62519]: ERROR nova.compute.manager [instance: bace23b3-b7f4-4f3b-8986-0076440d096d] self._do_rebuild_instance( [ 1807.845568] env[62519]: ERROR nova.compute.manager [instance: bace23b3-b7f4-4f3b-8986-0076440d096d] File "/opt/stack/nova/nova/compute/manager.py", line 4198, in _do_rebuild_instance [ 1807.845568] env[62519]: ERROR nova.compute.manager [instance: bace23b3-b7f4-4f3b-8986-0076440d096d] self._rebuild_default_impl(**kwargs) [ 1807.845568] env[62519]: ERROR nova.compute.manager [instance: bace23b3-b7f4-4f3b-8986-0076440d096d] File "/opt/stack/nova/nova/compute/manager.py", line 3774, in _rebuild_default_impl [ 1807.845568] env[62519]: ERROR nova.compute.manager [instance: bace23b3-b7f4-4f3b-8986-0076440d096d] self._rebuild_volume_backed_instance( [ 1807.845568] env[62519]: ERROR nova.compute.manager [instance: bace23b3-b7f4-4f3b-8986-0076440d096d] File "/opt/stack/nova/nova/compute/manager.py", line 3666, in _rebuild_volume_backed_instance [ 1807.845568] env[62519]: ERROR nova.compute.manager [instance: bace23b3-b7f4-4f3b-8986-0076440d096d] raise exception.BuildAbortException( [ 1807.845568] env[62519]: ERROR nova.compute.manager [instance: bace23b3-b7f4-4f3b-8986-0076440d096d] nova.exception.BuildAbortException: Build of instance bace23b3-b7f4-4f3b-8986-0076440d096d aborted: Failed to rebuild volume backed instance. 
[ 1807.845568] env[62519]: ERROR nova.compute.manager [instance: bace23b3-b7f4-4f3b-8986-0076440d096d] [ 1807.870730] env[62519]: DEBUG oslo_concurrency.lockutils [None req-d15fc234-4631-453a-b364-87a19d8e4ec4 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Acquiring lock "5da884af-d8d2-409b-99bd-e5370e44e9f0-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1807.870984] env[62519]: DEBUG oslo_concurrency.lockutils [None req-d15fc234-4631-453a-b364-87a19d8e4ec4 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Lock "5da884af-d8d2-409b-99bd-e5370e44e9f0-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1807.872460] env[62519]: DEBUG oslo_concurrency.lockutils [None req-d15fc234-4631-453a-b364-87a19d8e4ec4 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Lock "5da884af-d8d2-409b-99bd-e5370e44e9f0-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1808.027825] env[62519]: DEBUG oslo_vmware.api [None req-58d8e09b-2ec4-4296-bd95-201b039f8f45 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]521d186e-b4e4-c889-eafe-1ce3083a3c24, 'name': SearchDatastore_Task, 'duration_secs': 0.009464} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1808.028173] env[62519]: DEBUG oslo_concurrency.lockutils [None req-58d8e09b-2ec4-4296-bd95-201b039f8f45 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Releasing lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1808.028388] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-58d8e09b-2ec4-4296-bd95-201b039f8f45 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk to [datastore1] 2bc8f11e-82fc-4acb-945e-15327c133920/2bc8f11e-82fc-4acb-945e-15327c133920.vmdk {{(pid=62519) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1808.028660] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-033d4c44-f9de-4e9c-94cf-7d04fb9828ea {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1808.036363] env[62519]: DEBUG oslo_vmware.api [None req-58d8e09b-2ec4-4296-bd95-201b039f8f45 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Waiting for the task: (returnval){ [ 1808.036363] env[62519]: value = "task-1803018" [ 1808.036363] env[62519]: _type = "Task" [ 1808.036363] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1808.045490] env[62519]: DEBUG oslo_vmware.api [None req-58d8e09b-2ec4-4296-bd95-201b039f8f45 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Task: {'id': task-1803018, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1808.157946] env[62519]: DEBUG nova.compute.manager [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] [instance: 51eaac08-75fd-49f9-9b1a-cc2a2d799634] Instance has had 0 of 5 cleanup attempts {{(pid=62519) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11733}} [ 1808.167282] env[62519]: DEBUG oslo_concurrency.lockutils [None req-04c01760-a718-4b80-a003-ee7594a61827 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 1.955s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1808.167827] env[62519]: DEBUG nova.compute.manager [None req-04c01760-a718-4b80-a003-ee7594a61827 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] [instance: 21daee7a-1788-4a1c-969e-65b696702bf2] Start building networks asynchronously for instance. 
{{(pid=62519) _build_resources /opt/stack/nova/nova/compute/manager.py:2838}} [ 1808.170862] env[62519]: DEBUG oslo_concurrency.lockutils [None req-3672f208-1af3-4e50-89f3-c43121d90870 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 7.079s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1808.173152] env[62519]: INFO nova.compute.claims [None req-3672f208-1af3-4e50-89f3-c43121d90870 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] [instance: f3665f89-1747-4567-9e56-c937d4ac81da] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1808.176666] env[62519]: DEBUG oslo_concurrency.lockutils [None req-b5f17812-6998-4e4a-bf0d-3ce312feade3 tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] Acquiring lock "99f22198-1a65-4d0d-b665-90c7063dbdb9" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1808.217851] env[62519]: DEBUG oslo_concurrency.lockutils [None req-28584ffa-62b0-4b86-a652-0c3e2c7943e5 tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] Lock "99f22198-1a65-4d0d-b665-90c7063dbdb9" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.290s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1808.218667] env[62519]: DEBUG oslo_concurrency.lockutils [None req-b5f17812-6998-4e4a-bf0d-3ce312feade3 tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] Lock "99f22198-1a65-4d0d-b665-90c7063dbdb9" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.043s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1808.219603] env[62519]: DEBUG nova.compute.manager [None req-b5f17812-6998-4e4a-bf0d-3ce312feade3 tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] [instance: 99f22198-1a65-4d0d-b665-90c7063dbdb9] Checking state {{(pid=62519) _get_power_state /opt/stack/nova/nova/compute/manager.py:1799}} [ 1808.220600] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9cbdc2d-7c61-4972-8227-9239024c6a22 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1808.231567] env[62519]: DEBUG oslo_vmware.api [None req-97cffebd-23d9-4e9a-84e7-8e33185957ef tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Task: {'id': task-1803017, 'name': ReconfigVM_Task, 'duration_secs': 0.149407} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1808.231567] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-97cffebd-23d9-4e9a-84e7-8e33185957ef tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] [instance: 46b3a0fb-29f6-4b66-a091-2d125b69d109] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-373810', 'volume_id': '9c0a59d7-6968-4064-b0ab-53e4c74275f5', 'name': 'volume-9c0a59d7-6968-4064-b0ab-53e4c74275f5', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '46b3a0fb-29f6-4b66-a091-2d125b69d109', 'attached_at': '', 'detached_at': '', 'volume_id': '9c0a59d7-6968-4064-b0ab-53e4c74275f5', 'serial': '9c0a59d7-6968-4064-b0ab-53e4c74275f5'} {{(pid=62519) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1808.235058] env[62519]: DEBUG nova.compute.manager [None req-b5f17812-6998-4e4a-bf0d-3ce312feade3 tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] [instance: 99f22198-1a65-4d0d-b665-90c7063dbdb9] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=62519) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3408}} [ 1808.235921] env[62519]: DEBUG nova.objects.instance [None req-b5f17812-6998-4e4a-bf0d-3ce312feade3 tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] Lazy-loading 'flavor' on Instance uuid 99f22198-1a65-4d0d-b665-90c7063dbdb9 {{(pid=62519) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1808.293169] env[62519]: DEBUG nova.compute.manager [req-6e0d727a-f5da-40e0-80f2-144c39191357 req-7f886638-92b2-43fd-9804-f6ebba400865 service nova] [instance: 1c4615c7-d145-4529-98bd-1ae3ed51e1b5] Received event network-vif-plugged-6541f904-9576-48c8-972b-84003d7a5315 {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1808.293169] env[62519]: DEBUG oslo_concurrency.lockutils [req-6e0d727a-f5da-40e0-80f2-144c39191357 req-7f886638-92b2-43fd-9804-f6ebba400865 service nova] Acquiring lock "1c4615c7-d145-4529-98bd-1ae3ed51e1b5-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1808.293169] env[62519]: DEBUG oslo_concurrency.lockutils [req-6e0d727a-f5da-40e0-80f2-144c39191357 req-7f886638-92b2-43fd-9804-f6ebba400865 service nova] Lock "1c4615c7-d145-4529-98bd-1ae3ed51e1b5-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1808.293169] env[62519]: DEBUG oslo_concurrency.lockutils [req-6e0d727a-f5da-40e0-80f2-144c39191357 req-7f886638-92b2-43fd-9804-f6ebba400865 service nova] Lock "1c4615c7-d145-4529-98bd-1ae3ed51e1b5-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1808.293169] env[62519]: DEBUG nova.compute.manager [req-6e0d727a-f5da-40e0-80f2-144c39191357 req-7f886638-92b2-43fd-9804-f6ebba400865 service nova] [instance: 1c4615c7-d145-4529-98bd-1ae3ed51e1b5] No waiting events 
found dispatching network-vif-plugged-6541f904-9576-48c8-972b-84003d7a5315 {{(pid=62519) pop_instance_event /opt/stack/nova/nova/compute/manager.py:323}} [ 1808.293374] env[62519]: WARNING nova.compute.manager [req-6e0d727a-f5da-40e0-80f2-144c39191357 req-7f886638-92b2-43fd-9804-f6ebba400865 service nova] [instance: 1c4615c7-d145-4529-98bd-1ae3ed51e1b5] Received unexpected event network-vif-plugged-6541f904-9576-48c8-972b-84003d7a5315 for instance with vm_state building and task_state spawning. [ 1808.483614] env[62519]: DEBUG nova.network.neutron [None req-233e6b89-3144-41a2-9f7d-54ed63bc311d tempest-ServerAddressesTestJSON-575205737 tempest-ServerAddressesTestJSON-575205737-project-member] [instance: 1c4615c7-d145-4529-98bd-1ae3ed51e1b5] Successfully updated port: 6541f904-9576-48c8-972b-84003d7a5315 {{(pid=62519) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1808.550380] env[62519]: DEBUG oslo_vmware.api [None req-58d8e09b-2ec4-4296-bd95-201b039f8f45 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Task: {'id': task-1803018, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1808.629224] env[62519]: DEBUG nova.network.neutron [None req-34394a41-2f5a-4a60-841d-89a226332d22 tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] [instance: 71c6bd0c-40da-44eb-8f37-4f1b7fb0f0a2] Successfully updated port: 59a0726b-5696-46b1-88ef-7b165b586888 {{(pid=62519) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1808.662022] env[62519]: DEBUG nova.compute.manager [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] [instance: 1d4b14d3-8832-457e-aaed-462236555f57] Instance has had 0 of 5 cleanup attempts {{(pid=62519) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11733}} [ 1808.679034] env[62519]: DEBUG nova.compute.utils [None req-04c01760-a718-4b80-a003-ee7594a61827 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Using /dev/sd instead of None {{(pid=62519) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1808.683091] env[62519]: DEBUG nova.compute.manager [None req-04c01760-a718-4b80-a003-ee7594a61827 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] [instance: 21daee7a-1788-4a1c-969e-65b696702bf2] Allocating IP information in the background. 
{{(pid=62519) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1989}} [ 1808.683712] env[62519]: DEBUG nova.network.neutron [None req-04c01760-a718-4b80-a003-ee7594a61827 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] [instance: 21daee7a-1788-4a1c-969e-65b696702bf2] allocate_for_instance() {{(pid=62519) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1808.767936] env[62519]: DEBUG nova.policy [None req-04c01760-a718-4b80-a003-ee7594a61827 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '053a30aaf81b4cbd8ced7018ebfe1f40', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '0e755fb5a6e94068b6c99b1638081f5f', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62519) authorize /opt/stack/nova/nova/policy.py:192}} [ 1808.936973] env[62519]: DEBUG oslo_concurrency.lockutils [None req-d15fc234-4631-453a-b364-87a19d8e4ec4 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Acquiring lock "refresh_cache-5da884af-d8d2-409b-99bd-e5370e44e9f0" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1808.937209] env[62519]: DEBUG oslo_concurrency.lockutils [None req-d15fc234-4631-453a-b364-87a19d8e4ec4 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Acquired lock "refresh_cache-5da884af-d8d2-409b-99bd-e5370e44e9f0" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1808.939011] env[62519]: DEBUG nova.network.neutron [None req-d15fc234-4631-453a-b364-87a19d8e4ec4 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] [instance: 5da884af-d8d2-409b-99bd-e5370e44e9f0] Building network info cache for instance {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1808.991757] env[62519]: DEBUG oslo_concurrency.lockutils [None req-233e6b89-3144-41a2-9f7d-54ed63bc311d tempest-ServerAddressesTestJSON-575205737 tempest-ServerAddressesTestJSON-575205737-project-member] Acquiring lock "refresh_cache-1c4615c7-d145-4529-98bd-1ae3ed51e1b5" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1808.991757] env[62519]: DEBUG oslo_concurrency.lockutils [None req-233e6b89-3144-41a2-9f7d-54ed63bc311d tempest-ServerAddressesTestJSON-575205737 tempest-ServerAddressesTestJSON-575205737-project-member] Acquired lock "refresh_cache-1c4615c7-d145-4529-98bd-1ae3ed51e1b5" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1808.991757] env[62519]: DEBUG nova.network.neutron [None req-233e6b89-3144-41a2-9f7d-54ed63bc311d tempest-ServerAddressesTestJSON-575205737 tempest-ServerAddressesTestJSON-575205737-project-member] [instance: 1c4615c7-d145-4529-98bd-1ae3ed51e1b5] Building network info cache for instance {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1809.050818] env[62519]: DEBUG oslo_vmware.api [None req-58d8e09b-2ec4-4296-bd95-201b039f8f45 
tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Task: {'id': task-1803018, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.70389} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1809.050818] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-58d8e09b-2ec4-4296-bd95-201b039f8f45 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk to [datastore1] 2bc8f11e-82fc-4acb-945e-15327c133920/2bc8f11e-82fc-4acb-945e-15327c133920.vmdk {{(pid=62519) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1809.050818] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-58d8e09b-2ec4-4296-bd95-201b039f8f45 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] [instance: 2bc8f11e-82fc-4acb-945e-15327c133920] Extending root virtual disk to 1048576 {{(pid=62519) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1809.050818] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-92da4562-0f5c-4f57-9879-637f01e13d63 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1809.058328] env[62519]: DEBUG oslo_vmware.api [None req-58d8e09b-2ec4-4296-bd95-201b039f8f45 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Waiting for the task: (returnval){ [ 1809.058328] env[62519]: value = "task-1803019" [ 1809.058328] env[62519]: _type = "Task" [ 1809.058328] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1809.073017] env[62519]: DEBUG oslo_vmware.api [None req-58d8e09b-2ec4-4296-bd95-201b039f8f45 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Task: {'id': task-1803019, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1809.132900] env[62519]: DEBUG oslo_concurrency.lockutils [None req-34394a41-2f5a-4a60-841d-89a226332d22 tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] Acquiring lock "refresh_cache-71c6bd0c-40da-44eb-8f37-4f1b7fb0f0a2" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1809.133078] env[62519]: DEBUG oslo_concurrency.lockutils [None req-34394a41-2f5a-4a60-841d-89a226332d22 tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] Acquired lock "refresh_cache-71c6bd0c-40da-44eb-8f37-4f1b7fb0f0a2" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1809.133239] env[62519]: DEBUG nova.network.neutron [None req-34394a41-2f5a-4a60-841d-89a226332d22 tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] [instance: 71c6bd0c-40da-44eb-8f37-4f1b7fb0f0a2] Building network info cache for instance {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1809.165907] env[62519]: DEBUG nova.compute.manager [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] [instance: f59a31e4-7fb9-4de7-b35f-da811a305f85] Instance has had 0 of 5 cleanup attempts {{(pid=62519) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11733}} [ 1809.181046] env[62519]: DEBUG nova.compute.manager [None req-04c01760-a718-4b80-a003-ee7594a61827 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] [instance: 21daee7a-1788-4a1c-969e-65b696702bf2] Start building block device mappings for instance. {{(pid=62519) _build_resources /opt/stack/nova/nova/compute/manager.py:2873}} [ 1809.249107] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-b5f17812-6998-4e4a-bf0d-3ce312feade3 tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] [instance: 99f22198-1a65-4d0d-b665-90c7063dbdb9] Powering off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1809.249341] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-280abeab-706c-4fed-971d-ee47b82f3610 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1809.259175] env[62519]: DEBUG oslo_vmware.api [None req-b5f17812-6998-4e4a-bf0d-3ce312feade3 tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] Waiting for the task: (returnval){ [ 1809.259175] env[62519]: value = "task-1803020" [ 1809.259175] env[62519]: _type = "Task" [ 1809.259175] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1809.275169] env[62519]: DEBUG oslo_vmware.api [None req-b5f17812-6998-4e4a-bf0d-3ce312feade3 tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] Task: {'id': task-1803020, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1809.291783] env[62519]: DEBUG nova.objects.instance [None req-97cffebd-23d9-4e9a-84e7-8e33185957ef tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Lazy-loading 'flavor' on Instance uuid 46b3a0fb-29f6-4b66-a091-2d125b69d109 {{(pid=62519) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1809.298189] env[62519]: DEBUG nova.network.neutron [None req-04c01760-a718-4b80-a003-ee7594a61827 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] [instance: 21daee7a-1788-4a1c-969e-65b696702bf2] Successfully created port: 24a7965e-a68b-4ffc-b5ab-d1bbaf657f40 {{(pid=62519) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1809.548367] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99e200c8-b13a-4ed6-9304-33fcaf45f798 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1809.558527] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8430cbd9-0692-4ab3-9b82-8a31dbd4277e {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1809.572462] env[62519]: DEBUG oslo_vmware.api [None req-58d8e09b-2ec4-4296-bd95-201b039f8f45 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Task: {'id': task-1803019, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.114188} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1809.573222] env[62519]: DEBUG nova.network.neutron [None req-233e6b89-3144-41a2-9f7d-54ed63bc311d tempest-ServerAddressesTestJSON-575205737 tempest-ServerAddressesTestJSON-575205737-project-member] [instance: 1c4615c7-d145-4529-98bd-1ae3ed51e1b5] Instance cache missing network info. 
{{(pid=62519) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1809.601799] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-58d8e09b-2ec4-4296-bd95-201b039f8f45 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] [instance: 2bc8f11e-82fc-4acb-945e-15327c133920] Extended root virtual disk {{(pid=62519) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1809.605477] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86ba58b3-b1c5-4cbb-8b5c-0ca8bbce3d64 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1809.609675] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10ebec93-61cb-4cb4-8c8e-8b1777646e3e {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1809.635484] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a478832-aedc-43b5-ba89-d72eac9b3cdb {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1809.651697] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-58d8e09b-2ec4-4296-bd95-201b039f8f45 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] [instance: 2bc8f11e-82fc-4acb-945e-15327c133920] Reconfiguring VM instance instance-0000005b to attach disk [datastore1] 2bc8f11e-82fc-4acb-945e-15327c133920/2bc8f11e-82fc-4acb-945e-15327c133920.vmdk or device None with type sparse {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1809.658187] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e2fd8b78-1d8d-4d17-ae69-ecab6db88ae1 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1809.673267] env[62519]: DEBUG nova.compute.manager [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] [instance: cfefa7c8-4986-4ad0-ac20-8784ee44a737] Instance has had 0 of 5 cleanup attempts {{(pid=62519) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11733}} [ 1809.693021] env[62519]: DEBUG nova.compute.provider_tree [None req-3672f208-1af3-4e50-89f3-c43121d90870 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Inventory has not changed in ProviderTree for provider: f8ca0d98-9158-4b85-ae0e-b106f966dd44 {{(pid=62519) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1809.693195] env[62519]: DEBUG oslo_vmware.api [None req-58d8e09b-2ec4-4296-bd95-201b039f8f45 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Waiting for the task: (returnval){ [ 1809.693195] env[62519]: value = "task-1803021" [ 1809.693195] env[62519]: _type = "Task" [ 1809.693195] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1809.703172] env[62519]: DEBUG oslo_vmware.api [None req-58d8e09b-2ec4-4296-bd95-201b039f8f45 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Task: {'id': task-1803021, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1809.770249] env[62519]: DEBUG oslo_vmware.api [None req-b5f17812-6998-4e4a-bf0d-3ce312feade3 tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] Task: {'id': task-1803020, 'name': PowerOffVM_Task, 'duration_secs': 0.253943} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1809.771430] env[62519]: DEBUG nova.network.neutron [None req-34394a41-2f5a-4a60-841d-89a226332d22 tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] [instance: 71c6bd0c-40da-44eb-8f37-4f1b7fb0f0a2] Instance cache missing network info. {{(pid=62519) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1809.773862] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-b5f17812-6998-4e4a-bf0d-3ce312feade3 tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] [instance: 99f22198-1a65-4d0d-b665-90c7063dbdb9] Powered off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1809.773862] env[62519]: DEBUG nova.compute.manager [None req-b5f17812-6998-4e4a-bf0d-3ce312feade3 tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] [instance: 99f22198-1a65-4d0d-b665-90c7063dbdb9] Checking state {{(pid=62519) _get_power_state /opt/stack/nova/nova/compute/manager.py:1799}} [ 1809.776648] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c325deb-6b8f-4abe-a5cd-71ca5514028d {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1809.804029] env[62519]: DEBUG oslo_concurrency.lockutils [None req-97cffebd-23d9-4e9a-84e7-8e33185957ef tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Lock "46b3a0fb-29f6-4b66-a091-2d125b69d109" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.860s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1810.047494] env[62519]: DEBUG nova.network.neutron [None req-d15fc234-4631-453a-b364-87a19d8e4ec4 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] [instance: 5da884af-d8d2-409b-99bd-e5370e44e9f0] Updating instance_info_cache with network_info: [{"id": "f8e70085-af0c-4731-8b61-cf5cd2460344", "address": "fa:16:3e:4f:98:8a", "network": {"id": "14e63db8-c42b-42e5-bd96-cab60c163758", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1328688204-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "df6727c290724a8ebef5188c77e91399", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2be3fdb5-359e-43bd-8c20-2ff00e81db55", "external-id": "nsx-vlan-transportzone-986", "segmentation_id": 986, "bound_drivers": {"0": "nsxv3"}}, 
"devname": "tapf8e70085-af", "ovs_interfaceid": "f8e70085-af0c-4731-8b61-cf5cd2460344", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1810.059592] env[62519]: DEBUG nova.network.neutron [None req-233e6b89-3144-41a2-9f7d-54ed63bc311d tempest-ServerAddressesTestJSON-575205737 tempest-ServerAddressesTestJSON-575205737-project-member] [instance: 1c4615c7-d145-4529-98bd-1ae3ed51e1b5] Updating instance_info_cache with network_info: [{"id": "6541f904-9576-48c8-972b-84003d7a5315", "address": "fa:16:3e:98:b4:3d", "network": {"id": "62a33531-01c8-4b73-86c8-26c8e981a7d8", "bridge": "br-int", "label": "tempest-ServerAddressesTestJSON-59086432-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1d6c914bac4e4293ade25fd3da4576e7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0a88b707-352e-4be7-b1d6-ad6074b40ed9", "external-id": "nsx-vlan-transportzone-789", "segmentation_id": 789, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6541f904-95", "ovs_interfaceid": "6541f904-9576-48c8-972b-84003d7a5315", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1810.103547] env[62519]: DEBUG nova.network.neutron [None req-34394a41-2f5a-4a60-841d-89a226332d22 tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] [instance: 71c6bd0c-40da-44eb-8f37-4f1b7fb0f0a2] Updating instance_info_cache with network_info: [{"id": "59a0726b-5696-46b1-88ef-7b165b586888", "address": "fa:16:3e:f9:06:41", "network": {"id": "b50c1a89-c14c-481c-b5d9-748f18863a45", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-698120386-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9808ad7e38e34658aac06ebc932b0e32", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f67a2790-f2b0-4d03-b606-0bfaee7a4229", "external-id": "nsx-vlan-transportzone-187", "segmentation_id": 187, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap59a0726b-56", "ovs_interfaceid": "59a0726b-5696-46b1-88ef-7b165b586888", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1810.178027] env[62519]: DEBUG 
nova.compute.manager [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] [instance: 40c7a9b8-d541-464a-ba87-76cfc183ae31] Instance has had 0 of 5 cleanup attempts {{(pid=62519) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11733}} [ 1810.191418] env[62519]: DEBUG nova.compute.manager [None req-04c01760-a718-4b80-a003-ee7594a61827 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] [instance: 21daee7a-1788-4a1c-969e-65b696702bf2] Start spawning the instance on the hypervisor. {{(pid=62519) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2647}} [ 1810.196300] env[62519]: DEBUG nova.scheduler.client.report [None req-3672f208-1af3-4e50-89f3-c43121d90870 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Inventory has not changed for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 157, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1810.221217] env[62519]: DEBUG oslo_vmware.api [None req-58d8e09b-2ec4-4296-bd95-201b039f8f45 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Task: {'id': task-1803021, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1810.236950] env[62519]: DEBUG nova.virt.hardware [None req-04c01760-a718-4b80-a003-ee7594a61827 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T07:56:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T07:55:55Z,direct_url=,disk_format='vmdk',id=15793716-f1d9-4a86-9030-717adf498693,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4069337684d348e0a1ab4eb6a1a2b14d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T07:55:56Z,virtual_size=,visibility=), allow threads: False {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1810.237200] env[62519]: DEBUG nova.virt.hardware [None req-04c01760-a718-4b80-a003-ee7594a61827 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Flavor limits 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1810.237359] env[62519]: DEBUG nova.virt.hardware [None req-04c01760-a718-4b80-a003-ee7594a61827 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Image limits 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1810.237537] env[62519]: DEBUG nova.virt.hardware [None req-04c01760-a718-4b80-a003-ee7594a61827 tempest-ServersTestJSON-810763036 
tempest-ServersTestJSON-810763036-project-member] Flavor pref 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1810.237683] env[62519]: DEBUG nova.virt.hardware [None req-04c01760-a718-4b80-a003-ee7594a61827 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Image pref 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1810.237827] env[62519]: DEBUG nova.virt.hardware [None req-04c01760-a718-4b80-a003-ee7594a61827 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1810.238262] env[62519]: DEBUG nova.virt.hardware [None req-04c01760-a718-4b80-a003-ee7594a61827 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1810.238493] env[62519]: DEBUG nova.virt.hardware [None req-04c01760-a718-4b80-a003-ee7594a61827 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62519) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1810.238791] env[62519]: DEBUG nova.virt.hardware [None req-04c01760-a718-4b80-a003-ee7594a61827 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Got 1 possible topologies {{(pid=62519) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1810.238924] env[62519]: DEBUG nova.virt.hardware [None req-04c01760-a718-4b80-a003-ee7594a61827 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1810.239185] env[62519]: DEBUG nova.virt.hardware [None req-04c01760-a718-4b80-a003-ee7594a61827 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1810.240355] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2ef59af-5dd9-4e73-9e68-b1656def7143 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1810.249761] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7c9c117-8788-4f69-a7d9-a5b88e982efb {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1810.289193] env[62519]: DEBUG oslo_concurrency.lockutils [None req-b5f17812-6998-4e4a-bf0d-3ce312feade3 tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] Lock "99f22198-1a65-4d0d-b665-90c7063dbdb9" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 2.070s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1810.333170] env[62519]: DEBUG nova.compute.manager 
[req-64e09456-292f-4248-a440-1f246c1b0b2f req-c55470e1-0b55-499b-8df2-de5d941471b4 service nova] [instance: 71c6bd0c-40da-44eb-8f37-4f1b7fb0f0a2] Received event network-vif-plugged-59a0726b-5696-46b1-88ef-7b165b586888 {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1810.333170] env[62519]: DEBUG oslo_concurrency.lockutils [req-64e09456-292f-4248-a440-1f246c1b0b2f req-c55470e1-0b55-499b-8df2-de5d941471b4 service nova] Acquiring lock "71c6bd0c-40da-44eb-8f37-4f1b7fb0f0a2-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1810.333555] env[62519]: DEBUG oslo_concurrency.lockutils [req-64e09456-292f-4248-a440-1f246c1b0b2f req-c55470e1-0b55-499b-8df2-de5d941471b4 service nova] Lock "71c6bd0c-40da-44eb-8f37-4f1b7fb0f0a2-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1810.333906] env[62519]: DEBUG oslo_concurrency.lockutils [req-64e09456-292f-4248-a440-1f246c1b0b2f req-c55470e1-0b55-499b-8df2-de5d941471b4 service nova] Lock "71c6bd0c-40da-44eb-8f37-4f1b7fb0f0a2-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.001s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1810.334722] env[62519]: DEBUG nova.compute.manager [req-64e09456-292f-4248-a440-1f246c1b0b2f req-c55470e1-0b55-499b-8df2-de5d941471b4 service nova] [instance: 71c6bd0c-40da-44eb-8f37-4f1b7fb0f0a2] No waiting events found dispatching network-vif-plugged-59a0726b-5696-46b1-88ef-7b165b586888 {{(pid=62519) pop_instance_event /opt/stack/nova/nova/compute/manager.py:323}} [ 1810.335255] env[62519]: WARNING nova.compute.manager [req-64e09456-292f-4248-a440-1f246c1b0b2f req-c55470e1-0b55-499b-8df2-de5d941471b4 service nova] [instance: 71c6bd0c-40da-44eb-8f37-4f1b7fb0f0a2] Received unexpected event network-vif-plugged-59a0726b-5696-46b1-88ef-7b165b586888 for instance with vm_state building and task_state spawning. [ 1810.336476] env[62519]: DEBUG nova.compute.manager [req-64e09456-292f-4248-a440-1f246c1b0b2f req-c55470e1-0b55-499b-8df2-de5d941471b4 service nova] [instance: 1c4615c7-d145-4529-98bd-1ae3ed51e1b5] Received event network-changed-6541f904-9576-48c8-972b-84003d7a5315 {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1810.336476] env[62519]: DEBUG nova.compute.manager [req-64e09456-292f-4248-a440-1f246c1b0b2f req-c55470e1-0b55-499b-8df2-de5d941471b4 service nova] [instance: 1c4615c7-d145-4529-98bd-1ae3ed51e1b5] Refreshing instance network info cache due to event network-changed-6541f904-9576-48c8-972b-84003d7a5315. 
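
The nova.virt.hardware entries above ("Flavor limits 0:0:0", "Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536", "Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)]") trace CPU-topology selection for the 1-vCPU m1.nano flavor: with no flavor or image constraints the limits default to 65536 per dimension and the preferences to zero, so the search reduces to enumerating factorizations of the vCPU count. The standalone sketch below illustrates only that enumeration idea; it is a simplified, assumption-laden illustration whose names are made up, not nova.virt.hardware itself.

    from collections import namedtuple

    # Simplified illustration of the topology search logged above; NOT Nova's
    # implementation. The 65536 defaults mirror the "maximum
    # VirtCPUTopology(cores=65536,sockets=65536,threads=65536)" line.
    Topology = namedtuple("Topology", "sockets cores threads")

    def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
        """Enumerate sockets*cores*threads combinations that exactly cover vcpus."""
        found = []
        for s in range(1, min(vcpus, max_sockets) + 1):
            for c in range(1, min(vcpus, max_cores) + 1):
                for t in range(1, min(vcpus, max_threads) + 1):
                    if s * c * t == vcpus:
                        found.append(Topology(s, c, t))
        return found

    # For 1 vCPU the only factorization is 1x1x1, matching the single
    # topology reported in the log.
    print(possible_topologies(1))
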
{{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11628}} [ 1810.336476] env[62519]: DEBUG oslo_concurrency.lockutils [req-64e09456-292f-4248-a440-1f246c1b0b2f req-c55470e1-0b55-499b-8df2-de5d941471b4 service nova] Acquiring lock "refresh_cache-1c4615c7-d145-4529-98bd-1ae3ed51e1b5" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1810.549925] env[62519]: DEBUG oslo_concurrency.lockutils [None req-d15fc234-4631-453a-b364-87a19d8e4ec4 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Releasing lock "refresh_cache-5da884af-d8d2-409b-99bd-e5370e44e9f0" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1810.562800] env[62519]: DEBUG oslo_concurrency.lockutils [None req-233e6b89-3144-41a2-9f7d-54ed63bc311d tempest-ServerAddressesTestJSON-575205737 tempest-ServerAddressesTestJSON-575205737-project-member] Releasing lock "refresh_cache-1c4615c7-d145-4529-98bd-1ae3ed51e1b5" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1810.563126] env[62519]: DEBUG nova.compute.manager [None req-233e6b89-3144-41a2-9f7d-54ed63bc311d tempest-ServerAddressesTestJSON-575205737 tempest-ServerAddressesTestJSON-575205737-project-member] [instance: 1c4615c7-d145-4529-98bd-1ae3ed51e1b5] Instance network_info: |[{"id": "6541f904-9576-48c8-972b-84003d7a5315", "address": "fa:16:3e:98:b4:3d", "network": {"id": "62a33531-01c8-4b73-86c8-26c8e981a7d8", "bridge": "br-int", "label": "tempest-ServerAddressesTestJSON-59086432-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1d6c914bac4e4293ade25fd3da4576e7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0a88b707-352e-4be7-b1d6-ad6074b40ed9", "external-id": "nsx-vlan-transportzone-789", "segmentation_id": 789, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6541f904-95", "ovs_interfaceid": "6541f904-9576-48c8-972b-84003d7a5315", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62519) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2004}} [ 1810.563437] env[62519]: DEBUG oslo_concurrency.lockutils [req-64e09456-292f-4248-a440-1f246c1b0b2f req-c55470e1-0b55-499b-8df2-de5d941471b4 service nova] Acquired lock "refresh_cache-1c4615c7-d145-4529-98bd-1ae3ed51e1b5" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1810.563613] env[62519]: DEBUG nova.network.neutron [req-64e09456-292f-4248-a440-1f246c1b0b2f req-c55470e1-0b55-499b-8df2-de5d941471b4 service nova] [instance: 1c4615c7-d145-4529-98bd-1ae3ed51e1b5] Refreshing network info cache for port 6541f904-9576-48c8-972b-84003d7a5315 {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1810.564829] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-233e6b89-3144-41a2-9f7d-54ed63bc311d 
tempest-ServerAddressesTestJSON-575205737 tempest-ServerAddressesTestJSON-575205737-project-member] [instance: 1c4615c7-d145-4529-98bd-1ae3ed51e1b5] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:98:b4:3d', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '0a88b707-352e-4be7-b1d6-ad6074b40ed9', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '6541f904-9576-48c8-972b-84003d7a5315', 'vif_model': 'vmxnet3'}] {{(pid=62519) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1810.572670] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-233e6b89-3144-41a2-9f7d-54ed63bc311d tempest-ServerAddressesTestJSON-575205737 tempest-ServerAddressesTestJSON-575205737-project-member] Creating folder: Project (1d6c914bac4e4293ade25fd3da4576e7). Parent ref: group-v373567. {{(pid=62519) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1810.573243] env[62519]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b9d900e7-053d-4446-8ed8-3d4779a8c1bf {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1810.587117] env[62519]: INFO nova.virt.vmwareapi.vm_util [None req-233e6b89-3144-41a2-9f7d-54ed63bc311d tempest-ServerAddressesTestJSON-575205737 tempest-ServerAddressesTestJSON-575205737-project-member] Created folder: Project (1d6c914bac4e4293ade25fd3da4576e7) in parent group-v373567. [ 1810.587331] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-233e6b89-3144-41a2-9f7d-54ed63bc311d tempest-ServerAddressesTestJSON-575205737 tempest-ServerAddressesTestJSON-575205737-project-member] Creating folder: Instances. Parent ref: group-v373811. {{(pid=62519) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1810.587573] env[62519]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-a9dd8033-84b9-4141-851e-f1a90e51f56d {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1810.599240] env[62519]: INFO nova.virt.vmwareapi.vm_util [None req-233e6b89-3144-41a2-9f7d-54ed63bc311d tempest-ServerAddressesTestJSON-575205737 tempest-ServerAddressesTestJSON-575205737-project-member] Created folder: Instances in parent group-v373811. [ 1810.599513] env[62519]: DEBUG oslo.service.loopingcall [None req-233e6b89-3144-41a2-9f7d-54ed63bc311d tempest-ServerAddressesTestJSON-575205737 tempest-ServerAddressesTestJSON-575205737-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62519) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1810.599718] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1c4615c7-d145-4529-98bd-1ae3ed51e1b5] Creating VM on the ESX host {{(pid=62519) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1810.599955] env[62519]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-cbd0f86b-b991-485a-b2cc-14a710ab08ec {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1810.617365] env[62519]: DEBUG oslo_concurrency.lockutils [None req-34394a41-2f5a-4a60-841d-89a226332d22 tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] Releasing lock "refresh_cache-71c6bd0c-40da-44eb-8f37-4f1b7fb0f0a2" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1810.617655] env[62519]: DEBUG nova.compute.manager [None req-34394a41-2f5a-4a60-841d-89a226332d22 tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] [instance: 71c6bd0c-40da-44eb-8f37-4f1b7fb0f0a2] Instance network_info: |[{"id": "59a0726b-5696-46b1-88ef-7b165b586888", "address": "fa:16:3e:f9:06:41", "network": {"id": "b50c1a89-c14c-481c-b5d9-748f18863a45", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-698120386-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9808ad7e38e34658aac06ebc932b0e32", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f67a2790-f2b0-4d03-b606-0bfaee7a4229", "external-id": "nsx-vlan-transportzone-187", "segmentation_id": 187, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap59a0726b-56", "ovs_interfaceid": "59a0726b-5696-46b1-88ef-7b165b586888", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62519) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2004}} [ 1810.618344] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-34394a41-2f5a-4a60-841d-89a226332d22 tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] [instance: 71c6bd0c-40da-44eb-8f37-4f1b7fb0f0a2] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:f9:06:41', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f67a2790-f2b0-4d03-b606-0bfaee7a4229', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '59a0726b-5696-46b1-88ef-7b165b586888', 'vif_model': 'vmxnet3'}] {{(pid=62519) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1810.625756] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-34394a41-2f5a-4a60-841d-89a226332d22 tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] Creating folder: Project (9808ad7e38e34658aac06ebc932b0e32). Parent ref: group-v373567. 
{{(pid=62519) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1810.626670] env[62519]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-aca28cd3-ad46-44df-bbdd-7fb7881ed20d {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1810.633168] env[62519]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1810.633168] env[62519]: value = "task-1803024" [ 1810.633168] env[62519]: _type = "Task" [ 1810.633168] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1810.637659] env[62519]: INFO nova.virt.vmwareapi.vm_util [None req-34394a41-2f5a-4a60-841d-89a226332d22 tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] Created folder: Project (9808ad7e38e34658aac06ebc932b0e32) in parent group-v373567. [ 1810.637884] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-34394a41-2f5a-4a60-841d-89a226332d22 tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] Creating folder: Instances. Parent ref: group-v373813. {{(pid=62519) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1810.638502] env[62519]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-81d72948-e2af-485a-a6c6-22040d95f3a0 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1810.644022] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1803024, 'name': CreateVM_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1810.647218] env[62519]: INFO nova.virt.vmwareapi.vm_util [None req-34394a41-2f5a-4a60-841d-89a226332d22 tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] Created folder: Instances in parent group-v373813. [ 1810.647446] env[62519]: DEBUG oslo.service.loopingcall [None req-34394a41-2f5a-4a60-841d-89a226332d22 tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62519) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1810.647632] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 71c6bd0c-40da-44eb-8f37-4f1b7fb0f0a2] Creating VM on the ESX host {{(pid=62519) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1810.647832] env[62519]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-cb8d9865-b902-4964-9c61-4f52cbf34466 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1810.670486] env[62519]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1810.670486] env[62519]: value = "task-1803027" [ 1810.670486] env[62519]: _type = "Task" [ 1810.670486] env[62519]: } to complete. 
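
The "Waiting for the task: (returnval){ value = "task-1803027" ... } to complete." blocks and the periodic "progress is N%." lines that follow them come from oslo.vmware's task polling (wait_for_task / _poll_task). Below is a hedged sketch of the calling pattern built only on the public oslo.vmware API; the host, credentials, helper name, and vm_ref lookup are placeholders, and this is not the driver's actual code.

    from oslo_vmware import api

    def power_on_vm(session, vm_ref):
        # Invoking a vSphere *_Task method returns a task moref immediately;
        # wait_for_task() then polls it every task_poll_interval seconds
        # (producing the "progress is N%" debug lines) until it succeeds
        # or raises on an error state.
        task = session.invoke_api(session.vim, "PowerOnVM_Task", vm_ref)
        return session.wait_for_task(task)

    # Placeholder session setup -- needs a reachable vCenter to actually run:
    # session = api.VMwareAPISession("vc.example.invalid", "user", "secret",
    #                                api_retry_count=10, task_poll_interval=0.5)
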
{{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1810.682682] env[62519]: DEBUG nova.compute.manager [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] [instance: 1462d213-3f9a-4c60-8056-0b68f20a4939] Instance has had 0 of 5 cleanup attempts {{(pid=62519) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11733}} [ 1810.684535] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1803027, 'name': CreateVM_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1810.704177] env[62519]: DEBUG oslo_concurrency.lockutils [None req-3672f208-1af3-4e50-89f3-c43121d90870 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.530s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1810.704177] env[62519]: DEBUG nova.compute.manager [None req-3672f208-1af3-4e50-89f3-c43121d90870 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] [instance: f3665f89-1747-4567-9e56-c937d4ac81da] Start building networks asynchronously for instance. {{(pid=62519) _build_resources /opt/stack/nova/nova/compute/manager.py:2838}} [ 1810.716836] env[62519]: DEBUG oslo_vmware.api [None req-58d8e09b-2ec4-4296-bd95-201b039f8f45 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Task: {'id': task-1803021, 'name': ReconfigVM_Task, 'duration_secs': 0.562586} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1810.717784] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-58d8e09b-2ec4-4296-bd95-201b039f8f45 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] [instance: 2bc8f11e-82fc-4acb-945e-15327c133920] Reconfigured VM instance instance-0000005b to attach disk [datastore1] 2bc8f11e-82fc-4acb-945e-15327c133920/2bc8f11e-82fc-4acb-945e-15327c133920.vmdk or device None with type sparse {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1810.718513] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-c071302c-b2d0-4118-b8ee-3e48a0982d9f {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1810.727427] env[62519]: DEBUG oslo_vmware.api [None req-58d8e09b-2ec4-4296-bd95-201b039f8f45 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Waiting for the task: (returnval){ [ 1810.727427] env[62519]: value = "task-1803028" [ 1810.727427] env[62519]: _type = "Task" [ 1810.727427] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1810.739202] env[62519]: DEBUG oslo_vmware.api [None req-58d8e09b-2ec4-4296-bd95-201b039f8f45 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Task: {'id': task-1803028, 'name': Rename_Task} progress is 5%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1810.875397] env[62519]: DEBUG oslo_concurrency.lockutils [None req-888de478-1e31-43ae-bdd7-d7e8bcd57639 tempest-ServerActionsV293TestJSON-760697799 tempest-ServerActionsV293TestJSON-760697799-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1810.875708] env[62519]: DEBUG oslo_concurrency.lockutils [None req-888de478-1e31-43ae-bdd7-d7e8bcd57639 tempest-ServerActionsV293TestJSON-760697799 tempest-ServerActionsV293TestJSON-760697799-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1811.080149] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-794da38f-9297-46f1-816e-abbd5e031728 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1811.083852] env[62519]: DEBUG nova.objects.instance [None req-35032167-8053-431a-a317-fa8f7342ca10 tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] Lazy-loading 'flavor' on Instance uuid 99f22198-1a65-4d0d-b665-90c7063dbdb9 {{(pid=62519) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1811.111568] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12af3b5f-c8fb-49b2-a7f5-03ed7815e68e {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1811.128275] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-d15fc234-4631-453a-b364-87a19d8e4ec4 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] [instance: 5da884af-d8d2-409b-99bd-e5370e44e9f0] Updating instance '5da884af-d8d2-409b-99bd-e5370e44e9f0' progress to 83 {{(pid=62519) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1811.147122] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1803024, 'name': CreateVM_Task, 'duration_secs': 0.490817} completed successfully. 
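
The recurring triplets of 'Acquiring lock "compute_resources" by ...', 'Lock ... acquired ... :: waited 0.000s', and '... "released" ... :: held 2.530s' are emitted by oslo.concurrency's lockutils wrappers, which time how long a caller waited for and then held a named in-process semaphore. A minimal sketch of the two usual forms follows, assuming only the standard oslo_concurrency API; the function bodies and the instance UUID are placeholders.

    from oslo_concurrency import lockutils

    @lockutils.synchronized("compute_resources")
    def update_usage():
        # Decorator form: logs acquire/release tagged with the wrapped
        # callable's name, which is where the "acquired by ... :: waited/held"
        # lines in this log come from.
        pass

    def refresh_cache(instance_uuid):
        # Context-manager form, comparable to the "refresh_cache-<uuid>" locks
        # seen above; only one thread of this process enters at a time.
        with lockutils.lock("refresh_cache-%s" % instance_uuid):
            pass

    update_usage()
    refresh_cache("00000000-0000-0000-0000-000000000000")
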
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1811.147299] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1c4615c7-d145-4529-98bd-1ae3ed51e1b5] Created VM on the ESX host {{(pid=62519) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1811.148355] env[62519]: DEBUG oslo_concurrency.lockutils [None req-233e6b89-3144-41a2-9f7d-54ed63bc311d tempest-ServerAddressesTestJSON-575205737 tempest-ServerAddressesTestJSON-575205737-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1811.148355] env[62519]: DEBUG oslo_concurrency.lockutils [None req-233e6b89-3144-41a2-9f7d-54ed63bc311d tempest-ServerAddressesTestJSON-575205737 tempest-ServerAddressesTestJSON-575205737-project-member] Acquired lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1811.148611] env[62519]: DEBUG oslo_concurrency.lockutils [None req-233e6b89-3144-41a2-9f7d-54ed63bc311d tempest-ServerAddressesTestJSON-575205737 tempest-ServerAddressesTestJSON-575205737-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1811.149302] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d0c7aa20-3812-4e30-8818-55025a77d4a1 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1811.155314] env[62519]: DEBUG oslo_vmware.api [None req-233e6b89-3144-41a2-9f7d-54ed63bc311d tempest-ServerAddressesTestJSON-575205737 tempest-ServerAddressesTestJSON-575205737-project-member] Waiting for the task: (returnval){ [ 1811.155314] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]52f6ae57-0081-16a2-5469-b0f314dc8c82" [ 1811.155314] env[62519]: _type = "Task" [ 1811.155314] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1811.165015] env[62519]: DEBUG oslo_vmware.api [None req-233e6b89-3144-41a2-9f7d-54ed63bc311d tempest-ServerAddressesTestJSON-575205737 tempest-ServerAddressesTestJSON-575205737-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52f6ae57-0081-16a2-5469-b0f314dc8c82, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1811.180179] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1803027, 'name': CreateVM_Task, 'duration_secs': 0.44987} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1811.182833] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 71c6bd0c-40da-44eb-8f37-4f1b7fb0f0a2] Created VM on the ESX host {{(pid=62519) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1811.184069] env[62519]: DEBUG oslo_concurrency.lockutils [None req-34394a41-2f5a-4a60-841d-89a226332d22 tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1811.185258] env[62519]: DEBUG nova.compute.manager [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] [instance: f7d5c77d-6c78-4969-b511-2b03ab624c84] Instance has had 0 of 5 cleanup attempts {{(pid=62519) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11733}} [ 1811.206515] env[62519]: DEBUG nova.network.neutron [None req-04c01760-a718-4b80-a003-ee7594a61827 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] [instance: 21daee7a-1788-4a1c-969e-65b696702bf2] Successfully updated port: 24a7965e-a68b-4ffc-b5ab-d1bbaf657f40 {{(pid=62519) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1811.210791] env[62519]: DEBUG nova.compute.utils [None req-3672f208-1af3-4e50-89f3-c43121d90870 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Using /dev/sd instead of None {{(pid=62519) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1811.216788] env[62519]: DEBUG nova.compute.manager [None req-3672f208-1af3-4e50-89f3-c43121d90870 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] [instance: f3665f89-1747-4567-9e56-c937d4ac81da] Allocating IP information in the background. {{(pid=62519) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1989}} [ 1811.216788] env[62519]: DEBUG nova.network.neutron [None req-3672f208-1af3-4e50-89f3-c43121d90870 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] [instance: f3665f89-1747-4567-9e56-c937d4ac81da] allocate_for_instance() {{(pid=62519) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1811.244096] env[62519]: DEBUG oslo_vmware.api [None req-58d8e09b-2ec4-4296-bd95-201b039f8f45 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Task: {'id': task-1803028, 'name': Rename_Task} progress is 99%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1811.252018] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-519eb57d-4b88-426c-9897-17852c2fb51f {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1811.258046] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97d2934e-c298-4233-9e00-cfadcfe10084 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1811.295672] env[62519]: DEBUG nova.policy [None req-3672f208-1af3-4e50-89f3-c43121d90870 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '710cbaa277014d8389c2c24cd739dbde', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '448555031bb64aefafd0fcc67f4df10a', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62519) authorize /opt/stack/nova/nova/policy.py:192}} [ 1811.298196] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8b86e25-ed21-4d0a-a0b2-6eddac6994a5 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1811.309825] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec2c67ee-0d21-4505-84d9-cc4264ba4116 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1811.326395] env[62519]: DEBUG nova.compute.provider_tree [None req-888de478-1e31-43ae-bdd7-d7e8bcd57639 tempest-ServerActionsV293TestJSON-760697799 tempest-ServerActionsV293TestJSON-760697799-project-member] Inventory has not changed in ProviderTree for provider: f8ca0d98-9158-4b85-ae0e-b106f966dd44 {{(pid=62519) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1811.482172] env[62519]: DEBUG nova.network.neutron [req-64e09456-292f-4248-a440-1f246c1b0b2f req-c55470e1-0b55-499b-8df2-de5d941471b4 service nova] [instance: 1c4615c7-d145-4529-98bd-1ae3ed51e1b5] Updated VIF entry in instance network info cache for port 6541f904-9576-48c8-972b-84003d7a5315. 
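
The network_info payloads logged by update_instance_cache_with_nw_info (for example the 6541f904-... port entry just below) are JSON-serializable lists of VIF dicts. The purely illustrative snippet below shows how such a payload can be summarized once captured from the log; the sample data is trimmed from the entries in this section and nothing here is Nova API code.

    # Purely illustrative: summarize a captured network_info list from this log.
    network_info = [{
        "id": "6541f904-9576-48c8-972b-84003d7a5315",
        "address": "fa:16:3e:98:b4:3d",
        "devname": "tap6541f904-95",
        "network": {
            "label": "tempest-ServerAddressesTestJSON-59086432-network",
            "subnets": [{"cidr": "192.168.128.0/28",
                         "ips": [{"address": "192.168.128.10", "type": "fixed"}]}],
        },
    }]

    for vif in network_info:
        ips = [ip["address"]
               for subnet in vif["network"]["subnets"]
               for ip in subnet["ips"]]
        print(vif["devname"], vif["address"], ",".join(ips))
    # -> tap6541f904-95 fa:16:3e:98:b4:3d 192.168.128.10
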
{{(pid=62519) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1811.483044] env[62519]: DEBUG nova.network.neutron [req-64e09456-292f-4248-a440-1f246c1b0b2f req-c55470e1-0b55-499b-8df2-de5d941471b4 service nova] [instance: 1c4615c7-d145-4529-98bd-1ae3ed51e1b5] Updating instance_info_cache with network_info: [{"id": "6541f904-9576-48c8-972b-84003d7a5315", "address": "fa:16:3e:98:b4:3d", "network": {"id": "62a33531-01c8-4b73-86c8-26c8e981a7d8", "bridge": "br-int", "label": "tempest-ServerAddressesTestJSON-59086432-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1d6c914bac4e4293ade25fd3da4576e7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0a88b707-352e-4be7-b1d6-ad6074b40ed9", "external-id": "nsx-vlan-transportzone-789", "segmentation_id": 789, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6541f904-95", "ovs_interfaceid": "6541f904-9576-48c8-972b-84003d7a5315", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1811.591185] env[62519]: DEBUG oslo_concurrency.lockutils [None req-35032167-8053-431a-a317-fa8f7342ca10 tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] Acquiring lock "refresh_cache-99f22198-1a65-4d0d-b665-90c7063dbdb9" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1811.591185] env[62519]: DEBUG oslo_concurrency.lockutils [None req-35032167-8053-431a-a317-fa8f7342ca10 tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] Acquired lock "refresh_cache-99f22198-1a65-4d0d-b665-90c7063dbdb9" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1811.591185] env[62519]: DEBUG nova.network.neutron [None req-35032167-8053-431a-a317-fa8f7342ca10 tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] [instance: 99f22198-1a65-4d0d-b665-90c7063dbdb9] Building network info cache for instance {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1811.591185] env[62519]: DEBUG nova.objects.instance [None req-35032167-8053-431a-a317-fa8f7342ca10 tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] Lazy-loading 'info_cache' on Instance uuid 99f22198-1a65-4d0d-b665-90c7063dbdb9 {{(pid=62519) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1811.637412] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-d15fc234-4631-453a-b364-87a19d8e4ec4 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] [instance: 5da884af-d8d2-409b-99bd-e5370e44e9f0] Powering on the VM {{(pid=62519) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1811.637412] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with 
opID=oslo.vmware-b991653e-de52-4bcb-858b-a6511df8f1dc {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1811.648298] env[62519]: DEBUG oslo_vmware.api [None req-d15fc234-4631-453a-b364-87a19d8e4ec4 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Waiting for the task: (returnval){ [ 1811.648298] env[62519]: value = "task-1803029" [ 1811.648298] env[62519]: _type = "Task" [ 1811.648298] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1811.652857] env[62519]: DEBUG nova.network.neutron [None req-3672f208-1af3-4e50-89f3-c43121d90870 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] [instance: f3665f89-1747-4567-9e56-c937d4ac81da] Successfully created port: 7220339b-d4a7-441b-8227-c956744ce0c0 {{(pid=62519) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1811.661764] env[62519]: DEBUG oslo_vmware.api [None req-d15fc234-4631-453a-b364-87a19d8e4ec4 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Task: {'id': task-1803029, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1811.669189] env[62519]: DEBUG oslo_vmware.api [None req-233e6b89-3144-41a2-9f7d-54ed63bc311d tempest-ServerAddressesTestJSON-575205737 tempest-ServerAddressesTestJSON-575205737-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52f6ae57-0081-16a2-5469-b0f314dc8c82, 'name': SearchDatastore_Task, 'duration_secs': 0.010653} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1811.669513] env[62519]: DEBUG oslo_concurrency.lockutils [None req-233e6b89-3144-41a2-9f7d-54ed63bc311d tempest-ServerAddressesTestJSON-575205737 tempest-ServerAddressesTestJSON-575205737-project-member] Releasing lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1811.669819] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-233e6b89-3144-41a2-9f7d-54ed63bc311d tempest-ServerAddressesTestJSON-575205737 tempest-ServerAddressesTestJSON-575205737-project-member] [instance: 1c4615c7-d145-4529-98bd-1ae3ed51e1b5] Processing image 15793716-f1d9-4a86-9030-717adf498693 {{(pid=62519) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1811.670192] env[62519]: DEBUG oslo_concurrency.lockutils [None req-233e6b89-3144-41a2-9f7d-54ed63bc311d tempest-ServerAddressesTestJSON-575205737 tempest-ServerAddressesTestJSON-575205737-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1811.670293] env[62519]: DEBUG oslo_concurrency.lockutils [None req-233e6b89-3144-41a2-9f7d-54ed63bc311d tempest-ServerAddressesTestJSON-575205737 tempest-ServerAddressesTestJSON-575205737-project-member] Acquired lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1811.670474] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-233e6b89-3144-41a2-9f7d-54ed63bc311d tempest-ServerAddressesTestJSON-575205737 tempest-ServerAddressesTestJSON-575205737-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62519) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1811.670777] env[62519]: DEBUG oslo_concurrency.lockutils [None req-34394a41-2f5a-4a60-841d-89a226332d22 tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] Acquired lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1811.671051] env[62519]: DEBUG oslo_concurrency.lockutils [None req-34394a41-2f5a-4a60-841d-89a226332d22 tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1811.671345] env[62519]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ae9f1f03-def0-4c22-9da2-975fd4c00de3 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1811.673505] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-381daaf9-2e49-4d08-ac92-c02d6f0c1055 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1811.680352] 
env[62519]: DEBUG oslo_vmware.api [None req-34394a41-2f5a-4a60-841d-89a226332d22 tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] Waiting for the task: (returnval){ [ 1811.680352] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]52d6ec63-6acf-1ddc-4756-8575edded279" [ 1811.680352] env[62519]: _type = "Task" [ 1811.680352] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1811.686623] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-233e6b89-3144-41a2-9f7d-54ed63bc311d tempest-ServerAddressesTestJSON-575205737 tempest-ServerAddressesTestJSON-575205737-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62519) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1811.686827] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-233e6b89-3144-41a2-9f7d-54ed63bc311d tempest-ServerAddressesTestJSON-575205737 tempest-ServerAddressesTestJSON-575205737-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62519) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1811.690783] env[62519]: DEBUG nova.compute.manager [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] [instance: 4a29bff8-050a-4ad5-9d06-3a59c40b97ee] Instance has had 0 of 5 cleanup attempts {{(pid=62519) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11733}} [ 1811.692525] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-99ccedee-1963-4a07-81f4-0510e483f8eb {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1811.696719] env[62519]: DEBUG oslo_vmware.api [None req-34394a41-2f5a-4a60-841d-89a226332d22 tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52d6ec63-6acf-1ddc-4756-8575edded279, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1811.700835] env[62519]: DEBUG oslo_vmware.api [None req-233e6b89-3144-41a2-9f7d-54ed63bc311d tempest-ServerAddressesTestJSON-575205737 tempest-ServerAddressesTestJSON-575205737-project-member] Waiting for the task: (returnval){ [ 1811.700835] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]524575aa-f298-548f-3535-eab7e2d2478c" [ 1811.700835] env[62519]: _type = "Task" [ 1811.700835] env[62519]: } to complete. 
{{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1811.710772] env[62519]: DEBUG oslo_concurrency.lockutils [None req-04c01760-a718-4b80-a003-ee7594a61827 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Acquiring lock "refresh_cache-21daee7a-1788-4a1c-969e-65b696702bf2" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1811.710917] env[62519]: DEBUG oslo_concurrency.lockutils [None req-04c01760-a718-4b80-a003-ee7594a61827 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Acquired lock "refresh_cache-21daee7a-1788-4a1c-969e-65b696702bf2" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1811.711076] env[62519]: DEBUG nova.network.neutron [None req-04c01760-a718-4b80-a003-ee7594a61827 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] [instance: 21daee7a-1788-4a1c-969e-65b696702bf2] Building network info cache for instance {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1811.712441] env[62519]: DEBUG oslo_vmware.api [None req-233e6b89-3144-41a2-9f7d-54ed63bc311d tempest-ServerAddressesTestJSON-575205737 tempest-ServerAddressesTestJSON-575205737-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]524575aa-f298-548f-3535-eab7e2d2478c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1811.720988] env[62519]: DEBUG nova.compute.manager [None req-3672f208-1af3-4e50-89f3-c43121d90870 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] [instance: f3665f89-1747-4567-9e56-c937d4ac81da] Start building block device mappings for instance. {{(pid=62519) _build_resources /opt/stack/nova/nova/compute/manager.py:2873}} [ 1811.738137] env[62519]: DEBUG oslo_vmware.api [None req-58d8e09b-2ec4-4296-bd95-201b039f8f45 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Task: {'id': task-1803028, 'name': Rename_Task} progress is 99%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1811.829358] env[62519]: DEBUG nova.scheduler.client.report [None req-888de478-1e31-43ae-bdd7-d7e8bcd57639 tempest-ServerActionsV293TestJSON-760697799 tempest-ServerActionsV293TestJSON-760697799-project-member] Inventory has not changed for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 157, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1811.987728] env[62519]: DEBUG oslo_concurrency.lockutils [None req-8026f4a0-6a99-4204-bf16-1e491a7b2e00 tempest-ServerActionsV293TestJSON-760697799 tempest-ServerActionsV293TestJSON-760697799-project-member] Acquiring lock "bace23b3-b7f4-4f3b-8986-0076440d096d" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1811.987994] env[62519]: DEBUG oslo_concurrency.lockutils [None req-8026f4a0-6a99-4204-bf16-1e491a7b2e00 tempest-ServerActionsV293TestJSON-760697799 tempest-ServerActionsV293TestJSON-760697799-project-member] Lock "bace23b3-b7f4-4f3b-8986-0076440d096d" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1811.988711] env[62519]: DEBUG oslo_concurrency.lockutils [None req-8026f4a0-6a99-4204-bf16-1e491a7b2e00 tempest-ServerActionsV293TestJSON-760697799 tempest-ServerActionsV293TestJSON-760697799-project-member] Acquiring lock "bace23b3-b7f4-4f3b-8986-0076440d096d-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1811.988917] env[62519]: DEBUG oslo_concurrency.lockutils [None req-8026f4a0-6a99-4204-bf16-1e491a7b2e00 tempest-ServerActionsV293TestJSON-760697799 tempest-ServerActionsV293TestJSON-760697799-project-member] Lock "bace23b3-b7f4-4f3b-8986-0076440d096d-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.001s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1811.989143] env[62519]: DEBUG oslo_concurrency.lockutils [None req-8026f4a0-6a99-4204-bf16-1e491a7b2e00 tempest-ServerActionsV293TestJSON-760697799 tempest-ServerActionsV293TestJSON-760697799-project-member] Lock "bace23b3-b7f4-4f3b-8986-0076440d096d-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1811.994908] env[62519]: DEBUG oslo_concurrency.lockutils [req-64e09456-292f-4248-a440-1f246c1b0b2f req-c55470e1-0b55-499b-8df2-de5d941471b4 service nova] Releasing lock "refresh_cache-1c4615c7-d145-4529-98bd-1ae3ed51e1b5" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1811.995165] env[62519]: 
DEBUG nova.compute.manager [req-64e09456-292f-4248-a440-1f246c1b0b2f req-c55470e1-0b55-499b-8df2-de5d941471b4 service nova] [instance: 71c6bd0c-40da-44eb-8f37-4f1b7fb0f0a2] Received event network-changed-59a0726b-5696-46b1-88ef-7b165b586888 {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1811.995497] env[62519]: DEBUG nova.compute.manager [req-64e09456-292f-4248-a440-1f246c1b0b2f req-c55470e1-0b55-499b-8df2-de5d941471b4 service nova] [instance: 71c6bd0c-40da-44eb-8f37-4f1b7fb0f0a2] Refreshing instance network info cache due to event network-changed-59a0726b-5696-46b1-88ef-7b165b586888. {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11628}} [ 1811.995572] env[62519]: DEBUG oslo_concurrency.lockutils [req-64e09456-292f-4248-a440-1f246c1b0b2f req-c55470e1-0b55-499b-8df2-de5d941471b4 service nova] Acquiring lock "refresh_cache-71c6bd0c-40da-44eb-8f37-4f1b7fb0f0a2" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1811.995667] env[62519]: DEBUG oslo_concurrency.lockutils [req-64e09456-292f-4248-a440-1f246c1b0b2f req-c55470e1-0b55-499b-8df2-de5d941471b4 service nova] Acquired lock "refresh_cache-71c6bd0c-40da-44eb-8f37-4f1b7fb0f0a2" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1811.995826] env[62519]: DEBUG nova.network.neutron [req-64e09456-292f-4248-a440-1f246c1b0b2f req-c55470e1-0b55-499b-8df2-de5d941471b4 service nova] [instance: 71c6bd0c-40da-44eb-8f37-4f1b7fb0f0a2] Refreshing network info cache for port 59a0726b-5696-46b1-88ef-7b165b586888 {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1811.997214] env[62519]: INFO nova.compute.manager [None req-8026f4a0-6a99-4204-bf16-1e491a7b2e00 tempest-ServerActionsV293TestJSON-760697799 tempest-ServerActionsV293TestJSON-760697799-project-member] [instance: bace23b3-b7f4-4f3b-8986-0076440d096d] Terminating instance [ 1812.096581] env[62519]: DEBUG nova.objects.base [None req-35032167-8053-431a-a317-fa8f7342ca10 tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] Object Instance<99f22198-1a65-4d0d-b665-90c7063dbdb9> lazy-loaded attributes: flavor,info_cache {{(pid=62519) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1812.164041] env[62519]: DEBUG oslo_vmware.api [None req-d15fc234-4631-453a-b364-87a19d8e4ec4 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Task: {'id': task-1803029, 'name': PowerOnVM_Task} progress is 100%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1812.194238] env[62519]: DEBUG oslo_vmware.api [None req-34394a41-2f5a-4a60-841d-89a226332d22 tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52d6ec63-6acf-1ddc-4756-8575edded279, 'name': SearchDatastore_Task, 'duration_secs': 0.030671} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1812.194505] env[62519]: DEBUG oslo_concurrency.lockutils [None req-34394a41-2f5a-4a60-841d-89a226332d22 tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] Releasing lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1812.194731] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-34394a41-2f5a-4a60-841d-89a226332d22 tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] [instance: 71c6bd0c-40da-44eb-8f37-4f1b7fb0f0a2] Processing image 15793716-f1d9-4a86-9030-717adf498693 {{(pid=62519) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1812.194994] env[62519]: DEBUG oslo_concurrency.lockutils [None req-34394a41-2f5a-4a60-841d-89a226332d22 tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1812.198422] env[62519]: DEBUG nova.compute.manager [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] [instance: 4c336ad1-8ce6-4f89-843e-0baae0d0dbda] Instance has had 0 of 5 cleanup attempts {{(pid=62519) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11733}} [ 1812.212715] env[62519]: DEBUG oslo_vmware.api [None req-233e6b89-3144-41a2-9f7d-54ed63bc311d tempest-ServerAddressesTestJSON-575205737 tempest-ServerAddressesTestJSON-575205737-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]524575aa-f298-548f-3535-eab7e2d2478c, 'name': SearchDatastore_Task, 'duration_secs': 0.009698} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1812.212715] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-745bb783-311d-4491-b149-866a69122279 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1812.221054] env[62519]: DEBUG oslo_vmware.api [None req-233e6b89-3144-41a2-9f7d-54ed63bc311d tempest-ServerAddressesTestJSON-575205737 tempest-ServerAddressesTestJSON-575205737-project-member] Waiting for the task: (returnval){ [ 1812.221054] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]52e560fa-0430-3de3-9762-a6e84aefd0c5" [ 1812.221054] env[62519]: _type = "Task" [ 1812.221054] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1812.237468] env[62519]: DEBUG oslo_vmware.api [None req-233e6b89-3144-41a2-9f7d-54ed63bc311d tempest-ServerAddressesTestJSON-575205737 tempest-ServerAddressesTestJSON-575205737-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52e560fa-0430-3de3-9762-a6e84aefd0c5, 'name': SearchDatastore_Task, 'duration_secs': 0.010883} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1812.238689] env[62519]: DEBUG oslo_concurrency.lockutils [None req-233e6b89-3144-41a2-9f7d-54ed63bc311d tempest-ServerAddressesTestJSON-575205737 tempest-ServerAddressesTestJSON-575205737-project-member] Releasing lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1812.238963] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-233e6b89-3144-41a2-9f7d-54ed63bc311d tempest-ServerAddressesTestJSON-575205737 tempest-ServerAddressesTestJSON-575205737-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk to [datastore1] 1c4615c7-d145-4529-98bd-1ae3ed51e1b5/1c4615c7-d145-4529-98bd-1ae3ed51e1b5.vmdk {{(pid=62519) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1812.239260] env[62519]: DEBUG oslo_concurrency.lockutils [None req-34394a41-2f5a-4a60-841d-89a226332d22 tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] Acquired lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1812.239447] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-34394a41-2f5a-4a60-841d-89a226332d22 tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62519) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1812.239749] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d35de6c7-4290-4d87-a531-9d012ebdab9e {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1812.246760] env[62519]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-dcfa9d8c-8898-4ce5-8772-bfd6766eda06 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1812.248816] env[62519]: DEBUG oslo_vmware.api [None req-58d8e09b-2ec4-4296-bd95-201b039f8f45 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Task: {'id': task-1803028, 'name': Rename_Task, 'duration_secs': 1.254797} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1812.249439] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-58d8e09b-2ec4-4296-bd95-201b039f8f45 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] [instance: 2bc8f11e-82fc-4acb-945e-15327c133920] Powering on the VM {{(pid=62519) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1812.249952] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-bab11a60-fe14-495b-a386-2416e03e842a {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1812.255613] env[62519]: DEBUG oslo_vmware.api [None req-233e6b89-3144-41a2-9f7d-54ed63bc311d tempest-ServerAddressesTestJSON-575205737 tempest-ServerAddressesTestJSON-575205737-project-member] Waiting for the task: (returnval){ [ 1812.255613] env[62519]: value = "task-1803030" [ 1812.255613] env[62519]: _type = "Task" [ 1812.255613] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1812.263435] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-34394a41-2f5a-4a60-841d-89a226332d22 tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62519) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1812.263435] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-34394a41-2f5a-4a60-841d-89a226332d22 tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62519) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1812.263435] env[62519]: DEBUG oslo_vmware.api [None req-58d8e09b-2ec4-4296-bd95-201b039f8f45 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Waiting for the task: (returnval){ [ 1812.263435] env[62519]: value = "task-1803031" [ 1812.263435] env[62519]: _type = "Task" [ 1812.263435] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1812.264093] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d95b7208-807b-452d-8cda-3399fad6d244 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1812.275423] env[62519]: DEBUG oslo_vmware.api [None req-233e6b89-3144-41a2-9f7d-54ed63bc311d tempest-ServerAddressesTestJSON-575205737 tempest-ServerAddressesTestJSON-575205737-project-member] Task: {'id': task-1803030, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1812.283649] env[62519]: DEBUG oslo_vmware.api [None req-58d8e09b-2ec4-4296-bd95-201b039f8f45 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Task: {'id': task-1803031, 'name': PowerOnVM_Task} progress is 33%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1812.285522] env[62519]: DEBUG oslo_vmware.api [None req-34394a41-2f5a-4a60-841d-89a226332d22 tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] Waiting for the task: (returnval){ [ 1812.285522] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]520e98ed-853c-c805-81d9-b071522f6886" [ 1812.285522] env[62519]: _type = "Task" [ 1812.285522] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1812.299390] env[62519]: DEBUG oslo_vmware.api [None req-34394a41-2f5a-4a60-841d-89a226332d22 tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]520e98ed-853c-c805-81d9-b071522f6886, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1812.305188] env[62519]: DEBUG nova.network.neutron [None req-04c01760-a718-4b80-a003-ee7594a61827 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] [instance: 21daee7a-1788-4a1c-969e-65b696702bf2] Instance cache missing network info. {{(pid=62519) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1812.337120] env[62519]: DEBUG oslo_concurrency.lockutils [None req-888de478-1e31-43ae-bdd7-d7e8bcd57639 tempest-ServerActionsV293TestJSON-760697799 tempest-ServerActionsV293TestJSON-760697799-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.461s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1812.337120] env[62519]: INFO nova.compute.manager [None req-888de478-1e31-43ae-bdd7-d7e8bcd57639 tempest-ServerActionsV293TestJSON-760697799 tempest-ServerActionsV293TestJSON-760697799-project-member] [instance: bace23b3-b7f4-4f3b-8986-0076440d096d] Successfully reverted task state from rebuilding on failure for instance. 
[ 1812.375288] env[62519]: DEBUG nova.compute.manager [req-3d5104a6-db15-40df-a453-03e380844c74 req-1b2579d9-86d7-4a80-8f13-69cfcff7d11f service nova] [instance: 21daee7a-1788-4a1c-969e-65b696702bf2] Received event network-vif-plugged-24a7965e-a68b-4ffc-b5ab-d1bbaf657f40 {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1812.375455] env[62519]: DEBUG oslo_concurrency.lockutils [req-3d5104a6-db15-40df-a453-03e380844c74 req-1b2579d9-86d7-4a80-8f13-69cfcff7d11f service nova] Acquiring lock "21daee7a-1788-4a1c-969e-65b696702bf2-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1812.375816] env[62519]: DEBUG oslo_concurrency.lockutils [req-3d5104a6-db15-40df-a453-03e380844c74 req-1b2579d9-86d7-4a80-8f13-69cfcff7d11f service nova] Lock "21daee7a-1788-4a1c-969e-65b696702bf2-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1812.376260] env[62519]: DEBUG oslo_concurrency.lockutils [req-3d5104a6-db15-40df-a453-03e380844c74 req-1b2579d9-86d7-4a80-8f13-69cfcff7d11f service nova] Lock "21daee7a-1788-4a1c-969e-65b696702bf2-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1812.376723] env[62519]: DEBUG nova.compute.manager [req-3d5104a6-db15-40df-a453-03e380844c74 req-1b2579d9-86d7-4a80-8f13-69cfcff7d11f service nova] [instance: 21daee7a-1788-4a1c-969e-65b696702bf2] No waiting events found dispatching network-vif-plugged-24a7965e-a68b-4ffc-b5ab-d1bbaf657f40 {{(pid=62519) pop_instance_event /opt/stack/nova/nova/compute/manager.py:323}} [ 1812.376960] env[62519]: WARNING nova.compute.manager [req-3d5104a6-db15-40df-a453-03e380844c74 req-1b2579d9-86d7-4a80-8f13-69cfcff7d11f service nova] [instance: 21daee7a-1788-4a1c-969e-65b696702bf2] Received unexpected event network-vif-plugged-24a7965e-a68b-4ffc-b5ab-d1bbaf657f40 for instance with vm_state building and task_state spawning. [ 1812.377358] env[62519]: DEBUG nova.compute.manager [req-3d5104a6-db15-40df-a453-03e380844c74 req-1b2579d9-86d7-4a80-8f13-69cfcff7d11f service nova] [instance: 21daee7a-1788-4a1c-969e-65b696702bf2] Received event network-changed-24a7965e-a68b-4ffc-b5ab-d1bbaf657f40 {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1812.377532] env[62519]: DEBUG nova.compute.manager [req-3d5104a6-db15-40df-a453-03e380844c74 req-1b2579d9-86d7-4a80-8f13-69cfcff7d11f service nova] [instance: 21daee7a-1788-4a1c-969e-65b696702bf2] Refreshing instance network info cache due to event network-changed-24a7965e-a68b-4ffc-b5ab-d1bbaf657f40. 
{{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11628}} [ 1812.377710] env[62519]: DEBUG oslo_concurrency.lockutils [req-3d5104a6-db15-40df-a453-03e380844c74 req-1b2579d9-86d7-4a80-8f13-69cfcff7d11f service nova] Acquiring lock "refresh_cache-21daee7a-1788-4a1c-969e-65b696702bf2" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1812.506520] env[62519]: DEBUG nova.compute.manager [None req-8026f4a0-6a99-4204-bf16-1e491a7b2e00 tempest-ServerActionsV293TestJSON-760697799 tempest-ServerActionsV293TestJSON-760697799-project-member] [instance: bace23b3-b7f4-4f3b-8986-0076440d096d] Start destroying the instance on the hypervisor. {{(pid=62519) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3166}} [ 1812.506662] env[62519]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-d8e40db7-6612-400d-ac88-8de35f21d645 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1812.518406] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-435d84f7-825b-47ed-a547-a80fb7fa113c {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1812.565995] env[62519]: WARNING nova.virt.vmwareapi.driver [None req-8026f4a0-6a99-4204-bf16-1e491a7b2e00 tempest-ServerActionsV293TestJSON-760697799 tempest-ServerActionsV293TestJSON-760697799-project-member] [instance: bace23b3-b7f4-4f3b-8986-0076440d096d] Instance does not exists. Proceeding to delete instance properties on datastore: nova.exception.InstanceNotFound: Instance bace23b3-b7f4-4f3b-8986-0076440d096d could not be found. [ 1812.566232] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-8026f4a0-6a99-4204-bf16-1e491a7b2e00 tempest-ServerActionsV293TestJSON-760697799 tempest-ServerActionsV293TestJSON-760697799-project-member] [instance: bace23b3-b7f4-4f3b-8986-0076440d096d] Destroying instance {{(pid=62519) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1812.569122] env[62519]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-8f5569f1-2a3a-4dea-ad72-b004dbe76ab8 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1812.572257] env[62519]: DEBUG nova.network.neutron [None req-04c01760-a718-4b80-a003-ee7594a61827 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] [instance: 21daee7a-1788-4a1c-969e-65b696702bf2] Updating instance_info_cache with network_info: [{"id": "24a7965e-a68b-4ffc-b5ab-d1bbaf657f40", "address": "fa:16:3e:20:6c:4a", "network": {"id": "8af40fa9-af04-4837-8508-5d0e5b7f1d9c", "bridge": "br-int", "label": "tempest-ServersTestJSON-1824239670-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0e755fb5a6e94068b6c99b1638081f5f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7ab8d568-adb0-4f3b-b6cc-68413e6546ae", "external-id": "nsx-vlan-transportzone-86", "segmentation_id": 
86, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap24a7965e-a6", "ovs_interfaceid": "24a7965e-a68b-4ffc-b5ab-d1bbaf657f40", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1812.584015] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76fa97af-1404-4b46-a287-17f41ddad650 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1812.636854] env[62519]: WARNING nova.virt.vmwareapi.vmops [None req-8026f4a0-6a99-4204-bf16-1e491a7b2e00 tempest-ServerActionsV293TestJSON-760697799 tempest-ServerActionsV293TestJSON-760697799-project-member] [instance: bace23b3-b7f4-4f3b-8986-0076440d096d] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance bace23b3-b7f4-4f3b-8986-0076440d096d could not be found. [ 1812.637437] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-8026f4a0-6a99-4204-bf16-1e491a7b2e00 tempest-ServerActionsV293TestJSON-760697799 tempest-ServerActionsV293TestJSON-760697799-project-member] [instance: bace23b3-b7f4-4f3b-8986-0076440d096d] Instance destroyed {{(pid=62519) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1812.637697] env[62519]: INFO nova.compute.manager [None req-8026f4a0-6a99-4204-bf16-1e491a7b2e00 tempest-ServerActionsV293TestJSON-760697799 tempest-ServerActionsV293TestJSON-760697799-project-member] [instance: bace23b3-b7f4-4f3b-8986-0076440d096d] Took 0.13 seconds to destroy the instance on the hypervisor. [ 1812.637994] env[62519]: DEBUG oslo.service.loopingcall [None req-8026f4a0-6a99-4204-bf16-1e491a7b2e00 tempest-ServerActionsV293TestJSON-760697799 tempest-ServerActionsV293TestJSON-760697799-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62519) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1812.638330] env[62519]: DEBUG nova.compute.manager [-] [instance: bace23b3-b7f4-4f3b-8986-0076440d096d] Deallocating network for instance {{(pid=62519) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2297}} [ 1812.638491] env[62519]: DEBUG nova.network.neutron [-] [instance: bace23b3-b7f4-4f3b-8986-0076440d096d] deallocate_for_instance() {{(pid=62519) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1812.662108] env[62519]: DEBUG oslo_vmware.api [None req-d15fc234-4631-453a-b364-87a19d8e4ec4 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Task: {'id': task-1803029, 'name': PowerOnVM_Task, 'duration_secs': 0.540457} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1812.662493] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-d15fc234-4631-453a-b364-87a19d8e4ec4 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] [instance: 5da884af-d8d2-409b-99bd-e5370e44e9f0] Powered on the VM {{(pid=62519) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1812.662763] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-d15fc234-4631-453a-b364-87a19d8e4ec4 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] [instance: 5da884af-d8d2-409b-99bd-e5370e44e9f0] Updating instance '5da884af-d8d2-409b-99bd-e5370e44e9f0' progress to 100 {{(pid=62519) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1812.706384] env[62519]: DEBUG nova.compute.manager [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] [instance: 3aab3adb-b28b-45dd-880f-b1cfbaeeed0c] Instance has had 0 of 5 cleanup attempts {{(pid=62519) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11733}} [ 1812.739446] env[62519]: DEBUG nova.compute.manager [None req-3672f208-1af3-4e50-89f3-c43121d90870 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] [instance: f3665f89-1747-4567-9e56-c937d4ac81da] Start spawning the instance on the hypervisor. {{(pid=62519) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2647}} [ 1812.770750] env[62519]: DEBUG oslo_vmware.api [None req-233e6b89-3144-41a2-9f7d-54ed63bc311d tempest-ServerAddressesTestJSON-575205737 tempest-ServerAddressesTestJSON-575205737-project-member] Task: {'id': task-1803030, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1812.784107] env[62519]: DEBUG oslo_vmware.api [None req-58d8e09b-2ec4-4296-bd95-201b039f8f45 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Task: {'id': task-1803031, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1812.786735] env[62519]: DEBUG nova.virt.hardware [None req-3672f208-1af3-4e50-89f3-c43121d90870 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T07:56:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T07:55:55Z,direct_url=,disk_format='vmdk',id=15793716-f1d9-4a86-9030-717adf498693,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4069337684d348e0a1ab4eb6a1a2b14d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T07:55:56Z,virtual_size=,visibility=), allow threads: False {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1812.787164] env[62519]: DEBUG nova.virt.hardware [None req-3672f208-1af3-4e50-89f3-c43121d90870 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Flavor limits 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1812.787164] env[62519]: DEBUG nova.virt.hardware [None req-3672f208-1af3-4e50-89f3-c43121d90870 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Image limits 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1812.787358] env[62519]: DEBUG nova.virt.hardware [None req-3672f208-1af3-4e50-89f3-c43121d90870 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Flavor pref 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1812.787501] env[62519]: DEBUG nova.virt.hardware [None req-3672f208-1af3-4e50-89f3-c43121d90870 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Image pref 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1812.787647] env[62519]: DEBUG nova.virt.hardware [None req-3672f208-1af3-4e50-89f3-c43121d90870 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1812.787856] env[62519]: DEBUG nova.virt.hardware [None req-3672f208-1af3-4e50-89f3-c43121d90870 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1812.788086] env[62519]: DEBUG nova.virt.hardware [None req-3672f208-1af3-4e50-89f3-c43121d90870 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62519) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1812.788274] env[62519]: DEBUG nova.virt.hardware [None req-3672f208-1af3-4e50-89f3-c43121d90870 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Got 1 possible topologies {{(pid=62519) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1812.788441] env[62519]: DEBUG nova.virt.hardware [None req-3672f208-1af3-4e50-89f3-c43121d90870 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1812.788613] env[62519]: DEBUG nova.virt.hardware [None req-3672f208-1af3-4e50-89f3-c43121d90870 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1812.789669] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88ba400f-e8e3-41e9-82a4-88683026230e {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1812.808655] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41e61971-75a0-4b8f-99c0-a282a0870109 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1812.812996] env[62519]: DEBUG oslo_vmware.api [None req-34394a41-2f5a-4a60-841d-89a226332d22 tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]520e98ed-853c-c805-81d9-b071522f6886, 'name': SearchDatastore_Task, 'duration_secs': 0.023279} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1812.822994] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7599e1d4-a489-4e38-bffd-ccfe7b96a8c1 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1812.825139] env[62519]: DEBUG oslo_concurrency.lockutils [None req-de264d36-6a35-4acb-b918-a8ec7d4ea06e tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Acquiring lock "4a9656fd-2b9f-4dd6-8b71-39e55813f2f6" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1812.825408] env[62519]: DEBUG oslo_concurrency.lockutils [None req-de264d36-6a35-4acb-b918-a8ec7d4ea06e tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Lock "4a9656fd-2b9f-4dd6-8b71-39e55813f2f6" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1812.843051] env[62519]: DEBUG oslo_vmware.api [None req-34394a41-2f5a-4a60-841d-89a226332d22 tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] Waiting for the task: (returnval){ [ 1812.843051] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]52abfb2b-a349-6f30-13d0-92fa985337bb" [ 1812.843051] env[62519]: _type = "Task" [ 1812.843051] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1812.859132] env[62519]: DEBUG oslo_vmware.api [None req-34394a41-2f5a-4a60-841d-89a226332d22 tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52abfb2b-a349-6f30-13d0-92fa985337bb, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1813.076843] env[62519]: DEBUG oslo_concurrency.lockutils [None req-04c01760-a718-4b80-a003-ee7594a61827 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Releasing lock "refresh_cache-21daee7a-1788-4a1c-969e-65b696702bf2" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1813.077512] env[62519]: DEBUG nova.compute.manager [None req-04c01760-a718-4b80-a003-ee7594a61827 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] [instance: 21daee7a-1788-4a1c-969e-65b696702bf2] Instance network_info: |[{"id": "24a7965e-a68b-4ffc-b5ab-d1bbaf657f40", "address": "fa:16:3e:20:6c:4a", "network": {"id": "8af40fa9-af04-4837-8508-5d0e5b7f1d9c", "bridge": "br-int", "label": "tempest-ServersTestJSON-1824239670-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0e755fb5a6e94068b6c99b1638081f5f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7ab8d568-adb0-4f3b-b6cc-68413e6546ae", "external-id": "nsx-vlan-transportzone-86", "segmentation_id": 86, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap24a7965e-a6", "ovs_interfaceid": "24a7965e-a68b-4ffc-b5ab-d1bbaf657f40", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62519) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2004}} [ 1813.077674] env[62519]: DEBUG oslo_concurrency.lockutils [req-3d5104a6-db15-40df-a453-03e380844c74 req-1b2579d9-86d7-4a80-8f13-69cfcff7d11f service nova] Acquired lock "refresh_cache-21daee7a-1788-4a1c-969e-65b696702bf2" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1813.077972] env[62519]: DEBUG nova.network.neutron [req-3d5104a6-db15-40df-a453-03e380844c74 req-1b2579d9-86d7-4a80-8f13-69cfcff7d11f service nova] [instance: 21daee7a-1788-4a1c-969e-65b696702bf2] Refreshing network info cache for port 24a7965e-a68b-4ffc-b5ab-d1bbaf657f40 {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1813.079376] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-04c01760-a718-4b80-a003-ee7594a61827 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] [instance: 21daee7a-1788-4a1c-969e-65b696702bf2] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:20:6c:4a', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '7ab8d568-adb0-4f3b-b6cc-68413e6546ae', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '24a7965e-a68b-4ffc-b5ab-d1bbaf657f40', 'vif_model': 'vmxnet3'}] {{(pid=62519) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1813.088992] env[62519]: DEBUG oslo.service.loopingcall [None req-04c01760-a718-4b80-a003-ee7594a61827 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Waiting for function 
nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62519) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1813.089646] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 21daee7a-1788-4a1c-969e-65b696702bf2] Creating VM on the ESX host {{(pid=62519) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1813.089926] env[62519]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-ea51292e-587a-4fe7-a379-8e81c18e18cf {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1813.115608] env[62519]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1813.115608] env[62519]: value = "task-1803032" [ 1813.115608] env[62519]: _type = "Task" [ 1813.115608] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1813.128997] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1803032, 'name': CreateVM_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1813.211640] env[62519]: DEBUG nova.compute.manager [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] [instance: 56790738-4759-468a-9f43-f9c2bc2de23a] Instance has had 0 of 5 cleanup attempts {{(pid=62519) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11733}} [ 1813.267201] env[62519]: DEBUG oslo_vmware.api [None req-233e6b89-3144-41a2-9f7d-54ed63bc311d tempest-ServerAddressesTestJSON-575205737 tempest-ServerAddressesTestJSON-575205737-project-member] Task: {'id': task-1803030, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.600991} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1813.267603] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-233e6b89-3144-41a2-9f7d-54ed63bc311d tempest-ServerAddressesTestJSON-575205737 tempest-ServerAddressesTestJSON-575205737-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk to [datastore1] 1c4615c7-d145-4529-98bd-1ae3ed51e1b5/1c4615c7-d145-4529-98bd-1ae3ed51e1b5.vmdk {{(pid=62519) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1813.267813] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-233e6b89-3144-41a2-9f7d-54ed63bc311d tempest-ServerAddressesTestJSON-575205737 tempest-ServerAddressesTestJSON-575205737-project-member] [instance: 1c4615c7-d145-4529-98bd-1ae3ed51e1b5] Extending root virtual disk to 1048576 {{(pid=62519) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1813.268083] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-4c37da52-4404-4720-bdd2-d72cbd57f85c {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1813.280165] env[62519]: DEBUG oslo_vmware.api [None req-58d8e09b-2ec4-4296-bd95-201b039f8f45 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Task: {'id': task-1803031, 'name': PowerOnVM_Task, 'duration_secs': 0.808983} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1813.281461] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-58d8e09b-2ec4-4296-bd95-201b039f8f45 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] [instance: 2bc8f11e-82fc-4acb-945e-15327c133920] Powered on the VM {{(pid=62519) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1813.281666] env[62519]: INFO nova.compute.manager [None req-58d8e09b-2ec4-4296-bd95-201b039f8f45 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] [instance: 2bc8f11e-82fc-4acb-945e-15327c133920] Took 15.50 seconds to spawn the instance on the hypervisor. [ 1813.282796] env[62519]: DEBUG nova.compute.manager [None req-58d8e09b-2ec4-4296-bd95-201b039f8f45 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] [instance: 2bc8f11e-82fc-4acb-945e-15327c133920] Checking state {{(pid=62519) _get_power_state /opt/stack/nova/nova/compute/manager.py:1799}} [ 1813.282796] env[62519]: DEBUG oslo_vmware.api [None req-233e6b89-3144-41a2-9f7d-54ed63bc311d tempest-ServerAddressesTestJSON-575205737 tempest-ServerAddressesTestJSON-575205737-project-member] Waiting for the task: (returnval){ [ 1813.282796] env[62519]: value = "task-1803033" [ 1813.282796] env[62519]: _type = "Task" [ 1813.282796] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1813.283896] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12bc2a39-aff6-443a-b36f-729ecdad71f1 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1813.296971] env[62519]: DEBUG oslo_vmware.api [None req-233e6b89-3144-41a2-9f7d-54ed63bc311d tempest-ServerAddressesTestJSON-575205737 tempest-ServerAddressesTestJSON-575205737-project-member] Task: {'id': task-1803033, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1813.327731] env[62519]: DEBUG nova.compute.manager [None req-de264d36-6a35-4acb-b918-a8ec7d4ea06e tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] [instance: 4a9656fd-2b9f-4dd6-8b71-39e55813f2f6] Starting instance... {{(pid=62519) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2441}} [ 1813.356952] env[62519]: DEBUG oslo_vmware.api [None req-34394a41-2f5a-4a60-841d-89a226332d22 tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52abfb2b-a349-6f30-13d0-92fa985337bb, 'name': SearchDatastore_Task, 'duration_secs': 0.010472} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1813.356952] env[62519]: DEBUG oslo_concurrency.lockutils [None req-34394a41-2f5a-4a60-841d-89a226332d22 tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] Releasing lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1813.358204] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-34394a41-2f5a-4a60-841d-89a226332d22 tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk to [datastore1] 71c6bd0c-40da-44eb-8f37-4f1b7fb0f0a2/71c6bd0c-40da-44eb-8f37-4f1b7fb0f0a2.vmdk {{(pid=62519) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1813.358204] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e7f0f59e-5347-4b5c-bcde-d440d78ec8a8 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1813.366909] env[62519]: DEBUG oslo_vmware.api [None req-34394a41-2f5a-4a60-841d-89a226332d22 tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] Waiting for the task: (returnval){ [ 1813.366909] env[62519]: value = "task-1803034" [ 1813.366909] env[62519]: _type = "Task" [ 1813.366909] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1813.372203] env[62519]: DEBUG nova.network.neutron [req-64e09456-292f-4248-a440-1f246c1b0b2f req-c55470e1-0b55-499b-8df2-de5d941471b4 service nova] [instance: 71c6bd0c-40da-44eb-8f37-4f1b7fb0f0a2] Updated VIF entry in instance network info cache for port 59a0726b-5696-46b1-88ef-7b165b586888. 
{{(pid=62519) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1813.372548] env[62519]: DEBUG nova.network.neutron [req-64e09456-292f-4248-a440-1f246c1b0b2f req-c55470e1-0b55-499b-8df2-de5d941471b4 service nova] [instance: 71c6bd0c-40da-44eb-8f37-4f1b7fb0f0a2] Updating instance_info_cache with network_info: [{"id": "59a0726b-5696-46b1-88ef-7b165b586888", "address": "fa:16:3e:f9:06:41", "network": {"id": "b50c1a89-c14c-481c-b5d9-748f18863a45", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-698120386-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9808ad7e38e34658aac06ebc932b0e32", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f67a2790-f2b0-4d03-b606-0bfaee7a4229", "external-id": "nsx-vlan-transportzone-187", "segmentation_id": 187, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap59a0726b-56", "ovs_interfaceid": "59a0726b-5696-46b1-88ef-7b165b586888", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1813.380483] env[62519]: DEBUG oslo_vmware.api [None req-34394a41-2f5a-4a60-841d-89a226332d22 tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] Task: {'id': task-1803034, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1813.502784] env[62519]: DEBUG nova.network.neutron [None req-35032167-8053-431a-a317-fa8f7342ca10 tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] [instance: 99f22198-1a65-4d0d-b665-90c7063dbdb9] Updating instance_info_cache with network_info: [{"id": "55cb0499-3e6e-42ef-bd75-edafccb32e03", "address": "fa:16:3e:cb:3e:93", "network": {"id": "227244cd-a495-40a7-b8e1-1fb111ecd3ef", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-482185283-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.253", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "91c1732890db42f98f538f7a5ac0d542", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2bf99f85-3a5c-47c6-a603-e215be6ab0bd", "external-id": "nsx-vlan-transportzone-855", "segmentation_id": 855, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap55cb0499-3e", "ovs_interfaceid": "55cb0499-3e6e-42ef-bd75-edafccb32e03", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1813.628800] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1803032, 'name': CreateVM_Task} progress is 99%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1813.712605] env[62519]: DEBUG nova.compute.manager [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] [instance: 42497ab5-cce9-4614-a6d1-dffbf6764d7b] Instance has had 0 of 5 cleanup attempts {{(pid=62519) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11733}} [ 1813.797278] env[62519]: DEBUG oslo_vmware.api [None req-233e6b89-3144-41a2-9f7d-54ed63bc311d tempest-ServerAddressesTestJSON-575205737 tempest-ServerAddressesTestJSON-575205737-project-member] Task: {'id': task-1803033, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.190629} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1813.797579] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-233e6b89-3144-41a2-9f7d-54ed63bc311d tempest-ServerAddressesTestJSON-575205737 tempest-ServerAddressesTestJSON-575205737-project-member] [instance: 1c4615c7-d145-4529-98bd-1ae3ed51e1b5] Extended root virtual disk {{(pid=62519) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1813.798364] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82940485-7fc1-4b40-912a-694bb35468f6 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1813.826286] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-233e6b89-3144-41a2-9f7d-54ed63bc311d tempest-ServerAddressesTestJSON-575205737 tempest-ServerAddressesTestJSON-575205737-project-member] [instance: 1c4615c7-d145-4529-98bd-1ae3ed51e1b5] Reconfiguring VM instance instance-0000005c to attach disk [datastore1] 1c4615c7-d145-4529-98bd-1ae3ed51e1b5/1c4615c7-d145-4529-98bd-1ae3ed51e1b5.vmdk or device None with type sparse {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1813.826908] env[62519]: INFO nova.compute.manager [None req-58d8e09b-2ec4-4296-bd95-201b039f8f45 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] [instance: 2bc8f11e-82fc-4acb-945e-15327c133920] Took 40.21 seconds to build instance. [ 1813.830184] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4a3baabe-61f3-460f-98d7-7ace36864a03 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1813.855164] env[62519]: DEBUG oslo_vmware.api [None req-233e6b89-3144-41a2-9f7d-54ed63bc311d tempest-ServerAddressesTestJSON-575205737 tempest-ServerAddressesTestJSON-575205737-project-member] Waiting for the task: (returnval){ [ 1813.855164] env[62519]: value = "task-1803035" [ 1813.855164] env[62519]: _type = "Task" [ 1813.855164] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1813.867055] env[62519]: DEBUG oslo_vmware.api [None req-233e6b89-3144-41a2-9f7d-54ed63bc311d tempest-ServerAddressesTestJSON-575205737 tempest-ServerAddressesTestJSON-575205737-project-member] Task: {'id': task-1803035, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1813.868222] env[62519]: DEBUG oslo_concurrency.lockutils [None req-de264d36-6a35-4acb-b918-a8ec7d4ea06e tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1813.868453] env[62519]: DEBUG oslo_concurrency.lockutils [None req-de264d36-6a35-4acb-b918-a8ec7d4ea06e tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1813.870060] env[62519]: INFO nova.compute.claims [None req-de264d36-6a35-4acb-b918-a8ec7d4ea06e tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] [instance: 4a9656fd-2b9f-4dd6-8b71-39e55813f2f6] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1813.876884] env[62519]: DEBUG oslo_concurrency.lockutils [req-64e09456-292f-4248-a440-1f246c1b0b2f req-c55470e1-0b55-499b-8df2-de5d941471b4 service nova] Releasing lock "refresh_cache-71c6bd0c-40da-44eb-8f37-4f1b7fb0f0a2" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1813.883287] env[62519]: DEBUG oslo_vmware.api [None req-34394a41-2f5a-4a60-841d-89a226332d22 tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] Task: {'id': task-1803034, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1813.942157] env[62519]: DEBUG nova.network.neutron [None req-3672f208-1af3-4e50-89f3-c43121d90870 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] [instance: f3665f89-1747-4567-9e56-c937d4ac81da] Successfully updated port: 7220339b-d4a7-441b-8227-c956744ce0c0 {{(pid=62519) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1814.005982] env[62519]: DEBUG oslo_concurrency.lockutils [None req-35032167-8053-431a-a317-fa8f7342ca10 tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] Releasing lock "refresh_cache-99f22198-1a65-4d0d-b665-90c7063dbdb9" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1814.036311] env[62519]: DEBUG nova.compute.manager [req-5199bc11-e3b2-43c2-8f55-69ee1acf8261 req-87a8b15d-0dd5-4a2b-8fcb-57ac9f1e8b68 service nova] [instance: bace23b3-b7f4-4f3b-8986-0076440d096d] Received event network-changed-e252f987-8815-4905-a206-237207d5ac4b {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1814.036311] env[62519]: DEBUG nova.compute.manager [req-5199bc11-e3b2-43c2-8f55-69ee1acf8261 req-87a8b15d-0dd5-4a2b-8fcb-57ac9f1e8b68 service nova] [instance: bace23b3-b7f4-4f3b-8986-0076440d096d] Refreshing instance network info cache due to event network-changed-e252f987-8815-4905-a206-237207d5ac4b. 
{{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11628}} [ 1814.036311] env[62519]: DEBUG oslo_concurrency.lockutils [req-5199bc11-e3b2-43c2-8f55-69ee1acf8261 req-87a8b15d-0dd5-4a2b-8fcb-57ac9f1e8b68 service nova] Acquiring lock "refresh_cache-bace23b3-b7f4-4f3b-8986-0076440d096d" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1814.036311] env[62519]: DEBUG oslo_concurrency.lockutils [req-5199bc11-e3b2-43c2-8f55-69ee1acf8261 req-87a8b15d-0dd5-4a2b-8fcb-57ac9f1e8b68 service nova] Acquired lock "refresh_cache-bace23b3-b7f4-4f3b-8986-0076440d096d" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1814.036311] env[62519]: DEBUG nova.network.neutron [req-5199bc11-e3b2-43c2-8f55-69ee1acf8261 req-87a8b15d-0dd5-4a2b-8fcb-57ac9f1e8b68 service nova] [instance: bace23b3-b7f4-4f3b-8986-0076440d096d] Refreshing network info cache for port e252f987-8815-4905-a206-237207d5ac4b {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1814.063042] env[62519]: DEBUG nova.network.neutron [req-3d5104a6-db15-40df-a453-03e380844c74 req-1b2579d9-86d7-4a80-8f13-69cfcff7d11f service nova] [instance: 21daee7a-1788-4a1c-969e-65b696702bf2] Updated VIF entry in instance network info cache for port 24a7965e-a68b-4ffc-b5ab-d1bbaf657f40. {{(pid=62519) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1814.063042] env[62519]: DEBUG nova.network.neutron [req-3d5104a6-db15-40df-a453-03e380844c74 req-1b2579d9-86d7-4a80-8f13-69cfcff7d11f service nova] [instance: 21daee7a-1788-4a1c-969e-65b696702bf2] Updating instance_info_cache with network_info: [{"id": "24a7965e-a68b-4ffc-b5ab-d1bbaf657f40", "address": "fa:16:3e:20:6c:4a", "network": {"id": "8af40fa9-af04-4837-8508-5d0e5b7f1d9c", "bridge": "br-int", "label": "tempest-ServersTestJSON-1824239670-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0e755fb5a6e94068b6c99b1638081f5f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7ab8d568-adb0-4f3b-b6cc-68413e6546ae", "external-id": "nsx-vlan-transportzone-86", "segmentation_id": 86, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap24a7965e-a6", "ovs_interfaceid": "24a7965e-a68b-4ffc-b5ab-d1bbaf657f40", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1814.131612] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1803032, 'name': CreateVM_Task, 'duration_secs': 0.558505} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1814.132018] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 21daee7a-1788-4a1c-969e-65b696702bf2] Created VM on the ESX host {{(pid=62519) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1814.132875] env[62519]: DEBUG oslo_concurrency.lockutils [None req-04c01760-a718-4b80-a003-ee7594a61827 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1814.133203] env[62519]: DEBUG oslo_concurrency.lockutils [None req-04c01760-a718-4b80-a003-ee7594a61827 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Acquired lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1814.133731] env[62519]: DEBUG oslo_concurrency.lockutils [None req-04c01760-a718-4b80-a003-ee7594a61827 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1814.136305] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-936b1e70-b35e-414e-a764-0ba0952d5f6f {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1814.140763] env[62519]: DEBUG oslo_vmware.api [None req-04c01760-a718-4b80-a003-ee7594a61827 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Waiting for the task: (returnval){ [ 1814.140763] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]520efcae-9fdb-b458-b4a2-6a8c44de0537" [ 1814.140763] env[62519]: _type = "Task" [ 1814.140763] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1814.152636] env[62519]: DEBUG oslo_vmware.api [None req-04c01760-a718-4b80-a003-ee7594a61827 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]520efcae-9fdb-b458-b4a2-6a8c44de0537, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1814.216343] env[62519]: DEBUG nova.compute.manager [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] [instance: c61c893f-826b-4874-b253-de6fbffa9e5a] Instance has had 0 of 5 cleanup attempts {{(pid=62519) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11733}} [ 1814.369292] env[62519]: DEBUG oslo_vmware.api [None req-233e6b89-3144-41a2-9f7d-54ed63bc311d tempest-ServerAddressesTestJSON-575205737 tempest-ServerAddressesTestJSON-575205737-project-member] Task: {'id': task-1803035, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1814.391710] env[62519]: DEBUG oslo_vmware.api [None req-34394a41-2f5a-4a60-841d-89a226332d22 tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] Task: {'id': task-1803034, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.008339} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1814.392197] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-34394a41-2f5a-4a60-841d-89a226332d22 tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk to [datastore1] 71c6bd0c-40da-44eb-8f37-4f1b7fb0f0a2/71c6bd0c-40da-44eb-8f37-4f1b7fb0f0a2.vmdk {{(pid=62519) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1814.392502] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-34394a41-2f5a-4a60-841d-89a226332d22 tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] [instance: 71c6bd0c-40da-44eb-8f37-4f1b7fb0f0a2] Extending root virtual disk to 1048576 {{(pid=62519) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1814.392919] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-7dcee298-8e95-4ce8-a353-b80f3806f30a {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1814.405739] env[62519]: DEBUG oslo_vmware.api [None req-34394a41-2f5a-4a60-841d-89a226332d22 tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] Waiting for the task: (returnval){ [ 1814.405739] env[62519]: value = "task-1803036" [ 1814.405739] env[62519]: _type = "Task" [ 1814.405739] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1814.416460] env[62519]: DEBUG oslo_vmware.api [None req-34394a41-2f5a-4a60-841d-89a226332d22 tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] Task: {'id': task-1803036, 'name': ExtendVirtualDisk_Task} progress is 0%. 
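The CopyVirtualDisk_Task / ExtendVirtualDisk_Task pair above is the image-to-instance disk path: the cached image VMDK is copied next to the new VM and the root disk is then grown to the flavor size (the 1048576 logged above is in KB, i.e. a 1 GiB root disk). A hedged sketch of the two underlying VirtualDiskManager calls through an existing oslo.vmware session; the datacenter moref and datastore paths are placeholders and the keyword names follow the public vSphere API.

```python
def copy_and_extend_root_disk(session, dc_ref, source_vmdk, dest_vmdk, new_size_kb):
    """Copy a cached image VMDK to the instance folder, then grow it."""
    disk_mgr = session.vim.service_content.virtualDiskManager

    copy_task = session.invoke_api(
        session.vim, 'CopyVirtualDisk_Task', disk_mgr,
        sourceName=source_vmdk, sourceDatacenter=dc_ref, destName=dest_vmdk)
    session.wait_for_task(copy_task)

    extend_task = session.invoke_api(
        session.vim, 'ExtendVirtualDisk_Task', disk_mgr,
        name=dest_vmdk, datacenter=dc_ref,
        newCapacityKb=new_size_kb, eagerZero=False)
    session.wait_for_task(extend_task)


# Placeholder arguments mirroring the datastore paths in the log above:
# copy_and_extend_root_disk(
#     session, dc_ref,
#     '[datastore1] devstack-image-cache_base/<image-id>/<image-id>.vmdk',
#     '[datastore1] <instance-uuid>/<instance-uuid>.vmdk',
#     new_size_kb=1048576)
```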
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1814.448459] env[62519]: DEBUG oslo_concurrency.lockutils [None req-3672f208-1af3-4e50-89f3-c43121d90870 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Acquiring lock "refresh_cache-f3665f89-1747-4567-9e56-c937d4ac81da" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1814.448459] env[62519]: DEBUG oslo_concurrency.lockutils [None req-3672f208-1af3-4e50-89f3-c43121d90870 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Acquired lock "refresh_cache-f3665f89-1747-4567-9e56-c937d4ac81da" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1814.448459] env[62519]: DEBUG nova.network.neutron [None req-3672f208-1af3-4e50-89f3-c43121d90870 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] [instance: f3665f89-1747-4567-9e56-c937d4ac81da] Building network info cache for instance {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1814.515923] env[62519]: DEBUG oslo_concurrency.lockutils [None req-0ffe5d30-081a-4e0b-9cd4-c618bdbba7b6 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Acquiring lock "5da884af-d8d2-409b-99bd-e5370e44e9f0" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1814.515923] env[62519]: DEBUG oslo_concurrency.lockutils [None req-0ffe5d30-081a-4e0b-9cd4-c618bdbba7b6 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Lock "5da884af-d8d2-409b-99bd-e5370e44e9f0" acquired by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: waited 0.001s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1814.515994] env[62519]: DEBUG nova.compute.manager [None req-0ffe5d30-081a-4e0b-9cd4-c618bdbba7b6 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] [instance: 5da884af-d8d2-409b-99bd-e5370e44e9f0] Going to confirm migration 4 {{(pid=62519) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:5235}} [ 1814.565357] env[62519]: DEBUG oslo_concurrency.lockutils [req-3d5104a6-db15-40df-a453-03e380844c74 req-1b2579d9-86d7-4a80-8f13-69cfcff7d11f service nova] Releasing lock "refresh_cache-21daee7a-1788-4a1c-969e-65b696702bf2" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1814.568408] env[62519]: INFO nova.network.neutron [req-5199bc11-e3b2-43c2-8f55-69ee1acf8261 req-87a8b15d-0dd5-4a2b-8fcb-57ac9f1e8b68 service nova] [instance: bace23b3-b7f4-4f3b-8986-0076440d096d] Port e252f987-8815-4905-a206-237207d5ac4b from network info_cache is no longer associated with instance in Neutron. Removing from network info_cache. 
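The Acquiring / Acquired / Releasing lines above, with their "waited" and "held" timings, come from oslo.concurrency's lockutils wrappers around named locks such as "refresh_cache-<uuid>" and the per-instance resize-confirm lock. A minimal sketch of the two usual forms, the context manager and the decorator; the lock names and function bodies are illustrative only.

```python
from oslo_concurrency import lockutils

INSTANCE_UUID = 'bace23b3-b7f4-4f3b-8986-0076440d096d'  # taken from the log above


def refresh_network_cache(instance_uuid):
    # Context-manager form: serializes everything touching this instance's
    # network info cache, which is what the "refresh_cache-<uuid>" lines show.
    with lockutils.lock('refresh_cache-%s' % instance_uuid):
        pass  # ... rebuild the cache from Neutron here ...


# Decorator form: the whole function runs under the named lock.
@lockutils.synchronized('compute_resources')
def update_usage():
    pass  # ... mutate the resource tracker's shared state here ...


refresh_network_cache(INSTANCE_UUID)
update_usage()
```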
[ 1814.568408] env[62519]: DEBUG nova.network.neutron [req-5199bc11-e3b2-43c2-8f55-69ee1acf8261 req-87a8b15d-0dd5-4a2b-8fcb-57ac9f1e8b68 service nova] [instance: bace23b3-b7f4-4f3b-8986-0076440d096d] Updating instance_info_cache with network_info: [] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1814.654476] env[62519]: DEBUG oslo_vmware.api [None req-04c01760-a718-4b80-a003-ee7594a61827 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]520efcae-9fdb-b458-b4a2-6a8c44de0537, 'name': SearchDatastore_Task, 'duration_secs': 0.056426} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1814.654934] env[62519]: DEBUG oslo_concurrency.lockutils [None req-04c01760-a718-4b80-a003-ee7594a61827 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Releasing lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1814.655510] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-04c01760-a718-4b80-a003-ee7594a61827 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] [instance: 21daee7a-1788-4a1c-969e-65b696702bf2] Processing image 15793716-f1d9-4a86-9030-717adf498693 {{(pid=62519) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1814.655861] env[62519]: DEBUG oslo_concurrency.lockutils [None req-04c01760-a718-4b80-a003-ee7594a61827 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1814.656303] env[62519]: DEBUG oslo_concurrency.lockutils [None req-04c01760-a718-4b80-a003-ee7594a61827 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Acquired lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1814.656596] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-04c01760-a718-4b80-a003-ee7594a61827 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62519) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1814.657103] env[62519]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-af4bb317-a925-4557-9a3e-f070a60fa71c {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1814.680026] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-04c01760-a718-4b80-a003-ee7594a61827 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62519) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1814.680026] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-04c01760-a718-4b80-a003-ee7594a61827 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] 
Folder [datastore1] devstack-image-cache_base created. {{(pid=62519) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1814.680026] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-de13b52e-7637-4c60-8a57-d2b46678353e {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1814.685752] env[62519]: DEBUG oslo_vmware.api [None req-04c01760-a718-4b80-a003-ee7594a61827 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Waiting for the task: (returnval){ [ 1814.685752] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]52fac636-b72f-7af2-c91b-8c826cf2d873" [ 1814.685752] env[62519]: _type = "Task" [ 1814.685752] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1814.695300] env[62519]: DEBUG oslo_vmware.api [None req-04c01760-a718-4b80-a003-ee7594a61827 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52fac636-b72f-7af2-c91b-8c826cf2d873, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1814.724310] env[62519]: DEBUG nova.compute.manager [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] [instance: 91902e7f-8c15-447b-a3a8-04433434b1b6] Instance has had 0 of 5 cleanup attempts {{(pid=62519) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11733}} [ 1814.770960] env[62519]: DEBUG nova.network.neutron [-] [instance: bace23b3-b7f4-4f3b-8986-0076440d096d] Updating instance_info_cache with network_info: [] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1814.797541] env[62519]: DEBUG nova.compute.manager [req-27969c37-0894-41f1-9ba6-e1c48ac3d926 req-af77f142-aa55-4fe9-8d8a-21f81f6ed2ba service nova] [instance: f3665f89-1747-4567-9e56-c937d4ac81da] Received event network-vif-plugged-7220339b-d4a7-441b-8227-c956744ce0c0 {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1814.797765] env[62519]: DEBUG oslo_concurrency.lockutils [req-27969c37-0894-41f1-9ba6-e1c48ac3d926 req-af77f142-aa55-4fe9-8d8a-21f81f6ed2ba service nova] Acquiring lock "f3665f89-1747-4567-9e56-c937d4ac81da-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1814.798102] env[62519]: DEBUG oslo_concurrency.lockutils [req-27969c37-0894-41f1-9ba6-e1c48ac3d926 req-af77f142-aa55-4fe9-8d8a-21f81f6ed2ba service nova] Lock "f3665f89-1747-4567-9e56-c937d4ac81da-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1814.798153] env[62519]: DEBUG oslo_concurrency.lockutils [req-27969c37-0894-41f1-9ba6-e1c48ac3d926 req-af77f142-aa55-4fe9-8d8a-21f81f6ed2ba service nova] Lock "f3665f89-1747-4567-9e56-c937d4ac81da-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1814.798304] env[62519]: DEBUG nova.compute.manager 
[req-27969c37-0894-41f1-9ba6-e1c48ac3d926 req-af77f142-aa55-4fe9-8d8a-21f81f6ed2ba service nova] [instance: f3665f89-1747-4567-9e56-c937d4ac81da] No waiting events found dispatching network-vif-plugged-7220339b-d4a7-441b-8227-c956744ce0c0 {{(pid=62519) pop_instance_event /opt/stack/nova/nova/compute/manager.py:323}} [ 1814.798487] env[62519]: WARNING nova.compute.manager [req-27969c37-0894-41f1-9ba6-e1c48ac3d926 req-af77f142-aa55-4fe9-8d8a-21f81f6ed2ba service nova] [instance: f3665f89-1747-4567-9e56-c937d4ac81da] Received unexpected event network-vif-plugged-7220339b-d4a7-441b-8227-c956744ce0c0 for instance with vm_state building and task_state spawning. [ 1814.798615] env[62519]: DEBUG nova.compute.manager [req-27969c37-0894-41f1-9ba6-e1c48ac3d926 req-af77f142-aa55-4fe9-8d8a-21f81f6ed2ba service nova] [instance: f3665f89-1747-4567-9e56-c937d4ac81da] Received event network-changed-7220339b-d4a7-441b-8227-c956744ce0c0 {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1814.798808] env[62519]: DEBUG nova.compute.manager [req-27969c37-0894-41f1-9ba6-e1c48ac3d926 req-af77f142-aa55-4fe9-8d8a-21f81f6ed2ba service nova] [instance: f3665f89-1747-4567-9e56-c937d4ac81da] Refreshing instance network info cache due to event network-changed-7220339b-d4a7-441b-8227-c956744ce0c0. {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11628}} [ 1814.798910] env[62519]: DEBUG oslo_concurrency.lockutils [req-27969c37-0894-41f1-9ba6-e1c48ac3d926 req-af77f142-aa55-4fe9-8d8a-21f81f6ed2ba service nova] Acquiring lock "refresh_cache-f3665f89-1747-4567-9e56-c937d4ac81da" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1814.875033] env[62519]: DEBUG oslo_vmware.api [None req-233e6b89-3144-41a2-9f7d-54ed63bc311d tempest-ServerAddressesTestJSON-575205737 tempest-ServerAddressesTestJSON-575205737-project-member] Task: {'id': task-1803035, 'name': ReconfigVM_Task, 'duration_secs': 0.62822} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1814.875336] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-233e6b89-3144-41a2-9f7d-54ed63bc311d tempest-ServerAddressesTestJSON-575205737 tempest-ServerAddressesTestJSON-575205737-project-member] [instance: 1c4615c7-d145-4529-98bd-1ae3ed51e1b5] Reconfigured VM instance instance-0000005c to attach disk [datastore1] 1c4615c7-d145-4529-98bd-1ae3ed51e1b5/1c4615c7-d145-4529-98bd-1ae3ed51e1b5.vmdk or device None with type sparse {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1814.876010] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-29377611-362f-4f54-87af-e07cdf365d70 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1814.889398] env[62519]: DEBUG oslo_vmware.api [None req-233e6b89-3144-41a2-9f7d-54ed63bc311d tempest-ServerAddressesTestJSON-575205737 tempest-ServerAddressesTestJSON-575205737-project-member] Waiting for the task: (returnval){ [ 1814.889398] env[62519]: value = "task-1803037" [ 1814.889398] env[62519]: _type = "Task" [ 1814.889398] env[62519]: } to complete. 
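The "Received event network-vif-plugged-...", "No waiting events found dispatching ..." and "Received unexpected event ... for instance with vm_state building" entries above are Neutron notifying Nova that a port became active before the spawn path had registered a waiter for it. A much-simplified illustration of the underlying idea, waiters keyed by (instance, event name) backed by threading.Event; this is not Nova's actual implementation, just the shape of it.

```python
import threading

_waiters = {}          # (instance_uuid, event_name) -> threading.Event
_waiters_lock = threading.Lock()


def prepare_for_event(instance_uuid, event_name):
    """Register interest in an event before triggering the action that causes it."""
    ev = threading.Event()
    with _waiters_lock:
        _waiters[(instance_uuid, event_name)] = ev
    return ev


def dispatch_event(instance_uuid, event_name):
    """Called when an external event arrives; wakes the waiter if one exists."""
    with _waiters_lock:
        ev = _waiters.pop((instance_uuid, event_name), None)
    if ev is None:
        # Matches the "No waiting events found dispatching ..." log line above.
        print('unexpected event %s for %s' % (event_name, instance_uuid))
    else:
        ev.set()


waiter = prepare_for_event('f3665f89-1747-4567-9e56-c937d4ac81da',
                           'network-vif-plugged-7220339b-d4a7-441b-8227-c956744ce0c0')
dispatch_event('f3665f89-1747-4567-9e56-c937d4ac81da',
               'network-vif-plugged-7220339b-d4a7-441b-8227-c956744ce0c0')
waiter.wait(timeout=1)
```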
{{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1814.903841] env[62519]: DEBUG oslo_vmware.api [None req-233e6b89-3144-41a2-9f7d-54ed63bc311d tempest-ServerAddressesTestJSON-575205737 tempest-ServerAddressesTestJSON-575205737-project-member] Task: {'id': task-1803037, 'name': Rename_Task} progress is 6%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1814.916612] env[62519]: DEBUG oslo_vmware.api [None req-34394a41-2f5a-4a60-841d-89a226332d22 tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] Task: {'id': task-1803036, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.091696} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1814.916896] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-34394a41-2f5a-4a60-841d-89a226332d22 tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] [instance: 71c6bd0c-40da-44eb-8f37-4f1b7fb0f0a2] Extended root virtual disk {{(pid=62519) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1814.917823] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-747c31eb-6a3c-415a-a64b-dd8c14eac2cc {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1814.948850] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-34394a41-2f5a-4a60-841d-89a226332d22 tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] [instance: 71c6bd0c-40da-44eb-8f37-4f1b7fb0f0a2] Reconfiguring VM instance instance-0000005d to attach disk [datastore1] 71c6bd0c-40da-44eb-8f37-4f1b7fb0f0a2/71c6bd0c-40da-44eb-8f37-4f1b7fb0f0a2.vmdk or device None with type sparse {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1814.949912] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-51121e0c-70ac-49f3-9f7b-0ac43c2313b4 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1814.980406] env[62519]: DEBUG oslo_vmware.api [None req-34394a41-2f5a-4a60-841d-89a226332d22 tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] Waiting for the task: (returnval){ [ 1814.980406] env[62519]: value = "task-1803038" [ 1814.980406] env[62519]: _type = "Task" [ 1814.980406] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1814.990180] env[62519]: DEBUG oslo_vmware.api [None req-34394a41-2f5a-4a60-841d-89a226332d22 tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] Task: {'id': task-1803038, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1815.015087] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-35032167-8053-431a-a317-fa8f7342ca10 tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] [instance: 99f22198-1a65-4d0d-b665-90c7063dbdb9] Powering on the VM {{(pid=62519) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1815.015714] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-1f58770d-d246-46bc-b59b-0b3b06eac6cb {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1815.024677] env[62519]: DEBUG oslo_vmware.api [None req-35032167-8053-431a-a317-fa8f7342ca10 tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] Waiting for the task: (returnval){ [ 1815.024677] env[62519]: value = "task-1803039" [ 1815.024677] env[62519]: _type = "Task" [ 1815.024677] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1815.034547] env[62519]: DEBUG oslo_concurrency.lockutils [None req-b898f1b7-b423-4b02-be64-5a276d701cb0 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Acquiring lock "88f9351c-253b-49dd-a88e-b8585ea742ac" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1815.034788] env[62519]: DEBUG oslo_concurrency.lockutils [None req-b898f1b7-b423-4b02-be64-5a276d701cb0 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Lock "88f9351c-253b-49dd-a88e-b8585ea742ac" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1815.037753] env[62519]: DEBUG nova.network.neutron [None req-3672f208-1af3-4e50-89f3-c43121d90870 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] [instance: f3665f89-1747-4567-9e56-c937d4ac81da] Instance cache missing network info. {{(pid=62519) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1815.043650] env[62519]: DEBUG oslo_vmware.api [None req-35032167-8053-431a-a317-fa8f7342ca10 tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] Task: {'id': task-1803039, 'name': PowerOnVM_Task} progress is 0%. 
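"Powering on the VM" followed by a polled PowerOnVM_Task, as logged above, is the last step of the spawn path. A short hedged sketch of that call through an existing oslo.vmware session; the VM moref is assumed to come from elsewhere.

```python
def power_on_vm(session, vm_ref):
    """Power on a VM and block until vSphere reports the task finished."""
    task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
    session.wait_for_task(task)  # shows up as the PowerOnVM_Task progress lines above
```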
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1815.070829] env[62519]: DEBUG oslo_concurrency.lockutils [req-5199bc11-e3b2-43c2-8f55-69ee1acf8261 req-87a8b15d-0dd5-4a2b-8fcb-57ac9f1e8b68 service nova] Releasing lock "refresh_cache-bace23b3-b7f4-4f3b-8986-0076440d096d" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1815.162283] env[62519]: DEBUG oslo_concurrency.lockutils [None req-0ffe5d30-081a-4e0b-9cd4-c618bdbba7b6 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Acquiring lock "refresh_cache-5da884af-d8d2-409b-99bd-e5370e44e9f0" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1815.162283] env[62519]: DEBUG oslo_concurrency.lockutils [None req-0ffe5d30-081a-4e0b-9cd4-c618bdbba7b6 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Acquired lock "refresh_cache-5da884af-d8d2-409b-99bd-e5370e44e9f0" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1815.163028] env[62519]: DEBUG nova.network.neutron [None req-0ffe5d30-081a-4e0b-9cd4-c618bdbba7b6 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] [instance: 5da884af-d8d2-409b-99bd-e5370e44e9f0] Building network info cache for instance {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1815.163028] env[62519]: DEBUG nova.objects.instance [None req-0ffe5d30-081a-4e0b-9cd4-c618bdbba7b6 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Lazy-loading 'info_cache' on Instance uuid 5da884af-d8d2-409b-99bd-e5370e44e9f0 {{(pid=62519) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1815.200640] env[62519]: DEBUG oslo_vmware.api [None req-04c01760-a718-4b80-a003-ee7594a61827 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52fac636-b72f-7af2-c91b-8c826cf2d873, 'name': SearchDatastore_Task, 'duration_secs': 0.033525} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1815.204687] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9c063542-7053-4e42-82c2-7335dd47883c {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1815.212935] env[62519]: DEBUG oslo_vmware.api [None req-04c01760-a718-4b80-a003-ee7594a61827 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Waiting for the task: (returnval){ [ 1815.212935] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]525128c9-ebb4-e7f2-67c6-38f8f067ec6d" [ 1815.212935] env[62519]: _type = "Task" [ 1815.212935] env[62519]: } to complete. 
{{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1815.227409] env[62519]: DEBUG nova.compute.manager [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] [instance: 029ee07c-705d-452b-9b14-385d69f2fbbb] Instance has had 0 of 5 cleanup attempts {{(pid=62519) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11733}} [ 1815.234889] env[62519]: DEBUG oslo_vmware.api [None req-04c01760-a718-4b80-a003-ee7594a61827 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]525128c9-ebb4-e7f2-67c6-38f8f067ec6d, 'name': SearchDatastore_Task, 'duration_secs': 0.013415} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1815.235122] env[62519]: DEBUG oslo_concurrency.lockutils [None req-04c01760-a718-4b80-a003-ee7594a61827 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Releasing lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1815.235610] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-04c01760-a718-4b80-a003-ee7594a61827 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk to [datastore1] 21daee7a-1788-4a1c-969e-65b696702bf2/21daee7a-1788-4a1c-969e-65b696702bf2.vmdk {{(pid=62519) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1815.235610] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-64ddc7e6-37bc-426e-bf03-8b23b7a2ad07 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1815.244745] env[62519]: DEBUG oslo_vmware.api [None req-04c01760-a718-4b80-a003-ee7594a61827 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Waiting for the task: (returnval){ [ 1815.244745] env[62519]: value = "task-1803040" [ 1815.244745] env[62519]: _type = "Task" [ 1815.244745] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1815.256986] env[62519]: DEBUG oslo_vmware.api [None req-04c01760-a718-4b80-a003-ee7594a61827 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Task: {'id': task-1803040, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1815.271650] env[62519]: DEBUG nova.network.neutron [None req-3672f208-1af3-4e50-89f3-c43121d90870 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] [instance: f3665f89-1747-4567-9e56-c937d4ac81da] Updating instance_info_cache with network_info: [{"id": "7220339b-d4a7-441b-8227-c956744ce0c0", "address": "fa:16:3e:0f:ad:ae", "network": {"id": "8d760b11-0e90-40f8-a7a6-509485520bf3", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-619921944-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "448555031bb64aefafd0fcc67f4df10a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4225eb1f-0af4-4ed4-8e3d-de822eb6d4ea", "external-id": "nsx-vlan-transportzone-40", "segmentation_id": 40, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7220339b-d4", "ovs_interfaceid": "7220339b-d4a7-441b-8227-c956744ce0c0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1815.275569] env[62519]: INFO nova.compute.manager [-] [instance: bace23b3-b7f4-4f3b-8986-0076440d096d] Took 2.64 seconds to deallocate network for instance. [ 1815.350716] env[62519]: DEBUG oslo_concurrency.lockutils [None req-58d8e09b-2ec4-4296-bd95-201b039f8f45 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Lock "2bc8f11e-82fc-4acb-945e-15327c133920" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 42.742s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1815.392624] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98c9b0ce-a384-4b63-b400-90c68daa19bd {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1815.411890] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b95ca8f-88ed-41ac-bcc7-3019f2a103c6 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1815.416946] env[62519]: DEBUG oslo_vmware.api [None req-233e6b89-3144-41a2-9f7d-54ed63bc311d tempest-ServerAddressesTestJSON-575205737 tempest-ServerAddressesTestJSON-575205737-project-member] Task: {'id': task-1803037, 'name': Rename_Task, 'duration_secs': 0.250263} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1815.417365] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-233e6b89-3144-41a2-9f7d-54ed63bc311d tempest-ServerAddressesTestJSON-575205737 tempest-ServerAddressesTestJSON-575205737-project-member] [instance: 1c4615c7-d145-4529-98bd-1ae3ed51e1b5] Powering on the VM {{(pid=62519) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1815.421080] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-6d070b5a-5b15-49ed-ab62-be3e1fa11ed0 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1815.470870] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6db54085-8d21-4aeb-bf04-3600dba1f1e4 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1815.474293] env[62519]: DEBUG oslo_vmware.api [None req-233e6b89-3144-41a2-9f7d-54ed63bc311d tempest-ServerAddressesTestJSON-575205737 tempest-ServerAddressesTestJSON-575205737-project-member] Waiting for the task: (returnval){ [ 1815.474293] env[62519]: value = "task-1803041" [ 1815.474293] env[62519]: _type = "Task" [ 1815.474293] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1815.488179] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3cb4b703-97b9-42b8-a15b-11b86db3f89b {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1815.498638] env[62519]: DEBUG oslo_vmware.api [None req-233e6b89-3144-41a2-9f7d-54ed63bc311d tempest-ServerAddressesTestJSON-575205737 tempest-ServerAddressesTestJSON-575205737-project-member] Task: {'id': task-1803041, 'name': PowerOnVM_Task} progress is 33%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1815.506173] env[62519]: DEBUG oslo_vmware.api [None req-34394a41-2f5a-4a60-841d-89a226332d22 tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] Task: {'id': task-1803038, 'name': ReconfigVM_Task, 'duration_secs': 0.322931} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1815.519183] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-34394a41-2f5a-4a60-841d-89a226332d22 tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] [instance: 71c6bd0c-40da-44eb-8f37-4f1b7fb0f0a2] Reconfigured VM instance instance-0000005d to attach disk [datastore1] 71c6bd0c-40da-44eb-8f37-4f1b7fb0f0a2/71c6bd0c-40da-44eb-8f37-4f1b7fb0f0a2.vmdk or device None with type sparse {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1815.520450] env[62519]: DEBUG nova.compute.provider_tree [None req-de264d36-6a35-4acb-b918-a8ec7d4ea06e tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Inventory has not changed in ProviderTree for provider: f8ca0d98-9158-4b85-ae0e-b106f966dd44 {{(pid=62519) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1815.522184] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-16db8919-0877-40a4-bf5f-349cab8f9716 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1815.532973] env[62519]: DEBUG oslo_vmware.api [None req-34394a41-2f5a-4a60-841d-89a226332d22 tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] Waiting for the task: (returnval){ [ 1815.532973] env[62519]: value = "task-1803042" [ 1815.532973] env[62519]: _type = "Task" [ 1815.532973] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1815.537644] env[62519]: DEBUG oslo_vmware.api [None req-35032167-8053-431a-a317-fa8f7342ca10 tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] Task: {'id': task-1803039, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1815.541959] env[62519]: INFO nova.compute.manager [None req-b898f1b7-b423-4b02-be64-5a276d701cb0 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] [instance: 88f9351c-253b-49dd-a88e-b8585ea742ac] Detaching volume 5bd7c4a6-d552-4040-aefb-7d5577b89149 [ 1815.555172] env[62519]: DEBUG oslo_vmware.api [None req-34394a41-2f5a-4a60-841d-89a226332d22 tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] Task: {'id': task-1803042, 'name': Rename_Task} progress is 14%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1815.596441] env[62519]: INFO nova.virt.block_device [None req-b898f1b7-b423-4b02-be64-5a276d701cb0 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] [instance: 88f9351c-253b-49dd-a88e-b8585ea742ac] Attempting to driver detach volume 5bd7c4a6-d552-4040-aefb-7d5577b89149 from mountpoint /dev/sdb [ 1815.596890] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-b898f1b7-b423-4b02-be64-5a276d701cb0 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] [instance: 88f9351c-253b-49dd-a88e-b8585ea742ac] Volume detach. 
Driver type: vmdk {{(pid=62519) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1815.596890] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-b898f1b7-b423-4b02-be64-5a276d701cb0 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] [instance: 88f9351c-253b-49dd-a88e-b8585ea742ac] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-373788', 'volume_id': '5bd7c4a6-d552-4040-aefb-7d5577b89149', 'name': 'volume-5bd7c4a6-d552-4040-aefb-7d5577b89149', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '88f9351c-253b-49dd-a88e-b8585ea742ac', 'attached_at': '', 'detached_at': '', 'volume_id': '5bd7c4a6-d552-4040-aefb-7d5577b89149', 'serial': '5bd7c4a6-d552-4040-aefb-7d5577b89149'} {{(pid=62519) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1815.597915] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61d0e3d3-c1c1-455c-a189-b44f8ab9203d {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1815.624354] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77a214d1-1178-4ae3-b55a-ac30b0ff55aa {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1815.636693] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af2a6549-516e-4720-b8cc-0ed6d2f6f402 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1815.663659] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dfbda26a-fad9-45c9-a94c-5196a81031ce {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1815.687318] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-b898f1b7-b423-4b02-be64-5a276d701cb0 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] The volume has not been displaced from its original location: [datastore1] volume-5bd7c4a6-d552-4040-aefb-7d5577b89149/volume-5bd7c4a6-d552-4040-aefb-7d5577b89149.vmdk. No consolidation needed. 
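The _detach_volume_vmdk entry above dumps the Cinder connection_info the driver works from: driver_volume_type 'vmdk', the shadow volume VM ('vm-373788'), the volume UUID, and the access mode. A small plain-Python sketch of reading those fields before a detach; the dict below is trimmed from the values logged above and the helper is illustrative.

```python
# Trimmed copy of the connection_info logged above.
connection_info = {
    'driver_volume_type': 'vmdk',
    'data': {
        'volume': 'vm-373788',            # moref of the Cinder shadow VM
        'volume_id': '5bd7c4a6-d552-4040-aefb-7d5577b89149',
        'name': 'volume-5bd7c4a6-d552-4040-aefb-7d5577b89149',
        'access_mode': 'rw',
        'encrypted': False,
    },
}


def describe_detach(info):
    if info['driver_volume_type'] != 'vmdk':
        raise ValueError('expected a vmdk-backed volume')
    return ('detach volume %(volume_id)s (backing %(volume)s, %(access_mode)s)'
            % info['data'])


print(describe_detach(connection_info))
```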
{{(pid=62519) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1815.693431] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-b898f1b7-b423-4b02-be64-5a276d701cb0 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] [instance: 88f9351c-253b-49dd-a88e-b8585ea742ac] Reconfiguring VM instance instance-00000047 to detach disk 2001 {{(pid=62519) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1815.694915] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a56d0634-3bd2-463f-aaae-d8b3ec80f3a0 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1815.718403] env[62519]: DEBUG oslo_vmware.api [None req-b898f1b7-b423-4b02-be64-5a276d701cb0 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Waiting for the task: (returnval){ [ 1815.718403] env[62519]: value = "task-1803043" [ 1815.718403] env[62519]: _type = "Task" [ 1815.718403] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1815.732498] env[62519]: DEBUG oslo_vmware.api [None req-b898f1b7-b423-4b02-be64-5a276d701cb0 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Task: {'id': task-1803043, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1815.736038] env[62519]: DEBUG nova.compute.manager [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] [instance: 099112ae-569b-4853-bc47-b0b8b97d2525] Instance has had 0 of 5 cleanup attempts {{(pid=62519) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11733}} [ 1815.757834] env[62519]: DEBUG oslo_vmware.api [None req-04c01760-a718-4b80-a003-ee7594a61827 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Task: {'id': task-1803040, 'name': CopyVirtualDisk_Task} progress is 25%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1815.777341] env[62519]: DEBUG oslo_concurrency.lockutils [None req-3672f208-1af3-4e50-89f3-c43121d90870 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Releasing lock "refresh_cache-f3665f89-1747-4567-9e56-c937d4ac81da" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1815.777767] env[62519]: DEBUG nova.compute.manager [None req-3672f208-1af3-4e50-89f3-c43121d90870 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] [instance: f3665f89-1747-4567-9e56-c937d4ac81da] Instance network_info: |[{"id": "7220339b-d4a7-441b-8227-c956744ce0c0", "address": "fa:16:3e:0f:ad:ae", "network": {"id": "8d760b11-0e90-40f8-a7a6-509485520bf3", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-619921944-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "448555031bb64aefafd0fcc67f4df10a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4225eb1f-0af4-4ed4-8e3d-de822eb6d4ea", "external-id": "nsx-vlan-transportzone-40", "segmentation_id": 40, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7220339b-d4", "ovs_interfaceid": "7220339b-d4a7-441b-8227-c956744ce0c0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62519) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2004}} [ 1815.778697] env[62519]: DEBUG oslo_concurrency.lockutils [req-27969c37-0894-41f1-9ba6-e1c48ac3d926 req-af77f142-aa55-4fe9-8d8a-21f81f6ed2ba service nova] Acquired lock "refresh_cache-f3665f89-1747-4567-9e56-c937d4ac81da" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1815.779020] env[62519]: DEBUG nova.network.neutron [req-27969c37-0894-41f1-9ba6-e1c48ac3d926 req-af77f142-aa55-4fe9-8d8a-21f81f6ed2ba service nova] [instance: f3665f89-1747-4567-9e56-c937d4ac81da] Refreshing network info cache for port 7220339b-d4a7-441b-8227-c956744ce0c0 {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1815.780614] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-3672f208-1af3-4e50-89f3-c43121d90870 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] [instance: f3665f89-1747-4567-9e56-c937d4ac81da] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:0f:ad:ae', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '4225eb1f-0af4-4ed4-8e3d-de822eb6d4ea', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '7220339b-d4a7-441b-8227-c956744ce0c0', 'vif_model': 'vmxnet3'}] {{(pid=62519) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1815.788783] env[62519]: DEBUG oslo.service.loopingcall [None req-3672f208-1af3-4e50-89f3-c43121d90870 tempest-ServerRescueNegativeTestJSON-527997597 
tempest-ServerRescueNegativeTestJSON-527997597-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62519) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1815.792778] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f3665f89-1747-4567-9e56-c937d4ac81da] Creating VM on the ESX host {{(pid=62519) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1815.793323] env[62519]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-e5f40791-66ae-47e9-9010-68efb8a100de {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1815.817615] env[62519]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1815.817615] env[62519]: value = "task-1803044" [ 1815.817615] env[62519]: _type = "Task" [ 1815.817615] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1815.828798] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1803044, 'name': CreateVM_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1815.836770] env[62519]: INFO nova.compute.manager [None req-8026f4a0-6a99-4204-bf16-1e491a7b2e00 tempest-ServerActionsV293TestJSON-760697799 tempest-ServerActionsV293TestJSON-760697799-project-member] [instance: bace23b3-b7f4-4f3b-8986-0076440d096d] Took 0.56 seconds to detach 1 volumes for instance. [ 1815.839304] env[62519]: DEBUG nova.compute.manager [None req-8026f4a0-6a99-4204-bf16-1e491a7b2e00 tempest-ServerActionsV293TestJSON-760697799 tempest-ServerActionsV293TestJSON-760697799-project-member] [instance: bace23b3-b7f4-4f3b-8986-0076440d096d] Deleting volume: f0e225ed-ae23-4580-9ecb-e74214791c63 {{(pid=62519) _cleanup_volumes /opt/stack/nova/nova/compute/manager.py:3284}} [ 1815.989887] env[62519]: DEBUG oslo_vmware.api [None req-233e6b89-3144-41a2-9f7d-54ed63bc311d tempest-ServerAddressesTestJSON-575205737 tempest-ServerAddressesTestJSON-575205737-project-member] Task: {'id': task-1803041, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1816.025414] env[62519]: DEBUG nova.scheduler.client.report [None req-de264d36-6a35-4acb-b918-a8ec7d4ea06e tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Inventory has not changed for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 157, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1816.045281] env[62519]: DEBUG oslo_vmware.api [None req-35032167-8053-431a-a317-fa8f7342ca10 tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] Task: {'id': task-1803039, 'name': PowerOnVM_Task, 'duration_secs': 0.814524} completed successfully. 
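The "Inventory has not changed for provider ... based on inventory data" entry above is the compute node's inventory as reported to Placement. Usable capacity per resource class works out to (total - reserved) * allocation_ratio, so the figures logged above give 192 VCPUs, 196078 MB of RAM and 400 GB of disk. A short worked example with exactly those numbers:

```python
# Inventory as logged above for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44
# (min_unit/max_unit/step_size omitted for brevity).
inventory = {
    'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
    'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
}

for rc, inv in inventory.items():
    capacity = int((inv['total'] - inv['reserved']) * inv['allocation_ratio'])
    print(rc, capacity)
# VCPU 192
# MEMORY_MB 196078
# DISK_GB 400
```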
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1816.046059] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-35032167-8053-431a-a317-fa8f7342ca10 tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] [instance: 99f22198-1a65-4d0d-b665-90c7063dbdb9] Powered on the VM {{(pid=62519) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1816.046294] env[62519]: DEBUG nova.compute.manager [None req-35032167-8053-431a-a317-fa8f7342ca10 tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] [instance: 99f22198-1a65-4d0d-b665-90c7063dbdb9] Checking state {{(pid=62519) _get_power_state /opt/stack/nova/nova/compute/manager.py:1799}} [ 1816.047227] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9484d1a2-ede6-47e6-93e9-57e613b077f9 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1816.056750] env[62519]: DEBUG oslo_vmware.api [None req-34394a41-2f5a-4a60-841d-89a226332d22 tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] Task: {'id': task-1803042, 'name': Rename_Task} progress is 99%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1816.058778] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b176694c-fe81-499e-b4fb-7c7b16d02765 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1816.073246] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-7c2e571e-42a3-42dc-b59f-3795fba983ba tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] [instance: 2bc8f11e-82fc-4acb-945e-15327c133920] Suspending the VM {{(pid=62519) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1162}} [ 1816.075888] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.SuspendVM_Task with opID=oslo.vmware-3358d1cd-02bc-407d-a975-d34b19985d50 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1816.085102] env[62519]: DEBUG oslo_vmware.api [None req-7c2e571e-42a3-42dc-b59f-3795fba983ba tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Waiting for the task: (returnval){ [ 1816.085102] env[62519]: value = "task-1803046" [ 1816.085102] env[62519]: _type = "Task" [ 1816.085102] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1816.102271] env[62519]: DEBUG oslo_vmware.api [None req-7c2e571e-42a3-42dc-b59f-3795fba983ba tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Task: {'id': task-1803046, 'name': SuspendVM_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1816.232994] env[62519]: DEBUG oslo_vmware.api [None req-b898f1b7-b423-4b02-be64-5a276d701cb0 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Task: {'id': task-1803043, 'name': ReconfigVM_Task, 'duration_secs': 0.450696} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1816.233400] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-b898f1b7-b423-4b02-be64-5a276d701cb0 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] [instance: 88f9351c-253b-49dd-a88e-b8585ea742ac] Reconfigured VM instance instance-00000047 to detach disk 2001 {{(pid=62519) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1816.240309] env[62519]: DEBUG nova.compute.manager [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] [instance: 34d2991e-b6df-473d-8994-e45ff57ef131] Instance has had 0 of 5 cleanup attempts {{(pid=62519) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11733}} [ 1816.242625] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0c4daae7-b64f-4616-a5df-64ad02d2e414 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1816.263257] env[62519]: DEBUG nova.network.neutron [req-27969c37-0894-41f1-9ba6-e1c48ac3d926 req-af77f142-aa55-4fe9-8d8a-21f81f6ed2ba service nova] [instance: f3665f89-1747-4567-9e56-c937d4ac81da] Updated VIF entry in instance network info cache for port 7220339b-d4a7-441b-8227-c956744ce0c0. {{(pid=62519) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1816.263682] env[62519]: DEBUG nova.network.neutron [req-27969c37-0894-41f1-9ba6-e1c48ac3d926 req-af77f142-aa55-4fe9-8d8a-21f81f6ed2ba service nova] [instance: f3665f89-1747-4567-9e56-c937d4ac81da] Updating instance_info_cache with network_info: [{"id": "7220339b-d4a7-441b-8227-c956744ce0c0", "address": "fa:16:3e:0f:ad:ae", "network": {"id": "8d760b11-0e90-40f8-a7a6-509485520bf3", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-619921944-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "448555031bb64aefafd0fcc67f4df10a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4225eb1f-0af4-4ed4-8e3d-de822eb6d4ea", "external-id": "nsx-vlan-transportzone-40", "segmentation_id": 40, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7220339b-d4", "ovs_interfaceid": "7220339b-d4a7-441b-8227-c956744ce0c0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1816.269188] env[62519]: DEBUG oslo_vmware.api [None req-04c01760-a718-4b80-a003-ee7594a61827 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Task: {'id': task-1803040, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.73948} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1816.270375] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-04c01760-a718-4b80-a003-ee7594a61827 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk to [datastore1] 21daee7a-1788-4a1c-969e-65b696702bf2/21daee7a-1788-4a1c-969e-65b696702bf2.vmdk {{(pid=62519) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1816.270670] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-04c01760-a718-4b80-a003-ee7594a61827 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] [instance: 21daee7a-1788-4a1c-969e-65b696702bf2] Extending root virtual disk to 1048576 {{(pid=62519) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1816.271047] env[62519]: DEBUG oslo_vmware.api [None req-b898f1b7-b423-4b02-be64-5a276d701cb0 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Waiting for the task: (returnval){ [ 1816.271047] env[62519]: value = "task-1803047" [ 1816.271047] env[62519]: _type = "Task" [ 1816.271047] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1816.271253] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-fac7c51f-4e7e-4b6f-9b0a-0e637a43403a {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1816.285330] env[62519]: DEBUG oslo_vmware.api [None req-b898f1b7-b423-4b02-be64-5a276d701cb0 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Task: {'id': task-1803047, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1816.287036] env[62519]: DEBUG oslo_vmware.api [None req-04c01760-a718-4b80-a003-ee7594a61827 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Waiting for the task: (returnval){ [ 1816.287036] env[62519]: value = "task-1803048" [ 1816.287036] env[62519]: _type = "Task" [ 1816.287036] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1816.301441] env[62519]: DEBUG oslo_vmware.api [None req-04c01760-a718-4b80-a003-ee7594a61827 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Task: {'id': task-1803048, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1816.327934] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1803044, 'name': CreateVM_Task} progress is 25%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1816.400315] env[62519]: DEBUG oslo_concurrency.lockutils [None req-8026f4a0-6a99-4204-bf16-1e491a7b2e00 tempest-ServerActionsV293TestJSON-760697799 tempest-ServerActionsV293TestJSON-760697799-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1816.460584] env[62519]: DEBUG nova.network.neutron [None req-0ffe5d30-081a-4e0b-9cd4-c618bdbba7b6 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] [instance: 5da884af-d8d2-409b-99bd-e5370e44e9f0] Updating instance_info_cache with network_info: [{"id": "f8e70085-af0c-4731-8b61-cf5cd2460344", "address": "fa:16:3e:4f:98:8a", "network": {"id": "14e63db8-c42b-42e5-bd96-cab60c163758", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1328688204-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "df6727c290724a8ebef5188c77e91399", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2be3fdb5-359e-43bd-8c20-2ff00e81db55", "external-id": "nsx-vlan-transportzone-986", "segmentation_id": 986, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf8e70085-af", "ovs_interfaceid": "f8e70085-af0c-4731-8b61-cf5cd2460344", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1816.486553] env[62519]: DEBUG oslo_vmware.api [None req-233e6b89-3144-41a2-9f7d-54ed63bc311d tempest-ServerAddressesTestJSON-575205737 tempest-ServerAddressesTestJSON-575205737-project-member] Task: {'id': task-1803041, 'name': PowerOnVM_Task} progress is 88%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1816.534551] env[62519]: DEBUG oslo_concurrency.lockutils [None req-de264d36-6a35-4acb-b918-a8ec7d4ea06e tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.666s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1816.535102] env[62519]: DEBUG nova.compute.manager [None req-de264d36-6a35-4acb-b918-a8ec7d4ea06e tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] [instance: 4a9656fd-2b9f-4dd6-8b71-39e55813f2f6] Start building networks asynchronously for instance. 
{{(pid=62519) _build_resources /opt/stack/nova/nova/compute/manager.py:2838}} [ 1816.542023] env[62519]: DEBUG oslo_concurrency.lockutils [None req-8026f4a0-6a99-4204-bf16-1e491a7b2e00 tempest-ServerActionsV293TestJSON-760697799 tempest-ServerActionsV293TestJSON-760697799-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.138s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1816.542023] env[62519]: DEBUG nova.objects.instance [None req-8026f4a0-6a99-4204-bf16-1e491a7b2e00 tempest-ServerActionsV293TestJSON-760697799 tempest-ServerActionsV293TestJSON-760697799-project-member] Lazy-loading 'resources' on Instance uuid bace23b3-b7f4-4f3b-8986-0076440d096d {{(pid=62519) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1816.552425] env[62519]: DEBUG oslo_vmware.api [None req-34394a41-2f5a-4a60-841d-89a226332d22 tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] Task: {'id': task-1803042, 'name': Rename_Task} progress is 99%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1816.596066] env[62519]: DEBUG oslo_vmware.api [None req-7c2e571e-42a3-42dc-b59f-3795fba983ba tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Task: {'id': task-1803046, 'name': SuspendVM_Task} progress is 62%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1816.755941] env[62519]: DEBUG nova.compute.manager [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] [instance: 2b391628-18a2-4606-8c59-58ba642cee50] Instance has had 0 of 5 cleanup attempts {{(pid=62519) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11733}} [ 1816.771288] env[62519]: DEBUG oslo_concurrency.lockutils [req-27969c37-0894-41f1-9ba6-e1c48ac3d926 req-af77f142-aa55-4fe9-8d8a-21f81f6ed2ba service nova] Releasing lock "refresh_cache-f3665f89-1747-4567-9e56-c937d4ac81da" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1816.771704] env[62519]: DEBUG nova.compute.manager [req-27969c37-0894-41f1-9ba6-e1c48ac3d926 req-af77f142-aa55-4fe9-8d8a-21f81f6ed2ba service nova] [instance: bace23b3-b7f4-4f3b-8986-0076440d096d] Received event network-vif-deleted-e252f987-8815-4905-a206-237207d5ac4b {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1816.785342] env[62519]: DEBUG oslo_vmware.api [None req-b898f1b7-b423-4b02-be64-5a276d701cb0 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Task: {'id': task-1803047, 'name': ReconfigVM_Task, 'duration_secs': 0.456799} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1816.785671] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-b898f1b7-b423-4b02-be64-5a276d701cb0 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] [instance: 88f9351c-253b-49dd-a88e-b8585ea742ac] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-373788', 'volume_id': '5bd7c4a6-d552-4040-aefb-7d5577b89149', 'name': 'volume-5bd7c4a6-d552-4040-aefb-7d5577b89149', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '88f9351c-253b-49dd-a88e-b8585ea742ac', 'attached_at': '', 'detached_at': '', 'volume_id': '5bd7c4a6-d552-4040-aefb-7d5577b89149', 'serial': '5bd7c4a6-d552-4040-aefb-7d5577b89149'} {{(pid=62519) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1816.802926] env[62519]: DEBUG oslo_vmware.api [None req-04c01760-a718-4b80-a003-ee7594a61827 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Task: {'id': task-1803048, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.38511} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1816.803386] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-04c01760-a718-4b80-a003-ee7594a61827 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] [instance: 21daee7a-1788-4a1c-969e-65b696702bf2] Extended root virtual disk {{(pid=62519) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1816.804563] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e710ecd4-e30e-4cbc-a233-4d85ad72e090 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1816.835206] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-04c01760-a718-4b80-a003-ee7594a61827 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] [instance: 21daee7a-1788-4a1c-969e-65b696702bf2] Reconfiguring VM instance instance-0000005e to attach disk [datastore1] 21daee7a-1788-4a1c-969e-65b696702bf2/21daee7a-1788-4a1c-969e-65b696702bf2.vmdk or device None with type sparse {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1816.842339] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b48aefd6-a315-49b8-ba9a-3ade9a9e504f {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1816.865589] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1803044, 'name': CreateVM_Task} progress is 25%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1816.867198] env[62519]: DEBUG oslo_vmware.api [None req-04c01760-a718-4b80-a003-ee7594a61827 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Waiting for the task: (returnval){ [ 1816.867198] env[62519]: value = "task-1803049" [ 1816.867198] env[62519]: _type = "Task" [ 1816.867198] env[62519]: } to complete. 
{{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1816.876517] env[62519]: DEBUG oslo_vmware.api [None req-04c01760-a718-4b80-a003-ee7594a61827 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Task: {'id': task-1803049, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1816.963526] env[62519]: DEBUG oslo_concurrency.lockutils [None req-0ffe5d30-081a-4e0b-9cd4-c618bdbba7b6 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Releasing lock "refresh_cache-5da884af-d8d2-409b-99bd-e5370e44e9f0" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1816.963863] env[62519]: DEBUG nova.objects.instance [None req-0ffe5d30-081a-4e0b-9cd4-c618bdbba7b6 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Lazy-loading 'migration_context' on Instance uuid 5da884af-d8d2-409b-99bd-e5370e44e9f0 {{(pid=62519) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1816.987885] env[62519]: DEBUG oslo_vmware.api [None req-233e6b89-3144-41a2-9f7d-54ed63bc311d tempest-ServerAddressesTestJSON-575205737 tempest-ServerAddressesTestJSON-575205737-project-member] Task: {'id': task-1803041, 'name': PowerOnVM_Task, 'duration_secs': 1.413346} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1816.988476] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-233e6b89-3144-41a2-9f7d-54ed63bc311d tempest-ServerAddressesTestJSON-575205737 tempest-ServerAddressesTestJSON-575205737-project-member] [instance: 1c4615c7-d145-4529-98bd-1ae3ed51e1b5] Powered on the VM {{(pid=62519) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1816.988476] env[62519]: INFO nova.compute.manager [None req-233e6b89-3144-41a2-9f7d-54ed63bc311d tempest-ServerAddressesTestJSON-575205737 tempest-ServerAddressesTestJSON-575205737-project-member] [instance: 1c4615c7-d145-4529-98bd-1ae3ed51e1b5] Took 13.21 seconds to spawn the instance on the hypervisor. [ 1816.988654] env[62519]: DEBUG nova.compute.manager [None req-233e6b89-3144-41a2-9f7d-54ed63bc311d tempest-ServerAddressesTestJSON-575205737 tempest-ServerAddressesTestJSON-575205737-project-member] [instance: 1c4615c7-d145-4529-98bd-1ae3ed51e1b5] Checking state {{(pid=62519) _get_power_state /opt/stack/nova/nova/compute/manager.py:1799}} [ 1816.989518] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59ae4aa8-3712-4277-a2f5-38823fd30c91 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1817.041640] env[62519]: DEBUG nova.compute.utils [None req-de264d36-6a35-4acb-b918-a8ec7d4ea06e tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Using /dev/sd instead of None {{(pid=62519) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1817.048994] env[62519]: DEBUG nova.compute.manager [None req-de264d36-6a35-4acb-b918-a8ec7d4ea06e tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] [instance: 4a9656fd-2b9f-4dd6-8b71-39e55813f2f6] Allocating IP information in the background. 
{{(pid=62519) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1989}} [ 1817.049154] env[62519]: DEBUG nova.network.neutron [None req-de264d36-6a35-4acb-b918-a8ec7d4ea06e tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] [instance: 4a9656fd-2b9f-4dd6-8b71-39e55813f2f6] allocate_for_instance() {{(pid=62519) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1817.063413] env[62519]: DEBUG oslo_vmware.api [None req-34394a41-2f5a-4a60-841d-89a226332d22 tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] Task: {'id': task-1803042, 'name': Rename_Task, 'duration_secs': 1.211418} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1817.063717] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-34394a41-2f5a-4a60-841d-89a226332d22 tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] [instance: 71c6bd0c-40da-44eb-8f37-4f1b7fb0f0a2] Powering on the VM {{(pid=62519) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1817.063945] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f4b9cf09-8032-4055-a0ef-d605c8b8a771 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1817.075679] env[62519]: DEBUG oslo_vmware.api [None req-34394a41-2f5a-4a60-841d-89a226332d22 tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] Waiting for the task: (returnval){ [ 1817.075679] env[62519]: value = "task-1803050" [ 1817.075679] env[62519]: _type = "Task" [ 1817.075679] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1817.093686] env[62519]: DEBUG oslo_vmware.api [None req-34394a41-2f5a-4a60-841d-89a226332d22 tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] Task: {'id': task-1803050, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1817.104684] env[62519]: DEBUG oslo_vmware.api [None req-7c2e571e-42a3-42dc-b59f-3795fba983ba tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Task: {'id': task-1803046, 'name': SuspendVM_Task} progress is 100%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1817.153551] env[62519]: DEBUG nova.policy [None req-de264d36-6a35-4acb-b918-a8ec7d4ea06e tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b2fdec6fcda84ddeaaa1ee4ba6a58258', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '17cd969b1e7d4bd795748560caf80077', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62519) authorize /opt/stack/nova/nova/policy.py:192}} [ 1817.260233] env[62519]: DEBUG nova.compute.manager [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] [instance: c8b7568b-ba07-4f65-818b-f84910209361] Instance has had 0 of 5 cleanup attempts {{(pid=62519) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11733}} [ 1817.334396] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1803044, 'name': CreateVM_Task, 'duration_secs': 1.303553} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1817.337284] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f3665f89-1747-4567-9e56-c937d4ac81da] Created VM on the ESX host {{(pid=62519) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1817.338588] env[62519]: DEBUG oslo_concurrency.lockutils [None req-3672f208-1af3-4e50-89f3-c43121d90870 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1817.338588] env[62519]: DEBUG oslo_concurrency.lockutils [None req-3672f208-1af3-4e50-89f3-c43121d90870 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Acquired lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1817.338648] env[62519]: DEBUG oslo_concurrency.lockutils [None req-3672f208-1af3-4e50-89f3-c43121d90870 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1817.338910] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-91378a1c-5d22-4563-97c7-064c5668e633 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1817.345283] env[62519]: DEBUG oslo_vmware.api [None req-3672f208-1af3-4e50-89f3-c43121d90870 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Waiting for the task: (returnval){ [ 1817.345283] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]522fe793-9233-bb19-df94-2a2ec2199426" [ 1817.345283] env[62519]: _type = "Task" [ 1817.345283] 
env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1817.350422] env[62519]: DEBUG nova.objects.instance [None req-b898f1b7-b423-4b02-be64-5a276d701cb0 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Lazy-loading 'flavor' on Instance uuid 88f9351c-253b-49dd-a88e-b8585ea742ac {{(pid=62519) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1817.363864] env[62519]: DEBUG oslo_vmware.api [None req-3672f208-1af3-4e50-89f3-c43121d90870 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]522fe793-9233-bb19-df94-2a2ec2199426, 'name': SearchDatastore_Task, 'duration_secs': 0.015092} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1817.364858] env[62519]: DEBUG oslo_concurrency.lockutils [None req-3672f208-1af3-4e50-89f3-c43121d90870 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Releasing lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1817.365058] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-3672f208-1af3-4e50-89f3-c43121d90870 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] [instance: f3665f89-1747-4567-9e56-c937d4ac81da] Processing image 15793716-f1d9-4a86-9030-717adf498693 {{(pid=62519) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1817.365534] env[62519]: DEBUG oslo_concurrency.lockutils [None req-3672f208-1af3-4e50-89f3-c43121d90870 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1817.365534] env[62519]: DEBUG oslo_concurrency.lockutils [None req-3672f208-1af3-4e50-89f3-c43121d90870 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Acquired lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1817.365534] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-3672f208-1af3-4e50-89f3-c43121d90870 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62519) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1817.365851] env[62519]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-5d9aedc4-3e75-44d6-8181-50c0b9a51841 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1817.382230] env[62519]: DEBUG oslo_vmware.api [None req-04c01760-a718-4b80-a003-ee7594a61827 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Task: {'id': task-1803049, 'name': ReconfigVM_Task, 
'duration_secs': 0.453064} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1817.383669] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-04c01760-a718-4b80-a003-ee7594a61827 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] [instance: 21daee7a-1788-4a1c-969e-65b696702bf2] Reconfigured VM instance instance-0000005e to attach disk [datastore1] 21daee7a-1788-4a1c-969e-65b696702bf2/21daee7a-1788-4a1c-969e-65b696702bf2.vmdk or device None with type sparse {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1817.384836] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-3672f208-1af3-4e50-89f3-c43121d90870 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62519) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1817.389020] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-3672f208-1af3-4e50-89f3-c43121d90870 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62519) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1817.389020] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-9535cdac-9db2-4ddb-b4c6-564419664537 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1817.391747] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-196b9670-22f6-424e-b174-e886a9afe204 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1817.398339] env[62519]: DEBUG oslo_vmware.api [None req-3672f208-1af3-4e50-89f3-c43121d90870 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Waiting for the task: (returnval){ [ 1817.398339] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]526f6787-1c23-368f-cdfa-f5fc09a0de88" [ 1817.398339] env[62519]: _type = "Task" [ 1817.398339] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1817.403830] env[62519]: DEBUG oslo_vmware.api [None req-04c01760-a718-4b80-a003-ee7594a61827 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Waiting for the task: (returnval){ [ 1817.403830] env[62519]: value = "task-1803051" [ 1817.403830] env[62519]: _type = "Task" [ 1817.403830] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1817.412985] env[62519]: DEBUG oslo_vmware.api [None req-3672f208-1af3-4e50-89f3-c43121d90870 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]526f6787-1c23-368f-cdfa-f5fc09a0de88, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1817.422472] env[62519]: DEBUG oslo_vmware.api [None req-04c01760-a718-4b80-a003-ee7594a61827 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Task: {'id': task-1803051, 'name': Rename_Task} progress is 10%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1817.445429] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1ef7e4a-1f76-4f88-9ca2-623a87149926 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1817.454613] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8d3c34b-ccc0-4f6a-8be0-a8ebbe0b91e7 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1817.489931] env[62519]: DEBUG nova.objects.base [None req-0ffe5d30-081a-4e0b-9cd4-c618bdbba7b6 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Object Instance<5da884af-d8d2-409b-99bd-e5370e44e9f0> lazy-loaded attributes: info_cache,migration_context {{(pid=62519) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1817.491248] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b02089d5-0dfb-4e3f-8aea-063343dc69a5 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1817.496173] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4e65050-bfee-4967-bb06-76967e5f859a {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1817.530545] env[62519]: DEBUG nova.network.neutron [None req-de264d36-6a35-4acb-b918-a8ec7d4ea06e tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] [instance: 4a9656fd-2b9f-4dd6-8b71-39e55813f2f6] Successfully created port: 0f634cea-fa73-4a6c-855a-cee16f97b053 {{(pid=62519) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1817.532827] env[62519]: INFO nova.compute.manager [None req-233e6b89-3144-41a2-9f7d-54ed63bc311d tempest-ServerAddressesTestJSON-575205737 tempest-ServerAddressesTestJSON-575205737-project-member] [instance: 1c4615c7-d145-4529-98bd-1ae3ed51e1b5] Took 34.94 seconds to build instance. [ 1817.534548] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-630b6397-2fc4-4417-acfb-71de52386c25 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1817.538436] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9071595-5831-4b45-baea-bbac3fc5f633 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1817.545088] env[62519]: DEBUG nova.compute.manager [None req-de264d36-6a35-4acb-b918-a8ec7d4ea06e tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] [instance: 4a9656fd-2b9f-4dd6-8b71-39e55813f2f6] Start building block device mappings for instance. 
{{(pid=62519) _build_resources /opt/stack/nova/nova/compute/manager.py:2873}} [ 1817.559128] env[62519]: DEBUG oslo_vmware.api [None req-0ffe5d30-081a-4e0b-9cd4-c618bdbba7b6 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Waiting for the task: (returnval){ [ 1817.559128] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]52d70359-236a-c890-9a0c-d4a9aa113a21" [ 1817.559128] env[62519]: _type = "Task" [ 1817.559128] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1817.559934] env[62519]: DEBUG nova.compute.provider_tree [None req-8026f4a0-6a99-4204-bf16-1e491a7b2e00 tempest-ServerActionsV293TestJSON-760697799 tempest-ServerActionsV293TestJSON-760697799-project-member] Inventory has not changed in ProviderTree for provider: f8ca0d98-9158-4b85-ae0e-b106f966dd44 {{(pid=62519) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1817.574253] env[62519]: DEBUG oslo_vmware.api [None req-0ffe5d30-081a-4e0b-9cd4-c618bdbba7b6 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52d70359-236a-c890-9a0c-d4a9aa113a21, 'name': SearchDatastore_Task, 'duration_secs': 0.010299} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1817.574566] env[62519]: DEBUG oslo_concurrency.lockutils [None req-0ffe5d30-081a-4e0b-9cd4-c618bdbba7b6 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1817.596339] env[62519]: DEBUG oslo_vmware.api [None req-34394a41-2f5a-4a60-841d-89a226332d22 tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] Task: {'id': task-1803050, 'name': PowerOnVM_Task} progress is 94%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1817.604923] env[62519]: DEBUG oslo_vmware.api [None req-7c2e571e-42a3-42dc-b59f-3795fba983ba tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Task: {'id': task-1803046, 'name': SuspendVM_Task, 'duration_secs': 1.091079} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1817.605199] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-7c2e571e-42a3-42dc-b59f-3795fba983ba tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] [instance: 2bc8f11e-82fc-4acb-945e-15327c133920] Suspended the VM {{(pid=62519) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1166}} [ 1817.605593] env[62519]: DEBUG nova.compute.manager [None req-7c2e571e-42a3-42dc-b59f-3795fba983ba tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] [instance: 2bc8f11e-82fc-4acb-945e-15327c133920] Checking state {{(pid=62519) _get_power_state /opt/stack/nova/nova/compute/manager.py:1799}} [ 1817.606620] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7897cfe9-dfe2-4c98-8c1a-c9efc7da81f6 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1817.765806] env[62519]: DEBUG nova.compute.manager [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] [instance: fe350d30-6fbd-4813-9634-ed05984fecfd] Instance has had 0 of 5 cleanup attempts {{(pid=62519) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11733}} [ 1817.918744] env[62519]: DEBUG oslo_vmware.api [None req-04c01760-a718-4b80-a003-ee7594a61827 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Task: {'id': task-1803051, 'name': Rename_Task, 'duration_secs': 0.167037} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1817.919137] env[62519]: DEBUG oslo_vmware.api [None req-3672f208-1af3-4e50-89f3-c43121d90870 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]526f6787-1c23-368f-cdfa-f5fc09a0de88, 'name': SearchDatastore_Task, 'duration_secs': 0.012955} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1817.919412] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-04c01760-a718-4b80-a003-ee7594a61827 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] [instance: 21daee7a-1788-4a1c-969e-65b696702bf2] Powering on the VM {{(pid=62519) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1817.920272] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-fec19e14-c97c-4d48-9e94-a8e54f0beb17 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1817.921954] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-af3a679e-8aa6-4ae8-b624-c232e8dd022e {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1817.927950] env[62519]: DEBUG oslo_vmware.api [None req-3672f208-1af3-4e50-89f3-c43121d90870 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Waiting for the task: (returnval){ [ 1817.927950] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]524cf265-b07c-11ba-dc87-61dd2f236523" [ 1817.927950] env[62519]: _type = "Task" [ 1817.927950] env[62519]: } to complete. 
{{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1817.933172] env[62519]: DEBUG oslo_vmware.api [None req-04c01760-a718-4b80-a003-ee7594a61827 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Waiting for the task: (returnval){ [ 1817.933172] env[62519]: value = "task-1803052" [ 1817.933172] env[62519]: _type = "Task" [ 1817.933172] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1817.940310] env[62519]: DEBUG oslo_vmware.api [None req-3672f208-1af3-4e50-89f3-c43121d90870 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]524cf265-b07c-11ba-dc87-61dd2f236523, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1817.945972] env[62519]: DEBUG oslo_vmware.api [None req-04c01760-a718-4b80-a003-ee7594a61827 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Task: {'id': task-1803052, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1818.068031] env[62519]: DEBUG nova.scheduler.client.report [None req-8026f4a0-6a99-4204-bf16-1e491a7b2e00 tempest-ServerActionsV293TestJSON-760697799 tempest-ServerActionsV293TestJSON-760697799-project-member] Inventory has not changed for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 157, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1818.089142] env[62519]: DEBUG oslo_vmware.api [None req-34394a41-2f5a-4a60-841d-89a226332d22 tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] Task: {'id': task-1803050, 'name': PowerOnVM_Task, 'duration_secs': 0.62366} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1818.089423] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-34394a41-2f5a-4a60-841d-89a226332d22 tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] [instance: 71c6bd0c-40da-44eb-8f37-4f1b7fb0f0a2] Powered on the VM {{(pid=62519) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1818.089719] env[62519]: INFO nova.compute.manager [None req-34394a41-2f5a-4a60-841d-89a226332d22 tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] [instance: 71c6bd0c-40da-44eb-8f37-4f1b7fb0f0a2] Took 11.82 seconds to spawn the instance on the hypervisor. 
[ 1818.089794] env[62519]: DEBUG nova.compute.manager [None req-34394a41-2f5a-4a60-841d-89a226332d22 tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] [instance: 71c6bd0c-40da-44eb-8f37-4f1b7fb0f0a2] Checking state {{(pid=62519) _get_power_state /opt/stack/nova/nova/compute/manager.py:1799}} [ 1818.090752] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28d2a4f4-2362-4590-901d-30b31a4b3d10 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1818.269847] env[62519]: DEBUG oslo_service.periodic_task [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=62519) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1818.269847] env[62519]: DEBUG nova.compute.manager [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Cleaning up deleted instances with incomplete migration {{(pid=62519) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11758}} [ 1818.359863] env[62519]: DEBUG oslo_concurrency.lockutils [None req-b898f1b7-b423-4b02-be64-5a276d701cb0 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Lock "88f9351c-253b-49dd-a88e-b8585ea742ac" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.323s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1818.442993] env[62519]: DEBUG oslo_vmware.api [None req-3672f208-1af3-4e50-89f3-c43121d90870 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]524cf265-b07c-11ba-dc87-61dd2f236523, 'name': SearchDatastore_Task, 'duration_secs': 0.018443} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1818.443898] env[62519]: DEBUG oslo_concurrency.lockutils [None req-3672f208-1af3-4e50-89f3-c43121d90870 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Releasing lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1818.444313] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-3672f208-1af3-4e50-89f3-c43121d90870 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk to [datastore1] f3665f89-1747-4567-9e56-c937d4ac81da/f3665f89-1747-4567-9e56-c937d4ac81da.vmdk {{(pid=62519) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1818.444710] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d2bea31e-98d5-4b0e-b065-9dc407056d10 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1818.450588] env[62519]: DEBUG oslo_vmware.api [None req-04c01760-a718-4b80-a003-ee7594a61827 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Task: {'id': task-1803052, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1818.455937] env[62519]: DEBUG oslo_vmware.api [None req-3672f208-1af3-4e50-89f3-c43121d90870 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Waiting for the task: (returnval){ [ 1818.455937] env[62519]: value = "task-1803053" [ 1818.455937] env[62519]: _type = "Task" [ 1818.455937] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1818.463608] env[62519]: DEBUG oslo_vmware.api [None req-3672f208-1af3-4e50-89f3-c43121d90870 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Task: {'id': task-1803053, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1818.573105] env[62519]: DEBUG nova.compute.manager [None req-de264d36-6a35-4acb-b918-a8ec7d4ea06e tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] [instance: 4a9656fd-2b9f-4dd6-8b71-39e55813f2f6] Start spawning the instance on the hypervisor. 
{{(pid=62519) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2647}} [ 1818.576929] env[62519]: DEBUG oslo_concurrency.lockutils [None req-8026f4a0-6a99-4204-bf16-1e491a7b2e00 tempest-ServerActionsV293TestJSON-760697799 tempest-ServerActionsV293TestJSON-760697799-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.038s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1818.579834] env[62519]: DEBUG oslo_concurrency.lockutils [None req-0ffe5d30-081a-4e0b-9cd4-c618bdbba7b6 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 1.005s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1818.610940] env[62519]: DEBUG nova.virt.hardware [None req-de264d36-6a35-4acb-b918-a8ec7d4ea06e tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T07:56:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T07:55:55Z,direct_url=,disk_format='vmdk',id=15793716-f1d9-4a86-9030-717adf498693,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4069337684d348e0a1ab4eb6a1a2b14d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T07:55:56Z,virtual_size=,visibility=), allow threads: False {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1818.611249] env[62519]: DEBUG nova.virt.hardware [None req-de264d36-6a35-4acb-b918-a8ec7d4ea06e tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Flavor limits 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1818.611417] env[62519]: DEBUG nova.virt.hardware [None req-de264d36-6a35-4acb-b918-a8ec7d4ea06e tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Image limits 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1818.611600] env[62519]: DEBUG nova.virt.hardware [None req-de264d36-6a35-4acb-b918-a8ec7d4ea06e tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Flavor pref 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1818.611764] env[62519]: DEBUG nova.virt.hardware [None req-de264d36-6a35-4acb-b918-a8ec7d4ea06e tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Image pref 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1818.611938] env[62519]: DEBUG nova.virt.hardware [None req-de264d36-6a35-4acb-b918-a8ec7d4ea06e tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, 
threads=65536 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1818.612177] env[62519]: DEBUG nova.virt.hardware [None req-de264d36-6a35-4acb-b918-a8ec7d4ea06e tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1818.612340] env[62519]: DEBUG nova.virt.hardware [None req-de264d36-6a35-4acb-b918-a8ec7d4ea06e tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62519) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1818.612504] env[62519]: DEBUG nova.virt.hardware [None req-de264d36-6a35-4acb-b918-a8ec7d4ea06e tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Got 1 possible topologies {{(pid=62519) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1818.614666] env[62519]: DEBUG nova.virt.hardware [None req-de264d36-6a35-4acb-b918-a8ec7d4ea06e tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1818.614666] env[62519]: DEBUG nova.virt.hardware [None req-de264d36-6a35-4acb-b918-a8ec7d4ea06e tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1818.616943] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a3e70424-70fc-483d-84c9-31cfe767f1b5 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1818.621308] env[62519]: INFO nova.compute.manager [None req-34394a41-2f5a-4a60-841d-89a226332d22 tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] [instance: 71c6bd0c-40da-44eb-8f37-4f1b7fb0f0a2] Took 32.17 seconds to build instance. 
[ 1818.630422] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af5ce9d7-784c-409f-b3a8-21c977f4bac0 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1818.681470] env[62519]: DEBUG oslo_concurrency.lockutils [None req-149c9fda-49e0-4e6e-bcab-f1c24a98f12b tempest-ServerAddressesTestJSON-575205737 tempest-ServerAddressesTestJSON-575205737-project-member] Acquiring lock "1c4615c7-d145-4529-98bd-1ae3ed51e1b5" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1818.771260] env[62519]: DEBUG oslo_service.periodic_task [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=62519) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1818.946627] env[62519]: DEBUG oslo_vmware.api [None req-04c01760-a718-4b80-a003-ee7594a61827 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Task: {'id': task-1803052, 'name': PowerOnVM_Task} progress is 78%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1818.966672] env[62519]: DEBUG oslo_vmware.api [None req-3672f208-1af3-4e50-89f3-c43121d90870 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Task: {'id': task-1803053, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1819.040946] env[62519]: DEBUG oslo_concurrency.lockutils [None req-233e6b89-3144-41a2-9f7d-54ed63bc311d tempest-ServerAddressesTestJSON-575205737 tempest-ServerAddressesTestJSON-575205737-project-member] Lock "1c4615c7-d145-4529-98bd-1ae3ed51e1b5" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 37.462s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1819.041213] env[62519]: DEBUG oslo_concurrency.lockutils [None req-149c9fda-49e0-4e6e-bcab-f1c24a98f12b tempest-ServerAddressesTestJSON-575205737 tempest-ServerAddressesTestJSON-575205737-project-member] Lock "1c4615c7-d145-4529-98bd-1ae3ed51e1b5" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.360s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1819.041504] env[62519]: DEBUG oslo_concurrency.lockutils [None req-149c9fda-49e0-4e6e-bcab-f1c24a98f12b tempest-ServerAddressesTestJSON-575205737 tempest-ServerAddressesTestJSON-575205737-project-member] Acquiring lock "1c4615c7-d145-4529-98bd-1ae3ed51e1b5-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1819.041808] env[62519]: DEBUG oslo_concurrency.lockutils [None req-149c9fda-49e0-4e6e-bcab-f1c24a98f12b tempest-ServerAddressesTestJSON-575205737 tempest-ServerAddressesTestJSON-575205737-project-member] Lock "1c4615c7-d145-4529-98bd-1ae3ed51e1b5-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62519) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1819.041894] env[62519]: DEBUG oslo_concurrency.lockutils [None req-149c9fda-49e0-4e6e-bcab-f1c24a98f12b tempest-ServerAddressesTestJSON-575205737 tempest-ServerAddressesTestJSON-575205737-project-member] Lock "1c4615c7-d145-4529-98bd-1ae3ed51e1b5-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1819.044326] env[62519]: INFO nova.compute.manager [None req-149c9fda-49e0-4e6e-bcab-f1c24a98f12b tempest-ServerAddressesTestJSON-575205737 tempest-ServerAddressesTestJSON-575205737-project-member] [instance: 1c4615c7-d145-4529-98bd-1ae3ed51e1b5] Terminating instance [ 1819.099548] env[62519]: DEBUG oslo_concurrency.lockutils [None req-8026f4a0-6a99-4204-bf16-1e491a7b2e00 tempest-ServerActionsV293TestJSON-760697799 tempest-ServerActionsV293TestJSON-760697799-project-member] Lock "bace23b3-b7f4-4f3b-8986-0076440d096d" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 7.111s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1819.200998] env[62519]: DEBUG oslo_concurrency.lockutils [None req-8f79086b-5bde-403c-9e87-ab8a69033e69 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Acquiring lock "2bc8f11e-82fc-4acb-945e-15327c133920" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1819.201289] env[62519]: DEBUG oslo_concurrency.lockutils [None req-8f79086b-5bde-403c-9e87-ab8a69033e69 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Lock "2bc8f11e-82fc-4acb-945e-15327c133920" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1819.201501] env[62519]: DEBUG oslo_concurrency.lockutils [None req-8f79086b-5bde-403c-9e87-ab8a69033e69 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Acquiring lock "2bc8f11e-82fc-4acb-945e-15327c133920-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1819.201684] env[62519]: DEBUG oslo_concurrency.lockutils [None req-8f79086b-5bde-403c-9e87-ab8a69033e69 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Lock "2bc8f11e-82fc-4acb-945e-15327c133920-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1819.201850] env[62519]: DEBUG oslo_concurrency.lockutils [None req-8f79086b-5bde-403c-9e87-ab8a69033e69 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Lock "2bc8f11e-82fc-4acb-945e-15327c133920-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62519) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1819.203904] env[62519]: INFO nova.compute.manager [None req-8f79086b-5bde-403c-9e87-ab8a69033e69 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] [instance: 2bc8f11e-82fc-4acb-945e-15327c133920] Terminating instance [ 1819.235240] env[62519]: DEBUG nova.compute.manager [req-144de128-f5cc-423e-8dc7-a93251a80599 req-ffcfa10b-c7fc-4804-b03f-99c138cbefa6 service nova] [instance: 4a9656fd-2b9f-4dd6-8b71-39e55813f2f6] Received event network-vif-plugged-0f634cea-fa73-4a6c-855a-cee16f97b053 {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1819.235240] env[62519]: DEBUG oslo_concurrency.lockutils [req-144de128-f5cc-423e-8dc7-a93251a80599 req-ffcfa10b-c7fc-4804-b03f-99c138cbefa6 service nova] Acquiring lock "4a9656fd-2b9f-4dd6-8b71-39e55813f2f6-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1819.235240] env[62519]: DEBUG oslo_concurrency.lockutils [req-144de128-f5cc-423e-8dc7-a93251a80599 req-ffcfa10b-c7fc-4804-b03f-99c138cbefa6 service nova] Lock "4a9656fd-2b9f-4dd6-8b71-39e55813f2f6-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1819.235240] env[62519]: DEBUG oslo_concurrency.lockutils [req-144de128-f5cc-423e-8dc7-a93251a80599 req-ffcfa10b-c7fc-4804-b03f-99c138cbefa6 service nova] Lock "4a9656fd-2b9f-4dd6-8b71-39e55813f2f6-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1819.235240] env[62519]: DEBUG nova.compute.manager [req-144de128-f5cc-423e-8dc7-a93251a80599 req-ffcfa10b-c7fc-4804-b03f-99c138cbefa6 service nova] [instance: 4a9656fd-2b9f-4dd6-8b71-39e55813f2f6] No waiting events found dispatching network-vif-plugged-0f634cea-fa73-4a6c-855a-cee16f97b053 {{(pid=62519) pop_instance_event /opt/stack/nova/nova/compute/manager.py:323}} [ 1819.235240] env[62519]: WARNING nova.compute.manager [req-144de128-f5cc-423e-8dc7-a93251a80599 req-ffcfa10b-c7fc-4804-b03f-99c138cbefa6 service nova] [instance: 4a9656fd-2b9f-4dd6-8b71-39e55813f2f6] Received unexpected event network-vif-plugged-0f634cea-fa73-4a6c-855a-cee16f97b053 for instance with vm_state building and task_state spawning. 
[ 1819.259221] env[62519]: DEBUG oslo_concurrency.lockutils [None req-d2806f10-2da5-44e0-8131-a6bdab586889 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Acquiring lock "88f9351c-253b-49dd-a88e-b8585ea742ac" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1819.259221] env[62519]: DEBUG oslo_concurrency.lockutils [None req-d2806f10-2da5-44e0-8131-a6bdab586889 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Lock "88f9351c-253b-49dd-a88e-b8585ea742ac" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1819.259221] env[62519]: DEBUG oslo_concurrency.lockutils [None req-d2806f10-2da5-44e0-8131-a6bdab586889 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Acquiring lock "88f9351c-253b-49dd-a88e-b8585ea742ac-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1819.259221] env[62519]: DEBUG oslo_concurrency.lockutils [None req-d2806f10-2da5-44e0-8131-a6bdab586889 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Lock "88f9351c-253b-49dd-a88e-b8585ea742ac-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1819.259221] env[62519]: DEBUG oslo_concurrency.lockutils [None req-d2806f10-2da5-44e0-8131-a6bdab586889 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Lock "88f9351c-253b-49dd-a88e-b8585ea742ac-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1819.265146] env[62519]: INFO nova.compute.manager [None req-d2806f10-2da5-44e0-8131-a6bdab586889 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] [instance: 88f9351c-253b-49dd-a88e-b8585ea742ac] Terminating instance [ 1819.374643] env[62519]: DEBUG nova.network.neutron [None req-de264d36-6a35-4acb-b918-a8ec7d4ea06e tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] [instance: 4a9656fd-2b9f-4dd6-8b71-39e55813f2f6] Successfully updated port: 0f634cea-fa73-4a6c-855a-cee16f97b053 {{(pid=62519) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1819.441586] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f89dda3d-b9c6-4eb7-a699-4963fc757b1a {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1819.450373] env[62519]: DEBUG oslo_vmware.api [None req-04c01760-a718-4b80-a003-ee7594a61827 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Task: {'id': task-1803052, 'name': PowerOnVM_Task, 'duration_secs': 1.39381} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1819.452424] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-04c01760-a718-4b80-a003-ee7594a61827 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] [instance: 21daee7a-1788-4a1c-969e-65b696702bf2] Powered on the VM {{(pid=62519) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1819.452654] env[62519]: INFO nova.compute.manager [None req-04c01760-a718-4b80-a003-ee7594a61827 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] [instance: 21daee7a-1788-4a1c-969e-65b696702bf2] Took 9.26 seconds to spawn the instance on the hypervisor. [ 1819.452840] env[62519]: DEBUG nova.compute.manager [None req-04c01760-a718-4b80-a003-ee7594a61827 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] [instance: 21daee7a-1788-4a1c-969e-65b696702bf2] Checking state {{(pid=62519) _get_power_state /opt/stack/nova/nova/compute/manager.py:1799}} [ 1819.453673] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ba58255-22e8-4c9a-bb3b-a4298c18b949 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1819.456990] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4f901c9-e215-4c07-a629-0864b7f6f837 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1819.497384] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7cc77668-9f38-4ae1-9884-9fb918b58803 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1819.504740] env[62519]: DEBUG oslo_vmware.api [None req-3672f208-1af3-4e50-89f3-c43121d90870 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Task: {'id': task-1803053, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1819.511324] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-860148f5-4cd8-45c3-b9a2-d88c4e735ac4 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1819.527824] env[62519]: DEBUG nova.compute.provider_tree [None req-0ffe5d30-081a-4e0b-9cd4-c618bdbba7b6 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Inventory has not changed in ProviderTree for provider: f8ca0d98-9158-4b85-ae0e-b106f966dd44 {{(pid=62519) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1819.548414] env[62519]: DEBUG nova.compute.manager [None req-149c9fda-49e0-4e6e-bcab-f1c24a98f12b tempest-ServerAddressesTestJSON-575205737 tempest-ServerAddressesTestJSON-575205737-project-member] [instance: 1c4615c7-d145-4529-98bd-1ae3ed51e1b5] Start destroying the instance on the hypervisor. 
{{(pid=62519) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3166}} [ 1819.548414] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-149c9fda-49e0-4e6e-bcab-f1c24a98f12b tempest-ServerAddressesTestJSON-575205737 tempest-ServerAddressesTestJSON-575205737-project-member] [instance: 1c4615c7-d145-4529-98bd-1ae3ed51e1b5] Destroying instance {{(pid=62519) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1819.549332] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09a33ab6-957b-4216-9c92-4a0c3d897397 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1819.558277] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-149c9fda-49e0-4e6e-bcab-f1c24a98f12b tempest-ServerAddressesTestJSON-575205737 tempest-ServerAddressesTestJSON-575205737-project-member] [instance: 1c4615c7-d145-4529-98bd-1ae3ed51e1b5] Powering off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1819.558552] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-3aeb17d1-62cd-4d9c-aad9-487af81b3213 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1819.566460] env[62519]: DEBUG oslo_vmware.api [None req-149c9fda-49e0-4e6e-bcab-f1c24a98f12b tempest-ServerAddressesTestJSON-575205737 tempest-ServerAddressesTestJSON-575205737-project-member] Waiting for the task: (returnval){ [ 1819.566460] env[62519]: value = "task-1803054" [ 1819.566460] env[62519]: _type = "Task" [ 1819.566460] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1819.575443] env[62519]: DEBUG oslo_vmware.api [None req-149c9fda-49e0-4e6e-bcab-f1c24a98f12b tempest-ServerAddressesTestJSON-575205737 tempest-ServerAddressesTestJSON-575205737-project-member] Task: {'id': task-1803054, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1819.708429] env[62519]: DEBUG nova.compute.manager [None req-8f79086b-5bde-403c-9e87-ab8a69033e69 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] [instance: 2bc8f11e-82fc-4acb-945e-15327c133920] Start destroying the instance on the hypervisor. 
{{(pid=62519) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3166}} [ 1819.709430] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-8f79086b-5bde-403c-9e87-ab8a69033e69 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] [instance: 2bc8f11e-82fc-4acb-945e-15327c133920] Destroying instance {{(pid=62519) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1819.710156] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39225b7e-e8a7-486d-bea7-4862c11f4b78 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1819.718670] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-8f79086b-5bde-403c-9e87-ab8a69033e69 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] [instance: 2bc8f11e-82fc-4acb-945e-15327c133920] Unregistering the VM {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1819.718936] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-0f035ee9-4888-4a94-8a86-1e5a33b11f60 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1819.778397] env[62519]: DEBUG nova.compute.manager [None req-d2806f10-2da5-44e0-8131-a6bdab586889 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] [instance: 88f9351c-253b-49dd-a88e-b8585ea742ac] Start destroying the instance on the hypervisor. {{(pid=62519) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3166}} [ 1819.778601] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-d2806f10-2da5-44e0-8131-a6bdab586889 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] [instance: 88f9351c-253b-49dd-a88e-b8585ea742ac] Destroying instance {{(pid=62519) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1819.779947] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6c25db8-9308-4ef1-af9b-3c7b0e5d665a {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1819.790586] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-d2806f10-2da5-44e0-8131-a6bdab586889 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] [instance: 88f9351c-253b-49dd-a88e-b8585ea742ac] Powering off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1819.790586] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-0e76e69e-4573-470b-bccf-3f21059ab1bc {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1819.798249] env[62519]: DEBUG oslo_vmware.api [None req-d2806f10-2da5-44e0-8131-a6bdab586889 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Waiting for the task: (returnval){ [ 1819.798249] env[62519]: value = "task-1803056" [ 1819.798249] env[62519]: _type = "Task" [ 1819.798249] env[62519]: } to complete. 
{{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1819.808336] env[62519]: DEBUG oslo_vmware.api [None req-d2806f10-2da5-44e0-8131-a6bdab586889 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Task: {'id': task-1803056, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1819.879053] env[62519]: DEBUG oslo_concurrency.lockutils [None req-de264d36-6a35-4acb-b918-a8ec7d4ea06e tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Acquiring lock "refresh_cache-4a9656fd-2b9f-4dd6-8b71-39e55813f2f6" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1819.879288] env[62519]: DEBUG oslo_concurrency.lockutils [None req-de264d36-6a35-4acb-b918-a8ec7d4ea06e tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Acquired lock "refresh_cache-4a9656fd-2b9f-4dd6-8b71-39e55813f2f6" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1819.879450] env[62519]: DEBUG nova.network.neutron [None req-de264d36-6a35-4acb-b918-a8ec7d4ea06e tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] [instance: 4a9656fd-2b9f-4dd6-8b71-39e55813f2f6] Building network info cache for instance {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1819.979029] env[62519]: DEBUG oslo_vmware.api [None req-3672f208-1af3-4e50-89f3-c43121d90870 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Task: {'id': task-1803053, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1820.007012] env[62519]: INFO nova.compute.manager [None req-04c01760-a718-4b80-a003-ee7594a61827 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] [instance: 21daee7a-1788-4a1c-969e-65b696702bf2] Took 25.08 seconds to build instance. [ 1820.033159] env[62519]: DEBUG nova.scheduler.client.report [None req-0ffe5d30-081a-4e0b-9cd4-c618bdbba7b6 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Inventory has not changed for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 157, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1820.078909] env[62519]: DEBUG oslo_vmware.api [None req-149c9fda-49e0-4e6e-bcab-f1c24a98f12b tempest-ServerAddressesTestJSON-575205737 tempest-ServerAddressesTestJSON-575205737-project-member] Task: {'id': task-1803054, 'name': PowerOffVM_Task, 'duration_secs': 0.21132} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1820.078909] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-149c9fda-49e0-4e6e-bcab-f1c24a98f12b tempest-ServerAddressesTestJSON-575205737 tempest-ServerAddressesTestJSON-575205737-project-member] [instance: 1c4615c7-d145-4529-98bd-1ae3ed51e1b5] Powered off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1820.078909] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-149c9fda-49e0-4e6e-bcab-f1c24a98f12b tempest-ServerAddressesTestJSON-575205737 tempest-ServerAddressesTestJSON-575205737-project-member] [instance: 1c4615c7-d145-4529-98bd-1ae3ed51e1b5] Unregistering the VM {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1820.079760] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-257637f6-1791-4d31-afc7-f02cd7296aec {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1820.130175] env[62519]: DEBUG oslo_concurrency.lockutils [None req-34394a41-2f5a-4a60-841d-89a226332d22 tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] Lock "71c6bd0c-40da-44eb-8f37-4f1b7fb0f0a2" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 34.686s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1820.151322] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-8f79086b-5bde-403c-9e87-ab8a69033e69 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] [instance: 2bc8f11e-82fc-4acb-945e-15327c133920] Unregistered the VM {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1820.151322] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-8f79086b-5bde-403c-9e87-ab8a69033e69 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] [instance: 2bc8f11e-82fc-4acb-945e-15327c133920] Deleting contents of the VM from datastore datastore1 {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1820.151322] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-8f79086b-5bde-403c-9e87-ab8a69033e69 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Deleting the datastore file [datastore1] 2bc8f11e-82fc-4acb-945e-15327c133920 {{(pid=62519) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1820.152050] env[62519]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-cc61c97a-5d03-492d-8af2-1e7f0f14e9d3 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1820.167569] env[62519]: DEBUG oslo_vmware.api [None req-8f79086b-5bde-403c-9e87-ab8a69033e69 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Waiting for the task: (returnval){ [ 1820.167569] env[62519]: value = "task-1803058" [ 1820.167569] env[62519]: _type = "Task" [ 1820.167569] env[62519]: } to complete. 
{{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1820.168676] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-149c9fda-49e0-4e6e-bcab-f1c24a98f12b tempest-ServerAddressesTestJSON-575205737 tempest-ServerAddressesTestJSON-575205737-project-member] [instance: 1c4615c7-d145-4529-98bd-1ae3ed51e1b5] Unregistered the VM {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1820.172097] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-149c9fda-49e0-4e6e-bcab-f1c24a98f12b tempest-ServerAddressesTestJSON-575205737 tempest-ServerAddressesTestJSON-575205737-project-member] [instance: 1c4615c7-d145-4529-98bd-1ae3ed51e1b5] Deleting contents of the VM from datastore datastore1 {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1820.172097] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-149c9fda-49e0-4e6e-bcab-f1c24a98f12b tempest-ServerAddressesTestJSON-575205737 tempest-ServerAddressesTestJSON-575205737-project-member] Deleting the datastore file [datastore1] 1c4615c7-d145-4529-98bd-1ae3ed51e1b5 {{(pid=62519) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1820.174489] env[62519]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a215650f-e026-48c1-ad2e-34f9e9fe6e4a {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1820.182710] env[62519]: DEBUG oslo_vmware.api [None req-8f79086b-5bde-403c-9e87-ab8a69033e69 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Task: {'id': task-1803058, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1820.184149] env[62519]: DEBUG oslo_vmware.api [None req-149c9fda-49e0-4e6e-bcab-f1c24a98f12b tempest-ServerAddressesTestJSON-575205737 tempest-ServerAddressesTestJSON-575205737-project-member] Waiting for the task: (returnval){ [ 1820.184149] env[62519]: value = "task-1803059" [ 1820.184149] env[62519]: _type = "Task" [ 1820.184149] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1820.193720] env[62519]: DEBUG oslo_vmware.api [None req-149c9fda-49e0-4e6e-bcab-f1c24a98f12b tempest-ServerAddressesTestJSON-575205737 tempest-ServerAddressesTestJSON-575205737-project-member] Task: {'id': task-1803059, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1820.311480] env[62519]: DEBUG oslo_vmware.api [None req-d2806f10-2da5-44e0-8131-a6bdab586889 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Task: {'id': task-1803056, 'name': PowerOffVM_Task, 'duration_secs': 0.209165} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1820.311848] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-d2806f10-2da5-44e0-8131-a6bdab586889 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] [instance: 88f9351c-253b-49dd-a88e-b8585ea742ac] Powered off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1820.312077] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-d2806f10-2da5-44e0-8131-a6bdab586889 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] [instance: 88f9351c-253b-49dd-a88e-b8585ea742ac] Unregistering the VM {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1820.312454] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-3970d153-0943-4fde-8560-365512acaa1e {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1820.403874] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-d2806f10-2da5-44e0-8131-a6bdab586889 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] [instance: 88f9351c-253b-49dd-a88e-b8585ea742ac] Unregistered the VM {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1820.404247] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-d2806f10-2da5-44e0-8131-a6bdab586889 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] [instance: 88f9351c-253b-49dd-a88e-b8585ea742ac] Deleting contents of the VM from datastore datastore1 {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1820.404411] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-d2806f10-2da5-44e0-8131-a6bdab586889 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Deleting the datastore file [datastore1] 88f9351c-253b-49dd-a88e-b8585ea742ac {{(pid=62519) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1820.404563] env[62519]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-25376d1b-fbd4-402f-b2e0-1be1710c1eed {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1820.413826] env[62519]: DEBUG oslo_vmware.api [None req-d2806f10-2da5-44e0-8131-a6bdab586889 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Waiting for the task: (returnval){ [ 1820.413826] env[62519]: value = "task-1803061" [ 1820.413826] env[62519]: _type = "Task" [ 1820.413826] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1820.423838] env[62519]: DEBUG oslo_vmware.api [None req-d2806f10-2da5-44e0-8131-a6bdab586889 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Task: {'id': task-1803061, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1820.449514] env[62519]: DEBUG nova.network.neutron [None req-de264d36-6a35-4acb-b918-a8ec7d4ea06e tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] [instance: 4a9656fd-2b9f-4dd6-8b71-39e55813f2f6] Instance cache missing network info. {{(pid=62519) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1820.475665] env[62519]: DEBUG oslo_vmware.api [None req-3672f208-1af3-4e50-89f3-c43121d90870 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Task: {'id': task-1803053, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1820.657351] env[62519]: DEBUG nova.network.neutron [None req-de264d36-6a35-4acb-b918-a8ec7d4ea06e tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] [instance: 4a9656fd-2b9f-4dd6-8b71-39e55813f2f6] Updating instance_info_cache with network_info: [{"id": "0f634cea-fa73-4a6c-855a-cee16f97b053", "address": "fa:16:3e:51:09:24", "network": {"id": "06bc21fc-5712-4650-9d06-18e9cc6afd29", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1062109402-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "17cd969b1e7d4bd795748560caf80077", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "89f807d9-140f-4a6f-8bce-96795f9482ee", "external-id": "nsx-vlan-transportzone-762", "segmentation_id": 762, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0f634cea-fa", "ovs_interfaceid": "0f634cea-fa73-4a6c-855a-cee16f97b053", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1820.682183] env[62519]: DEBUG oslo_vmware.api [None req-8f79086b-5bde-403c-9e87-ab8a69033e69 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Task: {'id': task-1803058, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1820.696727] env[62519]: DEBUG oslo_vmware.api [None req-149c9fda-49e0-4e6e-bcab-f1c24a98f12b tempest-ServerAddressesTestJSON-575205737 tempest-ServerAddressesTestJSON-575205737-project-member] Task: {'id': task-1803059, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1820.928030] env[62519]: DEBUG oslo_vmware.api [None req-d2806f10-2da5-44e0-8131-a6bdab586889 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Task: {'id': task-1803061, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1820.985343] env[62519]: DEBUG oslo_vmware.api [None req-3672f208-1af3-4e50-89f3-c43121d90870 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Task: {'id': task-1803053, 'name': CopyVirtualDisk_Task, 'duration_secs': 2.234347} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1820.985343] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-3672f208-1af3-4e50-89f3-c43121d90870 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk to [datastore1] f3665f89-1747-4567-9e56-c937d4ac81da/f3665f89-1747-4567-9e56-c937d4ac81da.vmdk {{(pid=62519) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1820.985343] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-3672f208-1af3-4e50-89f3-c43121d90870 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] [instance: f3665f89-1747-4567-9e56-c937d4ac81da] Extending root virtual disk to 1048576 {{(pid=62519) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1820.985343] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-411a080c-09f4-4b86-b773-281aba8bce65 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1820.996196] env[62519]: DEBUG nova.compute.manager [None req-48bc5e60-2f25-476c-81c3-fd5f91814e5e tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] [instance: 71c6bd0c-40da-44eb-8f37-4f1b7fb0f0a2] Checking state {{(pid=62519) _get_power_state /opt/stack/nova/nova/compute/manager.py:1799}} [ 1820.996803] env[62519]: DEBUG oslo_vmware.api [None req-3672f208-1af3-4e50-89f3-c43121d90870 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Waiting for the task: (returnval){ [ 1820.996803] env[62519]: value = "task-1803062" [ 1820.996803] env[62519]: _type = "Task" [ 1820.996803] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1820.998128] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5cec5a7b-421c-43c4-9b82-f0fba8cd30b6 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1821.026593] env[62519]: DEBUG oslo_vmware.api [None req-3672f208-1af3-4e50-89f3-c43121d90870 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Task: {'id': task-1803062, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1821.044698] env[62519]: DEBUG oslo_concurrency.lockutils [None req-0ffe5d30-081a-4e0b-9cd4-c618bdbba7b6 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 2.465s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1821.164078] env[62519]: DEBUG oslo_concurrency.lockutils [None req-de264d36-6a35-4acb-b918-a8ec7d4ea06e tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Releasing lock "refresh_cache-4a9656fd-2b9f-4dd6-8b71-39e55813f2f6" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1821.165026] env[62519]: DEBUG nova.compute.manager [None req-de264d36-6a35-4acb-b918-a8ec7d4ea06e tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] [instance: 4a9656fd-2b9f-4dd6-8b71-39e55813f2f6] Instance network_info: |[{"id": "0f634cea-fa73-4a6c-855a-cee16f97b053", "address": "fa:16:3e:51:09:24", "network": {"id": "06bc21fc-5712-4650-9d06-18e9cc6afd29", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1062109402-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "17cd969b1e7d4bd795748560caf80077", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "89f807d9-140f-4a6f-8bce-96795f9482ee", "external-id": "nsx-vlan-transportzone-762", "segmentation_id": 762, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0f634cea-fa", "ovs_interfaceid": "0f634cea-fa73-4a6c-855a-cee16f97b053", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62519) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2004}} [ 1821.165026] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-de264d36-6a35-4acb-b918-a8ec7d4ea06e tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] [instance: 4a9656fd-2b9f-4dd6-8b71-39e55813f2f6] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:51:09:24', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '89f807d9-140f-4a6f-8bce-96795f9482ee', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '0f634cea-fa73-4a6c-855a-cee16f97b053', 'vif_model': 'vmxnet3'}] {{(pid=62519) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1821.172957] env[62519]: DEBUG oslo.service.loopingcall [None req-de264d36-6a35-4acb-b918-a8ec7d4ea06e tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62519) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1821.173292] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4a9656fd-2b9f-4dd6-8b71-39e55813f2f6] Creating VM on the ESX host {{(pid=62519) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1821.177186] env[62519]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-0f129dfb-b48a-4620-a4c1-6a794035799b {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1821.199149] env[62519]: DEBUG oslo_vmware.api [None req-8f79086b-5bde-403c-9e87-ab8a69033e69 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Task: {'id': task-1803058, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.781343} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1821.199149] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-8f79086b-5bde-403c-9e87-ab8a69033e69 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Deleted the datastore file {{(pid=62519) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1821.199149] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-8f79086b-5bde-403c-9e87-ab8a69033e69 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] [instance: 2bc8f11e-82fc-4acb-945e-15327c133920] Deleted contents of the VM from datastore datastore1 {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1821.199149] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-8f79086b-5bde-403c-9e87-ab8a69033e69 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] [instance: 2bc8f11e-82fc-4acb-945e-15327c133920] Instance destroyed {{(pid=62519) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1821.199265] env[62519]: INFO nova.compute.manager [None req-8f79086b-5bde-403c-9e87-ab8a69033e69 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] [instance: 2bc8f11e-82fc-4acb-945e-15327c133920] Took 1.49 seconds to destroy the instance on the hypervisor. [ 1821.199491] env[62519]: DEBUG oslo.service.loopingcall [None req-8f79086b-5bde-403c-9e87-ab8a69033e69 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62519) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1821.200934] env[62519]: DEBUG nova.compute.manager [-] [instance: 2bc8f11e-82fc-4acb-945e-15327c133920] Deallocating network for instance {{(pid=62519) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2297}} [ 1821.201041] env[62519]: DEBUG nova.network.neutron [-] [instance: 2bc8f11e-82fc-4acb-945e-15327c133920] deallocate_for_instance() {{(pid=62519) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1821.202618] env[62519]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1821.202618] env[62519]: value = "task-1803063" [ 1821.202618] env[62519]: _type = "Task" [ 1821.202618] env[62519]: } to complete. 
{{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1821.205663] env[62519]: DEBUG oslo_vmware.api [None req-149c9fda-49e0-4e6e-bcab-f1c24a98f12b tempest-ServerAddressesTestJSON-575205737 tempest-ServerAddressesTestJSON-575205737-project-member] Task: {'id': task-1803059, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.725376} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1821.208900] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-149c9fda-49e0-4e6e-bcab-f1c24a98f12b tempest-ServerAddressesTestJSON-575205737 tempest-ServerAddressesTestJSON-575205737-project-member] Deleted the datastore file {{(pid=62519) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1821.209112] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-149c9fda-49e0-4e6e-bcab-f1c24a98f12b tempest-ServerAddressesTestJSON-575205737 tempest-ServerAddressesTestJSON-575205737-project-member] [instance: 1c4615c7-d145-4529-98bd-1ae3ed51e1b5] Deleted contents of the VM from datastore datastore1 {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1821.209295] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-149c9fda-49e0-4e6e-bcab-f1c24a98f12b tempest-ServerAddressesTestJSON-575205737 tempest-ServerAddressesTestJSON-575205737-project-member] [instance: 1c4615c7-d145-4529-98bd-1ae3ed51e1b5] Instance destroyed {{(pid=62519) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1821.209462] env[62519]: INFO nova.compute.manager [None req-149c9fda-49e0-4e6e-bcab-f1c24a98f12b tempest-ServerAddressesTestJSON-575205737 tempest-ServerAddressesTestJSON-575205737-project-member] [instance: 1c4615c7-d145-4529-98bd-1ae3ed51e1b5] Took 1.66 seconds to destroy the instance on the hypervisor. [ 1821.209686] env[62519]: DEBUG oslo.service.loopingcall [None req-149c9fda-49e0-4e6e-bcab-f1c24a98f12b tempest-ServerAddressesTestJSON-575205737 tempest-ServerAddressesTestJSON-575205737-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62519) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1821.209873] env[62519]: DEBUG nova.compute.manager [-] [instance: 1c4615c7-d145-4529-98bd-1ae3ed51e1b5] Deallocating network for instance {{(pid=62519) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2297}} [ 1821.209961] env[62519]: DEBUG nova.network.neutron [-] [instance: 1c4615c7-d145-4529-98bd-1ae3ed51e1b5] deallocate_for_instance() {{(pid=62519) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1821.217603] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1803063, 'name': CreateVM_Task} progress is 5%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1821.318645] env[62519]: DEBUG nova.compute.manager [req-809df263-b96a-456a-86ee-05d2ab7aadf1 req-66494b0e-68f0-483c-99e7-2784b5ad4259 service nova] [instance: 4a9656fd-2b9f-4dd6-8b71-39e55813f2f6] Received event network-changed-0f634cea-fa73-4a6c-855a-cee16f97b053 {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1821.318872] env[62519]: DEBUG nova.compute.manager [req-809df263-b96a-456a-86ee-05d2ab7aadf1 req-66494b0e-68f0-483c-99e7-2784b5ad4259 service nova] [instance: 4a9656fd-2b9f-4dd6-8b71-39e55813f2f6] Refreshing instance network info cache due to event network-changed-0f634cea-fa73-4a6c-855a-cee16f97b053. {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11628}} [ 1821.319130] env[62519]: DEBUG oslo_concurrency.lockutils [req-809df263-b96a-456a-86ee-05d2ab7aadf1 req-66494b0e-68f0-483c-99e7-2784b5ad4259 service nova] Acquiring lock "refresh_cache-4a9656fd-2b9f-4dd6-8b71-39e55813f2f6" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1821.319313] env[62519]: DEBUG oslo_concurrency.lockutils [req-809df263-b96a-456a-86ee-05d2ab7aadf1 req-66494b0e-68f0-483c-99e7-2784b5ad4259 service nova] Acquired lock "refresh_cache-4a9656fd-2b9f-4dd6-8b71-39e55813f2f6" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1821.319491] env[62519]: DEBUG nova.network.neutron [req-809df263-b96a-456a-86ee-05d2ab7aadf1 req-66494b0e-68f0-483c-99e7-2784b5ad4259 service nova] [instance: 4a9656fd-2b9f-4dd6-8b71-39e55813f2f6] Refreshing network info cache for port 0f634cea-fa73-4a6c-855a-cee16f97b053 {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1821.323436] env[62519]: DEBUG oslo_concurrency.lockutils [None req-68cf325b-36d5-4ace-bd3c-b52572e330d1 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Acquiring lock "21daee7a-1788-4a1c-969e-65b696702bf2" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1821.386535] env[62519]: DEBUG oslo_concurrency.lockutils [None req-7a436c06-fade-4716-bf01-f6402162aba1 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Acquiring lock "8b20b91d-d4e7-4ec2-88ec-76b444a8d8a8" by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1821.386936] env[62519]: DEBUG oslo_concurrency.lockutils [None req-7a436c06-fade-4716-bf01-f6402162aba1 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Lock "8b20b91d-d4e7-4ec2-88ec-76b444a8d8a8" acquired by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" :: waited 0.001s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1821.387346] env[62519]: INFO nova.compute.manager [None req-7a436c06-fade-4716-bf01-f6402162aba1 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] [instance: 8b20b91d-d4e7-4ec2-88ec-76b444a8d8a8] Rebooting instance [ 1821.425174] env[62519]: DEBUG oslo_vmware.api [None req-d2806f10-2da5-44e0-8131-a6bdab586889 
tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Task: {'id': task-1803061, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.623841} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1821.425533] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-d2806f10-2da5-44e0-8131-a6bdab586889 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Deleted the datastore file {{(pid=62519) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1821.425782] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-d2806f10-2da5-44e0-8131-a6bdab586889 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] [instance: 88f9351c-253b-49dd-a88e-b8585ea742ac] Deleted contents of the VM from datastore datastore1 {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1821.426032] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-d2806f10-2da5-44e0-8131-a6bdab586889 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] [instance: 88f9351c-253b-49dd-a88e-b8585ea742ac] Instance destroyed {{(pid=62519) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1821.426299] env[62519]: INFO nova.compute.manager [None req-d2806f10-2da5-44e0-8131-a6bdab586889 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] [instance: 88f9351c-253b-49dd-a88e-b8585ea742ac] Took 1.65 seconds to destroy the instance on the hypervisor. [ 1821.426731] env[62519]: DEBUG oslo.service.loopingcall [None req-d2806f10-2da5-44e0-8131-a6bdab586889 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62519) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1821.426845] env[62519]: DEBUG nova.compute.manager [-] [instance: 88f9351c-253b-49dd-a88e-b8585ea742ac] Deallocating network for instance {{(pid=62519) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2297}} [ 1821.426985] env[62519]: DEBUG nova.network.neutron [-] [instance: 88f9351c-253b-49dd-a88e-b8585ea742ac] deallocate_for_instance() {{(pid=62519) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1821.515320] env[62519]: DEBUG oslo_concurrency.lockutils [None req-04c01760-a718-4b80-a003-ee7594a61827 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Lock "21daee7a-1788-4a1c-969e-65b696702bf2" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 27.604s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1821.515725] env[62519]: DEBUG oslo_vmware.api [None req-3672f208-1af3-4e50-89f3-c43121d90870 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Task: {'id': task-1803062, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.332967} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1821.516042] env[62519]: DEBUG oslo_concurrency.lockutils [None req-68cf325b-36d5-4ace-bd3c-b52572e330d1 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Lock "21daee7a-1788-4a1c-969e-65b696702bf2" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.193s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1821.516771] env[62519]: DEBUG oslo_concurrency.lockutils [None req-68cf325b-36d5-4ace-bd3c-b52572e330d1 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Acquiring lock "21daee7a-1788-4a1c-969e-65b696702bf2-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1821.516771] env[62519]: DEBUG oslo_concurrency.lockutils [None req-68cf325b-36d5-4ace-bd3c-b52572e330d1 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Lock "21daee7a-1788-4a1c-969e-65b696702bf2-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1821.516771] env[62519]: DEBUG oslo_concurrency.lockutils [None req-68cf325b-36d5-4ace-bd3c-b52572e330d1 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Lock "21daee7a-1788-4a1c-969e-65b696702bf2-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1821.518804] env[62519]: INFO nova.compute.manager [None req-68cf325b-36d5-4ace-bd3c-b52572e330d1 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] [instance: 21daee7a-1788-4a1c-969e-65b696702bf2] Terminating instance [ 1821.522189] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-3672f208-1af3-4e50-89f3-c43121d90870 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] [instance: f3665f89-1747-4567-9e56-c937d4ac81da] Extended root virtual disk {{(pid=62519) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1821.523079] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9030dc67-aa65-4889-a837-067981257598 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1821.540818] env[62519]: INFO nova.compute.manager [None req-48bc5e60-2f25-476c-81c3-fd5f91814e5e tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] [instance: 71c6bd0c-40da-44eb-8f37-4f1b7fb0f0a2] instance snapshotting [ 1821.551216] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-3672f208-1af3-4e50-89f3-c43121d90870 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] [instance: f3665f89-1747-4567-9e56-c937d4ac81da] Reconfiguring VM instance instance-0000005f to attach disk [datastore1] f3665f89-1747-4567-9e56-c937d4ac81da/f3665f89-1747-4567-9e56-c937d4ac81da.vmdk or device None with type sparse 
{{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1821.552521] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-728c1b4d-8554-4062-9137-56f5f77845b4 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1821.558322] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5b9c3dd4-b437-413a-a2a4-47eb6ec94469 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1821.600089] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-012cad0b-49ef-4ba1-b3ca-60be11a38e4d {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1821.603081] env[62519]: DEBUG oslo_vmware.api [None req-3672f208-1af3-4e50-89f3-c43121d90870 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Waiting for the task: (returnval){ [ 1821.603081] env[62519]: value = "task-1803064" [ 1821.603081] env[62519]: _type = "Task" [ 1821.603081] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1821.621009] env[62519]: DEBUG oslo_vmware.api [None req-3672f208-1af3-4e50-89f3-c43121d90870 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Task: {'id': task-1803064, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1821.663306] env[62519]: INFO nova.scheduler.client.report [None req-0ffe5d30-081a-4e0b-9cd4-c618bdbba7b6 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Deleted allocation for migration 2ff5e489-a567-4b94-8bed-d596558da36f [ 1821.718210] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1803063, 'name': CreateVM_Task} progress is 99%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1821.907185] env[62519]: DEBUG oslo_concurrency.lockutils [None req-7a436c06-fade-4716-bf01-f6402162aba1 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Acquiring lock "refresh_cache-8b20b91d-d4e7-4ec2-88ec-76b444a8d8a8" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1821.907432] env[62519]: DEBUG oslo_concurrency.lockutils [None req-7a436c06-fade-4716-bf01-f6402162aba1 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Acquired lock "refresh_cache-8b20b91d-d4e7-4ec2-88ec-76b444a8d8a8" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1821.907657] env[62519]: DEBUG nova.network.neutron [None req-7a436c06-fade-4716-bf01-f6402162aba1 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] [instance: 8b20b91d-d4e7-4ec2-88ec-76b444a8d8a8] Building network info cache for instance {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1822.023841] env[62519]: DEBUG nova.compute.manager [None req-68cf325b-36d5-4ace-bd3c-b52572e330d1 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] [instance: 21daee7a-1788-4a1c-969e-65b696702bf2] Start destroying the instance on the hypervisor. {{(pid=62519) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3166}} [ 1822.023841] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-68cf325b-36d5-4ace-bd3c-b52572e330d1 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] [instance: 21daee7a-1788-4a1c-969e-65b696702bf2] Destroying instance {{(pid=62519) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1822.024617] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1aa56607-402e-4986-a2ef-242c9855e27f {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1822.034033] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-68cf325b-36d5-4ace-bd3c-b52572e330d1 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] [instance: 21daee7a-1788-4a1c-969e-65b696702bf2] Powering off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1822.034394] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c4da80af-ff64-4b53-84d2-f23f3cd0dfd4 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1822.042403] env[62519]: DEBUG oslo_vmware.api [None req-68cf325b-36d5-4ace-bd3c-b52572e330d1 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Waiting for the task: (returnval){ [ 1822.042403] env[62519]: value = "task-1803065" [ 1822.042403] env[62519]: _type = "Task" [ 1822.042403] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1822.052355] env[62519]: DEBUG oslo_vmware.api [None req-68cf325b-36d5-4ace-bd3c-b52572e330d1 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Task: {'id': task-1803065, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1822.115460] env[62519]: DEBUG oslo_vmware.api [None req-3672f208-1af3-4e50-89f3-c43121d90870 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Task: {'id': task-1803064, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1822.117545] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-48bc5e60-2f25-476c-81c3-fd5f91814e5e tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] [instance: 71c6bd0c-40da-44eb-8f37-4f1b7fb0f0a2] Creating Snapshot of the VM instance {{(pid=62519) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1822.117924] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-6910af3b-b2c6-44b6-9c30-10387e33d214 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1822.125148] env[62519]: DEBUG oslo_vmware.api [None req-48bc5e60-2f25-476c-81c3-fd5f91814e5e tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] Waiting for the task: (returnval){ [ 1822.125148] env[62519]: value = "task-1803066" [ 1822.125148] env[62519]: _type = "Task" [ 1822.125148] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1822.133984] env[62519]: DEBUG oslo_vmware.api [None req-48bc5e60-2f25-476c-81c3-fd5f91814e5e tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] Task: {'id': task-1803066, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1822.173130] env[62519]: DEBUG oslo_concurrency.lockutils [None req-0ffe5d30-081a-4e0b-9cd4-c618bdbba7b6 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Lock "5da884af-d8d2-409b-99bd-e5370e44e9f0" "released" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: held 7.655s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1822.177410] env[62519]: DEBUG nova.network.neutron [req-809df263-b96a-456a-86ee-05d2ab7aadf1 req-66494b0e-68f0-483c-99e7-2784b5ad4259 service nova] [instance: 4a9656fd-2b9f-4dd6-8b71-39e55813f2f6] Updated VIF entry in instance network info cache for port 0f634cea-fa73-4a6c-855a-cee16f97b053. 
{{(pid=62519) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1822.177752] env[62519]: DEBUG nova.network.neutron [req-809df263-b96a-456a-86ee-05d2ab7aadf1 req-66494b0e-68f0-483c-99e7-2784b5ad4259 service nova] [instance: 4a9656fd-2b9f-4dd6-8b71-39e55813f2f6] Updating instance_info_cache with network_info: [{"id": "0f634cea-fa73-4a6c-855a-cee16f97b053", "address": "fa:16:3e:51:09:24", "network": {"id": "06bc21fc-5712-4650-9d06-18e9cc6afd29", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1062109402-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "17cd969b1e7d4bd795748560caf80077", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "89f807d9-140f-4a6f-8bce-96795f9482ee", "external-id": "nsx-vlan-transportzone-762", "segmentation_id": 762, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0f634cea-fa", "ovs_interfaceid": "0f634cea-fa73-4a6c-855a-cee16f97b053", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1822.220961] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1803063, 'name': CreateVM_Task, 'duration_secs': 0.617706} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1822.222578] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4a9656fd-2b9f-4dd6-8b71-39e55813f2f6] Created VM on the ESX host {{(pid=62519) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1822.223248] env[62519]: DEBUG oslo_concurrency.lockutils [None req-de264d36-6a35-4acb-b918-a8ec7d4ea06e tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1822.223352] env[62519]: DEBUG oslo_concurrency.lockutils [None req-de264d36-6a35-4acb-b918-a8ec7d4ea06e tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Acquired lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1822.223664] env[62519]: DEBUG oslo_concurrency.lockutils [None req-de264d36-6a35-4acb-b918-a8ec7d4ea06e tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1822.223907] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-70fc793a-ec16-4c74-ae80-b05d5babe666 {{(pid=62519) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1822.232197] env[62519]: DEBUG oslo_vmware.api [None req-de264d36-6a35-4acb-b918-a8ec7d4ea06e tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Waiting for the task: (returnval){ [ 1822.232197] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]523bef9e-1bb0-6a37-129e-f73182ae199d" [ 1822.232197] env[62519]: _type = "Task" [ 1822.232197] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1822.240994] env[62519]: DEBUG oslo_vmware.api [None req-de264d36-6a35-4acb-b918-a8ec7d4ea06e tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]523bef9e-1bb0-6a37-129e-f73182ae199d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1822.244036] env[62519]: DEBUG nova.network.neutron [-] [instance: 1c4615c7-d145-4529-98bd-1ae3ed51e1b5] Updating instance_info_cache with network_info: [] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1822.247717] env[62519]: DEBUG nova.network.neutron [-] [instance: 2bc8f11e-82fc-4acb-945e-15327c133920] Updating instance_info_cache with network_info: [] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1822.554882] env[62519]: DEBUG oslo_vmware.api [None req-68cf325b-36d5-4ace-bd3c-b52572e330d1 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Task: {'id': task-1803065, 'name': PowerOffVM_Task, 'duration_secs': 0.230042} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1822.555617] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-68cf325b-36d5-4ace-bd3c-b52572e330d1 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] [instance: 21daee7a-1788-4a1c-969e-65b696702bf2] Powered off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1822.556041] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-68cf325b-36d5-4ace-bd3c-b52572e330d1 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] [instance: 21daee7a-1788-4a1c-969e-65b696702bf2] Unregistering the VM {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1822.556517] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-1ea76ad5-5bbf-4bab-a5a5-bf92e15c091f {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1822.615638] env[62519]: DEBUG oslo_vmware.api [None req-3672f208-1af3-4e50-89f3-c43121d90870 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Task: {'id': task-1803064, 'name': ReconfigVM_Task, 'duration_secs': 0.569859} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1822.617934] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-3672f208-1af3-4e50-89f3-c43121d90870 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] [instance: f3665f89-1747-4567-9e56-c937d4ac81da] Reconfigured VM instance instance-0000005f to attach disk [datastore1] f3665f89-1747-4567-9e56-c937d4ac81da/f3665f89-1747-4567-9e56-c937d4ac81da.vmdk or device None with type sparse {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1822.618870] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-91891afa-985b-43d0-ad1b-f5a4a014a931 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1822.626482] env[62519]: DEBUG oslo_vmware.api [None req-3672f208-1af3-4e50-89f3-c43121d90870 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Waiting for the task: (returnval){ [ 1822.626482] env[62519]: value = "task-1803068" [ 1822.626482] env[62519]: _type = "Task" [ 1822.626482] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1822.640351] env[62519]: DEBUG oslo_vmware.api [None req-48bc5e60-2f25-476c-81c3-fd5f91814e5e tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] Task: {'id': task-1803066, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1822.643618] env[62519]: DEBUG oslo_vmware.api [None req-3672f208-1af3-4e50-89f3-c43121d90870 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Task: {'id': task-1803068, 'name': Rename_Task} progress is 6%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1822.657786] env[62519]: DEBUG nova.network.neutron [None req-7a436c06-fade-4716-bf01-f6402162aba1 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] [instance: 8b20b91d-d4e7-4ec2-88ec-76b444a8d8a8] Updating instance_info_cache with network_info: [{"id": "c3e2a054-4826-4bd6-8c9e-74005e7912e4", "address": "fa:16:3e:5b:29:03", "network": {"id": "40fc0e4f-fe1a-4b8c-ace4-4612cfc1a8fc", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-364463489-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.170", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "12977ed65a1b410a987b049e9d1dce3e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8e028024-a9c1-4cae-8849-ea770a7ae0e4", "external-id": "nsx-vlan-transportzone-919", "segmentation_id": 919, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc3e2a054-48", "ovs_interfaceid": "c3e2a054-4826-4bd6-8c9e-74005e7912e4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1822.680846] env[62519]: DEBUG oslo_concurrency.lockutils [req-809df263-b96a-456a-86ee-05d2ab7aadf1 req-66494b0e-68f0-483c-99e7-2784b5ad4259 service nova] Releasing lock "refresh_cache-4a9656fd-2b9f-4dd6-8b71-39e55813f2f6" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1822.728113] env[62519]: DEBUG nova.network.neutron [-] [instance: 88f9351c-253b-49dd-a88e-b8585ea742ac] Updating instance_info_cache with network_info: [] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1822.743556] env[62519]: DEBUG oslo_vmware.api [None req-de264d36-6a35-4acb-b918-a8ec7d4ea06e tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]523bef9e-1bb0-6a37-129e-f73182ae199d, 'name': SearchDatastore_Task, 'duration_secs': 0.014371} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1822.744023] env[62519]: INFO nova.compute.manager [-] [instance: 1c4615c7-d145-4529-98bd-1ae3ed51e1b5] Took 1.53 seconds to deallocate network for instance. 
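The instance_info_cache entries logged above are plain JSON-like structures. A minimal, illustrative sketch (pure Python; the sample dict is abridged from the cache entry logged above for instance 8b20b91d-d4e7-4ec2-88ec-76b444a8d8a8, and summarize_vif is a hypothetical helper, not Nova code) that pulls out the fields most often needed when reading such an entry:

# Illustrative only: walk one network_info VIF entry as logged by
# update_instance_cache_with_nw_info and summarize the useful fields.
def summarize_vif(vif):
    """Return port id, MAC, MTU, fixed and floating IPs for one VIF dict."""
    fixed, floating = [], []
    for subnet in vif["network"]["subnets"]:
        for ip in subnet["ips"]:
            fixed.append(ip["address"])
            floating.extend(f["address"] for f in ip.get("floating_ips", []))
    return {
        "port_id": vif["id"],
        "mac": vif["address"],
        "mtu": vif["network"]["meta"].get("mtu"),
        "ovs_interfaceid": vif.get("ovs_interfaceid"),
        "fixed_ips": fixed,
        "floating_ips": floating,
    }

# Abridged from the cache entry logged above.
sample = {
    "id": "c3e2a054-4826-4bd6-8c9e-74005e7912e4",
    "address": "fa:16:3e:5b:29:03",
    "ovs_interfaceid": "c3e2a054-4826-4bd6-8c9e-74005e7912e4",
    "network": {
        "meta": {"mtu": 8950},
        "subnets": [{
            "ips": [{
                "address": "192.168.128.11",
                "floating_ips": [{"address": "10.180.180.170"}],
            }],
        }],
    },
}

print(summarize_vif(sample))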
[ 1822.744492] env[62519]: DEBUG oslo_concurrency.lockutils [None req-de264d36-6a35-4acb-b918-a8ec7d4ea06e tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Releasing lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1822.744725] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-de264d36-6a35-4acb-b918-a8ec7d4ea06e tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] [instance: 4a9656fd-2b9f-4dd6-8b71-39e55813f2f6] Processing image 15793716-f1d9-4a86-9030-717adf498693 {{(pid=62519) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1822.744959] env[62519]: DEBUG oslo_concurrency.lockutils [None req-de264d36-6a35-4acb-b918-a8ec7d4ea06e tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1822.745124] env[62519]: DEBUG oslo_concurrency.lockutils [None req-de264d36-6a35-4acb-b918-a8ec7d4ea06e tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Acquired lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1822.745304] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-de264d36-6a35-4acb-b918-a8ec7d4ea06e tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62519) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1822.747602] env[62519]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-1ed0b485-2875-4b82-92dc-e1c5fd8a26ae {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1822.754389] env[62519]: INFO nova.compute.manager [-] [instance: 2bc8f11e-82fc-4acb-945e-15327c133920] Took 1.55 seconds to deallocate network for instance. 
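The Acquiring/Acquired/Releasing lines above come from oslo.concurrency's named locks, which serialize work on the shared image-cache path so only one request prepares a cached VMDK at a time. A minimal sketch of the same pattern, assuming oslo.concurrency is installed; the lock name is copied from the trace and prepare_image is an illustrative placeholder, not Nova's actual code:

# Sketch of the lock pattern seen above: serialize access to one cached
# image path so concurrent spawns do not race on the datastore cache.
from oslo_concurrency import lockutils

CACHE_LOCK = "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693"

def ensure_cached_image(prepare_image):
    # lockutils.lock() is a context manager; with do_log enabled it emits
    # "Acquiring"/"Acquired"/"Releasing" debug lines like those in the trace.
    with lockutils.lock(CACHE_LOCK):
        # prepare_image() stands in for the SearchDatastore / MakeDirectory /
        # CopyVirtualDisk work performed while the lock is held.
        prepare_image()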
[ 1822.754389] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-68cf325b-36d5-4ace-bd3c-b52572e330d1 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] [instance: 21daee7a-1788-4a1c-969e-65b696702bf2] Unregistered the VM {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1822.754389] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-68cf325b-36d5-4ace-bd3c-b52572e330d1 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] [instance: 21daee7a-1788-4a1c-969e-65b696702bf2] Deleting contents of the VM from datastore datastore1 {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1822.754548] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-68cf325b-36d5-4ace-bd3c-b52572e330d1 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Deleting the datastore file [datastore1] 21daee7a-1788-4a1c-969e-65b696702bf2 {{(pid=62519) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1822.756553] env[62519]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-83a7f563-a18b-4354-9c15-a9824e7e082f {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1822.764850] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-de264d36-6a35-4acb-b918-a8ec7d4ea06e tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62519) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1822.764922] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-de264d36-6a35-4acb-b918-a8ec7d4ea06e tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62519) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1822.767220] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-351c383e-c34f-4857-b221-2bf69b24000e {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1822.769488] env[62519]: DEBUG oslo_vmware.api [None req-68cf325b-36d5-4ace-bd3c-b52572e330d1 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Waiting for the task: (returnval){ [ 1822.769488] env[62519]: value = "task-1803069" [ 1822.769488] env[62519]: _type = "Task" [ 1822.769488] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1822.775041] env[62519]: DEBUG oslo_vmware.api [None req-de264d36-6a35-4acb-b918-a8ec7d4ea06e tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Waiting for the task: (returnval){ [ 1822.775041] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]525c05c2-c5c5-f3d2-4131-690c45e10550" [ 1822.775041] env[62519]: _type = "Task" [ 1822.775041] env[62519]: } to complete. 
{{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1822.783991] env[62519]: DEBUG oslo_vmware.api [None req-68cf325b-36d5-4ace-bd3c-b52572e330d1 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Task: {'id': task-1803069, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1822.791206] env[62519]: DEBUG oslo_vmware.api [None req-de264d36-6a35-4acb-b918-a8ec7d4ea06e tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]525c05c2-c5c5-f3d2-4131-690c45e10550, 'name': SearchDatastore_Task, 'duration_secs': 0.01211} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1822.791824] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5fba0120-01a1-41cb-a670-9262686357cc {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1822.801234] env[62519]: DEBUG oslo_vmware.api [None req-de264d36-6a35-4acb-b918-a8ec7d4ea06e tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Waiting for the task: (returnval){ [ 1822.801234] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]521b4415-2561-bd0d-f609-25c748263725" [ 1822.801234] env[62519]: _type = "Task" [ 1822.801234] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1822.814240] env[62519]: DEBUG oslo_vmware.api [None req-de264d36-6a35-4acb-b918-a8ec7d4ea06e tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]521b4415-2561-bd0d-f609-25c748263725, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1823.037712] env[62519]: DEBUG oslo_concurrency.lockutils [None req-d244d491-3ba6-4ebd-9b9f-5ceff9c2adba tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Acquiring lock "5da884af-d8d2-409b-99bd-e5370e44e9f0" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1823.037712] env[62519]: DEBUG oslo_concurrency.lockutils [None req-d244d491-3ba6-4ebd-9b9f-5ceff9c2adba tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Lock "5da884af-d8d2-409b-99bd-e5370e44e9f0" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1823.037712] env[62519]: DEBUG oslo_concurrency.lockutils [None req-d244d491-3ba6-4ebd-9b9f-5ceff9c2adba tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Acquiring lock "5da884af-d8d2-409b-99bd-e5370e44e9f0-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1823.037712] env[62519]: DEBUG oslo_concurrency.lockutils [None req-d244d491-3ba6-4ebd-9b9f-5ceff9c2adba tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Lock "5da884af-d8d2-409b-99bd-e5370e44e9f0-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1823.037712] env[62519]: DEBUG oslo_concurrency.lockutils [None req-d244d491-3ba6-4ebd-9b9f-5ceff9c2adba tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Lock "5da884af-d8d2-409b-99bd-e5370e44e9f0-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1823.040339] env[62519]: INFO nova.compute.manager [None req-d244d491-3ba6-4ebd-9b9f-5ceff9c2adba tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] [instance: 5da884af-d8d2-409b-99bd-e5370e44e9f0] Terminating instance [ 1823.147172] env[62519]: DEBUG oslo_vmware.api [None req-3672f208-1af3-4e50-89f3-c43121d90870 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Task: {'id': task-1803068, 'name': Rename_Task, 'duration_secs': 0.185472} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1823.151642] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-3672f208-1af3-4e50-89f3-c43121d90870 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] [instance: f3665f89-1747-4567-9e56-c937d4ac81da] Powering on the VM {{(pid=62519) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1823.151859] env[62519]: DEBUG oslo_vmware.api [None req-48bc5e60-2f25-476c-81c3-fd5f91814e5e tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] Task: {'id': task-1803066, 'name': CreateSnapshot_Task, 'duration_secs': 0.722632} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1823.152577] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-55e13ed4-77c0-4d34-bfcb-51aed6a95064 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1823.154626] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-48bc5e60-2f25-476c-81c3-fd5f91814e5e tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] [instance: 71c6bd0c-40da-44eb-8f37-4f1b7fb0f0a2] Created Snapshot of the VM instance {{(pid=62519) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1823.155579] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd44a565-4f5a-4188-9d10-018c751641d4 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1823.161158] env[62519]: DEBUG oslo_concurrency.lockutils [None req-7a436c06-fade-4716-bf01-f6402162aba1 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Releasing lock "refresh_cache-8b20b91d-d4e7-4ec2-88ec-76b444a8d8a8" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1823.181484] env[62519]: DEBUG oslo_vmware.api [None req-3672f208-1af3-4e50-89f3-c43121d90870 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Waiting for the task: (returnval){ [ 1823.181484] env[62519]: value = "task-1803070" [ 1823.181484] env[62519]: _type = "Task" [ 1823.181484] env[62519]: } to complete. 
{{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1823.186359] env[62519]: DEBUG nova.compute.manager [None req-7a436c06-fade-4716-bf01-f6402162aba1 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] [instance: 8b20b91d-d4e7-4ec2-88ec-76b444a8d8a8] Checking state {{(pid=62519) _get_power_state /opt/stack/nova/nova/compute/manager.py:1799}} [ 1823.187564] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a76af02-0e04-4ef7-ac67-be0ec8acdff6 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1823.199469] env[62519]: DEBUG oslo_vmware.api [None req-3672f208-1af3-4e50-89f3-c43121d90870 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Task: {'id': task-1803070, 'name': PowerOnVM_Task} progress is 33%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1823.230624] env[62519]: INFO nova.compute.manager [-] [instance: 88f9351c-253b-49dd-a88e-b8585ea742ac] Took 1.80 seconds to deallocate network for instance. [ 1823.256749] env[62519]: DEBUG oslo_concurrency.lockutils [None req-149c9fda-49e0-4e6e-bcab-f1c24a98f12b tempest-ServerAddressesTestJSON-575205737 tempest-ServerAddressesTestJSON-575205737-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1823.257276] env[62519]: DEBUG oslo_concurrency.lockutils [None req-149c9fda-49e0-4e6e-bcab-f1c24a98f12b tempest-ServerAddressesTestJSON-575205737 tempest-ServerAddressesTestJSON-575205737-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1823.257525] env[62519]: DEBUG nova.objects.instance [None req-149c9fda-49e0-4e6e-bcab-f1c24a98f12b tempest-ServerAddressesTestJSON-575205737 tempest-ServerAddressesTestJSON-575205737-project-member] Lazy-loading 'resources' on Instance uuid 1c4615c7-d145-4529-98bd-1ae3ed51e1b5 {{(pid=62519) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1823.266565] env[62519]: DEBUG oslo_concurrency.lockutils [None req-8f79086b-5bde-403c-9e87-ab8a69033e69 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1823.284738] env[62519]: DEBUG oslo_vmware.api [None req-68cf325b-36d5-4ace-bd3c-b52572e330d1 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Task: {'id': task-1803069, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1823.313252] env[62519]: DEBUG oslo_vmware.api [None req-de264d36-6a35-4acb-b918-a8ec7d4ea06e tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]521b4415-2561-bd0d-f609-25c748263725, 'name': SearchDatastore_Task, 'duration_secs': 0.016031} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1823.313596] env[62519]: DEBUG oslo_concurrency.lockutils [None req-de264d36-6a35-4acb-b918-a8ec7d4ea06e tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Releasing lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1823.313903] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-de264d36-6a35-4acb-b918-a8ec7d4ea06e tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk to [datastore1] 4a9656fd-2b9f-4dd6-8b71-39e55813f2f6/4a9656fd-2b9f-4dd6-8b71-39e55813f2f6.vmdk {{(pid=62519) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1823.314261] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-08d99280-e7a0-48c7-8651-59f0c55e8b46 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1823.323530] env[62519]: DEBUG oslo_vmware.api [None req-de264d36-6a35-4acb-b918-a8ec7d4ea06e tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Waiting for the task: (returnval){ [ 1823.323530] env[62519]: value = "task-1803071" [ 1823.323530] env[62519]: _type = "Task" [ 1823.323530] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1823.333666] env[62519]: DEBUG oslo_vmware.api [None req-de264d36-6a35-4acb-b918-a8ec7d4ea06e tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Task: {'id': task-1803071, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1823.381276] env[62519]: DEBUG nova.compute.manager [req-eaf1669d-4c73-47d2-aa22-92f1b7bba786 req-8705b641-0123-4fa7-91af-601c20955d27 service nova] [instance: 1c4615c7-d145-4529-98bd-1ae3ed51e1b5] Received event network-vif-deleted-6541f904-9576-48c8-972b-84003d7a5315 {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1823.382215] env[62519]: DEBUG nova.compute.manager [req-eaf1669d-4c73-47d2-aa22-92f1b7bba786 req-8705b641-0123-4fa7-91af-601c20955d27 service nova] [instance: 2bc8f11e-82fc-4acb-945e-15327c133920] Received event network-vif-deleted-bc9c80f3-eef8-4cad-b111-3766f01949f7 {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1823.382215] env[62519]: DEBUG nova.compute.manager [req-eaf1669d-4c73-47d2-aa22-92f1b7bba786 req-8705b641-0123-4fa7-91af-601c20955d27 service nova] [instance: 88f9351c-253b-49dd-a88e-b8585ea742ac] Received event network-vif-deleted-f0eeb5fa-bf84-45a4-a90f-85d593feed7e {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1823.546368] env[62519]: DEBUG nova.compute.manager [None req-d244d491-3ba6-4ebd-9b9f-5ceff9c2adba tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] [instance: 5da884af-d8d2-409b-99bd-e5370e44e9f0] Start destroying the instance on the hypervisor. {{(pid=62519) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3166}} [ 1823.546646] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-d244d491-3ba6-4ebd-9b9f-5ceff9c2adba tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] [instance: 5da884af-d8d2-409b-99bd-e5370e44e9f0] Destroying instance {{(pid=62519) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1823.547893] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-174d669a-3f2a-429f-bc90-218f667b4a8b {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1823.558359] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-d244d491-3ba6-4ebd-9b9f-5ceff9c2adba tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] [instance: 5da884af-d8d2-409b-99bd-e5370e44e9f0] Powering off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1823.558359] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-6ec7488a-fa92-4d5a-9b1e-436ad66df456 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1823.566356] env[62519]: DEBUG oslo_vmware.api [None req-d244d491-3ba6-4ebd-9b9f-5ceff9c2adba tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Waiting for the task: (returnval){ [ 1823.566356] env[62519]: value = "task-1803072" [ 1823.566356] env[62519]: _type = "Task" [ 1823.566356] env[62519]: } to complete. 
{{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1823.575942] env[62519]: DEBUG oslo_vmware.api [None req-d244d491-3ba6-4ebd-9b9f-5ceff9c2adba tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Task: {'id': task-1803072, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1823.691318] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-48bc5e60-2f25-476c-81c3-fd5f91814e5e tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] [instance: 71c6bd0c-40da-44eb-8f37-4f1b7fb0f0a2] Creating linked-clone VM from snapshot {{(pid=62519) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1823.692095] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-d4d4d4da-7707-4ecf-9568-17ff2fc73edc {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1823.710044] env[62519]: DEBUG oslo_vmware.api [None req-3672f208-1af3-4e50-89f3-c43121d90870 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Task: {'id': task-1803070, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1823.712023] env[62519]: DEBUG oslo_vmware.api [None req-48bc5e60-2f25-476c-81c3-fd5f91814e5e tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] Waiting for the task: (returnval){ [ 1823.712023] env[62519]: value = "task-1803073" [ 1823.712023] env[62519]: _type = "Task" [ 1823.712023] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1823.721534] env[62519]: DEBUG oslo_concurrency.lockutils [None req-cb53b5a9-1695-40be-9a5a-731401609236 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Acquiring lock "e143206e-eb12-41b8-9140-229c1533fd80" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1823.723100] env[62519]: DEBUG oslo_concurrency.lockutils [None req-cb53b5a9-1695-40be-9a5a-731401609236 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Lock "e143206e-eb12-41b8-9140-229c1533fd80" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.002s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1823.731765] env[62519]: DEBUG oslo_vmware.api [None req-48bc5e60-2f25-476c-81c3-fd5f91814e5e tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] Task: {'id': task-1803073, 'name': CloneVM_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1823.737873] env[62519]: DEBUG oslo_concurrency.lockutils [None req-d2806f10-2da5-44e0-8131-a6bdab586889 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1823.792153] env[62519]: DEBUG oslo_vmware.api [None req-68cf325b-36d5-4ace-bd3c-b52572e330d1 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Task: {'id': task-1803069, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.519324} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1823.792153] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-68cf325b-36d5-4ace-bd3c-b52572e330d1 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Deleted the datastore file {{(pid=62519) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1823.792153] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-68cf325b-36d5-4ace-bd3c-b52572e330d1 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] [instance: 21daee7a-1788-4a1c-969e-65b696702bf2] Deleted contents of the VM from datastore datastore1 {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1823.792153] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-68cf325b-36d5-4ace-bd3c-b52572e330d1 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] [instance: 21daee7a-1788-4a1c-969e-65b696702bf2] Instance destroyed {{(pid=62519) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1823.792153] env[62519]: INFO nova.compute.manager [None req-68cf325b-36d5-4ace-bd3c-b52572e330d1 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] [instance: 21daee7a-1788-4a1c-969e-65b696702bf2] Took 1.77 seconds to destroy the instance on the hypervisor. [ 1823.792571] env[62519]: DEBUG oslo.service.loopingcall [None req-68cf325b-36d5-4ace-bd3c-b52572e330d1 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62519) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1823.793140] env[62519]: DEBUG nova.compute.manager [-] [instance: 21daee7a-1788-4a1c-969e-65b696702bf2] Deallocating network for instance {{(pid=62519) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2297}} [ 1823.793366] env[62519]: DEBUG nova.network.neutron [-] [instance: 21daee7a-1788-4a1c-969e-65b696702bf2] deallocate_for_instance() {{(pid=62519) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1823.837470] env[62519]: DEBUG oslo_vmware.api [None req-de264d36-6a35-4acb-b918-a8ec7d4ea06e tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Task: {'id': task-1803071, 'name': CopyVirtualDisk_Task} progress is 25%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1824.081423] env[62519]: DEBUG oslo_vmware.api [None req-d244d491-3ba6-4ebd-9b9f-5ceff9c2adba tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Task: {'id': task-1803072, 'name': PowerOffVM_Task, 'duration_secs': 0.246965} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1824.081423] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-d244d491-3ba6-4ebd-9b9f-5ceff9c2adba tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] [instance: 5da884af-d8d2-409b-99bd-e5370e44e9f0] Powered off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1824.081423] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-d244d491-3ba6-4ebd-9b9f-5ceff9c2adba tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] [instance: 5da884af-d8d2-409b-99bd-e5370e44e9f0] Unregistering the VM {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1824.081423] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-1d4c2a67-c17d-4e41-b647-affd0afd0ada {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1824.171480] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98c57463-ea95-488c-9d65-e8cdc1ab709b {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1824.180902] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-443f1e27-efa5-4252-ac4a-5f8965498fd3 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1824.219212] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-075c975a-7171-4580-8a0d-0dc3ec4538cf {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1824.226759] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7c139fd-ef28-42ef-bad3-6ea8ae0146da {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1824.232860] env[62519]: DEBUG nova.compute.manager [None req-cb53b5a9-1695-40be-9a5a-731401609236 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] [instance: e143206e-eb12-41b8-9140-229c1533fd80] Starting instance... {{(pid=62519) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2441}} [ 1824.250912] env[62519]: DEBUG oslo_vmware.api [None req-48bc5e60-2f25-476c-81c3-fd5f91814e5e tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] Task: {'id': task-1803073, 'name': CloneVM_Task} progress is 94%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1824.251253] env[62519]: DEBUG oslo_vmware.api [None req-3672f208-1af3-4e50-89f3-c43121d90870 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Task: {'id': task-1803070, 'name': PowerOnVM_Task, 'duration_secs': 0.954664} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1824.251546] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-7a436c06-fade-4716-bf01-f6402162aba1 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] [instance: 8b20b91d-d4e7-4ec2-88ec-76b444a8d8a8] Doing hard reboot of VM {{(pid=62519) reboot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1063}} [ 1824.253956] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-883863ca-d4e6-45f5-994d-b927fd8d65b3 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1824.257934] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-3672f208-1af3-4e50-89f3-c43121d90870 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] [instance: f3665f89-1747-4567-9e56-c937d4ac81da] Powered on the VM {{(pid=62519) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1824.258202] env[62519]: INFO nova.compute.manager [None req-3672f208-1af3-4e50-89f3-c43121d90870 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] [instance: f3665f89-1747-4567-9e56-c937d4ac81da] Took 11.52 seconds to spawn the instance on the hypervisor. 
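Most of the trace above is the same cycle: invoke a vSphere method that returns a Task managed object, then poll it until it completes (the "progress is NN%" lines from _poll_task). A rough sketch of that cycle with oslo.vmware, assuming an already-authenticated oslo_vmware.api.VMwareAPISession named session and an existing VM moref vm_ref; error handling is omitted:

# Sketch: power a VM on and block until the vSphere task finishes,
# mirroring the PowerOnVM_Task / wait_for_task lines in the trace above.

def power_on_and_wait(session, vm_ref):
    # invoke_api returns the Task managed-object reference; wait_for_task
    # polls task.info (the source of the "progress is NN%" log lines) and
    # raises an oslo.vmware exception if the task ends in an error state.
    task_ref = session.invoke_api(session.vim, "PowerOnVM_Task", vm_ref)
    return session.wait_for_task(task_ref)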
[ 1824.258425] env[62519]: DEBUG nova.compute.manager [None req-3672f208-1af3-4e50-89f3-c43121d90870 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] [instance: f3665f89-1747-4567-9e56-c937d4ac81da] Checking state {{(pid=62519) _get_power_state /opt/stack/nova/nova/compute/manager.py:1799}} [ 1824.258770] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ResetVM_Task with opID=oslo.vmware-0d1d1edb-9b88-464d-9ee4-c71715e16162 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1824.260729] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-d244d491-3ba6-4ebd-9b9f-5ceff9c2adba tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] [instance: 5da884af-d8d2-409b-99bd-e5370e44e9f0] Unregistered the VM {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1824.260729] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-d244d491-3ba6-4ebd-9b9f-5ceff9c2adba tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] [instance: 5da884af-d8d2-409b-99bd-e5370e44e9f0] Deleting contents of the VM from datastore datastore1 {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1824.260729] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-d244d491-3ba6-4ebd-9b9f-5ceff9c2adba tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Deleting the datastore file [datastore1] 5da884af-d8d2-409b-99bd-e5370e44e9f0 {{(pid=62519) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1824.262156] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6ab49e5-b989-4cee-b8cb-36c1cb6b5d2b {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1824.264515] env[62519]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-cc650433-b14a-4195-aa7c-d4556a1bddaa {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1824.280193] env[62519]: DEBUG nova.compute.provider_tree [None req-149c9fda-49e0-4e6e-bcab-f1c24a98f12b tempest-ServerAddressesTestJSON-575205737 tempest-ServerAddressesTestJSON-575205737-project-member] Inventory has not changed in ProviderTree for provider: f8ca0d98-9158-4b85-ae0e-b106f966dd44 {{(pid=62519) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1824.285016] env[62519]: DEBUG oslo_vmware.api [None req-7a436c06-fade-4716-bf01-f6402162aba1 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Waiting for the task: (returnval){ [ 1824.285016] env[62519]: value = "task-1803075" [ 1824.285016] env[62519]: _type = "Task" [ 1824.285016] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1824.288285] env[62519]: DEBUG oslo_vmware.api [None req-d244d491-3ba6-4ebd-9b9f-5ceff9c2adba tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Waiting for the task: (returnval){ [ 1824.288285] env[62519]: value = "task-1803076" [ 1824.288285] env[62519]: _type = "Task" [ 1824.288285] env[62519]: } to complete. 
{{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1824.305872] env[62519]: DEBUG oslo_vmware.api [None req-d244d491-3ba6-4ebd-9b9f-5ceff9c2adba tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Task: {'id': task-1803076, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1824.306196] env[62519]: DEBUG oslo_vmware.api [None req-7a436c06-fade-4716-bf01-f6402162aba1 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Task: {'id': task-1803075, 'name': ResetVM_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1824.336347] env[62519]: DEBUG oslo_vmware.api [None req-de264d36-6a35-4acb-b918-a8ec7d4ea06e tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Task: {'id': task-1803071, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.851706} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1824.337049] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-de264d36-6a35-4acb-b918-a8ec7d4ea06e tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk to [datastore1] 4a9656fd-2b9f-4dd6-8b71-39e55813f2f6/4a9656fd-2b9f-4dd6-8b71-39e55813f2f6.vmdk {{(pid=62519) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1824.337049] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-de264d36-6a35-4acb-b918-a8ec7d4ea06e tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] [instance: 4a9656fd-2b9f-4dd6-8b71-39e55813f2f6] Extending root virtual disk to 1048576 {{(pid=62519) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1824.337249] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-0fd914d2-9f04-4600-9aa8-ddbf650e9990 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1824.347223] env[62519]: DEBUG oslo_vmware.api [None req-de264d36-6a35-4acb-b918-a8ec7d4ea06e tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Waiting for the task: (returnval){ [ 1824.347223] env[62519]: value = "task-1803077" [ 1824.347223] env[62519]: _type = "Task" [ 1824.347223] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1824.358668] env[62519]: DEBUG oslo_vmware.api [None req-de264d36-6a35-4acb-b918-a8ec7d4ea06e tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Task: {'id': task-1803077, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1824.619404] env[62519]: DEBUG nova.network.neutron [-] [instance: 21daee7a-1788-4a1c-969e-65b696702bf2] Updating instance_info_cache with network_info: [] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1824.728322] env[62519]: DEBUG oslo_vmware.api [None req-48bc5e60-2f25-476c-81c3-fd5f91814e5e tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] Task: {'id': task-1803073, 'name': CloneVM_Task} progress is 94%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1824.777032] env[62519]: DEBUG oslo_concurrency.lockutils [None req-cb53b5a9-1695-40be-9a5a-731401609236 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1824.791185] env[62519]: DEBUG nova.scheduler.client.report [None req-149c9fda-49e0-4e6e-bcab-f1c24a98f12b tempest-ServerAddressesTestJSON-575205737 tempest-ServerAddressesTestJSON-575205737-project-member] Inventory has not changed for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 157, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1824.807567] env[62519]: INFO nova.compute.manager [None req-3672f208-1af3-4e50-89f3-c43121d90870 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] [instance: f3665f89-1747-4567-9e56-c937d4ac81da] Took 23.73 seconds to build instance. [ 1824.826941] env[62519]: DEBUG oslo_vmware.api [None req-d244d491-3ba6-4ebd-9b9f-5ceff9c2adba tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Task: {'id': task-1803076, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1824.827139] env[62519]: DEBUG oslo_vmware.api [None req-7a436c06-fade-4716-bf01-f6402162aba1 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Task: {'id': task-1803075, 'name': ResetVM_Task, 'duration_secs': 0.119582} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1824.827382] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-7a436c06-fade-4716-bf01-f6402162aba1 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] [instance: 8b20b91d-d4e7-4ec2-88ec-76b444a8d8a8] Did hard reboot of VM {{(pid=62519) reboot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1067}} [ 1824.827595] env[62519]: DEBUG nova.compute.manager [None req-7a436c06-fade-4716-bf01-f6402162aba1 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] [instance: 8b20b91d-d4e7-4ec2-88ec-76b444a8d8a8] Checking state {{(pid=62519) _get_power_state /opt/stack/nova/nova/compute/manager.py:1799}} [ 1824.828558] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f758a70-4658-4c60-8e0b-0873961358d0 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1824.859464] env[62519]: DEBUG oslo_vmware.api [None req-de264d36-6a35-4acb-b918-a8ec7d4ea06e tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Task: {'id': task-1803077, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.081815} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1824.859745] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-de264d36-6a35-4acb-b918-a8ec7d4ea06e tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] [instance: 4a9656fd-2b9f-4dd6-8b71-39e55813f2f6] Extended root virtual disk {{(pid=62519) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1824.860880] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4e159ea-8c24-4741-b2fa-1b8eafca59a3 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1824.886567] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-de264d36-6a35-4acb-b918-a8ec7d4ea06e tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] [instance: 4a9656fd-2b9f-4dd6-8b71-39e55813f2f6] Reconfiguring VM instance instance-00000060 to attach disk [datastore1] 4a9656fd-2b9f-4dd6-8b71-39e55813f2f6/4a9656fd-2b9f-4dd6-8b71-39e55813f2f6.vmdk or device None with type sparse {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1824.887645] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-72c6742f-bd62-42b0-aadf-a8ea023813a4 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1824.909945] env[62519]: DEBUG oslo_vmware.api [None req-de264d36-6a35-4acb-b918-a8ec7d4ea06e tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Waiting for the task: (returnval){ [ 1824.909945] env[62519]: value = "task-1803078" [ 1824.909945] env[62519]: _type = "Task" [ 1824.909945] env[62519]: } to complete. 
{{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1824.921909] env[62519]: DEBUG oslo_vmware.api [None req-de264d36-6a35-4acb-b918-a8ec7d4ea06e tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Task: {'id': task-1803078, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1825.036908] env[62519]: DEBUG oslo_concurrency.lockutils [None req-fb47b509-8873-40b4-9991-f74edf23e568 tempest-InstanceActionsNegativeTestJSON-1848810980 tempest-InstanceActionsNegativeTestJSON-1848810980-project-member] Acquiring lock "fcff790b-d267-4d8c-80d8-ad66cfb66539" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1825.037207] env[62519]: DEBUG oslo_concurrency.lockutils [None req-fb47b509-8873-40b4-9991-f74edf23e568 tempest-InstanceActionsNegativeTestJSON-1848810980 tempest-InstanceActionsNegativeTestJSON-1848810980-project-member] Lock "fcff790b-d267-4d8c-80d8-ad66cfb66539" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1825.125395] env[62519]: INFO nova.compute.manager [-] [instance: 21daee7a-1788-4a1c-969e-65b696702bf2] Took 1.33 seconds to deallocate network for instance. [ 1825.225738] env[62519]: DEBUG oslo_vmware.api [None req-48bc5e60-2f25-476c-81c3-fd5f91814e5e tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] Task: {'id': task-1803073, 'name': CloneVM_Task} progress is 94%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1825.299516] env[62519]: DEBUG oslo_concurrency.lockutils [None req-149c9fda-49e0-4e6e-bcab-f1c24a98f12b tempest-ServerAddressesTestJSON-575205737 tempest-ServerAddressesTestJSON-575205737-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.042s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1825.302085] env[62519]: DEBUG oslo_concurrency.lockutils [None req-8f79086b-5bde-403c-9e87-ab8a69033e69 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 2.038s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1825.302545] env[62519]: DEBUG nova.objects.instance [None req-8f79086b-5bde-403c-9e87-ab8a69033e69 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Lazy-loading 'resources' on Instance uuid 2bc8f11e-82fc-4acb-945e-15327c133920 {{(pid=62519) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1825.310504] env[62519]: DEBUG oslo_vmware.api [None req-d244d491-3ba6-4ebd-9b9f-5ceff9c2adba tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Task: {'id': task-1803076, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.619578} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1825.310504] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-d244d491-3ba6-4ebd-9b9f-5ceff9c2adba tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Deleted the datastore file {{(pid=62519) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1825.310637] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-d244d491-3ba6-4ebd-9b9f-5ceff9c2adba tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] [instance: 5da884af-d8d2-409b-99bd-e5370e44e9f0] Deleted contents of the VM from datastore datastore1 {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1825.310806] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-d244d491-3ba6-4ebd-9b9f-5ceff9c2adba tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] [instance: 5da884af-d8d2-409b-99bd-e5370e44e9f0] Instance destroyed {{(pid=62519) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1825.310972] env[62519]: INFO nova.compute.manager [None req-d244d491-3ba6-4ebd-9b9f-5ceff9c2adba tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] [instance: 5da884af-d8d2-409b-99bd-e5370e44e9f0] Took 1.76 seconds to destroy the instance on the hypervisor. [ 1825.311218] env[62519]: DEBUG oslo.service.loopingcall [None req-d244d491-3ba6-4ebd-9b9f-5ceff9c2adba tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62519) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1825.311413] env[62519]: DEBUG nova.compute.manager [-] [instance: 5da884af-d8d2-409b-99bd-e5370e44e9f0] Deallocating network for instance {{(pid=62519) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2297}} [ 1825.311500] env[62519]: DEBUG nova.network.neutron [-] [instance: 5da884af-d8d2-409b-99bd-e5370e44e9f0] deallocate_for_instance() {{(pid=62519) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1825.321414] env[62519]: INFO nova.scheduler.client.report [None req-149c9fda-49e0-4e6e-bcab-f1c24a98f12b tempest-ServerAddressesTestJSON-575205737 tempest-ServerAddressesTestJSON-575205737-project-member] Deleted allocations for instance 1c4615c7-d145-4529-98bd-1ae3ed51e1b5 [ 1825.345422] env[62519]: DEBUG oslo_concurrency.lockutils [None req-7a436c06-fade-4716-bf01-f6402162aba1 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Lock "8b20b91d-d4e7-4ec2-88ec-76b444a8d8a8" "released" by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" :: held 3.958s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1825.423018] env[62519]: DEBUG oslo_vmware.api [None req-de264d36-6a35-4acb-b918-a8ec7d4ea06e tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Task: {'id': task-1803078, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1825.481944] env[62519]: DEBUG nova.compute.manager [req-b6032167-259f-45fd-b522-20a64975ff13 req-d5c8cab4-5dd5-472f-8f29-73c0812df40e service nova] [instance: 21daee7a-1788-4a1c-969e-65b696702bf2] Received event network-vif-deleted-24a7965e-a68b-4ffc-b5ab-d1bbaf657f40 {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1825.542305] env[62519]: DEBUG nova.compute.manager [None req-fb47b509-8873-40b4-9991-f74edf23e568 tempest-InstanceActionsNegativeTestJSON-1848810980 tempest-InstanceActionsNegativeTestJSON-1848810980-project-member] [instance: fcff790b-d267-4d8c-80d8-ad66cfb66539] Starting instance... {{(pid=62519) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2441}} [ 1825.632752] env[62519]: DEBUG oslo_concurrency.lockutils [None req-68cf325b-36d5-4ace-bd3c-b52572e330d1 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1825.728589] env[62519]: DEBUG oslo_vmware.api [None req-48bc5e60-2f25-476c-81c3-fd5f91814e5e tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] Task: {'id': task-1803073, 'name': CloneVM_Task} progress is 94%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1825.832653] env[62519]: DEBUG oslo_concurrency.lockutils [None req-149c9fda-49e0-4e6e-bcab-f1c24a98f12b tempest-ServerAddressesTestJSON-575205737 tempest-ServerAddressesTestJSON-575205737-project-member] Lock "1c4615c7-d145-4529-98bd-1ae3ed51e1b5" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.791s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1825.927023] env[62519]: DEBUG oslo_vmware.api [None req-de264d36-6a35-4acb-b918-a8ec7d4ea06e tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Task: {'id': task-1803078, 'name': ReconfigVM_Task, 'duration_secs': 0.665473} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1825.929408] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-de264d36-6a35-4acb-b918-a8ec7d4ea06e tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] [instance: 4a9656fd-2b9f-4dd6-8b71-39e55813f2f6] Reconfigured VM instance instance-00000060 to attach disk [datastore1] 4a9656fd-2b9f-4dd6-8b71-39e55813f2f6/4a9656fd-2b9f-4dd6-8b71-39e55813f2f6.vmdk or device None with type sparse {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1825.929408] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-44666001-32ca-4464-af9e-583ea56841cc {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1825.938974] env[62519]: DEBUG oslo_vmware.api [None req-de264d36-6a35-4acb-b918-a8ec7d4ea06e tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Waiting for the task: (returnval){ [ 1825.938974] env[62519]: value = "task-1803079" [ 1825.938974] env[62519]: _type = "Task" [ 1825.938974] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1825.951811] env[62519]: DEBUG oslo_vmware.api [None req-de264d36-6a35-4acb-b918-a8ec7d4ea06e tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Task: {'id': task-1803079, 'name': Rename_Task} progress is 5%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1826.068528] env[62519]: DEBUG oslo_concurrency.lockutils [None req-fb47b509-8873-40b4-9991-f74edf23e568 tempest-InstanceActionsNegativeTestJSON-1848810980 tempest-InstanceActionsNegativeTestJSON-1848810980-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1826.201204] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e4a6db5-2f90-45d0-a94d-76fa55b4913a {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1826.211522] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89936acb-7d67-4cf3-b5b9-120d8384041e {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1826.250280] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a07bc675-f06a-48c0-925e-66ee6128f24a {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1826.256352] env[62519]: DEBUG oslo_vmware.api [None req-48bc5e60-2f25-476c-81c3-fd5f91814e5e tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] Task: {'id': task-1803073, 'name': CloneVM_Task, 'duration_secs': 2.405058} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1826.257084] env[62519]: INFO nova.virt.vmwareapi.vmops [None req-48bc5e60-2f25-476c-81c3-fd5f91814e5e tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] [instance: 71c6bd0c-40da-44eb-8f37-4f1b7fb0f0a2] Created linked-clone VM from snapshot [ 1826.257860] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05d1587d-a7ba-4285-a3f6-1dd01ce211c5 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1826.264286] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ffc241d-b982-4256-89c7-6c11d5d4895b {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1826.272409] env[62519]: DEBUG nova.virt.vmwareapi.images [None req-48bc5e60-2f25-476c-81c3-fd5f91814e5e tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] [instance: 71c6bd0c-40da-44eb-8f37-4f1b7fb0f0a2] Uploading image ed4e5c3e-d588-4416-a32b-8126c93c9db9 {{(pid=62519) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1826.285820] env[62519]: DEBUG nova.compute.provider_tree [None req-8f79086b-5bde-403c-9e87-ab8a69033e69 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Inventory has not changed in ProviderTree for provider: f8ca0d98-9158-4b85-ae0e-b106f966dd44 {{(pid=62519) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1826.291022] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-48bc5e60-2f25-476c-81c3-fd5f91814e5e tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] [instance: 71c6bd0c-40da-44eb-8f37-4f1b7fb0f0a2] Destroying the VM {{(pid=62519) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1826.291022] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-eac6575d-fc7e-4128-a933-85033c5f44a5 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1826.297578] env[62519]: DEBUG oslo_vmware.api [None req-48bc5e60-2f25-476c-81c3-fd5f91814e5e tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] Waiting for the task: (returnval){ [ 1826.297578] env[62519]: value = "task-1803080" [ 1826.297578] env[62519]: _type = "Task" [ 1826.297578] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1826.308666] env[62519]: DEBUG oslo_vmware.api [None req-48bc5e60-2f25-476c-81c3-fd5f91814e5e tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] Task: {'id': task-1803080, 'name': Destroy_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1826.320672] env[62519]: DEBUG oslo_concurrency.lockutils [None req-3672f208-1af3-4e50-89f3-c43121d90870 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Lock "f3665f89-1747-4567-9e56-c937d4ac81da" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 26.262s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1826.418454] env[62519]: DEBUG nova.network.neutron [-] [instance: 5da884af-d8d2-409b-99bd-e5370e44e9f0] Updating instance_info_cache with network_info: [] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1826.452706] env[62519]: DEBUG oslo_vmware.api [None req-de264d36-6a35-4acb-b918-a8ec7d4ea06e tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Task: {'id': task-1803079, 'name': Rename_Task, 'duration_secs': 0.198882} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1826.453009] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-de264d36-6a35-4acb-b918-a8ec7d4ea06e tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] [instance: 4a9656fd-2b9f-4dd6-8b71-39e55813f2f6] Powering on the VM {{(pid=62519) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1826.453318] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-7aa129a5-466f-45d8-9862-fd2ad60830bf {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1826.462460] env[62519]: DEBUG oslo_vmware.api [None req-de264d36-6a35-4acb-b918-a8ec7d4ea06e tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Waiting for the task: (returnval){ [ 1826.462460] env[62519]: value = "task-1803081" [ 1826.462460] env[62519]: _type = "Task" [ 1826.462460] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1826.474735] env[62519]: DEBUG oslo_vmware.api [None req-de264d36-6a35-4acb-b918-a8ec7d4ea06e tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Task: {'id': task-1803081, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1826.789370] env[62519]: DEBUG nova.scheduler.client.report [None req-8f79086b-5bde-403c-9e87-ab8a69033e69 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Inventory has not changed for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 157, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1826.810836] env[62519]: DEBUG oslo_vmware.api [None req-48bc5e60-2f25-476c-81c3-fd5f91814e5e tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] Task: {'id': task-1803080, 'name': Destroy_Task} progress is 33%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1826.923860] env[62519]: INFO nova.compute.manager [-] [instance: 5da884af-d8d2-409b-99bd-e5370e44e9f0] Took 1.61 seconds to deallocate network for instance. [ 1826.974879] env[62519]: DEBUG oslo_vmware.api [None req-de264d36-6a35-4acb-b918-a8ec7d4ea06e tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Task: {'id': task-1803081, 'name': PowerOnVM_Task} progress is 88%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1827.295097] env[62519]: DEBUG oslo_concurrency.lockutils [None req-8f79086b-5bde-403c-9e87-ab8a69033e69 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.993s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1827.298362] env[62519]: DEBUG oslo_concurrency.lockutils [None req-d2806f10-2da5-44e0-8131-a6bdab586889 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 3.560s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1827.298659] env[62519]: DEBUG nova.objects.instance [None req-d2806f10-2da5-44e0-8131-a6bdab586889 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Lazy-loading 'resources' on Instance uuid 88f9351c-253b-49dd-a88e-b8585ea742ac {{(pid=62519) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1827.311414] env[62519]: DEBUG oslo_vmware.api [None req-48bc5e60-2f25-476c-81c3-fd5f91814e5e tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] Task: {'id': task-1803080, 'name': Destroy_Task} progress is 100%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1827.324131] env[62519]: INFO nova.scheduler.client.report [None req-8f79086b-5bde-403c-9e87-ab8a69033e69 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Deleted allocations for instance 2bc8f11e-82fc-4acb-945e-15327c133920 [ 1827.431574] env[62519]: DEBUG oslo_concurrency.lockutils [None req-d244d491-3ba6-4ebd-9b9f-5ceff9c2adba tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1827.475429] env[62519]: DEBUG oslo_vmware.api [None req-de264d36-6a35-4acb-b918-a8ec7d4ea06e tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Task: {'id': task-1803081, 'name': PowerOnVM_Task, 'duration_secs': 0.733192} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1827.476716] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-de264d36-6a35-4acb-b918-a8ec7d4ea06e tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] [instance: 4a9656fd-2b9f-4dd6-8b71-39e55813f2f6] Powered on the VM {{(pid=62519) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1827.476716] env[62519]: INFO nova.compute.manager [None req-de264d36-6a35-4acb-b918-a8ec7d4ea06e tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] [instance: 4a9656fd-2b9f-4dd6-8b71-39e55813f2f6] Took 8.90 seconds to spawn the instance on the hypervisor. [ 1827.476716] env[62519]: DEBUG nova.compute.manager [None req-de264d36-6a35-4acb-b918-a8ec7d4ea06e tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] [instance: 4a9656fd-2b9f-4dd6-8b71-39e55813f2f6] Checking state {{(pid=62519) _get_power_state /opt/stack/nova/nova/compute/manager.py:1799}} [ 1827.477667] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71919cd4-e946-49b9-9be4-3fbc3e6e46b4 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1827.524743] env[62519]: DEBUG nova.compute.manager [req-3cafca9e-c022-4699-b29b-7fba8475028f req-e5bbced4-0e10-4b1d-8799-ce6174da5f86 service nova] [instance: 5da884af-d8d2-409b-99bd-e5370e44e9f0] Received event network-vif-deleted-f8e70085-af0c-4731-8b61-cf5cd2460344 {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1827.524978] env[62519]: DEBUG nova.compute.manager [req-3cafca9e-c022-4699-b29b-7fba8475028f req-e5bbced4-0e10-4b1d-8799-ce6174da5f86 service nova] [instance: f3665f89-1747-4567-9e56-c937d4ac81da] Received event network-changed-7220339b-d4a7-441b-8227-c956744ce0c0 {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1827.525037] env[62519]: DEBUG nova.compute.manager [req-3cafca9e-c022-4699-b29b-7fba8475028f req-e5bbced4-0e10-4b1d-8799-ce6174da5f86 service nova] [instance: f3665f89-1747-4567-9e56-c937d4ac81da] Refreshing instance network info cache due to event network-changed-7220339b-d4a7-441b-8227-c956744ce0c0. 
{{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11628}} [ 1827.525502] env[62519]: DEBUG oslo_concurrency.lockutils [req-3cafca9e-c022-4699-b29b-7fba8475028f req-e5bbced4-0e10-4b1d-8799-ce6174da5f86 service nova] Acquiring lock "refresh_cache-f3665f89-1747-4567-9e56-c937d4ac81da" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1827.526071] env[62519]: DEBUG oslo_concurrency.lockutils [req-3cafca9e-c022-4699-b29b-7fba8475028f req-e5bbced4-0e10-4b1d-8799-ce6174da5f86 service nova] Acquired lock "refresh_cache-f3665f89-1747-4567-9e56-c937d4ac81da" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1827.526071] env[62519]: DEBUG nova.network.neutron [req-3cafca9e-c022-4699-b29b-7fba8475028f req-e5bbced4-0e10-4b1d-8799-ce6174da5f86 service nova] [instance: f3665f89-1747-4567-9e56-c937d4ac81da] Refreshing network info cache for port 7220339b-d4a7-441b-8227-c956744ce0c0 {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1827.817160] env[62519]: DEBUG oslo_vmware.api [None req-48bc5e60-2f25-476c-81c3-fd5f91814e5e tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] Task: {'id': task-1803080, 'name': Destroy_Task} progress is 100%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1827.834490] env[62519]: DEBUG oslo_concurrency.lockutils [None req-8f79086b-5bde-403c-9e87-ab8a69033e69 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Lock "2bc8f11e-82fc-4acb-945e-15327c133920" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 8.633s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1828.000176] env[62519]: INFO nova.compute.manager [None req-de264d36-6a35-4acb-b918-a8ec7d4ea06e tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] [instance: 4a9656fd-2b9f-4dd6-8b71-39e55813f2f6] Took 14.15 seconds to build instance. 
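The interleaved "Task: {...} progress is N%" entries above come from oslo_vmware's task polling: each vCenter call (CopyVirtualDisk_Task, ExtendVirtualDisk_Task, ReconfigVM_Task, PowerOnVM_Task) returns a task handle that wait_for_task polls until the task reports success or error. Below is a minimal, self-contained sketch of that polling pattern, not the oslo_vmware implementation; `get_task_info` is a hypothetical callable standing in for a vCenter TaskInfo lookup.

```python
# Sketch only: the shape of the polling loop behind the
# "Task: {...} progress is N%" DEBUG lines above.
import time


class TaskFailed(Exception):
    """Raised when the remote task reports an error state."""


def wait_for_task(get_task_info, task_id, poll_interval=0.5):
    """Poll a vCenter-style task until it succeeds or fails.

    `get_task_info(task_id)` is assumed (hypothetically) to return a dict
    such as {'state': 'running', 'progress': 42} or
    {'state': 'success', 'result': ...}.
    """
    while True:
        info = get_task_info(task_id)
        state = info.get('state')
        if state == 'success':
            return info.get('result')   # e.g. the new VM or disk reference
        if state == 'error':
            raise TaskFailed(info.get('error', 'unknown error'))
        # Mirrors the "... progress is N%" entries in the log.
        print("Task: %s progress is %s%%" % (task_id, info.get('progress', 0)))
        time.sleep(poll_interval)
```

In the real session code this loop is driven on a fixed interval by oslo_service's looping-call machinery (the log itself references loopingcall.py), and several greenthreads poll their own tasks concurrently, which is why task-1803073, task-1803077 and task-1803078 interleave their progress lines above.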
[ 1828.103837] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-044460f2-4d4e-409c-8c2a-47065a65539c {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1828.112944] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e99cc62-e8fd-4259-9d38-0dffe895cbaf {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1828.148963] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a8078ab-0762-41e3-bbdd-fb4d7f443bab {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1828.159421] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71401dab-34ee-4f26-af76-a677c934879c {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1828.175866] env[62519]: DEBUG nova.compute.provider_tree [None req-d2806f10-2da5-44e0-8131-a6bdab586889 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Inventory has not changed in ProviderTree for provider: f8ca0d98-9158-4b85-ae0e-b106f966dd44 {{(pid=62519) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1828.311578] env[62519]: DEBUG oslo_vmware.api [None req-48bc5e60-2f25-476c-81c3-fd5f91814e5e tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] Task: {'id': task-1803080, 'name': Destroy_Task, 'duration_secs': 1.76231} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1828.311861] env[62519]: INFO nova.virt.vmwareapi.vm_util [None req-48bc5e60-2f25-476c-81c3-fd5f91814e5e tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] [instance: 71c6bd0c-40da-44eb-8f37-4f1b7fb0f0a2] Destroyed the VM [ 1828.312103] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-48bc5e60-2f25-476c-81c3-fd5f91814e5e tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] [instance: 71c6bd0c-40da-44eb-8f37-4f1b7fb0f0a2] Deleting Snapshot of the VM instance {{(pid=62519) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1828.312556] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-4a87d80c-5dc2-4fb2-bc8f-72a8ebb4a05a {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1828.316198] env[62519]: DEBUG nova.network.neutron [req-3cafca9e-c022-4699-b29b-7fba8475028f req-e5bbced4-0e10-4b1d-8799-ce6174da5f86 service nova] [instance: f3665f89-1747-4567-9e56-c937d4ac81da] Updated VIF entry in instance network info cache for port 7220339b-d4a7-441b-8227-c956744ce0c0. 
{{(pid=62519) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1828.316525] env[62519]: DEBUG nova.network.neutron [req-3cafca9e-c022-4699-b29b-7fba8475028f req-e5bbced4-0e10-4b1d-8799-ce6174da5f86 service nova] [instance: f3665f89-1747-4567-9e56-c937d4ac81da] Updating instance_info_cache with network_info: [{"id": "7220339b-d4a7-441b-8227-c956744ce0c0", "address": "fa:16:3e:0f:ad:ae", "network": {"id": "8d760b11-0e90-40f8-a7a6-509485520bf3", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-619921944-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.175", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "448555031bb64aefafd0fcc67f4df10a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4225eb1f-0af4-4ed4-8e3d-de822eb6d4ea", "external-id": "nsx-vlan-transportzone-40", "segmentation_id": 40, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7220339b-d4", "ovs_interfaceid": "7220339b-d4a7-441b-8227-c956744ce0c0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1828.321073] env[62519]: DEBUG oslo_vmware.api [None req-48bc5e60-2f25-476c-81c3-fd5f91814e5e tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] Waiting for the task: (returnval){ [ 1828.321073] env[62519]: value = "task-1803082" [ 1828.321073] env[62519]: _type = "Task" [ 1828.321073] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1828.330741] env[62519]: DEBUG oslo_vmware.api [None req-48bc5e60-2f25-476c-81c3-fd5f91814e5e tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] Task: {'id': task-1803082, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1828.678793] env[62519]: DEBUG nova.scheduler.client.report [None req-d2806f10-2da5-44e0-8131-a6bdab586889 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Inventory has not changed for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 157, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1828.818961] env[62519]: DEBUG oslo_concurrency.lockutils [req-3cafca9e-c022-4699-b29b-7fba8475028f req-e5bbced4-0e10-4b1d-8799-ce6174da5f86 service nova] Releasing lock "refresh_cache-f3665f89-1747-4567-9e56-c937d4ac81da" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1828.840035] env[62519]: DEBUG oslo_vmware.api [None req-48bc5e60-2f25-476c-81c3-fd5f91814e5e tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] Task: {'id': task-1803082, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1829.155728] env[62519]: DEBUG oslo_concurrency.lockutils [None req-3bb5b2fc-3c16-485c-b3aa-bebff7f3a9a9 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Acquiring lock "eb5de0a4-0af3-4731-ab30-3ae3d72207a7" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1829.155728] env[62519]: DEBUG oslo_concurrency.lockutils [None req-3bb5b2fc-3c16-485c-b3aa-bebff7f3a9a9 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Lock "eb5de0a4-0af3-4731-ab30-3ae3d72207a7" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1829.185211] env[62519]: DEBUG oslo_concurrency.lockutils [None req-d2806f10-2da5-44e0-8131-a6bdab586889 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.888s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1829.187926] env[62519]: DEBUG oslo_concurrency.lockutils [None req-cb53b5a9-1695-40be-9a5a-731401609236 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 4.411s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1829.190031] env[62519]: INFO nova.compute.claims [None req-cb53b5a9-1695-40be-9a5a-731401609236 tempest-ServerDiskConfigTestJSON-1314025576 
tempest-ServerDiskConfigTestJSON-1314025576-project-member] [instance: e143206e-eb12-41b8-9140-229c1533fd80] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1829.212352] env[62519]: INFO nova.scheduler.client.report [None req-d2806f10-2da5-44e0-8131-a6bdab586889 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Deleted allocations for instance 88f9351c-253b-49dd-a88e-b8585ea742ac [ 1829.335994] env[62519]: DEBUG oslo_vmware.api [None req-48bc5e60-2f25-476c-81c3-fd5f91814e5e tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] Task: {'id': task-1803082, 'name': RemoveSnapshot_Task} progress is 80%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1829.509539] env[62519]: DEBUG oslo_concurrency.lockutils [None req-de264d36-6a35-4acb-b918-a8ec7d4ea06e tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Lock "4a9656fd-2b9f-4dd6-8b71-39e55813f2f6" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 16.684s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1829.549382] env[62519]: DEBUG nova.compute.manager [req-7ec636cf-89ae-4985-a42a-5db28a3624e4 req-6976ad31-b56c-45eb-a7e4-f272a69ab7c0 service nova] [instance: 4a9656fd-2b9f-4dd6-8b71-39e55813f2f6] Received event network-changed-0f634cea-fa73-4a6c-855a-cee16f97b053 {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1829.549585] env[62519]: DEBUG nova.compute.manager [req-7ec636cf-89ae-4985-a42a-5db28a3624e4 req-6976ad31-b56c-45eb-a7e4-f272a69ab7c0 service nova] [instance: 4a9656fd-2b9f-4dd6-8b71-39e55813f2f6] Refreshing instance network info cache due to event network-changed-0f634cea-fa73-4a6c-855a-cee16f97b053. {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11628}} [ 1829.549800] env[62519]: DEBUG oslo_concurrency.lockutils [req-7ec636cf-89ae-4985-a42a-5db28a3624e4 req-6976ad31-b56c-45eb-a7e4-f272a69ab7c0 service nova] Acquiring lock "refresh_cache-4a9656fd-2b9f-4dd6-8b71-39e55813f2f6" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1829.549940] env[62519]: DEBUG oslo_concurrency.lockutils [req-7ec636cf-89ae-4985-a42a-5db28a3624e4 req-6976ad31-b56c-45eb-a7e4-f272a69ab7c0 service nova] Acquired lock "refresh_cache-4a9656fd-2b9f-4dd6-8b71-39e55813f2f6" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1829.550116] env[62519]: DEBUG nova.network.neutron [req-7ec636cf-89ae-4985-a42a-5db28a3624e4 req-6976ad31-b56c-45eb-a7e4-f272a69ab7c0 service nova] [instance: 4a9656fd-2b9f-4dd6-8b71-39e55813f2f6] Refreshing network info cache for port 0f634cea-fa73-4a6c-855a-cee16f97b053 {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1829.658686] env[62519]: DEBUG nova.compute.manager [None req-3bb5b2fc-3c16-485c-b3aa-bebff7f3a9a9 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] [instance: eb5de0a4-0af3-4731-ab30-3ae3d72207a7] Starting instance... 
{{(pid=62519) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2441}} [ 1829.723612] env[62519]: DEBUG oslo_concurrency.lockutils [None req-d2806f10-2da5-44e0-8131-a6bdab586889 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Lock "88f9351c-253b-49dd-a88e-b8585ea742ac" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 10.465s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1829.836348] env[62519]: DEBUG oslo_vmware.api [None req-48bc5e60-2f25-476c-81c3-fd5f91814e5e tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] Task: {'id': task-1803082, 'name': RemoveSnapshot_Task} progress is 100%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1830.182768] env[62519]: DEBUG oslo_concurrency.lockutils [None req-3bb5b2fc-3c16-485c-b3aa-bebff7f3a9a9 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1830.290267] env[62519]: DEBUG nova.network.neutron [req-7ec636cf-89ae-4985-a42a-5db28a3624e4 req-6976ad31-b56c-45eb-a7e4-f272a69ab7c0 service nova] [instance: 4a9656fd-2b9f-4dd6-8b71-39e55813f2f6] Updated VIF entry in instance network info cache for port 0f634cea-fa73-4a6c-855a-cee16f97b053. {{(pid=62519) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1830.290694] env[62519]: DEBUG nova.network.neutron [req-7ec636cf-89ae-4985-a42a-5db28a3624e4 req-6976ad31-b56c-45eb-a7e4-f272a69ab7c0 service nova] [instance: 4a9656fd-2b9f-4dd6-8b71-39e55813f2f6] Updating instance_info_cache with network_info: [{"id": "0f634cea-fa73-4a6c-855a-cee16f97b053", "address": "fa:16:3e:51:09:24", "network": {"id": "06bc21fc-5712-4650-9d06-18e9cc6afd29", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1062109402-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.201", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "17cd969b1e7d4bd795748560caf80077", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "89f807d9-140f-4a6f-8bce-96795f9482ee", "external-id": "nsx-vlan-transportzone-762", "segmentation_id": 762, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0f634cea-fa", "ovs_interfaceid": "0f634cea-fa73-4a6c-855a-cee16f97b053", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1830.339011] env[62519]: DEBUG oslo_vmware.api [None req-48bc5e60-2f25-476c-81c3-fd5f91814e5e tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] 
Task: {'id': task-1803082, 'name': RemoveSnapshot_Task, 'duration_secs': 1.530958} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1830.340026] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-48bc5e60-2f25-476c-81c3-fd5f91814e5e tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] [instance: 71c6bd0c-40da-44eb-8f37-4f1b7fb0f0a2] Deleted Snapshot of the VM instance {{(pid=62519) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1830.460469] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5aedceb5-b9ee-4dc3-88a0-90ca4ca7a31b {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1830.469527] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0cedf8b2-5d28-4122-bc6d-ba1b4eb7a565 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1830.501048] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11bb49bd-86b6-42a1-90e6-237d3fd3ed5e {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1830.509910] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa8c4bf2-0fe6-4e36-ae59-d55582c5a58a {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1830.523654] env[62519]: DEBUG nova.compute.provider_tree [None req-cb53b5a9-1695-40be-9a5a-731401609236 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Inventory has not changed in ProviderTree for provider: f8ca0d98-9158-4b85-ae0e-b106f966dd44 {{(pid=62519) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1830.793231] env[62519]: DEBUG oslo_concurrency.lockutils [req-7ec636cf-89ae-4985-a42a-5db28a3624e4 req-6976ad31-b56c-45eb-a7e4-f272a69ab7c0 service nova] Releasing lock "refresh_cache-4a9656fd-2b9f-4dd6-8b71-39e55813f2f6" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1830.846339] env[62519]: WARNING nova.compute.manager [None req-48bc5e60-2f25-476c-81c3-fd5f91814e5e tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] [instance: 71c6bd0c-40da-44eb-8f37-4f1b7fb0f0a2] Image not found during snapshot: nova.exception.ImageNotFound: Image ed4e5c3e-d588-4416-a32b-8126c93c9db9 could not be found. 
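The "Updating instance_info_cache with network_info" entries above carry the full VIF dict for each port (fixed IP, floating IP, OVS interface name, segmentation id). As a quick illustration of how such an entry can be consumed, here is a short sketch; the `summarize_vif` helper and the trimmed sample dict are ours, only the field layout is taken from the log.

```python
# Illustrative only: extract addresses from a network_info entry shaped like
# the ones logged above for ports 0f634cea-... and 7220339b-....
def summarize_vif(vif):
    """Return (port_id, fixed_ips, floating_ips, devname) for one VIF dict."""
    fixed, floating = [], []
    for subnet in vif["network"]["subnets"]:
        for ip in subnet["ips"]:
            fixed.append(ip["address"])
            floating.extend(f["address"] for f in ip.get("floating_ips", []))
    return vif["id"], fixed, floating, vif.get("devname")


# Trimmed sample, field layout copied from the log entry above.
sample_vif = {
    "id": "0f634cea-fa73-4a6c-855a-cee16f97b053",
    "devname": "tap0f634cea-fa",
    "network": {
        "subnets": [
            {
                "cidr": "192.168.128.0/28",
                "ips": [
                    {
                        "address": "192.168.128.6",
                        "floating_ips": [{"address": "10.180.180.201"}],
                    }
                ],
            }
        ]
    },
}

print(summarize_vif(sample_vif))
# ('0f634cea-fa73-4a6c-855a-cee16f97b053', ['192.168.128.6'],
#  ['10.180.180.201'], 'tap0f634cea-fa')
```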
[ 1831.027326] env[62519]: DEBUG nova.scheduler.client.report [None req-cb53b5a9-1695-40be-9a5a-731401609236 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Inventory has not changed for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 157, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1831.532471] env[62519]: DEBUG oslo_concurrency.lockutils [None req-cb53b5a9-1695-40be-9a5a-731401609236 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.344s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1831.532992] env[62519]: DEBUG nova.compute.manager [None req-cb53b5a9-1695-40be-9a5a-731401609236 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] [instance: e143206e-eb12-41b8-9140-229c1533fd80] Start building networks asynchronously for instance. {{(pid=62519) _build_resources /opt/stack/nova/nova/compute/manager.py:2838}} [ 1831.537485] env[62519]: DEBUG oslo_concurrency.lockutils [None req-68cf325b-36d5-4ace-bd3c-b52572e330d1 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 5.905s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1831.537707] env[62519]: DEBUG nova.objects.instance [None req-68cf325b-36d5-4ace-bd3c-b52572e330d1 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Lazy-loading 'resources' on Instance uuid 21daee7a-1788-4a1c-969e-65b696702bf2 {{(pid=62519) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1831.791566] env[62519]: DEBUG oslo_concurrency.lockutils [None req-10486ef6-1fb8-4a31-906c-fdcc5177956e tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] Acquiring lock "71c6bd0c-40da-44eb-8f37-4f1b7fb0f0a2" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1831.791844] env[62519]: DEBUG oslo_concurrency.lockutils [None req-10486ef6-1fb8-4a31-906c-fdcc5177956e tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] Lock "71c6bd0c-40da-44eb-8f37-4f1b7fb0f0a2" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1831.792071] env[62519]: DEBUG oslo_concurrency.lockutils [None req-10486ef6-1fb8-4a31-906c-fdcc5177956e tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] Acquiring lock 
"71c6bd0c-40da-44eb-8f37-4f1b7fb0f0a2-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1831.792262] env[62519]: DEBUG oslo_concurrency.lockutils [None req-10486ef6-1fb8-4a31-906c-fdcc5177956e tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] Lock "71c6bd0c-40da-44eb-8f37-4f1b7fb0f0a2-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1831.792432] env[62519]: DEBUG oslo_concurrency.lockutils [None req-10486ef6-1fb8-4a31-906c-fdcc5177956e tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] Lock "71c6bd0c-40da-44eb-8f37-4f1b7fb0f0a2-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1831.794750] env[62519]: INFO nova.compute.manager [None req-10486ef6-1fb8-4a31-906c-fdcc5177956e tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] [instance: 71c6bd0c-40da-44eb-8f37-4f1b7fb0f0a2] Terminating instance [ 1831.936722] env[62519]: DEBUG oslo_concurrency.lockutils [None req-f642eb67-c251-474f-9741-074de84f9988 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Acquiring lock "646c9dfc-7b78-4cdb-b4f5-480c43af38c4" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1831.937132] env[62519]: DEBUG oslo_concurrency.lockutils [None req-f642eb67-c251-474f-9741-074de84f9988 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Lock "646c9dfc-7b78-4cdb-b4f5-480c43af38c4" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1832.041166] env[62519]: DEBUG nova.compute.utils [None req-cb53b5a9-1695-40be-9a5a-731401609236 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Using /dev/sd instead of None {{(pid=62519) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1832.045183] env[62519]: DEBUG nova.compute.manager [None req-cb53b5a9-1695-40be-9a5a-731401609236 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] [instance: e143206e-eb12-41b8-9140-229c1533fd80] Allocating IP information in the background. 
{{(pid=62519) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1989}} [ 1832.045356] env[62519]: DEBUG nova.network.neutron [None req-cb53b5a9-1695-40be-9a5a-731401609236 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] [instance: e143206e-eb12-41b8-9140-229c1533fd80] allocate_for_instance() {{(pid=62519) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1832.084303] env[62519]: DEBUG nova.policy [None req-cb53b5a9-1695-40be-9a5a-731401609236 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '12d88579b24941a0be744afe44126360', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'df6727c290724a8ebef5188c77e91399', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62519) authorize /opt/stack/nova/nova/policy.py:192}} [ 1832.298083] env[62519]: DEBUG nova.compute.manager [None req-10486ef6-1fb8-4a31-906c-fdcc5177956e tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] [instance: 71c6bd0c-40da-44eb-8f37-4f1b7fb0f0a2] Start destroying the instance on the hypervisor. {{(pid=62519) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3166}} [ 1832.298303] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-10486ef6-1fb8-4a31-906c-fdcc5177956e tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] [instance: 71c6bd0c-40da-44eb-8f37-4f1b7fb0f0a2] Destroying instance {{(pid=62519) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1832.299840] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bba1c185-b8fa-439b-9c2e-ee250203281c {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1832.307233] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-10486ef6-1fb8-4a31-906c-fdcc5177956e tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] [instance: 71c6bd0c-40da-44eb-8f37-4f1b7fb0f0a2] Powering off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1832.307487] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-fcba3e05-58e9-4004-8b32-26e8281bf470 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1832.310627] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe7f41cc-b1c6-403d-944c-7cb05b977940 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1832.314454] env[62519]: DEBUG oslo_vmware.api [None req-10486ef6-1fb8-4a31-906c-fdcc5177956e tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] Waiting for the task: (returnval){ [ 1832.314454] env[62519]: value = "task-1803083" [ 1832.314454] env[62519]: _type = "Task" [ 1832.314454] env[62519]: } to complete. 
{{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1832.320892] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9671eb3e-d2cd-467e-8f9b-65b7a22152f4 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1832.328812] env[62519]: DEBUG oslo_vmware.api [None req-10486ef6-1fb8-4a31-906c-fdcc5177956e tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] Task: {'id': task-1803083, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1832.363407] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d691103a-5738-431c-8844-67825e365df8 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1832.372100] env[62519]: DEBUG nova.network.neutron [None req-cb53b5a9-1695-40be-9a5a-731401609236 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] [instance: e143206e-eb12-41b8-9140-229c1533fd80] Successfully created port: 95141eba-5eca-4c6b-9080-7a7b8921dbb7 {{(pid=62519) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1832.375506] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7a3efc3-f060-4d7f-bf50-cf746fd0c757 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1832.391908] env[62519]: DEBUG nova.compute.provider_tree [None req-68cf325b-36d5-4ace-bd3c-b52572e330d1 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Inventory has not changed in ProviderTree for provider: f8ca0d98-9158-4b85-ae0e-b106f966dd44 {{(pid=62519) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1832.439194] env[62519]: DEBUG nova.compute.manager [None req-f642eb67-c251-474f-9741-074de84f9988 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] [instance: 646c9dfc-7b78-4cdb-b4f5-480c43af38c4] Starting instance... {{(pid=62519) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2441}} [ 1832.546448] env[62519]: DEBUG nova.compute.manager [None req-cb53b5a9-1695-40be-9a5a-731401609236 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] [instance: e143206e-eb12-41b8-9140-229c1533fd80] Start building block device mappings for instance. 
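The wait_for_task / _poll_task entries show the driver polling a vCenter task handle until it reports completion and then recording its duration_secs. Below is a simplified, self-contained stand-in for that polling loop; FakeTask and wait_for_task here are hypothetical names, not the oslo.vmware implementation.

import time

class FakeTask:
    """Stand-in for a vCenter task; real tasks report progress/state over the SOAP API."""
    def __init__(self, task_id, steps=3):
        self.id, self._steps, self._polls = task_id, steps, 0

    def poll(self):
        self._polls += 1
        progress = min(100, int(100 * self._polls / self._steps))
        state = "success" if progress == 100 else "running"
        return {"id": self.id, "progress": progress, "state": state}

def wait_for_task(task, interval=0.1):
    """Poll until the task reports success, like the _poll_task lines in the log."""
    start = time.monotonic()
    while True:
        info = task.poll()
        print(f"Task: {info['id']} progress is {info['progress']}%.")
        if info["state"] == "success":
            return time.monotonic() - start  # roughly the 'duration_secs' the log records
        time.sleep(interval)

elapsed = wait_for_task(FakeTask("task-1803083"))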
{{(pid=62519) _build_resources /opt/stack/nova/nova/compute/manager.py:2873}} [ 1832.691516] env[62519]: DEBUG nova.compute.manager [None req-ab5aae9d-41b8-4316-b726-ea28056af8d2 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] [instance: 4a0f7975-5a07-4593-ae71-cabebdefe0fe] Checking state {{(pid=62519) _get_power_state /opt/stack/nova/nova/compute/manager.py:1799}} [ 1832.692529] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1896029-7eb4-4f13-be1f-24a86969d039 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1832.826084] env[62519]: DEBUG oslo_vmware.api [None req-10486ef6-1fb8-4a31-906c-fdcc5177956e tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] Task: {'id': task-1803083, 'name': PowerOffVM_Task, 'duration_secs': 0.203469} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1832.826390] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-10486ef6-1fb8-4a31-906c-fdcc5177956e tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] [instance: 71c6bd0c-40da-44eb-8f37-4f1b7fb0f0a2] Powered off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1832.826561] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-10486ef6-1fb8-4a31-906c-fdcc5177956e tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] [instance: 71c6bd0c-40da-44eb-8f37-4f1b7fb0f0a2] Unregistering the VM {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1832.826816] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-6e42718c-709b-4628-80b8-5d8e05794234 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1832.895366] env[62519]: DEBUG nova.scheduler.client.report [None req-68cf325b-36d5-4ace-bd3c-b52572e330d1 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Inventory has not changed for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 157, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1832.912046] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-10486ef6-1fb8-4a31-906c-fdcc5177956e tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] [instance: 71c6bd0c-40da-44eb-8f37-4f1b7fb0f0a2] Unregistered the VM {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1832.912281] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-10486ef6-1fb8-4a31-906c-fdcc5177956e tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] [instance: 71c6bd0c-40da-44eb-8f37-4f1b7fb0f0a2] Deleting contents of the VM 
from datastore datastore1 {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1832.912453] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-10486ef6-1fb8-4a31-906c-fdcc5177956e tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] Deleting the datastore file [datastore1] 71c6bd0c-40da-44eb-8f37-4f1b7fb0f0a2 {{(pid=62519) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1832.912993] env[62519]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-bc52c5d2-edf8-4c12-b1dc-52518bf64256 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1832.919156] env[62519]: DEBUG oslo_vmware.api [None req-10486ef6-1fb8-4a31-906c-fdcc5177956e tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] Waiting for the task: (returnval){ [ 1832.919156] env[62519]: value = "task-1803085" [ 1832.919156] env[62519]: _type = "Task" [ 1832.919156] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1832.927322] env[62519]: DEBUG oslo_vmware.api [None req-10486ef6-1fb8-4a31-906c-fdcc5177956e tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] Task: {'id': task-1803085, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1832.964481] env[62519]: DEBUG oslo_concurrency.lockutils [None req-f642eb67-c251-474f-9741-074de84f9988 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1833.204024] env[62519]: INFO nova.compute.manager [None req-ab5aae9d-41b8-4316-b726-ea28056af8d2 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] [instance: 4a0f7975-5a07-4593-ae71-cabebdefe0fe] instance snapshotting [ 1833.204398] env[62519]: DEBUG nova.objects.instance [None req-ab5aae9d-41b8-4316-b726-ea28056af8d2 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Lazy-loading 'flavor' on Instance uuid 4a0f7975-5a07-4593-ae71-cabebdefe0fe {{(pid=62519) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1833.400801] env[62519]: DEBUG oslo_concurrency.lockutils [None req-68cf325b-36d5-4ace-bd3c-b52572e330d1 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.863s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1833.403016] env[62519]: DEBUG oslo_concurrency.lockutils [None req-fb47b509-8873-40b4-9991-f74edf23e568 tempest-InstanceActionsNegativeTestJSON-1848810980 tempest-InstanceActionsNegativeTestJSON-1848810980-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 7.335s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 
1833.404502] env[62519]: INFO nova.compute.claims [None req-fb47b509-8873-40b4-9991-f74edf23e568 tempest-InstanceActionsNegativeTestJSON-1848810980 tempest-InstanceActionsNegativeTestJSON-1848810980-project-member] [instance: fcff790b-d267-4d8c-80d8-ad66cfb66539] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1833.419403] env[62519]: INFO nova.scheduler.client.report [None req-68cf325b-36d5-4ace-bd3c-b52572e330d1 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Deleted allocations for instance 21daee7a-1788-4a1c-969e-65b696702bf2 [ 1833.434273] env[62519]: DEBUG oslo_vmware.api [None req-10486ef6-1fb8-4a31-906c-fdcc5177956e tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] Task: {'id': task-1803085, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.458707} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1833.434561] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-10486ef6-1fb8-4a31-906c-fdcc5177956e tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] Deleted the datastore file {{(pid=62519) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1833.434755] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-10486ef6-1fb8-4a31-906c-fdcc5177956e tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] [instance: 71c6bd0c-40da-44eb-8f37-4f1b7fb0f0a2] Deleted contents of the VM from datastore datastore1 {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1833.434956] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-10486ef6-1fb8-4a31-906c-fdcc5177956e tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] [instance: 71c6bd0c-40da-44eb-8f37-4f1b7fb0f0a2] Instance destroyed {{(pid=62519) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1833.435165] env[62519]: INFO nova.compute.manager [None req-10486ef6-1fb8-4a31-906c-fdcc5177956e tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] [instance: 71c6bd0c-40da-44eb-8f37-4f1b7fb0f0a2] Took 1.14 seconds to destroy the instance on the hypervisor. [ 1833.436818] env[62519]: DEBUG oslo.service.loopingcall [None req-10486ef6-1fb8-4a31-906c-fdcc5177956e tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
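The oslo.service loopingcall entry above is waiting for _deallocate_network_with_retries to return, i.e. network teardown is wrapped in a retry loop. A rough stdlib sketch of that retry-with-interval shape, under the assumption of a fixed retry budget (the real code's backoff and exception filtering differ):

import time

def call_with_retries(fn, attempts=3, interval=0.3):
    """Keep invoking fn until it succeeds or attempts run out, sleeping between tries.

    A stdlib stand-in for the 'Waiting for function ... _deallocate_network_with_retries
    to return' pattern in the log; the real code uses oslo.service looping calls.
    """
    for attempt in range(1, attempts + 1):
        try:
            return fn()
        except Exception as exc:  # the real code narrows this to transient network errors
            if attempt == attempts:
                raise
            print(f"attempt {attempt} failed ({exc}); retrying in {interval}s")
            time.sleep(interval)

# Hypothetical usage: deallocate ports for an instance, retrying transient failures.
call_with_retries(lambda: print("deallocate_for_instance()"))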
{{(pid=62519) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1833.436818] env[62519]: DEBUG nova.compute.manager [-] [instance: 71c6bd0c-40da-44eb-8f37-4f1b7fb0f0a2] Deallocating network for instance {{(pid=62519) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2297}} [ 1833.436818] env[62519]: DEBUG nova.network.neutron [-] [instance: 71c6bd0c-40da-44eb-8f37-4f1b7fb0f0a2] deallocate_for_instance() {{(pid=62519) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1833.561883] env[62519]: DEBUG nova.compute.manager [None req-cb53b5a9-1695-40be-9a5a-731401609236 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] [instance: e143206e-eb12-41b8-9140-229c1533fd80] Start spawning the instance on the hypervisor. {{(pid=62519) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2647}} [ 1833.583648] env[62519]: DEBUG nova.virt.hardware [None req-cb53b5a9-1695-40be-9a5a-731401609236 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T07:56:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T07:55:55Z,direct_url=,disk_format='vmdk',id=15793716-f1d9-4a86-9030-717adf498693,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4069337684d348e0a1ab4eb6a1a2b14d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T07:55:56Z,virtual_size=,visibility=), allow threads: False {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1833.583889] env[62519]: DEBUG nova.virt.hardware [None req-cb53b5a9-1695-40be-9a5a-731401609236 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Flavor limits 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1833.584053] env[62519]: DEBUG nova.virt.hardware [None req-cb53b5a9-1695-40be-9a5a-731401609236 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Image limits 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1833.584236] env[62519]: DEBUG nova.virt.hardware [None req-cb53b5a9-1695-40be-9a5a-731401609236 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Flavor pref 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1833.584381] env[62519]: DEBUG nova.virt.hardware [None req-cb53b5a9-1695-40be-9a5a-731401609236 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Image pref 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1833.584721] env[62519]: DEBUG nova.virt.hardware [None req-cb53b5a9-1695-40be-9a5a-731401609236 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 
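The nova.virt.hardware lines compute CPU topologies for the 1-vCPU m1.nano flavor: with no flavor or image limits the maxima default to 65536, and the only topology whose sockets*cores*threads product equals the vCPU count is 1:1:1. A small illustrative enumeration of that search (not the real hardware.py code):

from itertools import product

def possible_topologies(vcpus, max_sockets, max_cores, max_threads):
    """Enumerate (sockets, cores, threads) triples whose product equals vcpus and that
    stay within the given maxima, a simplified version of the topology search the log
    shows for the 1-vCPU flavor."""
    return [
        (s, c, t)
        for s, c, t in product(range(1, max_sockets + 1),
                               range(1, max_cores + 1),
                               range(1, max_threads + 1))
        if s * c * t == vcpus
    ]

# With vcpus=1 the only result is (1, 1, 1); smaller maxima keep the demo fast.
print(possible_topologies(1, 8, 8, 2))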
{{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1833.584721] env[62519]: DEBUG nova.virt.hardware [None req-cb53b5a9-1695-40be-9a5a-731401609236 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1833.584871] env[62519]: DEBUG nova.virt.hardware [None req-cb53b5a9-1695-40be-9a5a-731401609236 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62519) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1833.585097] env[62519]: DEBUG nova.virt.hardware [None req-cb53b5a9-1695-40be-9a5a-731401609236 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Got 1 possible topologies {{(pid=62519) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1833.585939] env[62519]: DEBUG nova.virt.hardware [None req-cb53b5a9-1695-40be-9a5a-731401609236 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1833.585939] env[62519]: DEBUG nova.virt.hardware [None req-cb53b5a9-1695-40be-9a5a-731401609236 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1833.586479] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-557e8a13-840b-442f-a2be-8de90931b556 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1833.596238] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cbfdf566-e571-4798-9398-a8570b3eba51 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1833.711724] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6bd8a72-1996-4bd8-b506-98595d07e82a {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1833.732998] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bfe4cd4e-6a22-4a4b-a5e6-ebc5f0045e0b {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1833.747143] env[62519]: DEBUG nova.compute.manager [req-17c1122e-5de8-42a8-80fa-e90cb35a05fe req-2b8e06c4-e5fc-4d34-a097-31fba2519fe1 service nova] [instance: 71c6bd0c-40da-44eb-8f37-4f1b7fb0f0a2] Received event network-vif-deleted-59a0726b-5696-46b1-88ef-7b165b586888 {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1833.747334] env[62519]: INFO nova.compute.manager [req-17c1122e-5de8-42a8-80fa-e90cb35a05fe req-2b8e06c4-e5fc-4d34-a097-31fba2519fe1 service nova] [instance: 71c6bd0c-40da-44eb-8f37-4f1b7fb0f0a2] Neutron deleted interface 
59a0726b-5696-46b1-88ef-7b165b586888; detaching it from the instance and deleting it from the info cache [ 1833.747497] env[62519]: DEBUG nova.network.neutron [req-17c1122e-5de8-42a8-80fa-e90cb35a05fe req-2b8e06c4-e5fc-4d34-a097-31fba2519fe1 service nova] [instance: 71c6bd0c-40da-44eb-8f37-4f1b7fb0f0a2] Updating instance_info_cache with network_info: [] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1833.931825] env[62519]: DEBUG oslo_concurrency.lockutils [None req-68cf325b-36d5-4ace-bd3c-b52572e330d1 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Lock "21daee7a-1788-4a1c-969e-65b696702bf2" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 12.416s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1833.937647] env[62519]: DEBUG nova.network.neutron [None req-cb53b5a9-1695-40be-9a5a-731401609236 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] [instance: e143206e-eb12-41b8-9140-229c1533fd80] Successfully updated port: 95141eba-5eca-4c6b-9080-7a7b8921dbb7 {{(pid=62519) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1834.062026] env[62519]: DEBUG oslo_concurrency.lockutils [None req-38ddf53a-539a-4d8d-957a-9eb72ff6a4fd tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Acquiring lock "358ee402-1112-4eea-a518-a45a6bf92c31" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1834.062382] env[62519]: DEBUG oslo_concurrency.lockutils [None req-38ddf53a-539a-4d8d-957a-9eb72ff6a4fd tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Lock "358ee402-1112-4eea-a518-a45a6bf92c31" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1834.062494] env[62519]: DEBUG oslo_concurrency.lockutils [None req-38ddf53a-539a-4d8d-957a-9eb72ff6a4fd tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Acquiring lock "358ee402-1112-4eea-a518-a45a6bf92c31-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1834.062735] env[62519]: DEBUG oslo_concurrency.lockutils [None req-38ddf53a-539a-4d8d-957a-9eb72ff6a4fd tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Lock "358ee402-1112-4eea-a518-a45a6bf92c31-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1834.062957] env[62519]: DEBUG oslo_concurrency.lockutils [None req-38ddf53a-539a-4d8d-957a-9eb72ff6a4fd tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Lock "358ee402-1112-4eea-a518-a45a6bf92c31-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1834.065213] 
env[62519]: INFO nova.compute.manager [None req-38ddf53a-539a-4d8d-957a-9eb72ff6a4fd tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] [instance: 358ee402-1112-4eea-a518-a45a6bf92c31] Terminating instance [ 1834.236325] env[62519]: DEBUG nova.network.neutron [-] [instance: 71c6bd0c-40da-44eb-8f37-4f1b7fb0f0a2] Updating instance_info_cache with network_info: [] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1834.243957] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-ab5aae9d-41b8-4316-b726-ea28056af8d2 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] [instance: 4a0f7975-5a07-4593-ae71-cabebdefe0fe] Creating Snapshot of the VM instance {{(pid=62519) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1834.244375] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-05b00be7-eaf7-4051-9050-f677f3649607 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1834.250131] env[62519]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-0612d0b4-25b0-456f-93c0-a4afe701ab6a {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1834.262089] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d3190e1-f771-485e-a481-50d2eb3ebc90 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1834.272775] env[62519]: DEBUG oslo_vmware.api [None req-ab5aae9d-41b8-4316-b726-ea28056af8d2 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Waiting for the task: (returnval){ [ 1834.272775] env[62519]: value = "task-1803086" [ 1834.272775] env[62519]: _type = "Task" [ 1834.272775] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1834.282547] env[62519]: DEBUG oslo_vmware.api [None req-ab5aae9d-41b8-4316-b726-ea28056af8d2 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Task: {'id': task-1803086, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1834.297176] env[62519]: DEBUG nova.compute.manager [req-17c1122e-5de8-42a8-80fa-e90cb35a05fe req-2b8e06c4-e5fc-4d34-a097-31fba2519fe1 service nova] [instance: 71c6bd0c-40da-44eb-8f37-4f1b7fb0f0a2] Detach interface failed, port_id=59a0726b-5696-46b1-88ef-7b165b586888, reason: Instance 71c6bd0c-40da-44eb-8f37-4f1b7fb0f0a2 could not be found. 
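The req-17c1122e event handler receives network-vif-deleted for the port after the instance has already been destroyed, so the detach is reported as failed with "could not be found" rather than raised. A toy handler showing that tolerant behaviour (handle_vif_deleted and the instances dict are hypothetical):

def handle_vif_deleted(instances, instance_uuid, port_id):
    """Illustrative handler for a 'network-vif-deleted' event: drop the port from the
    cached network info, and log rather than raise when the instance is already gone,
    which is the outcome recorded in the log above."""
    info = instances.get(instance_uuid)
    if info is None:
        print(f"Detach interface failed, port_id={port_id}, "
              f"reason: Instance {instance_uuid} could not be found.")
        return
    info["network_info"] = [vif for vif in info["network_info"] if vif["id"] != port_id]

# The event arrived after the instance was destroyed, so the cache is empty:
handle_vif_deleted({}, "71c6bd0c-40da-44eb-8f37-4f1b7fb0f0a2",
                   "59a0726b-5696-46b1-88ef-7b165b586888")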
{{(pid=62519) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11455}} [ 1834.439812] env[62519]: DEBUG oslo_concurrency.lockutils [None req-cb53b5a9-1695-40be-9a5a-731401609236 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Acquiring lock "refresh_cache-e143206e-eb12-41b8-9140-229c1533fd80" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1834.439976] env[62519]: DEBUG oslo_concurrency.lockutils [None req-cb53b5a9-1695-40be-9a5a-731401609236 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Acquired lock "refresh_cache-e143206e-eb12-41b8-9140-229c1533fd80" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1834.440134] env[62519]: DEBUG nova.network.neutron [None req-cb53b5a9-1695-40be-9a5a-731401609236 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] [instance: e143206e-eb12-41b8-9140-229c1533fd80] Building network info cache for instance {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1834.571015] env[62519]: DEBUG nova.compute.manager [None req-38ddf53a-539a-4d8d-957a-9eb72ff6a4fd tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] [instance: 358ee402-1112-4eea-a518-a45a6bf92c31] Start destroying the instance on the hypervisor. {{(pid=62519) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3166}} [ 1834.571015] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-38ddf53a-539a-4d8d-957a-9eb72ff6a4fd tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] [instance: 358ee402-1112-4eea-a518-a45a6bf92c31] Destroying instance {{(pid=62519) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1834.572366] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7d1d353-acae-4c31-af0c-c7862662f29f {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1834.581434] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-38ddf53a-539a-4d8d-957a-9eb72ff6a4fd tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] [instance: 358ee402-1112-4eea-a518-a45a6bf92c31] Powering off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1834.581656] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-cf43348a-4500-4754-812d-c69b12e1104b {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1834.594182] env[62519]: DEBUG oslo_vmware.api [None req-38ddf53a-539a-4d8d-957a-9eb72ff6a4fd tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Waiting for the task: (returnval){ [ 1834.594182] env[62519]: value = "task-1803087" [ 1834.594182] env[62519]: _type = "Task" [ 1834.594182] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1834.606741] env[62519]: DEBUG oslo_vmware.api [None req-38ddf53a-539a-4d8d-957a-9eb72ff6a4fd tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Task: {'id': task-1803087, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1834.676691] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1fb30a4-1ca4-45dc-938a-8472898db1f9 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1834.685184] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97ab9f2c-7b36-489e-b8a4-d9dad05a646b {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1834.721717] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-754dcd10-e8af-4bc8-9f1e-debdb95478c8 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1834.731729] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-999d9e7f-3606-4804-ad6d-4b9b7f72da11 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1834.746439] env[62519]: INFO nova.compute.manager [-] [instance: 71c6bd0c-40da-44eb-8f37-4f1b7fb0f0a2] Took 1.31 seconds to deallocate network for instance. [ 1834.747154] env[62519]: DEBUG nova.compute.provider_tree [None req-fb47b509-8873-40b4-9991-f74edf23e568 tempest-InstanceActionsNegativeTestJSON-1848810980 tempest-InstanceActionsNegativeTestJSON-1848810980-project-member] Inventory has not changed in ProviderTree for provider: f8ca0d98-9158-4b85-ae0e-b106f966dd44 {{(pid=62519) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1834.784303] env[62519]: DEBUG oslo_vmware.api [None req-ab5aae9d-41b8-4316-b726-ea28056af8d2 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Task: {'id': task-1803086, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1834.970982] env[62519]: DEBUG nova.network.neutron [None req-cb53b5a9-1695-40be-9a5a-731401609236 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] [instance: e143206e-eb12-41b8-9140-229c1533fd80] Instance cache missing network info. {{(pid=62519) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1835.104534] env[62519]: DEBUG oslo_vmware.api [None req-38ddf53a-539a-4d8d-957a-9eb72ff6a4fd tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Task: {'id': task-1803087, 'name': PowerOffVM_Task, 'duration_secs': 0.226155} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1835.104859] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-38ddf53a-539a-4d8d-957a-9eb72ff6a4fd tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] [instance: 358ee402-1112-4eea-a518-a45a6bf92c31] Powered off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1835.104993] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-38ddf53a-539a-4d8d-957a-9eb72ff6a4fd tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] [instance: 358ee402-1112-4eea-a518-a45a6bf92c31] Unregistering the VM {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1835.105260] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-019ef7fd-84aa-42dd-af8f-fe63e7bdcb00 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1835.107995] env[62519]: DEBUG nova.network.neutron [None req-cb53b5a9-1695-40be-9a5a-731401609236 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] [instance: e143206e-eb12-41b8-9140-229c1533fd80] Updating instance_info_cache with network_info: [{"id": "95141eba-5eca-4c6b-9080-7a7b8921dbb7", "address": "fa:16:3e:bd:c2:2f", "network": {"id": "14e63db8-c42b-42e5-bd96-cab60c163758", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1328688204-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "df6727c290724a8ebef5188c77e91399", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2be3fdb5-359e-43bd-8c20-2ff00e81db55", "external-id": "nsx-vlan-transportzone-986", "segmentation_id": 986, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap95141eba-5e", "ovs_interfaceid": "95141eba-5eca-4c6b-9080-7a7b8921dbb7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1835.187040] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-38ddf53a-539a-4d8d-957a-9eb72ff6a4fd tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] [instance: 358ee402-1112-4eea-a518-a45a6bf92c31] Unregistered the VM {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1835.187040] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-38ddf53a-539a-4d8d-957a-9eb72ff6a4fd tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] [instance: 358ee402-1112-4eea-a518-a45a6bf92c31] Deleting contents of the VM from datastore datastore1 {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1835.187251] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-38ddf53a-539a-4d8d-957a-9eb72ff6a4fd tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Deleting 
the datastore file [datastore1] 358ee402-1112-4eea-a518-a45a6bf92c31 {{(pid=62519) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1835.187453] env[62519]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-6aa445fa-294a-4e0f-abb5-7de3dc173893 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1835.194837] env[62519]: DEBUG oslo_vmware.api [None req-38ddf53a-539a-4d8d-957a-9eb72ff6a4fd tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Waiting for the task: (returnval){ [ 1835.194837] env[62519]: value = "task-1803089" [ 1835.194837] env[62519]: _type = "Task" [ 1835.194837] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1835.203311] env[62519]: DEBUG oslo_vmware.api [None req-38ddf53a-539a-4d8d-957a-9eb72ff6a4fd tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Task: {'id': task-1803089, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1835.251242] env[62519]: DEBUG nova.scheduler.client.report [None req-fb47b509-8873-40b4-9991-f74edf23e568 tempest-InstanceActionsNegativeTestJSON-1848810980 tempest-InstanceActionsNegativeTestJSON-1848810980-project-member] Inventory has not changed for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 157, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1835.255649] env[62519]: DEBUG oslo_concurrency.lockutils [None req-10486ef6-1fb8-4a31-906c-fdcc5177956e tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1835.285037] env[62519]: DEBUG oslo_vmware.api [None req-ab5aae9d-41b8-4316-b726-ea28056af8d2 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Task: {'id': task-1803086, 'name': CreateSnapshot_Task, 'duration_secs': 0.528126} completed successfully. 
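The inventory dict reported to placement pairs each resource class with a reserved amount and an allocation_ratio; effective schedulable capacity is (total - reserved) * allocation_ratio, so the 48 physical VCPUs advertise as 192 with the 4.0 ratio. A short worked example with the exact figures from the log:

inventory = {
    "VCPU":      {"total": 48,     "reserved": 0,   "allocation_ratio": 4.0},
    "MEMORY_MB": {"total": 196590, "reserved": 512, "allocation_ratio": 1.0},
    "DISK_GB":   {"total": 400,    "reserved": 0,   "allocation_ratio": 1.0},
}

def capacity(inv):
    """Effective schedulable capacity per resource class: (total - reserved) * ratio."""
    return {rc: (v["total"] - v["reserved"]) * v["allocation_ratio"]
            for rc, v in inv.items()}

print(capacity(inventory))
# {'VCPU': 192.0, 'MEMORY_MB': 196078.0, 'DISK_GB': 400.0}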
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1835.285256] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-ab5aae9d-41b8-4316-b726-ea28056af8d2 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] [instance: 4a0f7975-5a07-4593-ae71-cabebdefe0fe] Created Snapshot of the VM instance {{(pid=62519) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1835.285992] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c21cee43-e8d2-4178-bb67-3da0e771f500 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1835.611391] env[62519]: DEBUG oslo_concurrency.lockutils [None req-cb53b5a9-1695-40be-9a5a-731401609236 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Releasing lock "refresh_cache-e143206e-eb12-41b8-9140-229c1533fd80" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1835.611743] env[62519]: DEBUG nova.compute.manager [None req-cb53b5a9-1695-40be-9a5a-731401609236 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] [instance: e143206e-eb12-41b8-9140-229c1533fd80] Instance network_info: |[{"id": "95141eba-5eca-4c6b-9080-7a7b8921dbb7", "address": "fa:16:3e:bd:c2:2f", "network": {"id": "14e63db8-c42b-42e5-bd96-cab60c163758", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1328688204-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "df6727c290724a8ebef5188c77e91399", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2be3fdb5-359e-43bd-8c20-2ff00e81db55", "external-id": "nsx-vlan-transportzone-986", "segmentation_id": 986, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap95141eba-5e", "ovs_interfaceid": "95141eba-5eca-4c6b-9080-7a7b8921dbb7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62519) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2004}} [ 1835.612196] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-cb53b5a9-1695-40be-9a5a-731401609236 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] [instance: e143206e-eb12-41b8-9140-229c1533fd80] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:bd:c2:2f', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '2be3fdb5-359e-43bd-8c20-2ff00e81db55', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '95141eba-5eca-4c6b-9080-7a7b8921dbb7', 'vif_model': 'vmxnet3'}] {{(pid=62519) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1835.619814] env[62519]: DEBUG oslo.service.loopingcall [None req-cb53b5a9-1695-40be-9a5a-731401609236 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] 
Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62519) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1835.620056] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e143206e-eb12-41b8-9140-229c1533fd80] Creating VM on the ESX host {{(pid=62519) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1835.620316] env[62519]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-79d1a79a-a423-4572-a9f5-8a850e1a9cca {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1835.641698] env[62519]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1835.641698] env[62519]: value = "task-1803090" [ 1835.641698] env[62519]: _type = "Task" [ 1835.641698] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1835.649952] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1803090, 'name': CreateVM_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1835.709024] env[62519]: DEBUG oslo_vmware.api [None req-38ddf53a-539a-4d8d-957a-9eb72ff6a4fd tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Task: {'id': task-1803089, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.132119} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1835.710963] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-38ddf53a-539a-4d8d-957a-9eb72ff6a4fd tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Deleted the datastore file {{(pid=62519) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1835.711220] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-38ddf53a-539a-4d8d-957a-9eb72ff6a4fd tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] [instance: 358ee402-1112-4eea-a518-a45a6bf92c31] Deleted contents of the VM from datastore datastore1 {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1835.711449] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-38ddf53a-539a-4d8d-957a-9eb72ff6a4fd tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] [instance: 358ee402-1112-4eea-a518-a45a6bf92c31] Instance destroyed {{(pid=62519) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1835.712875] env[62519]: INFO nova.compute.manager [None req-38ddf53a-539a-4d8d-957a-9eb72ff6a4fd tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] [instance: 358ee402-1112-4eea-a518-a45a6bf92c31] Took 1.14 seconds to destroy the instance on the hypervisor. [ 1835.713258] env[62519]: DEBUG oslo.service.loopingcall [None req-38ddf53a-539a-4d8d-957a-9eb72ff6a4fd tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
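Before CreateVM_Task is invoked, the Neutron network_info blob shown above is reduced to the flat "Instance VIF info" dict (bridge name, MAC, NSX opaque-network reference, port id, vmxnet3 model). A simplified mapping that produces the same shape from the logged fields; the function name is illustrative, not Nova's:

def vif_info_from_network_info(network_info, vif_model="vmxnet3"):
    """Reduce a Neutron network_info entry to the flat VIF dict the log shows being
    handed to the VMware layer (simplified; field names follow the log output)."""
    out = []
    for vif in network_info:
        details = vif.get("details", {})
        out.append({
            "network_name": vif["network"]["bridge"],
            "mac_address": vif["address"],
            "network_ref": {
                "type": "OpaqueNetwork",
                "network-id": details.get("nsx-logical-switch-id"),
                "network-type": "nsx.LogicalSwitch",
                "use-external-id": True,
            },
            "iface_id": vif["id"],
            "vif_model": vif_model,
        })
    return out

# Minimal usage with the fields visible in the log entry above:
example = [{"id": "95141eba-5eca-4c6b-9080-7a7b8921dbb7",
            "address": "fa:16:3e:bd:c2:2f",
            "network": {"bridge": "br-int"},
            "details": {"nsx-logical-switch-id": "2be3fdb5-359e-43bd-8c20-2ff00e81db55"}}]
print(vif_info_from_network_info(example))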
{{(pid=62519) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1835.713488] env[62519]: DEBUG nova.compute.manager [-] [instance: 358ee402-1112-4eea-a518-a45a6bf92c31] Deallocating network for instance {{(pid=62519) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2297}} [ 1835.713619] env[62519]: DEBUG nova.network.neutron [-] [instance: 358ee402-1112-4eea-a518-a45a6bf92c31] deallocate_for_instance() {{(pid=62519) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1835.757892] env[62519]: DEBUG oslo_concurrency.lockutils [None req-fb47b509-8873-40b4-9991-f74edf23e568 tempest-InstanceActionsNegativeTestJSON-1848810980 tempest-InstanceActionsNegativeTestJSON-1848810980-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.355s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1835.758423] env[62519]: DEBUG nova.compute.manager [None req-fb47b509-8873-40b4-9991-f74edf23e568 tempest-InstanceActionsNegativeTestJSON-1848810980 tempest-InstanceActionsNegativeTestJSON-1848810980-project-member] [instance: fcff790b-d267-4d8c-80d8-ad66cfb66539] Start building networks asynchronously for instance. {{(pid=62519) _build_resources /opt/stack/nova/nova/compute/manager.py:2838}} [ 1835.762301] env[62519]: DEBUG oslo_concurrency.lockutils [None req-d244d491-3ba6-4ebd-9b9f-5ceff9c2adba tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 8.331s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1835.762685] env[62519]: DEBUG oslo_concurrency.lockutils [None req-d244d491-3ba6-4ebd-9b9f-5ceff9c2adba tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1835.765578] env[62519]: DEBUG oslo_concurrency.lockutils [None req-3bb5b2fc-3c16-485c-b3aa-bebff7f3a9a9 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 5.583s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1835.767837] env[62519]: INFO nova.compute.claims [None req-3bb5b2fc-3c16-485c-b3aa-bebff7f3a9a9 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] [instance: eb5de0a4-0af3-4731-ab30-3ae3d72207a7] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1835.777810] env[62519]: DEBUG nova.compute.manager [req-c3cba528-4899-4bc0-9cd5-5f63b33da6dd req-bc4697b6-2ea1-4ab5-a1de-a069e2977ecb service nova] [instance: e143206e-eb12-41b8-9140-229c1533fd80] Received event network-vif-plugged-95141eba-5eca-4c6b-9080-7a7b8921dbb7 {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1835.777964] env[62519]: DEBUG oslo_concurrency.lockutils [req-c3cba528-4899-4bc0-9cd5-5f63b33da6dd req-bc4697b6-2ea1-4ab5-a1de-a069e2977ecb service nova] Acquiring lock 
"e143206e-eb12-41b8-9140-229c1533fd80-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1835.778697] env[62519]: DEBUG oslo_concurrency.lockutils [req-c3cba528-4899-4bc0-9cd5-5f63b33da6dd req-bc4697b6-2ea1-4ab5-a1de-a069e2977ecb service nova] Lock "e143206e-eb12-41b8-9140-229c1533fd80-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1835.778697] env[62519]: DEBUG oslo_concurrency.lockutils [req-c3cba528-4899-4bc0-9cd5-5f63b33da6dd req-bc4697b6-2ea1-4ab5-a1de-a069e2977ecb service nova] Lock "e143206e-eb12-41b8-9140-229c1533fd80-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1835.778697] env[62519]: DEBUG nova.compute.manager [req-c3cba528-4899-4bc0-9cd5-5f63b33da6dd req-bc4697b6-2ea1-4ab5-a1de-a069e2977ecb service nova] [instance: e143206e-eb12-41b8-9140-229c1533fd80] No waiting events found dispatching network-vif-plugged-95141eba-5eca-4c6b-9080-7a7b8921dbb7 {{(pid=62519) pop_instance_event /opt/stack/nova/nova/compute/manager.py:323}} [ 1835.778843] env[62519]: WARNING nova.compute.manager [req-c3cba528-4899-4bc0-9cd5-5f63b33da6dd req-bc4697b6-2ea1-4ab5-a1de-a069e2977ecb service nova] [instance: e143206e-eb12-41b8-9140-229c1533fd80] Received unexpected event network-vif-plugged-95141eba-5eca-4c6b-9080-7a7b8921dbb7 for instance with vm_state building and task_state spawning. [ 1835.779020] env[62519]: DEBUG nova.compute.manager [req-c3cba528-4899-4bc0-9cd5-5f63b33da6dd req-bc4697b6-2ea1-4ab5-a1de-a069e2977ecb service nova] [instance: e143206e-eb12-41b8-9140-229c1533fd80] Received event network-changed-95141eba-5eca-4c6b-9080-7a7b8921dbb7 {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1835.779237] env[62519]: DEBUG nova.compute.manager [req-c3cba528-4899-4bc0-9cd5-5f63b33da6dd req-bc4697b6-2ea1-4ab5-a1de-a069e2977ecb service nova] [instance: e143206e-eb12-41b8-9140-229c1533fd80] Refreshing instance network info cache due to event network-changed-95141eba-5eca-4c6b-9080-7a7b8921dbb7. 
{{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11628}} [ 1835.779617] env[62519]: DEBUG oslo_concurrency.lockutils [req-c3cba528-4899-4bc0-9cd5-5f63b33da6dd req-bc4697b6-2ea1-4ab5-a1de-a069e2977ecb service nova] Acquiring lock "refresh_cache-e143206e-eb12-41b8-9140-229c1533fd80" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1835.779617] env[62519]: DEBUG oslo_concurrency.lockutils [req-c3cba528-4899-4bc0-9cd5-5f63b33da6dd req-bc4697b6-2ea1-4ab5-a1de-a069e2977ecb service nova] Acquired lock "refresh_cache-e143206e-eb12-41b8-9140-229c1533fd80" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1835.779770] env[62519]: DEBUG nova.network.neutron [req-c3cba528-4899-4bc0-9cd5-5f63b33da6dd req-bc4697b6-2ea1-4ab5-a1de-a069e2977ecb service nova] [instance: e143206e-eb12-41b8-9140-229c1533fd80] Refreshing network info cache for port 95141eba-5eca-4c6b-9080-7a7b8921dbb7 {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1835.796028] env[62519]: INFO nova.scheduler.client.report [None req-d244d491-3ba6-4ebd-9b9f-5ceff9c2adba tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Deleted allocations for instance 5da884af-d8d2-409b-99bd-e5370e44e9f0 [ 1835.806032] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-ab5aae9d-41b8-4316-b726-ea28056af8d2 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] [instance: 4a0f7975-5a07-4593-ae71-cabebdefe0fe] Creating linked-clone VM from snapshot {{(pid=62519) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1835.810619] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-069ff05d-633f-47cc-8372-8741e424784f {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1835.822940] env[62519]: DEBUG oslo_vmware.api [None req-ab5aae9d-41b8-4316-b726-ea28056af8d2 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Waiting for the task: (returnval){ [ 1835.822940] env[62519]: value = "task-1803091" [ 1835.822940] env[62519]: _type = "Task" [ 1835.822940] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1835.834164] env[62519]: DEBUG oslo_vmware.api [None req-ab5aae9d-41b8-4316-b726-ea28056af8d2 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Task: {'id': task-1803091, 'name': CloneVM_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1836.154017] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1803090, 'name': CreateVM_Task} progress is 99%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1836.268764] env[62519]: DEBUG oslo_service.periodic_task [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62519) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1836.268985] env[62519]: DEBUG oslo_service.periodic_task [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=62519) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1836.272543] env[62519]: DEBUG nova.compute.utils [None req-fb47b509-8873-40b4-9991-f74edf23e568 tempest-InstanceActionsNegativeTestJSON-1848810980 tempest-InstanceActionsNegativeTestJSON-1848810980-project-member] Using /dev/sd instead of None {{(pid=62519) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1836.273823] env[62519]: DEBUG nova.compute.manager [None req-fb47b509-8873-40b4-9991-f74edf23e568 tempest-InstanceActionsNegativeTestJSON-1848810980 tempest-InstanceActionsNegativeTestJSON-1848810980-project-member] [instance: fcff790b-d267-4d8c-80d8-ad66cfb66539] Allocating IP information in the background. {{(pid=62519) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1989}} [ 1836.274059] env[62519]: DEBUG nova.network.neutron [None req-fb47b509-8873-40b4-9991-f74edf23e568 tempest-InstanceActionsNegativeTestJSON-1848810980 tempest-InstanceActionsNegativeTestJSON-1848810980-project-member] [instance: fcff790b-d267-4d8c-80d8-ad66cfb66539] allocate_for_instance() {{(pid=62519) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1836.319746] env[62519]: DEBUG oslo_concurrency.lockutils [None req-d244d491-3ba6-4ebd-9b9f-5ceff9c2adba tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Lock "5da884af-d8d2-409b-99bd-e5370e44e9f0" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 13.284s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1836.333336] env[62519]: DEBUG oslo_vmware.api [None req-ab5aae9d-41b8-4316-b726-ea28056af8d2 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Task: {'id': task-1803091, 'name': CloneVM_Task} progress is 94%. 
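The periodic_task entries show the compute manager iterating its registered periodic tasks (_check_instance_build_time, _sync_scheduler_instance_info, and so on) on a timer. A bare-bones loop that reproduces the logging shape, purely as an illustration of the pattern; the real scheduler is oslo.service's periodic_task machinery:

import time

def run_periodic(tasks, spacing=2.0, rounds=2):
    """Very small periodic-task loop: every `spacing` seconds, run each task and
    print it the way the 'Running periodic task ...' lines do. Illustrative only."""
    for _ in range(rounds):
        for name, fn in tasks.items():
            print(f"Running periodic task ComputeManager.{name}")
            fn()
        time.sleep(spacing)

run_periodic({
    "_check_instance_build_time": lambda: None,
    "_sync_scheduler_instance_info": lambda: None,
}, spacing=0.1, rounds=1)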
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1836.346446] env[62519]: DEBUG nova.policy [None req-fb47b509-8873-40b4-9991-f74edf23e568 tempest-InstanceActionsNegativeTestJSON-1848810980 tempest-InstanceActionsNegativeTestJSON-1848810980-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'bbcfab2f663046fc8653a28f7cb80021', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '4f35671cdf2742c2b85aeb34385e4d85', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62519) authorize /opt/stack/nova/nova/policy.py:192}} [ 1836.505365] env[62519]: DEBUG nova.network.neutron [-] [instance: 358ee402-1112-4eea-a518-a45a6bf92c31] Updating instance_info_cache with network_info: [] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1836.606517] env[62519]: DEBUG nova.network.neutron [req-c3cba528-4899-4bc0-9cd5-5f63b33da6dd req-bc4697b6-2ea1-4ab5-a1de-a069e2977ecb service nova] [instance: e143206e-eb12-41b8-9140-229c1533fd80] Updated VIF entry in instance network info cache for port 95141eba-5eca-4c6b-9080-7a7b8921dbb7. {{(pid=62519) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1836.606873] env[62519]: DEBUG nova.network.neutron [req-c3cba528-4899-4bc0-9cd5-5f63b33da6dd req-bc4697b6-2ea1-4ab5-a1de-a069e2977ecb service nova] [instance: e143206e-eb12-41b8-9140-229c1533fd80] Updating instance_info_cache with network_info: [{"id": "95141eba-5eca-4c6b-9080-7a7b8921dbb7", "address": "fa:16:3e:bd:c2:2f", "network": {"id": "14e63db8-c42b-42e5-bd96-cab60c163758", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1328688204-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "df6727c290724a8ebef5188c77e91399", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2be3fdb5-359e-43bd-8c20-2ff00e81db55", "external-id": "nsx-vlan-transportzone-986", "segmentation_id": 986, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap95141eba-5e", "ovs_interfaceid": "95141eba-5eca-4c6b-9080-7a7b8921dbb7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1836.652736] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1803090, 'name': CreateVM_Task, 'duration_secs': 0.520576} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1836.652921] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e143206e-eb12-41b8-9140-229c1533fd80] Created VM on the ESX host {{(pid=62519) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1836.653875] env[62519]: DEBUG oslo_concurrency.lockutils [None req-cb53b5a9-1695-40be-9a5a-731401609236 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1836.653875] env[62519]: DEBUG oslo_concurrency.lockutils [None req-cb53b5a9-1695-40be-9a5a-731401609236 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Acquired lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1836.654106] env[62519]: DEBUG oslo_concurrency.lockutils [None req-cb53b5a9-1695-40be-9a5a-731401609236 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1836.654361] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9c4b499c-528f-4f03-be08-2880eac3533e {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1836.659370] env[62519]: DEBUG oslo_vmware.api [None req-cb53b5a9-1695-40be-9a5a-731401609236 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Waiting for the task: (returnval){ [ 1836.659370] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]52da69e3-9783-af00-4d98-5522ea35c078" [ 1836.659370] env[62519]: _type = "Task" [ 1836.659370] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1836.669881] env[62519]: DEBUG oslo_vmware.api [None req-cb53b5a9-1695-40be-9a5a-731401609236 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52da69e3-9783-af00-4d98-5522ea35c078, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1836.712119] env[62519]: DEBUG nova.network.neutron [None req-fb47b509-8873-40b4-9991-f74edf23e568 tempest-InstanceActionsNegativeTestJSON-1848810980 tempest-InstanceActionsNegativeTestJSON-1848810980-project-member] [instance: fcff790b-d267-4d8c-80d8-ad66cfb66539] Successfully created port: ecfd75e5-0384-4fee-b19c-a0219648bc54 {{(pid=62519) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1836.776678] env[62519]: DEBUG nova.compute.manager [None req-fb47b509-8873-40b4-9991-f74edf23e568 tempest-InstanceActionsNegativeTestJSON-1848810980 tempest-InstanceActionsNegativeTestJSON-1848810980-project-member] [instance: fcff790b-d267-4d8c-80d8-ad66cfb66539] Start building block device mappings for instance. 
{{(pid=62519) _build_resources /opt/stack/nova/nova/compute/manager.py:2873}} [ 1836.782271] env[62519]: DEBUG oslo_service.periodic_task [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62519) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1836.782271] env[62519]: DEBUG nova.compute.manager [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Starting heal instance info cache {{(pid=62519) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10418}} [ 1836.782271] env[62519]: DEBUG nova.compute.manager [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Rebuilding the list of instances to heal {{(pid=62519) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10422}} [ 1836.837513] env[62519]: DEBUG oslo_vmware.api [None req-ab5aae9d-41b8-4316-b726-ea28056af8d2 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Task: {'id': task-1803091, 'name': CloneVM_Task} progress is 94%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1837.007567] env[62519]: INFO nova.compute.manager [-] [instance: 358ee402-1112-4eea-a518-a45a6bf92c31] Took 1.29 seconds to deallocate network for instance. [ 1837.076724] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-514a55fb-fd10-4544-827e-521a7ced3dfe {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1837.085200] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e11d4fc-7d1d-456f-a159-6a85911ede9b {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1837.116502] env[62519]: DEBUG oslo_concurrency.lockutils [req-c3cba528-4899-4bc0-9cd5-5f63b33da6dd req-bc4697b6-2ea1-4ab5-a1de-a069e2977ecb service nova] Releasing lock "refresh_cache-e143206e-eb12-41b8-9140-229c1533fd80" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1837.117764] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b6c7f06-aac4-4cf3-9d31-392ba6504225 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1837.125191] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4353e567-11c0-4d63-8e21-1ce067d9a907 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1837.138179] env[62519]: DEBUG nova.compute.provider_tree [None req-3bb5b2fc-3c16-485c-b3aa-bebff7f3a9a9 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Inventory has not changed in ProviderTree for provider: f8ca0d98-9158-4b85-ae0e-b106f966dd44 {{(pid=62519) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1837.170320] env[62519]: DEBUG oslo_vmware.api [None req-cb53b5a9-1695-40be-9a5a-731401609236 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52da69e3-9783-af00-4d98-5522ea35c078, 'name': SearchDatastore_Task, 'duration_secs': 0.009657} completed 
successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1837.170745] env[62519]: DEBUG oslo_concurrency.lockutils [None req-cb53b5a9-1695-40be-9a5a-731401609236 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Releasing lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1837.170833] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-cb53b5a9-1695-40be-9a5a-731401609236 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] [instance: e143206e-eb12-41b8-9140-229c1533fd80] Processing image 15793716-f1d9-4a86-9030-717adf498693 {{(pid=62519) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1837.171069] env[62519]: DEBUG oslo_concurrency.lockutils [None req-cb53b5a9-1695-40be-9a5a-731401609236 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1837.171215] env[62519]: DEBUG oslo_concurrency.lockutils [None req-cb53b5a9-1695-40be-9a5a-731401609236 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Acquired lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1837.171391] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-cb53b5a9-1695-40be-9a5a-731401609236 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62519) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1837.171908] env[62519]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-cfd1f4fa-1baf-4fae-8eec-aef3f2f9c712 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1837.180820] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-cb53b5a9-1695-40be-9a5a-731401609236 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62519) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1837.181041] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-cb53b5a9-1695-40be-9a5a-731401609236 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62519) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1837.181853] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6437aa7f-13ea-45fb-8e1f-cccd36e02dc9 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1837.188061] env[62519]: DEBUG oslo_vmware.api [None req-cb53b5a9-1695-40be-9a5a-731401609236 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Waiting for the task: (returnval){ [ 1837.188061] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]52490a85-b35c-d9be-38f7-7c7d3aeca0fe" [ 1837.188061] env[62519]: _type = "Task" [ 1837.188061] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1837.198838] env[62519]: DEBUG oslo_vmware.api [None req-cb53b5a9-1695-40be-9a5a-731401609236 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52490a85-b35c-d9be-38f7-7c7d3aeca0fe, 'name': SearchDatastore_Task, 'duration_secs': 0.009233} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1837.199600] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f31114a6-064f-4d74-958d-b5c0a89e3094 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1837.204466] env[62519]: DEBUG oslo_vmware.api [None req-cb53b5a9-1695-40be-9a5a-731401609236 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Waiting for the task: (returnval){ [ 1837.204466] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]525656c1-5d41-bb0e-0a14-d37eccd200b8" [ 1837.204466] env[62519]: _type = "Task" [ 1837.204466] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1837.211736] env[62519]: DEBUG oslo_vmware.api [None req-cb53b5a9-1695-40be-9a5a-731401609236 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]525656c1-5d41-bb0e-0a14-d37eccd200b8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1837.291573] env[62519]: DEBUG nova.compute.manager [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] [instance: 358ee402-1112-4eea-a518-a45a6bf92c31] Skipping network cache update for instance because it is being deleted. {{(pid=62519) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10435}} [ 1837.292207] env[62519]: DEBUG nova.compute.manager [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] [instance: e143206e-eb12-41b8-9140-229c1533fd80] Skipping network cache update for instance because it is Building. {{(pid=62519) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10431}} [ 1837.292207] env[62519]: DEBUG nova.compute.manager [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] [instance: fcff790b-d267-4d8c-80d8-ad66cfb66539] Skipping network cache update for instance because it is Building. 
{{(pid=62519) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10431}} [ 1837.292207] env[62519]: DEBUG nova.compute.manager [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] [instance: eb5de0a4-0af3-4731-ab30-3ae3d72207a7] Skipping network cache update for instance because it is Building. {{(pid=62519) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10431}} [ 1837.320402] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Acquiring lock "refresh_cache-11d4a010-959f-4f53-94dc-7499007612ad" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1837.320553] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Acquired lock "refresh_cache-11d4a010-959f-4f53-94dc-7499007612ad" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1837.320699] env[62519]: DEBUG nova.network.neutron [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] [instance: 11d4a010-959f-4f53-94dc-7499007612ad] Forcefully refreshing network info cache for instance {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2061}} [ 1837.320846] env[62519]: DEBUG nova.objects.instance [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Lazy-loading 'info_cache' on Instance uuid 11d4a010-959f-4f53-94dc-7499007612ad {{(pid=62519) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1837.334511] env[62519]: DEBUG oslo_vmware.api [None req-ab5aae9d-41b8-4316-b726-ea28056af8d2 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Task: {'id': task-1803091, 'name': CloneVM_Task} progress is 95%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1837.514182] env[62519]: DEBUG oslo_concurrency.lockutils [None req-38ddf53a-539a-4d8d-957a-9eb72ff6a4fd tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1837.642026] env[62519]: DEBUG nova.scheduler.client.report [None req-3bb5b2fc-3c16-485c-b3aa-bebff7f3a9a9 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Inventory has not changed for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 157, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1837.715295] env[62519]: DEBUG oslo_vmware.api [None req-cb53b5a9-1695-40be-9a5a-731401609236 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]525656c1-5d41-bb0e-0a14-d37eccd200b8, 'name': SearchDatastore_Task, 'duration_secs': 0.008299} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1837.715561] env[62519]: DEBUG oslo_concurrency.lockutils [None req-cb53b5a9-1695-40be-9a5a-731401609236 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Releasing lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1837.715821] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-cb53b5a9-1695-40be-9a5a-731401609236 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk to [datastore1] e143206e-eb12-41b8-9140-229c1533fd80/e143206e-eb12-41b8-9140-229c1533fd80.vmdk {{(pid=62519) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1837.716088] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-47da1e14-5fcf-474b-b6c0-419734ca6161 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1837.722918] env[62519]: DEBUG oslo_vmware.api [None req-cb53b5a9-1695-40be-9a5a-731401609236 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Waiting for the task: (returnval){ [ 1837.722918] env[62519]: value = "task-1803092" [ 1837.722918] env[62519]: _type = "Task" [ 1837.722918] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1837.730969] env[62519]: DEBUG oslo_vmware.api [None req-cb53b5a9-1695-40be-9a5a-731401609236 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Task: {'id': task-1803092, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1837.796223] env[62519]: DEBUG nova.compute.manager [None req-fb47b509-8873-40b4-9991-f74edf23e568 tempest-InstanceActionsNegativeTestJSON-1848810980 tempest-InstanceActionsNegativeTestJSON-1848810980-project-member] [instance: fcff790b-d267-4d8c-80d8-ad66cfb66539] Start spawning the instance on the hypervisor. 
{{(pid=62519) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2647}} [ 1837.808698] env[62519]: DEBUG nova.compute.manager [req-306e13ca-d23f-42ec-91f9-bdf3220dac9e req-3f5fa864-6f33-4726-bdf6-b7796a26ce64 service nova] [instance: 358ee402-1112-4eea-a518-a45a6bf92c31] Received event network-vif-deleted-87d0022f-848b-4f80-b7c3-e234bf681457 {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1837.820797] env[62519]: DEBUG nova.virt.hardware [None req-fb47b509-8873-40b4-9991-f74edf23e568 tempest-InstanceActionsNegativeTestJSON-1848810980 tempest-InstanceActionsNegativeTestJSON-1848810980-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T07:56:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T07:55:55Z,direct_url=,disk_format='vmdk',id=15793716-f1d9-4a86-9030-717adf498693,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4069337684d348e0a1ab4eb6a1a2b14d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T07:55:56Z,virtual_size=,visibility=), allow threads: False {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1837.820797] env[62519]: DEBUG nova.virt.hardware [None req-fb47b509-8873-40b4-9991-f74edf23e568 tempest-InstanceActionsNegativeTestJSON-1848810980 tempest-InstanceActionsNegativeTestJSON-1848810980-project-member] Flavor limits 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1837.820797] env[62519]: DEBUG nova.virt.hardware [None req-fb47b509-8873-40b4-9991-f74edf23e568 tempest-InstanceActionsNegativeTestJSON-1848810980 tempest-InstanceActionsNegativeTestJSON-1848810980-project-member] Image limits 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1837.821022] env[62519]: DEBUG nova.virt.hardware [None req-fb47b509-8873-40b4-9991-f74edf23e568 tempest-InstanceActionsNegativeTestJSON-1848810980 tempest-InstanceActionsNegativeTestJSON-1848810980-project-member] Flavor pref 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1837.821105] env[62519]: DEBUG nova.virt.hardware [None req-fb47b509-8873-40b4-9991-f74edf23e568 tempest-InstanceActionsNegativeTestJSON-1848810980 tempest-InstanceActionsNegativeTestJSON-1848810980-project-member] Image pref 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1837.821253] env[62519]: DEBUG nova.virt.hardware [None req-fb47b509-8873-40b4-9991-f74edf23e568 tempest-InstanceActionsNegativeTestJSON-1848810980 tempest-InstanceActionsNegativeTestJSON-1848810980-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1837.821460] env[62519]: DEBUG nova.virt.hardware [None req-fb47b509-8873-40b4-9991-f74edf23e568 tempest-InstanceActionsNegativeTestJSON-1848810980 tempest-InstanceActionsNegativeTestJSON-1848810980-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum 
VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1837.821648] env[62519]: DEBUG nova.virt.hardware [None req-fb47b509-8873-40b4-9991-f74edf23e568 tempest-InstanceActionsNegativeTestJSON-1848810980 tempest-InstanceActionsNegativeTestJSON-1848810980-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62519) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1837.822168] env[62519]: DEBUG nova.virt.hardware [None req-fb47b509-8873-40b4-9991-f74edf23e568 tempest-InstanceActionsNegativeTestJSON-1848810980 tempest-InstanceActionsNegativeTestJSON-1848810980-project-member] Got 1 possible topologies {{(pid=62519) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1837.822168] env[62519]: DEBUG nova.virt.hardware [None req-fb47b509-8873-40b4-9991-f74edf23e568 tempest-InstanceActionsNegativeTestJSON-1848810980 tempest-InstanceActionsNegativeTestJSON-1848810980-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1837.822168] env[62519]: DEBUG nova.virt.hardware [None req-fb47b509-8873-40b4-9991-f74edf23e568 tempest-InstanceActionsNegativeTestJSON-1848810980 tempest-InstanceActionsNegativeTestJSON-1848810980-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1837.822989] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b936f492-87a3-4a51-aac0-083f280c015d {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1837.838945] env[62519]: DEBUG oslo_vmware.api [None req-ab5aae9d-41b8-4316-b726-ea28056af8d2 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Task: {'id': task-1803091, 'name': CloneVM_Task, 'duration_secs': 1.847453} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1837.840135] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f1cd64c-04f0-4abd-9b78-96011a3c2838 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1837.843897] env[62519]: INFO nova.virt.vmwareapi.vmops [None req-ab5aae9d-41b8-4316-b726-ea28056af8d2 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] [instance: 4a0f7975-5a07-4593-ae71-cabebdefe0fe] Created linked-clone VM from snapshot [ 1837.845236] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81900389-b514-409c-a15f-6cd40874769e {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1837.860600] env[62519]: DEBUG nova.virt.vmwareapi.images [None req-ab5aae9d-41b8-4316-b726-ea28056af8d2 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] [instance: 4a0f7975-5a07-4593-ae71-cabebdefe0fe] Uploading image bf703b2e-801c-4321-92c0-c8d0deb17ae0 {{(pid=62519) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1837.883175] env[62519]: DEBUG oslo_vmware.rw_handles [None req-ab5aae9d-41b8-4316-b726-ea28056af8d2 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1837.883175] env[62519]: value = "vm-373824" [ 1837.883175] env[62519]: _type = "VirtualMachine" [ 1837.883175] env[62519]: }. {{(pid=62519) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1837.883532] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-d10a31d7-ea76-469a-b8eb-c9a970b0f9f6 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1837.891971] env[62519]: DEBUG oslo_vmware.rw_handles [None req-ab5aae9d-41b8-4316-b726-ea28056af8d2 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Lease: (returnval){ [ 1837.891971] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]52f1fcc7-9b00-2586-453d-27a8c12ef051" [ 1837.891971] env[62519]: _type = "HttpNfcLease" [ 1837.891971] env[62519]: } obtained for exporting VM: (result){ [ 1837.891971] env[62519]: value = "vm-373824" [ 1837.891971] env[62519]: _type = "VirtualMachine" [ 1837.891971] env[62519]: }. {{(pid=62519) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1837.892334] env[62519]: DEBUG oslo_vmware.api [None req-ab5aae9d-41b8-4316-b726-ea28056af8d2 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Waiting for the lease: (returnval){ [ 1837.892334] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]52f1fcc7-9b00-2586-453d-27a8c12ef051" [ 1837.892334] env[62519]: _type = "HttpNfcLease" [ 1837.892334] env[62519]: } to be ready. 
{{(pid=62519) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1837.899804] env[62519]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1837.899804] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]52f1fcc7-9b00-2586-453d-27a8c12ef051" [ 1837.899804] env[62519]: _type = "HttpNfcLease" [ 1837.899804] env[62519]: } is initializing. {{(pid=62519) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1838.148055] env[62519]: DEBUG oslo_concurrency.lockutils [None req-3bb5b2fc-3c16-485c-b3aa-bebff7f3a9a9 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.382s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1838.148055] env[62519]: DEBUG nova.compute.manager [None req-3bb5b2fc-3c16-485c-b3aa-bebff7f3a9a9 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] [instance: eb5de0a4-0af3-4731-ab30-3ae3d72207a7] Start building networks asynchronously for instance. {{(pid=62519) _build_resources /opt/stack/nova/nova/compute/manager.py:2838}} [ 1838.151544] env[62519]: DEBUG oslo_concurrency.lockutils [None req-f642eb67-c251-474f-9741-074de84f9988 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 5.186s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1838.152541] env[62519]: INFO nova.compute.claims [None req-f642eb67-c251-474f-9741-074de84f9988 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] [instance: 646c9dfc-7b78-4cdb-b4f5-480c43af38c4] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1838.236865] env[62519]: DEBUG oslo_vmware.api [None req-cb53b5a9-1695-40be-9a5a-731401609236 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Task: {'id': task-1803092, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.50285} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1838.239023] env[62519]: DEBUG nova.network.neutron [None req-fb47b509-8873-40b4-9991-f74edf23e568 tempest-InstanceActionsNegativeTestJSON-1848810980 tempest-InstanceActionsNegativeTestJSON-1848810980-project-member] [instance: fcff790b-d267-4d8c-80d8-ad66cfb66539] Successfully updated port: ecfd75e5-0384-4fee-b19c-a0219648bc54 {{(pid=62519) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1838.240959] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-cb53b5a9-1695-40be-9a5a-731401609236 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk to [datastore1] e143206e-eb12-41b8-9140-229c1533fd80/e143206e-eb12-41b8-9140-229c1533fd80.vmdk {{(pid=62519) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1838.240959] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-cb53b5a9-1695-40be-9a5a-731401609236 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] [instance: e143206e-eb12-41b8-9140-229c1533fd80] Extending root virtual disk to 1048576 {{(pid=62519) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1838.240959] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-15cd2e81-b0ec-4e1e-bd18-c04a38668483 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1838.248555] env[62519]: DEBUG oslo_vmware.api [None req-cb53b5a9-1695-40be-9a5a-731401609236 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Waiting for the task: (returnval){ [ 1838.248555] env[62519]: value = "task-1803094" [ 1838.248555] env[62519]: _type = "Task" [ 1838.248555] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1838.259344] env[62519]: DEBUG oslo_vmware.api [None req-cb53b5a9-1695-40be-9a5a-731401609236 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Task: {'id': task-1803094, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1838.401076] env[62519]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1838.401076] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]52f1fcc7-9b00-2586-453d-27a8c12ef051" [ 1838.401076] env[62519]: _type = "HttpNfcLease" [ 1838.401076] env[62519]: } is ready. {{(pid=62519) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1838.401374] env[62519]: DEBUG oslo_vmware.rw_handles [None req-ab5aae9d-41b8-4316-b726-ea28056af8d2 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1838.401374] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]52f1fcc7-9b00-2586-453d-27a8c12ef051" [ 1838.401374] env[62519]: _type = "HttpNfcLease" [ 1838.401374] env[62519]: }. 
{{(pid=62519) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1838.402166] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75d72e54-cb4e-4af9-8896-99eaf9f26afc {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1838.409853] env[62519]: DEBUG oslo_vmware.rw_handles [None req-ab5aae9d-41b8-4316-b726-ea28056af8d2 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/526fad50-6d6d-f5b7-71c9-99a445b976d3/disk-0.vmdk from lease info. {{(pid=62519) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1838.410042] env[62519]: DEBUG oslo_vmware.rw_handles [None req-ab5aae9d-41b8-4316-b726-ea28056af8d2 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Opening URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/526fad50-6d6d-f5b7-71c9-99a445b976d3/disk-0.vmdk for reading. {{(pid=62519) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1838.519189] env[62519]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-68eb1f56-19c0-4ac7-8ef3-b754a2dd3699 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1838.662219] env[62519]: DEBUG nova.compute.utils [None req-3bb5b2fc-3c16-485c-b3aa-bebff7f3a9a9 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Using /dev/sd instead of None {{(pid=62519) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1838.663800] env[62519]: DEBUG nova.compute.manager [None req-3bb5b2fc-3c16-485c-b3aa-bebff7f3a9a9 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] [instance: eb5de0a4-0af3-4731-ab30-3ae3d72207a7] Allocating IP information in the background. 
{{(pid=62519) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1989}} [ 1838.663964] env[62519]: DEBUG nova.network.neutron [None req-3bb5b2fc-3c16-485c-b3aa-bebff7f3a9a9 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] [instance: eb5de0a4-0af3-4731-ab30-3ae3d72207a7] allocate_for_instance() {{(pid=62519) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1838.715833] env[62519]: DEBUG nova.policy [None req-3bb5b2fc-3c16-485c-b3aa-bebff7f3a9a9 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b04fa80750184b97a16ec1880e0a585c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '683d60927fdf424386ffcfaa344a7af6', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62519) authorize /opt/stack/nova/nova/policy.py:192}} [ 1838.744572] env[62519]: DEBUG oslo_concurrency.lockutils [None req-fb47b509-8873-40b4-9991-f74edf23e568 tempest-InstanceActionsNegativeTestJSON-1848810980 tempest-InstanceActionsNegativeTestJSON-1848810980-project-member] Acquiring lock "refresh_cache-fcff790b-d267-4d8c-80d8-ad66cfb66539" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1838.744692] env[62519]: DEBUG oslo_concurrency.lockutils [None req-fb47b509-8873-40b4-9991-f74edf23e568 tempest-InstanceActionsNegativeTestJSON-1848810980 tempest-InstanceActionsNegativeTestJSON-1848810980-project-member] Acquired lock "refresh_cache-fcff790b-d267-4d8c-80d8-ad66cfb66539" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1838.744931] env[62519]: DEBUG nova.network.neutron [None req-fb47b509-8873-40b4-9991-f74edf23e568 tempest-InstanceActionsNegativeTestJSON-1848810980 tempest-InstanceActionsNegativeTestJSON-1848810980-project-member] [instance: fcff790b-d267-4d8c-80d8-ad66cfb66539] Building network info cache for instance {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1838.760834] env[62519]: DEBUG oslo_vmware.api [None req-cb53b5a9-1695-40be-9a5a-731401609236 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Task: {'id': task-1803094, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.106636} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1838.761405] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-cb53b5a9-1695-40be-9a5a-731401609236 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] [instance: e143206e-eb12-41b8-9140-229c1533fd80] Extended root virtual disk {{(pid=62519) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1838.762383] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6ebcd63-7c23-4156-b022-9956ab005d1d {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1838.788283] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-cb53b5a9-1695-40be-9a5a-731401609236 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] [instance: e143206e-eb12-41b8-9140-229c1533fd80] Reconfiguring VM instance instance-00000061 to attach disk [datastore1] e143206e-eb12-41b8-9140-229c1533fd80/e143206e-eb12-41b8-9140-229c1533fd80.vmdk or device None with type sparse {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1838.789120] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-328ccb46-93a2-4479-bac6-19f4d68efaf2 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1838.810382] env[62519]: DEBUG oslo_vmware.api [None req-cb53b5a9-1695-40be-9a5a-731401609236 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Waiting for the task: (returnval){ [ 1838.810382] env[62519]: value = "task-1803095" [ 1838.810382] env[62519]: _type = "Task" [ 1838.810382] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1838.823114] env[62519]: DEBUG oslo_vmware.api [None req-cb53b5a9-1695-40be-9a5a-731401609236 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Task: {'id': task-1803095, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1839.110604] env[62519]: DEBUG nova.network.neutron [None req-3bb5b2fc-3c16-485c-b3aa-bebff7f3a9a9 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] [instance: eb5de0a4-0af3-4731-ab30-3ae3d72207a7] Successfully created port: 98c5e48e-5515-4c54-af43-86a9b283477d {{(pid=62519) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1839.168141] env[62519]: DEBUG nova.compute.manager [None req-3bb5b2fc-3c16-485c-b3aa-bebff7f3a9a9 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] [instance: eb5de0a4-0af3-4731-ab30-3ae3d72207a7] Start building block device mappings for instance. 
{{(pid=62519) _build_resources /opt/stack/nova/nova/compute/manager.py:2873}} [ 1839.200042] env[62519]: DEBUG nova.network.neutron [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] [instance: 11d4a010-959f-4f53-94dc-7499007612ad] Updating instance_info_cache with network_info: [{"id": "ca1a3bbf-3f10-4a96-a67d-b77464ab25e7", "address": "fa:16:3e:cc:ef:21", "network": {"id": "ddacabe4-9723-4a0b-9642-4c3ca54cd05f", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1355796152-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.215", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "549cc35f5ff249f6bf22c67872883db0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bc1e16db-ad3b-4b7f-ab64-4609c87abac0", "external-id": "nsx-vlan-transportzone-500", "segmentation_id": 500, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapca1a3bbf-3f", "ovs_interfaceid": "ca1a3bbf-3f10-4a96-a67d-b77464ab25e7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1839.299246] env[62519]: DEBUG nova.network.neutron [None req-fb47b509-8873-40b4-9991-f74edf23e568 tempest-InstanceActionsNegativeTestJSON-1848810980 tempest-InstanceActionsNegativeTestJSON-1848810980-project-member] [instance: fcff790b-d267-4d8c-80d8-ad66cfb66539] Instance cache missing network info. {{(pid=62519) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1839.336056] env[62519]: DEBUG oslo_vmware.api [None req-cb53b5a9-1695-40be-9a5a-731401609236 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Task: {'id': task-1803095, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1839.501682] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-591947a4-2ee2-4689-873f-e40747286f03 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1839.510232] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ce20fcd-dfa4-40e0-a1f5-7bf414e30dcb {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1839.544235] env[62519]: DEBUG nova.network.neutron [None req-fb47b509-8873-40b4-9991-f74edf23e568 tempest-InstanceActionsNegativeTestJSON-1848810980 tempest-InstanceActionsNegativeTestJSON-1848810980-project-member] [instance: fcff790b-d267-4d8c-80d8-ad66cfb66539] Updating instance_info_cache with network_info: [{"id": "ecfd75e5-0384-4fee-b19c-a0219648bc54", "address": "fa:16:3e:17:e8:c3", "network": {"id": "b91f3bb4-711a-4581-83fd-8675780d31be", "bridge": "br-int", "label": "tempest-InstanceActionsNegativeTestJSON-2108310146-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4f35671cdf2742c2b85aeb34385e4d85", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "97b68ed7-8461-4345-b064-96a1dde53a86", "external-id": "nsx-vlan-transportzone-140", "segmentation_id": 140, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapecfd75e5-03", "ovs_interfaceid": "ecfd75e5-0384-4fee-b19c-a0219648bc54", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1839.546394] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-426cb917-a327-4927-aef6-220cae9d5518 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1839.555337] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40744617-0870-4625-8836-e3988f18335d {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1839.571028] env[62519]: DEBUG nova.compute.provider_tree [None req-f642eb67-c251-474f-9741-074de84f9988 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Inventory has not changed in ProviderTree for provider: f8ca0d98-9158-4b85-ae0e-b106f966dd44 {{(pid=62519) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1839.702677] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Releasing lock "refresh_cache-11d4a010-959f-4f53-94dc-7499007612ad" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1839.702904] env[62519]: DEBUG nova.compute.manager [None 
req-2e346fd1-4f93-4335-96df-208436219ecf None None] [instance: 11d4a010-959f-4f53-94dc-7499007612ad] Updated the network info_cache for instance {{(pid=62519) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10489}} [ 1839.703170] env[62519]: DEBUG oslo_service.periodic_task [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62519) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1839.703514] env[62519]: DEBUG oslo_service.periodic_task [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62519) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1839.703639] env[62519]: DEBUG oslo_service.periodic_task [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62519) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1839.703801] env[62519]: DEBUG oslo_service.periodic_task [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62519) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1839.703957] env[62519]: DEBUG oslo_service.periodic_task [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62519) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1839.704090] env[62519]: DEBUG oslo_service.periodic_task [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Running periodic task ComputeManager._sync_power_states {{(pid=62519) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1839.821568] env[62519]: DEBUG oslo_vmware.api [None req-cb53b5a9-1695-40be-9a5a-731401609236 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Task: {'id': task-1803095, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1839.834651] env[62519]: DEBUG nova.compute.manager [req-32d9794c-8305-4501-822a-86b0ec635345 req-c0cfeba9-3bcd-4cf7-9ea8-bced3b77a5eb service nova] [instance: fcff790b-d267-4d8c-80d8-ad66cfb66539] Received event network-vif-plugged-ecfd75e5-0384-4fee-b19c-a0219648bc54 {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1839.834965] env[62519]: DEBUG oslo_concurrency.lockutils [req-32d9794c-8305-4501-822a-86b0ec635345 req-c0cfeba9-3bcd-4cf7-9ea8-bced3b77a5eb service nova] Acquiring lock "fcff790b-d267-4d8c-80d8-ad66cfb66539-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1839.835229] env[62519]: DEBUG oslo_concurrency.lockutils [req-32d9794c-8305-4501-822a-86b0ec635345 req-c0cfeba9-3bcd-4cf7-9ea8-bced3b77a5eb service nova] Lock "fcff790b-d267-4d8c-80d8-ad66cfb66539-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1839.835786] env[62519]: DEBUG oslo_concurrency.lockutils [req-32d9794c-8305-4501-822a-86b0ec635345 req-c0cfeba9-3bcd-4cf7-9ea8-bced3b77a5eb service nova] Lock "fcff790b-d267-4d8c-80d8-ad66cfb66539-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1839.835786] env[62519]: DEBUG nova.compute.manager [req-32d9794c-8305-4501-822a-86b0ec635345 req-c0cfeba9-3bcd-4cf7-9ea8-bced3b77a5eb service nova] [instance: fcff790b-d267-4d8c-80d8-ad66cfb66539] No waiting events found dispatching network-vif-plugged-ecfd75e5-0384-4fee-b19c-a0219648bc54 {{(pid=62519) pop_instance_event /opt/stack/nova/nova/compute/manager.py:323}} [ 1839.835943] env[62519]: WARNING nova.compute.manager [req-32d9794c-8305-4501-822a-86b0ec635345 req-c0cfeba9-3bcd-4cf7-9ea8-bced3b77a5eb service nova] [instance: fcff790b-d267-4d8c-80d8-ad66cfb66539] Received unexpected event network-vif-plugged-ecfd75e5-0384-4fee-b19c-a0219648bc54 for instance with vm_state building and task_state spawning. [ 1839.836124] env[62519]: DEBUG nova.compute.manager [req-32d9794c-8305-4501-822a-86b0ec635345 req-c0cfeba9-3bcd-4cf7-9ea8-bced3b77a5eb service nova] [instance: fcff790b-d267-4d8c-80d8-ad66cfb66539] Received event network-changed-ecfd75e5-0384-4fee-b19c-a0219648bc54 {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1839.836535] env[62519]: DEBUG nova.compute.manager [req-32d9794c-8305-4501-822a-86b0ec635345 req-c0cfeba9-3bcd-4cf7-9ea8-bced3b77a5eb service nova] [instance: fcff790b-d267-4d8c-80d8-ad66cfb66539] Refreshing instance network info cache due to event network-changed-ecfd75e5-0384-4fee-b19c-a0219648bc54. 
{{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11628}} [ 1839.837545] env[62519]: DEBUG oslo_concurrency.lockutils [req-32d9794c-8305-4501-822a-86b0ec635345 req-c0cfeba9-3bcd-4cf7-9ea8-bced3b77a5eb service nova] Acquiring lock "refresh_cache-fcff790b-d267-4d8c-80d8-ad66cfb66539" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1840.050379] env[62519]: DEBUG oslo_concurrency.lockutils [None req-fb47b509-8873-40b4-9991-f74edf23e568 tempest-InstanceActionsNegativeTestJSON-1848810980 tempest-InstanceActionsNegativeTestJSON-1848810980-project-member] Releasing lock "refresh_cache-fcff790b-d267-4d8c-80d8-ad66cfb66539" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1840.050706] env[62519]: DEBUG nova.compute.manager [None req-fb47b509-8873-40b4-9991-f74edf23e568 tempest-InstanceActionsNegativeTestJSON-1848810980 tempest-InstanceActionsNegativeTestJSON-1848810980-project-member] [instance: fcff790b-d267-4d8c-80d8-ad66cfb66539] Instance network_info: |[{"id": "ecfd75e5-0384-4fee-b19c-a0219648bc54", "address": "fa:16:3e:17:e8:c3", "network": {"id": "b91f3bb4-711a-4581-83fd-8675780d31be", "bridge": "br-int", "label": "tempest-InstanceActionsNegativeTestJSON-2108310146-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4f35671cdf2742c2b85aeb34385e4d85", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "97b68ed7-8461-4345-b064-96a1dde53a86", "external-id": "nsx-vlan-transportzone-140", "segmentation_id": 140, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapecfd75e5-03", "ovs_interfaceid": "ecfd75e5-0384-4fee-b19c-a0219648bc54", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62519) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2004}} [ 1840.051020] env[62519]: DEBUG oslo_concurrency.lockutils [req-32d9794c-8305-4501-822a-86b0ec635345 req-c0cfeba9-3bcd-4cf7-9ea8-bced3b77a5eb service nova] Acquired lock "refresh_cache-fcff790b-d267-4d8c-80d8-ad66cfb66539" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1840.051222] env[62519]: DEBUG nova.network.neutron [req-32d9794c-8305-4501-822a-86b0ec635345 req-c0cfeba9-3bcd-4cf7-9ea8-bced3b77a5eb service nova] [instance: fcff790b-d267-4d8c-80d8-ad66cfb66539] Refreshing network info cache for port ecfd75e5-0384-4fee-b19c-a0219648bc54 {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1840.052683] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-fb47b509-8873-40b4-9991-f74edf23e568 tempest-InstanceActionsNegativeTestJSON-1848810980 tempest-InstanceActionsNegativeTestJSON-1848810980-project-member] [instance: fcff790b-d267-4d8c-80d8-ad66cfb66539] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:17:e8:c3', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '97b68ed7-8461-4345-b064-96a1dde53a86', 'network-type': 
'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'ecfd75e5-0384-4fee-b19c-a0219648bc54', 'vif_model': 'vmxnet3'}] {{(pid=62519) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1840.060648] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-fb47b509-8873-40b4-9991-f74edf23e568 tempest-InstanceActionsNegativeTestJSON-1848810980 tempest-InstanceActionsNegativeTestJSON-1848810980-project-member] Creating folder: Project (4f35671cdf2742c2b85aeb34385e4d85). Parent ref: group-v373567. {{(pid=62519) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1840.061813] env[62519]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-3bbdaeac-1cc2-4f0d-a627-e388f647d480 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1840.076346] env[62519]: DEBUG nova.scheduler.client.report [None req-f642eb67-c251-474f-9741-074de84f9988 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Inventory has not changed for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 157, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1840.079868] env[62519]: INFO nova.virt.vmwareapi.vm_util [None req-fb47b509-8873-40b4-9991-f74edf23e568 tempest-InstanceActionsNegativeTestJSON-1848810980 tempest-InstanceActionsNegativeTestJSON-1848810980-project-member] Created folder: Project (4f35671cdf2742c2b85aeb34385e4d85) in parent group-v373567. [ 1840.080104] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-fb47b509-8873-40b4-9991-f74edf23e568 tempest-InstanceActionsNegativeTestJSON-1848810980 tempest-InstanceActionsNegativeTestJSON-1848810980-project-member] Creating folder: Instances. Parent ref: group-v373825. {{(pid=62519) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1840.080566] env[62519]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-01308281-b602-455b-9f40-e1f6ae5c923a {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1840.091357] env[62519]: INFO nova.virt.vmwareapi.vm_util [None req-fb47b509-8873-40b4-9991-f74edf23e568 tempest-InstanceActionsNegativeTestJSON-1848810980 tempest-InstanceActionsNegativeTestJSON-1848810980-project-member] Created folder: Instances in parent group-v373825. [ 1840.091762] env[62519]: DEBUG oslo.service.loopingcall [None req-fb47b509-8873-40b4-9991-f74edf23e568 tempest-InstanceActionsNegativeTestJSON-1848810980 tempest-InstanceActionsNegativeTestJSON-1848810980-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62519) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1840.092063] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: fcff790b-d267-4d8c-80d8-ad66cfb66539] Creating VM on the ESX host {{(pid=62519) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1840.092375] env[62519]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-3f20bdf4-d95c-461c-bfb7-64a806ba8cbb {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1840.114812] env[62519]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1840.114812] env[62519]: value = "task-1803098" [ 1840.114812] env[62519]: _type = "Task" [ 1840.114812] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1840.124317] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1803098, 'name': CreateVM_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1840.180153] env[62519]: DEBUG nova.compute.manager [None req-3bb5b2fc-3c16-485c-b3aa-bebff7f3a9a9 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] [instance: eb5de0a4-0af3-4731-ab30-3ae3d72207a7] Start spawning the instance on the hypervisor. {{(pid=62519) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2647}} [ 1840.208816] env[62519]: DEBUG nova.virt.hardware [None req-3bb5b2fc-3c16-485c-b3aa-bebff7f3a9a9 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T07:56:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T07:55:55Z,direct_url=,disk_format='vmdk',id=15793716-f1d9-4a86-9030-717adf498693,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4069337684d348e0a1ab4eb6a1a2b14d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T07:55:56Z,virtual_size=,visibility=), allow threads: False {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1840.209253] env[62519]: DEBUG nova.virt.hardware [None req-3bb5b2fc-3c16-485c-b3aa-bebff7f3a9a9 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Flavor limits 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1840.209564] env[62519]: DEBUG nova.virt.hardware [None req-3bb5b2fc-3c16-485c-b3aa-bebff7f3a9a9 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Image limits 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1840.209930] env[62519]: DEBUG nova.virt.hardware [None req-3bb5b2fc-3c16-485c-b3aa-bebff7f3a9a9 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Flavor pref 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1840.210190] env[62519]: DEBUG nova.virt.hardware [None 
req-3bb5b2fc-3c16-485c-b3aa-bebff7f3a9a9 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Image pref 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1840.210429] env[62519]: DEBUG nova.virt.hardware [None req-3bb5b2fc-3c16-485c-b3aa-bebff7f3a9a9 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1840.210733] env[62519]: DEBUG nova.virt.hardware [None req-3bb5b2fc-3c16-485c-b3aa-bebff7f3a9a9 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1840.210979] env[62519]: DEBUG nova.virt.hardware [None req-3bb5b2fc-3c16-485c-b3aa-bebff7f3a9a9 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62519) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1840.211249] env[62519]: DEBUG nova.virt.hardware [None req-3bb5b2fc-3c16-485c-b3aa-bebff7f3a9a9 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Got 1 possible topologies {{(pid=62519) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1840.211526] env[62519]: DEBUG nova.virt.hardware [None req-3bb5b2fc-3c16-485c-b3aa-bebff7f3a9a9 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1840.211829] env[62519]: DEBUG nova.virt.hardware [None req-3bb5b2fc-3c16-485c-b3aa-bebff7f3a9a9 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1840.217512] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Getting list of instances from cluster (obj){ [ 1840.217512] env[62519]: value = "domain-c8" [ 1840.217512] env[62519]: _type = "ClusterComputeResource" [ 1840.217512] env[62519]: } {{(pid=62519) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2116}} [ 1840.218442] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d37706f-005e-41ec-9f92-ec4d743541fc {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1840.223951] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-671b7605-6004-410c-9bdf-2221e4afc098 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1840.235299] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5fbf152e-2c6e-466d-80cb-6f8a035e060d {{(pid=62519) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1840.261994] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Got total of 14 instances {{(pid=62519) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2125}} [ 1840.262376] env[62519]: WARNING nova.compute.manager [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] While synchronizing instance power states, found 19 instances in the database and 14 instances on the hypervisor. [ 1840.262682] env[62519]: DEBUG nova.compute.manager [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Triggering sync for uuid 11d4a010-959f-4f53-94dc-7499007612ad {{(pid=62519) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10828}} [ 1840.263018] env[62519]: DEBUG nova.compute.manager [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Triggering sync for uuid 8b20b91d-d4e7-4ec2-88ec-76b444a8d8a8 {{(pid=62519) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10828}} [ 1840.263350] env[62519]: DEBUG nova.compute.manager [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Triggering sync for uuid 46b3a0fb-29f6-4b66-a091-2d125b69d109 {{(pid=62519) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10828}} [ 1840.263632] env[62519]: DEBUG nova.compute.manager [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Triggering sync for uuid 417588f8-6288-4ecd-9764-dbc923549c5d {{(pid=62519) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10828}} [ 1840.264378] env[62519]: DEBUG nova.compute.manager [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Triggering sync for uuid 99f22198-1a65-4d0d-b665-90c7063dbdb9 {{(pid=62519) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10828}} [ 1840.264660] env[62519]: DEBUG nova.compute.manager [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Triggering sync for uuid 9f71845a-e80c-4822-b3de-717f1d83bc49 {{(pid=62519) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10828}} [ 1840.264875] env[62519]: DEBUG nova.compute.manager [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Triggering sync for uuid ee4b10ba-1c56-47cf-a528-d6e65c286ddb {{(pid=62519) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10828}} [ 1840.265092] env[62519]: DEBUG nova.compute.manager [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Triggering sync for uuid 2ea8304e-5b91-4908-a876-6e2c780b1da9 {{(pid=62519) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10828}} [ 1840.265307] env[62519]: DEBUG nova.compute.manager [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Triggering sync for uuid 4a0f7975-5a07-4593-ae71-cabebdefe0fe {{(pid=62519) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10828}} [ 1840.265555] env[62519]: DEBUG nova.compute.manager [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Triggering sync for uuid 358ee402-1112-4eea-a518-a45a6bf92c31 {{(pid=62519) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10828}} [ 1840.265830] env[62519]: DEBUG nova.compute.manager [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Triggering sync for uuid 0dc95d5e-270c-44cf-b13a-d9ecd0cc3b17 {{(pid=62519) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10828}} [ 1840.266181] env[62519]: DEBUG nova.compute.manager [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Triggering sync for uuid 10bfd4ac-6f11-4c96-87a0-ce74bc1193c4 {{(pid=62519) _sync_power_states 
/opt/stack/nova/nova/compute/manager.py:10828}} [ 1840.266396] env[62519]: DEBUG nova.compute.manager [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Triggering sync for uuid 71c6bd0c-40da-44eb-8f37-4f1b7fb0f0a2 {{(pid=62519) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10828}} [ 1840.267302] env[62519]: DEBUG nova.compute.manager [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Triggering sync for uuid f3665f89-1747-4567-9e56-c937d4ac81da {{(pid=62519) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10828}} [ 1840.267302] env[62519]: DEBUG nova.compute.manager [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Triggering sync for uuid 4a9656fd-2b9f-4dd6-8b71-39e55813f2f6 {{(pid=62519) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10828}} [ 1840.267411] env[62519]: DEBUG nova.compute.manager [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Triggering sync for uuid e143206e-eb12-41b8-9140-229c1533fd80 {{(pid=62519) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10828}} [ 1840.267668] env[62519]: DEBUG nova.compute.manager [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Triggering sync for uuid fcff790b-d267-4d8c-80d8-ad66cfb66539 {{(pid=62519) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10828}} [ 1840.267983] env[62519]: DEBUG nova.compute.manager [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Triggering sync for uuid eb5de0a4-0af3-4731-ab30-3ae3d72207a7 {{(pid=62519) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10828}} [ 1840.268269] env[62519]: DEBUG nova.compute.manager [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Triggering sync for uuid 646c9dfc-7b78-4cdb-b4f5-480c43af38c4 {{(pid=62519) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10828}} [ 1840.269333] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Acquiring lock "11d4a010-959f-4f53-94dc-7499007612ad" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1840.269692] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Lock "11d4a010-959f-4f53-94dc-7499007612ad" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1840.270114] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Acquiring lock "8b20b91d-d4e7-4ec2-88ec-76b444a8d8a8" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1840.270399] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Lock "8b20b91d-d4e7-4ec2-88ec-76b444a8d8a8" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1840.270746] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Acquiring lock 
"46b3a0fb-29f6-4b66-a091-2d125b69d109" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1840.271026] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Lock "46b3a0fb-29f6-4b66-a091-2d125b69d109" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1840.271408] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Acquiring lock "417588f8-6288-4ecd-9764-dbc923549c5d" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1840.271779] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Lock "417588f8-6288-4ecd-9764-dbc923549c5d" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1840.272206] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Acquiring lock "99f22198-1a65-4d0d-b665-90c7063dbdb9" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1840.272641] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Lock "99f22198-1a65-4d0d-b665-90c7063dbdb9" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1840.272979] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Acquiring lock "9f71845a-e80c-4822-b3de-717f1d83bc49" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1840.273284] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Lock "9f71845a-e80c-4822-b3de-717f1d83bc49" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1840.273547] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Acquiring lock "ee4b10ba-1c56-47cf-a528-d6e65c286ddb" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1840.273812] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Lock "ee4b10ba-1c56-47cf-a528-d6e65c286ddb" acquired by 
"nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1840.274184] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Acquiring lock "2ea8304e-5b91-4908-a876-6e2c780b1da9" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1840.274526] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Lock "2ea8304e-5b91-4908-a876-6e2c780b1da9" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1840.274970] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Acquiring lock "4a0f7975-5a07-4593-ae71-cabebdefe0fe" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1840.275261] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Lock "4a0f7975-5a07-4593-ae71-cabebdefe0fe" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1840.275521] env[62519]: INFO nova.compute.manager [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] [instance: 4a0f7975-5a07-4593-ae71-cabebdefe0fe] During sync_power_state the instance has a pending task (image_uploading). Skip. 
[ 1840.275809] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Lock "4a0f7975-5a07-4593-ae71-cabebdefe0fe" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.001s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1840.276134] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Acquiring lock "358ee402-1112-4eea-a518-a45a6bf92c31" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1840.276512] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Acquiring lock "0dc95d5e-270c-44cf-b13a-d9ecd0cc3b17" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1840.276810] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Lock "0dc95d5e-270c-44cf-b13a-d9ecd0cc3b17" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1840.277438] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Acquiring lock "10bfd4ac-6f11-4c96-87a0-ce74bc1193c4" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1840.277735] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Lock "10bfd4ac-6f11-4c96-87a0-ce74bc1193c4" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1840.278229] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Acquiring lock "71c6bd0c-40da-44eb-8f37-4f1b7fb0f0a2" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1840.278542] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Acquiring lock "f3665f89-1747-4567-9e56-c937d4ac81da" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1840.278816] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Lock "f3665f89-1747-4567-9e56-c937d4ac81da" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1840.279217] env[62519]: DEBUG oslo_concurrency.lockutils [None 
req-2e346fd1-4f93-4335-96df-208436219ecf None None] Acquiring lock "4a9656fd-2b9f-4dd6-8b71-39e55813f2f6" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1840.279487] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Lock "4a9656fd-2b9f-4dd6-8b71-39e55813f2f6" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1840.279859] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Acquiring lock "e143206e-eb12-41b8-9140-229c1533fd80" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1840.280216] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Acquiring lock "fcff790b-d267-4d8c-80d8-ad66cfb66539" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1840.280590] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Acquiring lock "eb5de0a4-0af3-4731-ab30-3ae3d72207a7" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1840.281074] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Acquiring lock "646c9dfc-7b78-4cdb-b4f5-480c43af38c4" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1840.281446] env[62519]: DEBUG oslo_service.periodic_task [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62519) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1840.281820] env[62519]: DEBUG nova.compute.manager [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=62519) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11037}} [ 1840.295037] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f04c84e-d211-437a-907e-8aa7e56217bc {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1840.299592] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2744a6ea-25a6-448e-b8ba-88e854047bfa {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1840.303882] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc8453a6-0eb6-44f7-b102-254161ef209f {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1840.307738] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e78f2b55-ce7b-4ed4-8097-c680e1c3e166 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1840.313908] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da1dad49-543e-4432-ab77-dbf439e2b5b0 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1840.318395] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8356c83c-86bf-436d-b272-7957dccc0c71 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1841.086368] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07765129-0b99-4682-8eac-322bbdd6c85a {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1841.090223] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e100fff9-33f7-4bd2-862d-1fb87cd0d7e4 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1841.093578] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c94e5b77-3dde-4cf3-883b-cb88745dec95 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1841.096697] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b5b7548-aa73-4fd7-a4cc-addcff1a21fa {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1841.099816] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d15155fa-05ba-4ff0-a85a-61120d007f6e {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1841.103023] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-395cda12-ea60-47fb-8e5d-b2103d271d3e {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1841.105613] env[62519]: DEBUG oslo_service.periodic_task [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Running periodic task ComputeManager.update_available_resource 
{{(pid=62519) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1841.113688] env[62519]: DEBUG nova.network.neutron [None req-3bb5b2fc-3c16-485c-b3aa-bebff7f3a9a9 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] [instance: eb5de0a4-0af3-4731-ab30-3ae3d72207a7] Successfully updated port: 98c5e48e-5515-4c54-af43-86a9b283477d {{(pid=62519) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1841.117647] env[62519]: DEBUG oslo_concurrency.lockutils [None req-f642eb67-c251-474f-9741-074de84f9988 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.967s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1841.118122] env[62519]: DEBUG nova.compute.manager [None req-f642eb67-c251-474f-9741-074de84f9988 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] [instance: 646c9dfc-7b78-4cdb-b4f5-480c43af38c4] Start building networks asynchronously for instance. {{(pid=62519) _build_resources /opt/stack/nova/nova/compute/manager.py:2838}} [ 1841.133886] env[62519]: DEBUG oslo_concurrency.lockutils [None req-10486ef6-1fb8-4a31-906c-fdcc5177956e tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 5.878s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1841.134254] env[62519]: DEBUG nova.objects.instance [None req-10486ef6-1fb8-4a31-906c-fdcc5177956e tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] Lazy-loading 'resources' on Instance uuid 71c6bd0c-40da-44eb-8f37-4f1b7fb0f0a2 {{(pid=62519) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1841.185775] env[62519]: WARNING urllib3.connectionpool [-] Connection pool is full, discarding connection: vc1.osci.c.eu-de-1.cloud.sap. Connection pool size: 10: queue.Full [ 1841.190788] env[62519]: WARNING urllib3.connectionpool [-] Connection pool is full, discarding connection: vc1.osci.c.eu-de-1.cloud.sap. Connection pool size: 10: queue.Full [ 1841.196941] env[62519]: WARNING urllib3.connectionpool [-] Connection pool is full, discarding connection: vc1.osci.c.eu-de-1.cloud.sap. Connection pool size: 10: queue.Full [ 1841.200948] env[62519]: WARNING urllib3.connectionpool [-] Connection pool is full, discarding connection: vc1.osci.c.eu-de-1.cloud.sap. Connection pool size: 10: queue.Full [ 1841.206241] env[62519]: DEBUG oslo_vmware.api [None req-cb53b5a9-1695-40be-9a5a-731401609236 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Task: {'id': task-1803095, 'name': ReconfigVM_Task, 'duration_secs': 1.488827} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1841.206961] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1803098, 'name': CreateVM_Task, 'duration_secs': 0.359722} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1841.207353] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-cb53b5a9-1695-40be-9a5a-731401609236 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] [instance: e143206e-eb12-41b8-9140-229c1533fd80] Reconfigured VM instance instance-00000061 to attach disk [datastore1] e143206e-eb12-41b8-9140-229c1533fd80/e143206e-eb12-41b8-9140-229c1533fd80.vmdk or device None with type sparse {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1841.207918] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: fcff790b-d267-4d8c-80d8-ad66cfb66539] Created VM on the ESX host {{(pid=62519) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1841.208847] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-f7d60e65-afe6-4cd3-84d5-1e8505d225c4 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1841.211129] env[62519]: DEBUG oslo_concurrency.lockutils [None req-fb47b509-8873-40b4-9991-f74edf23e568 tempest-InstanceActionsNegativeTestJSON-1848810980 tempest-InstanceActionsNegativeTestJSON-1848810980-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1841.211907] env[62519]: DEBUG oslo_concurrency.lockutils [None req-fb47b509-8873-40b4-9991-f74edf23e568 tempest-InstanceActionsNegativeTestJSON-1848810980 tempest-InstanceActionsNegativeTestJSON-1848810980-project-member] Acquired lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1841.211907] env[62519]: DEBUG oslo_concurrency.lockutils [None req-fb47b509-8873-40b4-9991-f74edf23e568 tempest-InstanceActionsNegativeTestJSON-1848810980 tempest-InstanceActionsNegativeTestJSON-1848810980-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1841.212984] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-51fe0702-2bea-4754-9ab2-09a71bc37953 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1841.219909] env[62519]: DEBUG oslo_vmware.api [None req-fb47b509-8873-40b4-9991-f74edf23e568 tempest-InstanceActionsNegativeTestJSON-1848810980 tempest-InstanceActionsNegativeTestJSON-1848810980-project-member] Waiting for the task: (returnval){ [ 1841.219909] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]5274b5ca-1850-6c46-a2eb-03c4bc3827f3" [ 1841.219909] env[62519]: _type = "Task" [ 1841.219909] env[62519]: } to complete. 
{{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1841.227507] env[62519]: DEBUG oslo_vmware.api [None req-cb53b5a9-1695-40be-9a5a-731401609236 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Waiting for the task: (returnval){ [ 1841.227507] env[62519]: value = "task-1803099" [ 1841.227507] env[62519]: _type = "Task" [ 1841.227507] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1841.235947] env[62519]: DEBUG oslo_vmware.api [None req-fb47b509-8873-40b4-9991-f74edf23e568 tempest-InstanceActionsNegativeTestJSON-1848810980 tempest-InstanceActionsNegativeTestJSON-1848810980-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]5274b5ca-1850-6c46-a2eb-03c4bc3827f3, 'name': SearchDatastore_Task, 'duration_secs': 0.01205} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1841.236690] env[62519]: DEBUG oslo_concurrency.lockutils [None req-fb47b509-8873-40b4-9991-f74edf23e568 tempest-InstanceActionsNegativeTestJSON-1848810980 tempest-InstanceActionsNegativeTestJSON-1848810980-project-member] Releasing lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1841.236934] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-fb47b509-8873-40b4-9991-f74edf23e568 tempest-InstanceActionsNegativeTestJSON-1848810980 tempest-InstanceActionsNegativeTestJSON-1848810980-project-member] [instance: fcff790b-d267-4d8c-80d8-ad66cfb66539] Processing image 15793716-f1d9-4a86-9030-717adf498693 {{(pid=62519) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1841.237215] env[62519]: DEBUG oslo_concurrency.lockutils [None req-fb47b509-8873-40b4-9991-f74edf23e568 tempest-InstanceActionsNegativeTestJSON-1848810980 tempest-InstanceActionsNegativeTestJSON-1848810980-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1841.237418] env[62519]: DEBUG oslo_concurrency.lockutils [None req-fb47b509-8873-40b4-9991-f74edf23e568 tempest-InstanceActionsNegativeTestJSON-1848810980 tempest-InstanceActionsNegativeTestJSON-1848810980-project-member] Acquired lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1841.237612] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-fb47b509-8873-40b4-9991-f74edf23e568 tempest-InstanceActionsNegativeTestJSON-1848810980 tempest-InstanceActionsNegativeTestJSON-1848810980-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62519) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1841.238425] env[62519]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-aa5ed668-ae9e-4b58-b454-602a88f8978b {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1841.244250] env[62519]: DEBUG oslo_vmware.api [None 
req-cb53b5a9-1695-40be-9a5a-731401609236 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Task: {'id': task-1803099, 'name': Rename_Task} progress is 10%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1841.253235] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-fb47b509-8873-40b4-9991-f74edf23e568 tempest-InstanceActionsNegativeTestJSON-1848810980 tempest-InstanceActionsNegativeTestJSON-1848810980-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62519) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1841.253489] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-fb47b509-8873-40b4-9991-f74edf23e568 tempest-InstanceActionsNegativeTestJSON-1848810980 tempest-InstanceActionsNegativeTestJSON-1848810980-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62519) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1841.254679] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3d92cc3c-b594-4e81-ba74-048e0a03f456 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1841.261544] env[62519]: DEBUG oslo_vmware.api [None req-fb47b509-8873-40b4-9991-f74edf23e568 tempest-InstanceActionsNegativeTestJSON-1848810980 tempest-InstanceActionsNegativeTestJSON-1848810980-project-member] Waiting for the task: (returnval){ [ 1841.261544] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]52ab6eeb-7799-159c-1097-93d4fdfbcfda" [ 1841.261544] env[62519]: _type = "Task" [ 1841.261544] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1841.271151] env[62519]: DEBUG oslo_vmware.api [None req-fb47b509-8873-40b4-9991-f74edf23e568 tempest-InstanceActionsNegativeTestJSON-1848810980 tempest-InstanceActionsNegativeTestJSON-1848810980-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52ab6eeb-7799-159c-1097-93d4fdfbcfda, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1841.583489] env[62519]: DEBUG nova.network.neutron [req-32d9794c-8305-4501-822a-86b0ec635345 req-c0cfeba9-3bcd-4cf7-9ea8-bced3b77a5eb service nova] [instance: fcff790b-d267-4d8c-80d8-ad66cfb66539] Updated VIF entry in instance network info cache for port ecfd75e5-0384-4fee-b19c-a0219648bc54. 
{{(pid=62519) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1841.583873] env[62519]: DEBUG nova.network.neutron [req-32d9794c-8305-4501-822a-86b0ec635345 req-c0cfeba9-3bcd-4cf7-9ea8-bced3b77a5eb service nova] [instance: fcff790b-d267-4d8c-80d8-ad66cfb66539] Updating instance_info_cache with network_info: [{"id": "ecfd75e5-0384-4fee-b19c-a0219648bc54", "address": "fa:16:3e:17:e8:c3", "network": {"id": "b91f3bb4-711a-4581-83fd-8675780d31be", "bridge": "br-int", "label": "tempest-InstanceActionsNegativeTestJSON-2108310146-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4f35671cdf2742c2b85aeb34385e4d85", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "97b68ed7-8461-4345-b064-96a1dde53a86", "external-id": "nsx-vlan-transportzone-140", "segmentation_id": 140, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapecfd75e5-03", "ovs_interfaceid": "ecfd75e5-0384-4fee-b19c-a0219648bc54", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1841.622774] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1841.623378] env[62519]: DEBUG oslo_concurrency.lockutils [None req-3bb5b2fc-3c16-485c-b3aa-bebff7f3a9a9 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Acquiring lock "refresh_cache-eb5de0a4-0af3-4731-ab30-3ae3d72207a7" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1841.623527] env[62519]: DEBUG oslo_concurrency.lockutils [None req-3bb5b2fc-3c16-485c-b3aa-bebff7f3a9a9 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Acquired lock "refresh_cache-eb5de0a4-0af3-4731-ab30-3ae3d72207a7" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1841.623669] env[62519]: DEBUG nova.network.neutron [None req-3bb5b2fc-3c16-485c-b3aa-bebff7f3a9a9 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] [instance: eb5de0a4-0af3-4731-ab30-3ae3d72207a7] Building network info cache for instance {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1841.626050] env[62519]: DEBUG nova.compute.utils [None req-f642eb67-c251-474f-9741-074de84f9988 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Using /dev/sd instead of None {{(pid=62519) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1841.627041] env[62519]: DEBUG nova.compute.manager [None req-f642eb67-c251-474f-9741-074de84f9988 tempest-ServerActionsTestOtherA-311064147 
tempest-ServerActionsTestOtherA-311064147-project-member] [instance: 646c9dfc-7b78-4cdb-b4f5-480c43af38c4] Allocating IP information in the background. {{(pid=62519) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1989}} [ 1841.627144] env[62519]: DEBUG nova.network.neutron [None req-f642eb67-c251-474f-9741-074de84f9988 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] [instance: 646c9dfc-7b78-4cdb-b4f5-480c43af38c4] allocate_for_instance() {{(pid=62519) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1841.666291] env[62519]: DEBUG nova.policy [None req-f642eb67-c251-474f-9741-074de84f9988 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '81bb350c0ff54453b99b45ac84a82935', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '549cc35f5ff249f6bf22c67872883db0', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62519) authorize /opt/stack/nova/nova/policy.py:192}} [ 1841.678019] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Lock "46b3a0fb-29f6-4b66-a091-2d125b69d109" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 1.401s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1841.678019] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Lock "99f22198-1a65-4d0d-b665-90c7063dbdb9" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 1.399s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1841.688763] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Lock "2ea8304e-5b91-4908-a876-6e2c780b1da9" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 1.414s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1841.689263] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Lock "417588f8-6288-4ecd-9764-dbc923549c5d" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 1.417s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1841.690715] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Lock "8b20b91d-d4e7-4ec2-88ec-76b444a8d8a8" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 1.420s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1841.693172] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Lock "11d4a010-959f-4f53-94dc-7499007612ad" "released" by 
"nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 1.424s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1841.693473] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Lock "ee4b10ba-1c56-47cf-a528-d6e65c286ddb" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 1.420s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1841.697367] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Lock "f3665f89-1747-4567-9e56-c937d4ac81da" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 1.419s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1841.707911] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Lock "10bfd4ac-6f11-4c96-87a0-ce74bc1193c4" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 1.430s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1841.730962] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Lock "9f71845a-e80c-4822-b3de-717f1d83bc49" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 1.458s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1841.731338] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Lock "0dc95d5e-270c-44cf-b13a-d9ecd0cc3b17" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 1.455s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1841.731729] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Lock "4a9656fd-2b9f-4dd6-8b71-39e55813f2f6" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 1.452s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1841.741230] env[62519]: DEBUG oslo_vmware.api [None req-cb53b5a9-1695-40be-9a5a-731401609236 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Task: {'id': task-1803099, 'name': Rename_Task, 'duration_secs': 0.181799} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1841.743762] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-cb53b5a9-1695-40be-9a5a-731401609236 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] [instance: e143206e-eb12-41b8-9140-229c1533fd80] Powering on the VM {{(pid=62519) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1841.744199] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-d6803865-1811-4041-b424-08a33b7f83c5 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1841.753195] env[62519]: DEBUG oslo_vmware.api [None req-cb53b5a9-1695-40be-9a5a-731401609236 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Waiting for the task: (returnval){ [ 1841.753195] env[62519]: value = "task-1803100" [ 1841.753195] env[62519]: _type = "Task" [ 1841.753195] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1841.767923] env[62519]: DEBUG oslo_vmware.api [None req-cb53b5a9-1695-40be-9a5a-731401609236 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Task: {'id': task-1803100, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1841.779051] env[62519]: DEBUG oslo_vmware.api [None req-fb47b509-8873-40b4-9991-f74edf23e568 tempest-InstanceActionsNegativeTestJSON-1848810980 tempest-InstanceActionsNegativeTestJSON-1848810980-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52ab6eeb-7799-159c-1097-93d4fdfbcfda, 'name': SearchDatastore_Task, 'duration_secs': 0.0107} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1841.780901] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a817bca7-3912-4c38-9fd9-afb199dac399 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1841.789300] env[62519]: DEBUG oslo_vmware.api [None req-fb47b509-8873-40b4-9991-f74edf23e568 tempest-InstanceActionsNegativeTestJSON-1848810980 tempest-InstanceActionsNegativeTestJSON-1848810980-project-member] Waiting for the task: (returnval){ [ 1841.789300] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]52766136-f4b7-e2cc-c75d-e8624392ef88" [ 1841.789300] env[62519]: _type = "Task" [ 1841.789300] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1841.800154] env[62519]: DEBUG oslo_vmware.api [None req-fb47b509-8873-40b4-9991-f74edf23e568 tempest-InstanceActionsNegativeTestJSON-1848810980 tempest-InstanceActionsNegativeTestJSON-1848810980-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52766136-f4b7-e2cc-c75d-e8624392ef88, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1841.863944] env[62519]: DEBUG nova.compute.manager [req-be7d4aba-df67-4508-8caa-96b99c97c918 req-c8996157-3820-4dcc-b3e3-d1309c27a8ba service nova] [instance: eb5de0a4-0af3-4731-ab30-3ae3d72207a7] Received event network-vif-plugged-98c5e48e-5515-4c54-af43-86a9b283477d {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1841.864188] env[62519]: DEBUG oslo_concurrency.lockutils [req-be7d4aba-df67-4508-8caa-96b99c97c918 req-c8996157-3820-4dcc-b3e3-d1309c27a8ba service nova] Acquiring lock "eb5de0a4-0af3-4731-ab30-3ae3d72207a7-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1841.864399] env[62519]: DEBUG oslo_concurrency.lockutils [req-be7d4aba-df67-4508-8caa-96b99c97c918 req-c8996157-3820-4dcc-b3e3-d1309c27a8ba service nova] Lock "eb5de0a4-0af3-4731-ab30-3ae3d72207a7-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1841.864564] env[62519]: DEBUG oslo_concurrency.lockutils [req-be7d4aba-df67-4508-8caa-96b99c97c918 req-c8996157-3820-4dcc-b3e3-d1309c27a8ba service nova] Lock "eb5de0a4-0af3-4731-ab30-3ae3d72207a7-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1841.864724] env[62519]: DEBUG nova.compute.manager [req-be7d4aba-df67-4508-8caa-96b99c97c918 req-c8996157-3820-4dcc-b3e3-d1309c27a8ba service nova] [instance: eb5de0a4-0af3-4731-ab30-3ae3d72207a7] No waiting events found dispatching network-vif-plugged-98c5e48e-5515-4c54-af43-86a9b283477d {{(pid=62519) pop_instance_event /opt/stack/nova/nova/compute/manager.py:323}} [ 1841.864882] env[62519]: WARNING nova.compute.manager [req-be7d4aba-df67-4508-8caa-96b99c97c918 req-c8996157-3820-4dcc-b3e3-d1309c27a8ba service nova] [instance: eb5de0a4-0af3-4731-ab30-3ae3d72207a7] Received unexpected event network-vif-plugged-98c5e48e-5515-4c54-af43-86a9b283477d for instance with vm_state building and task_state spawning. [ 1841.865047] env[62519]: DEBUG nova.compute.manager [req-be7d4aba-df67-4508-8caa-96b99c97c918 req-c8996157-3820-4dcc-b3e3-d1309c27a8ba service nova] [instance: eb5de0a4-0af3-4731-ab30-3ae3d72207a7] Received event network-changed-98c5e48e-5515-4c54-af43-86a9b283477d {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1841.865204] env[62519]: DEBUG nova.compute.manager [req-be7d4aba-df67-4508-8caa-96b99c97c918 req-c8996157-3820-4dcc-b3e3-d1309c27a8ba service nova] [instance: eb5de0a4-0af3-4731-ab30-3ae3d72207a7] Refreshing instance network info cache due to event network-changed-98c5e48e-5515-4c54-af43-86a9b283477d. 
{{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11628}} [ 1841.865432] env[62519]: DEBUG oslo_concurrency.lockutils [req-be7d4aba-df67-4508-8caa-96b99c97c918 req-c8996157-3820-4dcc-b3e3-d1309c27a8ba service nova] Acquiring lock "refresh_cache-eb5de0a4-0af3-4731-ab30-3ae3d72207a7" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1841.943347] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b24922b-b74b-4945-9984-f00b5af809f4 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1841.952784] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a89d2b79-c4bf-4d3f-b6b1-cf441419d6a7 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1841.957049] env[62519]: DEBUG nova.network.neutron [None req-f642eb67-c251-474f-9741-074de84f9988 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] [instance: 646c9dfc-7b78-4cdb-b4f5-480c43af38c4] Successfully created port: cb01cf13-b6c4-4f35-b75b-86f1ba67b87a {{(pid=62519) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1841.991749] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e74d411-cfaa-40af-9f51-b2a5932e3419 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1842.000903] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1861de1c-82b6-4401-b30f-460d66c9ed60 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1842.018047] env[62519]: DEBUG nova.compute.provider_tree [None req-10486ef6-1fb8-4a31-906c-fdcc5177956e tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] Inventory has not changed in ProviderTree for provider: f8ca0d98-9158-4b85-ae0e-b106f966dd44 {{(pid=62519) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1842.086733] env[62519]: DEBUG oslo_concurrency.lockutils [req-32d9794c-8305-4501-822a-86b0ec635345 req-c0cfeba9-3bcd-4cf7-9ea8-bced3b77a5eb service nova] Releasing lock "refresh_cache-fcff790b-d267-4d8c-80d8-ad66cfb66539" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1842.129658] env[62519]: DEBUG nova.compute.manager [None req-f642eb67-c251-474f-9741-074de84f9988 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] [instance: 646c9dfc-7b78-4cdb-b4f5-480c43af38c4] Start building block device mappings for instance. {{(pid=62519) _build_resources /opt/stack/nova/nova/compute/manager.py:2873}} [ 1842.177675] env[62519]: DEBUG nova.network.neutron [None req-3bb5b2fc-3c16-485c-b3aa-bebff7f3a9a9 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] [instance: eb5de0a4-0af3-4731-ab30-3ae3d72207a7] Instance cache missing network info. 
{{(pid=62519) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1842.266407] env[62519]: DEBUG oslo_vmware.api [None req-cb53b5a9-1695-40be-9a5a-731401609236 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Task: {'id': task-1803100, 'name': PowerOnVM_Task} progress is 100%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1842.305055] env[62519]: DEBUG oslo_vmware.api [None req-fb47b509-8873-40b4-9991-f74edf23e568 tempest-InstanceActionsNegativeTestJSON-1848810980 tempest-InstanceActionsNegativeTestJSON-1848810980-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52766136-f4b7-e2cc-c75d-e8624392ef88, 'name': SearchDatastore_Task, 'duration_secs': 0.011598} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1842.305055] env[62519]: DEBUG oslo_concurrency.lockutils [None req-fb47b509-8873-40b4-9991-f74edf23e568 tempest-InstanceActionsNegativeTestJSON-1848810980 tempest-InstanceActionsNegativeTestJSON-1848810980-project-member] Releasing lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1842.305055] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-fb47b509-8873-40b4-9991-f74edf23e568 tempest-InstanceActionsNegativeTestJSON-1848810980 tempest-InstanceActionsNegativeTestJSON-1848810980-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk to [datastore1] fcff790b-d267-4d8c-80d8-ad66cfb66539/fcff790b-d267-4d8c-80d8-ad66cfb66539.vmdk {{(pid=62519) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1842.305055] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-6b567a54-68cc-4935-8c21-e0b0616a1333 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1842.310711] env[62519]: DEBUG oslo_vmware.api [None req-fb47b509-8873-40b4-9991-f74edf23e568 tempest-InstanceActionsNegativeTestJSON-1848810980 tempest-InstanceActionsNegativeTestJSON-1848810980-project-member] Waiting for the task: (returnval){ [ 1842.310711] env[62519]: value = "task-1803101" [ 1842.310711] env[62519]: _type = "Task" [ 1842.310711] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1842.319958] env[62519]: DEBUG oslo_vmware.api [None req-fb47b509-8873-40b4-9991-f74edf23e568 tempest-InstanceActionsNegativeTestJSON-1848810980 tempest-InstanceActionsNegativeTestJSON-1848810980-project-member] Task: {'id': task-1803101, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1842.362918] env[62519]: DEBUG nova.network.neutron [None req-3bb5b2fc-3c16-485c-b3aa-bebff7f3a9a9 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] [instance: eb5de0a4-0af3-4731-ab30-3ae3d72207a7] Updating instance_info_cache with network_info: [{"id": "98c5e48e-5515-4c54-af43-86a9b283477d", "address": "fa:16:3e:72:79:66", "network": {"id": "f9454e9e-4102-407f-acc0-7afee44ec876", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-684678313-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "683d60927fdf424386ffcfaa344a7af6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "11b669be-fb26-4ef8-bdb6-c77ab9d06daf", "external-id": "nsx-vlan-transportzone-633", "segmentation_id": 633, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap98c5e48e-55", "ovs_interfaceid": "98c5e48e-5515-4c54-af43-86a9b283477d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1842.522275] env[62519]: DEBUG nova.scheduler.client.report [None req-10486ef6-1fb8-4a31-906c-fdcc5177956e tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] Inventory has not changed for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 157, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1842.771280] env[62519]: DEBUG oslo_vmware.api [None req-cb53b5a9-1695-40be-9a5a-731401609236 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Task: {'id': task-1803100, 'name': PowerOnVM_Task, 'duration_secs': 0.548797} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1842.771682] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-cb53b5a9-1695-40be-9a5a-731401609236 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] [instance: e143206e-eb12-41b8-9140-229c1533fd80] Powered on the VM {{(pid=62519) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1842.771839] env[62519]: INFO nova.compute.manager [None req-cb53b5a9-1695-40be-9a5a-731401609236 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] [instance: e143206e-eb12-41b8-9140-229c1533fd80] Took 9.21 seconds to spawn the instance on the hypervisor. 
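
The task traffic above follows the usual oslo.vmware round trip: a vSphere method such as VirtualMachine.PowerOnVM_Task or VirtualDiskManager.CopyVirtualDisk_Task is invoked, a Task reference comes back, and the session polls it ("progress is 0% ... 100% ... completed successfully") until it finishes. A minimal sketch of that call pattern, not Nova's own code; the vCenter host, credentials, and vm_ref below are hypothetical placeholders:

    # Illustrative only: the oslo.vmware calls behind the "Invoking
    # VirtualMachine.PowerOnVM_Task ... Waiting for the task ... progress is
    # 0%/100% ... completed successfully" lines. Host, credentials and the
    # VM reference are placeholders, not values from this deployment.
    from oslo_vmware import api


    def power_on(vm_ref):
        session = api.VMwareAPISession(
            'vc.example.test', 'admin', 'secret',
            api_retry_count=10, task_poll_interval=0.5)
        # invoke_api() issues the SOAP request and returns a Task reference
        # immediately (the "Invoking ... with opID=oslo.vmware-..." lines).
        task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
        # wait_for_task() polls the task every task_poll_interval seconds,
        # logging progress, and returns the final task info or raises on
        # error (the "_poll_task ... completed successfully" lines).
        return session.wait_for_task(task)
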
[ 1842.772064] env[62519]: DEBUG nova.compute.manager [None req-cb53b5a9-1695-40be-9a5a-731401609236 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] [instance: e143206e-eb12-41b8-9140-229c1533fd80] Checking state {{(pid=62519) _get_power_state /opt/stack/nova/nova/compute/manager.py:1799}} [ 1842.772877] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ce71128-75b0-4fb9-bad0-478030f54d54 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1842.820944] env[62519]: DEBUG oslo_vmware.api [None req-fb47b509-8873-40b4-9991-f74edf23e568 tempest-InstanceActionsNegativeTestJSON-1848810980 tempest-InstanceActionsNegativeTestJSON-1848810980-project-member] Task: {'id': task-1803101, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.479365} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1842.821233] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-fb47b509-8873-40b4-9991-f74edf23e568 tempest-InstanceActionsNegativeTestJSON-1848810980 tempest-InstanceActionsNegativeTestJSON-1848810980-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk to [datastore1] fcff790b-d267-4d8c-80d8-ad66cfb66539/fcff790b-d267-4d8c-80d8-ad66cfb66539.vmdk {{(pid=62519) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1842.821445] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-fb47b509-8873-40b4-9991-f74edf23e568 tempest-InstanceActionsNegativeTestJSON-1848810980 tempest-InstanceActionsNegativeTestJSON-1848810980-project-member] [instance: fcff790b-d267-4d8c-80d8-ad66cfb66539] Extending root virtual disk to 1048576 {{(pid=62519) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1842.821709] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-55d22f16-d6e5-458c-ab70-eb4916a3872e {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1842.830116] env[62519]: DEBUG oslo_vmware.api [None req-fb47b509-8873-40b4-9991-f74edf23e568 tempest-InstanceActionsNegativeTestJSON-1848810980 tempest-InstanceActionsNegativeTestJSON-1848810980-project-member] Waiting for the task: (returnval){ [ 1842.830116] env[62519]: value = "task-1803102" [ 1842.830116] env[62519]: _type = "Task" [ 1842.830116] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1842.840429] env[62519]: DEBUG oslo_vmware.api [None req-fb47b509-8873-40b4-9991-f74edf23e568 tempest-InstanceActionsNegativeTestJSON-1848810980 tempest-InstanceActionsNegativeTestJSON-1848810980-project-member] Task: {'id': task-1803102, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1842.865663] env[62519]: DEBUG oslo_concurrency.lockutils [None req-3bb5b2fc-3c16-485c-b3aa-bebff7f3a9a9 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Releasing lock "refresh_cache-eb5de0a4-0af3-4731-ab30-3ae3d72207a7" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1842.866091] env[62519]: DEBUG nova.compute.manager [None req-3bb5b2fc-3c16-485c-b3aa-bebff7f3a9a9 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] [instance: eb5de0a4-0af3-4731-ab30-3ae3d72207a7] Instance network_info: |[{"id": "98c5e48e-5515-4c54-af43-86a9b283477d", "address": "fa:16:3e:72:79:66", "network": {"id": "f9454e9e-4102-407f-acc0-7afee44ec876", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-684678313-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "683d60927fdf424386ffcfaa344a7af6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "11b669be-fb26-4ef8-bdb6-c77ab9d06daf", "external-id": "nsx-vlan-transportzone-633", "segmentation_id": 633, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap98c5e48e-55", "ovs_interfaceid": "98c5e48e-5515-4c54-af43-86a9b283477d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62519) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2004}} [ 1842.866415] env[62519]: DEBUG oslo_concurrency.lockutils [req-be7d4aba-df67-4508-8caa-96b99c97c918 req-c8996157-3820-4dcc-b3e3-d1309c27a8ba service nova] Acquired lock "refresh_cache-eb5de0a4-0af3-4731-ab30-3ae3d72207a7" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1842.866601] env[62519]: DEBUG nova.network.neutron [req-be7d4aba-df67-4508-8caa-96b99c97c918 req-c8996157-3820-4dcc-b3e3-d1309c27a8ba service nova] [instance: eb5de0a4-0af3-4731-ab30-3ae3d72207a7] Refreshing network info cache for port 98c5e48e-5515-4c54-af43-86a9b283477d {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1842.867848] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-3bb5b2fc-3c16-485c-b3aa-bebff7f3a9a9 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] [instance: eb5de0a4-0af3-4731-ab30-3ae3d72207a7] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:72:79:66', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '11b669be-fb26-4ef8-bdb6-c77ab9d06daf', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '98c5e48e-5515-4c54-af43-86a9b283477d', 'vif_model': 'vmxnet3'}] {{(pid=62519) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1842.876711] env[62519]: DEBUG oslo.service.loopingcall [None req-3bb5b2fc-3c16-485c-b3aa-bebff7f3a9a9 tempest-DeleteServersTestJSON-1737011426 
tempest-DeleteServersTestJSON-1737011426-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62519) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1842.877900] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: eb5de0a4-0af3-4731-ab30-3ae3d72207a7] Creating VM on the ESX host {{(pid=62519) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1842.878148] env[62519]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-2f77814c-b8e2-4079-8081-f3e526df210e {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1842.900104] env[62519]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1842.900104] env[62519]: value = "task-1803103" [ 1842.900104] env[62519]: _type = "Task" [ 1842.900104] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1842.910677] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1803103, 'name': CreateVM_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1843.028418] env[62519]: DEBUG oslo_concurrency.lockutils [None req-10486ef6-1fb8-4a31-906c-fdcc5177956e tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.895s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1843.032250] env[62519]: DEBUG oslo_concurrency.lockutils [None req-38ddf53a-539a-4d8d-957a-9eb72ff6a4fd tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 5.517s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1843.032740] env[62519]: DEBUG nova.objects.instance [None req-38ddf53a-539a-4d8d-957a-9eb72ff6a4fd tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Lazy-loading 'resources' on Instance uuid 358ee402-1112-4eea-a518-a45a6bf92c31 {{(pid=62519) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1843.057943] env[62519]: INFO nova.scheduler.client.report [None req-10486ef6-1fb8-4a31-906c-fdcc5177956e tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] Deleted allocations for instance 71c6bd0c-40da-44eb-8f37-4f1b7fb0f0a2 [ 1843.139712] env[62519]: DEBUG nova.compute.manager [None req-f642eb67-c251-474f-9741-074de84f9988 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] [instance: 646c9dfc-7b78-4cdb-b4f5-480c43af38c4] Start spawning the instance on the hypervisor. 
{{(pid=62519) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2647}} [ 1843.163916] env[62519]: DEBUG nova.virt.hardware [None req-f642eb67-c251-474f-9741-074de84f9988 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T07:56:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T07:55:55Z,direct_url=,disk_format='vmdk',id=15793716-f1d9-4a86-9030-717adf498693,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4069337684d348e0a1ab4eb6a1a2b14d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T07:55:56Z,virtual_size=,visibility=), allow threads: False {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1843.164217] env[62519]: DEBUG nova.virt.hardware [None req-f642eb67-c251-474f-9741-074de84f9988 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Flavor limits 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1843.164360] env[62519]: DEBUG nova.virt.hardware [None req-f642eb67-c251-474f-9741-074de84f9988 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Image limits 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1843.164514] env[62519]: DEBUG nova.virt.hardware [None req-f642eb67-c251-474f-9741-074de84f9988 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Flavor pref 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1843.164698] env[62519]: DEBUG nova.virt.hardware [None req-f642eb67-c251-474f-9741-074de84f9988 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Image pref 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1843.164790] env[62519]: DEBUG nova.virt.hardware [None req-f642eb67-c251-474f-9741-074de84f9988 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1843.164988] env[62519]: DEBUG nova.virt.hardware [None req-f642eb67-c251-474f-9741-074de84f9988 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1843.165156] env[62519]: DEBUG nova.virt.hardware [None req-f642eb67-c251-474f-9741-074de84f9988 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62519) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1843.165321] env[62519]: DEBUG 
nova.virt.hardware [None req-f642eb67-c251-474f-9741-074de84f9988 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Got 1 possible topologies {{(pid=62519) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1843.165481] env[62519]: DEBUG nova.virt.hardware [None req-f642eb67-c251-474f-9741-074de84f9988 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1843.165647] env[62519]: DEBUG nova.virt.hardware [None req-f642eb67-c251-474f-9741-074de84f9988 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1843.166561] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b81447af-0090-4fe7-a5f4-9de343fabbf1 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1843.175662] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9092287c-a66b-4d4b-888d-3971a9b57f52 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1843.290676] env[62519]: INFO nova.compute.manager [None req-cb53b5a9-1695-40be-9a5a-731401609236 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] [instance: e143206e-eb12-41b8-9140-229c1533fd80] Took 18.54 seconds to build instance. [ 1843.341698] env[62519]: DEBUG oslo_vmware.api [None req-fb47b509-8873-40b4-9991-f74edf23e568 tempest-InstanceActionsNegativeTestJSON-1848810980 tempest-InstanceActionsNegativeTestJSON-1848810980-project-member] Task: {'id': task-1803102, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.07988} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1843.341963] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-fb47b509-8873-40b4-9991-f74edf23e568 tempest-InstanceActionsNegativeTestJSON-1848810980 tempest-InstanceActionsNegativeTestJSON-1848810980-project-member] [instance: fcff790b-d267-4d8c-80d8-ad66cfb66539] Extended root virtual disk {{(pid=62519) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1843.343266] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a522f6ea-f024-445b-84d9-4e03c87e8703 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1843.368523] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-fb47b509-8873-40b4-9991-f74edf23e568 tempest-InstanceActionsNegativeTestJSON-1848810980 tempest-InstanceActionsNegativeTestJSON-1848810980-project-member] [instance: fcff790b-d267-4d8c-80d8-ad66cfb66539] Reconfiguring VM instance instance-00000062 to attach disk [datastore1] fcff790b-d267-4d8c-80d8-ad66cfb66539/fcff790b-d267-4d8c-80d8-ad66cfb66539.vmdk or device None with type sparse {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1843.368960] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ffde07dd-bbfb-49bb-9a0b-1fb2e9a0121f {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1843.397500] env[62519]: DEBUG oslo_vmware.api [None req-fb47b509-8873-40b4-9991-f74edf23e568 tempest-InstanceActionsNegativeTestJSON-1848810980 tempest-InstanceActionsNegativeTestJSON-1848810980-project-member] Waiting for the task: (returnval){ [ 1843.397500] env[62519]: value = "task-1803104" [ 1843.397500] env[62519]: _type = "Task" [ 1843.397500] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1843.406513] env[62519]: DEBUG oslo_vmware.api [None req-fb47b509-8873-40b4-9991-f74edf23e568 tempest-InstanceActionsNegativeTestJSON-1848810980 tempest-InstanceActionsNegativeTestJSON-1848810980-project-member] Task: {'id': task-1803104, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1843.414040] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1803103, 'name': CreateVM_Task, 'duration_secs': 0.384094} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1843.414040] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: eb5de0a4-0af3-4731-ab30-3ae3d72207a7] Created VM on the ESX host {{(pid=62519) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1843.414040] env[62519]: DEBUG oslo_concurrency.lockutils [None req-3bb5b2fc-3c16-485c-b3aa-bebff7f3a9a9 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1843.414298] env[62519]: DEBUG oslo_concurrency.lockutils [None req-3bb5b2fc-3c16-485c-b3aa-bebff7f3a9a9 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Acquired lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1843.414517] env[62519]: DEBUG oslo_concurrency.lockutils [None req-3bb5b2fc-3c16-485c-b3aa-bebff7f3a9a9 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1843.414787] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b43e6e9f-9d1f-428a-9f01-0efedf09e492 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1843.422292] env[62519]: DEBUG oslo_vmware.api [None req-3bb5b2fc-3c16-485c-b3aa-bebff7f3a9a9 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Waiting for the task: (returnval){ [ 1843.422292] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]524bb90d-8967-0d22-465c-aadd5de21d11" [ 1843.422292] env[62519]: _type = "Task" [ 1843.422292] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1843.435221] env[62519]: DEBUG oslo_vmware.api [None req-3bb5b2fc-3c16-485c-b3aa-bebff7f3a9a9 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]524bb90d-8967-0d22-465c-aadd5de21d11, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1843.566246] env[62519]: DEBUG oslo_concurrency.lockutils [None req-10486ef6-1fb8-4a31-906c-fdcc5177956e tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] Lock "71c6bd0c-40da-44eb-8f37-4f1b7fb0f0a2" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 11.774s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1843.567448] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Lock "71c6bd0c-40da-44eb-8f37-4f1b7fb0f0a2" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 3.289s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1843.567984] env[62519]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-91b16595-7849-4723-8764-0bd7b8b5d8d1 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1843.584607] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e29878fb-d586-482d-9bcb-76cb53416507 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1843.733279] env[62519]: DEBUG nova.network.neutron [None req-f642eb67-c251-474f-9741-074de84f9988 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] [instance: 646c9dfc-7b78-4cdb-b4f5-480c43af38c4] Successfully updated port: cb01cf13-b6c4-4f35-b75b-86f1ba67b87a {{(pid=62519) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1843.771982] env[62519]: DEBUG nova.network.neutron [req-be7d4aba-df67-4508-8caa-96b99c97c918 req-c8996157-3820-4dcc-b3e3-d1309c27a8ba service nova] [instance: eb5de0a4-0af3-4731-ab30-3ae3d72207a7] Updated VIF entry in instance network info cache for port 98c5e48e-5515-4c54-af43-86a9b283477d. 
{{(pid=62519) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1843.771982] env[62519]: DEBUG nova.network.neutron [req-be7d4aba-df67-4508-8caa-96b99c97c918 req-c8996157-3820-4dcc-b3e3-d1309c27a8ba service nova] [instance: eb5de0a4-0af3-4731-ab30-3ae3d72207a7] Updating instance_info_cache with network_info: [{"id": "98c5e48e-5515-4c54-af43-86a9b283477d", "address": "fa:16:3e:72:79:66", "network": {"id": "f9454e9e-4102-407f-acc0-7afee44ec876", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-684678313-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "683d60927fdf424386ffcfaa344a7af6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "11b669be-fb26-4ef8-bdb6-c77ab9d06daf", "external-id": "nsx-vlan-transportzone-633", "segmentation_id": 633, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap98c5e48e-55", "ovs_interfaceid": "98c5e48e-5515-4c54-af43-86a9b283477d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1843.867146] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7eb2de3-77b5-4f02-9156-03a6d5616b7d {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1843.875939] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3775d04-47fd-4536-a467-6c2ce3684387 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1843.910413] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60b02d32-1caa-4aeb-89bc-61047191c3aa {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1843.921478] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52386e4c-424b-4705-86f1-5e3c3f82f5ae {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1843.925751] env[62519]: DEBUG oslo_vmware.api [None req-fb47b509-8873-40b4-9991-f74edf23e568 tempest-InstanceActionsNegativeTestJSON-1848810980 tempest-InstanceActionsNegativeTestJSON-1848810980-project-member] Task: {'id': task-1803104, 'name': ReconfigVM_Task, 'duration_secs': 0.307407} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1843.926142] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-fb47b509-8873-40b4-9991-f74edf23e568 tempest-InstanceActionsNegativeTestJSON-1848810980 tempest-InstanceActionsNegativeTestJSON-1848810980-project-member] [instance: fcff790b-d267-4d8c-80d8-ad66cfb66539] Reconfigured VM instance instance-00000062 to attach disk [datastore1] fcff790b-d267-4d8c-80d8-ad66cfb66539/fcff790b-d267-4d8c-80d8-ad66cfb66539.vmdk or device None with type sparse {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1843.929925] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-9a18b097-829f-420f-8487-f3616c55dce8 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1843.941688] env[62519]: DEBUG nova.compute.provider_tree [None req-38ddf53a-539a-4d8d-957a-9eb72ff6a4fd tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Inventory has not changed in ProviderTree for provider: f8ca0d98-9158-4b85-ae0e-b106f966dd44 {{(pid=62519) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1843.950422] env[62519]: DEBUG oslo_vmware.api [None req-3bb5b2fc-3c16-485c-b3aa-bebff7f3a9a9 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]524bb90d-8967-0d22-465c-aadd5de21d11, 'name': SearchDatastore_Task, 'duration_secs': 0.014875} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1843.952451] env[62519]: DEBUG oslo_concurrency.lockutils [None req-3bb5b2fc-3c16-485c-b3aa-bebff7f3a9a9 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Releasing lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1843.952689] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-3bb5b2fc-3c16-485c-b3aa-bebff7f3a9a9 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] [instance: eb5de0a4-0af3-4731-ab30-3ae3d72207a7] Processing image 15793716-f1d9-4a86-9030-717adf498693 {{(pid=62519) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1843.952922] env[62519]: DEBUG oslo_concurrency.lockutils [None req-3bb5b2fc-3c16-485c-b3aa-bebff7f3a9a9 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1843.953083] env[62519]: DEBUG oslo_concurrency.lockutils [None req-3bb5b2fc-3c16-485c-b3aa-bebff7f3a9a9 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Acquired lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1843.953299] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-3bb5b2fc-3c16-485c-b3aa-bebff7f3a9a9 
tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62519) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1843.953617] env[62519]: DEBUG oslo_vmware.api [None req-fb47b509-8873-40b4-9991-f74edf23e568 tempest-InstanceActionsNegativeTestJSON-1848810980 tempest-InstanceActionsNegativeTestJSON-1848810980-project-member] Waiting for the task: (returnval){ [ 1843.953617] env[62519]: value = "task-1803105" [ 1843.953617] env[62519]: _type = "Task" [ 1843.953617] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1843.954072] env[62519]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a7eec84e-6808-4278-9af4-f57e92f5aeaf {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1843.967547] env[62519]: DEBUG oslo_vmware.api [None req-fb47b509-8873-40b4-9991-f74edf23e568 tempest-InstanceActionsNegativeTestJSON-1848810980 tempest-InstanceActionsNegativeTestJSON-1848810980-project-member] Task: {'id': task-1803105, 'name': Rename_Task} progress is 6%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1843.969595] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-3bb5b2fc-3c16-485c-b3aa-bebff7f3a9a9 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62519) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1843.969779] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-3bb5b2fc-3c16-485c-b3aa-bebff7f3a9a9 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62519) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1843.970840] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7a6ef1cb-8a71-4e5f-83bd-399b18f026a0 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1843.976793] env[62519]: DEBUG oslo_vmware.api [None req-3bb5b2fc-3c16-485c-b3aa-bebff7f3a9a9 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Waiting for the task: (returnval){ [ 1843.976793] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]5209303c-0c70-b6c4-f075-194f3c16b4ef" [ 1843.976793] env[62519]: _type = "Task" [ 1843.976793] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1843.986294] env[62519]: DEBUG oslo_vmware.api [None req-3bb5b2fc-3c16-485c-b3aa-bebff7f3a9a9 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]5209303c-0c70-b6c4-f075-194f3c16b4ef, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1844.106450] env[62519]: DEBUG nova.compute.manager [req-c91ec97b-fd74-4acf-82bc-c2efb90f5b2b req-2b3c9232-01e9-4a59-a21d-97fbd5d12ad6 service nova] [instance: 646c9dfc-7b78-4cdb-b4f5-480c43af38c4] Received event network-vif-plugged-cb01cf13-b6c4-4f35-b75b-86f1ba67b87a {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1844.106912] env[62519]: DEBUG oslo_concurrency.lockutils [req-c91ec97b-fd74-4acf-82bc-c2efb90f5b2b req-2b3c9232-01e9-4a59-a21d-97fbd5d12ad6 service nova] Acquiring lock "646c9dfc-7b78-4cdb-b4f5-480c43af38c4-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1844.107306] env[62519]: DEBUG oslo_concurrency.lockutils [req-c91ec97b-fd74-4acf-82bc-c2efb90f5b2b req-2b3c9232-01e9-4a59-a21d-97fbd5d12ad6 service nova] Lock "646c9dfc-7b78-4cdb-b4f5-480c43af38c4-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1844.107428] env[62519]: DEBUG oslo_concurrency.lockutils [req-c91ec97b-fd74-4acf-82bc-c2efb90f5b2b req-2b3c9232-01e9-4a59-a21d-97fbd5d12ad6 service nova] Lock "646c9dfc-7b78-4cdb-b4f5-480c43af38c4-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1844.107605] env[62519]: DEBUG nova.compute.manager [req-c91ec97b-fd74-4acf-82bc-c2efb90f5b2b req-2b3c9232-01e9-4a59-a21d-97fbd5d12ad6 service nova] [instance: 646c9dfc-7b78-4cdb-b4f5-480c43af38c4] No waiting events found dispatching network-vif-plugged-cb01cf13-b6c4-4f35-b75b-86f1ba67b87a {{(pid=62519) pop_instance_event /opt/stack/nova/nova/compute/manager.py:323}} [ 1844.107769] env[62519]: WARNING nova.compute.manager [req-c91ec97b-fd74-4acf-82bc-c2efb90f5b2b req-2b3c9232-01e9-4a59-a21d-97fbd5d12ad6 service nova] [instance: 646c9dfc-7b78-4cdb-b4f5-480c43af38c4] Received unexpected event network-vif-plugged-cb01cf13-b6c4-4f35-b75b-86f1ba67b87a for instance with vm_state building and task_state spawning. [ 1844.107925] env[62519]: DEBUG nova.compute.manager [req-c91ec97b-fd74-4acf-82bc-c2efb90f5b2b req-2b3c9232-01e9-4a59-a21d-97fbd5d12ad6 service nova] [instance: 646c9dfc-7b78-4cdb-b4f5-480c43af38c4] Received event network-changed-cb01cf13-b6c4-4f35-b75b-86f1ba67b87a {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1844.108100] env[62519]: DEBUG nova.compute.manager [req-c91ec97b-fd74-4acf-82bc-c2efb90f5b2b req-2b3c9232-01e9-4a59-a21d-97fbd5d12ad6 service nova] [instance: 646c9dfc-7b78-4cdb-b4f5-480c43af38c4] Refreshing instance network info cache due to event network-changed-cb01cf13-b6c4-4f35-b75b-86f1ba67b87a. 
{{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11628}} [ 1844.108314] env[62519]: DEBUG oslo_concurrency.lockutils [req-c91ec97b-fd74-4acf-82bc-c2efb90f5b2b req-2b3c9232-01e9-4a59-a21d-97fbd5d12ad6 service nova] Acquiring lock "refresh_cache-646c9dfc-7b78-4cdb-b4f5-480c43af38c4" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1844.108407] env[62519]: DEBUG oslo_concurrency.lockutils [req-c91ec97b-fd74-4acf-82bc-c2efb90f5b2b req-2b3c9232-01e9-4a59-a21d-97fbd5d12ad6 service nova] Acquired lock "refresh_cache-646c9dfc-7b78-4cdb-b4f5-480c43af38c4" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1844.108544] env[62519]: DEBUG nova.network.neutron [req-c91ec97b-fd74-4acf-82bc-c2efb90f5b2b req-2b3c9232-01e9-4a59-a21d-97fbd5d12ad6 service nova] [instance: 646c9dfc-7b78-4cdb-b4f5-480c43af38c4] Refreshing network info cache for port cb01cf13-b6c4-4f35-b75b-86f1ba67b87a {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1844.159132] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Lock "71c6bd0c-40da-44eb-8f37-4f1b7fb0f0a2" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.592s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1844.236044] env[62519]: DEBUG oslo_concurrency.lockutils [None req-f642eb67-c251-474f-9741-074de84f9988 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Acquiring lock "refresh_cache-646c9dfc-7b78-4cdb-b4f5-480c43af38c4" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1844.276240] env[62519]: DEBUG oslo_concurrency.lockutils [req-be7d4aba-df67-4508-8caa-96b99c97c918 req-c8996157-3820-4dcc-b3e3-d1309c27a8ba service nova] Releasing lock "refresh_cache-eb5de0a4-0af3-4731-ab30-3ae3d72207a7" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1844.446186] env[62519]: DEBUG nova.scheduler.client.report [None req-38ddf53a-539a-4d8d-957a-9eb72ff6a4fd tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Inventory has not changed for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 157, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1844.468390] env[62519]: DEBUG oslo_vmware.api [None req-fb47b509-8873-40b4-9991-f74edf23e568 tempest-InstanceActionsNegativeTestJSON-1848810980 tempest-InstanceActionsNegativeTestJSON-1848810980-project-member] Task: {'id': task-1803105, 'name': Rename_Task, 'duration_secs': 0.158125} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1844.468785] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-fb47b509-8873-40b4-9991-f74edf23e568 tempest-InstanceActionsNegativeTestJSON-1848810980 tempest-InstanceActionsNegativeTestJSON-1848810980-project-member] [instance: fcff790b-d267-4d8c-80d8-ad66cfb66539] Powering on the VM {{(pid=62519) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1844.468968] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a6a41dcf-6b51-4fed-a485-ca32043d94cc {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1844.477241] env[62519]: DEBUG oslo_vmware.api [None req-fb47b509-8873-40b4-9991-f74edf23e568 tempest-InstanceActionsNegativeTestJSON-1848810980 tempest-InstanceActionsNegativeTestJSON-1848810980-project-member] Waiting for the task: (returnval){ [ 1844.477241] env[62519]: value = "task-1803106" [ 1844.477241] env[62519]: _type = "Task" [ 1844.477241] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1844.490223] env[62519]: DEBUG oslo_vmware.api [None req-fb47b509-8873-40b4-9991-f74edf23e568 tempest-InstanceActionsNegativeTestJSON-1848810980 tempest-InstanceActionsNegativeTestJSON-1848810980-project-member] Task: {'id': task-1803106, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1844.494333] env[62519]: DEBUG oslo_vmware.api [None req-3bb5b2fc-3c16-485c-b3aa-bebff7f3a9a9 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]5209303c-0c70-b6c4-f075-194f3c16b4ef, 'name': SearchDatastore_Task, 'duration_secs': 0.015971} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1844.495172] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-78ff8895-1047-49e3-a772-2bbe0ee904ec {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1844.502560] env[62519]: DEBUG oslo_vmware.api [None req-3bb5b2fc-3c16-485c-b3aa-bebff7f3a9a9 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Waiting for the task: (returnval){ [ 1844.502560] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]52e63d04-4b6a-7f7f-9eb7-a2ebe13faafc" [ 1844.502560] env[62519]: _type = "Task" [ 1844.502560] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1844.513457] env[62519]: DEBUG oslo_vmware.api [None req-3bb5b2fc-3c16-485c-b3aa-bebff7f3a9a9 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52e63d04-4b6a-7f7f-9eb7-a2ebe13faafc, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1844.648377] env[62519]: DEBUG nova.network.neutron [req-c91ec97b-fd74-4acf-82bc-c2efb90f5b2b req-2b3c9232-01e9-4a59-a21d-97fbd5d12ad6 service nova] [instance: 646c9dfc-7b78-4cdb-b4f5-480c43af38c4] Instance cache missing network info. {{(pid=62519) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1844.735269] env[62519]: DEBUG nova.network.neutron [req-c91ec97b-fd74-4acf-82bc-c2efb90f5b2b req-2b3c9232-01e9-4a59-a21d-97fbd5d12ad6 service nova] [instance: 646c9dfc-7b78-4cdb-b4f5-480c43af38c4] Updating instance_info_cache with network_info: [] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1844.799904] env[62519]: DEBUG oslo_concurrency.lockutils [None req-cb53b5a9-1695-40be-9a5a-731401609236 tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Lock "e143206e-eb12-41b8-9140-229c1533fd80" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 21.077s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1844.800297] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Lock "e143206e-eb12-41b8-9140-229c1533fd80" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 4.520s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1844.800543] env[62519]: INFO nova.compute.manager [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] [instance: e143206e-eb12-41b8-9140-229c1533fd80] During sync_power_state the instance has a pending task (spawning). Skip. 
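
The waited/held timings above are emitted by oslo.concurrency's lockutils wrappers, which log lock acquisition and release at DEBUG (lockutils.py:402/407/421). A minimal sketch of the two usual forms; the lock names below are examples only, not taken from Nova's source:

    # Illustrative only: the oslo.concurrency primitives that produce the
    # 'Acquiring lock ... acquired ... waited Xs ... "released" ... held Ys'
    # DEBUG lines above. Lock names here are examples.
    from oslo_concurrency import lockutils


    @lockutils.synchronized('e143206e-eb12-41b8-9140-229c1533fd80')
    def sync_one_instance():
        # Runs with the named lock held; the wrapper logs how long the
        # caller waited for the lock and how long it was held.
        pass


    def audit_resources():
        # The context-manager form with the same acquire/release semantics.
        with lockutils.lock('compute_resources'):
            pass
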
[ 1844.800748] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Lock "e143206e-eb12-41b8-9140-229c1533fd80" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.001s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1844.952228] env[62519]: DEBUG oslo_concurrency.lockutils [None req-38ddf53a-539a-4d8d-957a-9eb72ff6a4fd tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.921s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1844.955261] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 3.333s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1844.956133] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.001s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1844.956463] env[62519]: DEBUG nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62519) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1844.957810] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13dea70d-7d25-4292-a5f6-2dbd96ac017d {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1844.970926] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29b2b624-efa9-4e19-887f-6c3177f86a90 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1844.992893] env[62519]: DEBUG oslo_vmware.api [None req-fb47b509-8873-40b4-9991-f74edf23e568 tempest-InstanceActionsNegativeTestJSON-1848810980 tempest-InstanceActionsNegativeTestJSON-1848810980-project-member] Task: {'id': task-1803106, 'name': PowerOnVM_Task} progress is 90%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1845.006018] env[62519]: INFO nova.scheduler.client.report [None req-38ddf53a-539a-4d8d-957a-9eb72ff6a4fd tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Deleted allocations for instance 358ee402-1112-4eea-a518-a45a6bf92c31 [ 1845.006018] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25858c15-7e67-40c9-ae56-04976c1dde51 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1845.023462] env[62519]: DEBUG oslo_vmware.api [None req-3bb5b2fc-3c16-485c-b3aa-bebff7f3a9a9 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52e63d04-4b6a-7f7f-9eb7-a2ebe13faafc, 'name': SearchDatastore_Task, 'duration_secs': 0.025645} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1845.025400] env[62519]: DEBUG oslo_concurrency.lockutils [None req-3bb5b2fc-3c16-485c-b3aa-bebff7f3a9a9 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Releasing lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1845.025529] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-3bb5b2fc-3c16-485c-b3aa-bebff7f3a9a9 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk to [datastore1] eb5de0a4-0af3-4731-ab30-3ae3d72207a7/eb5de0a4-0af3-4731-ab30-3ae3d72207a7.vmdk {{(pid=62519) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1845.026148] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-148a0cd6-8b76-48a5-bf98-447f8414a49e {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1845.028680] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6790c78d-c9db-4e0c-b9b0-0504c6026859 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1845.065338] env[62519]: DEBUG oslo_concurrency.lockutils [None req-954f71d8-1dcb-4e43-9220-7e08386feb3d tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Acquiring lock "e143206e-eb12-41b8-9140-229c1533fd80" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1845.066022] env[62519]: DEBUG oslo_concurrency.lockutils [None req-954f71d8-1dcb-4e43-9220-7e08386feb3d tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Lock "e143206e-eb12-41b8-9140-229c1533fd80" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 
1845.066106] env[62519]: DEBUG oslo_concurrency.lockutils [None req-954f71d8-1dcb-4e43-9220-7e08386feb3d tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Acquiring lock "e143206e-eb12-41b8-9140-229c1533fd80-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1845.066266] env[62519]: DEBUG oslo_concurrency.lockutils [None req-954f71d8-1dcb-4e43-9220-7e08386feb3d tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Lock "e143206e-eb12-41b8-9140-229c1533fd80-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1845.066434] env[62519]: DEBUG oslo_concurrency.lockutils [None req-954f71d8-1dcb-4e43-9220-7e08386feb3d tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Lock "e143206e-eb12-41b8-9140-229c1533fd80-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1845.068394] env[62519]: DEBUG nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=179514MB free_disk=157GB free_vcpus=48 pci_devices=None {{(pid=62519) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1845.068394] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1845.068508] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1845.071742] env[62519]: INFO nova.compute.manager [None req-954f71d8-1dcb-4e43-9220-7e08386feb3d tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] [instance: e143206e-eb12-41b8-9140-229c1533fd80] Terminating instance [ 1845.074123] env[62519]: DEBUG oslo_vmware.api [None req-3bb5b2fc-3c16-485c-b3aa-bebff7f3a9a9 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Waiting for the task: (returnval){ [ 1845.074123] env[62519]: value = "task-1803107" [ 1845.074123] env[62519]: _type = "Task" [ 1845.074123] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1845.089026] env[62519]: DEBUG oslo_vmware.api [None req-3bb5b2fc-3c16-485c-b3aa-bebff7f3a9a9 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Task: {'id': task-1803107, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1845.238532] env[62519]: DEBUG oslo_concurrency.lockutils [req-c91ec97b-fd74-4acf-82bc-c2efb90f5b2b req-2b3c9232-01e9-4a59-a21d-97fbd5d12ad6 service nova] Releasing lock "refresh_cache-646c9dfc-7b78-4cdb-b4f5-480c43af38c4" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1845.238926] env[62519]: DEBUG oslo_concurrency.lockutils [None req-f642eb67-c251-474f-9741-074de84f9988 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Acquired lock "refresh_cache-646c9dfc-7b78-4cdb-b4f5-480c43af38c4" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1845.239106] env[62519]: DEBUG nova.network.neutron [None req-f642eb67-c251-474f-9741-074de84f9988 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] [instance: 646c9dfc-7b78-4cdb-b4f5-480c43af38c4] Building network info cache for instance {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1845.492566] env[62519]: DEBUG oslo_vmware.api [None req-fb47b509-8873-40b4-9991-f74edf23e568 tempest-InstanceActionsNegativeTestJSON-1848810980 tempest-InstanceActionsNegativeTestJSON-1848810980-project-member] Task: {'id': task-1803106, 'name': PowerOnVM_Task, 'duration_secs': 0.933714} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1845.492904] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-fb47b509-8873-40b4-9991-f74edf23e568 tempest-InstanceActionsNegativeTestJSON-1848810980 tempest-InstanceActionsNegativeTestJSON-1848810980-project-member] [instance: fcff790b-d267-4d8c-80d8-ad66cfb66539] Powered on the VM {{(pid=62519) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1845.493121] env[62519]: INFO nova.compute.manager [None req-fb47b509-8873-40b4-9991-f74edf23e568 tempest-InstanceActionsNegativeTestJSON-1848810980 tempest-InstanceActionsNegativeTestJSON-1848810980-project-member] [instance: fcff790b-d267-4d8c-80d8-ad66cfb66539] Took 7.70 seconds to spawn the instance on the hypervisor. 
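Not part of the log: a rough sketch, using placeholder endpoint, credentials and managed-object reference, of how a caller drives a vCenter task through oslo.vmware; this polling loop is what emits the "Task: {...} progress is N%." and "completed successfully" entries above.

from oslo_vmware import api, vim_util

# Placeholder connection details; the real values never appear in this log.
session = api.VMwareAPISession('vcenter.example.test', 'user', 'secret',
                               api_retry_count=10, task_poll_interval=0.5)
vm_ref = vim_util.get_moref('vm-123', 'VirtualMachine')  # placeholder moref
task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
# wait_for_task() polls the task, logging progress until it finishes, and
# returns the completed task info (raising if the task ends in error).
task_info = session.wait_for_task(task)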
[ 1845.493447] env[62519]: DEBUG nova.compute.manager [None req-fb47b509-8873-40b4-9991-f74edf23e568 tempest-InstanceActionsNegativeTestJSON-1848810980 tempest-InstanceActionsNegativeTestJSON-1848810980-project-member] [instance: fcff790b-d267-4d8c-80d8-ad66cfb66539] Checking state {{(pid=62519) _get_power_state /opt/stack/nova/nova/compute/manager.py:1799}} [ 1845.495028] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a0c8cd6-8720-4e82-9235-43158ea7949e {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1845.519507] env[62519]: DEBUG oslo_concurrency.lockutils [None req-38ddf53a-539a-4d8d-957a-9eb72ff6a4fd tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Lock "358ee402-1112-4eea-a518-a45a6bf92c31" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 11.457s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1845.520652] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Lock "358ee402-1112-4eea-a518-a45a6bf92c31" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 5.244s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1845.521196] env[62519]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-62f6b4e6-09d5-43ce-acba-77323acf2bf7 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1845.534847] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13e0720a-5040-4ddf-9f9a-9cc8f9f42b83 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1845.588576] env[62519]: DEBUG nova.compute.manager [None req-954f71d8-1dcb-4e43-9220-7e08386feb3d tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] [instance: e143206e-eb12-41b8-9140-229c1533fd80] Start destroying the instance on the hypervisor. 
{{(pid=62519) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3166}} [ 1845.588576] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-954f71d8-1dcb-4e43-9220-7e08386feb3d tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] [instance: e143206e-eb12-41b8-9140-229c1533fd80] Destroying instance {{(pid=62519) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1845.592683] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f6e5b79-ec7c-4490-a095-924021e2a20f {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1845.605030] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-954f71d8-1dcb-4e43-9220-7e08386feb3d tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] [instance: e143206e-eb12-41b8-9140-229c1533fd80] Powering off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1845.607624] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a1692e0e-687d-4680-9f6b-b471fca05075 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1845.609767] env[62519]: DEBUG oslo_vmware.api [None req-3bb5b2fc-3c16-485c-b3aa-bebff7f3a9a9 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Task: {'id': task-1803107, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1845.618951] env[62519]: DEBUG oslo_vmware.api [None req-954f71d8-1dcb-4e43-9220-7e08386feb3d tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Waiting for the task: (returnval){ [ 1845.618951] env[62519]: value = "task-1803108" [ 1845.618951] env[62519]: _type = "Task" [ 1845.618951] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1845.630195] env[62519]: DEBUG oslo_vmware.api [None req-954f71d8-1dcb-4e43-9220-7e08386feb3d tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Task: {'id': task-1803108, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1845.721770] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2e8f93fa-12d9-4bc8-a7b4-b29377242bb0 tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] Acquiring lock "70abb2e0-1ff2-49dd-b40f-9cac244a249e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1845.722048] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2e8f93fa-12d9-4bc8-a7b4-b29377242bb0 tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] Lock "70abb2e0-1ff2-49dd-b40f-9cac244a249e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1845.773983] env[62519]: DEBUG nova.network.neutron [None req-f642eb67-c251-474f-9741-074de84f9988 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] [instance: 646c9dfc-7b78-4cdb-b4f5-480c43af38c4] Instance cache missing network info. {{(pid=62519) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1845.925135] env[62519]: DEBUG nova.network.neutron [None req-f642eb67-c251-474f-9741-074de84f9988 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] [instance: 646c9dfc-7b78-4cdb-b4f5-480c43af38c4] Updating instance_info_cache with network_info: [{"id": "cb01cf13-b6c4-4f35-b75b-86f1ba67b87a", "address": "fa:16:3e:53:b3:0f", "network": {"id": "ddacabe4-9723-4a0b-9642-4c3ca54cd05f", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1355796152-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "549cc35f5ff249f6bf22c67872883db0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bc1e16db-ad3b-4b7f-ab64-4609c87abac0", "external-id": "nsx-vlan-transportzone-500", "segmentation_id": 500, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcb01cf13-b6", "ovs_interfaceid": "cb01cf13-b6c4-4f35-b75b-86f1ba67b87a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1846.017066] env[62519]: INFO nova.compute.manager [None req-fb47b509-8873-40b4-9991-f74edf23e568 tempest-InstanceActionsNegativeTestJSON-1848810980 tempest-InstanceActionsNegativeTestJSON-1848810980-project-member] [instance: fcff790b-d267-4d8c-80d8-ad66cfb66539] Took 19.97 seconds to build instance. 
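Not part of the log: the network_info blob logged above is easier to read when reduced to its essentials. The abbreviated literal below copies only the relevant fields from that entry.

network_info = [{
    "id": "cb01cf13-b6c4-4f35-b75b-86f1ba67b87a",
    "address": "fa:16:3e:53:b3:0f",
    "devname": "tapcb01cf13-b6",
    "type": "ovs",
    "network": {"subnets": [{"cidr": "192.168.128.0/28",
                             "ips": [{"address": "192.168.128.10"}]}]},
}]

for vif in network_info:
    ips = [ip["address"]
           for subnet in vif["network"]["subnets"]
           for ip in subnet["ips"]]
    # -> cb01cf13-b6c4-4f35-b75b-86f1ba67b87a fa:16:3e:53:b3:0f tapcb01cf13-b6 ['192.168.128.10']
    print(vif["id"], vif["address"], vif["devname"], ips)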
[ 1846.093424] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Lock "358ee402-1112-4eea-a518-a45a6bf92c31" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.573s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1846.100782] env[62519]: DEBUG oslo_vmware.api [None req-3bb5b2fc-3c16-485c-b3aa-bebff7f3a9a9 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Task: {'id': task-1803107, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.645405} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1846.101281] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-3bb5b2fc-3c16-485c-b3aa-bebff7f3a9a9 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk to [datastore1] eb5de0a4-0af3-4731-ab30-3ae3d72207a7/eb5de0a4-0af3-4731-ab30-3ae3d72207a7.vmdk {{(pid=62519) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1846.101803] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-3bb5b2fc-3c16-485c-b3aa-bebff7f3a9a9 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] [instance: eb5de0a4-0af3-4731-ab30-3ae3d72207a7] Extending root virtual disk to 1048576 {{(pid=62519) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1846.102172] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-5354b19a-d56f-4825-9d15-f665c7033389 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1846.112758] env[62519]: DEBUG nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Instance 11d4a010-959f-4f53-94dc-7499007612ad actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62519) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1846.113127] env[62519]: DEBUG nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Instance 8b20b91d-d4e7-4ec2-88ec-76b444a8d8a8 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62519) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1846.113394] env[62519]: DEBUG nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Instance 46b3a0fb-29f6-4b66-a091-2d125b69d109 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62519) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1846.113645] env[62519]: DEBUG nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Instance 417588f8-6288-4ecd-9764-dbc923549c5d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62519) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1846.113902] env[62519]: DEBUG nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Instance 99f22198-1a65-4d0d-b665-90c7063dbdb9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62519) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1846.114138] env[62519]: DEBUG nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Instance 9f71845a-e80c-4822-b3de-717f1d83bc49 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62519) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1846.114388] env[62519]: DEBUG nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Instance ee4b10ba-1c56-47cf-a528-d6e65c286ddb actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62519) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1846.115380] env[62519]: DEBUG nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Instance 2ea8304e-5b91-4908-a876-6e2c780b1da9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62519) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1846.115380] env[62519]: DEBUG nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Instance 4a0f7975-5a07-4593-ae71-cabebdefe0fe actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62519) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1846.115380] env[62519]: DEBUG nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Instance 0dc95d5e-270c-44cf-b13a-d9ecd0cc3b17 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62519) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1846.115380] env[62519]: DEBUG nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Instance 10bfd4ac-6f11-4c96-87a0-ce74bc1193c4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62519) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1846.115380] env[62519]: DEBUG nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Instance f3665f89-1747-4567-9e56-c937d4ac81da actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62519) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1846.115380] env[62519]: DEBUG nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Instance 4a9656fd-2b9f-4dd6-8b71-39e55813f2f6 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62519) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1846.115902] env[62519]: DEBUG nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Instance e143206e-eb12-41b8-9140-229c1533fd80 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62519) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1846.115902] env[62519]: DEBUG nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Instance fcff790b-d267-4d8c-80d8-ad66cfb66539 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62519) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1846.115902] env[62519]: DEBUG nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Instance eb5de0a4-0af3-4731-ab30-3ae3d72207a7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62519) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1846.115902] env[62519]: DEBUG nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Instance 646c9dfc-7b78-4cdb-b4f5-480c43af38c4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62519) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1846.119234] env[62519]: DEBUG oslo_vmware.api [None req-3bb5b2fc-3c16-485c-b3aa-bebff7f3a9a9 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Waiting for the task: (returnval){ [ 1846.119234] env[62519]: value = "task-1803109" [ 1846.119234] env[62519]: _type = "Task" [ 1846.119234] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1846.133860] env[62519]: DEBUG oslo_vmware.api [None req-954f71d8-1dcb-4e43-9220-7e08386feb3d tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Task: {'id': task-1803108, 'name': PowerOffVM_Task, 'duration_secs': 0.247791} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1846.137008] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-954f71d8-1dcb-4e43-9220-7e08386feb3d tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] [instance: e143206e-eb12-41b8-9140-229c1533fd80] Powered off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1846.137245] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-954f71d8-1dcb-4e43-9220-7e08386feb3d tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] [instance: e143206e-eb12-41b8-9140-229c1533fd80] Unregistering the VM {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1846.137569] env[62519]: DEBUG oslo_vmware.api [None req-3bb5b2fc-3c16-485c-b3aa-bebff7f3a9a9 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Task: {'id': task-1803109, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1846.137816] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c40501ed-faec-4ac5-a6b6-d7ac3a751e5b {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1846.220304] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-954f71d8-1dcb-4e43-9220-7e08386feb3d tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] [instance: e143206e-eb12-41b8-9140-229c1533fd80] Unregistered the VM {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1846.220452] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-954f71d8-1dcb-4e43-9220-7e08386feb3d tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] [instance: e143206e-eb12-41b8-9140-229c1533fd80] Deleting contents of the VM from datastore datastore1 {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1846.220613] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-954f71d8-1dcb-4e43-9220-7e08386feb3d tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Deleting the datastore file [datastore1] e143206e-eb12-41b8-9140-229c1533fd80 {{(pid=62519) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1846.220879] env[62519]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-822e5b2c-e8e4-401f-9eb2-e6c535c3a793 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1846.224846] env[62519]: DEBUG nova.compute.manager [None req-2e8f93fa-12d9-4bc8-a7b4-b29377242bb0 tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] [instance: 70abb2e0-1ff2-49dd-b40f-9cac244a249e] Starting instance... 
{{(pid=62519) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2441}} [ 1846.228938] env[62519]: DEBUG oslo_vmware.api [None req-954f71d8-1dcb-4e43-9220-7e08386feb3d tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Waiting for the task: (returnval){ [ 1846.228938] env[62519]: value = "task-1803111" [ 1846.228938] env[62519]: _type = "Task" [ 1846.228938] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1846.237753] env[62519]: DEBUG oslo_vmware.api [None req-954f71d8-1dcb-4e43-9220-7e08386feb3d tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Task: {'id': task-1803111, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1846.341022] env[62519]: DEBUG oslo_concurrency.lockutils [None req-f109637f-debb-4743-b81e-5f304008181d tempest-InstanceActionsNegativeTestJSON-1848810980 tempest-InstanceActionsNegativeTestJSON-1848810980-project-member] Acquiring lock "fcff790b-d267-4d8c-80d8-ad66cfb66539" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1846.428136] env[62519]: DEBUG oslo_concurrency.lockutils [None req-f642eb67-c251-474f-9741-074de84f9988 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Releasing lock "refresh_cache-646c9dfc-7b78-4cdb-b4f5-480c43af38c4" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1846.428526] env[62519]: DEBUG nova.compute.manager [None req-f642eb67-c251-474f-9741-074de84f9988 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] [instance: 646c9dfc-7b78-4cdb-b4f5-480c43af38c4] Instance network_info: |[{"id": "cb01cf13-b6c4-4f35-b75b-86f1ba67b87a", "address": "fa:16:3e:53:b3:0f", "network": {"id": "ddacabe4-9723-4a0b-9642-4c3ca54cd05f", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1355796152-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "549cc35f5ff249f6bf22c67872883db0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bc1e16db-ad3b-4b7f-ab64-4609c87abac0", "external-id": "nsx-vlan-transportzone-500", "segmentation_id": 500, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcb01cf13-b6", "ovs_interfaceid": "cb01cf13-b6c4-4f35-b75b-86f1ba67b87a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62519) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2004}} [ 1846.429034] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-f642eb67-c251-474f-9741-074de84f9988 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] [instance: 
646c9dfc-7b78-4cdb-b4f5-480c43af38c4] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:53:b3:0f', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'bc1e16db-ad3b-4b7f-ab64-4609c87abac0', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'cb01cf13-b6c4-4f35-b75b-86f1ba67b87a', 'vif_model': 'vmxnet3'}] {{(pid=62519) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1846.436837] env[62519]: DEBUG oslo.service.loopingcall [None req-f642eb67-c251-474f-9741-074de84f9988 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62519) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1846.437094] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 646c9dfc-7b78-4cdb-b4f5-480c43af38c4] Creating VM on the ESX host {{(pid=62519) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1846.437373] env[62519]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f8c23b8b-87dc-4cfa-ae29-5c0d5ee210d3 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1846.458524] env[62519]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1846.458524] env[62519]: value = "task-1803112" [ 1846.458524] env[62519]: _type = "Task" [ 1846.458524] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1846.467923] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1803112, 'name': CreateVM_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1846.621258] env[62519]: DEBUG nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Instance 70abb2e0-1ff2-49dd-b40f-9cac244a249e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62519) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1846.621777] env[62519]: DEBUG nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Total usable vcpus: 48, total allocated vcpus: 17 {{(pid=62519) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1846.621959] env[62519]: DEBUG nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=3776MB phys_disk=200GB used_disk=17GB total_vcpus=48 used_vcpus=17 pci_stats=[] {{(pid=62519) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1846.636715] env[62519]: DEBUG oslo_vmware.api [None req-3bb5b2fc-3c16-485c-b3aa-bebff7f3a9a9 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Task: {'id': task-1803109, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.137383} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1846.636715] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-3bb5b2fc-3c16-485c-b3aa-bebff7f3a9a9 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] [instance: eb5de0a4-0af3-4731-ab30-3ae3d72207a7] Extended root virtual disk {{(pid=62519) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1846.636715] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3ed5483-df48-4ca2-bffe-1bb50aa980bf {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1846.661880] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-3bb5b2fc-3c16-485c-b3aa-bebff7f3a9a9 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] [instance: eb5de0a4-0af3-4731-ab30-3ae3d72207a7] Reconfiguring VM instance instance-00000063 to attach disk [datastore1] eb5de0a4-0af3-4731-ab30-3ae3d72207a7/eb5de0a4-0af3-4731-ab30-3ae3d72207a7.vmdk or device None with type sparse {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1846.665143] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0676ae71-53aa-49e9-8ce5-f899e2599461 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1846.690344] env[62519]: DEBUG oslo_vmware.api [None req-3bb5b2fc-3c16-485c-b3aa-bebff7f3a9a9 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Waiting for the task: (returnval){ [ 1846.690344] env[62519]: value = "task-1803113" [ 1846.690344] env[62519]: _type = "Task" [ 1846.690344] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1846.702223] env[62519]: DEBUG oslo_vmware.api [None req-3bb5b2fc-3c16-485c-b3aa-bebff7f3a9a9 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Task: {'id': task-1803113, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1846.748584] env[62519]: DEBUG oslo_vmware.api [None req-954f71d8-1dcb-4e43-9220-7e08386feb3d tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Task: {'id': task-1803111, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.509845} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1846.750217] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-954f71d8-1dcb-4e43-9220-7e08386feb3d tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Deleted the datastore file {{(pid=62519) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1846.750888] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-954f71d8-1dcb-4e43-9220-7e08386feb3d tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] [instance: e143206e-eb12-41b8-9140-229c1533fd80] Deleted contents of the VM from datastore datastore1 {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1846.750888] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-954f71d8-1dcb-4e43-9220-7e08386feb3d tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] [instance: e143206e-eb12-41b8-9140-229c1533fd80] Instance destroyed {{(pid=62519) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1846.750888] env[62519]: INFO nova.compute.manager [None req-954f71d8-1dcb-4e43-9220-7e08386feb3d tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] [instance: e143206e-eb12-41b8-9140-229c1533fd80] Took 1.16 seconds to destroy the instance on the hypervisor. [ 1846.751015] env[62519]: DEBUG oslo.service.loopingcall [None req-954f71d8-1dcb-4e43-9220-7e08386feb3d tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62519) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1846.755992] env[62519]: DEBUG nova.compute.manager [-] [instance: e143206e-eb12-41b8-9140-229c1533fd80] Deallocating network for instance {{(pid=62519) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2297}} [ 1846.755992] env[62519]: DEBUG nova.network.neutron [-] [instance: e143206e-eb12-41b8-9140-229c1533fd80] deallocate_for_instance() {{(pid=62519) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1846.757825] env[62519]: DEBUG oslo_concurrency.lockutils [None req-c15f2384-59b6-42a1-b31c-4c8a30f2f7c2 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Acquiring lock "9327a897-8e4f-4c59-952e-aecfac4028e0" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1846.758066] env[62519]: DEBUG oslo_concurrency.lockutils [None req-c15f2384-59b6-42a1-b31c-4c8a30f2f7c2 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Lock "9327a897-8e4f-4c59-952e-aecfac4028e0" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1846.759968] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2e8f93fa-12d9-4bc8-a7b4-b29377242bb0 tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1846.936025] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4fa7ce39-ad7b-47c3-b96b-e7cc58e527ef {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1846.944195] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2b4a8d5-e008-41d7-bbb9-175fe4866111 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1846.982644] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e736d47-be65-430d-bae8-5d3d38173121 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1846.994196] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44ce3654-e013-4f98-b1e2-5ec0077533c7 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1846.998160] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1803112, 'name': CreateVM_Task} progress is 99%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1847.010831] env[62519]: DEBUG nova.compute.provider_tree [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Inventory has not changed in ProviderTree for provider: f8ca0d98-9158-4b85-ae0e-b106f966dd44 {{(pid=62519) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1847.052035] env[62519]: DEBUG nova.compute.manager [req-bb132efe-6366-457a-9233-5ebbab723f47 req-07eaba1c-8533-4e0e-a9bb-8ece24dc46ee service nova] [instance: e143206e-eb12-41b8-9140-229c1533fd80] Received event network-vif-deleted-95141eba-5eca-4c6b-9080-7a7b8921dbb7 {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1847.052297] env[62519]: INFO nova.compute.manager [req-bb132efe-6366-457a-9233-5ebbab723f47 req-07eaba1c-8533-4e0e-a9bb-8ece24dc46ee service nova] [instance: e143206e-eb12-41b8-9140-229c1533fd80] Neutron deleted interface 95141eba-5eca-4c6b-9080-7a7b8921dbb7; detaching it from the instance and deleting it from the info cache [ 1847.052621] env[62519]: DEBUG nova.network.neutron [req-bb132efe-6366-457a-9233-5ebbab723f47 req-07eaba1c-8533-4e0e-a9bb-8ece24dc46ee service nova] [instance: e143206e-eb12-41b8-9140-229c1533fd80] Updating instance_info_cache with network_info: [] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1847.201381] env[62519]: DEBUG oslo_vmware.api [None req-3bb5b2fc-3c16-485c-b3aa-bebff7f3a9a9 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Task: {'id': task-1803113, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1847.263043] env[62519]: DEBUG nova.compute.manager [None req-c15f2384-59b6-42a1-b31c-4c8a30f2f7c2 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] [instance: 9327a897-8e4f-4c59-952e-aecfac4028e0] Starting instance... {{(pid=62519) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2441}} [ 1847.301114] env[62519]: DEBUG oslo_vmware.rw_handles [None req-ab5aae9d-41b8-4316-b726-ea28056af8d2 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/526fad50-6d6d-f5b7-71c9-99a445b976d3/disk-0.vmdk. {{(pid=62519) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1847.302226] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-704c8c65-d621-4bcb-9ede-2fc886da2a1d {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1847.308647] env[62519]: DEBUG oslo_vmware.rw_handles [None req-ab5aae9d-41b8-4316-b726-ea28056af8d2 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/526fad50-6d6d-f5b7-71c9-99a445b976d3/disk-0.vmdk is in state: ready. 
{{(pid=62519) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1847.308815] env[62519]: ERROR oslo_vmware.rw_handles [None req-ab5aae9d-41b8-4316-b726-ea28056af8d2 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Aborting lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/526fad50-6d6d-f5b7-71c9-99a445b976d3/disk-0.vmdk due to incomplete transfer. [ 1847.309030] env[62519]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-29188ab8-7754-4329-b727-ab0cfcce1071 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1847.316951] env[62519]: DEBUG oslo_vmware.rw_handles [None req-ab5aae9d-41b8-4316-b726-ea28056af8d2 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Closed VMDK read handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/526fad50-6d6d-f5b7-71c9-99a445b976d3/disk-0.vmdk. {{(pid=62519) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1847.317548] env[62519]: DEBUG nova.virt.vmwareapi.images [None req-ab5aae9d-41b8-4316-b726-ea28056af8d2 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] [instance: 4a0f7975-5a07-4593-ae71-cabebdefe0fe] Uploaded image bf703b2e-801c-4321-92c0-c8d0deb17ae0 to the Glance image server {{(pid=62519) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1847.319742] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-ab5aae9d-41b8-4316-b726-ea28056af8d2 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] [instance: 4a0f7975-5a07-4593-ae71-cabebdefe0fe] Destroying the VM {{(pid=62519) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1847.319975] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-84f9e84e-89b0-4c76-a4e2-ff516940a039 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1847.326583] env[62519]: DEBUG oslo_vmware.api [None req-ab5aae9d-41b8-4316-b726-ea28056af8d2 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Waiting for the task: (returnval){ [ 1847.326583] env[62519]: value = "task-1803114" [ 1847.326583] env[62519]: _type = "Task" [ 1847.326583] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1847.335047] env[62519]: DEBUG oslo_vmware.api [None req-ab5aae9d-41b8-4316-b726-ea28056af8d2 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Task: {'id': task-1803114, 'name': Destroy_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1847.491811] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1803112, 'name': CreateVM_Task, 'duration_secs': 0.539273} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1847.491987] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 646c9dfc-7b78-4cdb-b4f5-480c43af38c4] Created VM on the ESX host {{(pid=62519) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1847.492741] env[62519]: DEBUG oslo_concurrency.lockutils [None req-f642eb67-c251-474f-9741-074de84f9988 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1847.492915] env[62519]: DEBUG oslo_concurrency.lockutils [None req-f642eb67-c251-474f-9741-074de84f9988 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Acquired lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1847.493269] env[62519]: DEBUG oslo_concurrency.lockutils [None req-f642eb67-c251-474f-9741-074de84f9988 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1847.493531] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ea2e6b44-f165-4487-bab4-c049a89ee58f {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1847.498954] env[62519]: DEBUG oslo_vmware.api [None req-f642eb67-c251-474f-9741-074de84f9988 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Waiting for the task: (returnval){ [ 1847.498954] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]52a989e2-7464-0fb0-dac5-10c628489e63" [ 1847.498954] env[62519]: _type = "Task" [ 1847.498954] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1847.507610] env[62519]: DEBUG oslo_vmware.api [None req-f642eb67-c251-474f-9741-074de84f9988 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52a989e2-7464-0fb0-dac5-10c628489e63, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1847.516737] env[62519]: DEBUG nova.scheduler.client.report [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Inventory has not changed for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 157, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1847.529667] env[62519]: DEBUG oslo_concurrency.lockutils [None req-fb47b509-8873-40b4-9991-f74edf23e568 tempest-InstanceActionsNegativeTestJSON-1848810980 tempest-InstanceActionsNegativeTestJSON-1848810980-project-member] Lock "fcff790b-d267-4d8c-80d8-ad66cfb66539" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 22.492s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1847.529938] env[62519]: DEBUG nova.network.neutron [-] [instance: e143206e-eb12-41b8-9140-229c1533fd80] Updating instance_info_cache with network_info: [] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1847.531067] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Lock "fcff790b-d267-4d8c-80d8-ad66cfb66539" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 7.251s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1847.531259] env[62519]: INFO nova.compute.manager [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] [instance: fcff790b-d267-4d8c-80d8-ad66cfb66539] During sync_power_state the instance has a pending task (spawning). Skip. 
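Not part of the log: a small worked example of how the placement inventory reported above translates into schedulable capacity, assuming the usual formula capacity = (total - reserved) * allocation_ratio; the numbers are copied from the "Inventory has not changed" entry.

inventory = {
    'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
    'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
}

for rc, inv in inventory.items():
    capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
    # VCPU -> 192.0, MEMORY_MB -> 196078.0, DISK_GB -> 400.0
    print(rc, capacity)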
[ 1847.531435] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Lock "fcff790b-d267-4d8c-80d8-ad66cfb66539" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1847.532627] env[62519]: DEBUG oslo_concurrency.lockutils [None req-f109637f-debb-4743-b81e-5f304008181d tempest-InstanceActionsNegativeTestJSON-1848810980 tempest-InstanceActionsNegativeTestJSON-1848810980-project-member] Lock "fcff790b-d267-4d8c-80d8-ad66cfb66539" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 1.192s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1847.532842] env[62519]: DEBUG oslo_concurrency.lockutils [None req-f109637f-debb-4743-b81e-5f304008181d tempest-InstanceActionsNegativeTestJSON-1848810980 tempest-InstanceActionsNegativeTestJSON-1848810980-project-member] Acquiring lock "fcff790b-d267-4d8c-80d8-ad66cfb66539-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1847.533059] env[62519]: DEBUG oslo_concurrency.lockutils [None req-f109637f-debb-4743-b81e-5f304008181d tempest-InstanceActionsNegativeTestJSON-1848810980 tempest-InstanceActionsNegativeTestJSON-1848810980-project-member] Lock "fcff790b-d267-4d8c-80d8-ad66cfb66539-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1847.533227] env[62519]: DEBUG oslo_concurrency.lockutils [None req-f109637f-debb-4743-b81e-5f304008181d tempest-InstanceActionsNegativeTestJSON-1848810980 tempest-InstanceActionsNegativeTestJSON-1848810980-project-member] Lock "fcff790b-d267-4d8c-80d8-ad66cfb66539-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1847.538847] env[62519]: INFO nova.compute.manager [None req-f109637f-debb-4743-b81e-5f304008181d tempest-InstanceActionsNegativeTestJSON-1848810980 tempest-InstanceActionsNegativeTestJSON-1848810980-project-member] [instance: fcff790b-d267-4d8c-80d8-ad66cfb66539] Terminating instance [ 1847.555695] env[62519]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-217bc21f-34bc-4d20-b88e-8d463e80d116 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1847.570019] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ed8d3d3-2a11-4eb6-80a3-e9133516b2dc {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1847.622534] env[62519]: DEBUG nova.compute.manager [req-bb132efe-6366-457a-9233-5ebbab723f47 req-07eaba1c-8533-4e0e-a9bb-8ece24dc46ee service nova] [instance: e143206e-eb12-41b8-9140-229c1533fd80] Detach interface failed, port_id=95141eba-5eca-4c6b-9080-7a7b8921dbb7, reason: Instance e143206e-eb12-41b8-9140-229c1533fd80 could not be found. 
{{(pid=62519) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11455}} [ 1847.702120] env[62519]: DEBUG oslo_vmware.api [None req-3bb5b2fc-3c16-485c-b3aa-bebff7f3a9a9 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Task: {'id': task-1803113, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1847.786508] env[62519]: DEBUG oslo_concurrency.lockutils [None req-c15f2384-59b6-42a1-b31c-4c8a30f2f7c2 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1847.838463] env[62519]: DEBUG oslo_vmware.api [None req-ab5aae9d-41b8-4316-b726-ea28056af8d2 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Task: {'id': task-1803114, 'name': Destroy_Task} progress is 33%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1848.009422] env[62519]: DEBUG oslo_vmware.api [None req-f642eb67-c251-474f-9741-074de84f9988 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52a989e2-7464-0fb0-dac5-10c628489e63, 'name': SearchDatastore_Task, 'duration_secs': 0.022399} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1848.009911] env[62519]: DEBUG oslo_concurrency.lockutils [None req-f642eb67-c251-474f-9741-074de84f9988 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Releasing lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1848.010034] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-f642eb67-c251-474f-9741-074de84f9988 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] [instance: 646c9dfc-7b78-4cdb-b4f5-480c43af38c4] Processing image 15793716-f1d9-4a86-9030-717adf498693 {{(pid=62519) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1848.010209] env[62519]: DEBUG oslo_concurrency.lockutils [None req-f642eb67-c251-474f-9741-074de84f9988 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1848.010357] env[62519]: DEBUG oslo_concurrency.lockutils [None req-f642eb67-c251-474f-9741-074de84f9988 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Acquired lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1848.010567] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-f642eb67-c251-474f-9741-074de84f9988 tempest-ServerActionsTestOtherA-311064147 
tempest-ServerActionsTestOtherA-311064147-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62519) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1848.010827] env[62519]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-5709e0e0-26c7-403e-a5bd-525c403b39a6 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1848.022572] env[62519]: DEBUG nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62519) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1848.022719] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.954s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1848.022957] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2e8f93fa-12d9-4bc8-a7b4-b29377242bb0 tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 1.263s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1848.024436] env[62519]: INFO nova.compute.claims [None req-2e8f93fa-12d9-4bc8-a7b4-b29377242bb0 tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] [instance: 70abb2e0-1ff2-49dd-b40f-9cac244a249e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1848.033601] env[62519]: INFO nova.compute.manager [-] [instance: e143206e-eb12-41b8-9140-229c1533fd80] Took 1.28 seconds to deallocate network for instance. [ 1848.033874] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-f642eb67-c251-474f-9741-074de84f9988 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62519) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1848.034053] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-f642eb67-c251-474f-9741-074de84f9988 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62519) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1848.036042] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6d35a077-a115-4f80-886e-a839acafa29d {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1848.042858] env[62519]: DEBUG nova.compute.manager [None req-f109637f-debb-4743-b81e-5f304008181d tempest-InstanceActionsNegativeTestJSON-1848810980 tempest-InstanceActionsNegativeTestJSON-1848810980-project-member] [instance: fcff790b-d267-4d8c-80d8-ad66cfb66539] Start destroying the instance on the hypervisor. 
{{(pid=62519) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3166}} [ 1848.043092] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-f109637f-debb-4743-b81e-5f304008181d tempest-InstanceActionsNegativeTestJSON-1848810980 tempest-InstanceActionsNegativeTestJSON-1848810980-project-member] [instance: fcff790b-d267-4d8c-80d8-ad66cfb66539] Destroying instance {{(pid=62519) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1848.045157] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41a13a48-ba96-4ab9-b7ea-c7fce43a66c7 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1848.048381] env[62519]: DEBUG oslo_vmware.api [None req-f642eb67-c251-474f-9741-074de84f9988 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Waiting for the task: (returnval){ [ 1848.048381] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]52357cec-18d7-f50f-9576-5363d76e8f9e" [ 1848.048381] env[62519]: _type = "Task" [ 1848.048381] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1848.053873] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-f109637f-debb-4743-b81e-5f304008181d tempest-InstanceActionsNegativeTestJSON-1848810980 tempest-InstanceActionsNegativeTestJSON-1848810980-project-member] [instance: fcff790b-d267-4d8c-80d8-ad66cfb66539] Powering off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1848.054480] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a92c3c9e-8555-4b47-a09d-d125e9c23875 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1848.059583] env[62519]: DEBUG oslo_vmware.api [None req-f642eb67-c251-474f-9741-074de84f9988 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52357cec-18d7-f50f-9576-5363d76e8f9e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1848.062424] env[62519]: DEBUG oslo_vmware.api [None req-f109637f-debb-4743-b81e-5f304008181d tempest-InstanceActionsNegativeTestJSON-1848810980 tempest-InstanceActionsNegativeTestJSON-1848810980-project-member] Waiting for the task: (returnval){ [ 1848.062424] env[62519]: value = "task-1803115" [ 1848.062424] env[62519]: _type = "Task" [ 1848.062424] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1848.071201] env[62519]: DEBUG oslo_vmware.api [None req-f109637f-debb-4743-b81e-5f304008181d tempest-InstanceActionsNegativeTestJSON-1848810980 tempest-InstanceActionsNegativeTestJSON-1848810980-project-member] Task: {'id': task-1803115, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1848.201299] env[62519]: DEBUG oslo_vmware.api [None req-3bb5b2fc-3c16-485c-b3aa-bebff7f3a9a9 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Task: {'id': task-1803113, 'name': ReconfigVM_Task, 'duration_secs': 1.22974} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1848.201615] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-3bb5b2fc-3c16-485c-b3aa-bebff7f3a9a9 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] [instance: eb5de0a4-0af3-4731-ab30-3ae3d72207a7] Reconfigured VM instance instance-00000063 to attach disk [datastore1] eb5de0a4-0af3-4731-ab30-3ae3d72207a7/eb5de0a4-0af3-4731-ab30-3ae3d72207a7.vmdk or device None with type sparse {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1848.202280] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-75bd63d0-0861-4aa5-8f55-b4b73e8fb5b2 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1848.210573] env[62519]: DEBUG oslo_vmware.api [None req-3bb5b2fc-3c16-485c-b3aa-bebff7f3a9a9 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Waiting for the task: (returnval){ [ 1848.210573] env[62519]: value = "task-1803116" [ 1848.210573] env[62519]: _type = "Task" [ 1848.210573] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1848.219130] env[62519]: DEBUG oslo_vmware.api [None req-3bb5b2fc-3c16-485c-b3aa-bebff7f3a9a9 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Task: {'id': task-1803116, 'name': Rename_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1848.338661] env[62519]: DEBUG oslo_vmware.api [None req-ab5aae9d-41b8-4316-b726-ea28056af8d2 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Task: {'id': task-1803114, 'name': Destroy_Task, 'duration_secs': 0.564173} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1848.338993] env[62519]: INFO nova.virt.vmwareapi.vm_util [None req-ab5aae9d-41b8-4316-b726-ea28056af8d2 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] [instance: 4a0f7975-5a07-4593-ae71-cabebdefe0fe] Destroyed the VM [ 1848.339284] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-ab5aae9d-41b8-4316-b726-ea28056af8d2 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] [instance: 4a0f7975-5a07-4593-ae71-cabebdefe0fe] Deleting Snapshot of the VM instance {{(pid=62519) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1848.339540] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-1ec3d089-5724-4cdd-9aad-29aebb7bbd82 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1848.345817] env[62519]: DEBUG oslo_vmware.api [None req-ab5aae9d-41b8-4316-b726-ea28056af8d2 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Waiting for the task: (returnval){ [ 1848.345817] env[62519]: value = "task-1803117" [ 1848.345817] env[62519]: _type = "Task" [ 1848.345817] env[62519]: } to complete. 
{{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1848.354820] env[62519]: DEBUG oslo_vmware.api [None req-ab5aae9d-41b8-4316-b726-ea28056af8d2 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Task: {'id': task-1803117, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1848.543066] env[62519]: DEBUG oslo_concurrency.lockutils [None req-954f71d8-1dcb-4e43-9220-7e08386feb3d tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1848.559377] env[62519]: DEBUG oslo_vmware.api [None req-f642eb67-c251-474f-9741-074de84f9988 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52357cec-18d7-f50f-9576-5363d76e8f9e, 'name': SearchDatastore_Task, 'duration_secs': 0.018969} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1848.560325] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-711554ea-51a4-4d61-b0fb-004579adadac {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1848.572781] env[62519]: DEBUG oslo_vmware.api [None req-f109637f-debb-4743-b81e-5f304008181d tempest-InstanceActionsNegativeTestJSON-1848810980 tempest-InstanceActionsNegativeTestJSON-1848810980-project-member] Task: {'id': task-1803115, 'name': PowerOffVM_Task, 'duration_secs': 0.282277} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1848.573852] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-f109637f-debb-4743-b81e-5f304008181d tempest-InstanceActionsNegativeTestJSON-1848810980 tempest-InstanceActionsNegativeTestJSON-1848810980-project-member] [instance: fcff790b-d267-4d8c-80d8-ad66cfb66539] Powered off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1848.574130] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-f109637f-debb-4743-b81e-5f304008181d tempest-InstanceActionsNegativeTestJSON-1848810980 tempest-InstanceActionsNegativeTestJSON-1848810980-project-member] [instance: fcff790b-d267-4d8c-80d8-ad66cfb66539] Unregistering the VM {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1848.574386] env[62519]: DEBUG oslo_vmware.api [None req-f642eb67-c251-474f-9741-074de84f9988 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Waiting for the task: (returnval){ [ 1848.574386] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]523ca748-1c63-cf57-405a-48c2fdec769c" [ 1848.574386] env[62519]: _type = "Task" [ 1848.574386] env[62519]: } to complete. 
{{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1848.574673] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-646d9a5e-3ffe-4283-a7d8-f5cc7f034002 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1848.585744] env[62519]: DEBUG oslo_vmware.api [None req-f642eb67-c251-474f-9741-074de84f9988 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]523ca748-1c63-cf57-405a-48c2fdec769c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1848.721585] env[62519]: DEBUG oslo_vmware.api [None req-3bb5b2fc-3c16-485c-b3aa-bebff7f3a9a9 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Task: {'id': task-1803116, 'name': Rename_Task, 'duration_secs': 0.220482} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1848.721912] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-3bb5b2fc-3c16-485c-b3aa-bebff7f3a9a9 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] [instance: eb5de0a4-0af3-4731-ab30-3ae3d72207a7] Powering on the VM {{(pid=62519) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1848.722181] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-d4996fab-609b-4720-a34c-fe43f07a46f8 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1848.730947] env[62519]: DEBUG oslo_vmware.api [None req-3bb5b2fc-3c16-485c-b3aa-bebff7f3a9a9 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Waiting for the task: (returnval){ [ 1848.730947] env[62519]: value = "task-1803119" [ 1848.730947] env[62519]: _type = "Task" [ 1848.730947] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1848.740340] env[62519]: DEBUG oslo_vmware.api [None req-3bb5b2fc-3c16-485c-b3aa-bebff7f3a9a9 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Task: {'id': task-1803119, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1848.752653] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-f109637f-debb-4743-b81e-5f304008181d tempest-InstanceActionsNegativeTestJSON-1848810980 tempest-InstanceActionsNegativeTestJSON-1848810980-project-member] [instance: fcff790b-d267-4d8c-80d8-ad66cfb66539] Unregistered the VM {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1848.752653] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-f109637f-debb-4743-b81e-5f304008181d tempest-InstanceActionsNegativeTestJSON-1848810980 tempest-InstanceActionsNegativeTestJSON-1848810980-project-member] [instance: fcff790b-d267-4d8c-80d8-ad66cfb66539] Deleting contents of the VM from datastore datastore1 {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1848.752653] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-f109637f-debb-4743-b81e-5f304008181d tempest-InstanceActionsNegativeTestJSON-1848810980 tempest-InstanceActionsNegativeTestJSON-1848810980-project-member] Deleting the datastore file [datastore1] fcff790b-d267-4d8c-80d8-ad66cfb66539 {{(pid=62519) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1848.752653] env[62519]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ea8004c2-1724-4766-9d36-666dc22cb5d6 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1848.760125] env[62519]: DEBUG oslo_vmware.api [None req-f109637f-debb-4743-b81e-5f304008181d tempest-InstanceActionsNegativeTestJSON-1848810980 tempest-InstanceActionsNegativeTestJSON-1848810980-project-member] Waiting for the task: (returnval){ [ 1848.760125] env[62519]: value = "task-1803120" [ 1848.760125] env[62519]: _type = "Task" [ 1848.760125] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1848.766848] env[62519]: DEBUG oslo_vmware.api [None req-f109637f-debb-4743-b81e-5f304008181d tempest-InstanceActionsNegativeTestJSON-1848810980 tempest-InstanceActionsNegativeTestJSON-1848810980-project-member] Task: {'id': task-1803120, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1848.856027] env[62519]: DEBUG oslo_vmware.api [None req-ab5aae9d-41b8-4316-b726-ea28056af8d2 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Task: {'id': task-1803117, 'name': RemoveSnapshot_Task} progress is 17%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1849.088180] env[62519]: DEBUG oslo_vmware.api [None req-f642eb67-c251-474f-9741-074de84f9988 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]523ca748-1c63-cf57-405a-48c2fdec769c, 'name': SearchDatastore_Task, 'duration_secs': 0.015823} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1849.088479] env[62519]: DEBUG oslo_concurrency.lockutils [None req-f642eb67-c251-474f-9741-074de84f9988 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Releasing lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1849.089167] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-f642eb67-c251-474f-9741-074de84f9988 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk to [datastore1] 646c9dfc-7b78-4cdb-b4f5-480c43af38c4/646c9dfc-7b78-4cdb-b4f5-480c43af38c4.vmdk {{(pid=62519) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1849.089514] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-28e9b14c-dcfb-4431-9eff-5e83cd82ec83 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1849.097725] env[62519]: DEBUG oslo_vmware.api [None req-f642eb67-c251-474f-9741-074de84f9988 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Waiting for the task: (returnval){ [ 1849.097725] env[62519]: value = "task-1803121" [ 1849.097725] env[62519]: _type = "Task" [ 1849.097725] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1849.105295] env[62519]: DEBUG oslo_vmware.api [None req-f642eb67-c251-474f-9741-074de84f9988 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Task: {'id': task-1803121, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1849.244219] env[62519]: DEBUG oslo_vmware.api [None req-3bb5b2fc-3c16-485c-b3aa-bebff7f3a9a9 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Task: {'id': task-1803119, 'name': PowerOnVM_Task} progress is 94%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1849.272749] env[62519]: DEBUG oslo_vmware.api [None req-f109637f-debb-4743-b81e-5f304008181d tempest-InstanceActionsNegativeTestJSON-1848810980 tempest-InstanceActionsNegativeTestJSON-1848810980-project-member] Task: {'id': task-1803120, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.23058} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1849.273018] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-f109637f-debb-4743-b81e-5f304008181d tempest-InstanceActionsNegativeTestJSON-1848810980 tempest-InstanceActionsNegativeTestJSON-1848810980-project-member] Deleted the datastore file {{(pid=62519) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1849.273209] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-f109637f-debb-4743-b81e-5f304008181d tempest-InstanceActionsNegativeTestJSON-1848810980 tempest-InstanceActionsNegativeTestJSON-1848810980-project-member] [instance: fcff790b-d267-4d8c-80d8-ad66cfb66539] Deleted contents of the VM from datastore datastore1 {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1849.273384] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-f109637f-debb-4743-b81e-5f304008181d tempest-InstanceActionsNegativeTestJSON-1848810980 tempest-InstanceActionsNegativeTestJSON-1848810980-project-member] [instance: fcff790b-d267-4d8c-80d8-ad66cfb66539] Instance destroyed {{(pid=62519) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1849.273556] env[62519]: INFO nova.compute.manager [None req-f109637f-debb-4743-b81e-5f304008181d tempest-InstanceActionsNegativeTestJSON-1848810980 tempest-InstanceActionsNegativeTestJSON-1848810980-project-member] [instance: fcff790b-d267-4d8c-80d8-ad66cfb66539] Took 1.23 seconds to destroy the instance on the hypervisor. [ 1849.273801] env[62519]: DEBUG oslo.service.loopingcall [None req-f109637f-debb-4743-b81e-5f304008181d tempest-InstanceActionsNegativeTestJSON-1848810980 tempest-InstanceActionsNegativeTestJSON-1848810980-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=62519) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1849.273999] env[62519]: DEBUG nova.compute.manager [-] [instance: fcff790b-d267-4d8c-80d8-ad66cfb66539] Deallocating network for instance {{(pid=62519) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2297}} [ 1849.274160] env[62519]: DEBUG nova.network.neutron [-] [instance: fcff790b-d267-4d8c-80d8-ad66cfb66539] deallocate_for_instance() {{(pid=62519) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1849.360025] env[62519]: DEBUG oslo_vmware.api [None req-ab5aae9d-41b8-4316-b726-ea28056af8d2 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Task: {'id': task-1803117, 'name': RemoveSnapshot_Task, 'duration_secs': 0.643418} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1849.360025] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-ab5aae9d-41b8-4316-b726-ea28056af8d2 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] [instance: 4a0f7975-5a07-4593-ae71-cabebdefe0fe] Deleted Snapshot of the VM instance {{(pid=62519) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1849.360025] env[62519]: INFO nova.compute.manager [None req-ab5aae9d-41b8-4316-b726-ea28056af8d2 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] [instance: 4a0f7975-5a07-4593-ae71-cabebdefe0fe] Took 15.65 seconds to snapshot the instance on the hypervisor. 
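The oslo_vmware.api records above ("Waiting for the task", "progress is N%.", "completed successfully") follow the usual start-task-then-poll flow. A rough sketch of that flow, assuming an already-established VMwareAPISession and a VM managed-object reference; the helper name and connection values below are illustrative, not taken from this log:

from oslo_vmware import api

def power_off(session, vm_ref):
    # invoke_api() issues the SOAP call (here PowerOffVM_Task) and returns a
    # task reference; wait_for_task() polls it, logging progress, until the
    # task succeeds or raises on failure.
    task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)
    return session.wait_for_task(task)

# Session setup (placeholder credentials):
# session = api.VMwareAPISession('vc.example.org', 'user', 'secret',
#                                api_retry_count=10, task_poll_interval=0.5)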
[ 1849.361216] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c43ee89-4fcc-4aca-ad75-675e72a1c978 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1849.370603] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9131d3e6-c85a-4976-abbd-7152f57536da {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1849.406802] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e2cb52f-5128-4a22-a293-2f7908d6a2c6 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1849.416270] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ad68f74-9af9-4da1-9ac6-89770e576af1 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1849.433151] env[62519]: DEBUG nova.compute.provider_tree [None req-2e8f93fa-12d9-4bc8-a7b4-b29377242bb0 tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] Inventory has not changed in ProviderTree for provider: f8ca0d98-9158-4b85-ae0e-b106f966dd44 {{(pid=62519) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1849.603635] env[62519]: DEBUG nova.compute.manager [req-86ba53ed-d97d-4bbe-b74c-0d899ac97bfa req-7722ed77-ded7-4dd6-8779-eb6debeabca7 service nova] [instance: fcff790b-d267-4d8c-80d8-ad66cfb66539] Received event network-vif-deleted-ecfd75e5-0384-4fee-b19c-a0219648bc54 {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1849.603868] env[62519]: INFO nova.compute.manager [req-86ba53ed-d97d-4bbe-b74c-0d899ac97bfa req-7722ed77-ded7-4dd6-8779-eb6debeabca7 service nova] [instance: fcff790b-d267-4d8c-80d8-ad66cfb66539] Neutron deleted interface ecfd75e5-0384-4fee-b19c-a0219648bc54; detaching it from the instance and deleting it from the info cache [ 1849.604089] env[62519]: DEBUG nova.network.neutron [req-86ba53ed-d97d-4bbe-b74c-0d899ac97bfa req-7722ed77-ded7-4dd6-8779-eb6debeabca7 service nova] [instance: fcff790b-d267-4d8c-80d8-ad66cfb66539] Updating instance_info_cache with network_info: [] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1849.611586] env[62519]: DEBUG oslo_vmware.api [None req-f642eb67-c251-474f-9741-074de84f9988 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Task: {'id': task-1803121, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1849.741490] env[62519]: DEBUG oslo_vmware.api [None req-3bb5b2fc-3c16-485c-b3aa-bebff7f3a9a9 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Task: {'id': task-1803119, 'name': PowerOnVM_Task, 'duration_secs': 0.58362} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1849.741778] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-3bb5b2fc-3c16-485c-b3aa-bebff7f3a9a9 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] [instance: eb5de0a4-0af3-4731-ab30-3ae3d72207a7] Powered on the VM {{(pid=62519) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1849.741979] env[62519]: INFO nova.compute.manager [None req-3bb5b2fc-3c16-485c-b3aa-bebff7f3a9a9 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] [instance: eb5de0a4-0af3-4731-ab30-3ae3d72207a7] Took 9.56 seconds to spawn the instance on the hypervisor. [ 1849.742173] env[62519]: DEBUG nova.compute.manager [None req-3bb5b2fc-3c16-485c-b3aa-bebff7f3a9a9 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] [instance: eb5de0a4-0af3-4731-ab30-3ae3d72207a7] Checking state {{(pid=62519) _get_power_state /opt/stack/nova/nova/compute/manager.py:1799}} [ 1849.742915] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1744f62a-ccbf-4362-b632-e905523285a1 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1849.901906] env[62519]: DEBUG nova.compute.manager [None req-ab5aae9d-41b8-4316-b726-ea28056af8d2 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] [instance: 4a0f7975-5a07-4593-ae71-cabebdefe0fe] Found 1 images (rotation: 2) {{(pid=62519) _rotate_backups /opt/stack/nova/nova/compute/manager.py:4998}} [ 1849.936025] env[62519]: DEBUG nova.scheduler.client.report [None req-2e8f93fa-12d9-4bc8-a7b4-b29377242bb0 tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] Inventory has not changed for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 157, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1850.082359] env[62519]: DEBUG nova.network.neutron [-] [instance: fcff790b-d267-4d8c-80d8-ad66cfb66539] Updating instance_info_cache with network_info: [] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1850.111783] env[62519]: DEBUG oslo_vmware.api [None req-f642eb67-c251-474f-9741-074de84f9988 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Task: {'id': task-1803121, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1850.112149] env[62519]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-e79abc6f-bce9-45bc-ac87-d047241dd3c9 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1850.121256] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9fc9f280-4715-4ba9-9073-b119debab84b {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1850.157986] env[62519]: DEBUG nova.compute.manager [req-86ba53ed-d97d-4bbe-b74c-0d899ac97bfa req-7722ed77-ded7-4dd6-8779-eb6debeabca7 service nova] [instance: fcff790b-d267-4d8c-80d8-ad66cfb66539] Detach interface failed, port_id=ecfd75e5-0384-4fee-b19c-a0219648bc54, reason: Instance fcff790b-d267-4d8c-80d8-ad66cfb66539 could not be found. {{(pid=62519) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11455}} [ 1850.263306] env[62519]: INFO nova.compute.manager [None req-3bb5b2fc-3c16-485c-b3aa-bebff7f3a9a9 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] [instance: eb5de0a4-0af3-4731-ab30-3ae3d72207a7] Took 20.10 seconds to build instance. [ 1850.442556] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2e8f93fa-12d9-4bc8-a7b4-b29377242bb0 tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.419s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1850.443183] env[62519]: DEBUG nova.compute.manager [None req-2e8f93fa-12d9-4bc8-a7b4-b29377242bb0 tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] [instance: 70abb2e0-1ff2-49dd-b40f-9cac244a249e] Start building networks asynchronously for instance. {{(pid=62519) _build_resources /opt/stack/nova/nova/compute/manager.py:2838}} [ 1850.446154] env[62519]: DEBUG oslo_concurrency.lockutils [None req-c15f2384-59b6-42a1-b31c-4c8a30f2f7c2 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 2.660s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1850.448030] env[62519]: INFO nova.compute.claims [None req-c15f2384-59b6-42a1-b31c-4c8a30f2f7c2 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] [instance: 9327a897-8e4f-4c59-952e-aecfac4028e0] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1850.585314] env[62519]: INFO nova.compute.manager [-] [instance: fcff790b-d267-4d8c-80d8-ad66cfb66539] Took 1.31 seconds to deallocate network for instance. [ 1850.611646] env[62519]: DEBUG oslo_vmware.api [None req-f642eb67-c251-474f-9741-074de84f9988 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Task: {'id': task-1803121, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.43919} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1850.611829] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-f642eb67-c251-474f-9741-074de84f9988 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk to [datastore1] 646c9dfc-7b78-4cdb-b4f5-480c43af38c4/646c9dfc-7b78-4cdb-b4f5-480c43af38c4.vmdk {{(pid=62519) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1850.612133] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-f642eb67-c251-474f-9741-074de84f9988 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] [instance: 646c9dfc-7b78-4cdb-b4f5-480c43af38c4] Extending root virtual disk to 1048576 {{(pid=62519) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1850.612404] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-b782a93b-bfb1-4d96-b8bd-fb22c7c36a16 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1850.620459] env[62519]: DEBUG oslo_vmware.api [None req-f642eb67-c251-474f-9741-074de84f9988 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Waiting for the task: (returnval){ [ 1850.620459] env[62519]: value = "task-1803122" [ 1850.620459] env[62519]: _type = "Task" [ 1850.620459] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1850.629663] env[62519]: DEBUG oslo_vmware.api [None req-f642eb67-c251-474f-9741-074de84f9988 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Task: {'id': task-1803122, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1850.770292] env[62519]: DEBUG nova.compute.manager [None req-3166e82a-af94-4c40-a0b0-82f4779bc484 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] [instance: 4a0f7975-5a07-4593-ae71-cabebdefe0fe] Checking state {{(pid=62519) _get_power_state /opt/stack/nova/nova/compute/manager.py:1799}} [ 1850.771491] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f02146bc-0066-40b1-85cb-e655c2e1a541 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1850.952646] env[62519]: DEBUG nova.compute.utils [None req-2e8f93fa-12d9-4bc8-a7b4-b29377242bb0 tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] Using /dev/sd instead of None {{(pid=62519) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1850.956246] env[62519]: DEBUG nova.compute.manager [None req-2e8f93fa-12d9-4bc8-a7b4-b29377242bb0 tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] [instance: 70abb2e0-1ff2-49dd-b40f-9cac244a249e] Allocating IP information in the background. 
{{(pid=62519) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1989}} [ 1850.956442] env[62519]: DEBUG nova.network.neutron [None req-2e8f93fa-12d9-4bc8-a7b4-b29377242bb0 tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] [instance: 70abb2e0-1ff2-49dd-b40f-9cac244a249e] allocate_for_instance() {{(pid=62519) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1850.993806] env[62519]: DEBUG nova.policy [None req-2e8f93fa-12d9-4bc8-a7b4-b29377242bb0 tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b94adf2a04874e489cdadc04a95ae3af', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '9808ad7e38e34658aac06ebc932b0e32', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62519) authorize /opt/stack/nova/nova/policy.py:192}} [ 1851.095952] env[62519]: DEBUG oslo_concurrency.lockutils [None req-f109637f-debb-4743-b81e-5f304008181d tempest-InstanceActionsNegativeTestJSON-1848810980 tempest-InstanceActionsNegativeTestJSON-1848810980-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1851.136226] env[62519]: DEBUG oslo_vmware.api [None req-f642eb67-c251-474f-9741-074de84f9988 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Task: {'id': task-1803122, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.214565} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1851.136540] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-f642eb67-c251-474f-9741-074de84f9988 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] [instance: 646c9dfc-7b78-4cdb-b4f5-480c43af38c4] Extended root virtual disk {{(pid=62519) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1851.137328] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ddcbe97-c784-4cbc-89b8-0f3b30c7e952 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1851.162437] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-f642eb67-c251-474f-9741-074de84f9988 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] [instance: 646c9dfc-7b78-4cdb-b4f5-480c43af38c4] Reconfiguring VM instance instance-00000064 to attach disk [datastore1] 646c9dfc-7b78-4cdb-b4f5-480c43af38c4/646c9dfc-7b78-4cdb-b4f5-480c43af38c4.vmdk or device None with type sparse {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1851.162974] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7e87fadb-2033-45ba-a395-fae36a592c08 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1851.184235] env[62519]: DEBUG oslo_vmware.api [None req-f642eb67-c251-474f-9741-074de84f9988 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Waiting for the task: (returnval){ [ 1851.184235] env[62519]: value = "task-1803123" [ 1851.184235] env[62519]: _type = "Task" [ 1851.184235] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1851.192885] env[62519]: DEBUG oslo_vmware.api [None req-f642eb67-c251-474f-9741-074de84f9988 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Task: {'id': task-1803123, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1851.238588] env[62519]: DEBUG nova.network.neutron [None req-2e8f93fa-12d9-4bc8-a7b4-b29377242bb0 tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] [instance: 70abb2e0-1ff2-49dd-b40f-9cac244a249e] Successfully created port: 4a3113f5-f1bd-4056-9bbe-75a22f8189bc {{(pid=62519) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1851.283889] env[62519]: INFO nova.compute.manager [None req-3166e82a-af94-4c40-a0b0-82f4779bc484 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] [instance: 4a0f7975-5a07-4593-ae71-cabebdefe0fe] instance snapshotting [ 1851.283889] env[62519]: DEBUG nova.objects.instance [None req-3166e82a-af94-4c40-a0b0-82f4779bc484 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Lazy-loading 'flavor' on Instance uuid 4a0f7975-5a07-4593-ae71-cabebdefe0fe {{(pid=62519) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1851.289096] env[62519]: DEBUG oslo_concurrency.lockutils [None req-ec833f3f-01a3-48fa-bb0f-21cdc93c5d7c tempest-ServersTestManualDisk-231377638 tempest-ServersTestManualDisk-231377638-project-member] Acquiring lock "0dc95d5e-270c-44cf-b13a-d9ecd0cc3b17" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1851.289379] env[62519]: DEBUG oslo_concurrency.lockutils [None req-ec833f3f-01a3-48fa-bb0f-21cdc93c5d7c tempest-ServersTestManualDisk-231377638 tempest-ServersTestManualDisk-231377638-project-member] Lock "0dc95d5e-270c-44cf-b13a-d9ecd0cc3b17" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1851.291741] env[62519]: DEBUG oslo_concurrency.lockutils [None req-ec833f3f-01a3-48fa-bb0f-21cdc93c5d7c tempest-ServersTestManualDisk-231377638 tempest-ServersTestManualDisk-231377638-project-member] Acquiring lock "0dc95d5e-270c-44cf-b13a-d9ecd0cc3b17-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1851.291741] env[62519]: DEBUG oslo_concurrency.lockutils [None req-ec833f3f-01a3-48fa-bb0f-21cdc93c5d7c tempest-ServersTestManualDisk-231377638 tempest-ServersTestManualDisk-231377638-project-member] Lock "0dc95d5e-270c-44cf-b13a-d9ecd0cc3b17-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1851.291741] env[62519]: DEBUG oslo_concurrency.lockutils [None req-ec833f3f-01a3-48fa-bb0f-21cdc93c5d7c tempest-ServersTestManualDisk-231377638 tempest-ServersTestManualDisk-231377638-project-member] Lock "0dc95d5e-270c-44cf-b13a-d9ecd0cc3b17-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1851.292397] env[62519]: INFO nova.compute.manager [None req-ec833f3f-01a3-48fa-bb0f-21cdc93c5d7c 
tempest-ServersTestManualDisk-231377638 tempest-ServersTestManualDisk-231377638-project-member] [instance: 0dc95d5e-270c-44cf-b13a-d9ecd0cc3b17] Terminating instance [ 1851.457874] env[62519]: DEBUG nova.compute.manager [None req-2e8f93fa-12d9-4bc8-a7b4-b29377242bb0 tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] [instance: 70abb2e0-1ff2-49dd-b40f-9cac244a249e] Start building block device mappings for instance. {{(pid=62519) _build_resources /opt/stack/nova/nova/compute/manager.py:2873}} [ 1851.635754] env[62519]: DEBUG nova.compute.manager [None req-570add47-f143-4e78-8238-e22c3295b4d0 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] [instance: eb5de0a4-0af3-4731-ab30-3ae3d72207a7] Stashing vm_state: active {{(pid=62519) _prep_resize /opt/stack/nova/nova/compute/manager.py:6081}} [ 1851.682595] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e86f8a4e-c82f-468c-bede-00f3ae1cba7a {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1851.698317] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b02ffd4a-4433-44a4-ae6c-e56c8f18732c {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1851.702080] env[62519]: DEBUG oslo_vmware.api [None req-f642eb67-c251-474f-9741-074de84f9988 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Task: {'id': task-1803123, 'name': ReconfigVM_Task, 'duration_secs': 0.288993} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1851.702354] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-f642eb67-c251-474f-9741-074de84f9988 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] [instance: 646c9dfc-7b78-4cdb-b4f5-480c43af38c4] Reconfigured VM instance instance-00000064 to attach disk [datastore1] 646c9dfc-7b78-4cdb-b4f5-480c43af38c4/646c9dfc-7b78-4cdb-b4f5-480c43af38c4.vmdk or device None with type sparse {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1851.703223] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-9a8e41ca-9c0f-4161-8aed-fa88dedad46e {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1851.734770] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-622b9575-4be1-4b6a-93ff-491055f4d425 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1851.738051] env[62519]: DEBUG oslo_vmware.api [None req-f642eb67-c251-474f-9741-074de84f9988 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Waiting for the task: (returnval){ [ 1851.738051] env[62519]: value = "task-1803124" [ 1851.738051] env[62519]: _type = "Task" [ 1851.738051] env[62519]: } to complete. 
{{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1851.745632] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be82da69-995b-419d-b185-a2fed70cfd09 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1851.753964] env[62519]: DEBUG oslo_vmware.api [None req-f642eb67-c251-474f-9741-074de84f9988 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Task: {'id': task-1803124, 'name': Rename_Task} progress is 14%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1851.764849] env[62519]: DEBUG nova.compute.provider_tree [None req-c15f2384-59b6-42a1-b31c-4c8a30f2f7c2 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Inventory has not changed in ProviderTree for provider: f8ca0d98-9158-4b85-ae0e-b106f966dd44 {{(pid=62519) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1851.771020] env[62519]: DEBUG oslo_concurrency.lockutils [None req-3bb5b2fc-3c16-485c-b3aa-bebff7f3a9a9 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Lock "eb5de0a4-0af3-4731-ab30-3ae3d72207a7" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 22.614s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1851.771020] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Lock "eb5de0a4-0af3-4731-ab30-3ae3d72207a7" acquired by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: waited 11.489s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1851.771020] env[62519]: INFO nova.compute.manager [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] [instance: eb5de0a4-0af3-4731-ab30-3ae3d72207a7] During sync_power_state the instance has a pending task (spawning). Skip. [ 1851.771020] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Lock "eb5de0a4-0af3-4731-ab30-3ae3d72207a7" "released" by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: held 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1851.793173] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99d80c8d-7133-4360-8b8f-bbea0a14c212 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1851.796225] env[62519]: DEBUG nova.compute.manager [None req-ec833f3f-01a3-48fa-bb0f-21cdc93c5d7c tempest-ServersTestManualDisk-231377638 tempest-ServersTestManualDisk-231377638-project-member] [instance: 0dc95d5e-270c-44cf-b13a-d9ecd0cc3b17] Start destroying the instance on the hypervisor. 
{{(pid=62519) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3166}} [ 1851.796437] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-ec833f3f-01a3-48fa-bb0f-21cdc93c5d7c tempest-ServersTestManualDisk-231377638 tempest-ServersTestManualDisk-231377638-project-member] [instance: 0dc95d5e-270c-44cf-b13a-d9ecd0cc3b17] Destroying instance {{(pid=62519) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1851.797864] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-182505f5-7079-444b-a806-ffc1de6cd513 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1851.819404] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e9afab7-1583-4c64-a942-98258c6d3981 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1851.822342] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-ec833f3f-01a3-48fa-bb0f-21cdc93c5d7c tempest-ServersTestManualDisk-231377638 tempest-ServersTestManualDisk-231377638-project-member] [instance: 0dc95d5e-270c-44cf-b13a-d9ecd0cc3b17] Powering off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1851.822634] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-debc7026-7308-4015-ac67-c6111c397de1 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1851.832893] env[62519]: DEBUG oslo_vmware.api [None req-ec833f3f-01a3-48fa-bb0f-21cdc93c5d7c tempest-ServersTestManualDisk-231377638 tempest-ServersTestManualDisk-231377638-project-member] Waiting for the task: (returnval){ [ 1851.832893] env[62519]: value = "task-1803125" [ 1851.832893] env[62519]: _type = "Task" [ 1851.832893] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1851.848525] env[62519]: DEBUG oslo_vmware.api [None req-ec833f3f-01a3-48fa-bb0f-21cdc93c5d7c tempest-ServersTestManualDisk-231377638 tempest-ServersTestManualDisk-231377638-project-member] Task: {'id': task-1803125, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1852.155754] env[62519]: DEBUG oslo_concurrency.lockutils [None req-570add47-f143-4e78-8238-e22c3295b4d0 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1852.248791] env[62519]: DEBUG oslo_vmware.api [None req-f642eb67-c251-474f-9741-074de84f9988 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Task: {'id': task-1803124, 'name': Rename_Task, 'duration_secs': 0.15351} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1852.249093] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-f642eb67-c251-474f-9741-074de84f9988 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] [instance: 646c9dfc-7b78-4cdb-b4f5-480c43af38c4] Powering on the VM {{(pid=62519) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1852.249447] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-3aba252a-a237-4ef5-9a6d-1bf997bfcb69 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1852.258301] env[62519]: DEBUG oslo_vmware.api [None req-f642eb67-c251-474f-9741-074de84f9988 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Waiting for the task: (returnval){ [ 1852.258301] env[62519]: value = "task-1803126" [ 1852.258301] env[62519]: _type = "Task" [ 1852.258301] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1852.266524] env[62519]: DEBUG oslo_vmware.api [None req-f642eb67-c251-474f-9741-074de84f9988 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Task: {'id': task-1803126, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1852.269497] env[62519]: DEBUG nova.scheduler.client.report [None req-c15f2384-59b6-42a1-b31c-4c8a30f2f7c2 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Inventory has not changed for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 157, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1852.335384] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-3166e82a-af94-4c40-a0b0-82f4779bc484 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] [instance: 4a0f7975-5a07-4593-ae71-cabebdefe0fe] Creating Snapshot of the VM instance {{(pid=62519) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1852.335508] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-06a276db-87ba-45d9-bc7e-35dc8481962a {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1852.347034] env[62519]: DEBUG oslo_vmware.api [None req-ec833f3f-01a3-48fa-bb0f-21cdc93c5d7c tempest-ServersTestManualDisk-231377638 tempest-ServersTestManualDisk-231377638-project-member] Task: {'id': task-1803125, 'name': PowerOffVM_Task, 'duration_secs': 0.290303} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1852.348495] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-ec833f3f-01a3-48fa-bb0f-21cdc93c5d7c tempest-ServersTestManualDisk-231377638 tempest-ServersTestManualDisk-231377638-project-member] [instance: 0dc95d5e-270c-44cf-b13a-d9ecd0cc3b17] Powered off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1852.348739] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-ec833f3f-01a3-48fa-bb0f-21cdc93c5d7c tempest-ServersTestManualDisk-231377638 tempest-ServersTestManualDisk-231377638-project-member] [instance: 0dc95d5e-270c-44cf-b13a-d9ecd0cc3b17] Unregistering the VM {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1852.349194] env[62519]: DEBUG oslo_vmware.api [None req-3166e82a-af94-4c40-a0b0-82f4779bc484 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Waiting for the task: (returnval){ [ 1852.349194] env[62519]: value = "task-1803127" [ 1852.349194] env[62519]: _type = "Task" [ 1852.349194] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1852.349459] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-53d3d6f0-0daa-4e8c-91d4-04413953dd90 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1852.361459] env[62519]: DEBUG oslo_vmware.api [None req-3166e82a-af94-4c40-a0b0-82f4779bc484 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Task: {'id': task-1803127, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1852.467074] env[62519]: DEBUG nova.compute.manager [None req-2e8f93fa-12d9-4bc8-a7b4-b29377242bb0 tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] [instance: 70abb2e0-1ff2-49dd-b40f-9cac244a249e] Start spawning the instance on the hypervisor. 
{{(pid=62519) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2647}} [ 1852.541219] env[62519]: DEBUG nova.virt.hardware [None req-2e8f93fa-12d9-4bc8-a7b4-b29377242bb0 tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T07:56:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T07:55:55Z,direct_url=,disk_format='vmdk',id=15793716-f1d9-4a86-9030-717adf498693,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4069337684d348e0a1ab4eb6a1a2b14d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T07:55:56Z,virtual_size=,visibility=), allow threads: False {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1852.541219] env[62519]: DEBUG nova.virt.hardware [None req-2e8f93fa-12d9-4bc8-a7b4-b29377242bb0 tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] Flavor limits 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1852.541219] env[62519]: DEBUG nova.virt.hardware [None req-2e8f93fa-12d9-4bc8-a7b4-b29377242bb0 tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] Image limits 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1852.541219] env[62519]: DEBUG nova.virt.hardware [None req-2e8f93fa-12d9-4bc8-a7b4-b29377242bb0 tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] Flavor pref 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1852.541219] env[62519]: DEBUG nova.virt.hardware [None req-2e8f93fa-12d9-4bc8-a7b4-b29377242bb0 tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] Image pref 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1852.541219] env[62519]: DEBUG nova.virt.hardware [None req-2e8f93fa-12d9-4bc8-a7b4-b29377242bb0 tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1852.541219] env[62519]: DEBUG nova.virt.hardware [None req-2e8f93fa-12d9-4bc8-a7b4-b29377242bb0 tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1852.541219] env[62519]: DEBUG nova.virt.hardware [None req-2e8f93fa-12d9-4bc8-a7b4-b29377242bb0 tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] Build topologies for 1 
vcpu(s) 1:1:1 {{(pid=62519) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1852.541219] env[62519]: DEBUG nova.virt.hardware [None req-2e8f93fa-12d9-4bc8-a7b4-b29377242bb0 tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] Got 1 possible topologies {{(pid=62519) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1852.541219] env[62519]: DEBUG nova.virt.hardware [None req-2e8f93fa-12d9-4bc8-a7b4-b29377242bb0 tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1852.541219] env[62519]: DEBUG nova.virt.hardware [None req-2e8f93fa-12d9-4bc8-a7b4-b29377242bb0 tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1852.541219] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c53949d-babc-4619-9117-3b7a80b16bc5 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1852.541219] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b9d64f6-d6ae-4a41-b875-9b39382e6dd7 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1852.668295] env[62519]: DEBUG nova.compute.manager [req-7332868a-596f-40dd-a9d1-38b1cef47f3f req-b33d0eb5-8f91-4be5-be26-3be02b752f20 service nova] [instance: 70abb2e0-1ff2-49dd-b40f-9cac244a249e] Received event network-vif-plugged-4a3113f5-f1bd-4056-9bbe-75a22f8189bc {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1852.668558] env[62519]: DEBUG oslo_concurrency.lockutils [req-7332868a-596f-40dd-a9d1-38b1cef47f3f req-b33d0eb5-8f91-4be5-be26-3be02b752f20 service nova] Acquiring lock "70abb2e0-1ff2-49dd-b40f-9cac244a249e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1852.668887] env[62519]: DEBUG oslo_concurrency.lockutils [req-7332868a-596f-40dd-a9d1-38b1cef47f3f req-b33d0eb5-8f91-4be5-be26-3be02b752f20 service nova] Lock "70abb2e0-1ff2-49dd-b40f-9cac244a249e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1852.669115] env[62519]: DEBUG oslo_concurrency.lockutils [req-7332868a-596f-40dd-a9d1-38b1cef47f3f req-b33d0eb5-8f91-4be5-be26-3be02b752f20 service nova] Lock "70abb2e0-1ff2-49dd-b40f-9cac244a249e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1852.669456] env[62519]: DEBUG nova.compute.manager [req-7332868a-596f-40dd-a9d1-38b1cef47f3f req-b33d0eb5-8f91-4be5-be26-3be02b752f20 service nova] [instance: 70abb2e0-1ff2-49dd-b40f-9cac244a249e] No waiting events found dispatching 
network-vif-plugged-4a3113f5-f1bd-4056-9bbe-75a22f8189bc {{(pid=62519) pop_instance_event /opt/stack/nova/nova/compute/manager.py:323}} [ 1852.669726] env[62519]: WARNING nova.compute.manager [req-7332868a-596f-40dd-a9d1-38b1cef47f3f req-b33d0eb5-8f91-4be5-be26-3be02b752f20 service nova] [instance: 70abb2e0-1ff2-49dd-b40f-9cac244a249e] Received unexpected event network-vif-plugged-4a3113f5-f1bd-4056-9bbe-75a22f8189bc for instance with vm_state building and task_state spawning. [ 1852.755347] env[62519]: DEBUG nova.network.neutron [None req-2e8f93fa-12d9-4bc8-a7b4-b29377242bb0 tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] [instance: 70abb2e0-1ff2-49dd-b40f-9cac244a249e] Successfully updated port: 4a3113f5-f1bd-4056-9bbe-75a22f8189bc {{(pid=62519) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1852.769691] env[62519]: DEBUG oslo_vmware.api [None req-f642eb67-c251-474f-9741-074de84f9988 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Task: {'id': task-1803126, 'name': PowerOnVM_Task} progress is 100%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1852.774645] env[62519]: DEBUG oslo_concurrency.lockutils [None req-c15f2384-59b6-42a1-b31c-4c8a30f2f7c2 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.329s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1852.775294] env[62519]: DEBUG nova.compute.manager [None req-c15f2384-59b6-42a1-b31c-4c8a30f2f7c2 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] [instance: 9327a897-8e4f-4c59-952e-aecfac4028e0] Start building networks asynchronously for instance. {{(pid=62519) _build_resources /opt/stack/nova/nova/compute/manager.py:2838}} [ 1852.778656] env[62519]: DEBUG oslo_concurrency.lockutils [None req-954f71d8-1dcb-4e43-9220-7e08386feb3d tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 4.236s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1852.778881] env[62519]: DEBUG nova.objects.instance [None req-954f71d8-1dcb-4e43-9220-7e08386feb3d tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Lazy-loading 'resources' on Instance uuid e143206e-eb12-41b8-9140-229c1533fd80 {{(pid=62519) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1852.862085] env[62519]: DEBUG oslo_vmware.api [None req-3166e82a-af94-4c40-a0b0-82f4779bc484 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Task: {'id': task-1803127, 'name': CreateSnapshot_Task, 'duration_secs': 0.500609} completed successfully. 
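Annotation: the "No waiting events found dispatching network-vif-plugged-..." and "Received unexpected event ... for instance with vm_state building" messages just above describe a benign race: Neutron reported the VIF plug before the compute service had registered a waiter for that event, so the event is dispatched to nobody and only logged. The snippet below is a generic, hypothetical illustration of that waiter/dispatch race using threading.Event; the helper names prepare_for_event and dispatch_event are invented for the sketch and this is not Nova's implementation.

import threading

_waiters = {}                 # event name -> threading.Event
_waiters_lock = threading.Lock()

def prepare_for_event(name):
    # Register interest in an external event before triggering the action that causes it.
    with _waiters_lock:
        _waiters[name] = threading.Event()

def dispatch_event(name):
    # Deliver an incoming event; if nobody registered for it, it is simply logged and dropped.
    with _waiters_lock:
        waiter = _waiters.pop(name, None)
    if waiter is None:
        print('No waiting events found dispatching %s' % name)   # mirrors the DEBUG line above
        return False
    waiter.set()
    return True

# The case seen in the log: the event arrives before any waiter was prepared.
dispatch_event('network-vif-plugged-4a3113f5-f1bd-4056-9bbe-75a22f8189bc')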
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1852.862379] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-3166e82a-af94-4c40-a0b0-82f4779bc484 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] [instance: 4a0f7975-5a07-4593-ae71-cabebdefe0fe] Created Snapshot of the VM instance {{(pid=62519) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1852.863150] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a048ea57-f3ed-4e40-bc88-7444b2546be2 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1853.258765] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2e8f93fa-12d9-4bc8-a7b4-b29377242bb0 tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] Acquiring lock "refresh_cache-70abb2e0-1ff2-49dd-b40f-9cac244a249e" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1853.259121] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2e8f93fa-12d9-4bc8-a7b4-b29377242bb0 tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] Acquired lock "refresh_cache-70abb2e0-1ff2-49dd-b40f-9cac244a249e" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1853.259171] env[62519]: DEBUG nova.network.neutron [None req-2e8f93fa-12d9-4bc8-a7b4-b29377242bb0 tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] [instance: 70abb2e0-1ff2-49dd-b40f-9cac244a249e] Building network info cache for instance {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1853.269795] env[62519]: DEBUG oslo_vmware.api [None req-f642eb67-c251-474f-9741-074de84f9988 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Task: {'id': task-1803126, 'name': PowerOnVM_Task, 'duration_secs': 0.520845} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1853.271617] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-f642eb67-c251-474f-9741-074de84f9988 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] [instance: 646c9dfc-7b78-4cdb-b4f5-480c43af38c4] Powered on the VM {{(pid=62519) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1853.271829] env[62519]: INFO nova.compute.manager [None req-f642eb67-c251-474f-9741-074de84f9988 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] [instance: 646c9dfc-7b78-4cdb-b4f5-480c43af38c4] Took 10.13 seconds to spawn the instance on the hypervisor. 
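Annotation: the Rename_Task, PowerOnVM_Task and PowerOffVM_Task entries above all follow the same oslo.vmware pattern: invoke an asynchronous vSphere *_Task method, then poll it until completion (the "Task: {'id': ..., 'name': ...} progress is N%" lines come from that polling loop, and the final "completed successfully" line reports duration_secs). A minimal sketch of that pattern follows; the vCenter hostname, the credentials and the 'vm-373831' managed object ID are placeholders, and the session parameters Nova actually passes may differ.

from oslo_vmware import api, vim_util

# Placeholder endpoint and credentials; a reachable vCenter is needed to run this.
session = api.VMwareAPISession(
    'vc.example.test', 'administrator', 'secret',
    api_retry_count=10, task_poll_interval=0.5)

# Build a managed object reference for a VM (the moref value here is a placeholder).
vm_ref = vim_util.get_moref('vm-373831', 'VirtualMachine')

# Start the asynchronous task, then block while wait_for_task() polls it; the polling
# is what produces the "progress is N%" DEBUG lines seen throughout this log.
task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
task_info = session.wait_for_task(task)
print(task_info.state)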
[ 1853.272014] env[62519]: DEBUG nova.compute.manager [None req-f642eb67-c251-474f-9741-074de84f9988 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] [instance: 646c9dfc-7b78-4cdb-b4f5-480c43af38c4] Checking state {{(pid=62519) _get_power_state /opt/stack/nova/nova/compute/manager.py:1799}} [ 1853.272309] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-ec833f3f-01a3-48fa-bb0f-21cdc93c5d7c tempest-ServersTestManualDisk-231377638 tempest-ServersTestManualDisk-231377638-project-member] [instance: 0dc95d5e-270c-44cf-b13a-d9ecd0cc3b17] Unregistered the VM {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1853.272489] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-ec833f3f-01a3-48fa-bb0f-21cdc93c5d7c tempest-ServersTestManualDisk-231377638 tempest-ServersTestManualDisk-231377638-project-member] [instance: 0dc95d5e-270c-44cf-b13a-d9ecd0cc3b17] Deleting contents of the VM from datastore datastore1 {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1853.272658] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-ec833f3f-01a3-48fa-bb0f-21cdc93c5d7c tempest-ServersTestManualDisk-231377638 tempest-ServersTestManualDisk-231377638-project-member] Deleting the datastore file [datastore1] 0dc95d5e-270c-44cf-b13a-d9ecd0cc3b17 {{(pid=62519) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1853.273665] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08548804-1590-4d69-b29f-a56cffa0765f {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1853.276181] env[62519]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-039b38fd-8195-4f7d-b5be-586c319b8fa5 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1853.279950] env[62519]: DEBUG nova.compute.utils [None req-c15f2384-59b6-42a1-b31c-4c8a30f2f7c2 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Using /dev/sd instead of None {{(pid=62519) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1853.282696] env[62519]: DEBUG nova.compute.manager [None req-c15f2384-59b6-42a1-b31c-4c8a30f2f7c2 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] [instance: 9327a897-8e4f-4c59-952e-aecfac4028e0] Allocating IP information in the background. {{(pid=62519) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1989}} [ 1853.282865] env[62519]: DEBUG nova.network.neutron [None req-c15f2384-59b6-42a1-b31c-4c8a30f2f7c2 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] [instance: 9327a897-8e4f-4c59-952e-aecfac4028e0] allocate_for_instance() {{(pid=62519) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1853.291028] env[62519]: DEBUG oslo_vmware.api [None req-ec833f3f-01a3-48fa-bb0f-21cdc93c5d7c tempest-ServersTestManualDisk-231377638 tempest-ServersTestManualDisk-231377638-project-member] Waiting for the task: (returnval){ [ 1853.291028] env[62519]: value = "task-1803129" [ 1853.291028] env[62519]: _type = "Task" [ 1853.291028] env[62519]: } to complete. 
{{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1853.301523] env[62519]: DEBUG oslo_vmware.api [None req-ec833f3f-01a3-48fa-bb0f-21cdc93c5d7c tempest-ServersTestManualDisk-231377638 tempest-ServersTestManualDisk-231377638-project-member] Task: {'id': task-1803129, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1853.334155] env[62519]: DEBUG nova.policy [None req-c15f2384-59b6-42a1-b31c-4c8a30f2f7c2 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '053a30aaf81b4cbd8ced7018ebfe1f40', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '0e755fb5a6e94068b6c99b1638081f5f', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62519) authorize /opt/stack/nova/nova/policy.py:192}} [ 1853.383351] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-3166e82a-af94-4c40-a0b0-82f4779bc484 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] [instance: 4a0f7975-5a07-4593-ae71-cabebdefe0fe] Creating linked-clone VM from snapshot {{(pid=62519) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1853.385988] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-1e3f4488-23f3-4b81-bc15-a04b2e248373 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1853.395390] env[62519]: DEBUG oslo_vmware.api [None req-3166e82a-af94-4c40-a0b0-82f4779bc484 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Waiting for the task: (returnval){ [ 1853.395390] env[62519]: value = "task-1803130" [ 1853.395390] env[62519]: _type = "Task" [ 1853.395390] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1853.404944] env[62519]: DEBUG oslo_vmware.api [None req-3166e82a-af94-4c40-a0b0-82f4779bc484 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Task: {'id': task-1803130, 'name': CloneVM_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1853.539197] env[62519]: DEBUG oslo_concurrency.lockutils [None req-c14e9351-9053-4f30-bc7f-20b46656893b tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] Acquiring lock "99f22198-1a65-4d0d-b665-90c7063dbdb9" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1853.539604] env[62519]: DEBUG oslo_concurrency.lockutils [None req-c14e9351-9053-4f30-bc7f-20b46656893b tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] Lock "99f22198-1a65-4d0d-b665-90c7063dbdb9" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1853.540329] env[62519]: DEBUG oslo_concurrency.lockutils [None req-c14e9351-9053-4f30-bc7f-20b46656893b tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] Acquiring lock "99f22198-1a65-4d0d-b665-90c7063dbdb9-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1853.540329] env[62519]: DEBUG oslo_concurrency.lockutils [None req-c14e9351-9053-4f30-bc7f-20b46656893b tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] Lock "99f22198-1a65-4d0d-b665-90c7063dbdb9-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1853.540329] env[62519]: DEBUG oslo_concurrency.lockutils [None req-c14e9351-9053-4f30-bc7f-20b46656893b tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] Lock "99f22198-1a65-4d0d-b665-90c7063dbdb9-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1853.543288] env[62519]: INFO nova.compute.manager [None req-c14e9351-9053-4f30-bc7f-20b46656893b tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] [instance: 99f22198-1a65-4d0d-b665-90c7063dbdb9] Terminating instance [ 1853.591599] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a5af3944-7642-412c-916a-cd66e1b670b3 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1853.600809] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b7ec51e-e836-4788-add6-1ff5dbbc769d {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1853.650525] env[62519]: DEBUG nova.network.neutron [None req-c15f2384-59b6-42a1-b31c-4c8a30f2f7c2 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] [instance: 9327a897-8e4f-4c59-952e-aecfac4028e0] Successfully created port: 885622bf-cd63-46f6-ac64-de4e37a95265 {{(pid=62519) _create_port_minimal 
/opt/stack/nova/nova/network/neutron.py:548}} [ 1853.654233] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d60b338d-b845-4869-8584-a37b1c8eed74 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1853.666462] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c3ae8e3-679a-45d2-be63-c5c781c4937c {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1853.690233] env[62519]: DEBUG nova.compute.provider_tree [None req-954f71d8-1dcb-4e43-9220-7e08386feb3d tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Inventory has not changed in ProviderTree for provider: f8ca0d98-9158-4b85-ae0e-b106f966dd44 {{(pid=62519) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1853.792667] env[62519]: DEBUG nova.compute.manager [None req-c15f2384-59b6-42a1-b31c-4c8a30f2f7c2 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] [instance: 9327a897-8e4f-4c59-952e-aecfac4028e0] Start building block device mappings for instance. {{(pid=62519) _build_resources /opt/stack/nova/nova/compute/manager.py:2873}} [ 1853.812586] env[62519]: INFO nova.compute.manager [None req-f642eb67-c251-474f-9741-074de84f9988 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] [instance: 646c9dfc-7b78-4cdb-b4f5-480c43af38c4] Took 20.87 seconds to build instance. [ 1853.816383] env[62519]: DEBUG nova.network.neutron [None req-2e8f93fa-12d9-4bc8-a7b4-b29377242bb0 tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] [instance: 70abb2e0-1ff2-49dd-b40f-9cac244a249e] Instance cache missing network info. {{(pid=62519) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1853.835715] env[62519]: DEBUG oslo_vmware.api [None req-ec833f3f-01a3-48fa-bb0f-21cdc93c5d7c tempest-ServersTestManualDisk-231377638 tempest-ServersTestManualDisk-231377638-project-member] Task: {'id': task-1803129, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.274975} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1853.836456] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-ec833f3f-01a3-48fa-bb0f-21cdc93c5d7c tempest-ServersTestManualDisk-231377638 tempest-ServersTestManualDisk-231377638-project-member] Deleted the datastore file {{(pid=62519) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1853.836842] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-ec833f3f-01a3-48fa-bb0f-21cdc93c5d7c tempest-ServersTestManualDisk-231377638 tempest-ServersTestManualDisk-231377638-project-member] [instance: 0dc95d5e-270c-44cf-b13a-d9ecd0cc3b17] Deleted contents of the VM from datastore datastore1 {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1853.837207] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-ec833f3f-01a3-48fa-bb0f-21cdc93c5d7c tempest-ServersTestManualDisk-231377638 tempest-ServersTestManualDisk-231377638-project-member] [instance: 0dc95d5e-270c-44cf-b13a-d9ecd0cc3b17] Instance destroyed {{(pid=62519) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1853.837562] env[62519]: INFO nova.compute.manager [None req-ec833f3f-01a3-48fa-bb0f-21cdc93c5d7c tempest-ServersTestManualDisk-231377638 tempest-ServersTestManualDisk-231377638-project-member] [instance: 0dc95d5e-270c-44cf-b13a-d9ecd0cc3b17] Took 2.04 seconds to destroy the instance on the hypervisor. [ 1853.838008] env[62519]: DEBUG oslo.service.loopingcall [None req-ec833f3f-01a3-48fa-bb0f-21cdc93c5d7c tempest-ServersTestManualDisk-231377638 tempest-ServersTestManualDisk-231377638-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62519) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1853.838359] env[62519]: DEBUG nova.compute.manager [-] [instance: 0dc95d5e-270c-44cf-b13a-d9ecd0cc3b17] Deallocating network for instance {{(pid=62519) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2297}} [ 1853.838571] env[62519]: DEBUG nova.network.neutron [-] [instance: 0dc95d5e-270c-44cf-b13a-d9ecd0cc3b17] deallocate_for_instance() {{(pid=62519) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1853.910934] env[62519]: DEBUG oslo_vmware.api [None req-3166e82a-af94-4c40-a0b0-82f4779bc484 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Task: {'id': task-1803130, 'name': CloneVM_Task} progress is 94%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1854.047824] env[62519]: DEBUG nova.compute.manager [None req-c14e9351-9053-4f30-bc7f-20b46656893b tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] [instance: 99f22198-1a65-4d0d-b665-90c7063dbdb9] Start destroying the instance on the hypervisor. 
{{(pid=62519) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3166}} [ 1854.048083] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-c14e9351-9053-4f30-bc7f-20b46656893b tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] [instance: 99f22198-1a65-4d0d-b665-90c7063dbdb9] Destroying instance {{(pid=62519) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1854.049018] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6dff5b9f-fb4d-4e95-92d2-b205e644c82f {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1854.058507] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-c14e9351-9053-4f30-bc7f-20b46656893b tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] [instance: 99f22198-1a65-4d0d-b665-90c7063dbdb9] Powering off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1854.058724] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-5997921f-86c5-4dfb-ab4d-eaa801e059dc {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1854.069126] env[62519]: DEBUG oslo_vmware.api [None req-c14e9351-9053-4f30-bc7f-20b46656893b tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] Waiting for the task: (returnval){ [ 1854.069126] env[62519]: value = "task-1803131" [ 1854.069126] env[62519]: _type = "Task" [ 1854.069126] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1854.080954] env[62519]: DEBUG oslo_vmware.api [None req-c14e9351-9053-4f30-bc7f-20b46656893b tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] Task: {'id': task-1803131, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1854.163262] env[62519]: DEBUG nova.network.neutron [None req-2e8f93fa-12d9-4bc8-a7b4-b29377242bb0 tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] [instance: 70abb2e0-1ff2-49dd-b40f-9cac244a249e] Updating instance_info_cache with network_info: [{"id": "4a3113f5-f1bd-4056-9bbe-75a22f8189bc", "address": "fa:16:3e:c0:7e:c9", "network": {"id": "b50c1a89-c14c-481c-b5d9-748f18863a45", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-698120386-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9808ad7e38e34658aac06ebc932b0e32", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f67a2790-f2b0-4d03-b606-0bfaee7a4229", "external-id": "nsx-vlan-transportzone-187", "segmentation_id": 187, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4a3113f5-f1", "ovs_interfaceid": "4a3113f5-f1bd-4056-9bbe-75a22f8189bc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1854.167612] env[62519]: DEBUG nova.compute.manager [req-0f442894-c930-40d7-9467-de0070de9982 req-44d06e96-5a88-4a6d-a068-d4e3cfe83eee service nova] [instance: 646c9dfc-7b78-4cdb-b4f5-480c43af38c4] Received event network-changed-cb01cf13-b6c4-4f35-b75b-86f1ba67b87a {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1854.167612] env[62519]: DEBUG nova.compute.manager [req-0f442894-c930-40d7-9467-de0070de9982 req-44d06e96-5a88-4a6d-a068-d4e3cfe83eee service nova] [instance: 646c9dfc-7b78-4cdb-b4f5-480c43af38c4] Refreshing instance network info cache due to event network-changed-cb01cf13-b6c4-4f35-b75b-86f1ba67b87a. 
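Annotation: the instance_info_cache update above serializes the instance's network_info as a JSON list of VIFs, each carrying the port ID, MAC address, subnets with fixed IPs, and the OVS binding details. The short sketch below simply reads that structure as plain JSON, trimmed to the fields it uses, to pull out the MAC and fixed addresses; Nova itself wraps this data in its network model objects rather than handling raw dicts.

import json

# Values copied from the cache-update entry above, reduced to the fields used here.
network_info = json.loads('''[
  {"id": "4a3113f5-f1bd-4056-9bbe-75a22f8189bc",
   "address": "fa:16:3e:c0:7e:c9",
   "devname": "tap4a3113f5-f1",
   "network": {"subnets": [{"cidr": "192.168.128.0/28",
                            "ips": [{"address": "192.168.128.7", "type": "fixed"}]}]}}
]''')

for vif in network_info:
    fixed_ips = [ip['address']
                 for subnet in vif['network']['subnets']
                 for ip in subnet['ips'] if ip['type'] == 'fixed']
    print(vif['id'], vif['address'], vif['devname'], fixed_ips)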
{{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11628}} [ 1854.167612] env[62519]: DEBUG oslo_concurrency.lockutils [req-0f442894-c930-40d7-9467-de0070de9982 req-44d06e96-5a88-4a6d-a068-d4e3cfe83eee service nova] Acquiring lock "refresh_cache-646c9dfc-7b78-4cdb-b4f5-480c43af38c4" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1854.167729] env[62519]: DEBUG oslo_concurrency.lockutils [req-0f442894-c930-40d7-9467-de0070de9982 req-44d06e96-5a88-4a6d-a068-d4e3cfe83eee service nova] Acquired lock "refresh_cache-646c9dfc-7b78-4cdb-b4f5-480c43af38c4" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1854.167921] env[62519]: DEBUG nova.network.neutron [req-0f442894-c930-40d7-9467-de0070de9982 req-44d06e96-5a88-4a6d-a068-d4e3cfe83eee service nova] [instance: 646c9dfc-7b78-4cdb-b4f5-480c43af38c4] Refreshing network info cache for port cb01cf13-b6c4-4f35-b75b-86f1ba67b87a {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1854.197021] env[62519]: DEBUG nova.scheduler.client.report [None req-954f71d8-1dcb-4e43-9220-7e08386feb3d tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Inventory has not changed for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 157, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1854.408576] env[62519]: DEBUG oslo_vmware.api [None req-3166e82a-af94-4c40-a0b0-82f4779bc484 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Task: {'id': task-1803130, 'name': CloneVM_Task} progress is 94%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1854.579947] env[62519]: DEBUG oslo_vmware.api [None req-c14e9351-9053-4f30-bc7f-20b46656893b tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] Task: {'id': task-1803131, 'name': PowerOffVM_Task, 'duration_secs': 0.22435} completed successfully. 
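Annotation: the "Inventory has not changed for provider f8ca0d98-..." entries above report the resource provider inventory that Placement schedules against. As a rough cross-check, the schedulable capacity of each resource class is (total - reserved) * allocation_ratio, while max_unit caps what a single allocation may request; the snippet below recomputes those numbers from the inventory exactly as logged.

# Inventory as reported above for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44.
inventory = {
    'VCPU':      {'total': 48,     'reserved': 0,   'max_unit': 16,    'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'max_unit': 65530, 'allocation_ratio': 1.0},
    'DISK_GB':   {'total': 400,    'reserved': 0,   'max_unit': 157,   'allocation_ratio': 1.0},
}

for rc, inv in inventory.items():
    # Schedulable capacity versus the per-allocation ceiling enforced by max_unit.
    capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
    print('%s: capacity=%d, single-allocation cap=%d' % (rc, capacity, inv['max_unit']))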
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1854.581266] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-c14e9351-9053-4f30-bc7f-20b46656893b tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] [instance: 99f22198-1a65-4d0d-b665-90c7063dbdb9] Powered off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1854.581448] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-c14e9351-9053-4f30-bc7f-20b46656893b tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] [instance: 99f22198-1a65-4d0d-b665-90c7063dbdb9] Unregistering the VM {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1854.583346] env[62519]: DEBUG nova.objects.instance [None req-51535aaf-a765-460c-a147-f584ca010233 tempest-AttachInterfacesUnderV243Test-2090758485 tempest-AttachInterfacesUnderV243Test-2090758485-project-member] Lazy-loading 'flavor' on Instance uuid 9f71845a-e80c-4822-b3de-717f1d83bc49 {{(pid=62519) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1854.584499] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-2379c0ea-779b-4366-9d21-f76e06d53e6f {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1854.671473] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2e8f93fa-12d9-4bc8-a7b4-b29377242bb0 tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] Releasing lock "refresh_cache-70abb2e0-1ff2-49dd-b40f-9cac244a249e" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1854.671473] env[62519]: DEBUG nova.compute.manager [None req-2e8f93fa-12d9-4bc8-a7b4-b29377242bb0 tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] [instance: 70abb2e0-1ff2-49dd-b40f-9cac244a249e] Instance network_info: |[{"id": "4a3113f5-f1bd-4056-9bbe-75a22f8189bc", "address": "fa:16:3e:c0:7e:c9", "network": {"id": "b50c1a89-c14c-481c-b5d9-748f18863a45", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-698120386-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9808ad7e38e34658aac06ebc932b0e32", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f67a2790-f2b0-4d03-b606-0bfaee7a4229", "external-id": "nsx-vlan-transportzone-187", "segmentation_id": 187, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4a3113f5-f1", "ovs_interfaceid": "4a3113f5-f1bd-4056-9bbe-75a22f8189bc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62519) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2004}} [ 1854.674069] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-2e8f93fa-12d9-4bc8-a7b4-b29377242bb0 
tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] [instance: 70abb2e0-1ff2-49dd-b40f-9cac244a249e] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:c0:7e:c9', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f67a2790-f2b0-4d03-b606-0bfaee7a4229', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '4a3113f5-f1bd-4056-9bbe-75a22f8189bc', 'vif_model': 'vmxnet3'}] {{(pid=62519) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1854.687518] env[62519]: DEBUG oslo.service.loopingcall [None req-2e8f93fa-12d9-4bc8-a7b4-b29377242bb0 tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62519) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1854.687924] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 70abb2e0-1ff2-49dd-b40f-9cac244a249e] Creating VM on the ESX host {{(pid=62519) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1854.688781] env[62519]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-e1daa8d7-c3ca-44d9-a74e-d5e68a79f5e4 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1854.715746] env[62519]: DEBUG oslo_concurrency.lockutils [None req-954f71d8-1dcb-4e43-9220-7e08386feb3d tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.935s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1854.717752] env[62519]: DEBUG nova.compute.manager [req-2fdc305e-aba1-43fb-905c-3dfc287425f6 req-3513f54e-fe1d-4d8b-bcb6-368379604af5 service nova] [instance: 70abb2e0-1ff2-49dd-b40f-9cac244a249e] Received event network-changed-4a3113f5-f1bd-4056-9bbe-75a22f8189bc {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1854.717980] env[62519]: DEBUG nova.compute.manager [req-2fdc305e-aba1-43fb-905c-3dfc287425f6 req-3513f54e-fe1d-4d8b-bcb6-368379604af5 service nova] [instance: 70abb2e0-1ff2-49dd-b40f-9cac244a249e] Refreshing instance network info cache due to event network-changed-4a3113f5-f1bd-4056-9bbe-75a22f8189bc. 
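Annotation: the "Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return" line above is emitted from oslo.service's loopingcall module, which Nova uses to run an operation and poll for its completion. The sketch below shows that module's basic polling primitive, FixedIntervalLoopingCall, driven by a stand-in state dict; the exact wrapper used at this particular call site may differ.

from oslo_service import loopingcall

def _poll(state):
    # Raising LoopingCallDone stops the loop and hands a value back to wait().
    if state['done']:
        raise loopingcall.LoopingCallDone(retvalue=state['result'])

state = {'done': True, 'result': 'vm-373831'}   # placeholder state for the sketch
timer = loopingcall.FixedIntervalLoopingCall(_poll, state)
print(timer.start(interval=0.5).wait())         # returns 'vm-373831' on the first poll here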
{{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11628}} [ 1854.718262] env[62519]: DEBUG oslo_concurrency.lockutils [req-2fdc305e-aba1-43fb-905c-3dfc287425f6 req-3513f54e-fe1d-4d8b-bcb6-368379604af5 service nova] Acquiring lock "refresh_cache-70abb2e0-1ff2-49dd-b40f-9cac244a249e" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1854.718460] env[62519]: DEBUG oslo_concurrency.lockutils [req-2fdc305e-aba1-43fb-905c-3dfc287425f6 req-3513f54e-fe1d-4d8b-bcb6-368379604af5 service nova] Acquired lock "refresh_cache-70abb2e0-1ff2-49dd-b40f-9cac244a249e" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1854.718711] env[62519]: DEBUG nova.network.neutron [req-2fdc305e-aba1-43fb-905c-3dfc287425f6 req-3513f54e-fe1d-4d8b-bcb6-368379604af5 service nova] [instance: 70abb2e0-1ff2-49dd-b40f-9cac244a249e] Refreshing network info cache for port 4a3113f5-f1bd-4056-9bbe-75a22f8189bc {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1854.720877] env[62519]: DEBUG oslo_concurrency.lockutils [None req-f109637f-debb-4743-b81e-5f304008181d tempest-InstanceActionsNegativeTestJSON-1848810980 tempest-InstanceActionsNegativeTestJSON-1848810980-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 3.625s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1854.720877] env[62519]: DEBUG nova.objects.instance [None req-f109637f-debb-4743-b81e-5f304008181d tempest-InstanceActionsNegativeTestJSON-1848810980 tempest-InstanceActionsNegativeTestJSON-1848810980-project-member] Lazy-loading 'resources' on Instance uuid fcff790b-d267-4d8c-80d8-ad66cfb66539 {{(pid=62519) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1854.733415] env[62519]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1854.733415] env[62519]: value = "task-1803133" [ 1854.733415] env[62519]: _type = "Task" [ 1854.733415] env[62519]: } to complete. 
{{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1854.735313] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-c14e9351-9053-4f30-bc7f-20b46656893b tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] [instance: 99f22198-1a65-4d0d-b665-90c7063dbdb9] Unregistered the VM {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1854.735589] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-c14e9351-9053-4f30-bc7f-20b46656893b tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] [instance: 99f22198-1a65-4d0d-b665-90c7063dbdb9] Deleting contents of the VM from datastore datastore1 {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1854.735713] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-c14e9351-9053-4f30-bc7f-20b46656893b tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] Deleting the datastore file [datastore1] 99f22198-1a65-4d0d-b665-90c7063dbdb9 {{(pid=62519) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1854.739900] env[62519]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-4dd7b709-6701-4707-9e1b-344b83dc4a95 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1854.748850] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1803133, 'name': CreateVM_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1854.750671] env[62519]: DEBUG oslo_vmware.api [None req-c14e9351-9053-4f30-bc7f-20b46656893b tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] Waiting for the task: (returnval){ [ 1854.750671] env[62519]: value = "task-1803134" [ 1854.750671] env[62519]: _type = "Task" [ 1854.750671] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1854.754854] env[62519]: INFO nova.scheduler.client.report [None req-954f71d8-1dcb-4e43-9220-7e08386feb3d tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Deleted allocations for instance e143206e-eb12-41b8-9140-229c1533fd80 [ 1854.762646] env[62519]: DEBUG oslo_vmware.api [None req-c14e9351-9053-4f30-bc7f-20b46656893b tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] Task: {'id': task-1803134, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1854.826211] env[62519]: DEBUG nova.compute.manager [None req-c15f2384-59b6-42a1-b31c-4c8a30f2f7c2 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] [instance: 9327a897-8e4f-4c59-952e-aecfac4028e0] Start spawning the instance on the hypervisor. 
{{(pid=62519) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2647}} [ 1854.863814] env[62519]: DEBUG nova.virt.hardware [None req-c15f2384-59b6-42a1-b31c-4c8a30f2f7c2 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T07:56:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T07:55:55Z,direct_url=,disk_format='vmdk',id=15793716-f1d9-4a86-9030-717adf498693,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4069337684d348e0a1ab4eb6a1a2b14d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T07:55:56Z,virtual_size=,visibility=), allow threads: False {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1854.863814] env[62519]: DEBUG nova.virt.hardware [None req-c15f2384-59b6-42a1-b31c-4c8a30f2f7c2 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Flavor limits 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1854.863814] env[62519]: DEBUG nova.virt.hardware [None req-c15f2384-59b6-42a1-b31c-4c8a30f2f7c2 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Image limits 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1854.864030] env[62519]: DEBUG nova.virt.hardware [None req-c15f2384-59b6-42a1-b31c-4c8a30f2f7c2 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Flavor pref 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1854.864392] env[62519]: DEBUG nova.virt.hardware [None req-c15f2384-59b6-42a1-b31c-4c8a30f2f7c2 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Image pref 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1854.864567] env[62519]: DEBUG nova.virt.hardware [None req-c15f2384-59b6-42a1-b31c-4c8a30f2f7c2 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1854.864810] env[62519]: DEBUG nova.virt.hardware [None req-c15f2384-59b6-42a1-b31c-4c8a30f2f7c2 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1854.864975] env[62519]: DEBUG nova.virt.hardware [None req-c15f2384-59b6-42a1-b31c-4c8a30f2f7c2 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62519) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1854.865196] env[62519]: DEBUG nova.virt.hardware [None req-c15f2384-59b6-42a1-b31c-4c8a30f2f7c2 tempest-ServersTestJSON-810763036 
tempest-ServersTestJSON-810763036-project-member] Got 1 possible topologies {{(pid=62519) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1854.865363] env[62519]: DEBUG nova.virt.hardware [None req-c15f2384-59b6-42a1-b31c-4c8a30f2f7c2 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1854.865735] env[62519]: DEBUG nova.virt.hardware [None req-c15f2384-59b6-42a1-b31c-4c8a30f2f7c2 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1854.869019] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5cf49a1-db3a-4d1a-9a9f-3043de98ba85 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1854.877959] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c936382f-2c1d-4cf0-a616-66257f4255b7 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1854.907706] env[62519]: DEBUG oslo_vmware.api [None req-3166e82a-af94-4c40-a0b0-82f4779bc484 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Task: {'id': task-1803130, 'name': CloneVM_Task, 'duration_secs': 1.499379} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1854.907706] env[62519]: INFO nova.virt.vmwareapi.vmops [None req-3166e82a-af94-4c40-a0b0-82f4779bc484 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] [instance: 4a0f7975-5a07-4593-ae71-cabebdefe0fe] Created linked-clone VM from snapshot [ 1854.908386] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-950b81af-6b89-4541-a2b0-4825a2546a6c {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1854.918113] env[62519]: DEBUG nova.virt.vmwareapi.images [None req-3166e82a-af94-4c40-a0b0-82f4779bc484 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] [instance: 4a0f7975-5a07-4593-ae71-cabebdefe0fe] Uploading image cce8cdd4-93c5-4d31-9e3f-2461175deeb2 {{(pid=62519) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1854.941837] env[62519]: DEBUG oslo_vmware.rw_handles [None req-3166e82a-af94-4c40-a0b0-82f4779bc484 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1854.941837] env[62519]: value = "vm-373831" [ 1854.941837] env[62519]: _type = "VirtualMachine" [ 1854.941837] env[62519]: }. 
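Annotation: the VirtCPUTopology lines above ("Build topologies for 1 vcpu(s) 1:1:1", "Got 1 possible topologies", "Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)]") come from nova.virt.hardware enumerating every sockets/cores/threads split of the flavor's vCPU count that fits the configured limits; with a single vCPU the only split is 1x1x1. The function below is an illustrative re-derivation of that enumeration, not Nova's actual code.

from itertools import product

def possible_cpu_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
    # Enumerate (sockets, cores, threads) triples whose product equals vcpus and
    # which stay within the per-dimension limits.
    topologies = []
    for sockets, cores, threads in product(range(1, vcpus + 1), repeat=3):
        if sockets * cores * threads != vcpus:
            continue
        if sockets <= max_sockets and cores <= max_cores and threads <= max_threads:
            topologies.append((sockets, cores, threads))
    return topologies

print(possible_cpu_topologies(1))   # [(1, 1, 1)], matching the log for the m1.nano flavor
print(possible_cpu_topologies(4))   # several splits, e.g. (1, 4, 1), (2, 2, 1), (4, 1, 1)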
{{(pid=62519) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1854.942135] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-f4d5aa24-ad43-4719-8342-1485db6e238b {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1854.949992] env[62519]: DEBUG oslo_vmware.rw_handles [None req-3166e82a-af94-4c40-a0b0-82f4779bc484 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Lease: (returnval){ [ 1854.949992] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]520dbb19-f6ca-06f0-68f9-0d84ef2eca25" [ 1854.949992] env[62519]: _type = "HttpNfcLease" [ 1854.949992] env[62519]: } obtained for exporting VM: (result){ [ 1854.949992] env[62519]: value = "vm-373831" [ 1854.949992] env[62519]: _type = "VirtualMachine" [ 1854.949992] env[62519]: }. {{(pid=62519) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1854.950244] env[62519]: DEBUG oslo_vmware.api [None req-3166e82a-af94-4c40-a0b0-82f4779bc484 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Waiting for the lease: (returnval){ [ 1854.950244] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]520dbb19-f6ca-06f0-68f9-0d84ef2eca25" [ 1854.950244] env[62519]: _type = "HttpNfcLease" [ 1854.950244] env[62519]: } to be ready. {{(pid=62519) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1854.956850] env[62519]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1854.956850] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]520dbb19-f6ca-06f0-68f9-0d84ef2eca25" [ 1854.956850] env[62519]: _type = "HttpNfcLease" [ 1854.956850] env[62519]: } is initializing. {{(pid=62519) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1854.957695] env[62519]: DEBUG nova.network.neutron [req-0f442894-c930-40d7-9467-de0070de9982 req-44d06e96-5a88-4a6d-a068-d4e3cfe83eee service nova] [instance: 646c9dfc-7b78-4cdb-b4f5-480c43af38c4] Updated VIF entry in instance network info cache for port cb01cf13-b6c4-4f35-b75b-86f1ba67b87a. 
{{(pid=62519) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1854.958016] env[62519]: DEBUG nova.network.neutron [req-0f442894-c930-40d7-9467-de0070de9982 req-44d06e96-5a88-4a6d-a068-d4e3cfe83eee service nova] [instance: 646c9dfc-7b78-4cdb-b4f5-480c43af38c4] Updating instance_info_cache with network_info: [{"id": "cb01cf13-b6c4-4f35-b75b-86f1ba67b87a", "address": "fa:16:3e:53:b3:0f", "network": {"id": "ddacabe4-9723-4a0b-9642-4c3ca54cd05f", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1355796152-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "549cc35f5ff249f6bf22c67872883db0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bc1e16db-ad3b-4b7f-ab64-4609c87abac0", "external-id": "nsx-vlan-transportzone-500", "segmentation_id": 500, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcb01cf13-b6", "ovs_interfaceid": "cb01cf13-b6c4-4f35-b75b-86f1ba67b87a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1855.034186] env[62519]: DEBUG nova.network.neutron [-] [instance: 0dc95d5e-270c-44cf-b13a-d9ecd0cc3b17] Updating instance_info_cache with network_info: [] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1855.090953] env[62519]: DEBUG oslo_concurrency.lockutils [None req-51535aaf-a765-460c-a147-f584ca010233 tempest-AttachInterfacesUnderV243Test-2090758485 tempest-AttachInterfacesUnderV243Test-2090758485-project-member] Acquiring lock "refresh_cache-9f71845a-e80c-4822-b3de-717f1d83bc49" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1855.091300] env[62519]: DEBUG oslo_concurrency.lockutils [None req-51535aaf-a765-460c-a147-f584ca010233 tempest-AttachInterfacesUnderV243Test-2090758485 tempest-AttachInterfacesUnderV243Test-2090758485-project-member] Acquired lock "refresh_cache-9f71845a-e80c-4822-b3de-717f1d83bc49" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1855.247740] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1803133, 'name': CreateVM_Task} progress is 25%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1855.273721] env[62519]: DEBUG oslo_vmware.api [None req-c14e9351-9053-4f30-bc7f-20b46656893b tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] Task: {'id': task-1803134, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.383938} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1855.273721] env[62519]: DEBUG oslo_concurrency.lockutils [None req-954f71d8-1dcb-4e43-9220-7e08386feb3d tempest-ServerDiskConfigTestJSON-1314025576 tempest-ServerDiskConfigTestJSON-1314025576-project-member] Lock "e143206e-eb12-41b8-9140-229c1533fd80" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 10.208s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1855.274695] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-c14e9351-9053-4f30-bc7f-20b46656893b tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] Deleted the datastore file {{(pid=62519) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1855.274905] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-c14e9351-9053-4f30-bc7f-20b46656893b tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] [instance: 99f22198-1a65-4d0d-b665-90c7063dbdb9] Deleted contents of the VM from datastore datastore1 {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1855.275166] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-c14e9351-9053-4f30-bc7f-20b46656893b tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] [instance: 99f22198-1a65-4d0d-b665-90c7063dbdb9] Instance destroyed {{(pid=62519) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1855.275357] env[62519]: INFO nova.compute.manager [None req-c14e9351-9053-4f30-bc7f-20b46656893b tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] [instance: 99f22198-1a65-4d0d-b665-90c7063dbdb9] Took 1.23 seconds to destroy the instance on the hypervisor. [ 1855.277045] env[62519]: DEBUG oslo.service.loopingcall [None req-c14e9351-9053-4f30-bc7f-20b46656893b tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62519) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1855.277045] env[62519]: DEBUG nova.compute.manager [-] [instance: 99f22198-1a65-4d0d-b665-90c7063dbdb9] Deallocating network for instance {{(pid=62519) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2297}} [ 1855.277045] env[62519]: DEBUG nova.network.neutron [-] [instance: 99f22198-1a65-4d0d-b665-90c7063dbdb9] deallocate_for_instance() {{(pid=62519) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1855.335495] env[62519]: DEBUG oslo_concurrency.lockutils [None req-f642eb67-c251-474f-9741-074de84f9988 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Lock "646c9dfc-7b78-4cdb-b4f5-480c43af38c4" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 23.395s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1855.335495] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Lock "646c9dfc-7b78-4cdb-b4f5-480c43af38c4" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 15.051s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1855.335495] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8117b802-1999-41e2-8307-a91280f90d38 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1855.460461] env[62519]: DEBUG oslo_concurrency.lockutils [req-0f442894-c930-40d7-9467-de0070de9982 req-44d06e96-5a88-4a6d-a068-d4e3cfe83eee service nova] Releasing lock "refresh_cache-646c9dfc-7b78-4cdb-b4f5-480c43af38c4" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1855.461112] env[62519]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1855.461112] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]520dbb19-f6ca-06f0-68f9-0d84ef2eca25" [ 1855.461112] env[62519]: _type = "HttpNfcLease" [ 1855.461112] env[62519]: } is ready. {{(pid=62519) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1855.461352] env[62519]: DEBUG oslo_vmware.rw_handles [None req-3166e82a-af94-4c40-a0b0-82f4779bc484 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1855.461352] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]520dbb19-f6ca-06f0-68f9-0d84ef2eca25" [ 1855.461352] env[62519]: _type = "HttpNfcLease" [ 1855.461352] env[62519]: }. 
{{(pid=62519) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1855.462175] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3704e9a3-a9db-44b2-a21f-6c5e07b42f2d {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1855.471478] env[62519]: DEBUG oslo_vmware.rw_handles [None req-3166e82a-af94-4c40-a0b0-82f4779bc484 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5220744e-dbaf-95c4-4c63-6db42741fabe/disk-0.vmdk from lease info. {{(pid=62519) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1855.471691] env[62519]: DEBUG oslo_vmware.rw_handles [None req-3166e82a-af94-4c40-a0b0-82f4779bc484 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Opening URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5220744e-dbaf-95c4-4c63-6db42741fabe/disk-0.vmdk for reading. {{(pid=62519) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1855.536206] env[62519]: INFO nova.compute.manager [-] [instance: 0dc95d5e-270c-44cf-b13a-d9ecd0cc3b17] Took 1.70 seconds to deallocate network for instance. [ 1855.586116] env[62519]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-caaeafb1-8cfb-48d6-b0e5-3610ef60ee7b {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1855.676982] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38fa7d9e-bacb-4004-aa64-6ecf50802a2c {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1855.689084] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3b25edc-9d51-4a1e-bbb3-660fafca7edd {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1855.721322] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-573104bd-2b61-4d70-8af5-6d76f94742ae {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1855.730724] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a66d268-7699-4f98-a6e1-228ec801a529 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1855.747514] env[62519]: DEBUG nova.compute.provider_tree [None req-f109637f-debb-4743-b81e-5f304008181d tempest-InstanceActionsNegativeTestJSON-1848810980 tempest-InstanceActionsNegativeTestJSON-1848810980-project-member] Inventory has not changed in ProviderTree for provider: f8ca0d98-9158-4b85-ae0e-b106f966dd44 {{(pid=62519) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1855.762237] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1803133, 'name': CreateVM_Task, 'duration_secs': 0.704681} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1855.762237] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 70abb2e0-1ff2-49dd-b40f-9cac244a249e] Created VM on the ESX host {{(pid=62519) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1855.762822] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2e8f93fa-12d9-4bc8-a7b4-b29377242bb0 tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1855.763780] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2e8f93fa-12d9-4bc8-a7b4-b29377242bb0 tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] Acquired lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1855.763780] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2e8f93fa-12d9-4bc8-a7b4-b29377242bb0 tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1855.763780] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-20b0f162-6c7a-4b6b-8525-36ef2bdcd072 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1855.771378] env[62519]: DEBUG oslo_vmware.api [None req-2e8f93fa-12d9-4bc8-a7b4-b29377242bb0 tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] Waiting for the task: (returnval){ [ 1855.771378] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]52bdc566-d9b3-0bca-5e04-f72a8b61e798" [ 1855.771378] env[62519]: _type = "Task" [ 1855.771378] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1855.784369] env[62519]: DEBUG oslo_vmware.api [None req-2e8f93fa-12d9-4bc8-a7b4-b29377242bb0 tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52bdc566-d9b3-0bca-5e04-f72a8b61e798, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1855.784369] env[62519]: DEBUG nova.network.neutron [None req-c15f2384-59b6-42a1-b31c-4c8a30f2f7c2 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] [instance: 9327a897-8e4f-4c59-952e-aecfac4028e0] Successfully updated port: 885622bf-cd63-46f6-ac64-de4e37a95265 {{(pid=62519) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1855.825801] env[62519]: DEBUG nova.compute.manager [req-fc59b3c4-89fd-4518-8a2e-23eb6e75f10a req-4b3b8975-ff06-4067-bdb0-532ccfd3e278 service nova] [instance: 9327a897-8e4f-4c59-952e-aecfac4028e0] Received event network-vif-plugged-885622bf-cd63-46f6-ac64-de4e37a95265 {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1855.826101] env[62519]: DEBUG oslo_concurrency.lockutils [req-fc59b3c4-89fd-4518-8a2e-23eb6e75f10a req-4b3b8975-ff06-4067-bdb0-532ccfd3e278 service nova] Acquiring lock "9327a897-8e4f-4c59-952e-aecfac4028e0-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1855.826399] env[62519]: DEBUG oslo_concurrency.lockutils [req-fc59b3c4-89fd-4518-8a2e-23eb6e75f10a req-4b3b8975-ff06-4067-bdb0-532ccfd3e278 service nova] Lock "9327a897-8e4f-4c59-952e-aecfac4028e0-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1855.826663] env[62519]: DEBUG oslo_concurrency.lockutils [req-fc59b3c4-89fd-4518-8a2e-23eb6e75f10a req-4b3b8975-ff06-4067-bdb0-532ccfd3e278 service nova] Lock "9327a897-8e4f-4c59-952e-aecfac4028e0-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1855.826997] env[62519]: DEBUG nova.compute.manager [req-fc59b3c4-89fd-4518-8a2e-23eb6e75f10a req-4b3b8975-ff06-4067-bdb0-532ccfd3e278 service nova] [instance: 9327a897-8e4f-4c59-952e-aecfac4028e0] No waiting events found dispatching network-vif-plugged-885622bf-cd63-46f6-ac64-de4e37a95265 {{(pid=62519) pop_instance_event /opt/stack/nova/nova/compute/manager.py:323}} [ 1855.827425] env[62519]: WARNING nova.compute.manager [req-fc59b3c4-89fd-4518-8a2e-23eb6e75f10a req-4b3b8975-ff06-4067-bdb0-532ccfd3e278 service nova] [instance: 9327a897-8e4f-4c59-952e-aecfac4028e0] Received unexpected event network-vif-plugged-885622bf-cd63-46f6-ac64-de4e37a95265 for instance with vm_state building and task_state spawning. 
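The records just above show the external-event path: nova-compute receives network-vif-plugged-885622bf-cd63-46f6-ac64-de4e37a95265 for instance 9327a897-8e4f-4c59-952e-aecfac4028e0, briefly takes the per-instance "-events" lock, finds no registered waiter, and emits the WARNING because the instance is still in vm_state building / task_state spawning. The snippet below is a minimal, hypothetical sketch of that pop-or-warn pattern; the class and function names are illustrative placeholders, not Nova's actual InstanceEvents code.

import logging
import threading

LOG = logging.getLogger(__name__)

class InstanceEvents:
    """Hypothetical per-instance event registry (illustration only):
    spawning code registers a waiter for a named event, and incoming
    external events either wake that waiter or are reported as
    unexpected, as in the WARNING above."""

    def __init__(self):
        self._lock = threading.Lock()   # stands in for the "-events" lock in the log
        self._waiters = {}              # (instance_id, event_name) -> threading.Event

    def prepare_for_event(self, instance_id, event_name):
        with self._lock:
            waiter = threading.Event()
            self._waiters[(instance_id, event_name)] = waiter
            return waiter

    def pop_instance_event(self, instance_id, event_name):
        with self._lock:
            return self._waiters.pop((instance_id, event_name), None)

def external_instance_event(events, instance_id, event_name, vm_state, task_state):
    waiter = events.pop_instance_event(instance_id, event_name)
    if waiter is None:
        # No one was waiting for this event: log it, as nova-compute does above.
        LOG.warning("Received unexpected event %s for instance %s "
                    "with vm_state %s and task_state %s",
                    event_name, instance_id, vm_state, task_state)
    else:
        waiter.set()  # wake whoever called prepare_for_event()

if __name__ == "__main__":
    logging.basicConfig(level=logging.DEBUG)
    ev = InstanceEvents()
    external_instance_event(ev, "9327a897", "network-vif-plugged-885622bf",
                            "building", "spawning")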
[ 1855.853868] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Lock "646c9dfc-7b78-4cdb-b4f5-480c43af38c4" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.521s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1856.045114] env[62519]: DEBUG oslo_concurrency.lockutils [None req-ec833f3f-01a3-48fa-bb0f-21cdc93c5d7c tempest-ServersTestManualDisk-231377638 tempest-ServersTestManualDisk-231377638-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1856.090958] env[62519]: DEBUG nova.network.neutron [req-2fdc305e-aba1-43fb-905c-3dfc287425f6 req-3513f54e-fe1d-4d8b-bcb6-368379604af5 service nova] [instance: 70abb2e0-1ff2-49dd-b40f-9cac244a249e] Updated VIF entry in instance network info cache for port 4a3113f5-f1bd-4056-9bbe-75a22f8189bc. {{(pid=62519) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1856.091440] env[62519]: DEBUG nova.network.neutron [req-2fdc305e-aba1-43fb-905c-3dfc287425f6 req-3513f54e-fe1d-4d8b-bcb6-368379604af5 service nova] [instance: 70abb2e0-1ff2-49dd-b40f-9cac244a249e] Updating instance_info_cache with network_info: [{"id": "4a3113f5-f1bd-4056-9bbe-75a22f8189bc", "address": "fa:16:3e:c0:7e:c9", "network": {"id": "b50c1a89-c14c-481c-b5d9-748f18863a45", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-698120386-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9808ad7e38e34658aac06ebc932b0e32", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f67a2790-f2b0-4d03-b606-0bfaee7a4229", "external-id": "nsx-vlan-transportzone-187", "segmentation_id": 187, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4a3113f5-f1", "ovs_interfaceid": "4a3113f5-f1bd-4056-9bbe-75a22f8189bc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1856.260193] env[62519]: DEBUG nova.scheduler.client.report [None req-f109637f-debb-4743-b81e-5f304008181d tempest-InstanceActionsNegativeTestJSON-1848810980 tempest-InstanceActionsNegativeTestJSON-1848810980-project-member] Inventory has not changed for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 157, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1856.268930] env[62519]: DEBUG nova.network.neutron [None 
req-51535aaf-a765-460c-a147-f584ca010233 tempest-AttachInterfacesUnderV243Test-2090758485 tempest-AttachInterfacesUnderV243Test-2090758485-project-member] [instance: 9f71845a-e80c-4822-b3de-717f1d83bc49] Building network info cache for instance {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1856.292117] env[62519]: DEBUG oslo_vmware.api [None req-2e8f93fa-12d9-4bc8-a7b4-b29377242bb0 tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52bdc566-d9b3-0bca-5e04-f72a8b61e798, 'name': SearchDatastore_Task, 'duration_secs': 0.01165} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1856.295734] env[62519]: DEBUG oslo_concurrency.lockutils [None req-c15f2384-59b6-42a1-b31c-4c8a30f2f7c2 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Acquiring lock "refresh_cache-9327a897-8e4f-4c59-952e-aecfac4028e0" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1856.297271] env[62519]: DEBUG oslo_concurrency.lockutils [None req-c15f2384-59b6-42a1-b31c-4c8a30f2f7c2 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Acquired lock "refresh_cache-9327a897-8e4f-4c59-952e-aecfac4028e0" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1856.297442] env[62519]: DEBUG nova.network.neutron [None req-c15f2384-59b6-42a1-b31c-4c8a30f2f7c2 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] [instance: 9327a897-8e4f-4c59-952e-aecfac4028e0] Building network info cache for instance {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1856.300027] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2e8f93fa-12d9-4bc8-a7b4-b29377242bb0 tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] Releasing lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1856.300422] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-2e8f93fa-12d9-4bc8-a7b4-b29377242bb0 tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] [instance: 70abb2e0-1ff2-49dd-b40f-9cac244a249e] Processing image 15793716-f1d9-4a86-9030-717adf498693 {{(pid=62519) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1856.300803] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2e8f93fa-12d9-4bc8-a7b4-b29377242bb0 tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1856.301073] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2e8f93fa-12d9-4bc8-a7b4-b29377242bb0 tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] Acquired lock "[datastore1] 
devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1856.301820] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-2e8f93fa-12d9-4bc8-a7b4-b29377242bb0 tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62519) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1856.302746] env[62519]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b0e3b234-5e8f-48f7-8479-aa0b6085850c {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1856.322020] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-2e8f93fa-12d9-4bc8-a7b4-b29377242bb0 tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62519) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1856.322020] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-2e8f93fa-12d9-4bc8-a7b4-b29377242bb0 tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62519) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1856.322020] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bbe8f5ca-38de-4809-8bc5-b9839c0d5ef1 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1856.330616] env[62519]: DEBUG oslo_vmware.api [None req-2e8f93fa-12d9-4bc8-a7b4-b29377242bb0 tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] Waiting for the task: (returnval){ [ 1856.330616] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]5218c785-6eee-57bc-d673-f2d60f3b57c5" [ 1856.330616] env[62519]: _type = "Task" [ 1856.330616] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1856.345048] env[62519]: DEBUG oslo_vmware.api [None req-2e8f93fa-12d9-4bc8-a7b4-b29377242bb0 tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]5218c785-6eee-57bc-d673-f2d60f3b57c5, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1856.594877] env[62519]: DEBUG oslo_concurrency.lockutils [req-2fdc305e-aba1-43fb-905c-3dfc287425f6 req-3513f54e-fe1d-4d8b-bcb6-368379604af5 service nova] Releasing lock "refresh_cache-70abb2e0-1ff2-49dd-b40f-9cac244a249e" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1856.597819] env[62519]: DEBUG nova.compute.manager [req-2fdc305e-aba1-43fb-905c-3dfc287425f6 req-3513f54e-fe1d-4d8b-bcb6-368379604af5 service nova] [instance: 0dc95d5e-270c-44cf-b13a-d9ecd0cc3b17] Received event network-vif-deleted-b2132242-f6ec-4f0b-a6ef-d49e2d0db056 {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1856.597819] env[62519]: INFO nova.compute.manager [req-2fdc305e-aba1-43fb-905c-3dfc287425f6 req-3513f54e-fe1d-4d8b-bcb6-368379604af5 service nova] [instance: 0dc95d5e-270c-44cf-b13a-d9ecd0cc3b17] Neutron deleted interface b2132242-f6ec-4f0b-a6ef-d49e2d0db056; detaching it from the instance and deleting it from the info cache [ 1856.597819] env[62519]: DEBUG nova.network.neutron [req-2fdc305e-aba1-43fb-905c-3dfc287425f6 req-3513f54e-fe1d-4d8b-bcb6-368379604af5 service nova] [instance: 0dc95d5e-270c-44cf-b13a-d9ecd0cc3b17] Updating instance_info_cache with network_info: [] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1856.753688] env[62519]: DEBUG nova.compute.manager [req-e02b5fd2-d3e2-458d-9248-b2c7d116ca2a req-eb22218a-cbb8-458b-a4d3-5c5c5efbd1e9 service nova] [instance: 9327a897-8e4f-4c59-952e-aecfac4028e0] Received event network-changed-885622bf-cd63-46f6-ac64-de4e37a95265 {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1856.753911] env[62519]: DEBUG nova.compute.manager [req-e02b5fd2-d3e2-458d-9248-b2c7d116ca2a req-eb22218a-cbb8-458b-a4d3-5c5c5efbd1e9 service nova] [instance: 9327a897-8e4f-4c59-952e-aecfac4028e0] Refreshing instance network info cache due to event network-changed-885622bf-cd63-46f6-ac64-de4e37a95265. 
{{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11628}} [ 1856.754206] env[62519]: DEBUG oslo_concurrency.lockutils [req-e02b5fd2-d3e2-458d-9248-b2c7d116ca2a req-eb22218a-cbb8-458b-a4d3-5c5c5efbd1e9 service nova] Acquiring lock "refresh_cache-9327a897-8e4f-4c59-952e-aecfac4028e0" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1856.766109] env[62519]: DEBUG oslo_concurrency.lockutils [None req-f109637f-debb-4743-b81e-5f304008181d tempest-InstanceActionsNegativeTestJSON-1848810980 tempest-InstanceActionsNegativeTestJSON-1848810980-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.046s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1856.768560] env[62519]: DEBUG oslo_concurrency.lockutils [None req-570add47-f143-4e78-8238-e22c3295b4d0 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 4.613s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1856.804707] env[62519]: INFO nova.scheduler.client.report [None req-f109637f-debb-4743-b81e-5f304008181d tempest-InstanceActionsNegativeTestJSON-1848810980 tempest-InstanceActionsNegativeTestJSON-1848810980-project-member] Deleted allocations for instance fcff790b-d267-4d8c-80d8-ad66cfb66539 [ 1856.839849] env[62519]: DEBUG nova.network.neutron [None req-c15f2384-59b6-42a1-b31c-4c8a30f2f7c2 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] [instance: 9327a897-8e4f-4c59-952e-aecfac4028e0] Instance cache missing network info. {{(pid=62519) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1856.847787] env[62519]: DEBUG oslo_vmware.api [None req-2e8f93fa-12d9-4bc8-a7b4-b29377242bb0 tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]5218c785-6eee-57bc-d673-f2d60f3b57c5, 'name': SearchDatastore_Task, 'duration_secs': 0.01436} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1856.849127] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-274eee4e-5bec-4316-8b3c-5c53d820a257 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1856.856863] env[62519]: DEBUG oslo_vmware.api [None req-2e8f93fa-12d9-4bc8-a7b4-b29377242bb0 tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] Waiting for the task: (returnval){ [ 1856.856863] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]526bcbe8-edf1-e081-a28d-6823a4065674" [ 1856.856863] env[62519]: _type = "Task" [ 1856.856863] env[62519]: } to complete. 
{{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1856.867049] env[62519]: DEBUG oslo_vmware.api [None req-2e8f93fa-12d9-4bc8-a7b4-b29377242bb0 tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]526bcbe8-edf1-e081-a28d-6823a4065674, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1856.978010] env[62519]: DEBUG nova.network.neutron [-] [instance: 99f22198-1a65-4d0d-b665-90c7063dbdb9] Updating instance_info_cache with network_info: [] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1857.081569] env[62519]: DEBUG nova.network.neutron [None req-c15f2384-59b6-42a1-b31c-4c8a30f2f7c2 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] [instance: 9327a897-8e4f-4c59-952e-aecfac4028e0] Updating instance_info_cache with network_info: [{"id": "885622bf-cd63-46f6-ac64-de4e37a95265", "address": "fa:16:3e:51:40:85", "network": {"id": "8af40fa9-af04-4837-8508-5d0e5b7f1d9c", "bridge": "br-int", "label": "tempest-ServersTestJSON-1824239670-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0e755fb5a6e94068b6c99b1638081f5f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7ab8d568-adb0-4f3b-b6cc-68413e6546ae", "external-id": "nsx-vlan-transportzone-86", "segmentation_id": 86, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap885622bf-cd", "ovs_interfaceid": "885622bf-cd63-46f6-ac64-de4e37a95265", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1857.099460] env[62519]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-f6ba0f73-e2cd-4b96-939a-4398aef50364 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1857.111090] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea81fedc-3de5-439a-90c5-647bf367d4b5 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1857.149662] env[62519]: DEBUG nova.compute.manager [req-2fdc305e-aba1-43fb-905c-3dfc287425f6 req-3513f54e-fe1d-4d8b-bcb6-368379604af5 service nova] [instance: 0dc95d5e-270c-44cf-b13a-d9ecd0cc3b17] Detach interface failed, port_id=b2132242-f6ec-4f0b-a6ef-d49e2d0db056, reason: Instance 0dc95d5e-270c-44cf-b13a-d9ecd0cc3b17 could not be found. 
{{(pid=62519) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11455}} [ 1857.192584] env[62519]: DEBUG nova.network.neutron [None req-51535aaf-a765-460c-a147-f584ca010233 tempest-AttachInterfacesUnderV243Test-2090758485 tempest-AttachInterfacesUnderV243Test-2090758485-project-member] [instance: 9f71845a-e80c-4822-b3de-717f1d83bc49] Updating instance_info_cache with network_info: [{"id": "89b7045e-9b9a-45f2-b8e7-e5d063bd1605", "address": "fa:16:3e:6d:32:a1", "network": {"id": "65b31600-7594-4ce0-8103-b44b57a6db75", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-145323623-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.180", "type": "floating", "version": 4, "meta": {}}]}, {"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "17c3617401cc4be0bbb6851dba631e98", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a407774d-9c2a-411d-9d6f-9ca733b97f3f", "external-id": "nsx-vlan-transportzone-710", "segmentation_id": 710, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap89b7045e-9b", "ovs_interfaceid": "89b7045e-9b9a-45f2-b8e7-e5d063bd1605", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1857.277287] env[62519]: INFO nova.compute.claims [None req-570add47-f143-4e78-8238-e22c3295b4d0 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] [instance: eb5de0a4-0af3-4731-ab30-3ae3d72207a7] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1857.319349] env[62519]: DEBUG oslo_concurrency.lockutils [None req-f109637f-debb-4743-b81e-5f304008181d tempest-InstanceActionsNegativeTestJSON-1848810980 tempest-InstanceActionsNegativeTestJSON-1848810980-project-member] Lock "fcff790b-d267-4d8c-80d8-ad66cfb66539" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 9.786s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1857.371156] env[62519]: DEBUG oslo_vmware.api [None req-2e8f93fa-12d9-4bc8-a7b4-b29377242bb0 tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]526bcbe8-edf1-e081-a28d-6823a4065674, 'name': SearchDatastore_Task, 'duration_secs': 0.019658} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1857.371770] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2e8f93fa-12d9-4bc8-a7b4-b29377242bb0 tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] Releasing lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1857.371982] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-2e8f93fa-12d9-4bc8-a7b4-b29377242bb0 tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk to [datastore1] 70abb2e0-1ff2-49dd-b40f-9cac244a249e/70abb2e0-1ff2-49dd-b40f-9cac244a249e.vmdk {{(pid=62519) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1857.372279] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-86f02d1a-79e8-4ab2-8bcb-4cb401aa7791 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1857.385699] env[62519]: DEBUG oslo_vmware.api [None req-2e8f93fa-12d9-4bc8-a7b4-b29377242bb0 tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] Waiting for the task: (returnval){ [ 1857.385699] env[62519]: value = "task-1803137" [ 1857.385699] env[62519]: _type = "Task" [ 1857.385699] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1857.402299] env[62519]: DEBUG oslo_vmware.api [None req-2e8f93fa-12d9-4bc8-a7b4-b29377242bb0 tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] Task: {'id': task-1803137, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1857.488601] env[62519]: INFO nova.compute.manager [-] [instance: 99f22198-1a65-4d0d-b665-90c7063dbdb9] Took 2.21 seconds to deallocate network for instance. 
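The surrounding records also show the generic VMware task-polling loop at work: SearchDatastore_Task and CopyVirtualDisk_Task are polled ("progress is 0%" ... "51%") until they report completed successfully with a recorded duration_secs, after which the root disk is extended. The sketch below is a simplified, hypothetical illustration of that poll-until-done pattern; get_task_info and TaskFailed are invented stand-ins, and the real logic lives in oslo_vmware.api (wait_for_task / _poll_task), not in this snippet.

import time

class TaskFailed(Exception):
    """Raised when the polled task reports an error (illustrative only)."""

def wait_for_task(get_task_info, task_id, interval=0.5):
    """Poll a task until it finishes, mirroring the _poll_task records
    in the log. get_task_info is a hypothetical callable returning a
    dict such as {'state': 'running', 'progress': 51} or
    {'state': 'success', 'duration_secs': 0.59653}."""
    start = time.monotonic()
    while True:
        info = get_task_info(task_id)
        state = info.get("state")
        if state == "success":
            info.setdefault("duration_secs", round(time.monotonic() - start, 6))
            return info
        if state == "error":
            raise TaskFailed(info.get("message", "task %s failed" % task_id))
        # e.g. "Task: {'id': task-1803137, ...} progress is 51%."
        print("Task %s progress is %s%%." % (task_id, info.get("progress", 0)))
        time.sleep(interval)

if __name__ == "__main__":
    # Fake task states standing in for CopyVirtualDisk_Task progress reports.
    states = iter([{"state": "running", "progress": 0},
                   {"state": "running", "progress": 51},
                   {"state": "success"}])
    print(wait_for_task(lambda _tid: next(states), "task-1803137", interval=0.01))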
[ 1857.584695] env[62519]: DEBUG oslo_concurrency.lockutils [None req-c15f2384-59b6-42a1-b31c-4c8a30f2f7c2 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Releasing lock "refresh_cache-9327a897-8e4f-4c59-952e-aecfac4028e0" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1857.585184] env[62519]: DEBUG nova.compute.manager [None req-c15f2384-59b6-42a1-b31c-4c8a30f2f7c2 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] [instance: 9327a897-8e4f-4c59-952e-aecfac4028e0] Instance network_info: |[{"id": "885622bf-cd63-46f6-ac64-de4e37a95265", "address": "fa:16:3e:51:40:85", "network": {"id": "8af40fa9-af04-4837-8508-5d0e5b7f1d9c", "bridge": "br-int", "label": "tempest-ServersTestJSON-1824239670-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0e755fb5a6e94068b6c99b1638081f5f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7ab8d568-adb0-4f3b-b6cc-68413e6546ae", "external-id": "nsx-vlan-transportzone-86", "segmentation_id": 86, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap885622bf-cd", "ovs_interfaceid": "885622bf-cd63-46f6-ac64-de4e37a95265", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62519) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2004}} [ 1857.585625] env[62519]: DEBUG oslo_concurrency.lockutils [req-e02b5fd2-d3e2-458d-9248-b2c7d116ca2a req-eb22218a-cbb8-458b-a4d3-5c5c5efbd1e9 service nova] Acquired lock "refresh_cache-9327a897-8e4f-4c59-952e-aecfac4028e0" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1857.585911] env[62519]: DEBUG nova.network.neutron [req-e02b5fd2-d3e2-458d-9248-b2c7d116ca2a req-eb22218a-cbb8-458b-a4d3-5c5c5efbd1e9 service nova] [instance: 9327a897-8e4f-4c59-952e-aecfac4028e0] Refreshing network info cache for port 885622bf-cd63-46f6-ac64-de4e37a95265 {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1857.591625] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-c15f2384-59b6-42a1-b31c-4c8a30f2f7c2 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] [instance: 9327a897-8e4f-4c59-952e-aecfac4028e0] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:51:40:85', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '7ab8d568-adb0-4f3b-b6cc-68413e6546ae', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '885622bf-cd63-46f6-ac64-de4e37a95265', 'vif_model': 'vmxnet3'}] {{(pid=62519) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1857.604743] env[62519]: DEBUG oslo.service.loopingcall [None req-c15f2384-59b6-42a1-b31c-4c8a30f2f7c2 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62519) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1857.610681] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9327a897-8e4f-4c59-952e-aecfac4028e0] Creating VM on the ESX host {{(pid=62519) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1857.611378] env[62519]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-acbb31f2-bf6e-4caf-add9-539d9bdecff8 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1857.637199] env[62519]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1857.637199] env[62519]: value = "task-1803138" [ 1857.637199] env[62519]: _type = "Task" [ 1857.637199] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1857.647213] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1803138, 'name': CreateVM_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1857.695532] env[62519]: DEBUG oslo_concurrency.lockutils [None req-51535aaf-a765-460c-a147-f584ca010233 tempest-AttachInterfacesUnderV243Test-2090758485 tempest-AttachInterfacesUnderV243Test-2090758485-project-member] Releasing lock "refresh_cache-9f71845a-e80c-4822-b3de-717f1d83bc49" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1857.695887] env[62519]: DEBUG nova.compute.manager [None req-51535aaf-a765-460c-a147-f584ca010233 tempest-AttachInterfacesUnderV243Test-2090758485 tempest-AttachInterfacesUnderV243Test-2090758485-project-member] [instance: 9f71845a-e80c-4822-b3de-717f1d83bc49] Inject network info {{(pid=62519) _inject_network_info /opt/stack/nova/nova/compute/manager.py:7696}} [ 1857.696265] env[62519]: DEBUG nova.compute.manager [None req-51535aaf-a765-460c-a147-f584ca010233 tempest-AttachInterfacesUnderV243Test-2090758485 tempest-AttachInterfacesUnderV243Test-2090758485-project-member] [instance: 9f71845a-e80c-4822-b3de-717f1d83bc49] network_info to inject: |[{"id": "89b7045e-9b9a-45f2-b8e7-e5d063bd1605", "address": "fa:16:3e:6d:32:a1", "network": {"id": "65b31600-7594-4ce0-8103-b44b57a6db75", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-145323623-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.180", "type": "floating", "version": 4, "meta": {}}]}, {"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "17c3617401cc4be0bbb6851dba631e98", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a407774d-9c2a-411d-9d6f-9ca733b97f3f", "external-id": "nsx-vlan-transportzone-710", "segmentation_id": 710, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap89b7045e-9b", "ovs_interfaceid": "89b7045e-9b9a-45f2-b8e7-e5d063bd1605", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62519) 
_inject_network_info /opt/stack/nova/nova/compute/manager.py:7697}} [ 1857.702313] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-51535aaf-a765-460c-a147-f584ca010233 tempest-AttachInterfacesUnderV243Test-2090758485 tempest-AttachInterfacesUnderV243Test-2090758485-project-member] [instance: 9f71845a-e80c-4822-b3de-717f1d83bc49] Reconfiguring VM instance to set the machine id {{(pid=62519) _set_machine_id /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1796}} [ 1857.702740] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4be84d6d-ddf3-4ab2-96e4-7cbcecf32c9c {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1857.724572] env[62519]: DEBUG oslo_vmware.api [None req-51535aaf-a765-460c-a147-f584ca010233 tempest-AttachInterfacesUnderV243Test-2090758485 tempest-AttachInterfacesUnderV243Test-2090758485-project-member] Waiting for the task: (returnval){ [ 1857.724572] env[62519]: value = "task-1803139" [ 1857.724572] env[62519]: _type = "Task" [ 1857.724572] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1857.736377] env[62519]: DEBUG oslo_vmware.api [None req-51535aaf-a765-460c-a147-f584ca010233 tempest-AttachInterfacesUnderV243Test-2090758485 tempest-AttachInterfacesUnderV243Test-2090758485-project-member] Task: {'id': task-1803139, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1857.785292] env[62519]: INFO nova.compute.resource_tracker [None req-570add47-f143-4e78-8238-e22c3295b4d0 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] [instance: eb5de0a4-0af3-4731-ab30-3ae3d72207a7] Updating resource usage from migration 778f14b8-0ef2-4f73-ab48-4844bd1b034d [ 1857.904915] env[62519]: DEBUG oslo_vmware.api [None req-2e8f93fa-12d9-4bc8-a7b4-b29377242bb0 tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] Task: {'id': task-1803137, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1857.996262] env[62519]: DEBUG oslo_concurrency.lockutils [None req-c14e9351-9053-4f30-bc7f-20b46656893b tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1858.118928] env[62519]: DEBUG nova.network.neutron [req-e02b5fd2-d3e2-458d-9248-b2c7d116ca2a req-eb22218a-cbb8-458b-a4d3-5c5c5efbd1e9 service nova] [instance: 9327a897-8e4f-4c59-952e-aecfac4028e0] Updated VIF entry in instance network info cache for port 885622bf-cd63-46f6-ac64-de4e37a95265. 
{{(pid=62519) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1858.119548] env[62519]: DEBUG nova.network.neutron [req-e02b5fd2-d3e2-458d-9248-b2c7d116ca2a req-eb22218a-cbb8-458b-a4d3-5c5c5efbd1e9 service nova] [instance: 9327a897-8e4f-4c59-952e-aecfac4028e0] Updating instance_info_cache with network_info: [{"id": "885622bf-cd63-46f6-ac64-de4e37a95265", "address": "fa:16:3e:51:40:85", "network": {"id": "8af40fa9-af04-4837-8508-5d0e5b7f1d9c", "bridge": "br-int", "label": "tempest-ServersTestJSON-1824239670-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0e755fb5a6e94068b6c99b1638081f5f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7ab8d568-adb0-4f3b-b6cc-68413e6546ae", "external-id": "nsx-vlan-transportzone-86", "segmentation_id": 86, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap885622bf-cd", "ovs_interfaceid": "885622bf-cd63-46f6-ac64-de4e37a95265", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1858.151803] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1803138, 'name': CreateVM_Task} progress is 99%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1858.165718] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5228d9b8-75a5-48ff-8136-d10ff1c4feed {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1858.176630] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27b0dfcb-091f-4389-a4b8-eaee159761e7 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1858.213090] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d5bf896-36ca-47ba-9c18-6427e13473c5 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1858.217541] env[62519]: DEBUG nova.objects.instance [None req-8b392ec4-9914-40da-a936-c7e4985c1427 tempest-AttachInterfacesUnderV243Test-2090758485 tempest-AttachInterfacesUnderV243Test-2090758485-project-member] Lazy-loading 'flavor' on Instance uuid 9f71845a-e80c-4822-b3de-717f1d83bc49 {{(pid=62519) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1858.226103] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fcd06904-039d-4bb1-999a-c00067eaa8d6 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1858.243617] env[62519]: DEBUG oslo_vmware.api [None req-51535aaf-a765-460c-a147-f584ca010233 tempest-AttachInterfacesUnderV243Test-2090758485 tempest-AttachInterfacesUnderV243Test-2090758485-project-member] Task: {'id': task-1803139, 'name': ReconfigVM_Task, 
'duration_secs': 0.230747} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1858.252753] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-51535aaf-a765-460c-a147-f584ca010233 tempest-AttachInterfacesUnderV243Test-2090758485 tempest-AttachInterfacesUnderV243Test-2090758485-project-member] [instance: 9f71845a-e80c-4822-b3de-717f1d83bc49] Reconfigured VM instance to set the machine id {{(pid=62519) _set_machine_id /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1799}} [ 1858.257109] env[62519]: DEBUG nova.compute.provider_tree [None req-570add47-f143-4e78-8238-e22c3295b4d0 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Inventory has not changed in ProviderTree for provider: f8ca0d98-9158-4b85-ae0e-b106f966dd44 {{(pid=62519) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1858.405289] env[62519]: DEBUG oslo_vmware.api [None req-2e8f93fa-12d9-4bc8-a7b4-b29377242bb0 tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] Task: {'id': task-1803137, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.59653} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1858.408306] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-2e8f93fa-12d9-4bc8-a7b4-b29377242bb0 tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk to [datastore1] 70abb2e0-1ff2-49dd-b40f-9cac244a249e/70abb2e0-1ff2-49dd-b40f-9cac244a249e.vmdk {{(pid=62519) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1858.408707] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-2e8f93fa-12d9-4bc8-a7b4-b29377242bb0 tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] [instance: 70abb2e0-1ff2-49dd-b40f-9cac244a249e] Extending root virtual disk to 1048576 {{(pid=62519) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1858.409095] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-2ae4d7d6-7932-4b18-b0c5-4d0078c1b04b {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1858.419522] env[62519]: DEBUG oslo_vmware.api [None req-2e8f93fa-12d9-4bc8-a7b4-b29377242bb0 tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] Waiting for the task: (returnval){ [ 1858.419522] env[62519]: value = "task-1803140" [ 1858.419522] env[62519]: _type = "Task" [ 1858.419522] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1858.435379] env[62519]: DEBUG oslo_vmware.api [None req-2e8f93fa-12d9-4bc8-a7b4-b29377242bb0 tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] Task: {'id': task-1803140, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1858.625055] env[62519]: DEBUG oslo_concurrency.lockutils [req-e02b5fd2-d3e2-458d-9248-b2c7d116ca2a req-eb22218a-cbb8-458b-a4d3-5c5c5efbd1e9 service nova] Releasing lock "refresh_cache-9327a897-8e4f-4c59-952e-aecfac4028e0" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1858.625055] env[62519]: DEBUG nova.compute.manager [req-e02b5fd2-d3e2-458d-9248-b2c7d116ca2a req-eb22218a-cbb8-458b-a4d3-5c5c5efbd1e9 service nova] [instance: 9f71845a-e80c-4822-b3de-717f1d83bc49] Received event network-changed-89b7045e-9b9a-45f2-b8e7-e5d063bd1605 {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1858.625055] env[62519]: DEBUG nova.compute.manager [req-e02b5fd2-d3e2-458d-9248-b2c7d116ca2a req-eb22218a-cbb8-458b-a4d3-5c5c5efbd1e9 service nova] [instance: 9f71845a-e80c-4822-b3de-717f1d83bc49] Refreshing instance network info cache due to event network-changed-89b7045e-9b9a-45f2-b8e7-e5d063bd1605. {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11628}} [ 1858.625055] env[62519]: DEBUG oslo_concurrency.lockutils [req-e02b5fd2-d3e2-458d-9248-b2c7d116ca2a req-eb22218a-cbb8-458b-a4d3-5c5c5efbd1e9 service nova] Acquiring lock "refresh_cache-9f71845a-e80c-4822-b3de-717f1d83bc49" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1858.625055] env[62519]: DEBUG oslo_concurrency.lockutils [req-e02b5fd2-d3e2-458d-9248-b2c7d116ca2a req-eb22218a-cbb8-458b-a4d3-5c5c5efbd1e9 service nova] Acquired lock "refresh_cache-9f71845a-e80c-4822-b3de-717f1d83bc49" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1858.625055] env[62519]: DEBUG nova.network.neutron [req-e02b5fd2-d3e2-458d-9248-b2c7d116ca2a req-eb22218a-cbb8-458b-a4d3-5c5c5efbd1e9 service nova] [instance: 9f71845a-e80c-4822-b3de-717f1d83bc49] Refreshing network info cache for port 89b7045e-9b9a-45f2-b8e7-e5d063bd1605 {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1858.653223] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1803138, 'name': CreateVM_Task, 'duration_secs': 0.607675} completed successfully. 
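The Acquiring/Acquired/Releasing lock entries above come from oslo.concurrency's lockutils wrappers, which record how long a caller waited for and held a named lock. The sketch below shows the two usual ways such named locks are taken, assuming only the public oslo.concurrency API; the lock names are copied from the log and the function bodies are placeholders, not Nova's actual code.

    from oslo_concurrency import lockutils

    # Decorator form: every call serializes on the named lock, which is what
    # produces the "acquired ... waited Ns" / "released ... held Ns" lines.
    @lockutils.synchronized("compute_resources")
    def update_usage():
        pass  # placeholder for the critical section

    # Context-manager form for ad-hoc critical sections, e.g. while an
    # instance's network info cache is being refreshed.
    def refresh_cache(instance_uuid):
        with lockutils.lock("refresh_cache-%s" % instance_uuid):
            pass  # placeholder: rebuild the cache while holding the lock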
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1858.653223] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9327a897-8e4f-4c59-952e-aecfac4028e0] Created VM on the ESX host {{(pid=62519) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1858.653223] env[62519]: DEBUG oslo_concurrency.lockutils [None req-c15f2384-59b6-42a1-b31c-4c8a30f2f7c2 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1858.653223] env[62519]: DEBUG oslo_concurrency.lockutils [None req-c15f2384-59b6-42a1-b31c-4c8a30f2f7c2 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Acquired lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1858.653223] env[62519]: DEBUG oslo_concurrency.lockutils [None req-c15f2384-59b6-42a1-b31c-4c8a30f2f7c2 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1858.653784] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e1421108-7717-452b-80b1-00bed3d1ee71 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1858.659776] env[62519]: DEBUG oslo_vmware.api [None req-c15f2384-59b6-42a1-b31c-4c8a30f2f7c2 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Waiting for the task: (returnval){ [ 1858.659776] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]52b3293d-daf8-834f-15c2-d7993847f078" [ 1858.659776] env[62519]: _type = "Task" [ 1858.659776] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1858.669537] env[62519]: DEBUG oslo_vmware.api [None req-c15f2384-59b6-42a1-b31c-4c8a30f2f7c2 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52b3293d-daf8-834f-15c2-d7993847f078, 'name': SearchDatastore_Task} progress is 0%. 
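The repeated "Waiting for the task / progress is N% / completed successfully" entries are oslo.vmware's wait_for_task polling a vCenter task until it finishes. The loop below is a minimal, self-contained sketch of that poll-until-done pattern, not the library's implementation; make_fake_task only simulates the task state that the real code reads from the vCenter PropertyCollector.

    import itertools
    import time

    def make_fake_task():
        # Toy task whose reported progress climbs to 100%; stands in for the
        # vCenter task object that oslo.vmware polls over SOAP.
        steps = itertools.chain([0, 5, 51, 99], itertools.repeat(100))

        def fetch_state():
            progress = next(steps)
            state = "success" if progress >= 100 else "running"
            return state, progress

        return fetch_state

    def wait_for_task(fetch_state, poll_interval=0.1):
        # Poll until the task reports success or error, logging progress on
        # the way, as the "_poll_task ... progress is N%" entries do.
        while True:
            state, progress = fetch_state()
            if state == "success":
                return
            if state == "error":
                raise RuntimeError("task failed")
            print("progress is %d%%" % progress)
            time.sleep(poll_interval)

    wait_for_task(make_fake_task())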
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1858.722488] env[62519]: DEBUG oslo_concurrency.lockutils [None req-8b392ec4-9914-40da-a936-c7e4985c1427 tempest-AttachInterfacesUnderV243Test-2090758485 tempest-AttachInterfacesUnderV243Test-2090758485-project-member] Acquiring lock "refresh_cache-9f71845a-e80c-4822-b3de-717f1d83bc49" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1858.761315] env[62519]: DEBUG nova.scheduler.client.report [None req-570add47-f143-4e78-8238-e22c3295b4d0 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Inventory has not changed for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 157, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1858.933113] env[62519]: DEBUG oslo_vmware.api [None req-2e8f93fa-12d9-4bc8-a7b4-b29377242bb0 tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] Task: {'id': task-1803140, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.094328} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1858.933113] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-2e8f93fa-12d9-4bc8-a7b4-b29377242bb0 tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] [instance: 70abb2e0-1ff2-49dd-b40f-9cac244a249e] Extended root virtual disk {{(pid=62519) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1858.933113] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d32cbe90-b7a9-4a2f-b3ca-bca80837a9bb {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1858.968377] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-2e8f93fa-12d9-4bc8-a7b4-b29377242bb0 tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] [instance: 70abb2e0-1ff2-49dd-b40f-9cac244a249e] Reconfiguring VM instance instance-00000065 to attach disk [datastore1] 70abb2e0-1ff2-49dd-b40f-9cac244a249e/70abb2e0-1ff2-49dd-b40f-9cac244a249e.vmdk or device None with type sparse {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1858.968377] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-11adadbb-6747-49c2-8c79-8080a695aab8 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1858.990761] env[62519]: DEBUG oslo_vmware.api [None req-2e8f93fa-12d9-4bc8-a7b4-b29377242bb0 tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] Waiting for the task: (returnval){ [ 1858.990761] env[62519]: value = "task-1803142" [ 1858.990761] env[62519]: _type = "Task" [ 1858.990761] env[62519]: } to complete. 
{{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1859.000590] env[62519]: DEBUG oslo_vmware.api [None req-2e8f93fa-12d9-4bc8-a7b4-b29377242bb0 tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] Task: {'id': task-1803142, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1859.172731] env[62519]: DEBUG oslo_vmware.api [None req-c15f2384-59b6-42a1-b31c-4c8a30f2f7c2 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52b3293d-daf8-834f-15c2-d7993847f078, 'name': SearchDatastore_Task, 'duration_secs': 0.02523} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1859.173452] env[62519]: DEBUG oslo_concurrency.lockutils [None req-c15f2384-59b6-42a1-b31c-4c8a30f2f7c2 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Releasing lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1859.173452] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-c15f2384-59b6-42a1-b31c-4c8a30f2f7c2 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] [instance: 9327a897-8e4f-4c59-952e-aecfac4028e0] Processing image 15793716-f1d9-4a86-9030-717adf498693 {{(pid=62519) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1859.173679] env[62519]: DEBUG oslo_concurrency.lockutils [None req-c15f2384-59b6-42a1-b31c-4c8a30f2f7c2 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1859.173854] env[62519]: DEBUG oslo_concurrency.lockutils [None req-c15f2384-59b6-42a1-b31c-4c8a30f2f7c2 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Acquired lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1859.174092] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-c15f2384-59b6-42a1-b31c-4c8a30f2f7c2 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62519) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1859.174417] env[62519]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-388255a4-b8ec-4e71-9378-83b35075d1cc {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1859.190619] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-c15f2384-59b6-42a1-b31c-4c8a30f2f7c2 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62519) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1859.191065] env[62519]: DEBUG 
nova.virt.vmwareapi.vmops [None req-c15f2384-59b6-42a1-b31c-4c8a30f2f7c2 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62519) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1859.191676] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-25f47536-6414-4522-9dac-cfed7dbd6d0e {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1859.198444] env[62519]: DEBUG oslo_vmware.api [None req-c15f2384-59b6-42a1-b31c-4c8a30f2f7c2 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Waiting for the task: (returnval){ [ 1859.198444] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]52cd3101-10af-12cc-8a06-938d77a9df44" [ 1859.198444] env[62519]: _type = "Task" [ 1859.198444] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1859.210387] env[62519]: DEBUG oslo_vmware.api [None req-c15f2384-59b6-42a1-b31c-4c8a30f2f7c2 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52cd3101-10af-12cc-8a06-938d77a9df44, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1859.266207] env[62519]: DEBUG oslo_concurrency.lockutils [None req-570add47-f143-4e78-8238-e22c3295b4d0 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.498s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1859.266439] env[62519]: INFO nova.compute.manager [None req-570add47-f143-4e78-8238-e22c3295b4d0 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] [instance: eb5de0a4-0af3-4731-ab30-3ae3d72207a7] Migrating [ 1859.273707] env[62519]: DEBUG oslo_concurrency.lockutils [None req-ec833f3f-01a3-48fa-bb0f-21cdc93c5d7c tempest-ServersTestManualDisk-231377638 tempest-ServersTestManualDisk-231377638-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 3.229s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1859.273971] env[62519]: DEBUG nova.objects.instance [None req-ec833f3f-01a3-48fa-bb0f-21cdc93c5d7c tempest-ServersTestManualDisk-231377638 tempest-ServersTestManualDisk-231377638-project-member] Lazy-loading 'resources' on Instance uuid 0dc95d5e-270c-44cf-b13a-d9ecd0cc3b17 {{(pid=62519) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1859.404979] env[62519]: DEBUG nova.network.neutron [req-e02b5fd2-d3e2-458d-9248-b2c7d116ca2a req-eb22218a-cbb8-458b-a4d3-5c5c5efbd1e9 service nova] [instance: 9f71845a-e80c-4822-b3de-717f1d83bc49] Updated VIF entry in instance network info cache for port 89b7045e-9b9a-45f2-b8e7-e5d063bd1605. 
{{(pid=62519) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1859.406492] env[62519]: DEBUG nova.network.neutron [req-e02b5fd2-d3e2-458d-9248-b2c7d116ca2a req-eb22218a-cbb8-458b-a4d3-5c5c5efbd1e9 service nova] [instance: 9f71845a-e80c-4822-b3de-717f1d83bc49] Updating instance_info_cache with network_info: [{"id": "89b7045e-9b9a-45f2-b8e7-e5d063bd1605", "address": "fa:16:3e:6d:32:a1", "network": {"id": "65b31600-7594-4ce0-8103-b44b57a6db75", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-145323623-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.180", "type": "floating", "version": 4, "meta": {}}]}, {"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "17c3617401cc4be0bbb6851dba631e98", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a407774d-9c2a-411d-9d6f-9ca733b97f3f", "external-id": "nsx-vlan-transportzone-710", "segmentation_id": 710, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap89b7045e-9b", "ovs_interfaceid": "89b7045e-9b9a-45f2-b8e7-e5d063bd1605", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1859.504797] env[62519]: DEBUG oslo_vmware.api [None req-2e8f93fa-12d9-4bc8-a7b4-b29377242bb0 tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] Task: {'id': task-1803142, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1859.709991] env[62519]: DEBUG oslo_vmware.api [None req-c15f2384-59b6-42a1-b31c-4c8a30f2f7c2 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52cd3101-10af-12cc-8a06-938d77a9df44, 'name': SearchDatastore_Task, 'duration_secs': 0.015553} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1859.710889] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9705d1f7-1dcb-413f-bdfd-eff02b8cb443 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1859.717418] env[62519]: DEBUG oslo_vmware.api [None req-c15f2384-59b6-42a1-b31c-4c8a30f2f7c2 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Waiting for the task: (returnval){ [ 1859.717418] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]5265b5c2-1023-c4a6-350c-653781886bce" [ 1859.717418] env[62519]: _type = "Task" [ 1859.717418] env[62519]: } to complete. 
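The instance_info_cache updates above carry the full VIF/network model as JSON. The snippet below parses a trimmed copy of the entry for port 89b7045e-9b9a-45f2-b8e7-e5d063bd1605 and pulls out the fixed and floating addresses, just to make the nesting (vif -> network -> subnets -> ips -> floating_ips) explicit; fields not needed for that are omitted.

    import json

    # Trimmed copy of the cache entry logged above; only the fields read below
    # are kept.
    network_info = json.loads("""
    [{"id": "89b7045e-9b9a-45f2-b8e7-e5d063bd1605",
      "address": "fa:16:3e:6d:32:a1",
      "devname": "tap89b7045e-9b",
      "network": {"subnets": [{"cidr": "192.168.128.0/28",
        "ips": [{"address": "192.168.128.10", "type": "fixed",
                 "floating_ips": [{"address": "10.180.180.180",
                                   "type": "floating"}]}]}]}}]
    """)

    for vif in network_info:
        fixed = [ip["address"]
                 for subnet in vif["network"]["subnets"]
                 for ip in subnet["ips"]]
        floating = [fip["address"]
                    for subnet in vif["network"]["subnets"]
                    for ip in subnet["ips"]
                    for fip in ip.get("floating_ips", [])]
        print(vif["id"], vif["devname"], fixed, floating)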
{{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1859.726935] env[62519]: DEBUG oslo_vmware.api [None req-c15f2384-59b6-42a1-b31c-4c8a30f2f7c2 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]5265b5c2-1023-c4a6-350c-653781886bce, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1859.788649] env[62519]: DEBUG oslo_concurrency.lockutils [None req-570add47-f143-4e78-8238-e22c3295b4d0 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Acquiring lock "refresh_cache-eb5de0a4-0af3-4731-ab30-3ae3d72207a7" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1859.789551] env[62519]: DEBUG oslo_concurrency.lockutils [None req-570add47-f143-4e78-8238-e22c3295b4d0 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Acquired lock "refresh_cache-eb5de0a4-0af3-4731-ab30-3ae3d72207a7" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1859.790843] env[62519]: DEBUG nova.network.neutron [None req-570add47-f143-4e78-8238-e22c3295b4d0 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] [instance: eb5de0a4-0af3-4731-ab30-3ae3d72207a7] Building network info cache for instance {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1859.910032] env[62519]: DEBUG oslo_concurrency.lockutils [req-e02b5fd2-d3e2-458d-9248-b2c7d116ca2a req-eb22218a-cbb8-458b-a4d3-5c5c5efbd1e9 service nova] Releasing lock "refresh_cache-9f71845a-e80c-4822-b3de-717f1d83bc49" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1859.910032] env[62519]: DEBUG nova.compute.manager [req-e02b5fd2-d3e2-458d-9248-b2c7d116ca2a req-eb22218a-cbb8-458b-a4d3-5c5c5efbd1e9 service nova] [instance: 99f22198-1a65-4d0d-b665-90c7063dbdb9] Received event network-vif-deleted-55cb0499-3e6e-42ef-bd75-edafccb32e03 {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1859.910032] env[62519]: INFO nova.compute.manager [req-e02b5fd2-d3e2-458d-9248-b2c7d116ca2a req-eb22218a-cbb8-458b-a4d3-5c5c5efbd1e9 service nova] [instance: 99f22198-1a65-4d0d-b665-90c7063dbdb9] Neutron deleted interface 55cb0499-3e6e-42ef-bd75-edafccb32e03; detaching it from the instance and deleting it from the info cache [ 1859.910227] env[62519]: DEBUG nova.network.neutron [req-e02b5fd2-d3e2-458d-9248-b2c7d116ca2a req-eb22218a-cbb8-458b-a4d3-5c5c5efbd1e9 service nova] [instance: 99f22198-1a65-4d0d-b665-90c7063dbdb9] Updating instance_info_cache with network_info: [] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1859.911430] env[62519]: DEBUG oslo_concurrency.lockutils [None req-8b392ec4-9914-40da-a936-c7e4985c1427 tempest-AttachInterfacesUnderV243Test-2090758485 tempest-AttachInterfacesUnderV243Test-2090758485-project-member] Acquired lock "refresh_cache-9f71845a-e80c-4822-b3de-717f1d83bc49" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1860.003678] env[62519]: DEBUG oslo_vmware.api [None req-2e8f93fa-12d9-4bc8-a7b4-b29377242bb0 
tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] Task: {'id': task-1803142, 'name': ReconfigVM_Task, 'duration_secs': 0.566067} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1860.006445] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-2e8f93fa-12d9-4bc8-a7b4-b29377242bb0 tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] [instance: 70abb2e0-1ff2-49dd-b40f-9cac244a249e] Reconfigured VM instance instance-00000065 to attach disk [datastore1] 70abb2e0-1ff2-49dd-b40f-9cac244a249e/70abb2e0-1ff2-49dd-b40f-9cac244a249e.vmdk or device None with type sparse {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1860.008220] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-e8d4f6b7-f443-41d7-a634-ef4f465e73db {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1860.014917] env[62519]: DEBUG oslo_vmware.api [None req-2e8f93fa-12d9-4bc8-a7b4-b29377242bb0 tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] Waiting for the task: (returnval){ [ 1860.014917] env[62519]: value = "task-1803143" [ 1860.014917] env[62519]: _type = "Task" [ 1860.014917] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1860.026863] env[62519]: DEBUG oslo_vmware.api [None req-2e8f93fa-12d9-4bc8-a7b4-b29377242bb0 tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] Task: {'id': task-1803143, 'name': Rename_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1860.031579] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c574e22-292f-4042-845f-e85179433ec0 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1860.039653] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b1b193f-cf74-470a-b933-036dd7607471 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1860.074421] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-539f2710-1276-41dd-86d1-529e04591240 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1860.083094] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c65268a-4710-4787-a9ff-179c6b44cd90 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1860.100158] env[62519]: DEBUG nova.compute.provider_tree [None req-ec833f3f-01a3-48fa-bb0f-21cdc93c5d7c tempest-ServersTestManualDisk-231377638 tempest-ServersTestManualDisk-231377638-project-member] Inventory has not changed in ProviderTree for provider: f8ca0d98-9158-4b85-ae0e-b106f966dd44 {{(pid=62519) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1860.233599] env[62519]: DEBUG oslo_vmware.api [None req-c15f2384-59b6-42a1-b31c-4c8a30f2f7c2 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]5265b5c2-1023-c4a6-350c-653781886bce, 'name': SearchDatastore_Task, 'duration_secs': 0.017129} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1860.234171] env[62519]: DEBUG oslo_concurrency.lockutils [None req-c15f2384-59b6-42a1-b31c-4c8a30f2f7c2 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Releasing lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1860.234583] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-c15f2384-59b6-42a1-b31c-4c8a30f2f7c2 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk to [datastore1] 9327a897-8e4f-4c59-952e-aecfac4028e0/9327a897-8e4f-4c59-952e-aecfac4028e0.vmdk {{(pid=62519) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1860.234997] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-39ee3dbf-dd1f-45cf-935f-be7d85b9f73b {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1860.245200] env[62519]: DEBUG oslo_vmware.api [None req-c15f2384-59b6-42a1-b31c-4c8a30f2f7c2 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Waiting for the task: (returnval){ [ 1860.245200] env[62519]: value = "task-1803144" [ 1860.245200] env[62519]: _type = "Task" [ 1860.245200] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1860.255601] env[62519]: DEBUG oslo_vmware.api [None req-c15f2384-59b6-42a1-b31c-4c8a30f2f7c2 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Task: {'id': task-1803144, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1860.415481] env[62519]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-3d7a6fe2-d725-4eb4-89cd-d32fefe143b4 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1860.431272] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9068b7b-ed48-43ad-a04a-f3d0850d645d {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1860.483188] env[62519]: DEBUG nova.compute.manager [req-e02b5fd2-d3e2-458d-9248-b2c7d116ca2a req-eb22218a-cbb8-458b-a4d3-5c5c5efbd1e9 service nova] [instance: 99f22198-1a65-4d0d-b665-90c7063dbdb9] Detach interface failed, port_id=55cb0499-3e6e-42ef-bd75-edafccb32e03, reason: Instance 99f22198-1a65-4d0d-b665-90c7063dbdb9 could not be found. {{(pid=62519) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11455}} [ 1860.527608] env[62519]: DEBUG oslo_vmware.api [None req-2e8f93fa-12d9-4bc8-a7b4-b29377242bb0 tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] Task: {'id': task-1803143, 'name': Rename_Task} progress is 99%. 
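The CopyVirtualDisk entries show the image-cache layout used here: each image is kept once per datastore under devstack-image-cache_base/<image-uuid>/<image-uuid>.vmdk and copied from there into a per-instance folder named after the instance UUID. The two helpers below are hypothetical, written only to spell out that naming convention with the UUIDs taken from the log.

    def cached_image_vmdk(datastore, image_id):
        # e.g. "[datastore1] devstack-image-cache_base/<image-uuid>/<image-uuid>.vmdk"
        return "[%s] devstack-image-cache_base/%s/%s.vmdk" % (datastore, image_id, image_id)

    def instance_root_vmdk(datastore, instance_uuid):
        # e.g. "[datastore1] <instance-uuid>/<instance-uuid>.vmdk"
        return "[%s] %s/%s.vmdk" % (datastore, instance_uuid, instance_uuid)

    src = cached_image_vmdk("datastore1", "15793716-f1d9-4a86-9030-717adf498693")
    dst = instance_root_vmdk("datastore1", "9327a897-8e4f-4c59-952e-aecfac4028e0")
    print(src, "->", dst)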
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1860.604911] env[62519]: DEBUG nova.scheduler.client.report [None req-ec833f3f-01a3-48fa-bb0f-21cdc93c5d7c tempest-ServersTestManualDisk-231377638 tempest-ServersTestManualDisk-231377638-project-member] Inventory has not changed for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 157, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1860.636826] env[62519]: DEBUG nova.network.neutron [None req-8b392ec4-9914-40da-a936-c7e4985c1427 tempest-AttachInterfacesUnderV243Test-2090758485 tempest-AttachInterfacesUnderV243Test-2090758485-project-member] [instance: 9f71845a-e80c-4822-b3de-717f1d83bc49] Building network info cache for instance {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1860.730747] env[62519]: DEBUG nova.compute.manager [req-6c573f76-646f-41a1-8e46-70236583d681 req-74a5c945-e3e8-461f-a1fa-82fbf6b3d8db service nova] [instance: 9f71845a-e80c-4822-b3de-717f1d83bc49] Received event network-changed-89b7045e-9b9a-45f2-b8e7-e5d063bd1605 {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1860.731273] env[62519]: DEBUG nova.compute.manager [req-6c573f76-646f-41a1-8e46-70236583d681 req-74a5c945-e3e8-461f-a1fa-82fbf6b3d8db service nova] [instance: 9f71845a-e80c-4822-b3de-717f1d83bc49] Refreshing instance network info cache due to event network-changed-89b7045e-9b9a-45f2-b8e7-e5d063bd1605. {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11628}} [ 1860.731273] env[62519]: DEBUG oslo_concurrency.lockutils [req-6c573f76-646f-41a1-8e46-70236583d681 req-74a5c945-e3e8-461f-a1fa-82fbf6b3d8db service nova] Acquiring lock "refresh_cache-9f71845a-e80c-4822-b3de-717f1d83bc49" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1860.758785] env[62519]: DEBUG oslo_vmware.api [None req-c15f2384-59b6-42a1-b31c-4c8a30f2f7c2 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Task: {'id': task-1803144, 'name': CopyVirtualDisk_Task} progress is 25%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1860.791131] env[62519]: DEBUG nova.network.neutron [None req-570add47-f143-4e78-8238-e22c3295b4d0 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] [instance: eb5de0a4-0af3-4731-ab30-3ae3d72207a7] Updating instance_info_cache with network_info: [{"id": "98c5e48e-5515-4c54-af43-86a9b283477d", "address": "fa:16:3e:72:79:66", "network": {"id": "f9454e9e-4102-407f-acc0-7afee44ec876", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-684678313-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "683d60927fdf424386ffcfaa344a7af6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "11b669be-fb26-4ef8-bdb6-c77ab9d06daf", "external-id": "nsx-vlan-transportzone-633", "segmentation_id": 633, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap98c5e48e-55", "ovs_interfaceid": "98c5e48e-5515-4c54-af43-86a9b283477d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1861.029061] env[62519]: DEBUG oslo_vmware.api [None req-2e8f93fa-12d9-4bc8-a7b4-b29377242bb0 tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] Task: {'id': task-1803143, 'name': Rename_Task} progress is 99%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1861.116154] env[62519]: DEBUG oslo_concurrency.lockutils [None req-ec833f3f-01a3-48fa-bb0f-21cdc93c5d7c tempest-ServersTestManualDisk-231377638 tempest-ServersTestManualDisk-231377638-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.842s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1861.119841] env[62519]: DEBUG oslo_concurrency.lockutils [None req-c14e9351-9053-4f30-bc7f-20b46656893b tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 3.124s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1861.120238] env[62519]: DEBUG nova.objects.instance [None req-c14e9351-9053-4f30-bc7f-20b46656893b tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] Lazy-loading 'resources' on Instance uuid 99f22198-1a65-4d0d-b665-90c7063dbdb9 {{(pid=62519) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1861.149163] env[62519]: INFO nova.scheduler.client.report [None req-ec833f3f-01a3-48fa-bb0f-21cdc93c5d7c tempest-ServersTestManualDisk-231377638 tempest-ServersTestManualDisk-231377638-project-member] Deleted allocations for instance 0dc95d5e-270c-44cf-b13a-d9ecd0cc3b17 [ 1861.259229] env[62519]: DEBUG oslo_vmware.api [None req-c15f2384-59b6-42a1-b31c-4c8a30f2f7c2 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Task: {'id': task-1803144, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.706194} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1861.259775] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-c15f2384-59b6-42a1-b31c-4c8a30f2f7c2 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk to [datastore1] 9327a897-8e4f-4c59-952e-aecfac4028e0/9327a897-8e4f-4c59-952e-aecfac4028e0.vmdk {{(pid=62519) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1861.260684] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-c15f2384-59b6-42a1-b31c-4c8a30f2f7c2 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] [instance: 9327a897-8e4f-4c59-952e-aecfac4028e0] Extending root virtual disk to 1048576 {{(pid=62519) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1861.261164] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-75d92a32-1206-4888-8a2d-acd7fd7cb22e {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1861.268653] env[62519]: DEBUG oslo_vmware.api [None req-c15f2384-59b6-42a1-b31c-4c8a30f2f7c2 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Waiting for the task: (returnval){ [ 1861.268653] env[62519]: value = "task-1803146" [ 1861.268653] env[62519]: _type = "Task" [ 1861.268653] env[62519]: } to complete. 
{{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1861.277572] env[62519]: DEBUG oslo_vmware.api [None req-c15f2384-59b6-42a1-b31c-4c8a30f2f7c2 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Task: {'id': task-1803146, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1861.294436] env[62519]: DEBUG oslo_concurrency.lockutils [None req-570add47-f143-4e78-8238-e22c3295b4d0 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Releasing lock "refresh_cache-eb5de0a4-0af3-4731-ab30-3ae3d72207a7" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1861.533507] env[62519]: DEBUG oslo_vmware.api [None req-2e8f93fa-12d9-4bc8-a7b4-b29377242bb0 tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] Task: {'id': task-1803143, 'name': Rename_Task, 'duration_secs': 1.226289} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1861.534088] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-2e8f93fa-12d9-4bc8-a7b4-b29377242bb0 tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] [instance: 70abb2e0-1ff2-49dd-b40f-9cac244a249e] Powering on the VM {{(pid=62519) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1861.534088] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-537f6c63-008e-47c3-b143-22bd4f449d0b {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1861.545801] env[62519]: DEBUG oslo_vmware.api [None req-2e8f93fa-12d9-4bc8-a7b4-b29377242bb0 tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] Waiting for the task: (returnval){ [ 1861.545801] env[62519]: value = "task-1803147" [ 1861.545801] env[62519]: _type = "Task" [ 1861.545801] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1861.556341] env[62519]: DEBUG oslo_vmware.api [None req-2e8f93fa-12d9-4bc8-a7b4-b29377242bb0 tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] Task: {'id': task-1803147, 'name': PowerOnVM_Task} progress is 0%. 
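Read together, the tasks for instance 70abb2e0-1ff2-49dd-b40f-9cac244a249e trace the vmwareapi spawn sequence from cached image to powered-on VM. The list below is an annotation reconstructed from the surrounding entries (task ids copied from the log), not Nova code.

    # Spawn sequence for instance 70abb2e0-1ff2-49dd-b40f-9cac244a249e as it
    # appears in the log.
    SPAWN_STEPS = [
        ("task-1803137", "CopyVirtualDisk_Task",   "image cache -> instance folder"),
        ("task-1803140", "ExtendVirtualDisk_Task", "grow the root disk"),
        ("task-1803142", "ReconfigVM_Task",        "attach the copied vmdk"),
        ("task-1803143", "Rename_Task",            "set the final VM name"),
        ("task-1803147", "PowerOnVM_Task",         "power the instance on"),
    ]

    for task_id, task_name, purpose in SPAWN_STEPS:
        print(task_id, task_name, purpose)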
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1861.664738] env[62519]: DEBUG oslo_concurrency.lockutils [None req-ec833f3f-01a3-48fa-bb0f-21cdc93c5d7c tempest-ServersTestManualDisk-231377638 tempest-ServersTestManualDisk-231377638-project-member] Lock "0dc95d5e-270c-44cf-b13a-d9ecd0cc3b17" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 10.373s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1861.707976] env[62519]: DEBUG nova.network.neutron [None req-8b392ec4-9914-40da-a936-c7e4985c1427 tempest-AttachInterfacesUnderV243Test-2090758485 tempest-AttachInterfacesUnderV243Test-2090758485-project-member] [instance: 9f71845a-e80c-4822-b3de-717f1d83bc49] Updating instance_info_cache with network_info: [{"id": "89b7045e-9b9a-45f2-b8e7-e5d063bd1605", "address": "fa:16:3e:6d:32:a1", "network": {"id": "65b31600-7594-4ce0-8103-b44b57a6db75", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-145323623-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.180", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "17c3617401cc4be0bbb6851dba631e98", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a407774d-9c2a-411d-9d6f-9ca733b97f3f", "external-id": "nsx-vlan-transportzone-710", "segmentation_id": 710, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap89b7045e-9b", "ovs_interfaceid": "89b7045e-9b9a-45f2-b8e7-e5d063bd1605", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1861.782356] env[62519]: DEBUG oslo_vmware.api [None req-c15f2384-59b6-42a1-b31c-4c8a30f2f7c2 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Task: {'id': task-1803146, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1861.887521] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7fcf5361-41ba-44a5-996a-0171808340b7 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1861.896180] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-453b87e7-0c4a-47a5-a14e-eda1cd0a0462 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1861.931824] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec3150b2-e00c-44aa-b507-60e61bc033e5 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1861.942235] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0a26288-0a1d-4b8f-8367-91dbb837d383 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1861.958437] env[62519]: DEBUG nova.compute.provider_tree [None req-c14e9351-9053-4f30-bc7f-20b46656893b tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] Inventory has not changed in ProviderTree for provider: f8ca0d98-9158-4b85-ae0e-b106f966dd44 {{(pid=62519) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1862.059860] env[62519]: DEBUG oslo_vmware.api [None req-2e8f93fa-12d9-4bc8-a7b4-b29377242bb0 tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] Task: {'id': task-1803147, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1862.213975] env[62519]: DEBUG oslo_concurrency.lockutils [None req-8b392ec4-9914-40da-a936-c7e4985c1427 tempest-AttachInterfacesUnderV243Test-2090758485 tempest-AttachInterfacesUnderV243Test-2090758485-project-member] Releasing lock "refresh_cache-9f71845a-e80c-4822-b3de-717f1d83bc49" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1862.214350] env[62519]: DEBUG nova.compute.manager [None req-8b392ec4-9914-40da-a936-c7e4985c1427 tempest-AttachInterfacesUnderV243Test-2090758485 tempest-AttachInterfacesUnderV243Test-2090758485-project-member] [instance: 9f71845a-e80c-4822-b3de-717f1d83bc49] Inject network info {{(pid=62519) _inject_network_info /opt/stack/nova/nova/compute/manager.py:7696}} [ 1862.214591] env[62519]: DEBUG nova.compute.manager [None req-8b392ec4-9914-40da-a936-c7e4985c1427 tempest-AttachInterfacesUnderV243Test-2090758485 tempest-AttachInterfacesUnderV243Test-2090758485-project-member] [instance: 9f71845a-e80c-4822-b3de-717f1d83bc49] network_info to inject: |[{"id": "89b7045e-9b9a-45f2-b8e7-e5d063bd1605", "address": "fa:16:3e:6d:32:a1", "network": {"id": "65b31600-7594-4ce0-8103-b44b57a6db75", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-145323623-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.180", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "17c3617401cc4be0bbb6851dba631e98", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a407774d-9c2a-411d-9d6f-9ca733b97f3f", "external-id": "nsx-vlan-transportzone-710", "segmentation_id": 710, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap89b7045e-9b", "ovs_interfaceid": "89b7045e-9b9a-45f2-b8e7-e5d063bd1605", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62519) _inject_network_info /opt/stack/nova/nova/compute/manager.py:7697}} [ 1862.219626] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-8b392ec4-9914-40da-a936-c7e4985c1427 tempest-AttachInterfacesUnderV243Test-2090758485 tempest-AttachInterfacesUnderV243Test-2090758485-project-member] [instance: 9f71845a-e80c-4822-b3de-717f1d83bc49] Reconfiguring VM instance to set the machine id {{(pid=62519) _set_machine_id /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1796}} [ 1862.219836] env[62519]: DEBUG oslo_concurrency.lockutils [req-6c573f76-646f-41a1-8e46-70236583d681 req-74a5c945-e3e8-461f-a1fa-82fbf6b3d8db service nova] Acquired lock "refresh_cache-9f71845a-e80c-4822-b3de-717f1d83bc49" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1862.220027] env[62519]: DEBUG nova.network.neutron [req-6c573f76-646f-41a1-8e46-70236583d681 req-74a5c945-e3e8-461f-a1fa-82fbf6b3d8db service nova] [instance: 9f71845a-e80c-4822-b3de-717f1d83bc49] Refreshing network info cache for port 89b7045e-9b9a-45f2-b8e7-e5d063bd1605 {{(pid=62519) _get_instance_nw_info 
/opt/stack/nova/nova/network/neutron.py:2064}} [ 1862.221361] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ecf1a530-f6f4-418c-83a9-cd8eaf95e811 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1862.244439] env[62519]: DEBUG oslo_vmware.api [None req-8b392ec4-9914-40da-a936-c7e4985c1427 tempest-AttachInterfacesUnderV243Test-2090758485 tempest-AttachInterfacesUnderV243Test-2090758485-project-member] Waiting for the task: (returnval){ [ 1862.244439] env[62519]: value = "task-1803148" [ 1862.244439] env[62519]: _type = "Task" [ 1862.244439] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1862.255187] env[62519]: DEBUG oslo_vmware.api [None req-8b392ec4-9914-40da-a936-c7e4985c1427 tempest-AttachInterfacesUnderV243Test-2090758485 tempest-AttachInterfacesUnderV243Test-2090758485-project-member] Task: {'id': task-1803148, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1862.279997] env[62519]: DEBUG oslo_vmware.api [None req-c15f2384-59b6-42a1-b31c-4c8a30f2f7c2 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Task: {'id': task-1803146, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1862.462946] env[62519]: DEBUG nova.scheduler.client.report [None req-c14e9351-9053-4f30-bc7f-20b46656893b tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] Inventory has not changed for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 157, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1862.559869] env[62519]: DEBUG oslo_vmware.api [None req-2e8f93fa-12d9-4bc8-a7b4-b29377242bb0 tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] Task: {'id': task-1803147, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1862.756659] env[62519]: DEBUG oslo_vmware.api [None req-8b392ec4-9914-40da-a936-c7e4985c1427 tempest-AttachInterfacesUnderV243Test-2090758485 tempest-AttachInterfacesUnderV243Test-2090758485-project-member] Task: {'id': task-1803148, 'name': ReconfigVM_Task, 'duration_secs': 0.171364} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1862.757325] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-8b392ec4-9914-40da-a936-c7e4985c1427 tempest-AttachInterfacesUnderV243Test-2090758485 tempest-AttachInterfacesUnderV243Test-2090758485-project-member] [instance: 9f71845a-e80c-4822-b3de-717f1d83bc49] Reconfigured VM instance to set the machine id {{(pid=62519) _set_machine_id /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1799}} [ 1862.786864] env[62519]: DEBUG oslo_vmware.api [None req-c15f2384-59b6-42a1-b31c-4c8a30f2f7c2 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Task: {'id': task-1803146, 'name': ExtendVirtualDisk_Task, 'duration_secs': 1.107453} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1862.787360] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-c15f2384-59b6-42a1-b31c-4c8a30f2f7c2 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] [instance: 9327a897-8e4f-4c59-952e-aecfac4028e0] Extended root virtual disk {{(pid=62519) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1862.788704] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e12f4b2-1c74-4129-99b3-4430f937a760 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1862.818071] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-c15f2384-59b6-42a1-b31c-4c8a30f2f7c2 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] [instance: 9327a897-8e4f-4c59-952e-aecfac4028e0] Reconfiguring VM instance instance-00000066 to attach disk [datastore1] 9327a897-8e4f-4c59-952e-aecfac4028e0/9327a897-8e4f-4c59-952e-aecfac4028e0.vmdk or device None with type sparse {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1862.819365] env[62519]: DEBUG nova.network.neutron [req-6c573f76-646f-41a1-8e46-70236583d681 req-74a5c945-e3e8-461f-a1fa-82fbf6b3d8db service nova] [instance: 9f71845a-e80c-4822-b3de-717f1d83bc49] Updated VIF entry in instance network info cache for port 89b7045e-9b9a-45f2-b8e7-e5d063bd1605. 
{{(pid=62519) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1862.819821] env[62519]: DEBUG nova.network.neutron [req-6c573f76-646f-41a1-8e46-70236583d681 req-74a5c945-e3e8-461f-a1fa-82fbf6b3d8db service nova] [instance: 9f71845a-e80c-4822-b3de-717f1d83bc49] Updating instance_info_cache with network_info: [{"id": "89b7045e-9b9a-45f2-b8e7-e5d063bd1605", "address": "fa:16:3e:6d:32:a1", "network": {"id": "65b31600-7594-4ce0-8103-b44b57a6db75", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-145323623-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.180", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "17c3617401cc4be0bbb6851dba631e98", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a407774d-9c2a-411d-9d6f-9ca733b97f3f", "external-id": "nsx-vlan-transportzone-710", "segmentation_id": 710, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap89b7045e-9b", "ovs_interfaceid": "89b7045e-9b9a-45f2-b8e7-e5d063bd1605", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1862.823418] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6f7c4385-6d0f-44db-a9da-1acd5f5deeca {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1862.840684] env[62519]: DEBUG oslo_concurrency.lockutils [req-6c573f76-646f-41a1-8e46-70236583d681 req-74a5c945-e3e8-461f-a1fa-82fbf6b3d8db service nova] Releasing lock "refresh_cache-9f71845a-e80c-4822-b3de-717f1d83bc49" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1862.841736] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cee13118-8c15-4ff8-9652-954b4f11ad07 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1862.866365] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-570add47-f143-4e78-8238-e22c3295b4d0 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] [instance: eb5de0a4-0af3-4731-ab30-3ae3d72207a7] Updating instance 'eb5de0a4-0af3-4731-ab30-3ae3d72207a7' progress to 0 {{(pid=62519) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1862.875672] env[62519]: DEBUG oslo_vmware.api [None req-c15f2384-59b6-42a1-b31c-4c8a30f2f7c2 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Waiting for the task: (returnval){ [ 1862.875672] env[62519]: value = "task-1803150" [ 1862.875672] env[62519]: _type = "Task" [ 1862.875672] env[62519]: } to complete. 
{{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1862.884172] env[62519]: DEBUG oslo_vmware.api [None req-c15f2384-59b6-42a1-b31c-4c8a30f2f7c2 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Task: {'id': task-1803150, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1862.960235] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2d7a72a4-dcd0-40bd-adb4-9b6a093c2577 tempest-AttachInterfacesUnderV243Test-2090758485 tempest-AttachInterfacesUnderV243Test-2090758485-project-member] Acquiring lock "9f71845a-e80c-4822-b3de-717f1d83bc49" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1862.960542] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2d7a72a4-dcd0-40bd-adb4-9b6a093c2577 tempest-AttachInterfacesUnderV243Test-2090758485 tempest-AttachInterfacesUnderV243Test-2090758485-project-member] Lock "9f71845a-e80c-4822-b3de-717f1d83bc49" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1862.960714] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2d7a72a4-dcd0-40bd-adb4-9b6a093c2577 tempest-AttachInterfacesUnderV243Test-2090758485 tempest-AttachInterfacesUnderV243Test-2090758485-project-member] Acquiring lock "9f71845a-e80c-4822-b3de-717f1d83bc49-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1862.960886] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2d7a72a4-dcd0-40bd-adb4-9b6a093c2577 tempest-AttachInterfacesUnderV243Test-2090758485 tempest-AttachInterfacesUnderV243Test-2090758485-project-member] Lock "9f71845a-e80c-4822-b3de-717f1d83bc49-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1862.961073] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2d7a72a4-dcd0-40bd-adb4-9b6a093c2577 tempest-AttachInterfacesUnderV243Test-2090758485 tempest-AttachInterfacesUnderV243Test-2090758485-project-member] Lock "9f71845a-e80c-4822-b3de-717f1d83bc49-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1862.967027] env[62519]: INFO nova.compute.manager [None req-2d7a72a4-dcd0-40bd-adb4-9b6a093c2577 tempest-AttachInterfacesUnderV243Test-2090758485 tempest-AttachInterfacesUnderV243Test-2090758485-project-member] [instance: 9f71845a-e80c-4822-b3de-717f1d83bc49] Terminating instance [ 1862.968995] env[62519]: DEBUG oslo_concurrency.lockutils [None req-c14e9351-9053-4f30-bc7f-20b46656893b tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.849s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 
1862.994078] env[62519]: INFO nova.scheduler.client.report [None req-c14e9351-9053-4f30-bc7f-20b46656893b tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] Deleted allocations for instance 99f22198-1a65-4d0d-b665-90c7063dbdb9 [ 1863.058771] env[62519]: DEBUG oslo_vmware.api [None req-2e8f93fa-12d9-4bc8-a7b4-b29377242bb0 tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] Task: {'id': task-1803147, 'name': PowerOnVM_Task, 'duration_secs': 1.291945} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1863.060087] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-2e8f93fa-12d9-4bc8-a7b4-b29377242bb0 tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] [instance: 70abb2e0-1ff2-49dd-b40f-9cac244a249e] Powered on the VM {{(pid=62519) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1863.060330] env[62519]: INFO nova.compute.manager [None req-2e8f93fa-12d9-4bc8-a7b4-b29377242bb0 tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] [instance: 70abb2e0-1ff2-49dd-b40f-9cac244a249e] Took 10.59 seconds to spawn the instance on the hypervisor. [ 1863.060473] env[62519]: DEBUG nova.compute.manager [None req-2e8f93fa-12d9-4bc8-a7b4-b29377242bb0 tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] [instance: 70abb2e0-1ff2-49dd-b40f-9cac244a249e] Checking state {{(pid=62519) _get_power_state /opt/stack/nova/nova/compute/manager.py:1799}} [ 1863.061517] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7dd9f8f9-8238-45f2-b30c-d029d48dbd9b {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1863.155522] env[62519]: DEBUG oslo_concurrency.lockutils [None req-a5caa4da-9d7e-47d4-bf32-0295974b7632 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Acquiring lock "618a1db6-4056-4380-b5df-395ac14165a7" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1863.155772] env[62519]: DEBUG oslo_concurrency.lockutils [None req-a5caa4da-9d7e-47d4-bf32-0295974b7632 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Lock "618a1db6-4056-4380-b5df-395ac14165a7" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1863.375028] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-570add47-f143-4e78-8238-e22c3295b4d0 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] [instance: eb5de0a4-0af3-4731-ab30-3ae3d72207a7] Powering off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1863.376361] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-01c55c69-d38a-4292-9732-915ac8464a13 {{(pid=62519) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1863.393866] env[62519]: DEBUG oslo_vmware.api [None req-c15f2384-59b6-42a1-b31c-4c8a30f2f7c2 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Task: {'id': task-1803150, 'name': ReconfigVM_Task, 'duration_secs': 0.321201} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1863.395524] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-c15f2384-59b6-42a1-b31c-4c8a30f2f7c2 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] [instance: 9327a897-8e4f-4c59-952e-aecfac4028e0] Reconfigured VM instance instance-00000066 to attach disk [datastore1] 9327a897-8e4f-4c59-952e-aecfac4028e0/9327a897-8e4f-4c59-952e-aecfac4028e0.vmdk or device None with type sparse {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1863.396365] env[62519]: DEBUG oslo_vmware.api [None req-570add47-f143-4e78-8238-e22c3295b4d0 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Waiting for the task: (returnval){ [ 1863.396365] env[62519]: value = "task-1803151" [ 1863.396365] env[62519]: _type = "Task" [ 1863.396365] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1863.396485] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-d6cfb1eb-37a4-4d12-aaab-528aba528a19 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1863.416614] env[62519]: DEBUG oslo_vmware.api [None req-570add47-f143-4e78-8238-e22c3295b4d0 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Task: {'id': task-1803151, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1863.416981] env[62519]: DEBUG oslo_vmware.api [None req-c15f2384-59b6-42a1-b31c-4c8a30f2f7c2 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Waiting for the task: (returnval){ [ 1863.416981] env[62519]: value = "task-1803152" [ 1863.416981] env[62519]: _type = "Task" [ 1863.416981] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1863.432652] env[62519]: DEBUG oslo_vmware.api [None req-c15f2384-59b6-42a1-b31c-4c8a30f2f7c2 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Task: {'id': task-1803152, 'name': Rename_Task} progress is 6%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1863.467905] env[62519]: DEBUG nova.compute.manager [None req-2d7a72a4-dcd0-40bd-adb4-9b6a093c2577 tempest-AttachInterfacesUnderV243Test-2090758485 tempest-AttachInterfacesUnderV243Test-2090758485-project-member] [instance: 9f71845a-e80c-4822-b3de-717f1d83bc49] Start destroying the instance on the hypervisor. 
{{(pid=62519) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3166}} [ 1863.468272] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-2d7a72a4-dcd0-40bd-adb4-9b6a093c2577 tempest-AttachInterfacesUnderV243Test-2090758485 tempest-AttachInterfacesUnderV243Test-2090758485-project-member] [instance: 9f71845a-e80c-4822-b3de-717f1d83bc49] Destroying instance {{(pid=62519) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1863.469563] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5fc2979f-130d-43aa-a580-41348300ebb9 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1863.481705] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-2d7a72a4-dcd0-40bd-adb4-9b6a093c2577 tempest-AttachInterfacesUnderV243Test-2090758485 tempest-AttachInterfacesUnderV243Test-2090758485-project-member] [instance: 9f71845a-e80c-4822-b3de-717f1d83bc49] Powering off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1863.481908] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-90b92eab-c95c-4ed3-8a66-303665121daa {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1863.493017] env[62519]: DEBUG oslo_vmware.api [None req-2d7a72a4-dcd0-40bd-adb4-9b6a093c2577 tempest-AttachInterfacesUnderV243Test-2090758485 tempest-AttachInterfacesUnderV243Test-2090758485-project-member] Waiting for the task: (returnval){ [ 1863.493017] env[62519]: value = "task-1803153" [ 1863.493017] env[62519]: _type = "Task" [ 1863.493017] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1863.504590] env[62519]: DEBUG oslo_concurrency.lockutils [None req-c14e9351-9053-4f30-bc7f-20b46656893b tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] Lock "99f22198-1a65-4d0d-b665-90c7063dbdb9" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 9.965s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1863.511424] env[62519]: DEBUG oslo_vmware.api [None req-2d7a72a4-dcd0-40bd-adb4-9b6a093c2577 tempest-AttachInterfacesUnderV243Test-2090758485 tempest-AttachInterfacesUnderV243Test-2090758485-project-member] Task: {'id': task-1803153, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1863.591897] env[62519]: INFO nova.compute.manager [None req-2e8f93fa-12d9-4bc8-a7b4-b29377242bb0 tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] [instance: 70abb2e0-1ff2-49dd-b40f-9cac244a249e] Took 16.86 seconds to build instance. [ 1863.658550] env[62519]: DEBUG nova.compute.manager [None req-a5caa4da-9d7e-47d4-bf32-0295974b7632 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] [instance: 618a1db6-4056-4380-b5df-395ac14165a7] Starting instance... 
{{(pid=62519) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2441}} [ 1863.912885] env[62519]: DEBUG oslo_vmware.api [None req-570add47-f143-4e78-8238-e22c3295b4d0 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Task: {'id': task-1803151, 'name': PowerOffVM_Task, 'duration_secs': 0.225177} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1863.913942] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-570add47-f143-4e78-8238-e22c3295b4d0 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] [instance: eb5de0a4-0af3-4731-ab30-3ae3d72207a7] Powered off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1863.914273] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-570add47-f143-4e78-8238-e22c3295b4d0 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] [instance: eb5de0a4-0af3-4731-ab30-3ae3d72207a7] Updating instance 'eb5de0a4-0af3-4731-ab30-3ae3d72207a7' progress to 17 {{(pid=62519) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1863.933226] env[62519]: DEBUG oslo_vmware.api [None req-c15f2384-59b6-42a1-b31c-4c8a30f2f7c2 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Task: {'id': task-1803152, 'name': Rename_Task, 'duration_secs': 0.172883} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1863.933689] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-c15f2384-59b6-42a1-b31c-4c8a30f2f7c2 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] [instance: 9327a897-8e4f-4c59-952e-aecfac4028e0] Powering on the VM {{(pid=62519) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1863.934332] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-fc4aa5f8-f1bc-470b-bc2d-68c393e3c77d {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1863.942350] env[62519]: DEBUG oslo_vmware.api [None req-c15f2384-59b6-42a1-b31c-4c8a30f2f7c2 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Waiting for the task: (returnval){ [ 1863.942350] env[62519]: value = "task-1803154" [ 1863.942350] env[62519]: _type = "Task" [ 1863.942350] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1863.953646] env[62519]: DEBUG oslo_vmware.api [None req-c15f2384-59b6-42a1-b31c-4c8a30f2f7c2 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Task: {'id': task-1803154, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1864.002334] env[62519]: DEBUG oslo_vmware.api [None req-2d7a72a4-dcd0-40bd-adb4-9b6a093c2577 tempest-AttachInterfacesUnderV243Test-2090758485 tempest-AttachInterfacesUnderV243Test-2090758485-project-member] Task: {'id': task-1803153, 'name': PowerOffVM_Task, 'duration_secs': 0.374108} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1864.002906] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-2d7a72a4-dcd0-40bd-adb4-9b6a093c2577 tempest-AttachInterfacesUnderV243Test-2090758485 tempest-AttachInterfacesUnderV243Test-2090758485-project-member] [instance: 9f71845a-e80c-4822-b3de-717f1d83bc49] Powered off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1864.002906] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-2d7a72a4-dcd0-40bd-adb4-9b6a093c2577 tempest-AttachInterfacesUnderV243Test-2090758485 tempest-AttachInterfacesUnderV243Test-2090758485-project-member] [instance: 9f71845a-e80c-4822-b3de-717f1d83bc49] Unregistering the VM {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1864.003072] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-9cc88b41-e54b-4469-8045-0eb88d3b8c0f {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1864.173626] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-2d7a72a4-dcd0-40bd-adb4-9b6a093c2577 tempest-AttachInterfacesUnderV243Test-2090758485 tempest-AttachInterfacesUnderV243Test-2090758485-project-member] [instance: 9f71845a-e80c-4822-b3de-717f1d83bc49] Unregistered the VM {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1864.173858] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-2d7a72a4-dcd0-40bd-adb4-9b6a093c2577 tempest-AttachInterfacesUnderV243Test-2090758485 tempest-AttachInterfacesUnderV243Test-2090758485-project-member] [instance: 9f71845a-e80c-4822-b3de-717f1d83bc49] Deleting contents of the VM from datastore datastore1 {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1864.174098] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-2d7a72a4-dcd0-40bd-adb4-9b6a093c2577 tempest-AttachInterfacesUnderV243Test-2090758485 tempest-AttachInterfacesUnderV243Test-2090758485-project-member] Deleting the datastore file [datastore1] 9f71845a-e80c-4822-b3de-717f1d83bc49 {{(pid=62519) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1864.174372] env[62519]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-79d43f36-6a3a-48eb-b95c-edcb15f6b01f {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1864.185554] env[62519]: DEBUG oslo_vmware.api [None req-2d7a72a4-dcd0-40bd-adb4-9b6a093c2577 tempest-AttachInterfacesUnderV243Test-2090758485 tempest-AttachInterfacesUnderV243Test-2090758485-project-member] Waiting for the task: (returnval){ [ 1864.185554] env[62519]: value = "task-1803156" [ 1864.185554] env[62519]: _type = "Task" [ 1864.185554] env[62519]: } to complete. 
{{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1864.196270] env[62519]: DEBUG oslo_concurrency.lockutils [None req-a5caa4da-9d7e-47d4-bf32-0295974b7632 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1864.196523] env[62519]: DEBUG oslo_concurrency.lockutils [None req-a5caa4da-9d7e-47d4-bf32-0295974b7632 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1864.198146] env[62519]: INFO nova.compute.claims [None req-a5caa4da-9d7e-47d4-bf32-0295974b7632 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] [instance: 618a1db6-4056-4380-b5df-395ac14165a7] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1864.204102] env[62519]: DEBUG oslo_vmware.api [None req-2d7a72a4-dcd0-40bd-adb4-9b6a093c2577 tempest-AttachInterfacesUnderV243Test-2090758485 tempest-AttachInterfacesUnderV243Test-2090758485-project-member] Task: {'id': task-1803156, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1864.223851] env[62519]: DEBUG oslo_vmware.rw_handles [None req-3166e82a-af94-4c40-a0b0-82f4779bc484 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5220744e-dbaf-95c4-4c63-6db42741fabe/disk-0.vmdk. {{(pid=62519) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1864.223937] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9a86ea0-4e4a-4c50-adbf-09020e8a919e {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1864.236416] env[62519]: DEBUG oslo_vmware.rw_handles [None req-3166e82a-af94-4c40-a0b0-82f4779bc484 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5220744e-dbaf-95c4-4c63-6db42741fabe/disk-0.vmdk is in state: ready. {{(pid=62519) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1864.236644] env[62519]: ERROR oslo_vmware.rw_handles [None req-3166e82a-af94-4c40-a0b0-82f4779bc484 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Aborting lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5220744e-dbaf-95c4-4c63-6db42741fabe/disk-0.vmdk due to incomplete transfer. 
[ 1864.237162] env[62519]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-8304aac7-3568-44ca-b257-3daf9a3f1dff {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1864.249605] env[62519]: DEBUG oslo_vmware.rw_handles [None req-3166e82a-af94-4c40-a0b0-82f4779bc484 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Closed VMDK read handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5220744e-dbaf-95c4-4c63-6db42741fabe/disk-0.vmdk. {{(pid=62519) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1864.250242] env[62519]: DEBUG nova.virt.vmwareapi.images [None req-3166e82a-af94-4c40-a0b0-82f4779bc484 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] [instance: 4a0f7975-5a07-4593-ae71-cabebdefe0fe] Uploaded image cce8cdd4-93c5-4d31-9e3f-2461175deeb2 to the Glance image server {{(pid=62519) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1864.253241] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-3166e82a-af94-4c40-a0b0-82f4779bc484 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] [instance: 4a0f7975-5a07-4593-ae71-cabebdefe0fe] Destroying the VM {{(pid=62519) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1864.254019] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-d8292945-8e7f-4ace-ad4b-4cca9a986aed {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1864.263532] env[62519]: DEBUG oslo_vmware.api [None req-3166e82a-af94-4c40-a0b0-82f4779bc484 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Waiting for the task: (returnval){ [ 1864.263532] env[62519]: value = "task-1803157" [ 1864.263532] env[62519]: _type = "Task" [ 1864.263532] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1864.276066] env[62519]: DEBUG oslo_vmware.api [None req-3166e82a-af94-4c40-a0b0-82f4779bc484 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Task: {'id': task-1803157, 'name': Destroy_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1864.430679] env[62519]: DEBUG nova.virt.hardware [None req-570add47-f143-4e78-8238-e22c3295b4d0 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T07:56:15Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=15793716-f1d9-4a86-9030-717adf498693,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1864.431770] env[62519]: DEBUG nova.virt.hardware [None req-570add47-f143-4e78-8238-e22c3295b4d0 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Flavor limits 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1864.432583] env[62519]: DEBUG nova.virt.hardware [None req-570add47-f143-4e78-8238-e22c3295b4d0 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Image limits 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1864.432583] env[62519]: DEBUG nova.virt.hardware [None req-570add47-f143-4e78-8238-e22c3295b4d0 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Flavor pref 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1864.432583] env[62519]: DEBUG nova.virt.hardware [None req-570add47-f143-4e78-8238-e22c3295b4d0 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Image pref 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1864.432750] env[62519]: DEBUG nova.virt.hardware [None req-570add47-f143-4e78-8238-e22c3295b4d0 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1864.432858] env[62519]: DEBUG nova.virt.hardware [None req-570add47-f143-4e78-8238-e22c3295b4d0 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1864.433186] env[62519]: DEBUG nova.virt.hardware [None req-570add47-f143-4e78-8238-e22c3295b4d0 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62519) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1864.433323] env[62519]: DEBUG nova.virt.hardware [None req-570add47-f143-4e78-8238-e22c3295b4d0 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Got 1 possible 
topologies {{(pid=62519) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1864.433490] env[62519]: DEBUG nova.virt.hardware [None req-570add47-f143-4e78-8238-e22c3295b4d0 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1864.433876] env[62519]: DEBUG nova.virt.hardware [None req-570add47-f143-4e78-8238-e22c3295b4d0 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1864.440533] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a5e441b2-4fe0-4c02-bb44-b57ee6f060bf {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1864.468754] env[62519]: DEBUG oslo_vmware.api [None req-c15f2384-59b6-42a1-b31c-4c8a30f2f7c2 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Task: {'id': task-1803154, 'name': PowerOnVM_Task, 'duration_secs': 0.518368} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1864.470514] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-c15f2384-59b6-42a1-b31c-4c8a30f2f7c2 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] [instance: 9327a897-8e4f-4c59-952e-aecfac4028e0] Powered on the VM {{(pid=62519) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1864.470702] env[62519]: INFO nova.compute.manager [None req-c15f2384-59b6-42a1-b31c-4c8a30f2f7c2 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] [instance: 9327a897-8e4f-4c59-952e-aecfac4028e0] Took 9.64 seconds to spawn the instance on the hypervisor. [ 1864.471102] env[62519]: DEBUG nova.compute.manager [None req-c15f2384-59b6-42a1-b31c-4c8a30f2f7c2 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] [instance: 9327a897-8e4f-4c59-952e-aecfac4028e0] Checking state {{(pid=62519) _get_power_state /opt/stack/nova/nova/compute/manager.py:1799}} [ 1864.471522] env[62519]: DEBUG oslo_vmware.api [None req-570add47-f143-4e78-8238-e22c3295b4d0 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Waiting for the task: (returnval){ [ 1864.471522] env[62519]: value = "task-1803158" [ 1864.471522] env[62519]: _type = "Task" [ 1864.471522] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1864.473146] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c3e1171-9f7d-4f67-b7ee-10bef0488ea1 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1864.491556] env[62519]: DEBUG oslo_vmware.api [None req-570add47-f143-4e78-8238-e22c3295b4d0 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Task: {'id': task-1803158, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1864.700993] env[62519]: DEBUG oslo_vmware.api [None req-2d7a72a4-dcd0-40bd-adb4-9b6a093c2577 tempest-AttachInterfacesUnderV243Test-2090758485 tempest-AttachInterfacesUnderV243Test-2090758485-project-member] Task: {'id': task-1803156, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.148334} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1864.700993] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-2d7a72a4-dcd0-40bd-adb4-9b6a093c2577 tempest-AttachInterfacesUnderV243Test-2090758485 tempest-AttachInterfacesUnderV243Test-2090758485-project-member] Deleted the datastore file {{(pid=62519) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1864.700993] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-2d7a72a4-dcd0-40bd-adb4-9b6a093c2577 tempest-AttachInterfacesUnderV243Test-2090758485 tempest-AttachInterfacesUnderV243Test-2090758485-project-member] [instance: 9f71845a-e80c-4822-b3de-717f1d83bc49] Deleted contents of the VM from datastore datastore1 {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1864.700993] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-2d7a72a4-dcd0-40bd-adb4-9b6a093c2577 tempest-AttachInterfacesUnderV243Test-2090758485 tempest-AttachInterfacesUnderV243Test-2090758485-project-member] [instance: 9f71845a-e80c-4822-b3de-717f1d83bc49] Instance destroyed {{(pid=62519) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1864.700993] env[62519]: INFO nova.compute.manager [None req-2d7a72a4-dcd0-40bd-adb4-9b6a093c2577 tempest-AttachInterfacesUnderV243Test-2090758485 tempest-AttachInterfacesUnderV243Test-2090758485-project-member] [instance: 9f71845a-e80c-4822-b3de-717f1d83bc49] Took 1.23 seconds to destroy the instance on the hypervisor. [ 1864.700993] env[62519]: DEBUG oslo.service.loopingcall [None req-2d7a72a4-dcd0-40bd-adb4-9b6a093c2577 tempest-AttachInterfacesUnderV243Test-2090758485 tempest-AttachInterfacesUnderV243Test-2090758485-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62519) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1864.700993] env[62519]: DEBUG nova.compute.manager [-] [instance: 9f71845a-e80c-4822-b3de-717f1d83bc49] Deallocating network for instance {{(pid=62519) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2297}} [ 1864.700993] env[62519]: DEBUG nova.network.neutron [-] [instance: 9f71845a-e80c-4822-b3de-717f1d83bc49] deallocate_for_instance() {{(pid=62519) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1864.773889] env[62519]: DEBUG oslo_vmware.api [None req-3166e82a-af94-4c40-a0b0-82f4779bc484 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Task: {'id': task-1803157, 'name': Destroy_Task} progress is 33%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1864.866425] env[62519]: DEBUG oslo_concurrency.lockutils [None req-1f95801e-70c9-40ad-906a-9a7b13227dbd tempest-ServerMetadataTestJSON-420459941 tempest-ServerMetadataTestJSON-420459941-project-member] Acquiring lock "71edba06-1628-4749-8a51-5a9bcb003fda" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1864.866528] env[62519]: DEBUG oslo_concurrency.lockutils [None req-1f95801e-70c9-40ad-906a-9a7b13227dbd tempest-ServerMetadataTestJSON-420459941 tempest-ServerMetadataTestJSON-420459941-project-member] Lock "71edba06-1628-4749-8a51-5a9bcb003fda" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1865.002717] env[62519]: DEBUG oslo_vmware.api [None req-570add47-f143-4e78-8238-e22c3295b4d0 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Task: {'id': task-1803158, 'name': ReconfigVM_Task, 'duration_secs': 0.322968} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1865.006597] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-570add47-f143-4e78-8238-e22c3295b4d0 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] [instance: eb5de0a4-0af3-4731-ab30-3ae3d72207a7] Updating instance 'eb5de0a4-0af3-4731-ab30-3ae3d72207a7' progress to 33 {{(pid=62519) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1865.013529] env[62519]: INFO nova.compute.manager [None req-c15f2384-59b6-42a1-b31c-4c8a30f2f7c2 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] [instance: 9327a897-8e4f-4c59-952e-aecfac4028e0] Took 17.25 seconds to build instance. [ 1865.101225] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2e8f93fa-12d9-4bc8-a7b4-b29377242bb0 tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] Lock "70abb2e0-1ff2-49dd-b40f-9cac244a249e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 19.379s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1865.282760] env[62519]: DEBUG oslo_vmware.api [None req-3166e82a-af94-4c40-a0b0-82f4779bc484 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Task: {'id': task-1803157, 'name': Destroy_Task, 'duration_secs': 0.80992} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1865.283167] env[62519]: INFO nova.virt.vmwareapi.vm_util [None req-3166e82a-af94-4c40-a0b0-82f4779bc484 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] [instance: 4a0f7975-5a07-4593-ae71-cabebdefe0fe] Destroyed the VM [ 1865.283444] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-3166e82a-af94-4c40-a0b0-82f4779bc484 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] [instance: 4a0f7975-5a07-4593-ae71-cabebdefe0fe] Deleting Snapshot of the VM instance {{(pid=62519) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1865.283730] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-5e5c0533-d220-40d9-a697-53c0844d16bd {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1865.297340] env[62519]: DEBUG oslo_vmware.api [None req-3166e82a-af94-4c40-a0b0-82f4779bc484 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Waiting for the task: (returnval){ [ 1865.297340] env[62519]: value = "task-1803160" [ 1865.297340] env[62519]: _type = "Task" [ 1865.297340] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1865.309537] env[62519]: DEBUG oslo_vmware.api [None req-3166e82a-af94-4c40-a0b0-82f4779bc484 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Task: {'id': task-1803160, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1865.369660] env[62519]: DEBUG nova.compute.manager [None req-1f95801e-70c9-40ad-906a-9a7b13227dbd tempest-ServerMetadataTestJSON-420459941 tempest-ServerMetadataTestJSON-420459941-project-member] [instance: 71edba06-1628-4749-8a51-5a9bcb003fda] Starting instance... 
{{(pid=62519) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2441}} [ 1865.518766] env[62519]: DEBUG nova.virt.hardware [None req-570add47-f143-4e78-8238-e22c3295b4d0 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T07:56:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=15793716-f1d9-4a86-9030-717adf498693,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1865.518766] env[62519]: DEBUG nova.virt.hardware [None req-570add47-f143-4e78-8238-e22c3295b4d0 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Flavor limits 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1865.519032] env[62519]: DEBUG nova.virt.hardware [None req-570add47-f143-4e78-8238-e22c3295b4d0 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Image limits 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1865.519194] env[62519]: DEBUG nova.virt.hardware [None req-570add47-f143-4e78-8238-e22c3295b4d0 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Flavor pref 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1865.519361] env[62519]: DEBUG nova.virt.hardware [None req-570add47-f143-4e78-8238-e22c3295b4d0 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Image pref 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1865.519361] env[62519]: DEBUG nova.virt.hardware [None req-570add47-f143-4e78-8238-e22c3295b4d0 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1865.519733] env[62519]: DEBUG nova.virt.hardware [None req-570add47-f143-4e78-8238-e22c3295b4d0 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1865.519830] env[62519]: DEBUG nova.virt.hardware [None req-570add47-f143-4e78-8238-e22c3295b4d0 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62519) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1865.519992] env[62519]: DEBUG nova.virt.hardware [None req-570add47-f143-4e78-8238-e22c3295b4d0 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Got 1 possible topologies 
{{(pid=62519) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1865.520241] env[62519]: DEBUG nova.virt.hardware [None req-570add47-f143-4e78-8238-e22c3295b4d0 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1865.520473] env[62519]: DEBUG nova.virt.hardware [None req-570add47-f143-4e78-8238-e22c3295b4d0 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1865.529939] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-570add47-f143-4e78-8238-e22c3295b4d0 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] [instance: eb5de0a4-0af3-4731-ab30-3ae3d72207a7] Reconfiguring VM instance instance-00000063 to detach disk 2000 {{(pid=62519) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1865.534735] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a3d37572-3d8a-4f41-b9d8-728dfe83d683 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1865.572037] env[62519]: DEBUG oslo_vmware.api [None req-570add47-f143-4e78-8238-e22c3295b4d0 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Waiting for the task: (returnval){ [ 1865.572037] env[62519]: value = "task-1803161" [ 1865.572037] env[62519]: _type = "Task" [ 1865.572037] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1865.582457] env[62519]: DEBUG oslo_vmware.api [None req-570add47-f143-4e78-8238-e22c3295b4d0 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Task: {'id': task-1803161, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1865.639600] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7cf8e629-ca09-485d-989c-bf5fc143c66d {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1865.649668] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8e997fc-b32f-409f-84e3-2590ae56ade6 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1865.695916] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4696b915-03d5-4b75-b010-65d48accd815 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1865.708937] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-046f3fc0-eefc-47a6-b90f-e80e56a34bac {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1865.719641] env[62519]: DEBUG nova.compute.manager [None req-3d21eaea-4564-4ef0-83e7-f8dc5c87fa6d tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] [instance: 70abb2e0-1ff2-49dd-b40f-9cac244a249e] Checking state {{(pid=62519) _get_power_state /opt/stack/nova/nova/compute/manager.py:1799}} [ 1865.719641] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55b8f356-2764-47e6-967d-4a2a344dc386 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1865.739068] env[62519]: DEBUG nova.compute.provider_tree [None req-a5caa4da-9d7e-47d4-bf32-0295974b7632 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Inventory has not changed in ProviderTree for provider: f8ca0d98-9158-4b85-ae0e-b106f966dd44 {{(pid=62519) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1865.743055] env[62519]: DEBUG nova.compute.manager [req-6b1f7807-779b-42da-b5cb-3d4d9e9171bd req-249e455c-38cd-465b-9501-6d5642013fad service nova] [instance: 9f71845a-e80c-4822-b3de-717f1d83bc49] Received event network-vif-deleted-89b7045e-9b9a-45f2-b8e7-e5d063bd1605 {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1865.743422] env[62519]: INFO nova.compute.manager [req-6b1f7807-779b-42da-b5cb-3d4d9e9171bd req-249e455c-38cd-465b-9501-6d5642013fad service nova] [instance: 9f71845a-e80c-4822-b3de-717f1d83bc49] Neutron deleted interface 89b7045e-9b9a-45f2-b8e7-e5d063bd1605; detaching it from the instance and deleting it from the info cache [ 1865.744942] env[62519]: DEBUG nova.network.neutron [req-6b1f7807-779b-42da-b5cb-3d4d9e9171bd req-249e455c-38cd-465b-9501-6d5642013fad service nova] [instance: 9f71845a-e80c-4822-b3de-717f1d83bc49] Updating instance_info_cache with network_info: [] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1865.810753] env[62519]: DEBUG oslo_vmware.api [None req-3166e82a-af94-4c40-a0b0-82f4779bc484 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Task: {'id': task-1803160, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1865.899911] env[62519]: DEBUG oslo_concurrency.lockutils [None req-1f95801e-70c9-40ad-906a-9a7b13227dbd tempest-ServerMetadataTestJSON-420459941 tempest-ServerMetadataTestJSON-420459941-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1866.059070] env[62519]: DEBUG oslo_concurrency.lockutils [None req-c15f2384-59b6-42a1-b31c-4c8a30f2f7c2 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Lock "9327a897-8e4f-4c59-952e-aecfac4028e0" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 19.300s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1866.059070] env[62519]: DEBUG nova.network.neutron [-] [instance: 9f71845a-e80c-4822-b3de-717f1d83bc49] Updating instance_info_cache with network_info: [] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1866.083397] env[62519]: DEBUG oslo_vmware.api [None req-570add47-f143-4e78-8238-e22c3295b4d0 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Task: {'id': task-1803161, 'name': ReconfigVM_Task, 'duration_secs': 0.189547} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1866.083752] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-570add47-f143-4e78-8238-e22c3295b4d0 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] [instance: eb5de0a4-0af3-4731-ab30-3ae3d72207a7] Reconfigured VM instance instance-00000063 to detach disk 2000 {{(pid=62519) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1866.084539] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0cf32083-f67e-4099-8292-82ef3d955bc4 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1866.110162] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-570add47-f143-4e78-8238-e22c3295b4d0 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] [instance: eb5de0a4-0af3-4731-ab30-3ae3d72207a7] Reconfiguring VM instance instance-00000063 to attach disk [datastore1] eb5de0a4-0af3-4731-ab30-3ae3d72207a7/eb5de0a4-0af3-4731-ab30-3ae3d72207a7.vmdk or device None with type thin {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1866.110847] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-77e3d78a-697b-4e5c-9639-98f5eeb3ab21 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1866.131206] env[62519]: DEBUG oslo_vmware.api [None req-570add47-f143-4e78-8238-e22c3295b4d0 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Waiting for the task: (returnval){ [ 1866.131206] env[62519]: value = "task-1803162" [ 1866.131206] env[62519]: _type = "Task" [ 1866.131206] env[62519]: } to complete. 
{{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1866.141186] env[62519]: DEBUG oslo_vmware.api [None req-570add47-f143-4e78-8238-e22c3295b4d0 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Task: {'id': task-1803162, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1866.245523] env[62519]: INFO nova.compute.manager [None req-3d21eaea-4564-4ef0-83e7-f8dc5c87fa6d tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] [instance: 70abb2e0-1ff2-49dd-b40f-9cac244a249e] instance snapshotting [ 1866.247383] env[62519]: DEBUG nova.scheduler.client.report [None req-a5caa4da-9d7e-47d4-bf32-0295974b7632 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Inventory has not changed for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 157, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1866.253413] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bbdbdc45-04b2-438e-9d73-9ed9e86f9e39 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1866.263028] env[62519]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-b1a35d71-4c60-41ff-8fd9-eca939b21db5 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1866.283444] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6fc4e30-5e17-4433-8d83-8082cd35a1a6 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1866.289038] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd09880a-9383-4d81-90e8-2aa033bf7a2f {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1866.314901] env[62519]: DEBUG oslo_vmware.api [None req-3166e82a-af94-4c40-a0b0-82f4779bc484 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Task: {'id': task-1803160, 'name': RemoveSnapshot_Task} progress is 65%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1866.331877] env[62519]: DEBUG nova.compute.manager [req-6b1f7807-779b-42da-b5cb-3d4d9e9171bd req-249e455c-38cd-465b-9501-6d5642013fad service nova] [instance: 9f71845a-e80c-4822-b3de-717f1d83bc49] Detach interface failed, port_id=89b7045e-9b9a-45f2-b8e7-e5d063bd1605, reason: Instance 9f71845a-e80c-4822-b3de-717f1d83bc49 could not be found. 
{{(pid=62519) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11455}} [ 1866.477965] env[62519]: DEBUG oslo_concurrency.lockutils [None req-8565f1f2-914f-4323-b0fe-767409aeefcb tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Acquiring lock "f3665f89-1747-4567-9e56-c937d4ac81da" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1866.478239] env[62519]: DEBUG oslo_concurrency.lockutils [None req-8565f1f2-914f-4323-b0fe-767409aeefcb tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Lock "f3665f89-1747-4567-9e56-c937d4ac81da" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1866.544426] env[62519]: DEBUG oslo_concurrency.lockutils [None req-4de658fb-fc0f-466b-a6c9-861a7b1ad6fd tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] Acquiring lock "fc3beaba-2ad5-4598-b562-557fdd552b39" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1866.544730] env[62519]: DEBUG oslo_concurrency.lockutils [None req-4de658fb-fc0f-466b-a6c9-861a7b1ad6fd tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] Lock "fc3beaba-2ad5-4598-b562-557fdd552b39" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1866.561877] env[62519]: INFO nova.compute.manager [-] [instance: 9f71845a-e80c-4822-b3de-717f1d83bc49] Took 1.86 seconds to deallocate network for instance. [ 1866.642937] env[62519]: DEBUG oslo_vmware.api [None req-570add47-f143-4e78-8238-e22c3295b4d0 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Task: {'id': task-1803162, 'name': ReconfigVM_Task, 'duration_secs': 0.28433} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1866.643260] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-570add47-f143-4e78-8238-e22c3295b4d0 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] [instance: eb5de0a4-0af3-4731-ab30-3ae3d72207a7] Reconfigured VM instance instance-00000063 to attach disk [datastore1] eb5de0a4-0af3-4731-ab30-3ae3d72207a7/eb5de0a4-0af3-4731-ab30-3ae3d72207a7.vmdk or device None with type thin {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1866.643538] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-570add47-f143-4e78-8238-e22c3295b4d0 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] [instance: eb5de0a4-0af3-4731-ab30-3ae3d72207a7] Updating instance 'eb5de0a4-0af3-4731-ab30-3ae3d72207a7' progress to 50 {{(pid=62519) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1866.717051] env[62519]: DEBUG oslo_concurrency.lockutils [None req-ff1af54b-4603-4ea1-8970-e122b48e647b tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Acquiring lock "9327a897-8e4f-4c59-952e-aecfac4028e0" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1866.717296] env[62519]: DEBUG oslo_concurrency.lockutils [None req-ff1af54b-4603-4ea1-8970-e122b48e647b tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Lock "9327a897-8e4f-4c59-952e-aecfac4028e0" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1866.717546] env[62519]: DEBUG oslo_concurrency.lockutils [None req-ff1af54b-4603-4ea1-8970-e122b48e647b tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Acquiring lock "9327a897-8e4f-4c59-952e-aecfac4028e0-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1866.717739] env[62519]: DEBUG oslo_concurrency.lockutils [None req-ff1af54b-4603-4ea1-8970-e122b48e647b tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Lock "9327a897-8e4f-4c59-952e-aecfac4028e0-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1866.717911] env[62519]: DEBUG oslo_concurrency.lockutils [None req-ff1af54b-4603-4ea1-8970-e122b48e647b tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Lock "9327a897-8e4f-4c59-952e-aecfac4028e0-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1866.720359] env[62519]: INFO nova.compute.manager [None req-ff1af54b-4603-4ea1-8970-e122b48e647b tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] [instance: 9327a897-8e4f-4c59-952e-aecfac4028e0] Terminating instance [ 1866.761888] 
env[62519]: DEBUG oslo_concurrency.lockutils [None req-a5caa4da-9d7e-47d4-bf32-0295974b7632 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.565s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1866.762438] env[62519]: DEBUG nova.compute.manager [None req-a5caa4da-9d7e-47d4-bf32-0295974b7632 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] [instance: 618a1db6-4056-4380-b5df-395ac14165a7] Start building networks asynchronously for instance. {{(pid=62519) _build_resources /opt/stack/nova/nova/compute/manager.py:2838}} [ 1866.765210] env[62519]: DEBUG oslo_concurrency.lockutils [None req-1f95801e-70c9-40ad-906a-9a7b13227dbd tempest-ServerMetadataTestJSON-420459941 tempest-ServerMetadataTestJSON-420459941-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.867s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1866.766589] env[62519]: INFO nova.compute.claims [None req-1f95801e-70c9-40ad-906a-9a7b13227dbd tempest-ServerMetadataTestJSON-420459941 tempest-ServerMetadataTestJSON-420459941-project-member] [instance: 71edba06-1628-4749-8a51-5a9bcb003fda] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1866.811726] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-3d21eaea-4564-4ef0-83e7-f8dc5c87fa6d tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] [instance: 70abb2e0-1ff2-49dd-b40f-9cac244a249e] Creating Snapshot of the VM instance {{(pid=62519) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1866.812045] env[62519]: DEBUG oslo_vmware.api [None req-3166e82a-af94-4c40-a0b0-82f4779bc484 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Task: {'id': task-1803160, 'name': RemoveSnapshot_Task, 'duration_secs': 1.050923} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1866.812266] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-c9484ba5-770c-4c08-b608-ed880b6f0bfa {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1866.814116] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-3166e82a-af94-4c40-a0b0-82f4779bc484 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] [instance: 4a0f7975-5a07-4593-ae71-cabebdefe0fe] Deleted Snapshot of the VM instance {{(pid=62519) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1866.814347] env[62519]: INFO nova.compute.manager [None req-3166e82a-af94-4c40-a0b0-82f4779bc484 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] [instance: 4a0f7975-5a07-4593-ae71-cabebdefe0fe] Took 15.02 seconds to snapshot the instance on the hypervisor. 
[ 1866.824184] env[62519]: DEBUG oslo_vmware.api [None req-3d21eaea-4564-4ef0-83e7-f8dc5c87fa6d tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] Waiting for the task: (returnval){ [ 1866.824184] env[62519]: value = "task-1803163" [ 1866.824184] env[62519]: _type = "Task" [ 1866.824184] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1866.835302] env[62519]: DEBUG oslo_vmware.api [None req-3d21eaea-4564-4ef0-83e7-f8dc5c87fa6d tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] Task: {'id': task-1803163, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1866.982056] env[62519]: DEBUG nova.compute.utils [None req-8565f1f2-914f-4323-b0fe-767409aeefcb tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Using /dev/sd instead of None {{(pid=62519) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1867.047423] env[62519]: DEBUG nova.compute.manager [None req-4de658fb-fc0f-466b-a6c9-861a7b1ad6fd tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] [instance: fc3beaba-2ad5-4598-b562-557fdd552b39] Starting instance... {{(pid=62519) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2441}} [ 1867.067834] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2d7a72a4-dcd0-40bd-adb4-9b6a093c2577 tempest-AttachInterfacesUnderV243Test-2090758485 tempest-AttachInterfacesUnderV243Test-2090758485-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1867.150633] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73150c73-9b3c-4753-807c-0c95e179b5e8 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1867.172260] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72680703-b534-4015-a5c3-64ba5370d277 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1867.193032] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-570add47-f143-4e78-8238-e22c3295b4d0 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] [instance: eb5de0a4-0af3-4731-ab30-3ae3d72207a7] Updating instance 'eb5de0a4-0af3-4731-ab30-3ae3d72207a7' progress to 67 {{(pid=62519) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1867.223849] env[62519]: DEBUG nova.compute.manager [None req-ff1af54b-4603-4ea1-8970-e122b48e647b tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] [instance: 9327a897-8e4f-4c59-952e-aecfac4028e0] Start destroying the instance on the hypervisor. 
{{(pid=62519) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3166}} [ 1867.224074] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-ff1af54b-4603-4ea1-8970-e122b48e647b tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] [instance: 9327a897-8e4f-4c59-952e-aecfac4028e0] Destroying instance {{(pid=62519) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1867.224965] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a07e1a38-0923-4a7c-a9f2-5e98e252b075 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1867.233503] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-ff1af54b-4603-4ea1-8970-e122b48e647b tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] [instance: 9327a897-8e4f-4c59-952e-aecfac4028e0] Powering off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1867.233765] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d99f35c4-bd8a-44c4-a7da-379a2d26f0bb {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1867.242148] env[62519]: DEBUG oslo_vmware.api [None req-ff1af54b-4603-4ea1-8970-e122b48e647b tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Waiting for the task: (returnval){ [ 1867.242148] env[62519]: value = "task-1803165" [ 1867.242148] env[62519]: _type = "Task" [ 1867.242148] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1867.252365] env[62519]: DEBUG oslo_vmware.api [None req-ff1af54b-4603-4ea1-8970-e122b48e647b tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Task: {'id': task-1803165, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1867.271553] env[62519]: DEBUG nova.compute.utils [None req-a5caa4da-9d7e-47d4-bf32-0295974b7632 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Using /dev/sd instead of None {{(pid=62519) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1867.275551] env[62519]: DEBUG nova.compute.manager [None req-a5caa4da-9d7e-47d4-bf32-0295974b7632 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] [instance: 618a1db6-4056-4380-b5df-395ac14165a7] Allocating IP information in the background. {{(pid=62519) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1989}} [ 1867.275551] env[62519]: DEBUG nova.network.neutron [None req-a5caa4da-9d7e-47d4-bf32-0295974b7632 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] [instance: 618a1db6-4056-4380-b5df-395ac14165a7] allocate_for_instance() {{(pid=62519) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1867.334775] env[62519]: DEBUG oslo_vmware.api [None req-3d21eaea-4564-4ef0-83e7-f8dc5c87fa6d tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] Task: {'id': task-1803163, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1867.336606] env[62519]: DEBUG nova.policy [None req-a5caa4da-9d7e-47d4-bf32-0295974b7632 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c586ba4158b947b0a25d1614c17ebb51', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '12977ed65a1b410a987b049e9d1dce3e', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62519) authorize /opt/stack/nova/nova/policy.py:192}} [ 1867.360737] env[62519]: DEBUG nova.compute.manager [None req-3166e82a-af94-4c40-a0b0-82f4779bc484 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] [instance: 4a0f7975-5a07-4593-ae71-cabebdefe0fe] Found 2 images (rotation: 2) {{(pid=62519) _rotate_backups /opt/stack/nova/nova/compute/manager.py:4998}} [ 1867.485770] env[62519]: DEBUG oslo_concurrency.lockutils [None req-8565f1f2-914f-4323-b0fe-767409aeefcb tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Lock "f3665f89-1747-4567-9e56-c937d4ac81da" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.007s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1867.571539] env[62519]: DEBUG oslo_concurrency.lockutils [None req-4de658fb-fc0f-466b-a6c9-861a7b1ad6fd tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1867.687439] env[62519]: DEBUG nova.network.neutron [None req-a5caa4da-9d7e-47d4-bf32-0295974b7632 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] [instance: 618a1db6-4056-4380-b5df-395ac14165a7] Successfully created port: 872d6b35-c4a4-4975-8757-3c87471ba5df {{(pid=62519) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1867.752788] env[62519]: DEBUG oslo_vmware.api [None req-ff1af54b-4603-4ea1-8970-e122b48e647b tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Task: {'id': task-1803165, 'name': PowerOffVM_Task, 'duration_secs': 0.218256} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1867.756509] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-ff1af54b-4603-4ea1-8970-e122b48e647b tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] [instance: 9327a897-8e4f-4c59-952e-aecfac4028e0] Powered off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1867.756509] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-ff1af54b-4603-4ea1-8970-e122b48e647b tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] [instance: 9327a897-8e4f-4c59-952e-aecfac4028e0] Unregistering the VM {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1867.756509] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-0aec3de5-889c-4bca-b450-ed40d0cd0009 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1867.767814] env[62519]: DEBUG nova.network.neutron [None req-570add47-f143-4e78-8238-e22c3295b4d0 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] [instance: eb5de0a4-0af3-4731-ab30-3ae3d72207a7] Port 98c5e48e-5515-4c54-af43-86a9b283477d binding to destination host cpu-1 is already ACTIVE {{(pid=62519) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 1867.776421] env[62519]: DEBUG nova.compute.manager [None req-a5caa4da-9d7e-47d4-bf32-0295974b7632 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] [instance: 618a1db6-4056-4380-b5df-395ac14165a7] Start building block device mappings for instance. {{(pid=62519) _build_resources /opt/stack/nova/nova/compute/manager.py:2873}} [ 1867.842421] env[62519]: DEBUG oslo_vmware.api [None req-3d21eaea-4564-4ef0-83e7-f8dc5c87fa6d tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] Task: {'id': task-1803163, 'name': CreateSnapshot_Task, 'duration_secs': 0.832259} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1867.846444] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-3d21eaea-4564-4ef0-83e7-f8dc5c87fa6d tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] [instance: 70abb2e0-1ff2-49dd-b40f-9cac244a249e] Created Snapshot of the VM instance {{(pid=62519) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1867.848636] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ae1ac26-2b83-42ee-af6f-f97aee0c3a21 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1867.866384] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-ff1af54b-4603-4ea1-8970-e122b48e647b tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] [instance: 9327a897-8e4f-4c59-952e-aecfac4028e0] Unregistered the VM {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1867.866705] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-ff1af54b-4603-4ea1-8970-e122b48e647b tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] [instance: 9327a897-8e4f-4c59-952e-aecfac4028e0] Deleting contents of the VM from datastore datastore1 {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1867.866963] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-ff1af54b-4603-4ea1-8970-e122b48e647b tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Deleting the datastore file [datastore1] 9327a897-8e4f-4c59-952e-aecfac4028e0 {{(pid=62519) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1867.867466] env[62519]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-7d41e104-b035-4957-82cb-14023326f513 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1867.874848] env[62519]: DEBUG oslo_vmware.api [None req-ff1af54b-4603-4ea1-8970-e122b48e647b tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Waiting for the task: (returnval){ [ 1867.874848] env[62519]: value = "task-1803167" [ 1867.874848] env[62519]: _type = "Task" [ 1867.874848] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1867.888032] env[62519]: DEBUG oslo_vmware.api [None req-ff1af54b-4603-4ea1-8970-e122b48e647b tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Task: {'id': task-1803167, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1868.109901] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95f33c0c-a529-4cbf-9dff-53f05e442be5 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1868.118864] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0aabe191-63d9-40af-aecb-957ee2460588 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1868.150874] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a3db31d-cf14-48d1-82cf-9e5e0ef04f6f {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1868.159693] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0cfa71a-4fb3-4275-b258-4bcca17339dd {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1868.174710] env[62519]: DEBUG nova.compute.provider_tree [None req-1f95801e-70c9-40ad-906a-9a7b13227dbd tempest-ServerMetadataTestJSON-420459941 tempest-ServerMetadataTestJSON-420459941-project-member] Inventory has not changed in ProviderTree for provider: f8ca0d98-9158-4b85-ae0e-b106f966dd44 {{(pid=62519) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1868.241930] env[62519]: DEBUG nova.compute.manager [None req-940b526d-cc71-4b86-8030-ffe5ea468d54 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] [instance: 4a0f7975-5a07-4593-ae71-cabebdefe0fe] Checking state {{(pid=62519) _get_power_state /opt/stack/nova/nova/compute/manager.py:1799}} [ 1868.242997] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-887981dd-40d1-4156-8563-b5a526cf522e {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1868.368569] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-3d21eaea-4564-4ef0-83e7-f8dc5c87fa6d tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] [instance: 70abb2e0-1ff2-49dd-b40f-9cac244a249e] Creating linked-clone VM from snapshot {{(pid=62519) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1868.370147] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-fe8fb021-ebfd-4261-977a-2b8e8eb2d8da {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1868.385800] env[62519]: DEBUG oslo_vmware.api [None req-ff1af54b-4603-4ea1-8970-e122b48e647b tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Task: {'id': task-1803167, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.160653} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1868.387047] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-ff1af54b-4603-4ea1-8970-e122b48e647b tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Deleted the datastore file {{(pid=62519) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1868.387321] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-ff1af54b-4603-4ea1-8970-e122b48e647b tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] [instance: 9327a897-8e4f-4c59-952e-aecfac4028e0] Deleted contents of the VM from datastore datastore1 {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1868.387429] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-ff1af54b-4603-4ea1-8970-e122b48e647b tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] [instance: 9327a897-8e4f-4c59-952e-aecfac4028e0] Instance destroyed {{(pid=62519) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1868.387614] env[62519]: INFO nova.compute.manager [None req-ff1af54b-4603-4ea1-8970-e122b48e647b tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] [instance: 9327a897-8e4f-4c59-952e-aecfac4028e0] Took 1.16 seconds to destroy the instance on the hypervisor. [ 1868.387894] env[62519]: DEBUG oslo.service.loopingcall [None req-ff1af54b-4603-4ea1-8970-e122b48e647b tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62519) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1868.388218] env[62519]: DEBUG oslo_vmware.api [None req-3d21eaea-4564-4ef0-83e7-f8dc5c87fa6d tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] Waiting for the task: (returnval){ [ 1868.388218] env[62519]: value = "task-1803168" [ 1868.388218] env[62519]: _type = "Task" [ 1868.388218] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1868.388431] env[62519]: DEBUG nova.compute.manager [-] [instance: 9327a897-8e4f-4c59-952e-aecfac4028e0] Deallocating network for instance {{(pid=62519) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2297}} [ 1868.388531] env[62519]: DEBUG nova.network.neutron [-] [instance: 9327a897-8e4f-4c59-952e-aecfac4028e0] deallocate_for_instance() {{(pid=62519) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1868.400096] env[62519]: DEBUG oslo_vmware.api [None req-3d21eaea-4564-4ef0-83e7-f8dc5c87fa6d tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] Task: {'id': task-1803168, 'name': CloneVM_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1868.580146] env[62519]: DEBUG oslo_concurrency.lockutils [None req-8565f1f2-914f-4323-b0fe-767409aeefcb tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Acquiring lock "f3665f89-1747-4567-9e56-c937d4ac81da" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1868.580739] env[62519]: DEBUG oslo_concurrency.lockutils [None req-8565f1f2-914f-4323-b0fe-767409aeefcb tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Lock "f3665f89-1747-4567-9e56-c937d4ac81da" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.001s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1868.580860] env[62519]: INFO nova.compute.manager [None req-8565f1f2-914f-4323-b0fe-767409aeefcb tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] [instance: f3665f89-1747-4567-9e56-c937d4ac81da] Attaching volume 6af3bcc3-934f-4c24-b76d-cd93f73935c8 to /dev/sdb [ 1868.620404] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed12f1ad-9a35-4dd0-bd9d-44f48beff11e {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1868.628801] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89beba6c-5123-4dd8-ba25-c370c65b77ee {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1868.645321] env[62519]: DEBUG nova.virt.block_device [None req-8565f1f2-914f-4323-b0fe-767409aeefcb tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] [instance: f3665f89-1747-4567-9e56-c937d4ac81da] Updating existing volume attachment record: 276ca037-d8c6-414d-8c52-a6adbe52f8c5 {{(pid=62519) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1868.677961] env[62519]: DEBUG nova.scheduler.client.report [None req-1f95801e-70c9-40ad-906a-9a7b13227dbd tempest-ServerMetadataTestJSON-420459941 tempest-ServerMetadataTestJSON-420459941-project-member] Inventory has not changed for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 157, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1868.704439] env[62519]: DEBUG nova.compute.manager [req-9c5c6132-205a-492f-90b7-fe77138c38ea req-41c3ca29-6de9-4d64-b279-8e047407276e service nova] [instance: 9327a897-8e4f-4c59-952e-aecfac4028e0] Received event network-vif-deleted-885622bf-cd63-46f6-ac64-de4e37a95265 {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1868.704618] env[62519]: INFO nova.compute.manager [req-9c5c6132-205a-492f-90b7-fe77138c38ea req-41c3ca29-6de9-4d64-b279-8e047407276e 
service nova] [instance: 9327a897-8e4f-4c59-952e-aecfac4028e0] Neutron deleted interface 885622bf-cd63-46f6-ac64-de4e37a95265; detaching it from the instance and deleting it from the info cache [ 1868.704753] env[62519]: DEBUG nova.network.neutron [req-9c5c6132-205a-492f-90b7-fe77138c38ea req-41c3ca29-6de9-4d64-b279-8e047407276e service nova] [instance: 9327a897-8e4f-4c59-952e-aecfac4028e0] Updating instance_info_cache with network_info: [] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1868.754369] env[62519]: INFO nova.compute.manager [None req-940b526d-cc71-4b86-8030-ffe5ea468d54 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] [instance: 4a0f7975-5a07-4593-ae71-cabebdefe0fe] instance snapshotting [ 1868.755059] env[62519]: DEBUG nova.objects.instance [None req-940b526d-cc71-4b86-8030-ffe5ea468d54 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Lazy-loading 'flavor' on Instance uuid 4a0f7975-5a07-4593-ae71-cabebdefe0fe {{(pid=62519) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1868.793637] env[62519]: DEBUG nova.compute.manager [None req-a5caa4da-9d7e-47d4-bf32-0295974b7632 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] [instance: 618a1db6-4056-4380-b5df-395ac14165a7] Start spawning the instance on the hypervisor. {{(pid=62519) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2647}} [ 1868.796162] env[62519]: DEBUG oslo_concurrency.lockutils [None req-570add47-f143-4e78-8238-e22c3295b4d0 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Acquiring lock "eb5de0a4-0af3-4731-ab30-3ae3d72207a7-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1868.796426] env[62519]: DEBUG oslo_concurrency.lockutils [None req-570add47-f143-4e78-8238-e22c3295b4d0 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Lock "eb5de0a4-0af3-4731-ab30-3ae3d72207a7-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1868.796625] env[62519]: DEBUG oslo_concurrency.lockutils [None req-570add47-f143-4e78-8238-e22c3295b4d0 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Lock "eb5de0a4-0af3-4731-ab30-3ae3d72207a7-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1868.825137] env[62519]: DEBUG nova.virt.hardware [None req-a5caa4da-9d7e-47d4-bf32-0295974b7632 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T07:56:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta 
ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T07:55:55Z,direct_url=,disk_format='vmdk',id=15793716-f1d9-4a86-9030-717adf498693,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4069337684d348e0a1ab4eb6a1a2b14d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T07:55:56Z,virtual_size=,visibility=), allow threads: False {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1868.825137] env[62519]: DEBUG nova.virt.hardware [None req-a5caa4da-9d7e-47d4-bf32-0295974b7632 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Flavor limits 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1868.825137] env[62519]: DEBUG nova.virt.hardware [None req-a5caa4da-9d7e-47d4-bf32-0295974b7632 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Image limits 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1868.825137] env[62519]: DEBUG nova.virt.hardware [None req-a5caa4da-9d7e-47d4-bf32-0295974b7632 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Flavor pref 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1868.825137] env[62519]: DEBUG nova.virt.hardware [None req-a5caa4da-9d7e-47d4-bf32-0295974b7632 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Image pref 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1868.825414] env[62519]: DEBUG nova.virt.hardware [None req-a5caa4da-9d7e-47d4-bf32-0295974b7632 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1868.825414] env[62519]: DEBUG nova.virt.hardware [None req-a5caa4da-9d7e-47d4-bf32-0295974b7632 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1868.825556] env[62519]: DEBUG nova.virt.hardware [None req-a5caa4da-9d7e-47d4-bf32-0295974b7632 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62519) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1868.825727] env[62519]: DEBUG nova.virt.hardware [None req-a5caa4da-9d7e-47d4-bf32-0295974b7632 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Got 1 possible topologies {{(pid=62519) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1868.825885] env[62519]: DEBUG nova.virt.hardware [None req-a5caa4da-9d7e-47d4-bf32-0295974b7632 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1868.826067] env[62519]: DEBUG nova.virt.hardware 
[None req-a5caa4da-9d7e-47d4-bf32-0295974b7632 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1868.827211] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec0c9944-32d5-41da-9ff8-a0eec1fbb453 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1868.835088] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-175e8416-cd33-455c-8f29-8af7cbbbae43 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1868.903198] env[62519]: DEBUG oslo_vmware.api [None req-3d21eaea-4564-4ef0-83e7-f8dc5c87fa6d tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] Task: {'id': task-1803168, 'name': CloneVM_Task} progress is 94%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1869.187131] env[62519]: DEBUG oslo_concurrency.lockutils [None req-1f95801e-70c9-40ad-906a-9a7b13227dbd tempest-ServerMetadataTestJSON-420459941 tempest-ServerMetadataTestJSON-420459941-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.420s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1869.187131] env[62519]: DEBUG nova.compute.manager [None req-1f95801e-70c9-40ad-906a-9a7b13227dbd tempest-ServerMetadataTestJSON-420459941 tempest-ServerMetadataTestJSON-420459941-project-member] [instance: 71edba06-1628-4749-8a51-5a9bcb003fda] Start building networks asynchronously for instance. 
{{(pid=62519) _build_resources /opt/stack/nova/nova/compute/manager.py:2838}} [ 1869.188594] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2d7a72a4-dcd0-40bd-adb4-9b6a093c2577 tempest-AttachInterfacesUnderV243Test-2090758485 tempest-AttachInterfacesUnderV243Test-2090758485-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 2.121s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1869.188823] env[62519]: DEBUG nova.objects.instance [None req-2d7a72a4-dcd0-40bd-adb4-9b6a093c2577 tempest-AttachInterfacesUnderV243Test-2090758485 tempest-AttachInterfacesUnderV243Test-2090758485-project-member] Lazy-loading 'resources' on Instance uuid 9f71845a-e80c-4822-b3de-717f1d83bc49 {{(pid=62519) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1869.189800] env[62519]: DEBUG nova.network.neutron [-] [instance: 9327a897-8e4f-4c59-952e-aecfac4028e0] Updating instance_info_cache with network_info: [] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1869.208065] env[62519]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-95cf5795-e188-45a5-bca3-29985aec01b8 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1869.218992] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed3c0903-5904-4489-bf96-59fe3fa04e79 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1869.232019] env[62519]: DEBUG oslo_concurrency.lockutils [None req-b5e9901c-e7db-4cf5-a185-5e35645884ad tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Acquiring lock "4a9656fd-2b9f-4dd6-8b71-39e55813f2f6" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1869.232307] env[62519]: DEBUG oslo_concurrency.lockutils [None req-b5e9901c-e7db-4cf5-a185-5e35645884ad tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Lock "4a9656fd-2b9f-4dd6-8b71-39e55813f2f6" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1869.232535] env[62519]: DEBUG oslo_concurrency.lockutils [None req-b5e9901c-e7db-4cf5-a185-5e35645884ad tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Acquiring lock "4a9656fd-2b9f-4dd6-8b71-39e55813f2f6-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1869.232741] env[62519]: DEBUG oslo_concurrency.lockutils [None req-b5e9901c-e7db-4cf5-a185-5e35645884ad tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Lock "4a9656fd-2b9f-4dd6-8b71-39e55813f2f6-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1869.232929] 
env[62519]: DEBUG oslo_concurrency.lockutils [None req-b5e9901c-e7db-4cf5-a185-5e35645884ad tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Lock "4a9656fd-2b9f-4dd6-8b71-39e55813f2f6-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1869.234924] env[62519]: INFO nova.compute.manager [None req-b5e9901c-e7db-4cf5-a185-5e35645884ad tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] [instance: 4a9656fd-2b9f-4dd6-8b71-39e55813f2f6] Terminating instance [ 1869.266020] env[62519]: DEBUG nova.compute.manager [req-9c5c6132-205a-492f-90b7-fe77138c38ea req-41c3ca29-6de9-4d64-b279-8e047407276e service nova] [instance: 9327a897-8e4f-4c59-952e-aecfac4028e0] Detach interface failed, port_id=885622bf-cd63-46f6-ac64-de4e37a95265, reason: Instance 9327a897-8e4f-4c59-952e-aecfac4028e0 could not be found. {{(pid=62519) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11455}} [ 1869.267234] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79e137e1-be87-44f9-be45-c4852610d5b1 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1869.287015] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60cb1105-f321-4a63-9995-b2d8fd70a12c {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1869.320621] env[62519]: DEBUG nova.network.neutron [None req-a5caa4da-9d7e-47d4-bf32-0295974b7632 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] [instance: 618a1db6-4056-4380-b5df-395ac14165a7] Successfully updated port: 872d6b35-c4a4-4975-8757-3c87471ba5df {{(pid=62519) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1869.403742] env[62519]: DEBUG oslo_vmware.api [None req-3d21eaea-4564-4ef0-83e7-f8dc5c87fa6d tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] Task: {'id': task-1803168, 'name': CloneVM_Task} progress is 94%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1869.695476] env[62519]: INFO nova.compute.manager [-] [instance: 9327a897-8e4f-4c59-952e-aecfac4028e0] Took 1.31 seconds to deallocate network for instance. [ 1869.699017] env[62519]: DEBUG nova.compute.utils [None req-1f95801e-70c9-40ad-906a-9a7b13227dbd tempest-ServerMetadataTestJSON-420459941 tempest-ServerMetadataTestJSON-420459941-project-member] Using /dev/sd instead of None {{(pid=62519) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1869.706087] env[62519]: DEBUG nova.compute.manager [None req-1f95801e-70c9-40ad-906a-9a7b13227dbd tempest-ServerMetadataTestJSON-420459941 tempest-ServerMetadataTestJSON-420459941-project-member] [instance: 71edba06-1628-4749-8a51-5a9bcb003fda] Allocating IP information in the background. 
{{(pid=62519) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1989}} [ 1869.706324] env[62519]: DEBUG nova.network.neutron [None req-1f95801e-70c9-40ad-906a-9a7b13227dbd tempest-ServerMetadataTestJSON-420459941 tempest-ServerMetadataTestJSON-420459941-project-member] [instance: 71edba06-1628-4749-8a51-5a9bcb003fda] allocate_for_instance() {{(pid=62519) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1869.740585] env[62519]: DEBUG nova.compute.manager [None req-b5e9901c-e7db-4cf5-a185-5e35645884ad tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] [instance: 4a9656fd-2b9f-4dd6-8b71-39e55813f2f6] Start destroying the instance on the hypervisor. {{(pid=62519) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3166}} [ 1869.740806] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-b5e9901c-e7db-4cf5-a185-5e35645884ad tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] [instance: 4a9656fd-2b9f-4dd6-8b71-39e55813f2f6] Destroying instance {{(pid=62519) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1869.741698] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aaa3913d-901b-47aa-a5da-2824a09036a5 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1869.752622] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-b5e9901c-e7db-4cf5-a185-5e35645884ad tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] [instance: 4a9656fd-2b9f-4dd6-8b71-39e55813f2f6] Powering off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1869.754297] env[62519]: DEBUG nova.policy [None req-1f95801e-70c9-40ad-906a-9a7b13227dbd tempest-ServerMetadataTestJSON-420459941 tempest-ServerMetadataTestJSON-420459941-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '29dc72d5b4f34813a20a2d85c54ab0ae', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '94d952826a35452f9ac28e0cbf14a980', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62519) authorize /opt/stack/nova/nova/policy.py:192}} [ 1869.755284] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f38fb76b-bc98-4fca-9a6f-943124e8d3e4 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1869.762697] env[62519]: DEBUG oslo_vmware.api [None req-b5e9901c-e7db-4cf5-a185-5e35645884ad tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Waiting for the task: (returnval){ [ 1869.762697] env[62519]: value = "task-1803173" [ 1869.762697] env[62519]: _type = "Task" [ 1869.762697] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1869.772231] env[62519]: DEBUG oslo_vmware.api [None req-b5e9901c-e7db-4cf5-a185-5e35645884ad tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Task: {'id': task-1803173, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1869.799017] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-940b526d-cc71-4b86-8030-ffe5ea468d54 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] [instance: 4a0f7975-5a07-4593-ae71-cabebdefe0fe] Creating Snapshot of the VM instance {{(pid=62519) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1869.799336] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-164e17ca-1d5a-486e-b85b-3a27b8883b89 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1869.807605] env[62519]: DEBUG oslo_vmware.api [None req-940b526d-cc71-4b86-8030-ffe5ea468d54 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Waiting for the task: (returnval){ [ 1869.807605] env[62519]: value = "task-1803174" [ 1869.807605] env[62519]: _type = "Task" [ 1869.807605] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1869.823734] env[62519]: DEBUG oslo_vmware.api [None req-940b526d-cc71-4b86-8030-ffe5ea468d54 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Task: {'id': task-1803174, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1869.828043] env[62519]: DEBUG oslo_concurrency.lockutils [None req-a5caa4da-9d7e-47d4-bf32-0295974b7632 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Acquiring lock "refresh_cache-618a1db6-4056-4380-b5df-395ac14165a7" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1869.828043] env[62519]: DEBUG oslo_concurrency.lockutils [None req-a5caa4da-9d7e-47d4-bf32-0295974b7632 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Acquired lock "refresh_cache-618a1db6-4056-4380-b5df-395ac14165a7" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1869.828043] env[62519]: DEBUG nova.network.neutron [None req-a5caa4da-9d7e-47d4-bf32-0295974b7632 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] [instance: 618a1db6-4056-4380-b5df-395ac14165a7] Building network info cache for instance {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1869.861060] env[62519]: DEBUG oslo_concurrency.lockutils [None req-570add47-f143-4e78-8238-e22c3295b4d0 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Acquiring lock "refresh_cache-eb5de0a4-0af3-4731-ab30-3ae3d72207a7" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1869.861292] env[62519]: DEBUG oslo_concurrency.lockutils [None req-570add47-f143-4e78-8238-e22c3295b4d0 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Acquired lock "refresh_cache-eb5de0a4-0af3-4731-ab30-3ae3d72207a7" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1869.861495] env[62519]: DEBUG nova.network.neutron [None 
req-570add47-f143-4e78-8238-e22c3295b4d0 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] [instance: eb5de0a4-0af3-4731-ab30-3ae3d72207a7] Building network info cache for instance {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1869.903327] env[62519]: DEBUG oslo_vmware.api [None req-3d21eaea-4564-4ef0-83e7-f8dc5c87fa6d tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] Task: {'id': task-1803168, 'name': CloneVM_Task, 'duration_secs': 1.437071} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1869.903439] env[62519]: INFO nova.virt.vmwareapi.vmops [None req-3d21eaea-4564-4ef0-83e7-f8dc5c87fa6d tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] [instance: 70abb2e0-1ff2-49dd-b40f-9cac244a249e] Created linked-clone VM from snapshot [ 1869.904542] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86c8dc06-922f-4974-9686-57edc5c1a701 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1869.916647] env[62519]: DEBUG nova.virt.vmwareapi.images [None req-3d21eaea-4564-4ef0-83e7-f8dc5c87fa6d tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] [instance: 70abb2e0-1ff2-49dd-b40f-9cac244a249e] Uploading image 863e09af-587e-4095-b092-e1a3d9d743d3 {{(pid=62519) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1869.936255] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-3d21eaea-4564-4ef0-83e7-f8dc5c87fa6d tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] [instance: 70abb2e0-1ff2-49dd-b40f-9cac244a249e] Destroying the VM {{(pid=62519) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1869.936651] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-bd9c17bb-abf2-4290-8bb6-4d9f074f78bd {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1869.949155] env[62519]: DEBUG oslo_vmware.api [None req-3d21eaea-4564-4ef0-83e7-f8dc5c87fa6d tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] Waiting for the task: (returnval){ [ 1869.949155] env[62519]: value = "task-1803175" [ 1869.949155] env[62519]: _type = "Task" [ 1869.949155] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1869.957842] env[62519]: DEBUG oslo_vmware.api [None req-3d21eaea-4564-4ef0-83e7-f8dc5c87fa6d tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] Task: {'id': task-1803175, 'name': Destroy_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1870.000927] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d0449ed-1f05-49fb-b153-f0a7ae0a9db6 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1870.009257] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a6bb2f1-c641-4f16-bb2e-b188cca393dd {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1870.042682] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a1747a7-69d1-446b-8d80-6faeed1c76ed {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1870.052241] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22511fa4-5a28-4f50-9507-0ca3c41ed648 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1870.067339] env[62519]: DEBUG nova.compute.provider_tree [None req-2d7a72a4-dcd0-40bd-adb4-9b6a093c2577 tempest-AttachInterfacesUnderV243Test-2090758485 tempest-AttachInterfacesUnderV243Test-2090758485-project-member] Inventory has not changed in ProviderTree for provider: f8ca0d98-9158-4b85-ae0e-b106f966dd44 {{(pid=62519) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1870.085413] env[62519]: DEBUG nova.network.neutron [None req-1f95801e-70c9-40ad-906a-9a7b13227dbd tempest-ServerMetadataTestJSON-420459941 tempest-ServerMetadataTestJSON-420459941-project-member] [instance: 71edba06-1628-4749-8a51-5a9bcb003fda] Successfully created port: ff83eb74-d7d7-49d4-8a99-813264a7d1ab {{(pid=62519) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1870.207637] env[62519]: DEBUG oslo_concurrency.lockutils [None req-ff1af54b-4603-4ea1-8970-e122b48e647b tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1870.211393] env[62519]: DEBUG nova.compute.manager [None req-1f95801e-70c9-40ad-906a-9a7b13227dbd tempest-ServerMetadataTestJSON-420459941 tempest-ServerMetadataTestJSON-420459941-project-member] [instance: 71edba06-1628-4749-8a51-5a9bcb003fda] Start building block device mappings for instance. {{(pid=62519) _build_resources /opt/stack/nova/nova/compute/manager.py:2873}} [ 1870.272483] env[62519]: DEBUG oslo_vmware.api [None req-b5e9901c-e7db-4cf5-a185-5e35645884ad tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Task: {'id': task-1803173, 'name': PowerOffVM_Task, 'duration_secs': 0.238339} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1870.272764] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-b5e9901c-e7db-4cf5-a185-5e35645884ad tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] [instance: 4a9656fd-2b9f-4dd6-8b71-39e55813f2f6] Powered off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1870.272932] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-b5e9901c-e7db-4cf5-a185-5e35645884ad tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] [instance: 4a9656fd-2b9f-4dd6-8b71-39e55813f2f6] Unregistering the VM {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1870.273189] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-049c8371-6951-45ae-9571-93ab82c6652f {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1870.322558] env[62519]: DEBUG oslo_vmware.api [None req-940b526d-cc71-4b86-8030-ffe5ea468d54 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Task: {'id': task-1803174, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1870.358115] env[62519]: DEBUG nova.network.neutron [None req-a5caa4da-9d7e-47d4-bf32-0295974b7632 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] [instance: 618a1db6-4056-4380-b5df-395ac14165a7] Instance cache missing network info. {{(pid=62519) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1870.459290] env[62519]: DEBUG oslo_vmware.api [None req-3d21eaea-4564-4ef0-83e7-f8dc5c87fa6d tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] Task: {'id': task-1803175, 'name': Destroy_Task} progress is 33%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1870.579020] env[62519]: DEBUG nova.scheduler.client.report [None req-2d7a72a4-dcd0-40bd-adb4-9b6a093c2577 tempest-AttachInterfacesUnderV243Test-2090758485 tempest-AttachInterfacesUnderV243Test-2090758485-project-member] Inventory has not changed for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 157, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1870.618420] env[62519]: DEBUG nova.network.neutron [None req-a5caa4da-9d7e-47d4-bf32-0295974b7632 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] [instance: 618a1db6-4056-4380-b5df-395ac14165a7] Updating instance_info_cache with network_info: [{"id": "872d6b35-c4a4-4975-8757-3c87471ba5df", "address": "fa:16:3e:f1:f5:3b", "network": {"id": "40fc0e4f-fe1a-4b8c-ace4-4612cfc1a8fc", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-364463489-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "12977ed65a1b410a987b049e9d1dce3e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8e028024-a9c1-4cae-8849-ea770a7ae0e4", "external-id": "nsx-vlan-transportzone-919", "segmentation_id": 919, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap872d6b35-c4", "ovs_interfaceid": "872d6b35-c4a4-4975-8757-3c87471ba5df", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1870.672742] env[62519]: DEBUG nova.network.neutron [None req-570add47-f143-4e78-8238-e22c3295b4d0 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] [instance: eb5de0a4-0af3-4731-ab30-3ae3d72207a7] Updating instance_info_cache with network_info: [{"id": "98c5e48e-5515-4c54-af43-86a9b283477d", "address": "fa:16:3e:72:79:66", "network": {"id": "f9454e9e-4102-407f-acc0-7afee44ec876", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-684678313-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "683d60927fdf424386ffcfaa344a7af6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": 
"11b669be-fb26-4ef8-bdb6-c77ab9d06daf", "external-id": "nsx-vlan-transportzone-633", "segmentation_id": 633, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap98c5e48e-55", "ovs_interfaceid": "98c5e48e-5515-4c54-af43-86a9b283477d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1870.747495] env[62519]: DEBUG nova.compute.manager [req-19752656-2ceb-47bb-bf23-ce5532c079f5 req-c305a13e-e825-4a15-822e-dec59369e2ce service nova] [instance: 618a1db6-4056-4380-b5df-395ac14165a7] Received event network-vif-plugged-872d6b35-c4a4-4975-8757-3c87471ba5df {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1870.747653] env[62519]: DEBUG oslo_concurrency.lockutils [req-19752656-2ceb-47bb-bf23-ce5532c079f5 req-c305a13e-e825-4a15-822e-dec59369e2ce service nova] Acquiring lock "618a1db6-4056-4380-b5df-395ac14165a7-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1870.747870] env[62519]: DEBUG oslo_concurrency.lockutils [req-19752656-2ceb-47bb-bf23-ce5532c079f5 req-c305a13e-e825-4a15-822e-dec59369e2ce service nova] Lock "618a1db6-4056-4380-b5df-395ac14165a7-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1870.748050] env[62519]: DEBUG oslo_concurrency.lockutils [req-19752656-2ceb-47bb-bf23-ce5532c079f5 req-c305a13e-e825-4a15-822e-dec59369e2ce service nova] Lock "618a1db6-4056-4380-b5df-395ac14165a7-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1870.748226] env[62519]: DEBUG nova.compute.manager [req-19752656-2ceb-47bb-bf23-ce5532c079f5 req-c305a13e-e825-4a15-822e-dec59369e2ce service nova] [instance: 618a1db6-4056-4380-b5df-395ac14165a7] No waiting events found dispatching network-vif-plugged-872d6b35-c4a4-4975-8757-3c87471ba5df {{(pid=62519) pop_instance_event /opt/stack/nova/nova/compute/manager.py:323}} [ 1870.748395] env[62519]: WARNING nova.compute.manager [req-19752656-2ceb-47bb-bf23-ce5532c079f5 req-c305a13e-e825-4a15-822e-dec59369e2ce service nova] [instance: 618a1db6-4056-4380-b5df-395ac14165a7] Received unexpected event network-vif-plugged-872d6b35-c4a4-4975-8757-3c87471ba5df for instance with vm_state building and task_state spawning. [ 1870.748555] env[62519]: DEBUG nova.compute.manager [req-19752656-2ceb-47bb-bf23-ce5532c079f5 req-c305a13e-e825-4a15-822e-dec59369e2ce service nova] [instance: 618a1db6-4056-4380-b5df-395ac14165a7] Received event network-changed-872d6b35-c4a4-4975-8757-3c87471ba5df {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1870.748740] env[62519]: DEBUG nova.compute.manager [req-19752656-2ceb-47bb-bf23-ce5532c079f5 req-c305a13e-e825-4a15-822e-dec59369e2ce service nova] [instance: 618a1db6-4056-4380-b5df-395ac14165a7] Refreshing instance network info cache due to event network-changed-872d6b35-c4a4-4975-8757-3c87471ba5df. 
{{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11628}} [ 1870.748927] env[62519]: DEBUG oslo_concurrency.lockutils [req-19752656-2ceb-47bb-bf23-ce5532c079f5 req-c305a13e-e825-4a15-822e-dec59369e2ce service nova] Acquiring lock "refresh_cache-618a1db6-4056-4380-b5df-395ac14165a7" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1870.823934] env[62519]: DEBUG oslo_vmware.api [None req-940b526d-cc71-4b86-8030-ffe5ea468d54 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Task: {'id': task-1803174, 'name': CreateSnapshot_Task, 'duration_secs': 0.921237} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1870.824163] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-940b526d-cc71-4b86-8030-ffe5ea468d54 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] [instance: 4a0f7975-5a07-4593-ae71-cabebdefe0fe] Created Snapshot of the VM instance {{(pid=62519) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1870.824930] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1aec1915-eba2-4bbb-b44b-1547735185fb {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1870.960606] env[62519]: DEBUG oslo_vmware.api [None req-3d21eaea-4564-4ef0-83e7-f8dc5c87fa6d tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] Task: {'id': task-1803175, 'name': Destroy_Task} progress is 33%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1871.080950] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2d7a72a4-dcd0-40bd-adb4-9b6a093c2577 tempest-AttachInterfacesUnderV243Test-2090758485 tempest-AttachInterfacesUnderV243Test-2090758485-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.892s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1871.083994] env[62519]: DEBUG oslo_concurrency.lockutils [None req-4de658fb-fc0f-466b-a6c9-861a7b1ad6fd tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 3.512s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1871.085966] env[62519]: INFO nova.compute.claims [None req-4de658fb-fc0f-466b-a6c9-861a7b1ad6fd tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] [instance: fc3beaba-2ad5-4598-b562-557fdd552b39] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1871.117526] env[62519]: INFO nova.scheduler.client.report [None req-2d7a72a4-dcd0-40bd-adb4-9b6a093c2577 tempest-AttachInterfacesUnderV243Test-2090758485 tempest-AttachInterfacesUnderV243Test-2090758485-project-member] Deleted allocations for instance 9f71845a-e80c-4822-b3de-717f1d83bc49 [ 1871.121775] env[62519]: DEBUG oslo_concurrency.lockutils [None req-a5caa4da-9d7e-47d4-bf32-0295974b7632 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Releasing lock 
"refresh_cache-618a1db6-4056-4380-b5df-395ac14165a7" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1871.122071] env[62519]: DEBUG nova.compute.manager [None req-a5caa4da-9d7e-47d4-bf32-0295974b7632 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] [instance: 618a1db6-4056-4380-b5df-395ac14165a7] Instance network_info: |[{"id": "872d6b35-c4a4-4975-8757-3c87471ba5df", "address": "fa:16:3e:f1:f5:3b", "network": {"id": "40fc0e4f-fe1a-4b8c-ace4-4612cfc1a8fc", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-364463489-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "12977ed65a1b410a987b049e9d1dce3e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8e028024-a9c1-4cae-8849-ea770a7ae0e4", "external-id": "nsx-vlan-transportzone-919", "segmentation_id": 919, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap872d6b35-c4", "ovs_interfaceid": "872d6b35-c4a4-4975-8757-3c87471ba5df", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62519) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2004}} [ 1871.122557] env[62519]: DEBUG oslo_concurrency.lockutils [req-19752656-2ceb-47bb-bf23-ce5532c079f5 req-c305a13e-e825-4a15-822e-dec59369e2ce service nova] Acquired lock "refresh_cache-618a1db6-4056-4380-b5df-395ac14165a7" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1871.122783] env[62519]: DEBUG nova.network.neutron [req-19752656-2ceb-47bb-bf23-ce5532c079f5 req-c305a13e-e825-4a15-822e-dec59369e2ce service nova] [instance: 618a1db6-4056-4380-b5df-395ac14165a7] Refreshing network info cache for port 872d6b35-c4a4-4975-8757-3c87471ba5df {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1871.124053] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-a5caa4da-9d7e-47d4-bf32-0295974b7632 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] [instance: 618a1db6-4056-4380-b5df-395ac14165a7] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:f1:f5:3b', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '8e028024-a9c1-4cae-8849-ea770a7ae0e4', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '872d6b35-c4a4-4975-8757-3c87471ba5df', 'vif_model': 'vmxnet3'}] {{(pid=62519) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1871.133233] env[62519]: DEBUG oslo.service.loopingcall [None req-a5caa4da-9d7e-47d4-bf32-0295974b7632 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62519) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1871.134568] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 618a1db6-4056-4380-b5df-395ac14165a7] Creating VM on the ESX host {{(pid=62519) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1871.134822] env[62519]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-06ee78bd-83d5-4823-9408-21bf6cc079bb {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1871.157775] env[62519]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1871.157775] env[62519]: value = "task-1803179" [ 1871.157775] env[62519]: _type = "Task" [ 1871.157775] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1871.167971] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1803179, 'name': CreateVM_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1871.175097] env[62519]: DEBUG oslo_concurrency.lockutils [None req-570add47-f143-4e78-8238-e22c3295b4d0 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Releasing lock "refresh_cache-eb5de0a4-0af3-4731-ab30-3ae3d72207a7" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1871.220636] env[62519]: DEBUG nova.compute.manager [None req-1f95801e-70c9-40ad-906a-9a7b13227dbd tempest-ServerMetadataTestJSON-420459941 tempest-ServerMetadataTestJSON-420459941-project-member] [instance: 71edba06-1628-4749-8a51-5a9bcb003fda] Start spawning the instance on the hypervisor. 
{{(pid=62519) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2647}} [ 1871.250936] env[62519]: DEBUG nova.virt.hardware [None req-1f95801e-70c9-40ad-906a-9a7b13227dbd tempest-ServerMetadataTestJSON-420459941 tempest-ServerMetadataTestJSON-420459941-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T07:56:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T07:55:55Z,direct_url=,disk_format='vmdk',id=15793716-f1d9-4a86-9030-717adf498693,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4069337684d348e0a1ab4eb6a1a2b14d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T07:55:56Z,virtual_size=,visibility=), allow threads: False {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1871.250936] env[62519]: DEBUG nova.virt.hardware [None req-1f95801e-70c9-40ad-906a-9a7b13227dbd tempest-ServerMetadataTestJSON-420459941 tempest-ServerMetadataTestJSON-420459941-project-member] Flavor limits 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1871.250936] env[62519]: DEBUG nova.virt.hardware [None req-1f95801e-70c9-40ad-906a-9a7b13227dbd tempest-ServerMetadataTestJSON-420459941 tempest-ServerMetadataTestJSON-420459941-project-member] Image limits 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1871.251154] env[62519]: DEBUG nova.virt.hardware [None req-1f95801e-70c9-40ad-906a-9a7b13227dbd tempest-ServerMetadataTestJSON-420459941 tempest-ServerMetadataTestJSON-420459941-project-member] Flavor pref 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1871.251327] env[62519]: DEBUG nova.virt.hardware [None req-1f95801e-70c9-40ad-906a-9a7b13227dbd tempest-ServerMetadataTestJSON-420459941 tempest-ServerMetadataTestJSON-420459941-project-member] Image pref 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1871.251542] env[62519]: DEBUG nova.virt.hardware [None req-1f95801e-70c9-40ad-906a-9a7b13227dbd tempest-ServerMetadataTestJSON-420459941 tempest-ServerMetadataTestJSON-420459941-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1871.251842] env[62519]: DEBUG nova.virt.hardware [None req-1f95801e-70c9-40ad-906a-9a7b13227dbd tempest-ServerMetadataTestJSON-420459941 tempest-ServerMetadataTestJSON-420459941-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1871.252033] env[62519]: DEBUG nova.virt.hardware [None req-1f95801e-70c9-40ad-906a-9a7b13227dbd tempest-ServerMetadataTestJSON-420459941 tempest-ServerMetadataTestJSON-420459941-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62519) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1871.252248] env[62519]: DEBUG nova.virt.hardware [None 
req-1f95801e-70c9-40ad-906a-9a7b13227dbd tempest-ServerMetadataTestJSON-420459941 tempest-ServerMetadataTestJSON-420459941-project-member] Got 1 possible topologies {{(pid=62519) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1871.252488] env[62519]: DEBUG nova.virt.hardware [None req-1f95801e-70c9-40ad-906a-9a7b13227dbd tempest-ServerMetadataTestJSON-420459941 tempest-ServerMetadataTestJSON-420459941-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1871.252756] env[62519]: DEBUG nova.virt.hardware [None req-1f95801e-70c9-40ad-906a-9a7b13227dbd tempest-ServerMetadataTestJSON-420459941 tempest-ServerMetadataTestJSON-420459941-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1871.253902] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bffbe42e-639a-4347-82c3-828f4f39313d {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1871.263937] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2fa45bb1-41fa-4aa2-8d36-7e95057772e4 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1871.343178] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-940b526d-cc71-4b86-8030-ffe5ea468d54 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] [instance: 4a0f7975-5a07-4593-ae71-cabebdefe0fe] Creating linked-clone VM from snapshot {{(pid=62519) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1871.343497] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-61dfba42-e492-4b14-96fa-2d9ec1ccc602 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1871.353821] env[62519]: DEBUG oslo_vmware.api [None req-940b526d-cc71-4b86-8030-ffe5ea468d54 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Waiting for the task: (returnval){ [ 1871.353821] env[62519]: value = "task-1803180" [ 1871.353821] env[62519]: _type = "Task" [ 1871.353821] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1871.362312] env[62519]: DEBUG oslo_vmware.api [None req-940b526d-cc71-4b86-8030-ffe5ea468d54 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Task: {'id': task-1803180, 'name': CloneVM_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1871.461189] env[62519]: DEBUG oslo_vmware.api [None req-3d21eaea-4564-4ef0-83e7-f8dc5c87fa6d tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] Task: {'id': task-1803175, 'name': Destroy_Task} progress is 33%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1871.559681] env[62519]: DEBUG oslo_concurrency.lockutils [None req-0ebc83ca-512d-4cc7-bfec-9a7d758e59d6 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Acquiring lock "interface-10bfd4ac-6f11-4c96-87a0-ce74bc1193c4-None" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1871.559985] env[62519]: DEBUG oslo_concurrency.lockutils [None req-0ebc83ca-512d-4cc7-bfec-9a7d758e59d6 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Lock "interface-10bfd4ac-6f11-4c96-87a0-ce74bc1193c4-None" acquired by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: waited 0.001s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1871.560374] env[62519]: DEBUG nova.objects.instance [None req-0ebc83ca-512d-4cc7-bfec-9a7d758e59d6 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Lazy-loading 'flavor' on Instance uuid 10bfd4ac-6f11-4c96-87a0-ce74bc1193c4 {{(pid=62519) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1871.625342] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2d7a72a4-dcd0-40bd-adb4-9b6a093c2577 tempest-AttachInterfacesUnderV243Test-2090758485 tempest-AttachInterfacesUnderV243Test-2090758485-project-member] Lock "9f71845a-e80c-4822-b3de-717f1d83bc49" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 8.665s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1871.671037] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1803179, 'name': CreateVM_Task} progress is 25%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1871.700354] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4b211f4-af73-4d88-9bff-0c993b766c86 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1871.723650] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6bad9f6-d27b-41d8-acc0-7f0877fb5807 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1871.731725] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-570add47-f143-4e78-8238-e22c3295b4d0 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] [instance: eb5de0a4-0af3-4731-ab30-3ae3d72207a7] Updating instance 'eb5de0a4-0af3-4731-ab30-3ae3d72207a7' progress to 83 {{(pid=62519) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1871.864788] env[62519]: DEBUG oslo_vmware.api [None req-940b526d-cc71-4b86-8030-ffe5ea468d54 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Task: {'id': task-1803180, 'name': CloneVM_Task} progress is 94%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1871.865749] env[62519]: DEBUG nova.network.neutron [req-19752656-2ceb-47bb-bf23-ce5532c079f5 req-c305a13e-e825-4a15-822e-dec59369e2ce service nova] [instance: 618a1db6-4056-4380-b5df-395ac14165a7] Updated VIF entry in instance network info cache for port 872d6b35-c4a4-4975-8757-3c87471ba5df. {{(pid=62519) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1871.866095] env[62519]: DEBUG nova.network.neutron [req-19752656-2ceb-47bb-bf23-ce5532c079f5 req-c305a13e-e825-4a15-822e-dec59369e2ce service nova] [instance: 618a1db6-4056-4380-b5df-395ac14165a7] Updating instance_info_cache with network_info: [{"id": "872d6b35-c4a4-4975-8757-3c87471ba5df", "address": "fa:16:3e:f1:f5:3b", "network": {"id": "40fc0e4f-fe1a-4b8c-ace4-4612cfc1a8fc", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-364463489-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "12977ed65a1b410a987b049e9d1dce3e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8e028024-a9c1-4cae-8849-ea770a7ae0e4", "external-id": "nsx-vlan-transportzone-919", "segmentation_id": 919, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap872d6b35-c4", "ovs_interfaceid": "872d6b35-c4a4-4975-8757-3c87471ba5df", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1871.961650] env[62519]: DEBUG oslo_vmware.api [None req-3d21eaea-4564-4ef0-83e7-f8dc5c87fa6d tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] Task: {'id': task-1803175, 'name': Destroy_Task} progress is 33%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1872.136156] env[62519]: DEBUG nova.objects.instance [None req-0ebc83ca-512d-4cc7-bfec-9a7d758e59d6 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Lazy-loading 'pci_requests' on Instance uuid 10bfd4ac-6f11-4c96-87a0-ce74bc1193c4 {{(pid=62519) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1872.171614] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1803179, 'name': CreateVM_Task} progress is 25%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1872.239217] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-570add47-f143-4e78-8238-e22c3295b4d0 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] [instance: eb5de0a4-0af3-4731-ab30-3ae3d72207a7] Powering on the VM {{(pid=62519) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1872.239762] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-18c84ed9-7006-467b-903b-3cab69fac0e1 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1872.248995] env[62519]: DEBUG oslo_vmware.api [None req-570add47-f143-4e78-8238-e22c3295b4d0 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Waiting for the task: (returnval){ [ 1872.248995] env[62519]: value = "task-1803181" [ 1872.248995] env[62519]: _type = "Task" [ 1872.248995] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1872.260346] env[62519]: DEBUG oslo_vmware.api [None req-570add47-f143-4e78-8238-e22c3295b4d0 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Task: {'id': task-1803181, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1872.335536] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-267a09b9-1bf8-4411-a0cf-3ffaafbc821a {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1872.343508] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78daa956-1d49-44b9-b4a1-2447b169a0ff {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1872.376324] env[62519]: DEBUG oslo_concurrency.lockutils [req-19752656-2ceb-47bb-bf23-ce5532c079f5 req-c305a13e-e825-4a15-822e-dec59369e2ce service nova] Releasing lock "refresh_cache-618a1db6-4056-4380-b5df-395ac14165a7" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1872.380399] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c6622a4-9ec4-4359-af57-d012807ee39a {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1872.393103] env[62519]: DEBUG oslo_vmware.api [None req-940b526d-cc71-4b86-8030-ffe5ea468d54 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Task: {'id': task-1803180, 'name': CloneVM_Task} progress is 94%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1872.394373] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-448ef369-8fa7-45f9-a4da-448f51f71b95 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1872.415038] env[62519]: DEBUG nova.compute.provider_tree [None req-4de658fb-fc0f-466b-a6c9-861a7b1ad6fd tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] Inventory has not changed in ProviderTree for provider: f8ca0d98-9158-4b85-ae0e-b106f966dd44 {{(pid=62519) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1872.462663] env[62519]: DEBUG oslo_vmware.api [None req-3d21eaea-4564-4ef0-83e7-f8dc5c87fa6d tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] Task: {'id': task-1803175, 'name': Destroy_Task} progress is 33%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1872.639160] env[62519]: DEBUG nova.objects.base [None req-0ebc83ca-512d-4cc7-bfec-9a7d758e59d6 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Object Instance<10bfd4ac-6f11-4c96-87a0-ce74bc1193c4> lazy-loaded attributes: flavor,pci_requests {{(pid=62519) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1872.639374] env[62519]: DEBUG nova.network.neutron [None req-0ebc83ca-512d-4cc7-bfec-9a7d758e59d6 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] [instance: 10bfd4ac-6f11-4c96-87a0-ce74bc1193c4] allocate_for_instance() {{(pid=62519) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1872.670467] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1803179, 'name': CreateVM_Task} progress is 25%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1872.703896] env[62519]: DEBUG nova.policy [None req-0ebc83ca-512d-4cc7-bfec-9a7d758e59d6 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '7eca5c7b079d4785941d68d7c51df5e0', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '63a46158057949478e5c79fbe0d4d5d4', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62519) authorize /opt/stack/nova/nova/policy.py:192}} [ 1872.762104] env[62519]: DEBUG oslo_vmware.api [None req-570add47-f143-4e78-8238-e22c3295b4d0 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Task: {'id': task-1803181, 'name': PowerOnVM_Task, 'duration_secs': 0.400612} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1872.762390] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-570add47-f143-4e78-8238-e22c3295b4d0 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] [instance: eb5de0a4-0af3-4731-ab30-3ae3d72207a7] Powered on the VM {{(pid=62519) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1872.762582] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-570add47-f143-4e78-8238-e22c3295b4d0 tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] [instance: eb5de0a4-0af3-4731-ab30-3ae3d72207a7] Updating instance 'eb5de0a4-0af3-4731-ab30-3ae3d72207a7' progress to 100 {{(pid=62519) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1872.893628] env[62519]: DEBUG oslo_vmware.api [None req-940b526d-cc71-4b86-8030-ffe5ea468d54 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Task: {'id': task-1803180, 'name': CloneVM_Task} progress is 94%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1872.917903] env[62519]: DEBUG nova.scheduler.client.report [None req-4de658fb-fc0f-466b-a6c9-861a7b1ad6fd tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] Inventory has not changed for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 157, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1872.966537] env[62519]: DEBUG oslo_vmware.api [None req-3d21eaea-4564-4ef0-83e7-f8dc5c87fa6d tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] Task: {'id': task-1803175, 'name': Destroy_Task} progress is 33%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1873.171683] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1803179, 'name': CreateVM_Task} progress is 25%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1873.191502] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-8565f1f2-914f-4323-b0fe-767409aeefcb tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] [instance: f3665f89-1747-4567-9e56-c937d4ac81da] Volume attach. 
Driver type: vmdk {{(pid=62519) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1873.191746] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-8565f1f2-914f-4323-b0fe-767409aeefcb tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] [instance: f3665f89-1747-4567-9e56-c937d4ac81da] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-373839', 'volume_id': '6af3bcc3-934f-4c24-b76d-cd93f73935c8', 'name': 'volume-6af3bcc3-934f-4c24-b76d-cd93f73935c8', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'f3665f89-1747-4567-9e56-c937d4ac81da', 'attached_at': '', 'detached_at': '', 'volume_id': '6af3bcc3-934f-4c24-b76d-cd93f73935c8', 'serial': '6af3bcc3-934f-4c24-b76d-cd93f73935c8'} {{(pid=62519) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1873.192617] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ba11398-e9b8-40f7-8773-5467fa32dedd {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1873.217487] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3bfbfc5f-d404-4fb1-809f-0ad0a3d658fb {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1873.244049] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-8565f1f2-914f-4323-b0fe-767409aeefcb tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] [instance: f3665f89-1747-4567-9e56-c937d4ac81da] Reconfiguring VM instance instance-0000005f to attach disk [datastore1] volume-6af3bcc3-934f-4c24-b76d-cd93f73935c8/volume-6af3bcc3-934f-4c24-b76d-cd93f73935c8.vmdk or device None with type thin {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1873.244049] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-bbfd331b-abf6-473b-81ad-293768bd4eab {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1873.263110] env[62519]: DEBUG oslo_vmware.api [None req-8565f1f2-914f-4323-b0fe-767409aeefcb tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Waiting for the task: (returnval){ [ 1873.263110] env[62519]: value = "task-1803182" [ 1873.263110] env[62519]: _type = "Task" [ 1873.263110] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1873.266772] env[62519]: DEBUG nova.network.neutron [None req-0ebc83ca-512d-4cc7-bfec-9a7d758e59d6 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] [instance: 10bfd4ac-6f11-4c96-87a0-ce74bc1193c4] Successfully created port: c5603e79-74fa-4628-bc67-0351b0407810 {{(pid=62519) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1873.276456] env[62519]: DEBUG oslo_vmware.api [None req-8565f1f2-914f-4323-b0fe-767409aeefcb tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Task: {'id': task-1803182, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1873.390604] env[62519]: DEBUG oslo_vmware.api [None req-940b526d-cc71-4b86-8030-ffe5ea468d54 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Task: {'id': task-1803180, 'name': CloneVM_Task} progress is 94%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1873.422945] env[62519]: DEBUG oslo_concurrency.lockutils [None req-4de658fb-fc0f-466b-a6c9-861a7b1ad6fd tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.339s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1873.423611] env[62519]: DEBUG nova.compute.manager [None req-4de658fb-fc0f-466b-a6c9-861a7b1ad6fd tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] [instance: fc3beaba-2ad5-4598-b562-557fdd552b39] Start building networks asynchronously for instance. {{(pid=62519) _build_resources /opt/stack/nova/nova/compute/manager.py:2838}} [ 1873.426931] env[62519]: DEBUG oslo_concurrency.lockutils [None req-ff1af54b-4603-4ea1-8970-e122b48e647b tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 3.219s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1873.427215] env[62519]: DEBUG nova.objects.instance [None req-ff1af54b-4603-4ea1-8970-e122b48e647b tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Lazy-loading 'resources' on Instance uuid 9327a897-8e4f-4c59-952e-aecfac4028e0 {{(pid=62519) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1873.464229] env[62519]: DEBUG oslo_vmware.api [None req-3d21eaea-4564-4ef0-83e7-f8dc5c87fa6d tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] Task: {'id': task-1803175, 'name': Destroy_Task} progress is 33%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1873.672731] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1803179, 'name': CreateVM_Task} progress is 25%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1873.777113] env[62519]: DEBUG oslo_vmware.api [None req-8565f1f2-914f-4323-b0fe-767409aeefcb tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Task: {'id': task-1803182, 'name': ReconfigVM_Task, 'duration_secs': 0.337221} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1873.778045] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-8565f1f2-914f-4323-b0fe-767409aeefcb tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] [instance: f3665f89-1747-4567-9e56-c937d4ac81da] Reconfigured VM instance instance-0000005f to attach disk [datastore1] volume-6af3bcc3-934f-4c24-b76d-cd93f73935c8/volume-6af3bcc3-934f-4c24-b76d-cd93f73935c8.vmdk or device None with type thin {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1873.783893] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-93288d78-0605-40f9-a73b-ce3727b815a6 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1873.800250] env[62519]: DEBUG oslo_vmware.api [None req-8565f1f2-914f-4323-b0fe-767409aeefcb tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Waiting for the task: (returnval){ [ 1873.800250] env[62519]: value = "task-1803183" [ 1873.800250] env[62519]: _type = "Task" [ 1873.800250] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1873.809698] env[62519]: DEBUG oslo_vmware.api [None req-8565f1f2-914f-4323-b0fe-767409aeefcb tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Task: {'id': task-1803183, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1873.891156] env[62519]: DEBUG oslo_vmware.api [None req-940b526d-cc71-4b86-8030-ffe5ea468d54 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Task: {'id': task-1803180, 'name': CloneVM_Task} progress is 94%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1873.931101] env[62519]: DEBUG nova.compute.utils [None req-4de658fb-fc0f-466b-a6c9-861a7b1ad6fd tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] Using /dev/sd instead of None {{(pid=62519) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1873.931885] env[62519]: DEBUG nova.compute.manager [None req-4de658fb-fc0f-466b-a6c9-861a7b1ad6fd tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] [instance: fc3beaba-2ad5-4598-b562-557fdd552b39] Allocating IP information in the background. {{(pid=62519) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1989}} [ 1873.932729] env[62519]: DEBUG nova.network.neutron [None req-4de658fb-fc0f-466b-a6c9-861a7b1ad6fd tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] [instance: fc3beaba-2ad5-4598-b562-557fdd552b39] allocate_for_instance() {{(pid=62519) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1873.964717] env[62519]: DEBUG oslo_vmware.api [None req-3d21eaea-4564-4ef0-83e7-f8dc5c87fa6d tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] Task: {'id': task-1803175, 'name': Destroy_Task} progress is 33%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1873.999954] env[62519]: DEBUG nova.policy [None req-4de658fb-fc0f-466b-a6c9-861a7b1ad6fd tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '646dceb574c845f19ca0d9dca4874dc1', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '91c1732890db42f98f538f7a5ac0d542', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62519) authorize /opt/stack/nova/nova/policy.py:192}} [ 1874.179073] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1803179, 'name': CreateVM_Task} progress is 25%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1874.238611] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87636ea6-bfb0-4b77-8dc7-f939983f0adb {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1874.247290] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a06a578-8f18-45e2-b963-3f259d7505e5 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1874.296972] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4c80bda-a412-4aba-a3f9-eb92befe9057 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1874.301541] env[62519]: DEBUG nova.network.neutron [None req-4de658fb-fc0f-466b-a6c9-861a7b1ad6fd tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] [instance: fc3beaba-2ad5-4598-b562-557fdd552b39] Successfully created port: 64341ffd-0da7-4574-9702-3fc0a03eb0e3 {{(pid=62519) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1874.311039] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0084153f-958b-48ac-b92f-f8026b2dc46d {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1874.322557] env[62519]: DEBUG oslo_vmware.api [None req-8565f1f2-914f-4323-b0fe-767409aeefcb tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Task: {'id': task-1803183, 'name': ReconfigVM_Task, 'duration_secs': 0.14425} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1874.323779] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-8565f1f2-914f-4323-b0fe-767409aeefcb tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] [instance: f3665f89-1747-4567-9e56-c937d4ac81da] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-373839', 'volume_id': '6af3bcc3-934f-4c24-b76d-cd93f73935c8', 'name': 'volume-6af3bcc3-934f-4c24-b76d-cd93f73935c8', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'f3665f89-1747-4567-9e56-c937d4ac81da', 'attached_at': '', 'detached_at': '', 'volume_id': '6af3bcc3-934f-4c24-b76d-cd93f73935c8', 'serial': '6af3bcc3-934f-4c24-b76d-cd93f73935c8'} {{(pid=62519) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1874.334531] env[62519]: DEBUG nova.compute.provider_tree [None req-ff1af54b-4603-4ea1-8970-e122b48e647b tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Inventory has not changed in ProviderTree for provider: f8ca0d98-9158-4b85-ae0e-b106f966dd44 {{(pid=62519) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1874.392052] env[62519]: DEBUG oslo_vmware.api [None req-940b526d-cc71-4b86-8030-ffe5ea468d54 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Task: {'id': task-1803180, 'name': CloneVM_Task} progress is 94%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1874.438260] env[62519]: DEBUG nova.compute.manager [None req-4de658fb-fc0f-466b-a6c9-861a7b1ad6fd tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] [instance: fc3beaba-2ad5-4598-b562-557fdd552b39] Start building block device mappings for instance. {{(pid=62519) _build_resources /opt/stack/nova/nova/compute/manager.py:2873}} [ 1874.465751] env[62519]: DEBUG oslo_vmware.api [None req-3d21eaea-4564-4ef0-83e7-f8dc5c87fa6d tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] Task: {'id': task-1803175, 'name': Destroy_Task} progress is 33%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1874.676992] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1803179, 'name': CreateVM_Task} progress is 25%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1874.840027] env[62519]: DEBUG nova.scheduler.client.report [None req-ff1af54b-4603-4ea1-8970-e122b48e647b tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Inventory has not changed for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 157, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1874.892297] env[62519]: DEBUG oslo_vmware.api [None req-940b526d-cc71-4b86-8030-ffe5ea468d54 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Task: {'id': task-1803180, 'name': CloneVM_Task} progress is 94%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1874.966586] env[62519]: DEBUG oslo_vmware.api [None req-3d21eaea-4564-4ef0-83e7-f8dc5c87fa6d tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] Task: {'id': task-1803175, 'name': Destroy_Task} progress is 33%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1875.177031] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1803179, 'name': CreateVM_Task} progress is 25%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1875.312790] env[62519]: DEBUG oslo_concurrency.lockutils [None req-88b0e9f4-13de-4192-9161-fe94ce7f8c9e tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Acquiring lock "eb5de0a4-0af3-4731-ab30-3ae3d72207a7" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1875.313108] env[62519]: DEBUG oslo_concurrency.lockutils [None req-88b0e9f4-13de-4192-9161-fe94ce7f8c9e tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Lock "eb5de0a4-0af3-4731-ab30-3ae3d72207a7" acquired by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1875.313345] env[62519]: DEBUG nova.compute.manager [None req-88b0e9f4-13de-4192-9161-fe94ce7f8c9e tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] [instance: eb5de0a4-0af3-4731-ab30-3ae3d72207a7] Going to confirm migration 5 {{(pid=62519) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:5235}} [ 1875.329041] env[62519]: DEBUG oslo_concurrency.lockutils [None req-d9dc84b1-1645-4129-b703-744d1038bd6f tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Acquiring lock "31db4b14-0ba3-4159-accc-31c21bd81322" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1875.329041] env[62519]: DEBUG 
oslo_concurrency.lockutils [None req-d9dc84b1-1645-4129-b703-744d1038bd6f tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Lock "31db4b14-0ba3-4159-accc-31c21bd81322" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1875.345648] env[62519]: DEBUG oslo_concurrency.lockutils [None req-ff1af54b-4603-4ea1-8970-e122b48e647b tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.919s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1875.364922] env[62519]: INFO nova.scheduler.client.report [None req-ff1af54b-4603-4ea1-8970-e122b48e647b tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Deleted allocations for instance 9327a897-8e4f-4c59-952e-aecfac4028e0 [ 1875.394639] env[62519]: DEBUG nova.objects.instance [None req-8565f1f2-914f-4323-b0fe-767409aeefcb tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Lazy-loading 'flavor' on Instance uuid f3665f89-1747-4567-9e56-c937d4ac81da {{(pid=62519) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1875.397021] env[62519]: DEBUG oslo_vmware.api [None req-940b526d-cc71-4b86-8030-ffe5ea468d54 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Task: {'id': task-1803180, 'name': CloneVM_Task} progress is 94%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1875.448371] env[62519]: DEBUG nova.compute.manager [None req-4de658fb-fc0f-466b-a6c9-861a7b1ad6fd tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] [instance: fc3beaba-2ad5-4598-b562-557fdd552b39] Start spawning the instance on the hypervisor. {{(pid=62519) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2647}} [ 1875.469455] env[62519]: DEBUG oslo_vmware.api [None req-3d21eaea-4564-4ef0-83e7-f8dc5c87fa6d tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] Task: {'id': task-1803175, 'name': Destroy_Task} progress is 33%. 
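The lockutils lines above pair every "acquired ... :: waited Ns" with a later '"released" ... :: held Ns', which is how contention on locks such as "compute_resources" shows up in the log. A rough equivalent built on a plain threading.Lock is sketched below; the timed_lock helper and its exact wording are assumptions for illustration, not oslo.concurrency's implementation.

```python
import threading
import time
from contextlib import contextmanager

_locks: dict[str, threading.Lock] = {}

@contextmanager
def timed_lock(name: str, owner: str):
    """Acquire a named lock and report how long we waited for it and held it."""
    lock = _locks.setdefault(name, threading.Lock())
    start = time.monotonic()
    lock.acquire()
    waited = time.monotonic() - start
    print(f'Lock "{name}" acquired by "{owner}" :: waited {waited:.3f}s')
    held_from = time.monotonic()
    try:
        yield
    finally:
        lock.release()
        held = time.monotonic() - held_from
        print(f'Lock "{name}" "released" by "{owner}" :: held {held:.3f}s')
```

Usage mirrors the records above, e.g. `with timed_lock("compute_resources", "ResourceTracker.instance_claim"): ...` around the claim work.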
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1875.477292] env[62519]: DEBUG nova.virt.hardware [None req-4de658fb-fc0f-466b-a6c9-861a7b1ad6fd tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T07:56:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T07:55:55Z,direct_url=,disk_format='vmdk',id=15793716-f1d9-4a86-9030-717adf498693,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4069337684d348e0a1ab4eb6a1a2b14d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T07:55:56Z,virtual_size=,visibility=), allow threads: False {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1875.477560] env[62519]: DEBUG nova.virt.hardware [None req-4de658fb-fc0f-466b-a6c9-861a7b1ad6fd tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] Flavor limits 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1875.477766] env[62519]: DEBUG nova.virt.hardware [None req-4de658fb-fc0f-466b-a6c9-861a7b1ad6fd tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] Image limits 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1875.477974] env[62519]: DEBUG nova.virt.hardware [None req-4de658fb-fc0f-466b-a6c9-861a7b1ad6fd tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] Flavor pref 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1875.478159] env[62519]: DEBUG nova.virt.hardware [None req-4de658fb-fc0f-466b-a6c9-861a7b1ad6fd tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] Image pref 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1875.478316] env[62519]: DEBUG nova.virt.hardware [None req-4de658fb-fc0f-466b-a6c9-861a7b1ad6fd tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1875.478528] env[62519]: DEBUG nova.virt.hardware [None req-4de658fb-fc0f-466b-a6c9-861a7b1ad6fd tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1875.478690] env[62519]: DEBUG nova.virt.hardware [None req-4de658fb-fc0f-466b-a6c9-861a7b1ad6fd tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62519) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1875.478854] env[62519]: DEBUG nova.virt.hardware [None 
req-4de658fb-fc0f-466b-a6c9-861a7b1ad6fd tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] Got 1 possible topologies {{(pid=62519) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1875.479030] env[62519]: DEBUG nova.virt.hardware [None req-4de658fb-fc0f-466b-a6c9-861a7b1ad6fd tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1875.479233] env[62519]: DEBUG nova.virt.hardware [None req-4de658fb-fc0f-466b-a6c9-861a7b1ad6fd tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1875.480059] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5aeb6925-1df0-4a4c-b5ea-7548455afdda {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1875.487776] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa8245d1-a3ce-4c68-a783-1bfc094f5577 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1875.680238] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1803179, 'name': CreateVM_Task} progress is 25%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1875.831933] env[62519]: DEBUG nova.compute.manager [None req-d9dc84b1-1645-4129-b703-744d1038bd6f tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] [instance: 31db4b14-0ba3-4159-accc-31c21bd81322] Starting instance... 
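The nova.virt.hardware block above walks from flavor and image limits (all unset, so the 65536 defaults apply) to a single possible topology of 1 socket x 1 core x 1 thread for the 1-vCPU m1.nano flavor. The sketch below reproduces only the enumeration step under those assumptions; it is a simplification, not Nova's _get_possible_cpu_topologies with its preference ordering.

```python
from itertools import product
from typing import NamedTuple

class CPUTopology(NamedTuple):
    sockets: int
    cores: int
    threads: int

def possible_topologies(vcpus: int,
                        max_sockets: int = 65536,
                        max_cores: int = 65536,
                        max_threads: int = 65536) -> list[CPUTopology]:
    """Enumerate every sockets*cores*threads factorisation of `vcpus`
    that fits within the given per-dimension maxima."""
    topologies = []
    for s, c, t in product(range(1, min(vcpus, max_sockets) + 1),
                           range(1, min(vcpus, max_cores) + 1),
                           range(1, min(vcpus, max_threads) + 1)):
        if s * c * t == vcpus:
            topologies.append(CPUTopology(s, c, t))
    return topologies

# For a 1-vCPU flavor with no limits set, only 1:1:1 qualifies, as logged above.
assert possible_topologies(1) == [CPUTopology(1, 1, 1)]
```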
{{(pid=62519) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2441}} [ 1875.874174] env[62519]: DEBUG oslo_concurrency.lockutils [None req-ff1af54b-4603-4ea1-8970-e122b48e647b tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Lock "9327a897-8e4f-4c59-952e-aecfac4028e0" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 9.157s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1875.888186] env[62519]: DEBUG oslo_concurrency.lockutils [None req-88b0e9f4-13de-4192-9161-fe94ce7f8c9e tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Acquiring lock "refresh_cache-eb5de0a4-0af3-4731-ab30-3ae3d72207a7" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1875.888382] env[62519]: DEBUG oslo_concurrency.lockutils [None req-88b0e9f4-13de-4192-9161-fe94ce7f8c9e tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Acquired lock "refresh_cache-eb5de0a4-0af3-4731-ab30-3ae3d72207a7" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1875.888556] env[62519]: DEBUG nova.network.neutron [None req-88b0e9f4-13de-4192-9161-fe94ce7f8c9e tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] [instance: eb5de0a4-0af3-4731-ab30-3ae3d72207a7] Building network info cache for instance {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1875.888735] env[62519]: DEBUG nova.objects.instance [None req-88b0e9f4-13de-4192-9161-fe94ce7f8c9e tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Lazy-loading 'info_cache' on Instance uuid eb5de0a4-0af3-4731-ab30-3ae3d72207a7 {{(pid=62519) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1875.896333] env[62519]: DEBUG oslo_vmware.api [None req-940b526d-cc71-4b86-8030-ffe5ea468d54 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Task: {'id': task-1803180, 'name': CloneVM_Task} progress is 94%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1875.899890] env[62519]: DEBUG oslo_concurrency.lockutils [None req-8565f1f2-914f-4323-b0fe-767409aeefcb tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Lock "f3665f89-1747-4567-9e56-c937d4ac81da" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.319s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1875.968194] env[62519]: DEBUG oslo_vmware.api [None req-3d21eaea-4564-4ef0-83e7-f8dc5c87fa6d tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] Task: {'id': task-1803175, 'name': Destroy_Task} progress is 33%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1876.178036] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1803179, 'name': CreateVM_Task} progress is 25%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1876.356484] env[62519]: DEBUG oslo_concurrency.lockutils [None req-d9dc84b1-1645-4129-b703-744d1038bd6f tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1876.356860] env[62519]: DEBUG oslo_concurrency.lockutils [None req-d9dc84b1-1645-4129-b703-744d1038bd6f tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1876.358349] env[62519]: INFO nova.compute.claims [None req-d9dc84b1-1645-4129-b703-744d1038bd6f tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] [instance: 31db4b14-0ba3-4159-accc-31c21bd81322] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1876.396441] env[62519]: DEBUG oslo_vmware.api [None req-940b526d-cc71-4b86-8030-ffe5ea468d54 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Task: {'id': task-1803180, 'name': CloneVM_Task} progress is 94%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1876.468315] env[62519]: DEBUG oslo_vmware.api [None req-3d21eaea-4564-4ef0-83e7-f8dc5c87fa6d tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] Task: {'id': task-1803175, 'name': Destroy_Task} progress is 33%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1876.678214] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1803179, 'name': CreateVM_Task} progress is 25%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1876.895017] env[62519]: DEBUG oslo_vmware.api [None req-940b526d-cc71-4b86-8030-ffe5ea468d54 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Task: {'id': task-1803180, 'name': CloneVM_Task} progress is 94%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1876.968889] env[62519]: DEBUG oslo_vmware.api [None req-3d21eaea-4564-4ef0-83e7-f8dc5c87fa6d tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] Task: {'id': task-1803175, 'name': Destroy_Task} progress is 33%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1876.987706] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-b5e9901c-e7db-4cf5-a185-5e35645884ad tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] [instance: 4a9656fd-2b9f-4dd6-8b71-39e55813f2f6] Unregistered the VM {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1876.988333] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-b5e9901c-e7db-4cf5-a185-5e35645884ad tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] [instance: 4a9656fd-2b9f-4dd6-8b71-39e55813f2f6] Deleting contents of the VM from datastore datastore1 {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1876.988333] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-b5e9901c-e7db-4cf5-a185-5e35645884ad tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Deleting the datastore file [datastore1] 4a9656fd-2b9f-4dd6-8b71-39e55813f2f6 {{(pid=62519) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1876.988558] env[62519]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-928b9651-e3ed-446c-b8a8-0f75a3a6d10e {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1876.998865] env[62519]: DEBUG oslo_vmware.api [None req-b5e9901c-e7db-4cf5-a185-5e35645884ad tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Waiting for the task: (returnval){ [ 1876.998865] env[62519]: value = "task-1803184" [ 1876.998865] env[62519]: _type = "Task" [ 1876.998865] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1877.013051] env[62519]: DEBUG oslo_vmware.api [None req-b5e9901c-e7db-4cf5-a185-5e35645884ad tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Task: {'id': task-1803184, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1877.115935] env[62519]: DEBUG nova.network.neutron [None req-88b0e9f4-13de-4192-9161-fe94ce7f8c9e tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] [instance: eb5de0a4-0af3-4731-ab30-3ae3d72207a7] Updating instance_info_cache with network_info: [{"id": "98c5e48e-5515-4c54-af43-86a9b283477d", "address": "fa:16:3e:72:79:66", "network": {"id": "f9454e9e-4102-407f-acc0-7afee44ec876", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-684678313-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "683d60927fdf424386ffcfaa344a7af6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "11b669be-fb26-4ef8-bdb6-c77ab9d06daf", "external-id": "nsx-vlan-transportzone-633", "segmentation_id": 633, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap98c5e48e-55", "ovs_interfaceid": "98c5e48e-5515-4c54-af43-86a9b283477d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1877.180860] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1803179, 'name': CreateVM_Task} progress is 99%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1877.397683] env[62519]: DEBUG oslo_vmware.api [None req-940b526d-cc71-4b86-8030-ffe5ea468d54 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Task: {'id': task-1803180, 'name': CloneVM_Task} progress is 94%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1877.472062] env[62519]: DEBUG oslo_vmware.api [None req-3d21eaea-4564-4ef0-83e7-f8dc5c87fa6d tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] Task: {'id': task-1803175, 'name': Destroy_Task, 'duration_secs': 7.184905} completed successfully. 
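The "Updating instance_info_cache with network_info: [...]" record above carries the full VIF description for the instance: port id, MAC, subnet, fixed IP and OVS binding details. When debugging from such logs it is often enough to pull out a few of those fields; below is a small helper run over a trimmed copy of that exact structure (the helper itself is illustrative, not a Nova API).

```python
# Reduced copy of the network_info list logged above for instance eb5de0a4-...
network_info = [{
    "id": "98c5e48e-5515-4c54-af43-86a9b283477d",
    "address": "fa:16:3e:72:79:66",
    "network": {
        "label": "tempest-DeleteServersTestJSON-684678313-network",
        "subnets": [{
            "cidr": "192.168.128.0/28",
            "gateway": {"address": "192.168.128.1", "version": 4},
            "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4}],
        }],
    },
    "type": "ovs",
    "devname": "tap98c5e48e-55",
    "ovs_interfaceid": "98c5e48e-5515-4c54-af43-86a9b283477d",
}]

def fixed_ips(vifs: list) -> list:
    """Return (device name, MAC, fixed IP) triples from a network_info list."""
    rows = []
    for vif in vifs:
        for subnet in vif.get("network", {}).get("subnets", []):
            for ip in subnet.get("ips", []):
                if ip.get("type") == "fixed":
                    rows.append((vif.get("devname"), vif.get("address"), ip["address"]))
    return rows

print(fixed_ips(network_info))
# [('tap98c5e48e-55', 'fa:16:3e:72:79:66', '192.168.128.14')]
```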
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1877.472347] env[62519]: INFO nova.virt.vmwareapi.vm_util [None req-3d21eaea-4564-4ef0-83e7-f8dc5c87fa6d tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] [instance: 70abb2e0-1ff2-49dd-b40f-9cac244a249e] Destroyed the VM [ 1877.472586] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-3d21eaea-4564-4ef0-83e7-f8dc5c87fa6d tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] [instance: 70abb2e0-1ff2-49dd-b40f-9cac244a249e] Deleting Snapshot of the VM instance {{(pid=62519) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1877.472852] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-96078cbb-57b0-4242-9bd4-1ebe7fa2ad5e {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1877.481504] env[62519]: DEBUG oslo_vmware.api [None req-3d21eaea-4564-4ef0-83e7-f8dc5c87fa6d tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] Waiting for the task: (returnval){ [ 1877.481504] env[62519]: value = "task-1803185" [ 1877.481504] env[62519]: _type = "Task" [ 1877.481504] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1877.490192] env[62519]: DEBUG oslo_vmware.api [None req-3d21eaea-4564-4ef0-83e7-f8dc5c87fa6d tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] Task: {'id': task-1803185, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1877.509515] env[62519]: DEBUG oslo_vmware.api [None req-b5e9901c-e7db-4cf5-a185-5e35645884ad tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Task: {'id': task-1803184, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.141342} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1877.509866] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-b5e9901c-e7db-4cf5-a185-5e35645884ad tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Deleted the datastore file {{(pid=62519) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1877.510107] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-b5e9901c-e7db-4cf5-a185-5e35645884ad tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] [instance: 4a9656fd-2b9f-4dd6-8b71-39e55813f2f6] Deleted contents of the VM from datastore datastore1 {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1877.510400] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-b5e9901c-e7db-4cf5-a185-5e35645884ad tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] [instance: 4a9656fd-2b9f-4dd6-8b71-39e55813f2f6] Instance destroyed {{(pid=62519) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1877.510682] env[62519]: INFO nova.compute.manager [None req-b5e9901c-e7db-4cf5-a185-5e35645884ad tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] [instance: 4a9656fd-2b9f-4dd6-8b71-39e55813f2f6] Took 7.77 seconds to destroy the instance on the hypervisor. [ 1877.511037] env[62519]: DEBUG oslo.service.loopingcall [None req-b5e9901c-e7db-4cf5-a185-5e35645884ad tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62519) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1877.513399] env[62519]: DEBUG nova.compute.manager [-] [instance: 4a9656fd-2b9f-4dd6-8b71-39e55813f2f6] Deallocating network for instance {{(pid=62519) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2297}} [ 1877.513536] env[62519]: DEBUG nova.network.neutron [-] [instance: 4a9656fd-2b9f-4dd6-8b71-39e55813f2f6] deallocate_for_instance() {{(pid=62519) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1877.601864] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a26098d-7868-47a5-a842-9bed7e6f0bf2 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1877.610256] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9fe0c9b-96ab-4084-ab48-e0f835eae5d0 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1877.640384] env[62519]: DEBUG oslo_concurrency.lockutils [None req-88b0e9f4-13de-4192-9161-fe94ce7f8c9e tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Releasing lock "refresh_cache-eb5de0a4-0af3-4731-ab30-3ae3d72207a7" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1877.640659] env[62519]: DEBUG nova.objects.instance [None req-88b0e9f4-13de-4192-9161-fe94ce7f8c9e tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Lazy-loading 'migration_context' on Instance uuid eb5de0a4-0af3-4731-ab30-3ae3d72207a7 {{(pid=62519) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1877.643330] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae63fe12-edcb-4de6-8871-49203e318a4a {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1877.653182] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d41d4c6-a3cd-429c-8f6c-12aa3d7f9b94 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1877.670441] env[62519]: DEBUG nova.compute.provider_tree [None req-d9dc84b1-1645-4129-b703-744d1038bd6f tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Inventory has not changed in ProviderTree for provider: f8ca0d98-9158-4b85-ae0e-b106f966dd44 {{(pid=62519) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1877.683937] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1803179, 'name': CreateVM_Task, 'duration_secs': 6.041723} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1877.683937] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 618a1db6-4056-4380-b5df-395ac14165a7] Created VM on the ESX host {{(pid=62519) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1877.683937] env[62519]: DEBUG oslo_concurrency.lockutils [None req-a5caa4da-9d7e-47d4-bf32-0295974b7632 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1877.683937] env[62519]: DEBUG oslo_concurrency.lockutils [None req-a5caa4da-9d7e-47d4-bf32-0295974b7632 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Acquired lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1877.684237] env[62519]: DEBUG oslo_concurrency.lockutils [None req-a5caa4da-9d7e-47d4-bf32-0295974b7632 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1877.684732] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c7475e6c-7fa5-42cf-984a-534710425e89 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1877.691967] env[62519]: DEBUG oslo_vmware.api [None req-a5caa4da-9d7e-47d4-bf32-0295974b7632 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Waiting for the task: (returnval){ [ 1877.691967] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]52fe160f-7f1e-d6aa-097b-e054caffa9f0" [ 1877.691967] env[62519]: _type = "Task" [ 1877.691967] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1877.702858] env[62519]: DEBUG oslo_vmware.api [None req-a5caa4da-9d7e-47d4-bf32-0295974b7632 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52fe160f-7f1e-d6aa-097b-e054caffa9f0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1877.897901] env[62519]: DEBUG oslo_vmware.api [None req-940b526d-cc71-4b86-8030-ffe5ea468d54 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Task: {'id': task-1803180, 'name': CloneVM_Task, 'duration_secs': 6.481138} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1877.898250] env[62519]: INFO nova.virt.vmwareapi.vmops [None req-940b526d-cc71-4b86-8030-ffe5ea468d54 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] [instance: 4a0f7975-5a07-4593-ae71-cabebdefe0fe] Created linked-clone VM from snapshot [ 1877.899059] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cff48f25-d010-4ce5-a075-d1fb7d1f88dd {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1877.908738] env[62519]: DEBUG nova.virt.vmwareapi.images [None req-940b526d-cc71-4b86-8030-ffe5ea468d54 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] [instance: 4a0f7975-5a07-4593-ae71-cabebdefe0fe] Uploading image abe7c237-ca29-424e-8add-cbb34800b1aa {{(pid=62519) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1877.937177] env[62519]: DEBUG oslo_vmware.rw_handles [None req-940b526d-cc71-4b86-8030-ffe5ea468d54 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1877.937177] env[62519]: value = "vm-373842" [ 1877.937177] env[62519]: _type = "VirtualMachine" [ 1877.937177] env[62519]: }. {{(pid=62519) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1877.937622] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-671a6c58-616b-484f-8824-e227d7acf2ff {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1877.946545] env[62519]: DEBUG oslo_vmware.rw_handles [None req-940b526d-cc71-4b86-8030-ffe5ea468d54 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Lease: (returnval){ [ 1877.946545] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]52f88212-6292-8686-e997-463ee60a94f9" [ 1877.946545] env[62519]: _type = "HttpNfcLease" [ 1877.946545] env[62519]: } obtained for exporting VM: (result){ [ 1877.946545] env[62519]: value = "vm-373842" [ 1877.946545] env[62519]: _type = "VirtualMachine" [ 1877.946545] env[62519]: }. {{(pid=62519) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1877.946974] env[62519]: DEBUG oslo_vmware.api [None req-940b526d-cc71-4b86-8030-ffe5ea468d54 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Waiting for the lease: (returnval){ [ 1877.946974] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]52f88212-6292-8686-e997-463ee60a94f9" [ 1877.946974] env[62519]: _type = "HttpNfcLease" [ 1877.946974] env[62519]: } to be ready. {{(pid=62519) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1877.954199] env[62519]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1877.954199] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]52f88212-6292-8686-e997-463ee60a94f9" [ 1877.954199] env[62519]: _type = "HttpNfcLease" [ 1877.954199] env[62519]: } is initializing. 
{{(pid=62519) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1877.991924] env[62519]: DEBUG oslo_vmware.api [None req-3d21eaea-4564-4ef0-83e7-f8dc5c87fa6d tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] Task: {'id': task-1803185, 'name': RemoveSnapshot_Task} progress is 97%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1878.143840] env[62519]: DEBUG nova.objects.base [None req-88b0e9f4-13de-4192-9161-fe94ce7f8c9e tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Object Instance lazy-loaded attributes: info_cache,migration_context {{(pid=62519) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1878.145020] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c041b89-6459-4bbb-bd87-8639245e0378 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1878.166812] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8515253f-f4e3-480f-9eae-9ae12ea39838 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1878.174347] env[62519]: DEBUG nova.scheduler.client.report [None req-d9dc84b1-1645-4129-b703-744d1038bd6f tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Inventory has not changed for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 157, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1878.177768] env[62519]: DEBUG oslo_vmware.api [None req-88b0e9f4-13de-4192-9161-fe94ce7f8c9e tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Waiting for the task: (returnval){ [ 1878.177768] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]52de114d-cc74-4975-b1ab-7812d2466f2f" [ 1878.177768] env[62519]: _type = "Task" [ 1878.177768] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1878.187055] env[62519]: DEBUG oslo_vmware.api [None req-88b0e9f4-13de-4192-9161-fe94ce7f8c9e tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52de114d-cc74-4975-b1ab-7812d2466f2f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1878.202915] env[62519]: DEBUG oslo_vmware.api [None req-a5caa4da-9d7e-47d4-bf32-0295974b7632 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52fe160f-7f1e-d6aa-097b-e054caffa9f0, 'name': SearchDatastore_Task, 'duration_secs': 0.015173} completed successfully. 
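The inventory payload repeated in the scheduler report lines (VCPU, MEMORY_MB and DISK_GB with totals, reserved amounts and allocation ratios) is what backs claims like the one for instance 31db4b14-... above: Placement's usable capacity per resource class is effectively (total - reserved) * allocation_ratio. A quick check against the logged numbers (a sketch of the arithmetic only, not the Placement service itself):

```python
# Inventory as logged for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44.
inventory = {
    "VCPU":      {"total": 48,     "reserved": 0,   "allocation_ratio": 4.0},
    "MEMORY_MB": {"total": 196590, "reserved": 512, "allocation_ratio": 1.0},
    "DISK_GB":   {"total": 400,    "reserved": 0,   "allocation_ratio": 1.0},
}

def effective_capacity(inv: dict) -> dict:
    """Capacity available for allocation per resource class."""
    return {rc: (f["total"] - f["reserved"]) * f["allocation_ratio"]
            for rc, f in inv.items()}

print(effective_capacity(inventory))
# {'VCPU': 192.0, 'MEMORY_MB': 196078.0, 'DISK_GB': 400.0}
```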
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1878.207034] env[62519]: DEBUG oslo_concurrency.lockutils [None req-a5caa4da-9d7e-47d4-bf32-0295974b7632 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Releasing lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1878.207347] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-a5caa4da-9d7e-47d4-bf32-0295974b7632 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] [instance: 618a1db6-4056-4380-b5df-395ac14165a7] Processing image 15793716-f1d9-4a86-9030-717adf498693 {{(pid=62519) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1878.207646] env[62519]: DEBUG oslo_concurrency.lockutils [None req-a5caa4da-9d7e-47d4-bf32-0295974b7632 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1878.207723] env[62519]: DEBUG oslo_concurrency.lockutils [None req-a5caa4da-9d7e-47d4-bf32-0295974b7632 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Acquired lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1878.208112] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-a5caa4da-9d7e-47d4-bf32-0295974b7632 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62519) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1878.208153] env[62519]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-0ab50008-07e7-4d81-81ec-51c9d334e80b {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1878.220443] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-a5caa4da-9d7e-47d4-bf32-0295974b7632 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62519) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1878.220513] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-a5caa4da-9d7e-47d4-bf32-0295974b7632 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62519) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1878.221259] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ad333a5e-3b45-4dc0-b385-96d45a887b31 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1878.227143] env[62519]: DEBUG oslo_vmware.api [None req-a5caa4da-9d7e-47d4-bf32-0295974b7632 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Waiting for the task: (returnval){ [ 1878.227143] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]52718eed-dc58-dc67-57d3-ad3450ede6da" [ 1878.227143] env[62519]: _type = "Task" [ 1878.227143] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1878.236161] env[62519]: DEBUG oslo_vmware.api [None req-a5caa4da-9d7e-47d4-bf32-0295974b7632 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52718eed-dc58-dc67-57d3-ad3450ede6da, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1878.456008] env[62519]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1878.456008] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]52f88212-6292-8686-e997-463ee60a94f9" [ 1878.456008] env[62519]: _type = "HttpNfcLease" [ 1878.456008] env[62519]: } is ready. {{(pid=62519) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1878.456444] env[62519]: DEBUG oslo_vmware.rw_handles [None req-940b526d-cc71-4b86-8030-ffe5ea468d54 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1878.456444] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]52f88212-6292-8686-e997-463ee60a94f9" [ 1878.456444] env[62519]: _type = "HttpNfcLease" [ 1878.456444] env[62519]: }. {{(pid=62519) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1878.457061] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b57fa3b-875d-481c-9efd-9a660be03574 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1878.466704] env[62519]: DEBUG oslo_vmware.rw_handles [None req-940b526d-cc71-4b86-8030-ffe5ea468d54 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/528eb81e-1d65-d01e-a2fc-6aa7b08f6f84/disk-0.vmdk from lease info. {{(pid=62519) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1878.466911] env[62519]: DEBUG oslo_vmware.rw_handles [None req-940b526d-cc71-4b86-8030-ffe5ea468d54 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Opening URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/528eb81e-1d65-d01e-a2fc-6aa7b08f6f84/disk-0.vmdk for reading. 
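Once the HttpNfcLease above becomes ready, the export proceeds by reading the VMDK URL found in the lease info (here an https://esx7c1n1... NFC endpoint) in chunks, while progress is periodically reported back on the lease, which is what the later HttpNfcLeaseProgress call is doing. Below is a generic, standard-library sketch of the read side only, assuming any reachable URL; it is not oslo_vmware's rw_handles implementation, which additionally manages lease keep-alive and TLS contexts.

```python
import urllib.request

def stream_disk(url: str, dest_path: str, chunk_size: int = 1 << 20,
                progress_cb=None) -> int:
    """Download a disk image in fixed-size chunks, returning bytes copied.

    progress_cb, if given, is called with the running byte count after each
    chunk -- the hook where a lease-progress update would go.
    """
    copied = 0
    with urllib.request.urlopen(url) as resp, open(dest_path, "wb") as out:
        while True:
            chunk = resp.read(chunk_size)
            if not chunk:
                break
            out.write(chunk)
            copied += len(chunk)
            if progress_cb:
                progress_cb(copied)
    return copied
```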
{{(pid=62519) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1878.533820] env[62519]: DEBUG oslo_vmware.api [None req-3d21eaea-4564-4ef0-83e7-f8dc5c87fa6d tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] Task: {'id': task-1803185, 'name': RemoveSnapshot_Task, 'duration_secs': 0.62865} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1878.534121] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-3d21eaea-4564-4ef0-83e7-f8dc5c87fa6d tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] [instance: 70abb2e0-1ff2-49dd-b40f-9cac244a249e] Deleted Snapshot of the VM instance {{(pid=62519) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1878.557326] env[62519]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-9c73746f-62dd-4ace-812f-da28248df442 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1878.605851] env[62519]: DEBUG nova.network.neutron [None req-1f95801e-70c9-40ad-906a-9a7b13227dbd tempest-ServerMetadataTestJSON-420459941 tempest-ServerMetadataTestJSON-420459941-project-member] [instance: 71edba06-1628-4749-8a51-5a9bcb003fda] Successfully updated port: ff83eb74-d7d7-49d4-8a99-813264a7d1ab {{(pid=62519) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1878.612532] env[62519]: DEBUG nova.network.neutron [-] [instance: 4a9656fd-2b9f-4dd6-8b71-39e55813f2f6] Updating instance_info_cache with network_info: [] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1878.682106] env[62519]: DEBUG oslo_concurrency.lockutils [None req-d9dc84b1-1645-4129-b703-744d1038bd6f tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.323s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1878.682106] env[62519]: DEBUG nova.compute.manager [None req-d9dc84b1-1645-4129-b703-744d1038bd6f tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] [instance: 31db4b14-0ba3-4159-accc-31c21bd81322] Start building networks asynchronously for instance. {{(pid=62519) _build_resources /opt/stack/nova/nova/compute/manager.py:2838}} [ 1878.698125] env[62519]: DEBUG oslo_vmware.api [None req-88b0e9f4-13de-4192-9161-fe94ce7f8c9e tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52de114d-cc74-4975-b1ab-7812d2466f2f, 'name': SearchDatastore_Task, 'duration_secs': 0.016319} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1878.698693] env[62519]: DEBUG oslo_concurrency.lockutils [None req-88b0e9f4-13de-4192-9161-fe94ce7f8c9e tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1878.699283] env[62519]: DEBUG oslo_concurrency.lockutils [None req-88b0e9f4-13de-4192-9161-fe94ce7f8c9e tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 0.001s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1878.740894] env[62519]: DEBUG oslo_vmware.api [None req-a5caa4da-9d7e-47d4-bf32-0295974b7632 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52718eed-dc58-dc67-57d3-ad3450ede6da, 'name': SearchDatastore_Task, 'duration_secs': 0.01789} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1878.744265] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ae1168ae-8123-4be3-b309-20db903f82fc {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1878.751789] env[62519]: DEBUG oslo_vmware.api [None req-a5caa4da-9d7e-47d4-bf32-0295974b7632 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Waiting for the task: (returnval){ [ 1878.751789] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]52aa2f91-2a29-5b5d-e806-b08d56bece09" [ 1878.751789] env[62519]: _type = "Task" [ 1878.751789] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1878.760563] env[62519]: DEBUG oslo_vmware.api [None req-a5caa4da-9d7e-47d4-bf32-0295974b7632 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52aa2f91-2a29-5b5d-e806-b08d56bece09, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1878.860686] env[62519]: DEBUG nova.network.neutron [None req-4de658fb-fc0f-466b-a6c9-861a7b1ad6fd tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] [instance: fc3beaba-2ad5-4598-b562-557fdd552b39] Successfully updated port: 64341ffd-0da7-4574-9702-3fc0a03eb0e3 {{(pid=62519) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1878.864093] env[62519]: DEBUG nova.network.neutron [None req-0ebc83ca-512d-4cc7-bfec-9a7d758e59d6 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] [instance: 10bfd4ac-6f11-4c96-87a0-ce74bc1193c4] Successfully updated port: c5603e79-74fa-4628-bc67-0351b0407810 {{(pid=62519) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1879.038855] env[62519]: WARNING nova.compute.manager [None req-3d21eaea-4564-4ef0-83e7-f8dc5c87fa6d tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] [instance: 70abb2e0-1ff2-49dd-b40f-9cac244a249e] Image not found during snapshot: nova.exception.ImageNotFound: Image 863e09af-587e-4095-b092-e1a3d9d743d3 could not be found. [ 1879.109014] env[62519]: DEBUG oslo_concurrency.lockutils [None req-1f95801e-70c9-40ad-906a-9a7b13227dbd tempest-ServerMetadataTestJSON-420459941 tempest-ServerMetadataTestJSON-420459941-project-member] Acquiring lock "refresh_cache-71edba06-1628-4749-8a51-5a9bcb003fda" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1879.109274] env[62519]: DEBUG oslo_concurrency.lockutils [None req-1f95801e-70c9-40ad-906a-9a7b13227dbd tempest-ServerMetadataTestJSON-420459941 tempest-ServerMetadataTestJSON-420459941-project-member] Acquired lock "refresh_cache-71edba06-1628-4749-8a51-5a9bcb003fda" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1879.109499] env[62519]: DEBUG nova.network.neutron [None req-1f95801e-70c9-40ad-906a-9a7b13227dbd tempest-ServerMetadataTestJSON-420459941 tempest-ServerMetadataTestJSON-420459941-project-member] [instance: 71edba06-1628-4749-8a51-5a9bcb003fda] Building network info cache for instance {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1879.115094] env[62519]: INFO nova.compute.manager [-] [instance: 4a9656fd-2b9f-4dd6-8b71-39e55813f2f6] Took 1.60 seconds to deallocate network for instance. [ 1879.185734] env[62519]: DEBUG nova.compute.utils [None req-d9dc84b1-1645-4129-b703-744d1038bd6f tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Using /dev/sd instead of None {{(pid=62519) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1879.188043] env[62519]: DEBUG nova.compute.manager [None req-d9dc84b1-1645-4129-b703-744d1038bd6f tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] [instance: 31db4b14-0ba3-4159-accc-31c21bd81322] Allocating IP information in the background. 
{{(pid=62519) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1989}} [ 1879.188233] env[62519]: DEBUG nova.network.neutron [None req-d9dc84b1-1645-4129-b703-744d1038bd6f tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] [instance: 31db4b14-0ba3-4159-accc-31c21bd81322] allocate_for_instance() {{(pid=62519) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1879.230454] env[62519]: DEBUG nova.policy [None req-d9dc84b1-1645-4129-b703-744d1038bd6f tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '81bb350c0ff54453b99b45ac84a82935', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '549cc35f5ff249f6bf22c67872883db0', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62519) authorize /opt/stack/nova/nova/policy.py:192}} [ 1879.263921] env[62519]: DEBUG oslo_vmware.api [None req-a5caa4da-9d7e-47d4-bf32-0295974b7632 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52aa2f91-2a29-5b5d-e806-b08d56bece09, 'name': SearchDatastore_Task, 'duration_secs': 0.019084} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1879.265942] env[62519]: DEBUG oslo_concurrency.lockutils [None req-a5caa4da-9d7e-47d4-bf32-0295974b7632 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Releasing lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1879.266369] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-a5caa4da-9d7e-47d4-bf32-0295974b7632 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk to [datastore1] 618a1db6-4056-4380-b5df-395ac14165a7/618a1db6-4056-4380-b5df-395ac14165a7.vmdk {{(pid=62519) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1879.266753] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-51567b8c-d519-4012-b662-c0a343210c92 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1879.277953] env[62519]: DEBUG oslo_vmware.api [None req-a5caa4da-9d7e-47d4-bf32-0295974b7632 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Waiting for the task: (returnval){ [ 1879.277953] env[62519]: value = "task-1803187" [ 1879.277953] env[62519]: _type = "Task" [ 1879.277953] env[62519]: } to complete. 
{{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1879.288498] env[62519]: DEBUG oslo_vmware.api [None req-a5caa4da-9d7e-47d4-bf32-0295974b7632 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Task: {'id': task-1803187, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1879.365195] env[62519]: DEBUG oslo_concurrency.lockutils [None req-4de658fb-fc0f-466b-a6c9-861a7b1ad6fd tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] Acquiring lock "refresh_cache-fc3beaba-2ad5-4598-b562-557fdd552b39" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1879.365368] env[62519]: DEBUG oslo_concurrency.lockutils [None req-4de658fb-fc0f-466b-a6c9-861a7b1ad6fd tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] Acquired lock "refresh_cache-fc3beaba-2ad5-4598-b562-557fdd552b39" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1879.365730] env[62519]: DEBUG nova.network.neutron [None req-4de658fb-fc0f-466b-a6c9-861a7b1ad6fd tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] [instance: fc3beaba-2ad5-4598-b562-557fdd552b39] Building network info cache for instance {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1879.369877] env[62519]: DEBUG oslo_concurrency.lockutils [None req-0ebc83ca-512d-4cc7-bfec-9a7d758e59d6 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Acquiring lock "refresh_cache-10bfd4ac-6f11-4c96-87a0-ce74bc1193c4" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1879.370815] env[62519]: DEBUG oslo_concurrency.lockutils [None req-0ebc83ca-512d-4cc7-bfec-9a7d758e59d6 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Acquired lock "refresh_cache-10bfd4ac-6f11-4c96-87a0-ce74bc1193c4" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1879.370815] env[62519]: DEBUG nova.network.neutron [None req-0ebc83ca-512d-4cc7-bfec-9a7d758e59d6 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] [instance: 10bfd4ac-6f11-4c96-87a0-ce74bc1193c4] Building network info cache for instance {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1879.484553] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65ab0bd3-a180-4b80-9559-054d7dea78ec {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1879.493373] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6467e5a2-2223-4b1c-975e-bb0a2ded7e10 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1879.528666] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f58fc9f7-697f-47ea-908c-99595f8a1a1a {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 
1879.537870] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7007721-506f-4bd7-8ba0-24ff0af648d3 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1879.544438] env[62519]: DEBUG nova.network.neutron [None req-d9dc84b1-1645-4129-b703-744d1038bd6f tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] [instance: 31db4b14-0ba3-4159-accc-31c21bd81322] Successfully created port: 082d375d-5e85-4e5e-a40e-661c492b5f5d {{(pid=62519) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1879.556242] env[62519]: DEBUG nova.compute.provider_tree [None req-88b0e9f4-13de-4192-9161-fe94ce7f8c9e tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Inventory has not changed in ProviderTree for provider: f8ca0d98-9158-4b85-ae0e-b106f966dd44 {{(pid=62519) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1879.624760] env[62519]: DEBUG oslo_concurrency.lockutils [None req-b5e9901c-e7db-4cf5-a185-5e35645884ad tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1879.664047] env[62519]: DEBUG nova.network.neutron [None req-1f95801e-70c9-40ad-906a-9a7b13227dbd tempest-ServerMetadataTestJSON-420459941 tempest-ServerMetadataTestJSON-420459941-project-member] [instance: 71edba06-1628-4749-8a51-5a9bcb003fda] Instance cache missing network info. {{(pid=62519) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1879.691746] env[62519]: DEBUG nova.compute.manager [None req-d9dc84b1-1645-4129-b703-744d1038bd6f tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] [instance: 31db4b14-0ba3-4159-accc-31c21bd81322] Start building block device mappings for instance. {{(pid=62519) _build_resources /opt/stack/nova/nova/compute/manager.py:2873}} [ 1879.792137] env[62519]: DEBUG oslo_vmware.api [None req-a5caa4da-9d7e-47d4-bf32-0295974b7632 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Task: {'id': task-1803187, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1879.971219] env[62519]: DEBUG nova.network.neutron [None req-4de658fb-fc0f-466b-a6c9-861a7b1ad6fd tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] [instance: fc3beaba-2ad5-4598-b562-557fdd552b39] Instance cache missing network info. {{(pid=62519) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1879.975663] env[62519]: WARNING nova.network.neutron [None req-0ebc83ca-512d-4cc7-bfec-9a7d758e59d6 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] [instance: 10bfd4ac-6f11-4c96-87a0-ce74bc1193c4] 3996e7f6-f093-4152-af91-6fb77f32a1c5 already exists in list: networks containing: ['3996e7f6-f093-4152-af91-6fb77f32a1c5']. 
ignoring it [ 1880.050039] env[62519]: DEBUG nova.network.neutron [None req-1f95801e-70c9-40ad-906a-9a7b13227dbd tempest-ServerMetadataTestJSON-420459941 tempest-ServerMetadataTestJSON-420459941-project-member] [instance: 71edba06-1628-4749-8a51-5a9bcb003fda] Updating instance_info_cache with network_info: [{"id": "ff83eb74-d7d7-49d4-8a99-813264a7d1ab", "address": "fa:16:3e:95:80:5c", "network": {"id": "4ce8c1b4-53ed-46b1-989a-160f430e476b", "bridge": "br-int", "label": "tempest-ServerMetadataTestJSON-1197347089-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "94d952826a35452f9ac28e0cbf14a980", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "eb2eaecd-9701-4504-9fcb-fb1a420ead72", "external-id": "nsx-vlan-transportzone-433", "segmentation_id": 433, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapff83eb74-d7", "ovs_interfaceid": "ff83eb74-d7d7-49d4-8a99-813264a7d1ab", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1880.063022] env[62519]: DEBUG nova.scheduler.client.report [None req-88b0e9f4-13de-4192-9161-fe94ce7f8c9e tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Inventory has not changed for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 157, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1880.197493] env[62519]: INFO nova.virt.block_device [None req-d9dc84b1-1645-4129-b703-744d1038bd6f tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] [instance: 31db4b14-0ba3-4159-accc-31c21bd81322] Booting with volume 84bda086-0de7-4b24-8a1f-6ff7c11594b6 at /dev/sda [ 1880.235577] env[62519]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-f20207eb-1947-4065-bb04-222ce6c12dc7 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1880.251372] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3910ca01-f8dc-429e-84ae-47b9c5afd552 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1880.262422] env[62519]: DEBUG nova.network.neutron [None req-4de658fb-fc0f-466b-a6c9-861a7b1ad6fd tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] [instance: fc3beaba-2ad5-4598-b562-557fdd552b39] Updating instance_info_cache with network_info: [{"id": "64341ffd-0da7-4574-9702-3fc0a03eb0e3", "address": 
"fa:16:3e:62:ec:77", "network": {"id": "227244cd-a495-40a7-b8e1-1fb111ecd3ef", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-482185283-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "91c1732890db42f98f538f7a5ac0d542", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2bf99f85-3a5c-47c6-a603-e215be6ab0bd", "external-id": "nsx-vlan-transportzone-855", "segmentation_id": 855, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap64341ffd-0d", "ovs_interfaceid": "64341ffd-0da7-4574-9702-3fc0a03eb0e3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1880.293679] env[62519]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-b019d9ea-4fe0-48bd-be77-08eacb8c3f22 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1880.302932] env[62519]: DEBUG oslo_vmware.api [None req-a5caa4da-9d7e-47d4-bf32-0295974b7632 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Task: {'id': task-1803187, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.659158} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1880.306389] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-a5caa4da-9d7e-47d4-bf32-0295974b7632 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk to [datastore1] 618a1db6-4056-4380-b5df-395ac14165a7/618a1db6-4056-4380-b5df-395ac14165a7.vmdk {{(pid=62519) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1880.306699] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-a5caa4da-9d7e-47d4-bf32-0295974b7632 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] [instance: 618a1db6-4056-4380-b5df-395ac14165a7] Extending root virtual disk to 1048576 {{(pid=62519) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1880.307175] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-8c3ec939-02db-4cbd-8269-75e2e029a1b3 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1880.311623] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dbffad05-cfb3-4fe0-8808-d5d493807c3a {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1880.330049] env[62519]: DEBUG oslo_vmware.api [None req-a5caa4da-9d7e-47d4-bf32-0295974b7632 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Waiting for the task: (returnval){ [ 
1880.330049] env[62519]: value = "task-1803188" [ 1880.330049] env[62519]: _type = "Task" [ 1880.330049] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1880.339147] env[62519]: DEBUG oslo_vmware.api [None req-a5caa4da-9d7e-47d4-bf32-0295974b7632 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Task: {'id': task-1803188, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1880.354885] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48be94a3-c430-485a-a63d-bbf8387f6a33 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1880.363195] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d97fd65-9842-48c8-9cee-bffa4f0d0f16 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1880.378112] env[62519]: DEBUG nova.virt.block_device [None req-d9dc84b1-1645-4129-b703-744d1038bd6f tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] [instance: 31db4b14-0ba3-4159-accc-31c21bd81322] Updating existing volume attachment record: 4db207c6-cf05-4b07-adc1-87db81940d8b {{(pid=62519) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1880.511989] env[62519]: DEBUG nova.network.neutron [None req-0ebc83ca-512d-4cc7-bfec-9a7d758e59d6 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] [instance: 10bfd4ac-6f11-4c96-87a0-ce74bc1193c4] Updating instance_info_cache with network_info: [{"id": "037e7926-5eac-487b-a2fa-4124ead561df", "address": "fa:16:3e:f0:b9:0e", "network": {"id": "3996e7f6-f093-4152-af91-6fb77f32a1c5", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-2013009403-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.197", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "63a46158057949478e5c79fbe0d4d5d4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3093647a-bab7-4562-ada0-428725e8c0fc", "external-id": "nsx-vlan-transportzone-660", "segmentation_id": 660, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap037e7926-5e", "ovs_interfaceid": "037e7926-5eac-487b-a2fa-4124ead561df", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "c5603e79-74fa-4628-bc67-0351b0407810", "address": "fa:16:3e:7f:dd:7f", "network": {"id": "3996e7f6-f093-4152-af91-6fb77f32a1c5", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-2013009403-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": 
{}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "63a46158057949478e5c79fbe0d4d5d4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3093647a-bab7-4562-ada0-428725e8c0fc", "external-id": "nsx-vlan-transportzone-660", "segmentation_id": 660, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc5603e79-74", "ovs_interfaceid": "c5603e79-74fa-4628-bc67-0351b0407810", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1880.553124] env[62519]: DEBUG oslo_concurrency.lockutils [None req-1f95801e-70c9-40ad-906a-9a7b13227dbd tempest-ServerMetadataTestJSON-420459941 tempest-ServerMetadataTestJSON-420459941-project-member] Releasing lock "refresh_cache-71edba06-1628-4749-8a51-5a9bcb003fda" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1880.553568] env[62519]: DEBUG nova.compute.manager [None req-1f95801e-70c9-40ad-906a-9a7b13227dbd tempest-ServerMetadataTestJSON-420459941 tempest-ServerMetadataTestJSON-420459941-project-member] [instance: 71edba06-1628-4749-8a51-5a9bcb003fda] Instance network_info: |[{"id": "ff83eb74-d7d7-49d4-8a99-813264a7d1ab", "address": "fa:16:3e:95:80:5c", "network": {"id": "4ce8c1b4-53ed-46b1-989a-160f430e476b", "bridge": "br-int", "label": "tempest-ServerMetadataTestJSON-1197347089-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "94d952826a35452f9ac28e0cbf14a980", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "eb2eaecd-9701-4504-9fcb-fb1a420ead72", "external-id": "nsx-vlan-transportzone-433", "segmentation_id": 433, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapff83eb74-d7", "ovs_interfaceid": "ff83eb74-d7d7-49d4-8a99-813264a7d1ab", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62519) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2004}} [ 1880.554111] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-1f95801e-70c9-40ad-906a-9a7b13227dbd tempest-ServerMetadataTestJSON-420459941 tempest-ServerMetadataTestJSON-420459941-project-member] [instance: 71edba06-1628-4749-8a51-5a9bcb003fda] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:95:80:5c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'eb2eaecd-9701-4504-9fcb-fb1a420ead72', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'ff83eb74-d7d7-49d4-8a99-813264a7d1ab', 'vif_model': 'vmxnet3'}] {{(pid=62519) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1880.563239] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-1f95801e-70c9-40ad-906a-9a7b13227dbd 
tempest-ServerMetadataTestJSON-420459941 tempest-ServerMetadataTestJSON-420459941-project-member] Creating folder: Project (94d952826a35452f9ac28e0cbf14a980). Parent ref: group-v373567. {{(pid=62519) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1880.563889] env[62519]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-de072ee3-516c-4e45-995c-9f40e3e672d2 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1880.583973] env[62519]: INFO nova.virt.vmwareapi.vm_util [None req-1f95801e-70c9-40ad-906a-9a7b13227dbd tempest-ServerMetadataTestJSON-420459941 tempest-ServerMetadataTestJSON-420459941-project-member] Created folder: Project (94d952826a35452f9ac28e0cbf14a980) in parent group-v373567. [ 1880.584217] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-1f95801e-70c9-40ad-906a-9a7b13227dbd tempest-ServerMetadataTestJSON-420459941 tempest-ServerMetadataTestJSON-420459941-project-member] Creating folder: Instances. Parent ref: group-v373843. {{(pid=62519) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1880.585292] env[62519]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-eb81dbec-83dd-4c15-9377-27a0cd757f3a {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1880.598236] env[62519]: INFO nova.virt.vmwareapi.vm_util [None req-1f95801e-70c9-40ad-906a-9a7b13227dbd tempest-ServerMetadataTestJSON-420459941 tempest-ServerMetadataTestJSON-420459941-project-member] Created folder: Instances in parent group-v373843. [ 1880.598568] env[62519]: DEBUG oslo.service.loopingcall [None req-1f95801e-70c9-40ad-906a-9a7b13227dbd tempest-ServerMetadataTestJSON-420459941 tempest-ServerMetadataTestJSON-420459941-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62519) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1880.598816] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 71edba06-1628-4749-8a51-5a9bcb003fda] Creating VM on the ESX host {{(pid=62519) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1880.599138] env[62519]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-fd8ceae7-094d-4764-ba6f-0c711f094267 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1880.623125] env[62519]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1880.623125] env[62519]: value = "task-1803191" [ 1880.623125] env[62519]: _type = "Task" [ 1880.623125] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1880.632180] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1803191, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1880.765875] env[62519]: DEBUG oslo_concurrency.lockutils [None req-4de658fb-fc0f-466b-a6c9-861a7b1ad6fd tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] Releasing lock "refresh_cache-fc3beaba-2ad5-4598-b562-557fdd552b39" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1880.766481] env[62519]: DEBUG nova.compute.manager [None req-4de658fb-fc0f-466b-a6c9-861a7b1ad6fd tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] [instance: fc3beaba-2ad5-4598-b562-557fdd552b39] Instance network_info: |[{"id": "64341ffd-0da7-4574-9702-3fc0a03eb0e3", "address": "fa:16:3e:62:ec:77", "network": {"id": "227244cd-a495-40a7-b8e1-1fb111ecd3ef", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-482185283-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "91c1732890db42f98f538f7a5ac0d542", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2bf99f85-3a5c-47c6-a603-e215be6ab0bd", "external-id": "nsx-vlan-transportzone-855", "segmentation_id": 855, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap64341ffd-0d", "ovs_interfaceid": "64341ffd-0da7-4574-9702-3fc0a03eb0e3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62519) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2004}} [ 1880.767105] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-4de658fb-fc0f-466b-a6c9-861a7b1ad6fd tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] [instance: fc3beaba-2ad5-4598-b562-557fdd552b39] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:62:ec:77', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '2bf99f85-3a5c-47c6-a603-e215be6ab0bd', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '64341ffd-0da7-4574-9702-3fc0a03eb0e3', 'vif_model': 'vmxnet3'}] {{(pid=62519) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1880.779587] env[62519]: DEBUG oslo.service.loopingcall [None req-4de658fb-fc0f-466b-a6c9-861a7b1ad6fd tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62519) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1880.779979] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: fc3beaba-2ad5-4598-b562-557fdd552b39] Creating VM on the ESX host {{(pid=62519) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1880.780235] env[62519]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-1d591089-743b-4948-9945-9d2e81064c7c {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1880.804542] env[62519]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1880.804542] env[62519]: value = "task-1803192" [ 1880.804542] env[62519]: _type = "Task" [ 1880.804542] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1880.813221] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1803192, 'name': CreateVM_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1880.843142] env[62519]: DEBUG oslo_vmware.api [None req-a5caa4da-9d7e-47d4-bf32-0295974b7632 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Task: {'id': task-1803188, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.074887} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1880.844069] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-a5caa4da-9d7e-47d4-bf32-0295974b7632 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] [instance: 618a1db6-4056-4380-b5df-395ac14165a7] Extended root virtual disk {{(pid=62519) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1880.844587] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f8f5b4f-cfad-494b-9ea5-95a79bc0bf50 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1880.869641] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-a5caa4da-9d7e-47d4-bf32-0295974b7632 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] [instance: 618a1db6-4056-4380-b5df-395ac14165a7] Reconfiguring VM instance instance-00000067 to attach disk [datastore1] 618a1db6-4056-4380-b5df-395ac14165a7/618a1db6-4056-4380-b5df-395ac14165a7.vmdk or device None with type sparse {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1880.870019] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-588ae86f-298c-4090-bd47-37d2bbd9ce46 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1880.893652] env[62519]: DEBUG oslo_vmware.api [None req-a5caa4da-9d7e-47d4-bf32-0295974b7632 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Waiting for the task: (returnval){ [ 1880.893652] env[62519]: value = "task-1803193" [ 1880.893652] env[62519]: _type = "Task" [ 1880.893652] env[62519]: } to complete. 
{{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1880.903203] env[62519]: DEBUG oslo_vmware.api [None req-a5caa4da-9d7e-47d4-bf32-0295974b7632 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Task: {'id': task-1803193, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1881.015538] env[62519]: DEBUG oslo_concurrency.lockutils [None req-0ebc83ca-512d-4cc7-bfec-9a7d758e59d6 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Releasing lock "refresh_cache-10bfd4ac-6f11-4c96-87a0-ce74bc1193c4" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1881.016017] env[62519]: DEBUG oslo_concurrency.lockutils [None req-0ebc83ca-512d-4cc7-bfec-9a7d758e59d6 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Acquiring lock "10bfd4ac-6f11-4c96-87a0-ce74bc1193c4" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1881.016254] env[62519]: DEBUG oslo_concurrency.lockutils [None req-0ebc83ca-512d-4cc7-bfec-9a7d758e59d6 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Acquired lock "10bfd4ac-6f11-4c96-87a0-ce74bc1193c4" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1881.017107] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39f08b12-f0aa-4685-960b-59f70b10786b {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1881.051129] env[62519]: DEBUG nova.virt.hardware [None req-0ebc83ca-512d-4cc7-bfec-9a7d758e59d6 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T07:56:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=,min_ram=,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1881.051504] env[62519]: DEBUG nova.virt.hardware [None req-0ebc83ca-512d-4cc7-bfec-9a7d758e59d6 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Flavor limits 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1881.051741] env[62519]: DEBUG nova.virt.hardware [None req-0ebc83ca-512d-4cc7-bfec-9a7d758e59d6 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Image limits 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1881.052091] env[62519]: DEBUG nova.virt.hardware [None req-0ebc83ca-512d-4cc7-bfec-9a7d758e59d6 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] 
Flavor pref 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1881.052364] env[62519]: DEBUG nova.virt.hardware [None req-0ebc83ca-512d-4cc7-bfec-9a7d758e59d6 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Image pref 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1881.052625] env[62519]: DEBUG nova.virt.hardware [None req-0ebc83ca-512d-4cc7-bfec-9a7d758e59d6 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1881.053029] env[62519]: DEBUG nova.virt.hardware [None req-0ebc83ca-512d-4cc7-bfec-9a7d758e59d6 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1881.053451] env[62519]: DEBUG nova.virt.hardware [None req-0ebc83ca-512d-4cc7-bfec-9a7d758e59d6 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62519) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1881.053713] env[62519]: DEBUG nova.virt.hardware [None req-0ebc83ca-512d-4cc7-bfec-9a7d758e59d6 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Got 1 possible topologies {{(pid=62519) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1881.054067] env[62519]: DEBUG nova.virt.hardware [None req-0ebc83ca-512d-4cc7-bfec-9a7d758e59d6 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1881.054368] env[62519]: DEBUG nova.virt.hardware [None req-0ebc83ca-512d-4cc7-bfec-9a7d758e59d6 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1881.063296] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-0ebc83ca-512d-4cc7-bfec-9a7d758e59d6 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] [instance: 10bfd4ac-6f11-4c96-87a0-ce74bc1193c4] Reconfiguring VM to attach interface {{(pid=62519) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1923}} [ 1881.063728] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0b7d4f02-850a-4502-877b-be6a55984a27 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1881.077263] env[62519]: DEBUG oslo_concurrency.lockutils [None req-88b0e9f4-13de-4192-9161-fe94ce7f8c9e tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 
2.378s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1881.080995] env[62519]: DEBUG oslo_concurrency.lockutils [None req-b5e9901c-e7db-4cf5-a185-5e35645884ad tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.457s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1881.081268] env[62519]: DEBUG nova.objects.instance [None req-b5e9901c-e7db-4cf5-a185-5e35645884ad tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Lazy-loading 'resources' on Instance uuid 4a9656fd-2b9f-4dd6-8b71-39e55813f2f6 {{(pid=62519) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1881.088619] env[62519]: DEBUG oslo_vmware.api [None req-0ebc83ca-512d-4cc7-bfec-9a7d758e59d6 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Waiting for the task: (returnval){ [ 1881.088619] env[62519]: value = "task-1803194" [ 1881.088619] env[62519]: _type = "Task" [ 1881.088619] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1881.100820] env[62519]: DEBUG oslo_vmware.api [None req-0ebc83ca-512d-4cc7-bfec-9a7d758e59d6 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Task: {'id': task-1803194, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1881.135363] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1803191, 'name': CreateVM_Task, 'duration_secs': 0.484562} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1881.135545] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 71edba06-1628-4749-8a51-5a9bcb003fda] Created VM on the ESX host {{(pid=62519) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1881.136323] env[62519]: DEBUG oslo_concurrency.lockutils [None req-1f95801e-70c9-40ad-906a-9a7b13227dbd tempest-ServerMetadataTestJSON-420459941 tempest-ServerMetadataTestJSON-420459941-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1881.136501] env[62519]: DEBUG oslo_concurrency.lockutils [None req-1f95801e-70c9-40ad-906a-9a7b13227dbd tempest-ServerMetadataTestJSON-420459941 tempest-ServerMetadataTestJSON-420459941-project-member] Acquired lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1881.136931] env[62519]: DEBUG oslo_concurrency.lockutils [None req-1f95801e-70c9-40ad-906a-9a7b13227dbd tempest-ServerMetadataTestJSON-420459941 tempest-ServerMetadataTestJSON-420459941-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1881.137244] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9935a5a0-b69e-4c78-ac21-2041906adcfa {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1881.143352] env[62519]: DEBUG oslo_vmware.api [None req-1f95801e-70c9-40ad-906a-9a7b13227dbd tempest-ServerMetadataTestJSON-420459941 tempest-ServerMetadataTestJSON-420459941-project-member] Waiting for the task: (returnval){ [ 1881.143352] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]52e62107-4782-0a6f-38e8-0eeccc8d98be" [ 1881.143352] env[62519]: _type = "Task" [ 1881.143352] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1881.155469] env[62519]: DEBUG oslo_vmware.api [None req-1f95801e-70c9-40ad-906a-9a7b13227dbd tempest-ServerMetadataTestJSON-420459941 tempest-ServerMetadataTestJSON-420459941-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52e62107-4782-0a6f-38e8-0eeccc8d98be, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1881.315357] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1803192, 'name': CreateVM_Task, 'duration_secs': 0.509172} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1881.315539] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: fc3beaba-2ad5-4598-b562-557fdd552b39] Created VM on the ESX host {{(pid=62519) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1881.316342] env[62519]: DEBUG oslo_concurrency.lockutils [None req-4de658fb-fc0f-466b-a6c9-861a7b1ad6fd tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1881.404428] env[62519]: DEBUG oslo_vmware.api [None req-a5caa4da-9d7e-47d4-bf32-0295974b7632 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Task: {'id': task-1803193, 'name': ReconfigVM_Task, 'duration_secs': 0.324012} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1881.404712] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-a5caa4da-9d7e-47d4-bf32-0295974b7632 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] [instance: 618a1db6-4056-4380-b5df-395ac14165a7] Reconfigured VM instance instance-00000067 to attach disk [datastore1] 618a1db6-4056-4380-b5df-395ac14165a7/618a1db6-4056-4380-b5df-395ac14165a7.vmdk or device None with type sparse {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1881.405370] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-df4d5591-1b09-41db-a5fd-308e5241ba8b {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1881.413469] env[62519]: DEBUG oslo_vmware.api [None req-a5caa4da-9d7e-47d4-bf32-0295974b7632 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Waiting for the task: (returnval){ [ 1881.413469] env[62519]: value = "task-1803195" [ 1881.413469] env[62519]: _type = "Task" [ 1881.413469] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1881.422989] env[62519]: DEBUG oslo_vmware.api [None req-a5caa4da-9d7e-47d4-bf32-0295974b7632 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Task: {'id': task-1803195, 'name': Rename_Task} progress is 5%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1881.429814] env[62519]: DEBUG nova.network.neutron [None req-d9dc84b1-1645-4129-b703-744d1038bd6f tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] [instance: 31db4b14-0ba3-4159-accc-31c21bd81322] Successfully updated port: 082d375d-5e85-4e5e-a40e-661c492b5f5d {{(pid=62519) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1881.599463] env[62519]: DEBUG oslo_vmware.api [None req-0ebc83ca-512d-4cc7-bfec-9a7d758e59d6 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Task: {'id': task-1803194, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1881.636512] env[62519]: INFO nova.scheduler.client.report [None req-88b0e9f4-13de-4192-9161-fe94ce7f8c9e tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Deleted allocation for migration 778f14b8-0ef2-4f73-ab48-4844bd1b034d [ 1881.656016] env[62519]: DEBUG oslo_vmware.api [None req-1f95801e-70c9-40ad-906a-9a7b13227dbd tempest-ServerMetadataTestJSON-420459941 tempest-ServerMetadataTestJSON-420459941-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52e62107-4782-0a6f-38e8-0eeccc8d98be, 'name': SearchDatastore_Task, 'duration_secs': 0.016025} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1881.658871] env[62519]: DEBUG oslo_concurrency.lockutils [None req-1f95801e-70c9-40ad-906a-9a7b13227dbd tempest-ServerMetadataTestJSON-420459941 tempest-ServerMetadataTestJSON-420459941-project-member] Releasing lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1881.659169] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-1f95801e-70c9-40ad-906a-9a7b13227dbd tempest-ServerMetadataTestJSON-420459941 tempest-ServerMetadataTestJSON-420459941-project-member] [instance: 71edba06-1628-4749-8a51-5a9bcb003fda] Processing image 15793716-f1d9-4a86-9030-717adf498693 {{(pid=62519) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1881.659406] env[62519]: DEBUG oslo_concurrency.lockutils [None req-1f95801e-70c9-40ad-906a-9a7b13227dbd tempest-ServerMetadataTestJSON-420459941 tempest-ServerMetadataTestJSON-420459941-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1881.659551] env[62519]: DEBUG oslo_concurrency.lockutils [None req-1f95801e-70c9-40ad-906a-9a7b13227dbd tempest-ServerMetadataTestJSON-420459941 tempest-ServerMetadataTestJSON-420459941-project-member] Acquired lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1881.659759] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-1f95801e-70c9-40ad-906a-9a7b13227dbd tempest-ServerMetadataTestJSON-420459941 tempest-ServerMetadataTestJSON-420459941-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62519) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1881.660320] env[62519]: DEBUG oslo_concurrency.lockutils [None req-4de658fb-fc0f-466b-a6c9-861a7b1ad6fd tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] Acquired lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1881.660632] env[62519]: DEBUG oslo_concurrency.lockutils [None req-4de658fb-fc0f-466b-a6c9-861a7b1ad6fd tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] Acquired external semaphore "[datastore1] 
devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1881.660872] env[62519]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f120e031-aded-4b57-9c2a-db0f17308a07 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1881.662830] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f5677499-7fc9-4394-b819-58d4710156aa {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1881.669989] env[62519]: DEBUG oslo_vmware.api [None req-4de658fb-fc0f-466b-a6c9-861a7b1ad6fd tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] Waiting for the task: (returnval){ [ 1881.669989] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]5275bd72-eb00-f09e-b1e2-213414b7b4fc" [ 1881.669989] env[62519]: _type = "Task" [ 1881.669989] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1881.677767] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-1f95801e-70c9-40ad-906a-9a7b13227dbd tempest-ServerMetadataTestJSON-420459941 tempest-ServerMetadataTestJSON-420459941-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62519) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1881.677767] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-1f95801e-70c9-40ad-906a-9a7b13227dbd tempest-ServerMetadataTestJSON-420459941 tempest-ServerMetadataTestJSON-420459941-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62519) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1881.679173] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-47be8d6a-a1ec-4cc3-b7ac-b664ffb685d1 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1881.685055] env[62519]: DEBUG oslo_vmware.api [None req-4de658fb-fc0f-466b-a6c9-861a7b1ad6fd tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]5275bd72-eb00-f09e-b1e2-213414b7b4fc, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1881.691226] env[62519]: DEBUG oslo_vmware.api [None req-1f95801e-70c9-40ad-906a-9a7b13227dbd tempest-ServerMetadataTestJSON-420459941 tempest-ServerMetadataTestJSON-420459941-project-member] Waiting for the task: (returnval){ [ 1881.691226] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]5272f47f-2ee6-9e73-0b83-adc533bf28d3" [ 1881.691226] env[62519]: _type = "Task" [ 1881.691226] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1881.700542] env[62519]: DEBUG oslo_vmware.api [None req-1f95801e-70c9-40ad-906a-9a7b13227dbd tempest-ServerMetadataTestJSON-420459941 tempest-ServerMetadataTestJSON-420459941-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]5272f47f-2ee6-9e73-0b83-adc533bf28d3, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1881.835459] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8fca0c6-d597-4440-8718-fc8c7fee4462 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1881.844504] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27c31376-4dfe-4cb5-9a4c-df8ae6fbafa4 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1881.878449] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-228ca5ff-f354-4f95-9b55-80a8406d0082 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1881.887089] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1cda4de8-8484-458d-8577-1a303314e21b {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1881.901219] env[62519]: DEBUG nova.compute.provider_tree [None req-b5e9901c-e7db-4cf5-a185-5e35645884ad tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Inventory has not changed in ProviderTree for provider: f8ca0d98-9158-4b85-ae0e-b106f966dd44 {{(pid=62519) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1881.924252] env[62519]: DEBUG oslo_vmware.api [None req-a5caa4da-9d7e-47d4-bf32-0295974b7632 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Task: {'id': task-1803195, 'name': Rename_Task, 'duration_secs': 0.144868} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1881.924536] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-a5caa4da-9d7e-47d4-bf32-0295974b7632 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] [instance: 618a1db6-4056-4380-b5df-395ac14165a7] Powering on the VM {{(pid=62519) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1881.924783] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-d09d474d-9fad-4939-ac38-2709243e2d97 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1881.932064] env[62519]: DEBUG oslo_vmware.api [None req-a5caa4da-9d7e-47d4-bf32-0295974b7632 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Waiting for the task: (returnval){ [ 1881.932064] env[62519]: value = "task-1803196" [ 1881.932064] env[62519]: _type = "Task" [ 1881.932064] env[62519]: } to complete. 
{{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1881.935388] env[62519]: DEBUG oslo_concurrency.lockutils [None req-d9dc84b1-1645-4129-b703-744d1038bd6f tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Acquiring lock "refresh_cache-31db4b14-0ba3-4159-accc-31c21bd81322" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1881.935522] env[62519]: DEBUG oslo_concurrency.lockutils [None req-d9dc84b1-1645-4129-b703-744d1038bd6f tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Acquired lock "refresh_cache-31db4b14-0ba3-4159-accc-31c21bd81322" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1881.935669] env[62519]: DEBUG nova.network.neutron [None req-d9dc84b1-1645-4129-b703-744d1038bd6f tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] [instance: 31db4b14-0ba3-4159-accc-31c21bd81322] Building network info cache for instance {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1881.942740] env[62519]: DEBUG oslo_vmware.api [None req-a5caa4da-9d7e-47d4-bf32-0295974b7632 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Task: {'id': task-1803196, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1882.100640] env[62519]: DEBUG oslo_vmware.api [None req-0ebc83ca-512d-4cc7-bfec-9a7d758e59d6 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Task: {'id': task-1803194, 'name': ReconfigVM_Task, 'duration_secs': 0.733468} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1882.101208] env[62519]: DEBUG oslo_concurrency.lockutils [None req-0ebc83ca-512d-4cc7-bfec-9a7d758e59d6 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Releasing lock "10bfd4ac-6f11-4c96-87a0-ce74bc1193c4" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1882.101437] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-0ebc83ca-512d-4cc7-bfec-9a7d758e59d6 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] [instance: 10bfd4ac-6f11-4c96-87a0-ce74bc1193c4] Reconfigured VM to attach interface {{(pid=62519) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1937}} [ 1882.144775] env[62519]: DEBUG oslo_concurrency.lockutils [None req-88b0e9f4-13de-4192-9161-fe94ce7f8c9e tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Lock "eb5de0a4-0af3-4731-ab30-3ae3d72207a7" "released" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: held 6.831s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1882.182223] env[62519]: DEBUG oslo_vmware.api [None req-4de658fb-fc0f-466b-a6c9-861a7b1ad6fd tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]5275bd72-eb00-f09e-b1e2-213414b7b4fc, 'name': SearchDatastore_Task, 'duration_secs': 0.026534} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1882.182543] env[62519]: DEBUG oslo_concurrency.lockutils [None req-4de658fb-fc0f-466b-a6c9-861a7b1ad6fd tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] Releasing lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1882.182787] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-4de658fb-fc0f-466b-a6c9-861a7b1ad6fd tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] [instance: fc3beaba-2ad5-4598-b562-557fdd552b39] Processing image 15793716-f1d9-4a86-9030-717adf498693 {{(pid=62519) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1882.182997] env[62519]: DEBUG oslo_concurrency.lockutils [None req-4de658fb-fc0f-466b-a6c9-861a7b1ad6fd tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1882.203137] env[62519]: DEBUG oslo_vmware.api [None req-1f95801e-70c9-40ad-906a-9a7b13227dbd tempest-ServerMetadataTestJSON-420459941 tempest-ServerMetadataTestJSON-420459941-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]5272f47f-2ee6-9e73-0b83-adc533bf28d3, 'name': SearchDatastore_Task, 'duration_secs': 0.018905} completed successfully. 
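The "Acquiring lock" / "Acquired lock" / "Releasing lock" triplets around the image-cache VMDK come from oslo.concurrency's lockutils, which emits those DEBUG messages itself. A hedged sketch of the same pattern; the lock name below is a placeholder shaped like the one in the log.

    from oslo_concurrency import lockutils

    # Placeholder lock name modelled on the image-cache lock seen above.
    CACHE_LOCK = '[datastore1] devstack-image-cache_base/<image-id>.vmdk'

    # The context manager logs the "Acquiring"/"Acquired"/"Releasing" lines
    # (lockutils.py:310/313/331); only one greenthread works on the cached
    # VMDK at a time.
    with lockutils.lock(CACHE_LOCK):
        pass  # e.g. copy the cached image into the instance directory

    # An equivalent decorator form also exists:
    # @lockutils.synchronized(CACHE_LOCK)
    # def copy_cached_image(...): ...
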
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1882.203993] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-debf0c95-da76-4a7b-9987-e1125de58aaa {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1882.210754] env[62519]: DEBUG oslo_vmware.api [None req-1f95801e-70c9-40ad-906a-9a7b13227dbd tempest-ServerMetadataTestJSON-420459941 tempest-ServerMetadataTestJSON-420459941-project-member] Waiting for the task: (returnval){ [ 1882.210754] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]521de860-b70f-5c2b-18ae-460f396205f2" [ 1882.210754] env[62519]: _type = "Task" [ 1882.210754] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1882.219839] env[62519]: DEBUG oslo_vmware.api [None req-1f95801e-70c9-40ad-906a-9a7b13227dbd tempest-ServerMetadataTestJSON-420459941 tempest-ServerMetadataTestJSON-420459941-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]521de860-b70f-5c2b-18ae-460f396205f2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1882.404404] env[62519]: DEBUG nova.scheduler.client.report [None req-b5e9901c-e7db-4cf5-a185-5e35645884ad tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Inventory has not changed for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 157, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1882.447085] env[62519]: DEBUG oslo_vmware.api [None req-a5caa4da-9d7e-47d4-bf32-0295974b7632 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Task: {'id': task-1803196, 'name': PowerOnVM_Task} progress is 100%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1882.473727] env[62519]: DEBUG nova.compute.manager [None req-d9dc84b1-1645-4129-b703-744d1038bd6f tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] [instance: 31db4b14-0ba3-4159-accc-31c21bd81322] Start spawning the instance on the hypervisor. 
{{(pid=62519) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2647}} [ 1882.474476] env[62519]: DEBUG nova.virt.hardware [None req-d9dc84b1-1645-4129-b703-744d1038bd6f tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T07:56:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=0,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=1073741824,status='active',tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1882.474743] env[62519]: DEBUG nova.virt.hardware [None req-d9dc84b1-1645-4129-b703-744d1038bd6f tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Flavor limits 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1882.474945] env[62519]: DEBUG nova.virt.hardware [None req-d9dc84b1-1645-4129-b703-744d1038bd6f tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Image limits 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1882.475216] env[62519]: DEBUG nova.virt.hardware [None req-d9dc84b1-1645-4129-b703-744d1038bd6f tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Flavor pref 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1882.475405] env[62519]: DEBUG nova.virt.hardware [None req-d9dc84b1-1645-4129-b703-744d1038bd6f tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Image pref 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1882.475595] env[62519]: DEBUG nova.virt.hardware [None req-d9dc84b1-1645-4129-b703-744d1038bd6f tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1882.475897] env[62519]: DEBUG nova.virt.hardware [None req-d9dc84b1-1645-4129-b703-744d1038bd6f tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1882.476117] env[62519]: DEBUG nova.virt.hardware [None req-d9dc84b1-1645-4129-b703-744d1038bd6f tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62519) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1882.476323] env[62519]: DEBUG nova.virt.hardware [None req-d9dc84b1-1645-4129-b703-744d1038bd6f tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Got 1 possible topologies {{(pid=62519) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1882.476535] env[62519]: DEBUG nova.virt.hardware [None req-d9dc84b1-1645-4129-b703-744d1038bd6f tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1882.476756] env[62519]: DEBUG nova.virt.hardware [None req-d9dc84b1-1645-4129-b703-744d1038bd6f tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1882.477898] env[62519]: DEBUG nova.network.neutron [None req-d9dc84b1-1645-4129-b703-744d1038bd6f tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] [instance: 31db4b14-0ba3-4159-accc-31c21bd81322] Instance cache missing network info. {{(pid=62519) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1882.480907] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12e683e7-bf21-4d19-911b-5dc6d77a92d7 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1882.492581] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af8e2d7c-0fc9-479b-9f87-ba96192e1765 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1882.606475] env[62519]: DEBUG oslo_concurrency.lockutils [None req-0ebc83ca-512d-4cc7-bfec-9a7d758e59d6 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Lock "interface-10bfd4ac-6f11-4c96-87a0-ce74bc1193c4-None" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 11.046s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1882.645510] env[62519]: DEBUG nova.network.neutron [None req-d9dc84b1-1645-4129-b703-744d1038bd6f tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] [instance: 31db4b14-0ba3-4159-accc-31c21bd81322] Updating instance_info_cache with network_info: [{"id": "082d375d-5e85-4e5e-a40e-661c492b5f5d", "address": "fa:16:3e:4a:40:42", "network": {"id": "ddacabe4-9723-4a0b-9642-4c3ca54cd05f", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1355796152-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "549cc35f5ff249f6bf22c67872883db0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bc1e16db-ad3b-4b7f-ab64-4609c87abac0", "external-id": "nsx-vlan-transportzone-500", "segmentation_id": 500, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap082d375d-5e", "ovs_interfaceid": "082d375d-5e85-4e5e-a40e-661c492b5f5d", "qbh_params": null, "qbg_params": null, 
"active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1882.723102] env[62519]: DEBUG oslo_vmware.api [None req-1f95801e-70c9-40ad-906a-9a7b13227dbd tempest-ServerMetadataTestJSON-420459941 tempest-ServerMetadataTestJSON-420459941-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]521de860-b70f-5c2b-18ae-460f396205f2, 'name': SearchDatastore_Task, 'duration_secs': 0.010037} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1882.723102] env[62519]: DEBUG oslo_concurrency.lockutils [None req-1f95801e-70c9-40ad-906a-9a7b13227dbd tempest-ServerMetadataTestJSON-420459941 tempest-ServerMetadataTestJSON-420459941-project-member] Releasing lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1882.723303] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-1f95801e-70c9-40ad-906a-9a7b13227dbd tempest-ServerMetadataTestJSON-420459941 tempest-ServerMetadataTestJSON-420459941-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk to [datastore1] 71edba06-1628-4749-8a51-5a9bcb003fda/71edba06-1628-4749-8a51-5a9bcb003fda.vmdk {{(pid=62519) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1882.723587] env[62519]: DEBUG oslo_concurrency.lockutils [None req-4de658fb-fc0f-466b-a6c9-861a7b1ad6fd tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] Acquired lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1882.723893] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-4de658fb-fc0f-466b-a6c9-861a7b1ad6fd tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62519) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1882.725114] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-466886f9-736f-40a0-aba0-24f9884803a1 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1882.726512] env[62519]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b320ae13-492a-4c00-8dab-b6a30ad58a93 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1882.734330] env[62519]: DEBUG oslo_vmware.api [None req-1f95801e-70c9-40ad-906a-9a7b13227dbd tempest-ServerMetadataTestJSON-420459941 tempest-ServerMetadataTestJSON-420459941-project-member] Waiting for the task: (returnval){ [ 1882.734330] env[62519]: value = "task-1803197" [ 1882.734330] env[62519]: _type = "Task" [ 1882.734330] env[62519]: } to complete. 
{{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1882.738888] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-4de658fb-fc0f-466b-a6c9-861a7b1ad6fd tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62519) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1882.739079] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-4de658fb-fc0f-466b-a6c9-861a7b1ad6fd tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62519) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1882.739833] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-674c6099-cb47-4ab6-9ac5-ae4cef429ad9 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1882.745162] env[62519]: DEBUG oslo_vmware.api [None req-1f95801e-70c9-40ad-906a-9a7b13227dbd tempest-ServerMetadataTestJSON-420459941 tempest-ServerMetadataTestJSON-420459941-project-member] Task: {'id': task-1803197, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1882.749184] env[62519]: DEBUG oslo_vmware.api [None req-4de658fb-fc0f-466b-a6c9-861a7b1ad6fd tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] Waiting for the task: (returnval){ [ 1882.749184] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]52c33dea-feec-cf42-0aea-27586cd3e184" [ 1882.749184] env[62519]: _type = "Task" [ 1882.749184] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1882.758292] env[62519]: DEBUG oslo_vmware.api [None req-4de658fb-fc0f-466b-a6c9-861a7b1ad6fd tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52c33dea-feec-cf42-0aea-27586cd3e184, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1882.910381] env[62519]: DEBUG oslo_concurrency.lockutils [None req-b5e9901c-e7db-4cf5-a185-5e35645884ad tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.829s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1882.934423] env[62519]: INFO nova.scheduler.client.report [None req-b5e9901c-e7db-4cf5-a185-5e35645884ad tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Deleted allocations for instance 4a9656fd-2b9f-4dd6-8b71-39e55813f2f6 [ 1882.952689] env[62519]: DEBUG oslo_vmware.api [None req-a5caa4da-9d7e-47d4-bf32-0295974b7632 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Task: {'id': task-1803196, 'name': PowerOnVM_Task, 'duration_secs': 0.55654} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1882.952689] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-a5caa4da-9d7e-47d4-bf32-0295974b7632 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] [instance: 618a1db6-4056-4380-b5df-395ac14165a7] Powered on the VM {{(pid=62519) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1882.952689] env[62519]: INFO nova.compute.manager [None req-a5caa4da-9d7e-47d4-bf32-0295974b7632 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] [instance: 618a1db6-4056-4380-b5df-395ac14165a7] Took 14.16 seconds to spawn the instance on the hypervisor. [ 1882.952689] env[62519]: DEBUG nova.compute.manager [None req-a5caa4da-9d7e-47d4-bf32-0295974b7632 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] [instance: 618a1db6-4056-4380-b5df-395ac14165a7] Checking state {{(pid=62519) _get_power_state /opt/stack/nova/nova/compute/manager.py:1799}} [ 1882.954346] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47dcb618-f98c-4477-b4ab-125f6461f639 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1883.149048] env[62519]: DEBUG oslo_concurrency.lockutils [None req-d9dc84b1-1645-4129-b703-744d1038bd6f tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Releasing lock "refresh_cache-31db4b14-0ba3-4159-accc-31c21bd81322" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1883.149409] env[62519]: DEBUG nova.compute.manager [None req-d9dc84b1-1645-4129-b703-744d1038bd6f tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] [instance: 31db4b14-0ba3-4159-accc-31c21bd81322] Instance network_info: |[{"id": "082d375d-5e85-4e5e-a40e-661c492b5f5d", "address": "fa:16:3e:4a:40:42", "network": {"id": "ddacabe4-9723-4a0b-9642-4c3ca54cd05f", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1355796152-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "549cc35f5ff249f6bf22c67872883db0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bc1e16db-ad3b-4b7f-ab64-4609c87abac0", "external-id": "nsx-vlan-transportzone-500", "segmentation_id": 500, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap082d375d-5e", "ovs_interfaceid": "082d375d-5e85-4e5e-a40e-661c492b5f5d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62519) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2004}} [ 1883.151163] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-d9dc84b1-1645-4129-b703-744d1038bd6f tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] [instance: 31db4b14-0ba3-4159-accc-31c21bd81322] 
Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:4a:40:42', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'bc1e16db-ad3b-4b7f-ab64-4609c87abac0', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '082d375d-5e85-4e5e-a40e-661c492b5f5d', 'vif_model': 'vmxnet3'}] {{(pid=62519) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1883.158714] env[62519]: DEBUG oslo.service.loopingcall [None req-d9dc84b1-1645-4129-b703-744d1038bd6f tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62519) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1883.159567] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 31db4b14-0ba3-4159-accc-31c21bd81322] Creating VM on the ESX host {{(pid=62519) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1883.159847] env[62519]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-5d388077-caf6-4635-8d1d-7d8457ed2884 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1883.184338] env[62519]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1883.184338] env[62519]: value = "task-1803198" [ 1883.184338] env[62519]: _type = "Task" [ 1883.184338] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1883.198645] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1803198, 'name': CreateVM_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1883.246415] env[62519]: DEBUG oslo_vmware.api [None req-1f95801e-70c9-40ad-906a-9a7b13227dbd tempest-ServerMetadataTestJSON-420459941 tempest-ServerMetadataTestJSON-420459941-project-member] Task: {'id': task-1803197, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.497796} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1883.246677] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-1f95801e-70c9-40ad-906a-9a7b13227dbd tempest-ServerMetadataTestJSON-420459941 tempest-ServerMetadataTestJSON-420459941-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk to [datastore1] 71edba06-1628-4749-8a51-5a9bcb003fda/71edba06-1628-4749-8a51-5a9bcb003fda.vmdk {{(pid=62519) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1883.247023] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-1f95801e-70c9-40ad-906a-9a7b13227dbd tempest-ServerMetadataTestJSON-420459941 tempest-ServerMetadataTestJSON-420459941-project-member] [instance: 71edba06-1628-4749-8a51-5a9bcb003fda] Extending root virtual disk to 1048576 {{(pid=62519) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1883.247352] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-26ce0efb-1687-4d38-8b69-266e4a7a03c4 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1883.260810] env[62519]: DEBUG oslo_vmware.api [None req-4de658fb-fc0f-466b-a6c9-861a7b1ad6fd tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52c33dea-feec-cf42-0aea-27586cd3e184, 'name': SearchDatastore_Task, 'duration_secs': 0.019124} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1883.262837] env[62519]: DEBUG oslo_vmware.api [None req-1f95801e-70c9-40ad-906a-9a7b13227dbd tempest-ServerMetadataTestJSON-420459941 tempest-ServerMetadataTestJSON-420459941-project-member] Waiting for the task: (returnval){ [ 1883.262837] env[62519]: value = "task-1803199" [ 1883.262837] env[62519]: _type = "Task" [ 1883.262837] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1883.263203] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-08e79ce7-5cb2-4968-8048-5ba75fc328f9 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1883.273230] env[62519]: DEBUG oslo_vmware.api [None req-4de658fb-fc0f-466b-a6c9-861a7b1ad6fd tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] Waiting for the task: (returnval){ [ 1883.273230] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]5213366e-b761-0ce3-54db-18105b1cd998" [ 1883.273230] env[62519]: _type = "Task" [ 1883.273230] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1883.277157] env[62519]: DEBUG oslo_vmware.api [None req-1f95801e-70c9-40ad-906a-9a7b13227dbd tempest-ServerMetadataTestJSON-420459941 tempest-ServerMetadataTestJSON-420459941-project-member] Task: {'id': task-1803199, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1883.286248] env[62519]: DEBUG oslo_vmware.api [None req-4de658fb-fc0f-466b-a6c9-861a7b1ad6fd tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]5213366e-b761-0ce3-54db-18105b1cd998, 'name': SearchDatastore_Task, 'duration_secs': 0.010328} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1883.286520] env[62519]: DEBUG oslo_concurrency.lockutils [None req-4de658fb-fc0f-466b-a6c9-861a7b1ad6fd tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] Releasing lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1883.286776] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-4de658fb-fc0f-466b-a6c9-861a7b1ad6fd tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk to [datastore1] fc3beaba-2ad5-4598-b562-557fdd552b39/fc3beaba-2ad5-4598-b562-557fdd552b39.vmdk {{(pid=62519) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1883.287050] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-da9f5420-1a92-4cdf-b5ea-24830b8dcd09 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1883.294244] env[62519]: DEBUG oslo_vmware.api [None req-4de658fb-fc0f-466b-a6c9-861a7b1ad6fd tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] Waiting for the task: (returnval){ [ 1883.294244] env[62519]: value = "task-1803200" [ 1883.294244] env[62519]: _type = "Task" [ 1883.294244] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1883.302950] env[62519]: DEBUG oslo_vmware.api [None req-4de658fb-fc0f-466b-a6c9-861a7b1ad6fd tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] Task: {'id': task-1803200, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1883.447581] env[62519]: DEBUG oslo_concurrency.lockutils [None req-b5e9901c-e7db-4cf5-a185-5e35645884ad tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Lock "4a9656fd-2b9f-4dd6-8b71-39e55813f2f6" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 14.215s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1883.475182] env[62519]: INFO nova.compute.manager [None req-a5caa4da-9d7e-47d4-bf32-0295974b7632 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] [instance: 618a1db6-4056-4380-b5df-395ac14165a7] Took 19.31 seconds to build instance. [ 1883.695836] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1803198, 'name': CreateVM_Task} progress is 25%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1883.776086] env[62519]: DEBUG oslo_vmware.api [None req-1f95801e-70c9-40ad-906a-9a7b13227dbd tempest-ServerMetadataTestJSON-420459941 tempest-ServerMetadataTestJSON-420459941-project-member] Task: {'id': task-1803199, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.083614} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1883.776086] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-1f95801e-70c9-40ad-906a-9a7b13227dbd tempest-ServerMetadataTestJSON-420459941 tempest-ServerMetadataTestJSON-420459941-project-member] [instance: 71edba06-1628-4749-8a51-5a9bcb003fda] Extended root virtual disk {{(pid=62519) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1883.776550] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97eb58f9-8306-4291-bafd-b8ee0a324778 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1883.803996] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-1f95801e-70c9-40ad-906a-9a7b13227dbd tempest-ServerMetadataTestJSON-420459941 tempest-ServerMetadataTestJSON-420459941-project-member] [instance: 71edba06-1628-4749-8a51-5a9bcb003fda] Reconfiguring VM instance instance-00000068 to attach disk [datastore1] 71edba06-1628-4749-8a51-5a9bcb003fda/71edba06-1628-4749-8a51-5a9bcb003fda.vmdk or device None with type sparse {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1883.804365] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-57f98e6d-b315-4ccf-9227-9164d3d148de {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1883.828979] env[62519]: DEBUG oslo_vmware.api [None req-4de658fb-fc0f-466b-a6c9-861a7b1ad6fd tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] Task: {'id': task-1803200, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.48455} completed successfully. 
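The disk preparation recorded here (CopyVirtualDisk_Task from devstack-image-cache_base into the instance directory, followed by ExtendVirtualDisk_Task up to the flavor's root size) follows the same invoke-then-wait shape. A sketch under the same placeholder session as above; datastore paths and the datacenter moref are illustrative, not taken from this log.

    from oslo_vmware import api as vmware_api
    from oslo_vmware import vim_util

    session = vmware_api.VMwareAPISession('vc.example.org', 'user', 'secret',
                                          api_retry_count=10,
                                          task_poll_interval=0.5)
    dc_ref = vim_util.get_moref('datacenter-2', 'Datacenter')  # placeholder

    src = '[datastore1] devstack-image-cache_base/<image-id>/<image-id>.vmdk'
    dst = '[datastore1] <instance-uuid>/<instance-uuid>.vmdk'

    disk_mgr = session.vim.service_content.virtualDiskManager

    # "Invoking VirtualDiskManager.CopyVirtualDisk_Task" plus the usual polls.
    copy_task = session.invoke_api(session.vim, 'CopyVirtualDisk_Task',
                                   disk_mgr, sourceName=src,
                                   sourceDatacenter=dc_ref, destName=dst)
    session.wait_for_task(copy_task)

    # "Extending root virtual disk to 1048576": the size is in KB, i.e. the
    # 1 GB root disk of the m1.nano flavor used by these tempest instances.
    extend_task = session.invoke_api(session.vim, 'ExtendVirtualDisk_Task',
                                     disk_mgr, name=dst, datacenter=dc_ref,
                                     newCapacityKb=1048576)
    session.wait_for_task(extend_task)
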
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1883.829318] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-4de658fb-fc0f-466b-a6c9-861a7b1ad6fd tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk to [datastore1] fc3beaba-2ad5-4598-b562-557fdd552b39/fc3beaba-2ad5-4598-b562-557fdd552b39.vmdk {{(pid=62519) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1883.829557] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-4de658fb-fc0f-466b-a6c9-861a7b1ad6fd tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] [instance: fc3beaba-2ad5-4598-b562-557fdd552b39] Extending root virtual disk to 1048576 {{(pid=62519) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1883.830197] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-f7872db9-9f3f-48d2-a6a2-4c8186fa5093 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1883.833382] env[62519]: DEBUG oslo_vmware.api [None req-1f95801e-70c9-40ad-906a-9a7b13227dbd tempest-ServerMetadataTestJSON-420459941 tempest-ServerMetadataTestJSON-420459941-project-member] Waiting for the task: (returnval){ [ 1883.833382] env[62519]: value = "task-1803201" [ 1883.833382] env[62519]: _type = "Task" [ 1883.833382] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1883.838249] env[62519]: DEBUG oslo_vmware.api [None req-4de658fb-fc0f-466b-a6c9-861a7b1ad6fd tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] Waiting for the task: (returnval){ [ 1883.838249] env[62519]: value = "task-1803202" [ 1883.838249] env[62519]: _type = "Task" [ 1883.838249] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1883.851099] env[62519]: DEBUG oslo_vmware.api [None req-1f95801e-70c9-40ad-906a-9a7b13227dbd tempest-ServerMetadataTestJSON-420459941 tempest-ServerMetadataTestJSON-420459941-project-member] Task: {'id': task-1803201, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1883.862542] env[62519]: DEBUG oslo_vmware.api [None req-4de658fb-fc0f-466b-a6c9-861a7b1ad6fd tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] Task: {'id': task-1803202, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1884.197060] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1803198, 'name': CreateVM_Task, 'duration_secs': 0.710991} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1884.197335] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 31db4b14-0ba3-4159-accc-31c21bd81322] Created VM on the ESX host {{(pid=62519) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1884.198161] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-d9dc84b1-1645-4129-b703-744d1038bd6f tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] [instance: 31db4b14-0ba3-4159-accc-31c21bd81322] Block device information present: {'root_device_name': '/dev/sda', 'image': [], 'ephemerals': [], 'block_device_mapping': [{'connection_info': {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-373835', 'volume_id': '84bda086-0de7-4b24-8a1f-6ff7c11594b6', 'name': 'volume-84bda086-0de7-4b24-8a1f-6ff7c11594b6', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '31db4b14-0ba3-4159-accc-31c21bd81322', 'attached_at': '', 'detached_at': '', 'volume_id': '84bda086-0de7-4b24-8a1f-6ff7c11594b6', 'serial': '84bda086-0de7-4b24-8a1f-6ff7c11594b6'}, 'guest_format': None, 'boot_index': 0, 'delete_on_termination': True, 'device_type': None, 'disk_bus': None, 'mount_device': '/dev/sda', 'attachment_id': '4db207c6-cf05-4b07-adc1-87db81940d8b', 'volume_type': None}], 'swap': None} {{(pid=62519) spawn /opt/stack/nova/nova/virt/vmwareapi/vmops.py:799}} [ 1884.198460] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-d9dc84b1-1645-4129-b703-744d1038bd6f tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] [instance: 31db4b14-0ba3-4159-accc-31c21bd81322] Root volume attach. Driver type: vmdk {{(pid=62519) attach_root_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:661}} [ 1884.199403] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4fc5e950-d594-4bad-84aa-c2b1bd344190 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1884.208930] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ecb416a-594c-4d2e-bfdd-7cc2d775c391 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1884.216153] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e436d4f-1848-4561-b9d4-b2cdef683ae4 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1884.222942] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.RelocateVM_Task with opID=oslo.vmware-bb3897a3-01ec-4b5d-9149-c2886ef31ed5 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1884.232287] env[62519]: DEBUG oslo_vmware.api [None req-d9dc84b1-1645-4129-b703-744d1038bd6f tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Waiting for the task: (returnval){ [ 1884.232287] env[62519]: value = "task-1803203" [ 1884.232287] env[62519]: _type = "Task" [ 1884.232287] env[62519]: } to complete. 
{{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1884.241423] env[62519]: DEBUG oslo_vmware.api [None req-d9dc84b1-1645-4129-b703-744d1038bd6f tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Task: {'id': task-1803203, 'name': RelocateVM_Task} progress is 5%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1884.350721] env[62519]: DEBUG oslo_vmware.api [None req-4de658fb-fc0f-466b-a6c9-861a7b1ad6fd tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] Task: {'id': task-1803202, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.119137} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1884.354431] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-4de658fb-fc0f-466b-a6c9-861a7b1ad6fd tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] [instance: fc3beaba-2ad5-4598-b562-557fdd552b39] Extended root virtual disk {{(pid=62519) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1884.355012] env[62519]: DEBUG oslo_concurrency.lockutils [None req-88b0e9f4-13de-4192-9161-fe94ce7f8c9e tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Acquiring lock "eb5de0a4-0af3-4731-ab30-3ae3d72207a7" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1884.355294] env[62519]: DEBUG oslo_concurrency.lockutils [None req-88b0e9f4-13de-4192-9161-fe94ce7f8c9e tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Lock "eb5de0a4-0af3-4731-ab30-3ae3d72207a7" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1884.355582] env[62519]: DEBUG oslo_concurrency.lockutils [None req-88b0e9f4-13de-4192-9161-fe94ce7f8c9e tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Acquiring lock "eb5de0a4-0af3-4731-ab30-3ae3d72207a7-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1884.355842] env[62519]: DEBUG oslo_concurrency.lockutils [None req-88b0e9f4-13de-4192-9161-fe94ce7f8c9e tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Lock "eb5de0a4-0af3-4731-ab30-3ae3d72207a7-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1884.356058] env[62519]: DEBUG oslo_concurrency.lockutils [None req-88b0e9f4-13de-4192-9161-fe94ce7f8c9e tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Lock "eb5de0a4-0af3-4731-ab30-3ae3d72207a7-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1884.357991] 
env[62519]: DEBUG oslo_vmware.api [None req-1f95801e-70c9-40ad-906a-9a7b13227dbd tempest-ServerMetadataTestJSON-420459941 tempest-ServerMetadataTestJSON-420459941-project-member] Task: {'id': task-1803201, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1884.358594] env[62519]: INFO nova.compute.manager [None req-88b0e9f4-13de-4192-9161-fe94ce7f8c9e tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] [instance: eb5de0a4-0af3-4731-ab30-3ae3d72207a7] Terminating instance [ 1884.360661] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77b06e8a-78ab-4bf8-8c90-85f578d6a982 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1884.385974] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-4de658fb-fc0f-466b-a6c9-861a7b1ad6fd tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] [instance: fc3beaba-2ad5-4598-b562-557fdd552b39] Reconfiguring VM instance instance-00000069 to attach disk [datastore1] fc3beaba-2ad5-4598-b562-557fdd552b39/fc3beaba-2ad5-4598-b562-557fdd552b39.vmdk or device None with type sparse {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1884.387036] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-fb48449c-840f-45f9-a2d4-295f374de58d {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1884.412475] env[62519]: DEBUG oslo_vmware.api [None req-4de658fb-fc0f-466b-a6c9-861a7b1ad6fd tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] Waiting for the task: (returnval){ [ 1884.412475] env[62519]: value = "task-1803204" [ 1884.412475] env[62519]: _type = "Task" [ 1884.412475] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1884.425379] env[62519]: DEBUG oslo_vmware.api [None req-4de658fb-fc0f-466b-a6c9-861a7b1ad6fd tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] Task: {'id': task-1803204, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1884.746492] env[62519]: DEBUG oslo_vmware.api [None req-d9dc84b1-1645-4129-b703-744d1038bd6f tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Task: {'id': task-1803203, 'name': RelocateVM_Task} progress is 20%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1884.846065] env[62519]: DEBUG oslo_vmware.api [None req-1f95801e-70c9-40ad-906a-9a7b13227dbd tempest-ServerMetadataTestJSON-420459941 tempest-ServerMetadataTestJSON-420459941-project-member] Task: {'id': task-1803201, 'name': ReconfigVM_Task, 'duration_secs': 0.528751} completed successfully. 
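The "Reconfiguring VM instance ... to attach disk ... with type sparse" entries correspond to building a VirtualMachineConfigSpec with a single 'add' device change and submitting it as ReconfigVM_Task. A rough sketch using the SOAP client factory that oslo.vmware exposes; the controller key, unit number, device key, and file path are illustrative values only.

    from oslo_vmware import api as vmware_api
    from oslo_vmware import vim_util

    session = vmware_api.VMwareAPISession('vc.example.org', 'user', 'secret',
                                          api_retry_count=10,
                                          task_poll_interval=0.5)
    vm_ref = vim_util.get_moref('vm-12345', 'VirtualMachine')  # placeholder

    factory = session.vim.client.factory

    # Flat VMDK backing pointing at the copied/extended disk.
    backing = factory.create('ns0:VirtualDiskFlatVer2BackingInfo')
    backing.fileName = '[datastore1] <instance-uuid>/<instance-uuid>.vmdk'
    backing.diskMode = 'persistent'

    disk = factory.create('ns0:VirtualDisk')
    disk.backing = backing
    disk.controllerKey = 100   # existing SCSI controller key (illustrative)
    disk.key = -100            # negative key: vCenter assigns the real one
    disk.unitNumber = 0
    disk.capacityInKB = 1048576

    change = factory.create('ns0:VirtualDeviceConfigSpec')
    change.operation = 'add'
    change.device = disk

    config_spec = factory.create('ns0:VirtualMachineConfigSpec')
    config_spec.deviceChange = [change]

    # "Invoking VirtualMachine.ReconfigVM_Task" followed by the progress polls.
    reconfig_task = session.invoke_api(session.vim, 'ReconfigVM_Task', vm_ref,
                                       spec=config_spec)
    session.wait_for_task(reconfig_task)
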
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1884.846409] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-1f95801e-70c9-40ad-906a-9a7b13227dbd tempest-ServerMetadataTestJSON-420459941 tempest-ServerMetadataTestJSON-420459941-project-member] [instance: 71edba06-1628-4749-8a51-5a9bcb003fda] Reconfigured VM instance instance-00000068 to attach disk [datastore1] 71edba06-1628-4749-8a51-5a9bcb003fda/71edba06-1628-4749-8a51-5a9bcb003fda.vmdk or device None with type sparse {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1884.847070] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-434a060d-88a3-4d02-ab86-0ec278778ed4 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1884.854585] env[62519]: DEBUG oslo_vmware.api [None req-1f95801e-70c9-40ad-906a-9a7b13227dbd tempest-ServerMetadataTestJSON-420459941 tempest-ServerMetadataTestJSON-420459941-project-member] Waiting for the task: (returnval){ [ 1884.854585] env[62519]: value = "task-1803205" [ 1884.854585] env[62519]: _type = "Task" [ 1884.854585] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1884.866451] env[62519]: DEBUG nova.compute.manager [None req-88b0e9f4-13de-4192-9161-fe94ce7f8c9e tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] [instance: eb5de0a4-0af3-4731-ab30-3ae3d72207a7] Start destroying the instance on the hypervisor. {{(pid=62519) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3166}} [ 1884.866451] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-88b0e9f4-13de-4192-9161-fe94ce7f8c9e tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] [instance: eb5de0a4-0af3-4731-ab30-3ae3d72207a7] Destroying instance {{(pid=62519) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1884.866616] env[62519]: DEBUG oslo_vmware.api [None req-1f95801e-70c9-40ad-906a-9a7b13227dbd tempest-ServerMetadataTestJSON-420459941 tempest-ServerMetadataTestJSON-420459941-project-member] Task: {'id': task-1803205, 'name': Rename_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1884.868390] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d0ddd69-cdf6-4530-877c-83103573f706 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1884.871991] env[62519]: DEBUG nova.compute.manager [req-849e3c77-d341-47ec-b94c-371afbcb5553 req-985b4714-6718-4e01-b901-627fc6b3cdc0 service nova] [instance: 4a9656fd-2b9f-4dd6-8b71-39e55813f2f6] Received event network-vif-deleted-0f634cea-fa73-4a6c-855a-cee16f97b053 {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1884.878903] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-88b0e9f4-13de-4192-9161-fe94ce7f8c9e tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] [instance: eb5de0a4-0af3-4731-ab30-3ae3d72207a7] Powering off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1884.879236] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b96ede13-c67a-446c-9a57-bccd0aa4cbae {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1884.886270] env[62519]: DEBUG oslo_vmware.api [None req-88b0e9f4-13de-4192-9161-fe94ce7f8c9e tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Waiting for the task: (returnval){ [ 1884.886270] env[62519]: value = "task-1803206" [ 1884.886270] env[62519]: _type = "Task" [ 1884.886270] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1884.901601] env[62519]: DEBUG oslo_vmware.api [None req-88b0e9f4-13de-4192-9161-fe94ce7f8c9e tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Task: {'id': task-1803206, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1884.923885] env[62519]: DEBUG oslo_vmware.api [None req-4de658fb-fc0f-466b-a6c9-861a7b1ad6fd tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] Task: {'id': task-1803204, 'name': ReconfigVM_Task, 'duration_secs': 0.328477} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1884.924267] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-4de658fb-fc0f-466b-a6c9-861a7b1ad6fd tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] [instance: fc3beaba-2ad5-4598-b562-557fdd552b39] Reconfigured VM instance instance-00000069 to attach disk [datastore1] fc3beaba-2ad5-4598-b562-557fdd552b39/fc3beaba-2ad5-4598-b562-557fdd552b39.vmdk or device None with type sparse {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1884.924971] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-10140cf0-bc62-4b80-8769-924f8a5cb48f {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1884.932709] env[62519]: DEBUG oslo_vmware.api [None req-4de658fb-fc0f-466b-a6c9-861a7b1ad6fd tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] Waiting for the task: (returnval){ [ 1884.932709] env[62519]: value = "task-1803207" [ 1884.932709] env[62519]: _type = "Task" [ 1884.932709] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1884.943858] env[62519]: DEBUG oslo_vmware.api [None req-4de658fb-fc0f-466b-a6c9-861a7b1ad6fd tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] Task: {'id': task-1803207, 'name': Rename_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1884.982831] env[62519]: DEBUG oslo_concurrency.lockutils [None req-a5caa4da-9d7e-47d4-bf32-0295974b7632 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Lock "618a1db6-4056-4380-b5df-395ac14165a7" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 21.827s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1885.250892] env[62519]: DEBUG oslo_vmware.api [None req-d9dc84b1-1645-4129-b703-744d1038bd6f tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Task: {'id': task-1803203, 'name': RelocateVM_Task} progress is 20%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1885.366243] env[62519]: DEBUG oslo_vmware.api [None req-1f95801e-70c9-40ad-906a-9a7b13227dbd tempest-ServerMetadataTestJSON-420459941 tempest-ServerMetadataTestJSON-420459941-project-member] Task: {'id': task-1803205, 'name': Rename_Task} progress is 14%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1885.397425] env[62519]: INFO nova.compute.manager [None req-47d089a4-bbf0-49c3-8821-7b27d9421f60 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] [instance: f3665f89-1747-4567-9e56-c937d4ac81da] Rescuing [ 1885.400634] env[62519]: DEBUG oslo_concurrency.lockutils [None req-47d089a4-bbf0-49c3-8821-7b27d9421f60 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Acquiring lock "refresh_cache-f3665f89-1747-4567-9e56-c937d4ac81da" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1885.400634] env[62519]: DEBUG oslo_concurrency.lockutils [None req-47d089a4-bbf0-49c3-8821-7b27d9421f60 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Acquired lock "refresh_cache-f3665f89-1747-4567-9e56-c937d4ac81da" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1885.400634] env[62519]: DEBUG nova.network.neutron [None req-47d089a4-bbf0-49c3-8821-7b27d9421f60 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] [instance: f3665f89-1747-4567-9e56-c937d4ac81da] Building network info cache for instance {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1885.400634] env[62519]: DEBUG oslo_vmware.api [None req-88b0e9f4-13de-4192-9161-fe94ce7f8c9e tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Task: {'id': task-1803206, 'name': PowerOffVM_Task, 'duration_secs': 0.204217} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1885.400634] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-88b0e9f4-13de-4192-9161-fe94ce7f8c9e tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] [instance: eb5de0a4-0af3-4731-ab30-3ae3d72207a7] Powered off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1885.400634] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-88b0e9f4-13de-4192-9161-fe94ce7f8c9e tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] [instance: eb5de0a4-0af3-4731-ab30-3ae3d72207a7] Unregistering the VM {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1885.400634] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d8cb29fd-db87-46fc-aa10-50366330852a {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1885.443754] env[62519]: DEBUG oslo_vmware.api [None req-4de658fb-fc0f-466b-a6c9-861a7b1ad6fd tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] Task: {'id': task-1803207, 'name': Rename_Task, 'duration_secs': 0.197761} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1885.446331] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-4de658fb-fc0f-466b-a6c9-861a7b1ad6fd tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] [instance: fc3beaba-2ad5-4598-b562-557fdd552b39] Powering on the VM {{(pid=62519) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1885.446331] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-d579fda9-3864-4d14-acf3-67975ee9a00c {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1885.455382] env[62519]: DEBUG oslo_vmware.api [None req-4de658fb-fc0f-466b-a6c9-861a7b1ad6fd tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] Waiting for the task: (returnval){ [ 1885.455382] env[62519]: value = "task-1803209" [ 1885.455382] env[62519]: _type = "Task" [ 1885.455382] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1885.459428] env[62519]: DEBUG oslo_concurrency.lockutils [None req-c9f0eb69-0e5a-4800-b99b-f2aeccf24d20 tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] Acquiring lock "70abb2e0-1ff2-49dd-b40f-9cac244a249e" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1885.459941] env[62519]: DEBUG oslo_concurrency.lockutils [None req-c9f0eb69-0e5a-4800-b99b-f2aeccf24d20 tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] Lock "70abb2e0-1ff2-49dd-b40f-9cac244a249e" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1885.460376] env[62519]: DEBUG oslo_concurrency.lockutils [None req-c9f0eb69-0e5a-4800-b99b-f2aeccf24d20 tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] Acquiring lock "70abb2e0-1ff2-49dd-b40f-9cac244a249e-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1885.460734] env[62519]: DEBUG oslo_concurrency.lockutils [None req-c9f0eb69-0e5a-4800-b99b-f2aeccf24d20 tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] Lock "70abb2e0-1ff2-49dd-b40f-9cac244a249e-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1885.461095] env[62519]: DEBUG oslo_concurrency.lockutils [None req-c9f0eb69-0e5a-4800-b99b-f2aeccf24d20 tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] Lock "70abb2e0-1ff2-49dd-b40f-9cac244a249e-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62519) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1885.463433] env[62519]: INFO nova.compute.manager [None req-c9f0eb69-0e5a-4800-b99b-f2aeccf24d20 tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] [instance: 70abb2e0-1ff2-49dd-b40f-9cac244a249e] Terminating instance [ 1885.470448] env[62519]: DEBUG oslo_vmware.api [None req-4de658fb-fc0f-466b-a6c9-861a7b1ad6fd tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] Task: {'id': task-1803209, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1885.537283] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-88b0e9f4-13de-4192-9161-fe94ce7f8c9e tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] [instance: eb5de0a4-0af3-4731-ab30-3ae3d72207a7] Unregistered the VM {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1885.538089] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-88b0e9f4-13de-4192-9161-fe94ce7f8c9e tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] [instance: eb5de0a4-0af3-4731-ab30-3ae3d72207a7] Deleting contents of the VM from datastore datastore1 {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1885.538603] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-88b0e9f4-13de-4192-9161-fe94ce7f8c9e tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Deleting the datastore file [datastore1] eb5de0a4-0af3-4731-ab30-3ae3d72207a7 {{(pid=62519) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1885.539481] env[62519]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-392f1972-c0ad-4d2c-a6f7-851ed9a67905 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1885.548043] env[62519]: DEBUG oslo_vmware.api [None req-88b0e9f4-13de-4192-9161-fe94ce7f8c9e tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Waiting for the task: (returnval){ [ 1885.548043] env[62519]: value = "task-1803210" [ 1885.548043] env[62519]: _type = "Task" [ 1885.548043] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1885.559151] env[62519]: DEBUG oslo_vmware.api [None req-88b0e9f4-13de-4192-9161-fe94ce7f8c9e tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Task: {'id': task-1803210, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1885.749051] env[62519]: DEBUG oslo_vmware.api [None req-d9dc84b1-1645-4129-b703-744d1038bd6f tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Task: {'id': task-1803203, 'name': RelocateVM_Task} progress is 20%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1885.867920] env[62519]: DEBUG oslo_vmware.api [None req-1f95801e-70c9-40ad-906a-9a7b13227dbd tempest-ServerMetadataTestJSON-420459941 tempest-ServerMetadataTestJSON-420459941-project-member] Task: {'id': task-1803205, 'name': Rename_Task} progress is 14%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1885.972043] env[62519]: DEBUG nova.compute.manager [None req-c9f0eb69-0e5a-4800-b99b-f2aeccf24d20 tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] [instance: 70abb2e0-1ff2-49dd-b40f-9cac244a249e] Start destroying the instance on the hypervisor. {{(pid=62519) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3166}} [ 1885.972406] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-c9f0eb69-0e5a-4800-b99b-f2aeccf24d20 tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] [instance: 70abb2e0-1ff2-49dd-b40f-9cac244a249e] Destroying instance {{(pid=62519) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1885.972807] env[62519]: DEBUG oslo_vmware.api [None req-4de658fb-fc0f-466b-a6c9-861a7b1ad6fd tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] Task: {'id': task-1803209, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1885.974719] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0631216d-0eae-4d7d-a514-0e1f2237dbab {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1885.986671] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-c9f0eb69-0e5a-4800-b99b-f2aeccf24d20 tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] [instance: 70abb2e0-1ff2-49dd-b40f-9cac244a249e] Powering off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1885.987087] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b9911f8d-5146-44a6-a1a5-ebdf7a31e54a {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1885.998089] env[62519]: DEBUG oslo_vmware.api [None req-c9f0eb69-0e5a-4800-b99b-f2aeccf24d20 tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] Waiting for the task: (returnval){ [ 1885.998089] env[62519]: value = "task-1803211" [ 1885.998089] env[62519]: _type = "Task" [ 1885.998089] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1886.014355] env[62519]: DEBUG oslo_vmware.api [None req-c9f0eb69-0e5a-4800-b99b-f2aeccf24d20 tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] Task: {'id': task-1803211, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1886.068845] env[62519]: DEBUG oslo_vmware.api [None req-88b0e9f4-13de-4192-9161-fe94ce7f8c9e tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Task: {'id': task-1803210, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.156811} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1886.069173] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-88b0e9f4-13de-4192-9161-fe94ce7f8c9e tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Deleted the datastore file {{(pid=62519) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1886.069356] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-88b0e9f4-13de-4192-9161-fe94ce7f8c9e tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] [instance: eb5de0a4-0af3-4731-ab30-3ae3d72207a7] Deleted contents of the VM from datastore datastore1 {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1886.069717] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-88b0e9f4-13de-4192-9161-fe94ce7f8c9e tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] [instance: eb5de0a4-0af3-4731-ab30-3ae3d72207a7] Instance destroyed {{(pid=62519) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1886.069782] env[62519]: INFO nova.compute.manager [None req-88b0e9f4-13de-4192-9161-fe94ce7f8c9e tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] [instance: eb5de0a4-0af3-4731-ab30-3ae3d72207a7] Took 1.20 seconds to destroy the instance on the hypervisor. [ 1886.069995] env[62519]: DEBUG oslo.service.loopingcall [None req-88b0e9f4-13de-4192-9161-fe94ce7f8c9e tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62519) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1886.070695] env[62519]: DEBUG nova.compute.manager [-] [instance: eb5de0a4-0af3-4731-ab30-3ae3d72207a7] Deallocating network for instance {{(pid=62519) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2297}} [ 1886.070695] env[62519]: DEBUG nova.network.neutron [-] [instance: eb5de0a4-0af3-4731-ab30-3ae3d72207a7] deallocate_for_instance() {{(pid=62519) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1886.228224] env[62519]: DEBUG oslo_concurrency.lockutils [None req-940b0ecd-a4df-4677-a140-fd29162a361e tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Acquiring lock "a0c60d49-83bb-434b-815c-c39e7493cbb7" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1886.228555] env[62519]: DEBUG oslo_concurrency.lockutils [None req-940b0ecd-a4df-4677-a140-fd29162a361e tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Lock "a0c60d49-83bb-434b-815c-c39e7493cbb7" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1886.247537] env[62519]: DEBUG oslo_vmware.api [None req-d9dc84b1-1645-4129-b703-744d1038bd6f tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Task: {'id': task-1803203, 'name': RelocateVM_Task, 'duration_secs': 1.628245} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1886.247915] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-d9dc84b1-1645-4129-b703-744d1038bd6f tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] [instance: 31db4b14-0ba3-4159-accc-31c21bd81322] Volume attach. 
Driver type: vmdk {{(pid=62519) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1886.248141] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-d9dc84b1-1645-4129-b703-744d1038bd6f tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] [instance: 31db4b14-0ba3-4159-accc-31c21bd81322] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-373835', 'volume_id': '84bda086-0de7-4b24-8a1f-6ff7c11594b6', 'name': 'volume-84bda086-0de7-4b24-8a1f-6ff7c11594b6', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '31db4b14-0ba3-4159-accc-31c21bd81322', 'attached_at': '', 'detached_at': '', 'volume_id': '84bda086-0de7-4b24-8a1f-6ff7c11594b6', 'serial': '84bda086-0de7-4b24-8a1f-6ff7c11594b6'} {{(pid=62519) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1886.249096] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d699444-3e27-4aa8-9220-59417244e95f {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1886.267556] env[62519]: DEBUG nova.network.neutron [None req-47d089a4-bbf0-49c3-8821-7b27d9421f60 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] [instance: f3665f89-1747-4567-9e56-c937d4ac81da] Updating instance_info_cache with network_info: [{"id": "7220339b-d4a7-441b-8227-c956744ce0c0", "address": "fa:16:3e:0f:ad:ae", "network": {"id": "8d760b11-0e90-40f8-a7a6-509485520bf3", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-619921944-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.175", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "448555031bb64aefafd0fcc67f4df10a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4225eb1f-0af4-4ed4-8e3d-de822eb6d4ea", "external-id": "nsx-vlan-transportzone-40", "segmentation_id": 40, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7220339b-d4", "ovs_interfaceid": "7220339b-d4a7-441b-8227-c956744ce0c0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1886.269439] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e74957c3-bffd-4357-a19a-f74db1b7a1ba {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1886.302329] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-d9dc84b1-1645-4129-b703-744d1038bd6f tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] [instance: 31db4b14-0ba3-4159-accc-31c21bd81322] Reconfiguring VM instance instance-0000006a to attach disk [datastore1] 
volume-84bda086-0de7-4b24-8a1f-6ff7c11594b6/volume-84bda086-0de7-4b24-8a1f-6ff7c11594b6.vmdk or device None with type thin {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1886.304138] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f0a0834c-5f9c-443f-977f-f427a0e803f6 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1886.328870] env[62519]: DEBUG oslo_vmware.api [None req-d9dc84b1-1645-4129-b703-744d1038bd6f tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Waiting for the task: (returnval){ [ 1886.328870] env[62519]: value = "task-1803212" [ 1886.328870] env[62519]: _type = "Task" [ 1886.328870] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1886.338584] env[62519]: DEBUG oslo_vmware.api [None req-d9dc84b1-1645-4129-b703-744d1038bd6f tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Task: {'id': task-1803212, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1886.370041] env[62519]: DEBUG oslo_vmware.api [None req-1f95801e-70c9-40ad-906a-9a7b13227dbd tempest-ServerMetadataTestJSON-420459941 tempest-ServerMetadataTestJSON-420459941-project-member] Task: {'id': task-1803205, 'name': Rename_Task, 'duration_secs': 1.166643} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1886.370041] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-1f95801e-70c9-40ad-906a-9a7b13227dbd tempest-ServerMetadataTestJSON-420459941 tempest-ServerMetadataTestJSON-420459941-project-member] [instance: 71edba06-1628-4749-8a51-5a9bcb003fda] Powering on the VM {{(pid=62519) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1886.370041] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-7c5f1851-4278-4e09-ae45-54dd7567a03b {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1886.375324] env[62519]: DEBUG oslo_vmware.api [None req-1f95801e-70c9-40ad-906a-9a7b13227dbd tempest-ServerMetadataTestJSON-420459941 tempest-ServerMetadataTestJSON-420459941-project-member] Waiting for the task: (returnval){ [ 1886.375324] env[62519]: value = "task-1803213" [ 1886.375324] env[62519]: _type = "Task" [ 1886.375324] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1886.384835] env[62519]: DEBUG oslo_vmware.api [None req-1f95801e-70c9-40ad-906a-9a7b13227dbd tempest-ServerMetadataTestJSON-420459941 tempest-ServerMetadataTestJSON-420459941-project-member] Task: {'id': task-1803213, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1886.438284] env[62519]: DEBUG oslo_vmware.rw_handles [None req-940b526d-cc71-4b86-8030-ffe5ea468d54 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/528eb81e-1d65-d01e-a2fc-6aa7b08f6f84/disk-0.vmdk. 
{{(pid=62519) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1886.439367] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-584608e7-b988-41b6-8f72-3f7517c03f02 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1886.448562] env[62519]: DEBUG oslo_vmware.rw_handles [None req-940b526d-cc71-4b86-8030-ffe5ea468d54 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/528eb81e-1d65-d01e-a2fc-6aa7b08f6f84/disk-0.vmdk is in state: ready. {{(pid=62519) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1886.448779] env[62519]: ERROR oslo_vmware.rw_handles [None req-940b526d-cc71-4b86-8030-ffe5ea468d54 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Aborting lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/528eb81e-1d65-d01e-a2fc-6aa7b08f6f84/disk-0.vmdk due to incomplete transfer. [ 1886.449057] env[62519]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-c4b46d0b-4e19-4462-bdd5-49457587ed58 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1886.461517] env[62519]: DEBUG oslo_vmware.rw_handles [None req-940b526d-cc71-4b86-8030-ffe5ea468d54 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Closed VMDK read handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/528eb81e-1d65-d01e-a2fc-6aa7b08f6f84/disk-0.vmdk. {{(pid=62519) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1886.461747] env[62519]: DEBUG nova.virt.vmwareapi.images [None req-940b526d-cc71-4b86-8030-ffe5ea468d54 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] [instance: 4a0f7975-5a07-4593-ae71-cabebdefe0fe] Uploaded image abe7c237-ca29-424e-8add-cbb34800b1aa to the Glance image server {{(pid=62519) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1886.464174] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-940b526d-cc71-4b86-8030-ffe5ea468d54 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] [instance: 4a0f7975-5a07-4593-ae71-cabebdefe0fe] Destroying the VM {{(pid=62519) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1886.464707] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-1553d4da-8d84-49e6-b333-45f7e6524a60 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1886.472802] env[62519]: DEBUG oslo_vmware.api [None req-4de658fb-fc0f-466b-a6c9-861a7b1ad6fd tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] Task: {'id': task-1803209, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1886.474188] env[62519]: DEBUG oslo_vmware.api [None req-940b526d-cc71-4b86-8030-ffe5ea468d54 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Waiting for the task: (returnval){ [ 1886.474188] env[62519]: value = "task-1803214" [ 1886.474188] env[62519]: _type = "Task" [ 1886.474188] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1886.482893] env[62519]: DEBUG oslo_vmware.api [None req-940b526d-cc71-4b86-8030-ffe5ea468d54 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Task: {'id': task-1803214, 'name': Destroy_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1886.508960] env[62519]: DEBUG oslo_vmware.api [None req-c9f0eb69-0e5a-4800-b99b-f2aeccf24d20 tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] Task: {'id': task-1803211, 'name': PowerOffVM_Task, 'duration_secs': 0.304388} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1886.509260] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-c9f0eb69-0e5a-4800-b99b-f2aeccf24d20 tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] [instance: 70abb2e0-1ff2-49dd-b40f-9cac244a249e] Powered off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1886.509434] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-c9f0eb69-0e5a-4800-b99b-f2aeccf24d20 tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] [instance: 70abb2e0-1ff2-49dd-b40f-9cac244a249e] Unregistering the VM {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1886.509722] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-71095dd0-2a80-425b-af4e-a25ad1d8af10 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1886.563737] env[62519]: DEBUG oslo_concurrency.lockutils [None req-9931da0a-62ad-4f27-8671-2f46d6493ee6 tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Acquiring lock "46b3a0fb-29f6-4b66-a091-2d125b69d109" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1886.564041] env[62519]: DEBUG oslo_concurrency.lockutils [None req-9931da0a-62ad-4f27-8671-2f46d6493ee6 tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Lock "46b3a0fb-29f6-4b66-a091-2d125b69d109" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.001s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1886.610280] env[62519]: DEBUG oslo_concurrency.lockutils [None req-1743e4f0-4a04-4d66-b257-6b699d7ecc4f tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Acquiring lock 
"interface-10bfd4ac-6f11-4c96-87a0-ce74bc1193c4-c5603e79-74fa-4628-bc67-0351b0407810" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1886.610656] env[62519]: DEBUG oslo_concurrency.lockutils [None req-1743e4f0-4a04-4d66-b257-6b699d7ecc4f tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Lock "interface-10bfd4ac-6f11-4c96-87a0-ce74bc1193c4-c5603e79-74fa-4628-bc67-0351b0407810" acquired by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: waited 0.001s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1886.732106] env[62519]: DEBUG nova.compute.manager [None req-940b0ecd-a4df-4677-a140-fd29162a361e tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] [instance: a0c60d49-83bb-434b-815c-c39e7493cbb7] Starting instance... {{(pid=62519) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2441}} [ 1886.764724] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-c9f0eb69-0e5a-4800-b99b-f2aeccf24d20 tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] [instance: 70abb2e0-1ff2-49dd-b40f-9cac244a249e] Unregistered the VM {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1886.765093] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-c9f0eb69-0e5a-4800-b99b-f2aeccf24d20 tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] [instance: 70abb2e0-1ff2-49dd-b40f-9cac244a249e] Deleting contents of the VM from datastore datastore1 {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1886.765267] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-c9f0eb69-0e5a-4800-b99b-f2aeccf24d20 tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] Deleting the datastore file [datastore1] 70abb2e0-1ff2-49dd-b40f-9cac244a249e {{(pid=62519) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1886.766030] env[62519]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-22fc165a-07e0-40a7-9065-cf7d5d8a75a3 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1886.775688] env[62519]: DEBUG oslo_concurrency.lockutils [None req-47d089a4-bbf0-49c3-8821-7b27d9421f60 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Releasing lock "refresh_cache-f3665f89-1747-4567-9e56-c937d4ac81da" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1886.778726] env[62519]: DEBUG oslo_vmware.api [None req-c9f0eb69-0e5a-4800-b99b-f2aeccf24d20 tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] Waiting for the task: (returnval){ [ 1886.778726] env[62519]: value = "task-1803216" [ 1886.778726] env[62519]: _type = "Task" [ 1886.778726] env[62519]: } to complete. 
{{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1886.792347] env[62519]: DEBUG oslo_vmware.api [None req-c9f0eb69-0e5a-4800-b99b-f2aeccf24d20 tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] Task: {'id': task-1803216, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1886.840115] env[62519]: DEBUG oslo_vmware.api [None req-d9dc84b1-1645-4129-b703-744d1038bd6f tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Task: {'id': task-1803212, 'name': ReconfigVM_Task, 'duration_secs': 0.357116} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1886.840609] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-d9dc84b1-1645-4129-b703-744d1038bd6f tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] [instance: 31db4b14-0ba3-4159-accc-31c21bd81322] Reconfigured VM instance instance-0000006a to attach disk [datastore1] volume-84bda086-0de7-4b24-8a1f-6ff7c11594b6/volume-84bda086-0de7-4b24-8a1f-6ff7c11594b6.vmdk or device None with type thin {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1886.845587] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b1960b9b-1842-4c1c-a46a-a0ee5699db91 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1886.864673] env[62519]: DEBUG oslo_vmware.api [None req-d9dc84b1-1645-4129-b703-744d1038bd6f tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Waiting for the task: (returnval){ [ 1886.864673] env[62519]: value = "task-1803217" [ 1886.864673] env[62519]: _type = "Task" [ 1886.864673] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1886.874300] env[62519]: DEBUG oslo_vmware.api [None req-d9dc84b1-1645-4129-b703-744d1038bd6f tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Task: {'id': task-1803217, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1886.885987] env[62519]: DEBUG oslo_vmware.api [None req-1f95801e-70c9-40ad-906a-9a7b13227dbd tempest-ServerMetadataTestJSON-420459941 tempest-ServerMetadataTestJSON-420459941-project-member] Task: {'id': task-1803213, 'name': PowerOnVM_Task} progress is 87%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1886.931559] env[62519]: DEBUG nova.network.neutron [-] [instance: eb5de0a4-0af3-4731-ab30-3ae3d72207a7] Updating instance_info_cache with network_info: [] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1886.971029] env[62519]: DEBUG oslo_vmware.api [None req-4de658fb-fc0f-466b-a6c9-861a7b1ad6fd tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] Task: {'id': task-1803209, 'name': PowerOnVM_Task, 'duration_secs': 1.39251} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1886.971223] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-4de658fb-fc0f-466b-a6c9-861a7b1ad6fd tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] [instance: fc3beaba-2ad5-4598-b562-557fdd552b39] Powered on the VM {{(pid=62519) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1886.971436] env[62519]: INFO nova.compute.manager [None req-4de658fb-fc0f-466b-a6c9-861a7b1ad6fd tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] [instance: fc3beaba-2ad5-4598-b562-557fdd552b39] Took 11.52 seconds to spawn the instance on the hypervisor. [ 1886.971614] env[62519]: DEBUG nova.compute.manager [None req-4de658fb-fc0f-466b-a6c9-861a7b1ad6fd tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] [instance: fc3beaba-2ad5-4598-b562-557fdd552b39] Checking state {{(pid=62519) _get_power_state /opt/stack/nova/nova/compute/manager.py:1799}} [ 1886.972544] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05b8d090-211f-4854-aef7-033df0d691fd {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1886.985067] env[62519]: DEBUG oslo_vmware.api [None req-940b526d-cc71-4b86-8030-ffe5ea468d54 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Task: {'id': task-1803214, 'name': Destroy_Task} progress is 100%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1887.001870] env[62519]: DEBUG nova.compute.manager [req-08b70b93-47aa-40aa-a7f7-ee6d50ed5c9f req-1542d30f-4654-4d26-93a0-932ae9719ebf service nova] [instance: 71edba06-1628-4749-8a51-5a9bcb003fda] Received event network-vif-plugged-ff83eb74-d7d7-49d4-8a99-813264a7d1ab {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1887.001870] env[62519]: DEBUG oslo_concurrency.lockutils [req-08b70b93-47aa-40aa-a7f7-ee6d50ed5c9f req-1542d30f-4654-4d26-93a0-932ae9719ebf service nova] Acquiring lock "71edba06-1628-4749-8a51-5a9bcb003fda-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1887.001870] env[62519]: DEBUG oslo_concurrency.lockutils [req-08b70b93-47aa-40aa-a7f7-ee6d50ed5c9f req-1542d30f-4654-4d26-93a0-932ae9719ebf service nova] Lock "71edba06-1628-4749-8a51-5a9bcb003fda-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1887.001870] env[62519]: DEBUG oslo_concurrency.lockutils [req-08b70b93-47aa-40aa-a7f7-ee6d50ed5c9f req-1542d30f-4654-4d26-93a0-932ae9719ebf service nova] Lock "71edba06-1628-4749-8a51-5a9bcb003fda-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1887.001870] env[62519]: DEBUG nova.compute.manager [req-08b70b93-47aa-40aa-a7f7-ee6d50ed5c9f req-1542d30f-4654-4d26-93a0-932ae9719ebf service nova] [instance: 71edba06-1628-4749-8a51-5a9bcb003fda] No waiting events found dispatching 
network-vif-plugged-ff83eb74-d7d7-49d4-8a99-813264a7d1ab {{(pid=62519) pop_instance_event /opt/stack/nova/nova/compute/manager.py:323}} [ 1887.001870] env[62519]: WARNING nova.compute.manager [req-08b70b93-47aa-40aa-a7f7-ee6d50ed5c9f req-1542d30f-4654-4d26-93a0-932ae9719ebf service nova] [instance: 71edba06-1628-4749-8a51-5a9bcb003fda] Received unexpected event network-vif-plugged-ff83eb74-d7d7-49d4-8a99-813264a7d1ab for instance with vm_state building and task_state spawning. [ 1887.001870] env[62519]: DEBUG nova.compute.manager [req-08b70b93-47aa-40aa-a7f7-ee6d50ed5c9f req-1542d30f-4654-4d26-93a0-932ae9719ebf service nova] [instance: 71edba06-1628-4749-8a51-5a9bcb003fda] Received event network-changed-ff83eb74-d7d7-49d4-8a99-813264a7d1ab {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1887.001870] env[62519]: DEBUG nova.compute.manager [req-08b70b93-47aa-40aa-a7f7-ee6d50ed5c9f req-1542d30f-4654-4d26-93a0-932ae9719ebf service nova] [instance: 71edba06-1628-4749-8a51-5a9bcb003fda] Refreshing instance network info cache due to event network-changed-ff83eb74-d7d7-49d4-8a99-813264a7d1ab. {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11628}} [ 1887.001870] env[62519]: DEBUG oslo_concurrency.lockutils [req-08b70b93-47aa-40aa-a7f7-ee6d50ed5c9f req-1542d30f-4654-4d26-93a0-932ae9719ebf service nova] Acquiring lock "refresh_cache-71edba06-1628-4749-8a51-5a9bcb003fda" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1887.001870] env[62519]: DEBUG oslo_concurrency.lockutils [req-08b70b93-47aa-40aa-a7f7-ee6d50ed5c9f req-1542d30f-4654-4d26-93a0-932ae9719ebf service nova] Acquired lock "refresh_cache-71edba06-1628-4749-8a51-5a9bcb003fda" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1887.001870] env[62519]: DEBUG nova.network.neutron [req-08b70b93-47aa-40aa-a7f7-ee6d50ed5c9f req-1542d30f-4654-4d26-93a0-932ae9719ebf service nova] [instance: 71edba06-1628-4749-8a51-5a9bcb003fda] Refreshing network info cache for port ff83eb74-d7d7-49d4-8a99-813264a7d1ab {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1887.067490] env[62519]: INFO nova.compute.manager [None req-9931da0a-62ad-4f27-8671-2f46d6493ee6 tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] [instance: 46b3a0fb-29f6-4b66-a091-2d125b69d109] Detaching volume 9c0a59d7-6968-4064-b0ab-53e4c74275f5 [ 1887.110380] env[62519]: INFO nova.virt.block_device [None req-9931da0a-62ad-4f27-8671-2f46d6493ee6 tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] [instance: 46b3a0fb-29f6-4b66-a091-2d125b69d109] Attempting to driver detach volume 9c0a59d7-6968-4064-b0ab-53e4c74275f5 from mountpoint /dev/sdb [ 1887.110711] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-9931da0a-62ad-4f27-8671-2f46d6493ee6 tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] [instance: 46b3a0fb-29f6-4b66-a091-2d125b69d109] Volume detach. 
Driver type: vmdk {{(pid=62519) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1887.110948] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-9931da0a-62ad-4f27-8671-2f46d6493ee6 tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] [instance: 46b3a0fb-29f6-4b66-a091-2d125b69d109] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-373810', 'volume_id': '9c0a59d7-6968-4064-b0ab-53e4c74275f5', 'name': 'volume-9c0a59d7-6968-4064-b0ab-53e4c74275f5', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '46b3a0fb-29f6-4b66-a091-2d125b69d109', 'attached_at': '', 'detached_at': '', 'volume_id': '9c0a59d7-6968-4064-b0ab-53e4c74275f5', 'serial': '9c0a59d7-6968-4064-b0ab-53e4c74275f5'} {{(pid=62519) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1887.112017] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06201411-70e0-4471-b2dd-a03511bb45ad {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1887.115617] env[62519]: DEBUG oslo_concurrency.lockutils [None req-1743e4f0-4a04-4d66-b257-6b699d7ecc4f tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Acquiring lock "10bfd4ac-6f11-4c96-87a0-ce74bc1193c4" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1887.115787] env[62519]: DEBUG oslo_concurrency.lockutils [None req-1743e4f0-4a04-4d66-b257-6b699d7ecc4f tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Acquired lock "10bfd4ac-6f11-4c96-87a0-ce74bc1193c4" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1887.116553] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b61c326a-e65a-4904-83c5-ea80cc52f622 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1887.171258] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b84e294a-d0b2-444d-bb41-07fc5a39d44b {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1887.174932] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc72803d-1f57-4c4b-969f-4e745cee3ef0 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1887.199138] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-985eaa42-e72d-425c-98d2-920b2ee13e89 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1887.207859] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-1743e4f0-4a04-4d66-b257-6b699d7ecc4f tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] [instance: 10bfd4ac-6f11-4c96-87a0-ce74bc1193c4] Reconfiguring VM to detach interface {{(pid=62519) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1968}} [ 1887.208256] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with 
opID=oslo.vmware-0859cad8-b2a1-4b45-8289-94532246c917 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1887.253179] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62315d61-dfdd-4ba4-9fbe-e65be17bd80e {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1887.256563] env[62519]: DEBUG oslo_vmware.api [None req-1743e4f0-4a04-4d66-b257-6b699d7ecc4f tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Waiting for the task: (returnval){ [ 1887.256563] env[62519]: value = "task-1803218" [ 1887.256563] env[62519]: _type = "Task" [ 1887.256563] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1887.278084] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-9931da0a-62ad-4f27-8671-2f46d6493ee6 tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] The volume has not been displaced from its original location: [datastore1] volume-9c0a59d7-6968-4064-b0ab-53e4c74275f5/volume-9c0a59d7-6968-4064-b0ab-53e4c74275f5.vmdk. No consolidation needed. {{(pid=62519) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1887.284083] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-9931da0a-62ad-4f27-8671-2f46d6493ee6 tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] [instance: 46b3a0fb-29f6-4b66-a091-2d125b69d109] Reconfiguring VM instance instance-0000003a to detach disk 2001 {{(pid=62519) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1887.285436] env[62519]: DEBUG oslo_concurrency.lockutils [None req-940b0ecd-a4df-4677-a140-fd29162a361e tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1887.285687] env[62519]: DEBUG oslo_concurrency.lockutils [None req-940b0ecd-a4df-4677-a140-fd29162a361e tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1887.287271] env[62519]: INFO nova.compute.claims [None req-940b0ecd-a4df-4677-a140-fd29162a361e tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] [instance: a0c60d49-83bb-434b-815c-c39e7493cbb7] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1887.293028] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-646816d6-d961-4548-a528-984ad8277302 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1887.311685] env[62519]: DEBUG oslo_vmware.api [None req-1743e4f0-4a04-4d66-b257-6b699d7ecc4f tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Task: {'id': task-1803218, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1887.323645] env[62519]: DEBUG oslo_vmware.api [None req-c9f0eb69-0e5a-4800-b99b-f2aeccf24d20 tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] Task: {'id': task-1803216, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.459707} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1887.325493] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-c9f0eb69-0e5a-4800-b99b-f2aeccf24d20 tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] Deleted the datastore file {{(pid=62519) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1887.325690] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-c9f0eb69-0e5a-4800-b99b-f2aeccf24d20 tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] [instance: 70abb2e0-1ff2-49dd-b40f-9cac244a249e] Deleted contents of the VM from datastore datastore1 {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1887.325858] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-c9f0eb69-0e5a-4800-b99b-f2aeccf24d20 tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] [instance: 70abb2e0-1ff2-49dd-b40f-9cac244a249e] Instance destroyed {{(pid=62519) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1887.326083] env[62519]: INFO nova.compute.manager [None req-c9f0eb69-0e5a-4800-b99b-f2aeccf24d20 tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] [instance: 70abb2e0-1ff2-49dd-b40f-9cac244a249e] Took 1.35 seconds to destroy the instance on the hypervisor. [ 1887.326343] env[62519]: DEBUG oslo.service.loopingcall [None req-c9f0eb69-0e5a-4800-b99b-f2aeccf24d20 tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62519) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1887.326621] env[62519]: DEBUG oslo_vmware.api [None req-9931da0a-62ad-4f27-8671-2f46d6493ee6 tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Waiting for the task: (returnval){ [ 1887.326621] env[62519]: value = "task-1803219" [ 1887.326621] env[62519]: _type = "Task" [ 1887.326621] env[62519]: } to complete. 
{{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1887.327111] env[62519]: DEBUG nova.compute.manager [-] [instance: 70abb2e0-1ff2-49dd-b40f-9cac244a249e] Deallocating network for instance {{(pid=62519) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2297}} [ 1887.327208] env[62519]: DEBUG nova.network.neutron [-] [instance: 70abb2e0-1ff2-49dd-b40f-9cac244a249e] deallocate_for_instance() {{(pid=62519) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1887.341935] env[62519]: DEBUG oslo_vmware.api [None req-9931da0a-62ad-4f27-8671-2f46d6493ee6 tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Task: {'id': task-1803219, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1887.376303] env[62519]: DEBUG oslo_vmware.api [None req-d9dc84b1-1645-4129-b703-744d1038bd6f tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Task: {'id': task-1803217, 'name': ReconfigVM_Task, 'duration_secs': 0.28043} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1887.376647] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-d9dc84b1-1645-4129-b703-744d1038bd6f tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] [instance: 31db4b14-0ba3-4159-accc-31c21bd81322] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-373835', 'volume_id': '84bda086-0de7-4b24-8a1f-6ff7c11594b6', 'name': 'volume-84bda086-0de7-4b24-8a1f-6ff7c11594b6', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '31db4b14-0ba3-4159-accc-31c21bd81322', 'attached_at': '', 'detached_at': '', 'volume_id': '84bda086-0de7-4b24-8a1f-6ff7c11594b6', 'serial': '84bda086-0de7-4b24-8a1f-6ff7c11594b6'} {{(pid=62519) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1887.377311] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-a8cc5d00-98d3-480f-a4f4-4961aa6e6629 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1887.390962] env[62519]: DEBUG oslo_vmware.api [None req-1f95801e-70c9-40ad-906a-9a7b13227dbd tempest-ServerMetadataTestJSON-420459941 tempest-ServerMetadataTestJSON-420459941-project-member] Task: {'id': task-1803213, 'name': PowerOnVM_Task, 'duration_secs': 0.885282} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1887.392650] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-1f95801e-70c9-40ad-906a-9a7b13227dbd tempest-ServerMetadataTestJSON-420459941 tempest-ServerMetadataTestJSON-420459941-project-member] [instance: 71edba06-1628-4749-8a51-5a9bcb003fda] Powered on the VM {{(pid=62519) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1887.392897] env[62519]: INFO nova.compute.manager [None req-1f95801e-70c9-40ad-906a-9a7b13227dbd tempest-ServerMetadataTestJSON-420459941 tempest-ServerMetadataTestJSON-420459941-project-member] [instance: 71edba06-1628-4749-8a51-5a9bcb003fda] Took 16.17 seconds to spawn the instance on the hypervisor. 
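The repeated "Invoking <something>_Task", "Waiting for the task: (returnval){ ... } to complete", "progress is N%" and "completed successfully" sequences throughout this trace are produced by oslo.vmware's task-polling loop (wait_for_task / _poll_task in oslo_vmware/api.py, the source locations cited in the records above). As a rough illustration only, a minimal Python sketch of that call pattern follows; the vCenter host, credentials and the VM managed-object id are placeholders and are not taken from this log, and constructing the session assumes a reachable vCenter endpoint.

    from oslo_vmware import api as vmware_api
    from oslo_vmware import vim_util

    # Placeholder connection details -- illustrative only, not from this log.
    session = vmware_api.VMwareAPISession(
        'vc.example.test', 'user', 'secret',
        api_retry_count=10, task_poll_interval=0.5)

    # Build a managed-object reference for a VM (assumed moref id).
    vm_ref = vim_util.get_moref('vm-12345', 'VirtualMachine')

    # invoke_api() issues the vSphere task call and returns a task moref;
    # wait_for_task() then polls it, which is what emits the
    # "progress is N%" / "completed successfully" DEBUG lines seen above.
    task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
    session.wait_for_task(task)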
[ 1887.393169] env[62519]: DEBUG nova.compute.manager [None req-1f95801e-70c9-40ad-906a-9a7b13227dbd tempest-ServerMetadataTestJSON-420459941 tempest-ServerMetadataTestJSON-420459941-project-member] [instance: 71edba06-1628-4749-8a51-5a9bcb003fda] Checking state {{(pid=62519) _get_power_state /opt/stack/nova/nova/compute/manager.py:1799}} [ 1887.397028] env[62519]: DEBUG oslo_vmware.api [None req-d9dc84b1-1645-4129-b703-744d1038bd6f tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Waiting for the task: (returnval){ [ 1887.397028] env[62519]: value = "task-1803220" [ 1887.397028] env[62519]: _type = "Task" [ 1887.397028] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1887.397028] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-315d547c-0ef6-4593-96e0-9f52e6fdc4c9 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1887.409654] env[62519]: DEBUG oslo_vmware.api [None req-d9dc84b1-1645-4129-b703-744d1038bd6f tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Task: {'id': task-1803220, 'name': Rename_Task} progress is 6%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1887.434795] env[62519]: INFO nova.compute.manager [-] [instance: eb5de0a4-0af3-4731-ab30-3ae3d72207a7] Took 1.36 seconds to deallocate network for instance. [ 1887.492026] env[62519]: DEBUG oslo_vmware.api [None req-940b526d-cc71-4b86-8030-ffe5ea468d54 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Task: {'id': task-1803214, 'name': Destroy_Task, 'duration_secs': 0.542147} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1887.492368] env[62519]: INFO nova.virt.vmwareapi.vm_util [None req-940b526d-cc71-4b86-8030-ffe5ea468d54 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] [instance: 4a0f7975-5a07-4593-ae71-cabebdefe0fe] Destroyed the VM [ 1887.492625] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-940b526d-cc71-4b86-8030-ffe5ea468d54 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] [instance: 4a0f7975-5a07-4593-ae71-cabebdefe0fe] Deleting Snapshot of the VM instance {{(pid=62519) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1887.497479] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-fff55947-f7c5-467d-9aef-d16ea7faf4ef {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1887.503279] env[62519]: INFO nova.compute.manager [None req-4de658fb-fc0f-466b-a6c9-861a7b1ad6fd tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] [instance: fc3beaba-2ad5-4598-b562-557fdd552b39] Took 19.95 seconds to build instance. 
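The "Acquiring lock ... by ...", "Lock ... acquired by ... :: waited N s" and "Lock ... \"released\" by ... :: held N s" records above come from oslo.concurrency's lockutils helpers (the inner wrapper at lockutils.py:402/407/421 cited in each record). A minimal sketch of that usage pattern is shown below; the lock name and the wrapped function are hypothetical stand-ins, not identifiers from this log.

    from oslo_concurrency import lockutils

    # Serializing work on one instance by locking on its UUID; entering and
    # leaving the lock is what produces the "acquired ... waited" and
    # "released ... held" DEBUG lines in the trace above.
    @lockutils.synchronized('instance-uuid-placeholder')
    def do_terminate_instance():
        ...  # work performed while the lock is held

    # The same helper is also available as a context manager.
    with lockutils.lock('instance-uuid-placeholder'):
        pass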
[ 1887.511434] env[62519]: DEBUG oslo_vmware.api [None req-940b526d-cc71-4b86-8030-ffe5ea468d54 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Waiting for the task: (returnval){ [ 1887.511434] env[62519]: value = "task-1803221" [ 1887.511434] env[62519]: _type = "Task" [ 1887.511434] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1887.524490] env[62519]: DEBUG oslo_vmware.api [None req-940b526d-cc71-4b86-8030-ffe5ea468d54 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Task: {'id': task-1803221, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1887.771503] env[62519]: DEBUG oslo_vmware.api [None req-1743e4f0-4a04-4d66-b257-6b699d7ecc4f tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Task: {'id': task-1803218, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1887.844435] env[62519]: DEBUG oslo_vmware.api [None req-9931da0a-62ad-4f27-8671-2f46d6493ee6 tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Task: {'id': task-1803219, 'name': ReconfigVM_Task, 'duration_secs': 0.332568} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1887.844810] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-9931da0a-62ad-4f27-8671-2f46d6493ee6 tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] [instance: 46b3a0fb-29f6-4b66-a091-2d125b69d109] Reconfigured VM instance instance-0000003a to detach disk 2001 {{(pid=62519) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1887.849897] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0ed27afd-fea0-43ee-9a27-603f38ca78ef {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1887.871995] env[62519]: DEBUG oslo_vmware.api [None req-9931da0a-62ad-4f27-8671-2f46d6493ee6 tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Waiting for the task: (returnval){ [ 1887.871995] env[62519]: value = "task-1803222" [ 1887.871995] env[62519]: _type = "Task" [ 1887.871995] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1887.882937] env[62519]: DEBUG oslo_vmware.api [None req-9931da0a-62ad-4f27-8671-2f46d6493ee6 tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Task: {'id': task-1803222, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1887.909945] env[62519]: DEBUG oslo_vmware.api [None req-d9dc84b1-1645-4129-b703-744d1038bd6f tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Task: {'id': task-1803220, 'name': Rename_Task, 'duration_secs': 0.177173} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1887.910295] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-d9dc84b1-1645-4129-b703-744d1038bd6f tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] [instance: 31db4b14-0ba3-4159-accc-31c21bd81322] Powering on the VM {{(pid=62519) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1887.910587] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-154f84c6-10f7-41ee-9e99-cf677ca5000d {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1887.920998] env[62519]: DEBUG oslo_vmware.api [None req-d9dc84b1-1645-4129-b703-744d1038bd6f tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Waiting for the task: (returnval){ [ 1887.920998] env[62519]: value = "task-1803223" [ 1887.920998] env[62519]: _type = "Task" [ 1887.920998] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1887.927732] env[62519]: INFO nova.compute.manager [None req-1f95801e-70c9-40ad-906a-9a7b13227dbd tempest-ServerMetadataTestJSON-420459941 tempest-ServerMetadataTestJSON-420459941-project-member] [instance: 71edba06-1628-4749-8a51-5a9bcb003fda] Took 22.05 seconds to build instance. [ 1887.933605] env[62519]: DEBUG oslo_vmware.api [None req-d9dc84b1-1645-4129-b703-744d1038bd6f tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Task: {'id': task-1803223, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1887.945060] env[62519]: DEBUG oslo_concurrency.lockutils [None req-88b0e9f4-13de-4192-9161-fe94ce7f8c9e tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1888.036498] env[62519]: DEBUG oslo_vmware.api [None req-940b526d-cc71-4b86-8030-ffe5ea468d54 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Task: {'id': task-1803221, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1888.089878] env[62519]: DEBUG nova.network.neutron [req-08b70b93-47aa-40aa-a7f7-ee6d50ed5c9f req-1542d30f-4654-4d26-93a0-932ae9719ebf service nova] [instance: 71edba06-1628-4749-8a51-5a9bcb003fda] Updated VIF entry in instance network info cache for port ff83eb74-d7d7-49d4-8a99-813264a7d1ab. 
{{(pid=62519) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1888.090268] env[62519]: DEBUG nova.network.neutron [req-08b70b93-47aa-40aa-a7f7-ee6d50ed5c9f req-1542d30f-4654-4d26-93a0-932ae9719ebf service nova] [instance: 71edba06-1628-4749-8a51-5a9bcb003fda] Updating instance_info_cache with network_info: [{"id": "ff83eb74-d7d7-49d4-8a99-813264a7d1ab", "address": "fa:16:3e:95:80:5c", "network": {"id": "4ce8c1b4-53ed-46b1-989a-160f430e476b", "bridge": "br-int", "label": "tempest-ServerMetadataTestJSON-1197347089-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "94d952826a35452f9ac28e0cbf14a980", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "eb2eaecd-9701-4504-9fcb-fb1a420ead72", "external-id": "nsx-vlan-transportzone-433", "segmentation_id": 433, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapff83eb74-d7", "ovs_interfaceid": "ff83eb74-d7d7-49d4-8a99-813264a7d1ab", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1888.272150] env[62519]: DEBUG oslo_vmware.api [None req-1743e4f0-4a04-4d66-b257-6b699d7ecc4f tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Task: {'id': task-1803218, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1888.320468] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-47d089a4-bbf0-49c3-8821-7b27d9421f60 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] [instance: f3665f89-1747-4567-9e56-c937d4ac81da] Powering off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1888.321060] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-456e8606-c026-41f5-8aef-04e3accb778a {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1888.331146] env[62519]: DEBUG oslo_vmware.api [None req-47d089a4-bbf0-49c3-8821-7b27d9421f60 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Waiting for the task: (returnval){ [ 1888.331146] env[62519]: value = "task-1803224" [ 1888.331146] env[62519]: _type = "Task" [ 1888.331146] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1888.340551] env[62519]: DEBUG oslo_vmware.api [None req-47d089a4-bbf0-49c3-8821-7b27d9421f60 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Task: {'id': task-1803224, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1888.385073] env[62519]: DEBUG oslo_vmware.api [None req-9931da0a-62ad-4f27-8671-2f46d6493ee6 tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Task: {'id': task-1803222, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1888.443710] env[62519]: DEBUG oslo_vmware.api [None req-d9dc84b1-1645-4129-b703-744d1038bd6f tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Task: {'id': task-1803223, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1888.526477] env[62519]: DEBUG oslo_vmware.api [None req-940b526d-cc71-4b86-8030-ffe5ea468d54 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Task: {'id': task-1803221, 'name': RemoveSnapshot_Task, 'duration_secs': 0.961025} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1888.526477] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-940b526d-cc71-4b86-8030-ffe5ea468d54 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] [instance: 4a0f7975-5a07-4593-ae71-cabebdefe0fe] Deleted Snapshot of the VM instance {{(pid=62519) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1888.526477] env[62519]: INFO nova.compute.manager [None req-940b526d-cc71-4b86-8030-ffe5ea468d54 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] [instance: 4a0f7975-5a07-4593-ae71-cabebdefe0fe] Took 19.26 seconds to snapshot the instance on the hypervisor. 
[ 1888.593690] env[62519]: DEBUG oslo_concurrency.lockutils [req-08b70b93-47aa-40aa-a7f7-ee6d50ed5c9f req-1542d30f-4654-4d26-93a0-932ae9719ebf service nova] Releasing lock "refresh_cache-71edba06-1628-4749-8a51-5a9bcb003fda" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1888.593983] env[62519]: DEBUG nova.compute.manager [req-08b70b93-47aa-40aa-a7f7-ee6d50ed5c9f req-1542d30f-4654-4d26-93a0-932ae9719ebf service nova] [instance: 10bfd4ac-6f11-4c96-87a0-ce74bc1193c4] Received event network-vif-plugged-c5603e79-74fa-4628-bc67-0351b0407810 {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1888.594246] env[62519]: DEBUG oslo_concurrency.lockutils [req-08b70b93-47aa-40aa-a7f7-ee6d50ed5c9f req-1542d30f-4654-4d26-93a0-932ae9719ebf service nova] Acquiring lock "10bfd4ac-6f11-4c96-87a0-ce74bc1193c4-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1888.594468] env[62519]: DEBUG oslo_concurrency.lockutils [req-08b70b93-47aa-40aa-a7f7-ee6d50ed5c9f req-1542d30f-4654-4d26-93a0-932ae9719ebf service nova] Lock "10bfd4ac-6f11-4c96-87a0-ce74bc1193c4-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1888.594731] env[62519]: DEBUG oslo_concurrency.lockutils [req-08b70b93-47aa-40aa-a7f7-ee6d50ed5c9f req-1542d30f-4654-4d26-93a0-932ae9719ebf service nova] Lock "10bfd4ac-6f11-4c96-87a0-ce74bc1193c4-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1888.595032] env[62519]: DEBUG nova.compute.manager [req-08b70b93-47aa-40aa-a7f7-ee6d50ed5c9f req-1542d30f-4654-4d26-93a0-932ae9719ebf service nova] [instance: 10bfd4ac-6f11-4c96-87a0-ce74bc1193c4] No waiting events found dispatching network-vif-plugged-c5603e79-74fa-4628-bc67-0351b0407810 {{(pid=62519) pop_instance_event /opt/stack/nova/nova/compute/manager.py:323}} [ 1888.595140] env[62519]: WARNING nova.compute.manager [req-08b70b93-47aa-40aa-a7f7-ee6d50ed5c9f req-1542d30f-4654-4d26-93a0-932ae9719ebf service nova] [instance: 10bfd4ac-6f11-4c96-87a0-ce74bc1193c4] Received unexpected event network-vif-plugged-c5603e79-74fa-4628-bc67-0351b0407810 for instance with vm_state active and task_state None. 
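(Editorial aside, not part of the captured log.) The "Acquiring lock ... acquired ... waited 0.000s ... released ... held 0.000s" entries above come from oslo.concurrency's lockutils, which Nova wraps around critical sections such as resource-tracker updates and instance-event handling. A minimal sketch of the same locking pattern is shown below, assuming only the public oslo.concurrency API; the lock name, prefix, and decorated function are illustrative placeholders, not Nova code.

    from oslo_concurrency import lockutils

    # Decorator form: the wrapped function runs with the named lock held,
    # and lockutils emits the "Acquiring ... acquired ... released" debug
    # lines seen throughout this log.
    @lockutils.synchronized('compute_resources', 'example-')
    def update_usage():
        pass

    # The same lock can also be taken explicitly as a context manager.
    with lockutils.lock('compute_resources', 'example-'):
        pass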
[ 1888.595288] env[62519]: DEBUG nova.compute.manager [req-08b70b93-47aa-40aa-a7f7-ee6d50ed5c9f req-1542d30f-4654-4d26-93a0-932ae9719ebf service nova] [instance: fc3beaba-2ad5-4598-b562-557fdd552b39] Received event network-vif-plugged-64341ffd-0da7-4574-9702-3fc0a03eb0e3 {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1888.595454] env[62519]: DEBUG oslo_concurrency.lockutils [req-08b70b93-47aa-40aa-a7f7-ee6d50ed5c9f req-1542d30f-4654-4d26-93a0-932ae9719ebf service nova] Acquiring lock "fc3beaba-2ad5-4598-b562-557fdd552b39-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1888.595725] env[62519]: DEBUG oslo_concurrency.lockutils [req-08b70b93-47aa-40aa-a7f7-ee6d50ed5c9f req-1542d30f-4654-4d26-93a0-932ae9719ebf service nova] Lock "fc3beaba-2ad5-4598-b562-557fdd552b39-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1888.596225] env[62519]: DEBUG oslo_concurrency.lockutils [req-08b70b93-47aa-40aa-a7f7-ee6d50ed5c9f req-1542d30f-4654-4d26-93a0-932ae9719ebf service nova] Lock "fc3beaba-2ad5-4598-b562-557fdd552b39-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1888.596225] env[62519]: DEBUG nova.compute.manager [req-08b70b93-47aa-40aa-a7f7-ee6d50ed5c9f req-1542d30f-4654-4d26-93a0-932ae9719ebf service nova] [instance: fc3beaba-2ad5-4598-b562-557fdd552b39] No waiting events found dispatching network-vif-plugged-64341ffd-0da7-4574-9702-3fc0a03eb0e3 {{(pid=62519) pop_instance_event /opt/stack/nova/nova/compute/manager.py:323}} [ 1888.596225] env[62519]: WARNING nova.compute.manager [req-08b70b93-47aa-40aa-a7f7-ee6d50ed5c9f req-1542d30f-4654-4d26-93a0-932ae9719ebf service nova] [instance: fc3beaba-2ad5-4598-b562-557fdd552b39] Received unexpected event network-vif-plugged-64341ffd-0da7-4574-9702-3fc0a03eb0e3 for instance with vm_state building and task_state spawning. [ 1888.596358] env[62519]: DEBUG nova.compute.manager [req-08b70b93-47aa-40aa-a7f7-ee6d50ed5c9f req-1542d30f-4654-4d26-93a0-932ae9719ebf service nova] [instance: fc3beaba-2ad5-4598-b562-557fdd552b39] Received event network-changed-64341ffd-0da7-4574-9702-3fc0a03eb0e3 {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1888.596444] env[62519]: DEBUG nova.compute.manager [req-08b70b93-47aa-40aa-a7f7-ee6d50ed5c9f req-1542d30f-4654-4d26-93a0-932ae9719ebf service nova] [instance: fc3beaba-2ad5-4598-b562-557fdd552b39] Refreshing instance network info cache due to event network-changed-64341ffd-0da7-4574-9702-3fc0a03eb0e3. 
{{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11628}} [ 1888.596633] env[62519]: DEBUG oslo_concurrency.lockutils [req-08b70b93-47aa-40aa-a7f7-ee6d50ed5c9f req-1542d30f-4654-4d26-93a0-932ae9719ebf service nova] Acquiring lock "refresh_cache-fc3beaba-2ad5-4598-b562-557fdd552b39" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1888.596785] env[62519]: DEBUG oslo_concurrency.lockutils [req-08b70b93-47aa-40aa-a7f7-ee6d50ed5c9f req-1542d30f-4654-4d26-93a0-932ae9719ebf service nova] Acquired lock "refresh_cache-fc3beaba-2ad5-4598-b562-557fdd552b39" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1888.596946] env[62519]: DEBUG nova.network.neutron [req-08b70b93-47aa-40aa-a7f7-ee6d50ed5c9f req-1542d30f-4654-4d26-93a0-932ae9719ebf service nova] [instance: fc3beaba-2ad5-4598-b562-557fdd552b39] Refreshing network info cache for port 64341ffd-0da7-4574-9702-3fc0a03eb0e3 {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1888.601048] env[62519]: DEBUG nova.network.neutron [-] [instance: 70abb2e0-1ff2-49dd-b40f-9cac244a249e] Updating instance_info_cache with network_info: [] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1888.615722] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7aa6e47-96ef-45c9-8d7f-1b650be22ed4 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1888.624820] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45696a03-ad48-49d4-9ea3-071d8070162c {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1888.657439] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2de2939d-673c-4836-a4d9-9aab12f0b183 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1888.666703] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-edff0c98-5eba-4d19-a754-31a2f7b82206 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1888.681556] env[62519]: DEBUG nova.compute.provider_tree [None req-940b0ecd-a4df-4677-a140-fd29162a361e tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Inventory has not changed in ProviderTree for provider: f8ca0d98-9158-4b85-ae0e-b106f966dd44 {{(pid=62519) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1888.772343] env[62519]: DEBUG oslo_vmware.api [None req-1743e4f0-4a04-4d66-b257-6b699d7ecc4f tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Task: {'id': task-1803218, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1888.844293] env[62519]: DEBUG oslo_vmware.api [None req-47d089a4-bbf0-49c3-8821-7b27d9421f60 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Task: {'id': task-1803224, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1888.885168] env[62519]: DEBUG oslo_vmware.api [None req-9931da0a-62ad-4f27-8671-2f46d6493ee6 tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Task: {'id': task-1803222, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1888.936027] env[62519]: DEBUG oslo_vmware.api [None req-d9dc84b1-1645-4129-b703-744d1038bd6f tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Task: {'id': task-1803223, 'name': PowerOnVM_Task, 'duration_secs': 0.74819} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1888.936150] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-d9dc84b1-1645-4129-b703-744d1038bd6f tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] [instance: 31db4b14-0ba3-4159-accc-31c21bd81322] Powered on the VM {{(pid=62519) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1888.936597] env[62519]: INFO nova.compute.manager [None req-d9dc84b1-1645-4129-b703-744d1038bd6f tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] [instance: 31db4b14-0ba3-4159-accc-31c21bd81322] Took 6.46 seconds to spawn the instance on the hypervisor. [ 1888.936597] env[62519]: DEBUG nova.compute.manager [None req-d9dc84b1-1645-4129-b703-744d1038bd6f tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] [instance: 31db4b14-0ba3-4159-accc-31c21bd81322] Checking state {{(pid=62519) _get_power_state /opt/stack/nova/nova/compute/manager.py:1799}} [ 1888.937234] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76e6b043-5e74-4dbf-a64d-0e668798e5f1 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1889.011599] env[62519]: DEBUG oslo_concurrency.lockutils [None req-4de658fb-fc0f-466b-a6c9-861a7b1ad6fd tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] Lock "fc3beaba-2ad5-4598-b562-557fdd552b39" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 22.467s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1889.027186] env[62519]: DEBUG nova.compute.manager [req-4d141eb1-80ae-4f2b-9456-4246a17afbad req-962ce482-6d86-4435-829f-bca944fe1c7e service nova] [instance: 70abb2e0-1ff2-49dd-b40f-9cac244a249e] Received event network-vif-deleted-4a3113f5-f1bd-4056-9bbe-75a22f8189bc {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1889.027384] env[62519]: DEBUG nova.compute.manager [req-4d141eb1-80ae-4f2b-9456-4246a17afbad req-962ce482-6d86-4435-829f-bca944fe1c7e service nova] [instance: fc3beaba-2ad5-4598-b562-557fdd552b39] Received event network-changed-64341ffd-0da7-4574-9702-3fc0a03eb0e3 {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1889.027508] env[62519]: DEBUG nova.compute.manager [req-4d141eb1-80ae-4f2b-9456-4246a17afbad req-962ce482-6d86-4435-829f-bca944fe1c7e service nova] [instance: fc3beaba-2ad5-4598-b562-557fdd552b39] Refreshing 
instance network info cache due to event network-changed-64341ffd-0da7-4574-9702-3fc0a03eb0e3. {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11628}} [ 1889.027702] env[62519]: DEBUG oslo_concurrency.lockutils [req-4d141eb1-80ae-4f2b-9456-4246a17afbad req-962ce482-6d86-4435-829f-bca944fe1c7e service nova] Acquiring lock "refresh_cache-fc3beaba-2ad5-4598-b562-557fdd552b39" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1889.076369] env[62519]: DEBUG nova.compute.manager [None req-940b526d-cc71-4b86-8030-ffe5ea468d54 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] [instance: 4a0f7975-5a07-4593-ae71-cabebdefe0fe] Found 3 images (rotation: 2) {{(pid=62519) _rotate_backups /opt/stack/nova/nova/compute/manager.py:4998}} [ 1889.076611] env[62519]: DEBUG nova.compute.manager [None req-940b526d-cc71-4b86-8030-ffe5ea468d54 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] [instance: 4a0f7975-5a07-4593-ae71-cabebdefe0fe] Rotating out 1 backups {{(pid=62519) _rotate_backups /opt/stack/nova/nova/compute/manager.py:5006}} [ 1889.076783] env[62519]: DEBUG nova.compute.manager [None req-940b526d-cc71-4b86-8030-ffe5ea468d54 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] [instance: 4a0f7975-5a07-4593-ae71-cabebdefe0fe] Deleting image bf703b2e-801c-4321-92c0-c8d0deb17ae0 {{(pid=62519) _rotate_backups /opt/stack/nova/nova/compute/manager.py:5011}} [ 1889.105756] env[62519]: INFO nova.compute.manager [-] [instance: 70abb2e0-1ff2-49dd-b40f-9cac244a249e] Took 1.78 seconds to deallocate network for instance. [ 1889.187766] env[62519]: DEBUG nova.scheduler.client.report [None req-940b0ecd-a4df-4677-a140-fd29162a361e tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Inventory has not changed for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 157, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1889.277249] env[62519]: DEBUG oslo_vmware.api [None req-1743e4f0-4a04-4d66-b257-6b699d7ecc4f tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Task: {'id': task-1803218, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1889.341935] env[62519]: DEBUG oslo_vmware.api [None req-47d089a4-bbf0-49c3-8821-7b27d9421f60 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Task: {'id': task-1803224, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1889.351064] env[62519]: DEBUG nova.network.neutron [req-08b70b93-47aa-40aa-a7f7-ee6d50ed5c9f req-1542d30f-4654-4d26-93a0-932ae9719ebf service nova] [instance: fc3beaba-2ad5-4598-b562-557fdd552b39] Updated VIF entry in instance network info cache for port 64341ffd-0da7-4574-9702-3fc0a03eb0e3. 
{{(pid=62519) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1889.351502] env[62519]: DEBUG nova.network.neutron [req-08b70b93-47aa-40aa-a7f7-ee6d50ed5c9f req-1542d30f-4654-4d26-93a0-932ae9719ebf service nova] [instance: fc3beaba-2ad5-4598-b562-557fdd552b39] Updating instance_info_cache with network_info: [{"id": "64341ffd-0da7-4574-9702-3fc0a03eb0e3", "address": "fa:16:3e:62:ec:77", "network": {"id": "227244cd-a495-40a7-b8e1-1fb111ecd3ef", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-482185283-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.252", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "91c1732890db42f98f538f7a5ac0d542", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2bf99f85-3a5c-47c6-a603-e215be6ab0bd", "external-id": "nsx-vlan-transportzone-855", "segmentation_id": 855, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap64341ffd-0d", "ovs_interfaceid": "64341ffd-0da7-4574-9702-3fc0a03eb0e3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1889.387883] env[62519]: DEBUG oslo_vmware.api [None req-9931da0a-62ad-4f27-8671-2f46d6493ee6 tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Task: {'id': task-1803222, 'name': ReconfigVM_Task, 'duration_secs': 1.219806} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1889.388298] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-9931da0a-62ad-4f27-8671-2f46d6493ee6 tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] [instance: 46b3a0fb-29f6-4b66-a091-2d125b69d109] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-373810', 'volume_id': '9c0a59d7-6968-4064-b0ab-53e4c74275f5', 'name': 'volume-9c0a59d7-6968-4064-b0ab-53e4c74275f5', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '46b3a0fb-29f6-4b66-a091-2d125b69d109', 'attached_at': '', 'detached_at': '', 'volume_id': '9c0a59d7-6968-4064-b0ab-53e4c74275f5', 'serial': '9c0a59d7-6968-4064-b0ab-53e4c74275f5'} {{(pid=62519) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1889.437840] env[62519]: DEBUG oslo_concurrency.lockutils [None req-1f95801e-70c9-40ad-906a-9a7b13227dbd tempest-ServerMetadataTestJSON-420459941 tempest-ServerMetadataTestJSON-420459941-project-member] Lock "71edba06-1628-4749-8a51-5a9bcb003fda" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 24.571s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1889.462355] env[62519]: INFO nova.compute.manager [None req-d9dc84b1-1645-4129-b703-744d1038bd6f tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] [instance: 31db4b14-0ba3-4159-accc-31c21bd81322] Took 13.12 seconds to build instance. [ 1889.613735] env[62519]: DEBUG oslo_concurrency.lockutils [None req-c9f0eb69-0e5a-4800-b99b-f2aeccf24d20 tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1889.693351] env[62519]: DEBUG oslo_concurrency.lockutils [None req-940b0ecd-a4df-4677-a140-fd29162a361e tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.407s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1889.693885] env[62519]: DEBUG nova.compute.manager [None req-940b0ecd-a4df-4677-a140-fd29162a361e tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] [instance: a0c60d49-83bb-434b-815c-c39e7493cbb7] Start building networks asynchronously for instance. 
{{(pid=62519) _build_resources /opt/stack/nova/nova/compute/manager.py:2838}} [ 1889.700323] env[62519]: DEBUG oslo_concurrency.lockutils [None req-88b0e9f4-13de-4192-9161-fe94ce7f8c9e tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.755s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1889.700538] env[62519]: DEBUG oslo_concurrency.lockutils [None req-88b0e9f4-13de-4192-9161-fe94ce7f8c9e tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1889.702779] env[62519]: DEBUG oslo_concurrency.lockutils [None req-c9f0eb69-0e5a-4800-b99b-f2aeccf24d20 tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.090s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1889.703069] env[62519]: DEBUG nova.objects.instance [None req-c9f0eb69-0e5a-4800-b99b-f2aeccf24d20 tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] Lazy-loading 'resources' on Instance uuid 70abb2e0-1ff2-49dd-b40f-9cac244a249e {{(pid=62519) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1889.728417] env[62519]: INFO nova.scheduler.client.report [None req-88b0e9f4-13de-4192-9161-fe94ce7f8c9e tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Deleted allocations for instance eb5de0a4-0af3-4731-ab30-3ae3d72207a7 [ 1889.775590] env[62519]: DEBUG oslo_vmware.api [None req-1743e4f0-4a04-4d66-b257-6b699d7ecc4f tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Task: {'id': task-1803218, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1889.778114] env[62519]: DEBUG oslo_concurrency.lockutils [None req-5cca4c34-b05c-4a92-b943-d0f0eb6d26e2 tempest-ServerMetadataTestJSON-420459941 tempest-ServerMetadataTestJSON-420459941-project-member] Acquiring lock "71edba06-1628-4749-8a51-5a9bcb003fda" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1889.778367] env[62519]: DEBUG oslo_concurrency.lockutils [None req-5cca4c34-b05c-4a92-b943-d0f0eb6d26e2 tempest-ServerMetadataTestJSON-420459941 tempest-ServerMetadataTestJSON-420459941-project-member] Lock "71edba06-1628-4749-8a51-5a9bcb003fda" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1889.778605] env[62519]: DEBUG oslo_concurrency.lockutils [None req-5cca4c34-b05c-4a92-b943-d0f0eb6d26e2 tempest-ServerMetadataTestJSON-420459941 tempest-ServerMetadataTestJSON-420459941-project-member] Acquiring lock "71edba06-1628-4749-8a51-5a9bcb003fda-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1889.778813] env[62519]: DEBUG oslo_concurrency.lockutils [None req-5cca4c34-b05c-4a92-b943-d0f0eb6d26e2 tempest-ServerMetadataTestJSON-420459941 tempest-ServerMetadataTestJSON-420459941-project-member] Lock "71edba06-1628-4749-8a51-5a9bcb003fda-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1889.778994] env[62519]: DEBUG oslo_concurrency.lockutils [None req-5cca4c34-b05c-4a92-b943-d0f0eb6d26e2 tempest-ServerMetadataTestJSON-420459941 tempest-ServerMetadataTestJSON-420459941-project-member] Lock "71edba06-1628-4749-8a51-5a9bcb003fda-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1889.781032] env[62519]: INFO nova.compute.manager [None req-5cca4c34-b05c-4a92-b943-d0f0eb6d26e2 tempest-ServerMetadataTestJSON-420459941 tempest-ServerMetadataTestJSON-420459941-project-member] [instance: 71edba06-1628-4749-8a51-5a9bcb003fda] Terminating instance [ 1889.842455] env[62519]: DEBUG oslo_vmware.api [None req-47d089a4-bbf0-49c3-8821-7b27d9421f60 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Task: {'id': task-1803224, 'name': PowerOffVM_Task, 'duration_secs': 1.126801} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1889.842830] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-47d089a4-bbf0-49c3-8821-7b27d9421f60 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] [instance: f3665f89-1747-4567-9e56-c937d4ac81da] Powered off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1889.843618] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d24d6a1-5009-4d53-898d-b7fff1aeeb81 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1889.865631] env[62519]: DEBUG oslo_concurrency.lockutils [req-08b70b93-47aa-40aa-a7f7-ee6d50ed5c9f req-1542d30f-4654-4d26-93a0-932ae9719ebf service nova] Releasing lock "refresh_cache-fc3beaba-2ad5-4598-b562-557fdd552b39" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1889.865923] env[62519]: DEBUG nova.compute.manager [req-08b70b93-47aa-40aa-a7f7-ee6d50ed5c9f req-1542d30f-4654-4d26-93a0-932ae9719ebf service nova] [instance: 10bfd4ac-6f11-4c96-87a0-ce74bc1193c4] Received event network-changed-c5603e79-74fa-4628-bc67-0351b0407810 {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1889.866105] env[62519]: DEBUG nova.compute.manager [req-08b70b93-47aa-40aa-a7f7-ee6d50ed5c9f req-1542d30f-4654-4d26-93a0-932ae9719ebf service nova] [instance: 10bfd4ac-6f11-4c96-87a0-ce74bc1193c4] Refreshing instance network info cache due to event network-changed-c5603e79-74fa-4628-bc67-0351b0407810. {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11628}} [ 1889.866322] env[62519]: DEBUG oslo_concurrency.lockutils [req-08b70b93-47aa-40aa-a7f7-ee6d50ed5c9f req-1542d30f-4654-4d26-93a0-932ae9719ebf service nova] Acquiring lock "refresh_cache-10bfd4ac-6f11-4c96-87a0-ce74bc1193c4" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1889.866465] env[62519]: DEBUG oslo_concurrency.lockutils [req-08b70b93-47aa-40aa-a7f7-ee6d50ed5c9f req-1542d30f-4654-4d26-93a0-932ae9719ebf service nova] Acquired lock "refresh_cache-10bfd4ac-6f11-4c96-87a0-ce74bc1193c4" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1889.866626] env[62519]: DEBUG nova.network.neutron [req-08b70b93-47aa-40aa-a7f7-ee6d50ed5c9f req-1542d30f-4654-4d26-93a0-932ae9719ebf service nova] [instance: 10bfd4ac-6f11-4c96-87a0-ce74bc1193c4] Refreshing network info cache for port c5603e79-74fa-4628-bc67-0351b0407810 {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1889.868551] env[62519]: DEBUG oslo_concurrency.lockutils [req-4d141eb1-80ae-4f2b-9456-4246a17afbad req-962ce482-6d86-4435-829f-bca944fe1c7e service nova] Acquired lock "refresh_cache-fc3beaba-2ad5-4598-b562-557fdd552b39" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1889.868735] env[62519]: DEBUG nova.network.neutron [req-4d141eb1-80ae-4f2b-9456-4246a17afbad req-962ce482-6d86-4435-829f-bca944fe1c7e service nova] [instance: fc3beaba-2ad5-4598-b562-557fdd552b39] Refreshing network info cache for port 64341ffd-0da7-4574-9702-3fc0a03eb0e3 {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1889.872021] env[62519]: 
DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-466c7b1a-ac2c-4457-a8e7-16105447e016 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1889.900960] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-47d089a4-bbf0-49c3-8821-7b27d9421f60 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] [instance: f3665f89-1747-4567-9e56-c937d4ac81da] Powering off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1889.901412] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-51392d67-96f1-4ec2-9ebb-1ef6086c4d5a {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1889.910822] env[62519]: DEBUG oslo_vmware.api [None req-47d089a4-bbf0-49c3-8821-7b27d9421f60 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Waiting for the task: (returnval){ [ 1889.910822] env[62519]: value = "task-1803225" [ 1889.910822] env[62519]: _type = "Task" [ 1889.910822] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1889.921012] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-47d089a4-bbf0-49c3-8821-7b27d9421f60 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] [instance: f3665f89-1747-4567-9e56-c937d4ac81da] VM already powered off {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1889.921237] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-47d089a4-bbf0-49c3-8821-7b27d9421f60 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] [instance: f3665f89-1747-4567-9e56-c937d4ac81da] Processing image 15793716-f1d9-4a86-9030-717adf498693 {{(pid=62519) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1889.921487] env[62519]: DEBUG oslo_concurrency.lockutils [None req-47d089a4-bbf0-49c3-8821-7b27d9421f60 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1889.921639] env[62519]: DEBUG oslo_concurrency.lockutils [None req-47d089a4-bbf0-49c3-8821-7b27d9421f60 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Acquired lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1889.921817] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-47d089a4-bbf0-49c3-8821-7b27d9421f60 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62519) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1889.922092] env[62519]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with 
opID=oslo.vmware-bd6a3b7b-e6ad-4ce1-b325-01d46a042e77 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1889.932513] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-47d089a4-bbf0-49c3-8821-7b27d9421f60 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62519) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1889.932735] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-47d089a4-bbf0-49c3-8821-7b27d9421f60 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62519) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1889.933501] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e174544f-5a46-499e-ace9-fad66c8f883d {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1889.940583] env[62519]: DEBUG oslo_vmware.api [None req-47d089a4-bbf0-49c3-8821-7b27d9421f60 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Waiting for the task: (returnval){ [ 1889.940583] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]526158d7-9181-bd74-90fd-dbb2c670a10c" [ 1889.940583] env[62519]: _type = "Task" [ 1889.940583] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1889.946930] env[62519]: DEBUG nova.objects.instance [None req-9931da0a-62ad-4f27-8671-2f46d6493ee6 tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Lazy-loading 'flavor' on Instance uuid 46b3a0fb-29f6-4b66-a091-2d125b69d109 {{(pid=62519) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1889.952164] env[62519]: DEBUG oslo_vmware.api [None req-47d089a4-bbf0-49c3-8821-7b27d9421f60 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]526158d7-9181-bd74-90fd-dbb2c670a10c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1890.206447] env[62519]: DEBUG nova.compute.utils [None req-940b0ecd-a4df-4677-a140-fd29162a361e tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Using /dev/sd instead of None {{(pid=62519) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1890.215450] env[62519]: DEBUG nova.compute.manager [None req-940b0ecd-a4df-4677-a140-fd29162a361e tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] [instance: a0c60d49-83bb-434b-815c-c39e7493cbb7] Allocating IP information in the background. 
{{(pid=62519) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1989}} [ 1890.215629] env[62519]: DEBUG nova.network.neutron [None req-940b0ecd-a4df-4677-a140-fd29162a361e tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] [instance: a0c60d49-83bb-434b-815c-c39e7493cbb7] allocate_for_instance() {{(pid=62519) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1890.241391] env[62519]: DEBUG oslo_concurrency.lockutils [None req-88b0e9f4-13de-4192-9161-fe94ce7f8c9e tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Lock "eb5de0a4-0af3-4731-ab30-3ae3d72207a7" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.886s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1890.276638] env[62519]: DEBUG oslo_vmware.api [None req-1743e4f0-4a04-4d66-b257-6b699d7ecc4f tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Task: {'id': task-1803218, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1890.278706] env[62519]: DEBUG nova.policy [None req-940b0ecd-a4df-4677-a140-fd29162a361e tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '053a30aaf81b4cbd8ced7018ebfe1f40', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '0e755fb5a6e94068b6c99b1638081f5f', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62519) authorize /opt/stack/nova/nova/policy.py:192}} [ 1890.285731] env[62519]: DEBUG nova.compute.manager [None req-5cca4c34-b05c-4a92-b943-d0f0eb6d26e2 tempest-ServerMetadataTestJSON-420459941 tempest-ServerMetadataTestJSON-420459941-project-member] [instance: 71edba06-1628-4749-8a51-5a9bcb003fda] Start destroying the instance on the hypervisor. 
{{(pid=62519) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3166}} [ 1890.285952] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-5cca4c34-b05c-4a92-b943-d0f0eb6d26e2 tempest-ServerMetadataTestJSON-420459941 tempest-ServerMetadataTestJSON-420459941-project-member] [instance: 71edba06-1628-4749-8a51-5a9bcb003fda] Destroying instance {{(pid=62519) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1890.287745] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88acde44-bbe9-4237-8904-e31e870a073e {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1890.296730] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-5cca4c34-b05c-4a92-b943-d0f0eb6d26e2 tempest-ServerMetadataTestJSON-420459941 tempest-ServerMetadataTestJSON-420459941-project-member] [instance: 71edba06-1628-4749-8a51-5a9bcb003fda] Powering off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1890.296998] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-3c24b86e-7312-444f-b2fa-f57fc384cad1 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1890.303985] env[62519]: DEBUG oslo_vmware.api [None req-5cca4c34-b05c-4a92-b943-d0f0eb6d26e2 tempest-ServerMetadataTestJSON-420459941 tempest-ServerMetadataTestJSON-420459941-project-member] Waiting for the task: (returnval){ [ 1890.303985] env[62519]: value = "task-1803226" [ 1890.303985] env[62519]: _type = "Task" [ 1890.303985] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1890.317707] env[62519]: DEBUG oslo_vmware.api [None req-5cca4c34-b05c-4a92-b943-d0f0eb6d26e2 tempest-ServerMetadataTestJSON-420459941 tempest-ServerMetadataTestJSON-420459941-project-member] Task: {'id': task-1803226, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1890.461168] env[62519]: DEBUG oslo_vmware.api [None req-47d089a4-bbf0-49c3-8821-7b27d9421f60 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]526158d7-9181-bd74-90fd-dbb2c670a10c, 'name': SearchDatastore_Task, 'duration_secs': 0.012498} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1890.463794] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-048f0aa4-635c-498b-8912-de28ecfae4a9 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1890.472639] env[62519]: DEBUG oslo_vmware.api [None req-47d089a4-bbf0-49c3-8821-7b27d9421f60 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Waiting for the task: (returnval){ [ 1890.472639] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]5206e74c-2d82-9b16-4b57-5bc7b93ce4cf" [ 1890.472639] env[62519]: _type = "Task" [ 1890.472639] env[62519]: } to complete. 
{{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1890.496315] env[62519]: DEBUG oslo_vmware.api [None req-47d089a4-bbf0-49c3-8821-7b27d9421f60 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]5206e74c-2d82-9b16-4b57-5bc7b93ce4cf, 'name': SearchDatastore_Task, 'duration_secs': 0.01592} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1890.496816] env[62519]: DEBUG oslo_concurrency.lockutils [None req-47d089a4-bbf0-49c3-8821-7b27d9421f60 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Releasing lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1890.497547] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-47d089a4-bbf0-49c3-8821-7b27d9421f60 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Copying virtual disk from [datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk to [datastore1] f3665f89-1747-4567-9e56-c937d4ac81da/15793716-f1d9-4a86-9030-717adf498693-rescue.vmdk. {{(pid=62519) disk_copy /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:281}} [ 1890.500731] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e6df63d5-e24f-4705-8d7d-c7afe6acd476 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1890.512448] env[62519]: DEBUG oslo_vmware.api [None req-47d089a4-bbf0-49c3-8821-7b27d9421f60 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Waiting for the task: (returnval){ [ 1890.512448] env[62519]: value = "task-1803227" [ 1890.512448] env[62519]: _type = "Task" [ 1890.512448] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1890.525263] env[62519]: DEBUG oslo_vmware.api [None req-47d089a4-bbf0-49c3-8821-7b27d9421f60 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Task: {'id': task-1803227, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1890.604236] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e38679fd-5a29-411d-8f66-460d296bb5e0 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1890.616134] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10fcbe5c-531c-402d-8e03-c93ccb0327d3 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1890.656765] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d280f42-029a-4f1a-bed4-96f686433116 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1890.665723] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-202f5374-849b-4a22-8e51-b7f22096b518 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1890.682740] env[62519]: DEBUG nova.compute.provider_tree [None req-c9f0eb69-0e5a-4800-b99b-f2aeccf24d20 tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] Inventory has not changed in ProviderTree for provider: f8ca0d98-9158-4b85-ae0e-b106f966dd44 {{(pid=62519) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1890.715066] env[62519]: DEBUG nova.compute.manager [None req-940b0ecd-a4df-4677-a140-fd29162a361e tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] [instance: a0c60d49-83bb-434b-815c-c39e7493cbb7] Start building block device mappings for instance. {{(pid=62519) _build_resources /opt/stack/nova/nova/compute/manager.py:2873}} [ 1890.781149] env[62519]: DEBUG oslo_vmware.api [None req-1743e4f0-4a04-4d66-b257-6b699d7ecc4f tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Task: {'id': task-1803218, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1890.815829] env[62519]: DEBUG oslo_vmware.api [None req-5cca4c34-b05c-4a92-b943-d0f0eb6d26e2 tempest-ServerMetadataTestJSON-420459941 tempest-ServerMetadataTestJSON-420459941-project-member] Task: {'id': task-1803226, 'name': PowerOffVM_Task, 'duration_secs': 0.254994} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1890.816233] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-5cca4c34-b05c-4a92-b943-d0f0eb6d26e2 tempest-ServerMetadataTestJSON-420459941 tempest-ServerMetadataTestJSON-420459941-project-member] [instance: 71edba06-1628-4749-8a51-5a9bcb003fda] Powered off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1890.816449] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-5cca4c34-b05c-4a92-b943-d0f0eb6d26e2 tempest-ServerMetadataTestJSON-420459941 tempest-ServerMetadataTestJSON-420459941-project-member] [instance: 71edba06-1628-4749-8a51-5a9bcb003fda] Unregistering the VM {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1890.816755] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d3886329-2b1a-4270-a86d-6b0048a9ac8e {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1890.827513] env[62519]: DEBUG nova.compute.manager [req-3d5a6e36-b156-4f8d-b333-6d207cb32df7 req-ddb8f3df-7fd3-44c2-878b-59a961bfd617 service nova] [instance: 11d4a010-959f-4f53-94dc-7499007612ad] Received event network-changed-ca1a3bbf-3f10-4a96-a67d-b77464ab25e7 {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1890.827857] env[62519]: DEBUG nova.compute.manager [req-3d5a6e36-b156-4f8d-b333-6d207cb32df7 req-ddb8f3df-7fd3-44c2-878b-59a961bfd617 service nova] [instance: 11d4a010-959f-4f53-94dc-7499007612ad] Refreshing instance network info cache due to event network-changed-ca1a3bbf-3f10-4a96-a67d-b77464ab25e7. {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11628}} [ 1890.828186] env[62519]: DEBUG oslo_concurrency.lockutils [req-3d5a6e36-b156-4f8d-b333-6d207cb32df7 req-ddb8f3df-7fd3-44c2-878b-59a961bfd617 service nova] Acquiring lock "refresh_cache-11d4a010-959f-4f53-94dc-7499007612ad" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1890.828377] env[62519]: DEBUG oslo_concurrency.lockutils [req-3d5a6e36-b156-4f8d-b333-6d207cb32df7 req-ddb8f3df-7fd3-44c2-878b-59a961bfd617 service nova] Acquired lock "refresh_cache-11d4a010-959f-4f53-94dc-7499007612ad" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1890.828615] env[62519]: DEBUG nova.network.neutron [req-3d5a6e36-b156-4f8d-b333-6d207cb32df7 req-ddb8f3df-7fd3-44c2-878b-59a961bfd617 service nova] [instance: 11d4a010-959f-4f53-94dc-7499007612ad] Refreshing network info cache for port ca1a3bbf-3f10-4a96-a67d-b77464ab25e7 {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1890.966168] env[62519]: DEBUG oslo_concurrency.lockutils [None req-9931da0a-62ad-4f27-8671-2f46d6493ee6 tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Lock "46b3a0fb-29f6-4b66-a091-2d125b69d109" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 4.400s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1890.970176] env[62519]: DEBUG oslo_concurrency.lockutils [None req-d9dc84b1-1645-4129-b703-744d1038bd6f tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Lock "31db4b14-0ba3-4159-accc-31c21bd81322" 
"released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 15.641s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1891.035441] env[62519]: DEBUG oslo_vmware.api [None req-47d089a4-bbf0-49c3-8821-7b27d9421f60 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Task: {'id': task-1803227, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1891.151465] env[62519]: DEBUG nova.network.neutron [None req-940b0ecd-a4df-4677-a140-fd29162a361e tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] [instance: a0c60d49-83bb-434b-815c-c39e7493cbb7] Successfully created port: 421b10d0-d0fc-47f6-b77d-a123639b1c45 {{(pid=62519) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1891.185729] env[62519]: DEBUG nova.network.neutron [req-08b70b93-47aa-40aa-a7f7-ee6d50ed5c9f req-1542d30f-4654-4d26-93a0-932ae9719ebf service nova] [instance: 10bfd4ac-6f11-4c96-87a0-ce74bc1193c4] Updated VIF entry in instance network info cache for port c5603e79-74fa-4628-bc67-0351b0407810. {{(pid=62519) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1891.186237] env[62519]: DEBUG nova.network.neutron [req-08b70b93-47aa-40aa-a7f7-ee6d50ed5c9f req-1542d30f-4654-4d26-93a0-932ae9719ebf service nova] [instance: 10bfd4ac-6f11-4c96-87a0-ce74bc1193c4] Updating instance_info_cache with network_info: [{"id": "037e7926-5eac-487b-a2fa-4124ead561df", "address": "fa:16:3e:f0:b9:0e", "network": {"id": "3996e7f6-f093-4152-af91-6fb77f32a1c5", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-2013009403-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.197", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "63a46158057949478e5c79fbe0d4d5d4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3093647a-bab7-4562-ada0-428725e8c0fc", "external-id": "nsx-vlan-transportzone-660", "segmentation_id": 660, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap037e7926-5e", "ovs_interfaceid": "037e7926-5eac-487b-a2fa-4124ead561df", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "c5603e79-74fa-4628-bc67-0351b0407810", "address": "fa:16:3e:7f:dd:7f", "network": {"id": "3996e7f6-f093-4152-af91-6fb77f32a1c5", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-2013009403-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "63a46158057949478e5c79fbe0d4d5d4", "mtu": 8950, "physical_network": 
"default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3093647a-bab7-4562-ada0-428725e8c0fc", "external-id": "nsx-vlan-transportzone-660", "segmentation_id": 660, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc5603e79-74", "ovs_interfaceid": "c5603e79-74fa-4628-bc67-0351b0407810", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1891.191358] env[62519]: DEBUG nova.scheduler.client.report [None req-c9f0eb69-0e5a-4800-b99b-f2aeccf24d20 tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] Inventory has not changed for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 157, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1891.197307] env[62519]: DEBUG nova.network.neutron [req-4d141eb1-80ae-4f2b-9456-4246a17afbad req-962ce482-6d86-4435-829f-bca944fe1c7e service nova] [instance: fc3beaba-2ad5-4598-b562-557fdd552b39] Updated VIF entry in instance network info cache for port 64341ffd-0da7-4574-9702-3fc0a03eb0e3. {{(pid=62519) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1891.197699] env[62519]: DEBUG nova.network.neutron [req-4d141eb1-80ae-4f2b-9456-4246a17afbad req-962ce482-6d86-4435-829f-bca944fe1c7e service nova] [instance: fc3beaba-2ad5-4598-b562-557fdd552b39] Updating instance_info_cache with network_info: [{"id": "64341ffd-0da7-4574-9702-3fc0a03eb0e3", "address": "fa:16:3e:62:ec:77", "network": {"id": "227244cd-a495-40a7-b8e1-1fb111ecd3ef", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-482185283-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.252", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "91c1732890db42f98f538f7a5ac0d542", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2bf99f85-3a5c-47c6-a603-e215be6ab0bd", "external-id": "nsx-vlan-transportzone-855", "segmentation_id": 855, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap64341ffd-0d", "ovs_interfaceid": "64341ffd-0da7-4574-9702-3fc0a03eb0e3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1891.245802] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-5cca4c34-b05c-4a92-b943-d0f0eb6d26e2 tempest-ServerMetadataTestJSON-420459941 
tempest-ServerMetadataTestJSON-420459941-project-member] [instance: 71edba06-1628-4749-8a51-5a9bcb003fda] Unregistered the VM {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1891.246115] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-5cca4c34-b05c-4a92-b943-d0f0eb6d26e2 tempest-ServerMetadataTestJSON-420459941 tempest-ServerMetadataTestJSON-420459941-project-member] [instance: 71edba06-1628-4749-8a51-5a9bcb003fda] Deleting contents of the VM from datastore datastore1 {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1891.246291] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-5cca4c34-b05c-4a92-b943-d0f0eb6d26e2 tempest-ServerMetadataTestJSON-420459941 tempest-ServerMetadataTestJSON-420459941-project-member] Deleting the datastore file [datastore1] 71edba06-1628-4749-8a51-5a9bcb003fda {{(pid=62519) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1891.246783] env[62519]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-06df84c1-d54f-4996-901d-bda6d309a6a1 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1891.258443] env[62519]: DEBUG oslo_vmware.api [None req-5cca4c34-b05c-4a92-b943-d0f0eb6d26e2 tempest-ServerMetadataTestJSON-420459941 tempest-ServerMetadataTestJSON-420459941-project-member] Waiting for the task: (returnval){ [ 1891.258443] env[62519]: value = "task-1803229" [ 1891.258443] env[62519]: _type = "Task" [ 1891.258443] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1891.275157] env[62519]: DEBUG oslo_vmware.api [None req-5cca4c34-b05c-4a92-b943-d0f0eb6d26e2 tempest-ServerMetadataTestJSON-420459941 tempest-ServerMetadataTestJSON-420459941-project-member] Task: {'id': task-1803229, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1891.281067] env[62519]: DEBUG oslo_vmware.api [None req-1743e4f0-4a04-4d66-b257-6b699d7ecc4f tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Task: {'id': task-1803218, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1891.525400] env[62519]: DEBUG oslo_vmware.api [None req-47d089a4-bbf0-49c3-8821-7b27d9421f60 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Task: {'id': task-1803227, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.918788} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1891.525400] env[62519]: INFO nova.virt.vmwareapi.ds_util [None req-47d089a4-bbf0-49c3-8821-7b27d9421f60 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Copied virtual disk from [datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk to [datastore1] f3665f89-1747-4567-9e56-c937d4ac81da/15793716-f1d9-4a86-9030-717adf498693-rescue.vmdk. 
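
The CopyVirtualDisk_Task records above follow oslo.vmware's invoke-then-poll pattern: the SOAP call returns a Task moref immediately, and wait_for_task then polls it on a fixed interval (the "progress is N%" lines) until it reports "completed successfully" or raises. A minimal Python sketch of that pattern; the vCenter endpoint, credentials, datastore paths and the datacenter lookup below are assumed placeholders, not values from this log:

    from oslo_vmware import api as vmware_api
    from oslo_vmware import vim_util

    # Placeholder endpoint and credentials (assumed, not taken from this log).
    session = vmware_api.VMwareAPISession(
        'vc.example.test', 'admin', 'secret',
        api_retry_count=10, task_poll_interval=0.5)

    # Any datacenter moref will do to scope the datastore-path names below.
    dc_ref = session.invoke_api(
        vim_util, 'get_objects', session.vim, 'Datacenter', 1).objects[0].obj

    # Kick off the asynchronous copy; vCenter hands back a Task moref at once.
    task = session.invoke_api(
        session.vim, 'CopyVirtualDisk_Task',
        session.vim.service_content.virtualDiskManager,
        sourceName='[datastore1] cache/base.vmdk', sourceDatacenter=dc_ref,
        destName='[datastore1] guest/guest-rescue.vmdk', destDatacenter=dc_ref)

    # Polls TaskInfo every task_poll_interval seconds, producing the same kind
    # of progress/completed lines seen above; raises if the task errors out.
    session.wait_for_task(task)

Nova's vmwareapi driver wraps this same pattern in ds_util.disk_copy, which is the call site named in the "Copying virtual disk" record above.
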
[ 1891.526241] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bce60575-b8f3-4956-aff0-e36e3e831d76 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1891.565145] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-47d089a4-bbf0-49c3-8821-7b27d9421f60 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] [instance: f3665f89-1747-4567-9e56-c937d4ac81da] Reconfiguring VM instance instance-0000005f to attach disk [datastore1] f3665f89-1747-4567-9e56-c937d4ac81da/15793716-f1d9-4a86-9030-717adf498693-rescue.vmdk or device None with type thin {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1891.565546] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ba29fd10-8c58-45a5-91f1-2b9f7bd2f771 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1891.586935] env[62519]: DEBUG oslo_vmware.api [None req-47d089a4-bbf0-49c3-8821-7b27d9421f60 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Waiting for the task: (returnval){ [ 1891.586935] env[62519]: value = "task-1803230" [ 1891.586935] env[62519]: _type = "Task" [ 1891.586935] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1891.599294] env[62519]: DEBUG oslo_vmware.api [None req-47d089a4-bbf0-49c3-8821-7b27d9421f60 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Task: {'id': task-1803230, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1891.700886] env[62519]: DEBUG oslo_concurrency.lockutils [req-08b70b93-47aa-40aa-a7f7-ee6d50ed5c9f req-1542d30f-4654-4d26-93a0-932ae9719ebf service nova] Releasing lock "refresh_cache-10bfd4ac-6f11-4c96-87a0-ce74bc1193c4" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1891.701535] env[62519]: DEBUG nova.compute.manager [req-08b70b93-47aa-40aa-a7f7-ee6d50ed5c9f req-1542d30f-4654-4d26-93a0-932ae9719ebf service nova] [instance: 31db4b14-0ba3-4159-accc-31c21bd81322] Received event network-vif-plugged-082d375d-5e85-4e5e-a40e-661c492b5f5d {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1891.701535] env[62519]: DEBUG oslo_concurrency.lockutils [req-08b70b93-47aa-40aa-a7f7-ee6d50ed5c9f req-1542d30f-4654-4d26-93a0-932ae9719ebf service nova] Acquiring lock "31db4b14-0ba3-4159-accc-31c21bd81322-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1891.703085] env[62519]: DEBUG oslo_concurrency.lockutils [req-08b70b93-47aa-40aa-a7f7-ee6d50ed5c9f req-1542d30f-4654-4d26-93a0-932ae9719ebf service nova] Lock "31db4b14-0ba3-4159-accc-31c21bd81322-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1891.703085] env[62519]: DEBUG oslo_concurrency.lockutils [req-08b70b93-47aa-40aa-a7f7-ee6d50ed5c9f req-1542d30f-4654-4d26-93a0-932ae9719ebf service nova] Lock "31db4b14-0ba3-4159-accc-31c21bd81322-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1891.703085] env[62519]: DEBUG nova.compute.manager [req-08b70b93-47aa-40aa-a7f7-ee6d50ed5c9f req-1542d30f-4654-4d26-93a0-932ae9719ebf service nova] [instance: 31db4b14-0ba3-4159-accc-31c21bd81322] No waiting events found dispatching network-vif-plugged-082d375d-5e85-4e5e-a40e-661c492b5f5d {{(pid=62519) pop_instance_event /opt/stack/nova/nova/compute/manager.py:323}} [ 1891.703085] env[62519]: WARNING nova.compute.manager [req-08b70b93-47aa-40aa-a7f7-ee6d50ed5c9f req-1542d30f-4654-4d26-93a0-932ae9719ebf service nova] [instance: 31db4b14-0ba3-4159-accc-31c21bd81322] Received unexpected event network-vif-plugged-082d375d-5e85-4e5e-a40e-661c492b5f5d for instance with vm_state building and task_state spawning. [ 1891.703085] env[62519]: DEBUG nova.compute.manager [req-08b70b93-47aa-40aa-a7f7-ee6d50ed5c9f req-1542d30f-4654-4d26-93a0-932ae9719ebf service nova] [instance: 31db4b14-0ba3-4159-accc-31c21bd81322] Received event network-changed-082d375d-5e85-4e5e-a40e-661c492b5f5d {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1891.703085] env[62519]: DEBUG nova.compute.manager [req-08b70b93-47aa-40aa-a7f7-ee6d50ed5c9f req-1542d30f-4654-4d26-93a0-932ae9719ebf service nova] [instance: 31db4b14-0ba3-4159-accc-31c21bd81322] Refreshing instance network info cache due to event network-changed-082d375d-5e85-4e5e-a40e-661c492b5f5d. 
{{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11628}} [ 1891.704141] env[62519]: DEBUG oslo_concurrency.lockutils [req-08b70b93-47aa-40aa-a7f7-ee6d50ed5c9f req-1542d30f-4654-4d26-93a0-932ae9719ebf service nova] Acquiring lock "refresh_cache-31db4b14-0ba3-4159-accc-31c21bd81322" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1891.704291] env[62519]: DEBUG oslo_concurrency.lockutils [req-08b70b93-47aa-40aa-a7f7-ee6d50ed5c9f req-1542d30f-4654-4d26-93a0-932ae9719ebf service nova] Acquired lock "refresh_cache-31db4b14-0ba3-4159-accc-31c21bd81322" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1891.704454] env[62519]: DEBUG nova.network.neutron [req-08b70b93-47aa-40aa-a7f7-ee6d50ed5c9f req-1542d30f-4654-4d26-93a0-932ae9719ebf service nova] [instance: 31db4b14-0ba3-4159-accc-31c21bd81322] Refreshing network info cache for port 082d375d-5e85-4e5e-a40e-661c492b5f5d {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1891.706170] env[62519]: DEBUG oslo_concurrency.lockutils [None req-c9f0eb69-0e5a-4800-b99b-f2aeccf24d20 tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.003s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1891.709556] env[62519]: DEBUG oslo_concurrency.lockutils [req-4d141eb1-80ae-4f2b-9456-4246a17afbad req-962ce482-6d86-4435-829f-bca944fe1c7e service nova] Releasing lock "refresh_cache-fc3beaba-2ad5-4598-b562-557fdd552b39" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1891.727664] env[62519]: DEBUG nova.compute.manager [None req-940b0ecd-a4df-4677-a140-fd29162a361e tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] [instance: a0c60d49-83bb-434b-815c-c39e7493cbb7] Start spawning the instance on the hypervisor. {{(pid=62519) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2647}} [ 1891.733196] env[62519]: INFO nova.scheduler.client.report [None req-c9f0eb69-0e5a-4800-b99b-f2aeccf24d20 tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] Deleted allocations for instance 70abb2e0-1ff2-49dd-b40f-9cac244a249e [ 1891.743119] env[62519]: DEBUG nova.network.neutron [req-3d5a6e36-b156-4f8d-b333-6d207cb32df7 req-ddb8f3df-7fd3-44c2-878b-59a961bfd617 service nova] [instance: 11d4a010-959f-4f53-94dc-7499007612ad] Updated VIF entry in instance network info cache for port ca1a3bbf-3f10-4a96-a67d-b77464ab25e7. 
{{(pid=62519) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1891.743828] env[62519]: DEBUG nova.network.neutron [req-3d5a6e36-b156-4f8d-b333-6d207cb32df7 req-ddb8f3df-7fd3-44c2-878b-59a961bfd617 service nova] [instance: 11d4a010-959f-4f53-94dc-7499007612ad] Updating instance_info_cache with network_info: [{"id": "ca1a3bbf-3f10-4a96-a67d-b77464ab25e7", "address": "fa:16:3e:cc:ef:21", "network": {"id": "ddacabe4-9723-4a0b-9642-4c3ca54cd05f", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1355796152-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "549cc35f5ff249f6bf22c67872883db0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bc1e16db-ad3b-4b7f-ab64-4609c87abac0", "external-id": "nsx-vlan-transportzone-500", "segmentation_id": 500, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapca1a3bbf-3f", "ovs_interfaceid": "ca1a3bbf-3f10-4a96-a67d-b77464ab25e7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1891.776051] env[62519]: DEBUG oslo_vmware.api [None req-5cca4c34-b05c-4a92-b943-d0f0eb6d26e2 tempest-ServerMetadataTestJSON-420459941 tempest-ServerMetadataTestJSON-420459941-project-member] Task: {'id': task-1803229, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.492427} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1891.777696] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-5cca4c34-b05c-4a92-b943-d0f0eb6d26e2 tempest-ServerMetadataTestJSON-420459941 tempest-ServerMetadataTestJSON-420459941-project-member] Deleted the datastore file {{(pid=62519) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1891.777915] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-5cca4c34-b05c-4a92-b943-d0f0eb6d26e2 tempest-ServerMetadataTestJSON-420459941 tempest-ServerMetadataTestJSON-420459941-project-member] [instance: 71edba06-1628-4749-8a51-5a9bcb003fda] Deleted contents of the VM from datastore datastore1 {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1891.778114] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-5cca4c34-b05c-4a92-b943-d0f0eb6d26e2 tempest-ServerMetadataTestJSON-420459941 tempest-ServerMetadataTestJSON-420459941-project-member] [instance: 71edba06-1628-4749-8a51-5a9bcb003fda] Instance destroyed {{(pid=62519) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1891.778290] env[62519]: INFO nova.compute.manager [None req-5cca4c34-b05c-4a92-b943-d0f0eb6d26e2 tempest-ServerMetadataTestJSON-420459941 tempest-ServerMetadataTestJSON-420459941-project-member] [instance: 71edba06-1628-4749-8a51-5a9bcb003fda] Took 1.49 seconds to destroy the instance on the hypervisor. 
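
The Acquiring lock / acquired / "released" triplets that run through these records come from oslo.concurrency's lockutils helpers (the lockutils.py call sites shown in the trailing braces). A minimal sketch of the two usual forms, with the lock names borrowed from nearby records purely for illustration:

    from oslo_concurrency import lockutils

    # Decorator form: concurrent callers using the same lock name are
    # serialized, which is what produces the waited/held timings above.
    @lockutils.synchronized('refresh_cache-11d4a010-959f-4f53-94dc-7499007612ad')
    def refresh_instance_network_cache():
        pass  # critical section: rebuild the instance network info cache

    # Context-manager form, equivalent to an explicit acquire/release pair.
    with lockutils.lock('compute_resources'):
        pass  # e.g. update resource-tracker state while holding the lock

    refresh_instance_network_cache()

Both forms default to process-local locks; passing external=True switches to file-based locks shared across processes.
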
[ 1891.778527] env[62519]: DEBUG oslo.service.loopingcall [None req-5cca4c34-b05c-4a92-b943-d0f0eb6d26e2 tempest-ServerMetadataTestJSON-420459941 tempest-ServerMetadataTestJSON-420459941-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62519) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1891.778971] env[62519]: DEBUG nova.compute.manager [-] [instance: 71edba06-1628-4749-8a51-5a9bcb003fda] Deallocating network for instance {{(pid=62519) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2297}} [ 1891.779079] env[62519]: DEBUG nova.network.neutron [-] [instance: 71edba06-1628-4749-8a51-5a9bcb003fda] deallocate_for_instance() {{(pid=62519) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1891.786750] env[62519]: DEBUG oslo_vmware.api [None req-1743e4f0-4a04-4d66-b257-6b699d7ecc4f tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Task: {'id': task-1803218, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1891.789203] env[62519]: DEBUG nova.virt.hardware [None req-940b0ecd-a4df-4677-a140-fd29162a361e tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T07:56:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T07:55:55Z,direct_url=,disk_format='vmdk',id=15793716-f1d9-4a86-9030-717adf498693,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4069337684d348e0a1ab4eb6a1a2b14d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T07:55:56Z,virtual_size=,visibility=), allow threads: False {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1891.789314] env[62519]: DEBUG nova.virt.hardware [None req-940b0ecd-a4df-4677-a140-fd29162a361e tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Flavor limits 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1891.789467] env[62519]: DEBUG nova.virt.hardware [None req-940b0ecd-a4df-4677-a140-fd29162a361e tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Image limits 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1891.789655] env[62519]: DEBUG nova.virt.hardware [None req-940b0ecd-a4df-4677-a140-fd29162a361e tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Flavor pref 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1891.789833] env[62519]: DEBUG nova.virt.hardware [None req-940b0ecd-a4df-4677-a140-fd29162a361e tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Image pref 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1891.789991] env[62519]: DEBUG nova.virt.hardware [None 
req-940b0ecd-a4df-4677-a140-fd29162a361e tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1891.790331] env[62519]: DEBUG nova.virt.hardware [None req-940b0ecd-a4df-4677-a140-fd29162a361e tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1891.790516] env[62519]: DEBUG nova.virt.hardware [None req-940b0ecd-a4df-4677-a140-fd29162a361e tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62519) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1891.790713] env[62519]: DEBUG nova.virt.hardware [None req-940b0ecd-a4df-4677-a140-fd29162a361e tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Got 1 possible topologies {{(pid=62519) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1891.790872] env[62519]: DEBUG nova.virt.hardware [None req-940b0ecd-a4df-4677-a140-fd29162a361e tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1891.791073] env[62519]: DEBUG nova.virt.hardware [None req-940b0ecd-a4df-4677-a140-fd29162a361e tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1891.792507] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e3ab9ea-1cbd-4c9a-9539-2715bf1d17f8 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1891.807631] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5185e4bd-02d5-4c78-828a-52786d3662e0 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1891.949115] env[62519]: DEBUG oslo_concurrency.lockutils [None req-5eeb6587-adf0-43be-8c1a-61c4105beaba tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Acquiring lock "4a0f7975-5a07-4593-ae71-cabebdefe0fe" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1891.949115] env[62519]: DEBUG oslo_concurrency.lockutils [None req-5eeb6587-adf0-43be-8c1a-61c4105beaba tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Lock "4a0f7975-5a07-4593-ae71-cabebdefe0fe" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1891.949115] env[62519]: DEBUG nova.compute.manager [None req-5eeb6587-adf0-43be-8c1a-61c4105beaba 
tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] [instance: 4a0f7975-5a07-4593-ae71-cabebdefe0fe] Checking state {{(pid=62519) _get_power_state /opt/stack/nova/nova/compute/manager.py:1799}} [ 1891.949401] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-437ca6d1-1735-48c2-b4d1-739cd7e2e66d {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1891.957479] env[62519]: DEBUG nova.compute.manager [None req-5eeb6587-adf0-43be-8c1a-61c4105beaba tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] [instance: 4a0f7975-5a07-4593-ae71-cabebdefe0fe] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=62519) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3408}} [ 1891.958273] env[62519]: DEBUG nova.objects.instance [None req-5eeb6587-adf0-43be-8c1a-61c4105beaba tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Lazy-loading 'flavor' on Instance uuid 4a0f7975-5a07-4593-ae71-cabebdefe0fe {{(pid=62519) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1892.101185] env[62519]: DEBUG oslo_vmware.api [None req-47d089a4-bbf0-49c3-8821-7b27d9421f60 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Task: {'id': task-1803230, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1892.159595] env[62519]: DEBUG nova.compute.manager [req-f1b18cf3-a9e4-47e9-a294-eda944c22b56 req-344f9792-89bd-4dc1-90e3-2315937e94dc service nova] [instance: 71edba06-1628-4749-8a51-5a9bcb003fda] Received event network-vif-deleted-ff83eb74-d7d7-49d4-8a99-813264a7d1ab {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1892.159595] env[62519]: INFO nova.compute.manager [req-f1b18cf3-a9e4-47e9-a294-eda944c22b56 req-344f9792-89bd-4dc1-90e3-2315937e94dc service nova] [instance: 71edba06-1628-4749-8a51-5a9bcb003fda] Neutron deleted interface ff83eb74-d7d7-49d4-8a99-813264a7d1ab; detaching it from the instance and deleting it from the info cache [ 1892.159595] env[62519]: DEBUG nova.network.neutron [req-f1b18cf3-a9e4-47e9-a294-eda944c22b56 req-344f9792-89bd-4dc1-90e3-2315937e94dc service nova] [instance: 71edba06-1628-4749-8a51-5a9bcb003fda] Updating instance_info_cache with network_info: [] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1892.240509] env[62519]: DEBUG oslo_concurrency.lockutils [None req-c9f0eb69-0e5a-4800-b99b-f2aeccf24d20 tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] Lock "70abb2e0-1ff2-49dd-b40f-9cac244a249e" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.780s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1892.250258] env[62519]: DEBUG oslo_concurrency.lockutils [None req-d89ebd38-be78-4944-865d-e148c2f80f82 tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Acquiring lock "46b3a0fb-29f6-4b66-a091-2d125b69d109" by 
"nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1892.250518] env[62519]: DEBUG oslo_concurrency.lockutils [None req-d89ebd38-be78-4944-865d-e148c2f80f82 tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Lock "46b3a0fb-29f6-4b66-a091-2d125b69d109" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1892.250743] env[62519]: DEBUG oslo_concurrency.lockutils [None req-d89ebd38-be78-4944-865d-e148c2f80f82 tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Acquiring lock "46b3a0fb-29f6-4b66-a091-2d125b69d109-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1892.250926] env[62519]: DEBUG oslo_concurrency.lockutils [None req-d89ebd38-be78-4944-865d-e148c2f80f82 tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Lock "46b3a0fb-29f6-4b66-a091-2d125b69d109-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1892.251106] env[62519]: DEBUG oslo_concurrency.lockutils [None req-d89ebd38-be78-4944-865d-e148c2f80f82 tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Lock "46b3a0fb-29f6-4b66-a091-2d125b69d109-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1892.252834] env[62519]: DEBUG oslo_concurrency.lockutils [req-3d5a6e36-b156-4f8d-b333-6d207cb32df7 req-ddb8f3df-7fd3-44c2-878b-59a961bfd617 service nova] Releasing lock "refresh_cache-11d4a010-959f-4f53-94dc-7499007612ad" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1892.254455] env[62519]: INFO nova.compute.manager [None req-d89ebd38-be78-4944-865d-e148c2f80f82 tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] [instance: 46b3a0fb-29f6-4b66-a091-2d125b69d109] Terminating instance [ 1892.265015] env[62519]: ERROR oslo_messaging.rpc.server [None req-88b0e9f4-13de-4192-9161-fe94ce7f8c9e tempest-DeleteServersTestJSON-1737011426 tempest-DeleteServersTestJSON-1737011426-project-member] Exception during message handling: nova.exception_Remote.InstanceActionNotFound_Remote: Action for request_id req-88b0e9f4-13de-4192-9161-fe94ce7f8c9e on instance eb5de0a4-0af3-4731-ab30-3ae3d72207a7 not found [ 1892.265015] env[62519]: Traceback (most recent call last): [ 1892.265015] env[62519]: File "/opt/stack/nova/nova/conductor/manager.py", line 142, in _object_dispatch [ 1892.265015] env[62519]: return getattr(target, method)(*args, **kwargs) [ 1892.265015] env[62519]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_versionedobjects/base.py", line 184, in wrapper [ 1892.265015] env[62519]: result = fn(cls, context, *args, **kwargs) [ 1892.265015] env[62519]: 
File "/opt/stack/nova/nova/objects/instance_action.py", line 93, in action_finish [ 1892.265015] env[62519]: db_action = db.action_finish(context, values) [ 1892.265015] env[62519]: File "/opt/stack/nova/nova/db/main/api.py", line 207, in wrapper [ 1892.265015] env[62519]: return f(context, *args, **kwargs) [ 1892.265015] env[62519]: File "/opt/stack/nova/nova/db/main/api.py", line 3913, in action_finish [ 1892.265015] env[62519]: raise exception.InstanceActionNotFound( [ 1892.265015] env[62519]: nova.exception.InstanceActionNotFound: Action for request_id req-88b0e9f4-13de-4192-9161-fe94ce7f8c9e on instance eb5de0a4-0af3-4731-ab30-3ae3d72207a7 not found [ 1892.265015] env[62519]: ERROR oslo_messaging.rpc.server Traceback (most recent call last): [ 1892.265015] env[62519]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/server.py", line 172, in _process_incoming [ 1892.265015] env[62519]: ERROR oslo_messaging.rpc.server res = self.dispatcher.dispatch(message) [ 1892.265015] env[62519]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 309, in dispatch [ 1892.265015] env[62519]: ERROR oslo_messaging.rpc.server return self._do_dispatch(endpoint, method, ctxt, args) [ 1892.265015] env[62519]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 229, in _do_dispatch [ 1892.265015] env[62519]: ERROR oslo_messaging.rpc.server result = func(ctxt, **new_args) [ 1892.265015] env[62519]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/exception_wrapper.py", line 65, in wrapped [ 1892.265015] env[62519]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1892.265015] env[62519]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1892.265015] env[62519]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1892.265015] env[62519]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1892.265015] env[62519]: ERROR oslo_messaging.rpc.server raise self.value [ 1892.265015] env[62519]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/exception_wrapper.py", line 63, in wrapped [ 1892.265015] env[62519]: ERROR oslo_messaging.rpc.server return f(self, context, *args, **kw) [ 1892.265015] env[62519]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 169, in decorated_function [ 1892.265015] env[62519]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1892.265015] env[62519]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1892.265015] env[62519]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1892.265015] env[62519]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1892.265015] env[62519]: ERROR oslo_messaging.rpc.server raise self.value [ 1892.265015] env[62519]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 160, in decorated_function [ 1892.265015] env[62519]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 1892.265015] env[62519]: ERROR 
oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/utils.py", line 1531, in decorated_function [ 1892.265015] env[62519]: ERROR oslo_messaging.rpc.server with ActionReporter(context, instance_uuid, [ 1892.265015] env[62519]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/utils.py", line 1514, in __exit__ [ 1892.265015] env[62519]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception( [ 1892.265015] env[62519]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1892.265015] env[62519]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1892.265015] env[62519]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1892.265015] env[62519]: ERROR oslo_messaging.rpc.server raise self.value [ 1892.265015] env[62519]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/utils.py", line 1510, in __exit__ [ 1892.265015] env[62519]: ERROR oslo_messaging.rpc.server action.action_finish(self.context, uuid) [ 1892.265015] env[62519]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_versionedobjects/base.py", line 175, in wrapper [ 1892.265015] env[62519]: ERROR oslo_messaging.rpc.server result = cls.indirection_api.object_class_action_versions( [ 1892.265015] env[62519]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/conductor/rpcapi.py", line 240, in object_class_action_versions [ 1892.265015] env[62519]: ERROR oslo_messaging.rpc.server return cctxt.call(context, 'object_class_action_versions', [ 1892.265015] env[62519]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/client.py", line 190, in call [ 1892.265015] env[62519]: ERROR oslo_messaging.rpc.server result = self.transport._send( [ 1892.265015] env[62519]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/transport.py", line 123, in _send [ 1892.265015] env[62519]: ERROR oslo_messaging.rpc.server return self._driver.send(target, ctxt, message, [ 1892.265015] env[62519]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 800, in send [ 1892.266901] env[62519]: ERROR oslo_messaging.rpc.server return self._send(target, ctxt, message, wait_for_reply, timeout, [ 1892.266901] env[62519]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 792, in _send [ 1892.266901] env[62519]: ERROR oslo_messaging.rpc.server raise result [ 1892.266901] env[62519]: ERROR oslo_messaging.rpc.server nova.exception_Remote.InstanceActionNotFound_Remote: Action for request_id req-88b0e9f4-13de-4192-9161-fe94ce7f8c9e on instance eb5de0a4-0af3-4731-ab30-3ae3d72207a7 not found [ 1892.266901] env[62519]: ERROR oslo_messaging.rpc.server Traceback (most recent call last): [ 1892.266901] env[62519]: ERROR oslo_messaging.rpc.server [ 1892.266901] env[62519]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/conductor/manager.py", line 142, in _object_dispatch [ 1892.266901] env[62519]: ERROR oslo_messaging.rpc.server return getattr(target, method)(*args, **kwargs) [ 1892.266901] env[62519]: ERROR oslo_messaging.rpc.server [ 1892.266901] env[62519]: ERROR oslo_messaging.rpc.server File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_versionedobjects/base.py", line 184, in wrapper [ 1892.266901] env[62519]: ERROR oslo_messaging.rpc.server result = fn(cls, context, *args, **kwargs) [ 1892.266901] env[62519]: ERROR oslo_messaging.rpc.server [ 1892.266901] env[62519]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/objects/instance_action.py", line 93, in action_finish [ 1892.266901] env[62519]: ERROR oslo_messaging.rpc.server db_action = db.action_finish(context, values) [ 1892.266901] env[62519]: ERROR oslo_messaging.rpc.server [ 1892.266901] env[62519]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/db/main/api.py", line 207, in wrapper [ 1892.266901] env[62519]: ERROR oslo_messaging.rpc.server return f(context, *args, **kwargs) [ 1892.266901] env[62519]: ERROR oslo_messaging.rpc.server [ 1892.266901] env[62519]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/db/main/api.py", line 3913, in action_finish [ 1892.266901] env[62519]: ERROR oslo_messaging.rpc.server raise exception.InstanceActionNotFound( [ 1892.266901] env[62519]: ERROR oslo_messaging.rpc.server [ 1892.266901] env[62519]: ERROR oslo_messaging.rpc.server nova.exception.InstanceActionNotFound: Action for request_id req-88b0e9f4-13de-4192-9161-fe94ce7f8c9e on instance eb5de0a4-0af3-4731-ab30-3ae3d72207a7 not found [ 1892.266901] env[62519]: ERROR oslo_messaging.rpc.server [ 1892.266901] env[62519]: ERROR oslo_messaging.rpc.server [ 1892.277206] env[62519]: DEBUG oslo_vmware.api [None req-1743e4f0-4a04-4d66-b257-6b699d7ecc4f tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Task: {'id': task-1803218, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1892.441379] env[62519]: DEBUG nova.network.neutron [req-08b70b93-47aa-40aa-a7f7-ee6d50ed5c9f req-1542d30f-4654-4d26-93a0-932ae9719ebf service nova] [instance: 31db4b14-0ba3-4159-accc-31c21bd81322] Updated VIF entry in instance network info cache for port 082d375d-5e85-4e5e-a40e-661c492b5f5d. 
{{(pid=62519) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1892.442018] env[62519]: DEBUG nova.network.neutron [req-08b70b93-47aa-40aa-a7f7-ee6d50ed5c9f req-1542d30f-4654-4d26-93a0-932ae9719ebf service nova] [instance: 31db4b14-0ba3-4159-accc-31c21bd81322] Updating instance_info_cache with network_info: [{"id": "082d375d-5e85-4e5e-a40e-661c492b5f5d", "address": "fa:16:3e:4a:40:42", "network": {"id": "ddacabe4-9723-4a0b-9642-4c3ca54cd05f", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1355796152-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.215", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "549cc35f5ff249f6bf22c67872883db0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bc1e16db-ad3b-4b7f-ab64-4609c87abac0", "external-id": "nsx-vlan-transportzone-500", "segmentation_id": 500, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap082d375d-5e", "ovs_interfaceid": "082d375d-5e85-4e5e-a40e-661c492b5f5d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1892.550506] env[62519]: DEBUG nova.compute.manager [None req-90b160da-5ad5-46c7-9fd2-3632227a7909 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] [instance: 31db4b14-0ba3-4159-accc-31c21bd81322] Stashing vm_state: active {{(pid=62519) _prep_resize /opt/stack/nova/nova/compute/manager.py:6081}} [ 1892.602982] env[62519]: DEBUG oslo_vmware.api [None req-47d089a4-bbf0-49c3-8821-7b27d9421f60 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Task: {'id': task-1803230, 'name': ReconfigVM_Task, 'duration_secs': 0.838808} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1892.603284] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-47d089a4-bbf0-49c3-8821-7b27d9421f60 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] [instance: f3665f89-1747-4567-9e56-c937d4ac81da] Reconfigured VM instance instance-0000005f to attach disk [datastore1] f3665f89-1747-4567-9e56-c937d4ac81da/15793716-f1d9-4a86-9030-717adf498693-rescue.vmdk or device None with type thin {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1892.604365] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-192653ff-5f77-46f3-8f61-181ace48ed54 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1892.633115] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e00eb296-d0a4-4741-b1f2-ae56878dbf8b {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1892.643389] env[62519]: DEBUG nova.network.neutron [-] [instance: 71edba06-1628-4749-8a51-5a9bcb003fda] Updating instance_info_cache with network_info: [] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1892.651757] env[62519]: DEBUG oslo_vmware.api [None req-47d089a4-bbf0-49c3-8821-7b27d9421f60 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Waiting for the task: (returnval){ [ 1892.651757] env[62519]: value = "task-1803232" [ 1892.651757] env[62519]: _type = "Task" [ 1892.651757] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1892.660537] env[62519]: DEBUG oslo_vmware.api [None req-47d089a4-bbf0-49c3-8821-7b27d9421f60 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Task: {'id': task-1803232, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1892.664763] env[62519]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-862c262a-309b-4e13-b77f-46e0c6a79d78 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1892.671775] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ad9d858-a984-4624-97dc-9d95dc766ede {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1892.710430] env[62519]: DEBUG nova.compute.manager [req-f1b18cf3-a9e4-47e9-a294-eda944c22b56 req-344f9792-89bd-4dc1-90e3-2315937e94dc service nova] [instance: 71edba06-1628-4749-8a51-5a9bcb003fda] Detach interface failed, port_id=ff83eb74-d7d7-49d4-8a99-813264a7d1ab, reason: Instance 71edba06-1628-4749-8a51-5a9bcb003fda could not be found. 
{{(pid=62519) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11455}} [ 1892.758044] env[62519]: DEBUG nova.compute.manager [None req-d89ebd38-be78-4944-865d-e148c2f80f82 tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] [instance: 46b3a0fb-29f6-4b66-a091-2d125b69d109] Start destroying the instance on the hypervisor. {{(pid=62519) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3166}} [ 1892.758271] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-d89ebd38-be78-4944-865d-e148c2f80f82 tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] [instance: 46b3a0fb-29f6-4b66-a091-2d125b69d109] Destroying instance {{(pid=62519) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1892.759359] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da56b9ae-7834-47b3-af85-dc9de806a709 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1892.767961] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-d89ebd38-be78-4944-865d-e148c2f80f82 tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] [instance: 46b3a0fb-29f6-4b66-a091-2d125b69d109] Powering off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1892.771025] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-7bd19bbe-f638-46b1-ad6d-7ce3f5f2a37c {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1892.779016] env[62519]: DEBUG oslo_vmware.api [None req-1743e4f0-4a04-4d66-b257-6b699d7ecc4f tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Task: {'id': task-1803218, 'name': ReconfigVM_Task} progress is 18%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1892.780658] env[62519]: DEBUG oslo_vmware.api [None req-d89ebd38-be78-4944-865d-e148c2f80f82 tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Waiting for the task: (returnval){ [ 1892.780658] env[62519]: value = "task-1803233" [ 1892.780658] env[62519]: _type = "Task" [ 1892.780658] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1892.791196] env[62519]: DEBUG oslo_vmware.api [None req-d89ebd38-be78-4944-865d-e148c2f80f82 tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Task: {'id': task-1803233, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1892.884759] env[62519]: DEBUG nova.compute.manager [req-bc5b2e39-b6c1-44b0-8336-11793d1e5115 req-97788a1c-a215-4faf-8892-65d38e674b57 service nova] [instance: 31db4b14-0ba3-4159-accc-31c21bd81322] Received event network-changed-082d375d-5e85-4e5e-a40e-661c492b5f5d {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1892.884969] env[62519]: DEBUG nova.compute.manager [req-bc5b2e39-b6c1-44b0-8336-11793d1e5115 req-97788a1c-a215-4faf-8892-65d38e674b57 service nova] [instance: 31db4b14-0ba3-4159-accc-31c21bd81322] Refreshing instance network info cache due to event network-changed-082d375d-5e85-4e5e-a40e-661c492b5f5d. {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11628}} [ 1892.885174] env[62519]: DEBUG oslo_concurrency.lockutils [req-bc5b2e39-b6c1-44b0-8336-11793d1e5115 req-97788a1c-a215-4faf-8892-65d38e674b57 service nova] Acquiring lock "refresh_cache-31db4b14-0ba3-4159-accc-31c21bd81322" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1892.945780] env[62519]: DEBUG oslo_concurrency.lockutils [req-08b70b93-47aa-40aa-a7f7-ee6d50ed5c9f req-1542d30f-4654-4d26-93a0-932ae9719ebf service nova] Releasing lock "refresh_cache-31db4b14-0ba3-4159-accc-31c21bd81322" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1892.947039] env[62519]: DEBUG nova.compute.manager [req-08b70b93-47aa-40aa-a7f7-ee6d50ed5c9f req-1542d30f-4654-4d26-93a0-932ae9719ebf service nova] [instance: 618a1db6-4056-4380-b5df-395ac14165a7] Received event network-changed-872d6b35-c4a4-4975-8757-3c87471ba5df {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1892.947039] env[62519]: DEBUG nova.compute.manager [req-08b70b93-47aa-40aa-a7f7-ee6d50ed5c9f req-1542d30f-4654-4d26-93a0-932ae9719ebf service nova] [instance: 618a1db6-4056-4380-b5df-395ac14165a7] Refreshing instance network info cache due to event network-changed-872d6b35-c4a4-4975-8757-3c87471ba5df. 
{{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11628}} [ 1892.947039] env[62519]: DEBUG oslo_concurrency.lockutils [req-08b70b93-47aa-40aa-a7f7-ee6d50ed5c9f req-1542d30f-4654-4d26-93a0-932ae9719ebf service nova] Acquiring lock "refresh_cache-618a1db6-4056-4380-b5df-395ac14165a7" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1892.947039] env[62519]: DEBUG oslo_concurrency.lockutils [req-08b70b93-47aa-40aa-a7f7-ee6d50ed5c9f req-1542d30f-4654-4d26-93a0-932ae9719ebf service nova] Acquired lock "refresh_cache-618a1db6-4056-4380-b5df-395ac14165a7" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1892.947039] env[62519]: DEBUG nova.network.neutron [req-08b70b93-47aa-40aa-a7f7-ee6d50ed5c9f req-1542d30f-4654-4d26-93a0-932ae9719ebf service nova] [instance: 618a1db6-4056-4380-b5df-395ac14165a7] Refreshing network info cache for port 872d6b35-c4a4-4975-8757-3c87471ba5df {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1892.948123] env[62519]: DEBUG oslo_concurrency.lockutils [req-bc5b2e39-b6c1-44b0-8336-11793d1e5115 req-97788a1c-a215-4faf-8892-65d38e674b57 service nova] Acquired lock "refresh_cache-31db4b14-0ba3-4159-accc-31c21bd81322" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1892.948277] env[62519]: DEBUG nova.network.neutron [req-bc5b2e39-b6c1-44b0-8336-11793d1e5115 req-97788a1c-a215-4faf-8892-65d38e674b57 service nova] [instance: 31db4b14-0ba3-4159-accc-31c21bd81322] Refreshing network info cache for port 082d375d-5e85-4e5e-a40e-661c492b5f5d {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1892.967830] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-5eeb6587-adf0-43be-8c1a-61c4105beaba tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] [instance: 4a0f7975-5a07-4593-ae71-cabebdefe0fe] Powering off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1892.968178] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e6ea6ec7-62e8-43fc-9fe4-911656511ef9 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1892.976174] env[62519]: DEBUG oslo_vmware.api [None req-5eeb6587-adf0-43be-8c1a-61c4105beaba tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Waiting for the task: (returnval){ [ 1892.976174] env[62519]: value = "task-1803234" [ 1892.976174] env[62519]: _type = "Task" [ 1892.976174] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1892.986762] env[62519]: DEBUG oslo_vmware.api [None req-5eeb6587-adf0-43be-8c1a-61c4105beaba tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Task: {'id': task-1803234, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1893.051518] env[62519]: DEBUG nova.network.neutron [None req-940b0ecd-a4df-4677-a140-fd29162a361e tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] [instance: a0c60d49-83bb-434b-815c-c39e7493cbb7] Successfully updated port: 421b10d0-d0fc-47f6-b77d-a123639b1c45 {{(pid=62519) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1893.069803] env[62519]: DEBUG oslo_concurrency.lockutils [None req-90b160da-5ad5-46c7-9fd2-3632227a7909 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1893.070101] env[62519]: DEBUG oslo_concurrency.lockutils [None req-90b160da-5ad5-46c7-9fd2-3632227a7909 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1893.146895] env[62519]: INFO nova.compute.manager [-] [instance: 71edba06-1628-4749-8a51-5a9bcb003fda] Took 1.37 seconds to deallocate network for instance. [ 1893.161165] env[62519]: DEBUG oslo_vmware.api [None req-47d089a4-bbf0-49c3-8821-7b27d9421f60 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Task: {'id': task-1803232, 'name': ReconfigVM_Task, 'duration_secs': 0.249093} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1893.161438] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-47d089a4-bbf0-49c3-8821-7b27d9421f60 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] [instance: f3665f89-1747-4567-9e56-c937d4ac81da] Powering on the VM {{(pid=62519) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1893.161687] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-8bc7950f-4625-4795-8d71-ad26effaec9b {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1893.169022] env[62519]: DEBUG oslo_vmware.api [None req-47d089a4-bbf0-49c3-8821-7b27d9421f60 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Waiting for the task: (returnval){ [ 1893.169022] env[62519]: value = "task-1803235" [ 1893.169022] env[62519]: _type = "Task" [ 1893.169022] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1893.177177] env[62519]: DEBUG oslo_vmware.api [None req-47d089a4-bbf0-49c3-8821-7b27d9421f60 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Task: {'id': task-1803235, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1893.277870] env[62519]: DEBUG oslo_vmware.api [None req-1743e4f0-4a04-4d66-b257-6b699d7ecc4f tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Task: {'id': task-1803218, 'name': ReconfigVM_Task, 'duration_secs': 6.040956} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1893.278157] env[62519]: DEBUG oslo_concurrency.lockutils [None req-1743e4f0-4a04-4d66-b257-6b699d7ecc4f tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Releasing lock "10bfd4ac-6f11-4c96-87a0-ce74bc1193c4" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1893.278512] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-1743e4f0-4a04-4d66-b257-6b699d7ecc4f tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] [instance: 10bfd4ac-6f11-4c96-87a0-ce74bc1193c4] Reconfigured VM to detach interface {{(pid=62519) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1978}} [ 1893.289316] env[62519]: DEBUG oslo_vmware.api [None req-d89ebd38-be78-4944-865d-e148c2f80f82 tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Task: {'id': task-1803233, 'name': PowerOffVM_Task, 'duration_secs': 0.203785} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1893.289576] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-d89ebd38-be78-4944-865d-e148c2f80f82 tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] [instance: 46b3a0fb-29f6-4b66-a091-2d125b69d109] Powered off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1893.289764] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-d89ebd38-be78-4944-865d-e148c2f80f82 tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] [instance: 46b3a0fb-29f6-4b66-a091-2d125b69d109] Unregistering the VM {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1893.290064] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-48a0db26-56a1-4167-a710-ded0fbfecda2 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1893.484326] env[62519]: DEBUG oslo_concurrency.lockutils [None req-677042db-67a2-490e-9bd6-90a6772c4a22 tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] Acquiring lock "4719e47d-dc12-4f9c-aff3-46b083bd7e48" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1893.484859] env[62519]: DEBUG oslo_concurrency.lockutils [None req-677042db-67a2-490e-9bd6-90a6772c4a22 tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] Lock "4719e47d-dc12-4f9c-aff3-46b083bd7e48" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62519) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1893.492644] env[62519]: DEBUG oslo_vmware.api [None req-5eeb6587-adf0-43be-8c1a-61c4105beaba tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Task: {'id': task-1803234, 'name': PowerOffVM_Task, 'duration_secs': 0.375475} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1893.492806] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-5eeb6587-adf0-43be-8c1a-61c4105beaba tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] [instance: 4a0f7975-5a07-4593-ae71-cabebdefe0fe] Powered off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1893.492865] env[62519]: DEBUG nova.compute.manager [None req-5eeb6587-adf0-43be-8c1a-61c4105beaba tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] [instance: 4a0f7975-5a07-4593-ae71-cabebdefe0fe] Checking state {{(pid=62519) _get_power_state /opt/stack/nova/nova/compute/manager.py:1799}} [ 1893.493701] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-944a6a11-7163-491c-867b-198043e0b286 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1893.555303] env[62519]: DEBUG oslo_concurrency.lockutils [None req-940b0ecd-a4df-4677-a140-fd29162a361e tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Acquiring lock "refresh_cache-a0c60d49-83bb-434b-815c-c39e7493cbb7" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1893.555303] env[62519]: DEBUG oslo_concurrency.lockutils [None req-940b0ecd-a4df-4677-a140-fd29162a361e tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Acquired lock "refresh_cache-a0c60d49-83bb-434b-815c-c39e7493cbb7" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1893.555303] env[62519]: DEBUG nova.network.neutron [None req-940b0ecd-a4df-4677-a140-fd29162a361e tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] [instance: a0c60d49-83bb-434b-815c-c39e7493cbb7] Building network info cache for instance {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1893.578259] env[62519]: INFO nova.compute.claims [None req-90b160da-5ad5-46c7-9fd2-3632227a7909 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] [instance: 31db4b14-0ba3-4159-accc-31c21bd81322] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1893.594377] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-d89ebd38-be78-4944-865d-e148c2f80f82 tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] [instance: 46b3a0fb-29f6-4b66-a091-2d125b69d109] Unregistered the VM {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1893.594377] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-d89ebd38-be78-4944-865d-e148c2f80f82 tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] [instance: 46b3a0fb-29f6-4b66-a091-2d125b69d109] Deleting contents of the VM from datastore datastore1 
{{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1893.594377] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-d89ebd38-be78-4944-865d-e148c2f80f82 tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Deleting the datastore file [datastore1] 46b3a0fb-29f6-4b66-a091-2d125b69d109 {{(pid=62519) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1893.594377] env[62519]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b645e6e8-166f-48b7-918a-d58c288fd2fa {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1893.600294] env[62519]: DEBUG oslo_vmware.api [None req-d89ebd38-be78-4944-865d-e148c2f80f82 tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Waiting for the task: (returnval){ [ 1893.600294] env[62519]: value = "task-1803237" [ 1893.600294] env[62519]: _type = "Task" [ 1893.600294] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1893.610782] env[62519]: DEBUG oslo_vmware.api [None req-d89ebd38-be78-4944-865d-e148c2f80f82 tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Task: {'id': task-1803237, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1893.657263] env[62519]: DEBUG oslo_concurrency.lockutils [None req-5cca4c34-b05c-4a92-b943-d0f0eb6d26e2 tempest-ServerMetadataTestJSON-420459941 tempest-ServerMetadataTestJSON-420459941-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1893.684200] env[62519]: DEBUG oslo_vmware.api [None req-47d089a4-bbf0-49c3-8821-7b27d9421f60 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Task: {'id': task-1803235, 'name': PowerOnVM_Task, 'duration_secs': 0.416052} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1893.684480] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-47d089a4-bbf0-49c3-8821-7b27d9421f60 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] [instance: f3665f89-1747-4567-9e56-c937d4ac81da] Powered on the VM {{(pid=62519) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1893.687164] env[62519]: DEBUG nova.compute.manager [None req-47d089a4-bbf0-49c3-8821-7b27d9421f60 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] [instance: f3665f89-1747-4567-9e56-c937d4ac81da] Checking state {{(pid=62519) _get_power_state /opt/stack/nova/nova/compute/manager.py:1799}} [ 1893.687955] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98b02966-cdd2-4782-89e5-5d624b71f7f7 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1893.874277] env[62519]: DEBUG nova.network.neutron [req-08b70b93-47aa-40aa-a7f7-ee6d50ed5c9f req-1542d30f-4654-4d26-93a0-932ae9719ebf service nova] [instance: 618a1db6-4056-4380-b5df-395ac14165a7] Updated VIF entry in instance network info cache for port 872d6b35-c4a4-4975-8757-3c87471ba5df. {{(pid=62519) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1893.874735] env[62519]: DEBUG nova.network.neutron [req-08b70b93-47aa-40aa-a7f7-ee6d50ed5c9f req-1542d30f-4654-4d26-93a0-932ae9719ebf service nova] [instance: 618a1db6-4056-4380-b5df-395ac14165a7] Updating instance_info_cache with network_info: [{"id": "872d6b35-c4a4-4975-8757-3c87471ba5df", "address": "fa:16:3e:f1:f5:3b", "network": {"id": "40fc0e4f-fe1a-4b8c-ace4-4612cfc1a8fc", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-364463489-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.169", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "12977ed65a1b410a987b049e9d1dce3e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8e028024-a9c1-4cae-8849-ea770a7ae0e4", "external-id": "nsx-vlan-transportzone-919", "segmentation_id": 919, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap872d6b35-c4", "ovs_interfaceid": "872d6b35-c4a4-4975-8757-3c87471ba5df", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1893.878016] env[62519]: DEBUG nova.network.neutron [req-bc5b2e39-b6c1-44b0-8336-11793d1e5115 req-97788a1c-a215-4faf-8892-65d38e674b57 service nova] [instance: 31db4b14-0ba3-4159-accc-31c21bd81322] Updated VIF entry in instance network info cache for port 082d375d-5e85-4e5e-a40e-661c492b5f5d. 
{{(pid=62519) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1893.878182] env[62519]: DEBUG nova.network.neutron [req-bc5b2e39-b6c1-44b0-8336-11793d1e5115 req-97788a1c-a215-4faf-8892-65d38e674b57 service nova] [instance: 31db4b14-0ba3-4159-accc-31c21bd81322] Updating instance_info_cache with network_info: [{"id": "082d375d-5e85-4e5e-a40e-661c492b5f5d", "address": "fa:16:3e:4a:40:42", "network": {"id": "ddacabe4-9723-4a0b-9642-4c3ca54cd05f", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1355796152-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.215", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "549cc35f5ff249f6bf22c67872883db0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bc1e16db-ad3b-4b7f-ab64-4609c87abac0", "external-id": "nsx-vlan-transportzone-500", "segmentation_id": 500, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap082d375d-5e", "ovs_interfaceid": "082d375d-5e85-4e5e-a40e-661c492b5f5d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1893.986954] env[62519]: DEBUG nova.compute.manager [None req-677042db-67a2-490e-9bd6-90a6772c4a22 tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] [instance: 4719e47d-dc12-4f9c-aff3-46b083bd7e48] Starting instance... {{(pid=62519) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2441}} [ 1894.006181] env[62519]: DEBUG oslo_concurrency.lockutils [None req-5eeb6587-adf0-43be-8c1a-61c4105beaba tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Lock "4a0f7975-5a07-4593-ae71-cabebdefe0fe" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 2.058s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1894.084556] env[62519]: INFO nova.compute.resource_tracker [None req-90b160da-5ad5-46c7-9fd2-3632227a7909 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] [instance: 31db4b14-0ba3-4159-accc-31c21bd81322] Updating resource usage from migration 298ba8a7-ac5c-49c7-8268-5cab43f13b57 [ 1894.088229] env[62519]: DEBUG nova.network.neutron [None req-940b0ecd-a4df-4677-a140-fd29162a361e tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] [instance: a0c60d49-83bb-434b-815c-c39e7493cbb7] Instance cache missing network info. {{(pid=62519) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1894.116712] env[62519]: DEBUG oslo_vmware.api [None req-d89ebd38-be78-4944-865d-e148c2f80f82 tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Task: {'id': task-1803237, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.514449} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1894.119660] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-d89ebd38-be78-4944-865d-e148c2f80f82 tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Deleted the datastore file {{(pid=62519) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1894.119954] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-d89ebd38-be78-4944-865d-e148c2f80f82 tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] [instance: 46b3a0fb-29f6-4b66-a091-2d125b69d109] Deleted contents of the VM from datastore datastore1 {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1894.120165] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-d89ebd38-be78-4944-865d-e148c2f80f82 tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] [instance: 46b3a0fb-29f6-4b66-a091-2d125b69d109] Instance destroyed {{(pid=62519) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1894.120464] env[62519]: INFO nova.compute.manager [None req-d89ebd38-be78-4944-865d-e148c2f80f82 tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] [instance: 46b3a0fb-29f6-4b66-a091-2d125b69d109] Took 1.36 seconds to destroy the instance on the hypervisor. [ 1894.120578] env[62519]: DEBUG oslo.service.loopingcall [None req-d89ebd38-be78-4944-865d-e148c2f80f82 tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62519) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1894.121352] env[62519]: DEBUG nova.compute.manager [-] [instance: 46b3a0fb-29f6-4b66-a091-2d125b69d109] Deallocating network for instance {{(pid=62519) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2297}} [ 1894.121352] env[62519]: DEBUG nova.network.neutron [-] [instance: 46b3a0fb-29f6-4b66-a091-2d125b69d109] deallocate_for_instance() {{(pid=62519) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1894.304113] env[62519]: DEBUG nova.compute.manager [req-e5e08bfd-beeb-4ffc-bde2-1e547fe1c1fe req-511e1851-3f87-46ba-9354-79a66e3d5d94 service nova] [instance: a0c60d49-83bb-434b-815c-c39e7493cbb7] Received event network-vif-plugged-421b10d0-d0fc-47f6-b77d-a123639b1c45 {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1894.304335] env[62519]: DEBUG oslo_concurrency.lockutils [req-e5e08bfd-beeb-4ffc-bde2-1e547fe1c1fe req-511e1851-3f87-46ba-9354-79a66e3d5d94 service nova] Acquiring lock "a0c60d49-83bb-434b-815c-c39e7493cbb7-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1894.304537] env[62519]: DEBUG oslo_concurrency.lockutils [req-e5e08bfd-beeb-4ffc-bde2-1e547fe1c1fe req-511e1851-3f87-46ba-9354-79a66e3d5d94 service nova] Lock "a0c60d49-83bb-434b-815c-c39e7493cbb7-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1894.304701] env[62519]: DEBUG oslo_concurrency.lockutils [req-e5e08bfd-beeb-4ffc-bde2-1e547fe1c1fe req-511e1851-3f87-46ba-9354-79a66e3d5d94 service nova] Lock "a0c60d49-83bb-434b-815c-c39e7493cbb7-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1894.304859] env[62519]: DEBUG nova.compute.manager [req-e5e08bfd-beeb-4ffc-bde2-1e547fe1c1fe req-511e1851-3f87-46ba-9354-79a66e3d5d94 service nova] [instance: a0c60d49-83bb-434b-815c-c39e7493cbb7] No waiting events found dispatching network-vif-plugged-421b10d0-d0fc-47f6-b77d-a123639b1c45 {{(pid=62519) pop_instance_event /opt/stack/nova/nova/compute/manager.py:323}} [ 1894.305357] env[62519]: WARNING nova.compute.manager [req-e5e08bfd-beeb-4ffc-bde2-1e547fe1c1fe req-511e1851-3f87-46ba-9354-79a66e3d5d94 service nova] [instance: a0c60d49-83bb-434b-815c-c39e7493cbb7] Received unexpected event network-vif-plugged-421b10d0-d0fc-47f6-b77d-a123639b1c45 for instance with vm_state building and task_state spawning. [ 1894.305559] env[62519]: DEBUG nova.compute.manager [req-e5e08bfd-beeb-4ffc-bde2-1e547fe1c1fe req-511e1851-3f87-46ba-9354-79a66e3d5d94 service nova] [instance: a0c60d49-83bb-434b-815c-c39e7493cbb7] Received event network-changed-421b10d0-d0fc-47f6-b77d-a123639b1c45 {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1894.305718] env[62519]: DEBUG nova.compute.manager [req-e5e08bfd-beeb-4ffc-bde2-1e547fe1c1fe req-511e1851-3f87-46ba-9354-79a66e3d5d94 service nova] [instance: a0c60d49-83bb-434b-815c-c39e7493cbb7] Refreshing instance network info cache due to event network-changed-421b10d0-d0fc-47f6-b77d-a123639b1c45. 
{{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11628}} [ 1894.305920] env[62519]: DEBUG oslo_concurrency.lockutils [req-e5e08bfd-beeb-4ffc-bde2-1e547fe1c1fe req-511e1851-3f87-46ba-9354-79a66e3d5d94 service nova] Acquiring lock "refresh_cache-a0c60d49-83bb-434b-815c-c39e7493cbb7" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1894.350554] env[62519]: DEBUG nova.network.neutron [None req-940b0ecd-a4df-4677-a140-fd29162a361e tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] [instance: a0c60d49-83bb-434b-815c-c39e7493cbb7] Updating instance_info_cache with network_info: [{"id": "421b10d0-d0fc-47f6-b77d-a123639b1c45", "address": "fa:16:3e:63:1a:19", "network": {"id": "8af40fa9-af04-4837-8508-5d0e5b7f1d9c", "bridge": "br-int", "label": "tempest-ServersTestJSON-1824239670-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0e755fb5a6e94068b6c99b1638081f5f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7ab8d568-adb0-4f3b-b6cc-68413e6546ae", "external-id": "nsx-vlan-transportzone-86", "segmentation_id": 86, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap421b10d0-d0", "ovs_interfaceid": "421b10d0-d0fc-47f6-b77d-a123639b1c45", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1894.372763] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c32cefc-d2d5-48e8-a28b-597b2a9d1433 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1894.377635] env[62519]: DEBUG oslo_concurrency.lockutils [req-08b70b93-47aa-40aa-a7f7-ee6d50ed5c9f req-1542d30f-4654-4d26-93a0-932ae9719ebf service nova] Releasing lock "refresh_cache-618a1db6-4056-4380-b5df-395ac14165a7" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1894.377945] env[62519]: DEBUG nova.compute.manager [req-08b70b93-47aa-40aa-a7f7-ee6d50ed5c9f req-1542d30f-4654-4d26-93a0-932ae9719ebf service nova] [instance: eb5de0a4-0af3-4731-ab30-3ae3d72207a7] Received event network-vif-deleted-98c5e48e-5515-4c54-af43-86a9b283477d {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1894.381495] env[62519]: DEBUG oslo_concurrency.lockutils [req-bc5b2e39-b6c1-44b0-8336-11793d1e5115 req-97788a1c-a215-4faf-8892-65d38e674b57 service nova] Releasing lock "refresh_cache-31db4b14-0ba3-4159-accc-31c21bd81322" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1894.383125] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e77911b6-1489-4ab7-8bda-a00756096767 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1894.424654] 
env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-997c360c-585a-4473-88f6-8d214e31e443 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1894.435684] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7facb97-d0a9-43ba-b44e-c910ec798e0f {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1894.453405] env[62519]: DEBUG nova.compute.provider_tree [None req-90b160da-5ad5-46c7-9fd2-3632227a7909 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Inventory has not changed in ProviderTree for provider: f8ca0d98-9158-4b85-ae0e-b106f966dd44 {{(pid=62519) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1894.514547] env[62519]: DEBUG oslo_concurrency.lockutils [None req-677042db-67a2-490e-9bd6-90a6772c4a22 tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1894.631663] env[62519]: DEBUG oslo_concurrency.lockutils [None req-1743e4f0-4a04-4d66-b257-6b699d7ecc4f tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Acquiring lock "refresh_cache-10bfd4ac-6f11-4c96-87a0-ce74bc1193c4" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1894.631663] env[62519]: DEBUG oslo_concurrency.lockutils [None req-1743e4f0-4a04-4d66-b257-6b699d7ecc4f tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Acquired lock "refresh_cache-10bfd4ac-6f11-4c96-87a0-ce74bc1193c4" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1894.631663] env[62519]: DEBUG nova.network.neutron [None req-1743e4f0-4a04-4d66-b257-6b699d7ecc4f tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] [instance: 10bfd4ac-6f11-4c96-87a0-ce74bc1193c4] Building network info cache for instance {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1894.855386] env[62519]: DEBUG oslo_concurrency.lockutils [None req-940b0ecd-a4df-4677-a140-fd29162a361e tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Releasing lock "refresh_cache-a0c60d49-83bb-434b-815c-c39e7493cbb7" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1894.855386] env[62519]: DEBUG nova.compute.manager [None req-940b0ecd-a4df-4677-a140-fd29162a361e tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] [instance: a0c60d49-83bb-434b-815c-c39e7493cbb7] Instance network_info: |[{"id": "421b10d0-d0fc-47f6-b77d-a123639b1c45", "address": "fa:16:3e:63:1a:19", "network": {"id": "8af40fa9-af04-4837-8508-5d0e5b7f1d9c", "bridge": "br-int", "label": "tempest-ServersTestJSON-1824239670-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, 
"floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0e755fb5a6e94068b6c99b1638081f5f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7ab8d568-adb0-4f3b-b6cc-68413e6546ae", "external-id": "nsx-vlan-transportzone-86", "segmentation_id": 86, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap421b10d0-d0", "ovs_interfaceid": "421b10d0-d0fc-47f6-b77d-a123639b1c45", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62519) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2004}} [ 1894.855386] env[62519]: DEBUG oslo_concurrency.lockutils [req-e5e08bfd-beeb-4ffc-bde2-1e547fe1c1fe req-511e1851-3f87-46ba-9354-79a66e3d5d94 service nova] Acquired lock "refresh_cache-a0c60d49-83bb-434b-815c-c39e7493cbb7" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1894.855386] env[62519]: DEBUG nova.network.neutron [req-e5e08bfd-beeb-4ffc-bde2-1e547fe1c1fe req-511e1851-3f87-46ba-9354-79a66e3d5d94 service nova] [instance: a0c60d49-83bb-434b-815c-c39e7493cbb7] Refreshing network info cache for port 421b10d0-d0fc-47f6-b77d-a123639b1c45 {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1894.856723] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-940b0ecd-a4df-4677-a140-fd29162a361e tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] [instance: a0c60d49-83bb-434b-815c-c39e7493cbb7] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:63:1a:19', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '7ab8d568-adb0-4f3b-b6cc-68413e6546ae', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '421b10d0-d0fc-47f6-b77d-a123639b1c45', 'vif_model': 'vmxnet3'}] {{(pid=62519) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1894.870847] env[62519]: DEBUG oslo.service.loopingcall [None req-940b0ecd-a4df-4677-a140-fd29162a361e tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62519) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1894.872483] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a0c60d49-83bb-434b-815c-c39e7493cbb7] Creating VM on the ESX host {{(pid=62519) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1894.872829] env[62519]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-6a07901c-d229-43a2-bbc6-af5b2f21dc7d {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1894.892770] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2b0ebd10-a2b6-4e7f-853d-50efcc4a59c7 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Acquiring lock "10bfd4ac-6f11-4c96-87a0-ce74bc1193c4" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1894.893133] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2b0ebd10-a2b6-4e7f-853d-50efcc4a59c7 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Lock "10bfd4ac-6f11-4c96-87a0-ce74bc1193c4" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1894.893229] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2b0ebd10-a2b6-4e7f-853d-50efcc4a59c7 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Acquiring lock "10bfd4ac-6f11-4c96-87a0-ce74bc1193c4-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1894.893406] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2b0ebd10-a2b6-4e7f-853d-50efcc4a59c7 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Lock "10bfd4ac-6f11-4c96-87a0-ce74bc1193c4-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1894.893561] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2b0ebd10-a2b6-4e7f-853d-50efcc4a59c7 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Lock "10bfd4ac-6f11-4c96-87a0-ce74bc1193c4-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1894.899929] env[62519]: INFO nova.compute.manager [None req-2b0ebd10-a2b6-4e7f-853d-50efcc4a59c7 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] [instance: 10bfd4ac-6f11-4c96-87a0-ce74bc1193c4] Terminating instance [ 1894.910531] env[62519]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1894.910531] env[62519]: value = "task-1803238" [ 1894.910531] env[62519]: _type = "Task" [ 1894.910531] env[62519]: } to complete. 
{{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1894.920678] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1803238, 'name': CreateVM_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1894.956185] env[62519]: DEBUG nova.scheduler.client.report [None req-90b160da-5ad5-46c7-9fd2-3632227a7909 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Inventory has not changed for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 157, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1895.250246] env[62519]: DEBUG nova.network.neutron [-] [instance: 46b3a0fb-29f6-4b66-a091-2d125b69d109] Updating instance_info_cache with network_info: [] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1895.408225] env[62519]: DEBUG nova.compute.manager [None req-2b0ebd10-a2b6-4e7f-853d-50efcc4a59c7 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] [instance: 10bfd4ac-6f11-4c96-87a0-ce74bc1193c4] Start destroying the instance on the hypervisor. {{(pid=62519) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3166}} [ 1895.408444] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-2b0ebd10-a2b6-4e7f-853d-50efcc4a59c7 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] [instance: 10bfd4ac-6f11-4c96-87a0-ce74bc1193c4] Destroying instance {{(pid=62519) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1895.409396] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c32f0632-0b84-4b24-b8b4-be61257333db {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1895.414556] env[62519]: INFO nova.network.neutron [None req-1743e4f0-4a04-4d66-b257-6b699d7ecc4f tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] [instance: 10bfd4ac-6f11-4c96-87a0-ce74bc1193c4] Port c5603e79-74fa-4628-bc67-0351b0407810 from network info_cache is no longer associated with instance in Neutron. Removing from network info_cache. 
[ 1895.414900] env[62519]: DEBUG nova.network.neutron [None req-1743e4f0-4a04-4d66-b257-6b699d7ecc4f tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] [instance: 10bfd4ac-6f11-4c96-87a0-ce74bc1193c4] Updating instance_info_cache with network_info: [{"id": "037e7926-5eac-487b-a2fa-4124ead561df", "address": "fa:16:3e:f0:b9:0e", "network": {"id": "3996e7f6-f093-4152-af91-6fb77f32a1c5", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-2013009403-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.197", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "63a46158057949478e5c79fbe0d4d5d4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3093647a-bab7-4562-ada0-428725e8c0fc", "external-id": "nsx-vlan-transportzone-660", "segmentation_id": 660, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap037e7926-5e", "ovs_interfaceid": "037e7926-5eac-487b-a2fa-4124ead561df", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1895.431831] env[62519]: DEBUG nova.compute.manager [None req-69e7dbb2-bece-4fab-a6ec-da81bb691da7 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] [instance: 4a0f7975-5a07-4593-ae71-cabebdefe0fe] Stashing vm_state: stopped {{(pid=62519) _prep_resize /opt/stack/nova/nova/compute/manager.py:6081}} [ 1895.441453] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-2b0ebd10-a2b6-4e7f-853d-50efcc4a59c7 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] [instance: 10bfd4ac-6f11-4c96-87a0-ce74bc1193c4] Powering off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1895.441453] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-5c909c2c-f045-4d76-a04d-282e2416c8f4 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1895.444507] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1803238, 'name': CreateVM_Task, 'duration_secs': 0.425845} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1895.446451] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a0c60d49-83bb-434b-815c-c39e7493cbb7] Created VM on the ESX host {{(pid=62519) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1895.446793] env[62519]: DEBUG oslo_vmware.api [None req-2b0ebd10-a2b6-4e7f-853d-50efcc4a59c7 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Waiting for the task: (returnval){ [ 1895.446793] env[62519]: value = "task-1803239" [ 1895.446793] env[62519]: _type = "Task" [ 1895.446793] env[62519]: } to complete. 
{{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1895.447793] env[62519]: DEBUG oslo_concurrency.lockutils [None req-940b0ecd-a4df-4677-a140-fd29162a361e tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1895.447793] env[62519]: DEBUG oslo_concurrency.lockutils [None req-940b0ecd-a4df-4677-a140-fd29162a361e tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Acquired lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1895.448050] env[62519]: DEBUG oslo_concurrency.lockutils [None req-940b0ecd-a4df-4677-a140-fd29162a361e tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1895.448659] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f5aec7de-d4ac-45a9-b62b-125265a1725c {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1895.460296] env[62519]: DEBUG oslo_vmware.api [None req-940b0ecd-a4df-4677-a140-fd29162a361e tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Waiting for the task: (returnval){ [ 1895.460296] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]52113fa1-0ecb-ca68-55a7-2386fb0bab37" [ 1895.460296] env[62519]: _type = "Task" [ 1895.460296] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1895.465065] env[62519]: DEBUG oslo_concurrency.lockutils [None req-90b160da-5ad5-46c7-9fd2-3632227a7909 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.395s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1895.465356] env[62519]: INFO nova.compute.manager [None req-90b160da-5ad5-46c7-9fd2-3632227a7909 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] [instance: 31db4b14-0ba3-4159-accc-31c21bd81322] Migrating [ 1895.474085] env[62519]: DEBUG oslo_vmware.api [None req-2b0ebd10-a2b6-4e7f-853d-50efcc4a59c7 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Task: {'id': task-1803239, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1895.482372] env[62519]: DEBUG oslo_concurrency.lockutils [None req-5cca4c34-b05c-4a92-b943-d0f0eb6d26e2 tempest-ServerMetadataTestJSON-420459941 tempest-ServerMetadataTestJSON-420459941-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.823s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1895.482372] env[62519]: DEBUG nova.objects.instance [None req-5cca4c34-b05c-4a92-b943-d0f0eb6d26e2 tempest-ServerMetadataTestJSON-420459941 tempest-ServerMetadataTestJSON-420459941-project-member] Lazy-loading 'resources' on Instance uuid 71edba06-1628-4749-8a51-5a9bcb003fda {{(pid=62519) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1895.503693] env[62519]: DEBUG oslo_vmware.api [None req-940b0ecd-a4df-4677-a140-fd29162a361e tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52113fa1-0ecb-ca68-55a7-2386fb0bab37, 'name': SearchDatastore_Task, 'duration_secs': 0.011524} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1895.504019] env[62519]: DEBUG oslo_concurrency.lockutils [None req-940b0ecd-a4df-4677-a140-fd29162a361e tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Releasing lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1895.504263] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-940b0ecd-a4df-4677-a140-fd29162a361e tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] [instance: a0c60d49-83bb-434b-815c-c39e7493cbb7] Processing image 15793716-f1d9-4a86-9030-717adf498693 {{(pid=62519) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1895.507017] env[62519]: DEBUG oslo_concurrency.lockutils [None req-940b0ecd-a4df-4677-a140-fd29162a361e tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1895.507017] env[62519]: DEBUG oslo_concurrency.lockutils [None req-940b0ecd-a4df-4677-a140-fd29162a361e tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Acquired lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1895.507017] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-940b0ecd-a4df-4677-a140-fd29162a361e tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62519) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1895.507017] env[62519]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f1822671-c7c8-4662-826f-87cee34fe639 {{(pid=62519) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1895.516894] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-940b0ecd-a4df-4677-a140-fd29162a361e tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62519) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1895.517109] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-940b0ecd-a4df-4677-a140-fd29162a361e tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62519) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1895.521118] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fba9d1d3-caa2-4954-9c0d-6b7d2e1bc3b7 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1895.528354] env[62519]: DEBUG oslo_vmware.api [None req-940b0ecd-a4df-4677-a140-fd29162a361e tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Waiting for the task: (returnval){ [ 1895.528354] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]52fedc49-5b4b-a4e9-eaae-4cde02246aeb" [ 1895.528354] env[62519]: _type = "Task" [ 1895.528354] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1895.537881] env[62519]: DEBUG oslo_vmware.api [None req-940b0ecd-a4df-4677-a140-fd29162a361e tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52fedc49-5b4b-a4e9-eaae-4cde02246aeb, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1895.630748] env[62519]: INFO nova.compute.manager [None req-0f0911e1-0cfa-48f5-a707-d04506894ed0 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] [instance: f3665f89-1747-4567-9e56-c937d4ac81da] Unrescuing [ 1895.631079] env[62519]: DEBUG oslo_concurrency.lockutils [None req-0f0911e1-0cfa-48f5-a707-d04506894ed0 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Acquiring lock "refresh_cache-f3665f89-1747-4567-9e56-c937d4ac81da" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1895.631262] env[62519]: DEBUG oslo_concurrency.lockutils [None req-0f0911e1-0cfa-48f5-a707-d04506894ed0 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Acquired lock "refresh_cache-f3665f89-1747-4567-9e56-c937d4ac81da" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1895.631436] env[62519]: DEBUG nova.network.neutron [None req-0f0911e1-0cfa-48f5-a707-d04506894ed0 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] [instance: f3665f89-1747-4567-9e56-c937d4ac81da] Building network info cache for instance {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1895.660026] env[62519]: DEBUG nova.network.neutron [req-e5e08bfd-beeb-4ffc-bde2-1e547fe1c1fe req-511e1851-3f87-46ba-9354-79a66e3d5d94 service nova] [instance: a0c60d49-83bb-434b-815c-c39e7493cbb7] Updated VIF entry in instance network info cache for port 421b10d0-d0fc-47f6-b77d-a123639b1c45. 
{{(pid=62519) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1895.660404] env[62519]: DEBUG nova.network.neutron [req-e5e08bfd-beeb-4ffc-bde2-1e547fe1c1fe req-511e1851-3f87-46ba-9354-79a66e3d5d94 service nova] [instance: a0c60d49-83bb-434b-815c-c39e7493cbb7] Updating instance_info_cache with network_info: [{"id": "421b10d0-d0fc-47f6-b77d-a123639b1c45", "address": "fa:16:3e:63:1a:19", "network": {"id": "8af40fa9-af04-4837-8508-5d0e5b7f1d9c", "bridge": "br-int", "label": "tempest-ServersTestJSON-1824239670-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0e755fb5a6e94068b6c99b1638081f5f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7ab8d568-adb0-4f3b-b6cc-68413e6546ae", "external-id": "nsx-vlan-transportzone-86", "segmentation_id": 86, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap421b10d0-d0", "ovs_interfaceid": "421b10d0-d0fc-47f6-b77d-a123639b1c45", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1895.754688] env[62519]: INFO nova.compute.manager [-] [instance: 46b3a0fb-29f6-4b66-a091-2d125b69d109] Took 1.63 seconds to deallocate network for instance. [ 1895.918129] env[62519]: DEBUG oslo_concurrency.lockutils [None req-1743e4f0-4a04-4d66-b257-6b699d7ecc4f tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Releasing lock "refresh_cache-10bfd4ac-6f11-4c96-87a0-ce74bc1193c4" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1895.954062] env[62519]: DEBUG oslo_concurrency.lockutils [None req-69e7dbb2-bece-4fab-a6ec-da81bb691da7 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1895.958882] env[62519]: DEBUG oslo_vmware.api [None req-2b0ebd10-a2b6-4e7f-853d-50efcc4a59c7 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Task: {'id': task-1803239, 'name': PowerOffVM_Task, 'duration_secs': 0.273339} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1895.958882] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-2b0ebd10-a2b6-4e7f-853d-50efcc4a59c7 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] [instance: 10bfd4ac-6f11-4c96-87a0-ce74bc1193c4] Powered off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1895.963020] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-2b0ebd10-a2b6-4e7f-853d-50efcc4a59c7 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] [instance: 10bfd4ac-6f11-4c96-87a0-ce74bc1193c4] Unregistering the VM {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1895.963020] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-8c745ae4-bec0-4862-b5dd-57a84d7a87ad {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1895.990388] env[62519]: DEBUG oslo_concurrency.lockutils [None req-90b160da-5ad5-46c7-9fd2-3632227a7909 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Acquiring lock "refresh_cache-31db4b14-0ba3-4159-accc-31c21bd81322" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1895.990731] env[62519]: DEBUG oslo_concurrency.lockutils [None req-90b160da-5ad5-46c7-9fd2-3632227a7909 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Acquired lock "refresh_cache-31db4b14-0ba3-4159-accc-31c21bd81322" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1895.990935] env[62519]: DEBUG nova.network.neutron [None req-90b160da-5ad5-46c7-9fd2-3632227a7909 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] [instance: 31db4b14-0ba3-4159-accc-31c21bd81322] Building network info cache for instance {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1896.040571] env[62519]: DEBUG oslo_vmware.api [None req-940b0ecd-a4df-4677-a140-fd29162a361e tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52fedc49-5b4b-a4e9-eaae-4cde02246aeb, 'name': SearchDatastore_Task, 'duration_secs': 0.010593} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1896.041481] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-16db146f-3154-47b2-b875-490969b39b06 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1896.047750] env[62519]: DEBUG oslo_vmware.api [None req-940b0ecd-a4df-4677-a140-fd29162a361e tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Waiting for the task: (returnval){ [ 1896.047750] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]52207fd9-87eb-c724-dca2-5f965ce76d24" [ 1896.047750] env[62519]: _type = "Task" [ 1896.047750] env[62519]: } to complete. 
{{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1896.059253] env[62519]: DEBUG oslo_vmware.api [None req-940b0ecd-a4df-4677-a140-fd29162a361e tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52207fd9-87eb-c724-dca2-5f965ce76d24, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1896.071679] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-2b0ebd10-a2b6-4e7f-853d-50efcc4a59c7 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] [instance: 10bfd4ac-6f11-4c96-87a0-ce74bc1193c4] Unregistered the VM {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1896.071911] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-2b0ebd10-a2b6-4e7f-853d-50efcc4a59c7 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] [instance: 10bfd4ac-6f11-4c96-87a0-ce74bc1193c4] Deleting contents of the VM from datastore datastore1 {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1896.072137] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-2b0ebd10-a2b6-4e7f-853d-50efcc4a59c7 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Deleting the datastore file [datastore1] 10bfd4ac-6f11-4c96-87a0-ce74bc1193c4 {{(pid=62519) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1896.072501] env[62519]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-4162dcd3-8db6-4852-bf0c-d06a8db0e10a {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1896.080449] env[62519]: DEBUG oslo_vmware.api [None req-2b0ebd10-a2b6-4e7f-853d-50efcc4a59c7 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Waiting for the task: (returnval){ [ 1896.080449] env[62519]: value = "task-1803241" [ 1896.080449] env[62519]: _type = "Task" [ 1896.080449] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1896.091247] env[62519]: DEBUG oslo_vmware.api [None req-2b0ebd10-a2b6-4e7f-853d-50efcc4a59c7 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Task: {'id': task-1803241, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1896.163278] env[62519]: DEBUG oslo_concurrency.lockutils [req-e5e08bfd-beeb-4ffc-bde2-1e547fe1c1fe req-511e1851-3f87-46ba-9354-79a66e3d5d94 service nova] Releasing lock "refresh_cache-a0c60d49-83bb-434b-815c-c39e7493cbb7" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1896.163549] env[62519]: DEBUG nova.compute.manager [req-e5e08bfd-beeb-4ffc-bde2-1e547fe1c1fe req-511e1851-3f87-46ba-9354-79a66e3d5d94 service nova] [instance: 10bfd4ac-6f11-4c96-87a0-ce74bc1193c4] Received event network-vif-deleted-c5603e79-74fa-4628-bc67-0351b0407810 {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1896.163728] env[62519]: INFO nova.compute.manager [req-e5e08bfd-beeb-4ffc-bde2-1e547fe1c1fe req-511e1851-3f87-46ba-9354-79a66e3d5d94 service nova] [instance: 10bfd4ac-6f11-4c96-87a0-ce74bc1193c4] Neutron deleted interface c5603e79-74fa-4628-bc67-0351b0407810; detaching it from the instance and deleting it from the info cache [ 1896.163993] env[62519]: DEBUG nova.network.neutron [req-e5e08bfd-beeb-4ffc-bde2-1e547fe1c1fe req-511e1851-3f87-46ba-9354-79a66e3d5d94 service nova] [instance: 10bfd4ac-6f11-4c96-87a0-ce74bc1193c4] Updating instance_info_cache with network_info: [{"id": "037e7926-5eac-487b-a2fa-4124ead561df", "address": "fa:16:3e:f0:b9:0e", "network": {"id": "3996e7f6-f093-4152-af91-6fb77f32a1c5", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-2013009403-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.197", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "63a46158057949478e5c79fbe0d4d5d4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3093647a-bab7-4562-ada0-428725e8c0fc", "external-id": "nsx-vlan-transportzone-660", "segmentation_id": 660, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap037e7926-5e", "ovs_interfaceid": "037e7926-5eac-487b-a2fa-4124ead561df", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1896.252574] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83f0278f-3f6e-447c-8146-49e504ba012e {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1896.262789] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be6b86eb-62a6-4cf3-83e4-9a00a92db8b5 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1896.267391] env[62519]: DEBUG oslo_concurrency.lockutils [None req-d89ebd38-be78-4944-865d-e148c2f80f82 tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Acquiring lock "compute_resources" by 
"nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1896.300153] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7c5a1e1-c4bf-4843-8aec-35a09f24106d {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1896.309923] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-645203a8-fd89-47a5-99ed-856fef224285 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1896.324940] env[62519]: DEBUG nova.compute.provider_tree [None req-5cca4c34-b05c-4a92-b943-d0f0eb6d26e2 tempest-ServerMetadataTestJSON-420459941 tempest-ServerMetadataTestJSON-420459941-project-member] Inventory has not changed in ProviderTree for provider: f8ca0d98-9158-4b85-ae0e-b106f966dd44 {{(pid=62519) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1896.339216] env[62519]: DEBUG nova.compute.manager [req-d08256c3-bc16-4579-9c44-484913b3a2a6 req-2317769c-7c1f-4af6-a48e-86e8ac7604c8 service nova] [instance: 46b3a0fb-29f6-4b66-a091-2d125b69d109] Received event network-vif-deleted-bcc785a2-3385-4fe4-85fc-7540000eb36b {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1896.423168] env[62519]: DEBUG oslo_concurrency.lockutils [None req-1743e4f0-4a04-4d66-b257-6b699d7ecc4f tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Lock "interface-10bfd4ac-6f11-4c96-87a0-ce74bc1193c4-c5603e79-74fa-4628-bc67-0351b0407810" "released" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: held 9.812s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1896.425668] env[62519]: DEBUG nova.network.neutron [None req-0f0911e1-0cfa-48f5-a707-d04506894ed0 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] [instance: f3665f89-1747-4567-9e56-c937d4ac81da] Updating instance_info_cache with network_info: [{"id": "7220339b-d4a7-441b-8227-c956744ce0c0", "address": "fa:16:3e:0f:ad:ae", "network": {"id": "8d760b11-0e90-40f8-a7a6-509485520bf3", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-619921944-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.175", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "448555031bb64aefafd0fcc67f4df10a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4225eb1f-0af4-4ed4-8e3d-de822eb6d4ea", "external-id": "nsx-vlan-transportzone-40", "segmentation_id": 40, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7220339b-d4", "ovs_interfaceid": "7220339b-d4a7-441b-8227-c956744ce0c0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) 
update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1896.558869] env[62519]: DEBUG oslo_vmware.api [None req-940b0ecd-a4df-4677-a140-fd29162a361e tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52207fd9-87eb-c724-dca2-5f965ce76d24, 'name': SearchDatastore_Task, 'duration_secs': 0.012483} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1896.559246] env[62519]: DEBUG oslo_concurrency.lockutils [None req-940b0ecd-a4df-4677-a140-fd29162a361e tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Releasing lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1896.559488] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-940b0ecd-a4df-4677-a140-fd29162a361e tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk to [datastore1] a0c60d49-83bb-434b-815c-c39e7493cbb7/a0c60d49-83bb-434b-815c-c39e7493cbb7.vmdk {{(pid=62519) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1896.559777] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-48cc8f26-d358-4569-9507-3cb06a847bef {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1896.567954] env[62519]: DEBUG oslo_vmware.api [None req-940b0ecd-a4df-4677-a140-fd29162a361e tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Waiting for the task: (returnval){ [ 1896.567954] env[62519]: value = "task-1803242" [ 1896.567954] env[62519]: _type = "Task" [ 1896.567954] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1896.579492] env[62519]: DEBUG oslo_vmware.api [None req-940b0ecd-a4df-4677-a140-fd29162a361e tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Task: {'id': task-1803242, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1896.589317] env[62519]: DEBUG oslo_vmware.api [None req-2b0ebd10-a2b6-4e7f-853d-50efcc4a59c7 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Task: {'id': task-1803241, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.3749} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1896.591918] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-2b0ebd10-a2b6-4e7f-853d-50efcc4a59c7 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Deleted the datastore file {{(pid=62519) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1896.592149] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-2b0ebd10-a2b6-4e7f-853d-50efcc4a59c7 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] [instance: 10bfd4ac-6f11-4c96-87a0-ce74bc1193c4] Deleted contents of the VM from datastore datastore1 {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1896.592343] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-2b0ebd10-a2b6-4e7f-853d-50efcc4a59c7 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] [instance: 10bfd4ac-6f11-4c96-87a0-ce74bc1193c4] Instance destroyed {{(pid=62519) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1896.592531] env[62519]: INFO nova.compute.manager [None req-2b0ebd10-a2b6-4e7f-853d-50efcc4a59c7 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] [instance: 10bfd4ac-6f11-4c96-87a0-ce74bc1193c4] Took 1.18 seconds to destroy the instance on the hypervisor. [ 1896.592787] env[62519]: DEBUG oslo.service.loopingcall [None req-2b0ebd10-a2b6-4e7f-853d-50efcc4a59c7 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62519) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1896.593022] env[62519]: DEBUG nova.compute.manager [-] [instance: 10bfd4ac-6f11-4c96-87a0-ce74bc1193c4] Deallocating network for instance {{(pid=62519) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2297}} [ 1896.593114] env[62519]: DEBUG nova.network.neutron [-] [instance: 10bfd4ac-6f11-4c96-87a0-ce74bc1193c4] deallocate_for_instance() {{(pid=62519) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1896.669534] env[62519]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-60f8ddeb-0872-4188-937f-ce276f5234de {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1896.681042] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a6fbe6c-017d-4f4a-81ae-39177fa40a74 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1896.727842] env[62519]: DEBUG nova.compute.manager [req-e5e08bfd-beeb-4ffc-bde2-1e547fe1c1fe req-511e1851-3f87-46ba-9354-79a66e3d5d94 service nova] [instance: 10bfd4ac-6f11-4c96-87a0-ce74bc1193c4] Detach interface failed, port_id=c5603e79-74fa-4628-bc67-0351b0407810, reason: Instance 10bfd4ac-6f11-4c96-87a0-ce74bc1193c4 could not be found. 
{{(pid=62519) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11455}} [ 1896.828176] env[62519]: DEBUG nova.scheduler.client.report [None req-5cca4c34-b05c-4a92-b943-d0f0eb6d26e2 tempest-ServerMetadataTestJSON-420459941 tempest-ServerMetadataTestJSON-420459941-project-member] Inventory has not changed for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 157, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1896.897975] env[62519]: DEBUG nova.network.neutron [None req-90b160da-5ad5-46c7-9fd2-3632227a7909 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] [instance: 31db4b14-0ba3-4159-accc-31c21bd81322] Updating instance_info_cache with network_info: [{"id": "082d375d-5e85-4e5e-a40e-661c492b5f5d", "address": "fa:16:3e:4a:40:42", "network": {"id": "ddacabe4-9723-4a0b-9642-4c3ca54cd05f", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1355796152-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.215", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "549cc35f5ff249f6bf22c67872883db0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bc1e16db-ad3b-4b7f-ab64-4609c87abac0", "external-id": "nsx-vlan-transportzone-500", "segmentation_id": 500, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap082d375d-5e", "ovs_interfaceid": "082d375d-5e85-4e5e-a40e-661c492b5f5d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1896.930192] env[62519]: DEBUG oslo_concurrency.lockutils [None req-0f0911e1-0cfa-48f5-a707-d04506894ed0 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Releasing lock "refresh_cache-f3665f89-1747-4567-9e56-c937d4ac81da" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1896.930980] env[62519]: DEBUG nova.objects.instance [None req-0f0911e1-0cfa-48f5-a707-d04506894ed0 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Lazy-loading 'flavor' on Instance uuid f3665f89-1747-4567-9e56-c937d4ac81da {{(pid=62519) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1897.080468] env[62519]: DEBUG oslo_vmware.api [None req-940b0ecd-a4df-4677-a140-fd29162a361e tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Task: {'id': task-1803242, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.475437} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1897.080776] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-940b0ecd-a4df-4677-a140-fd29162a361e tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk to [datastore1] a0c60d49-83bb-434b-815c-c39e7493cbb7/a0c60d49-83bb-434b-815c-c39e7493cbb7.vmdk {{(pid=62519) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1897.081057] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-940b0ecd-a4df-4677-a140-fd29162a361e tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] [instance: a0c60d49-83bb-434b-815c-c39e7493cbb7] Extending root virtual disk to 1048576 {{(pid=62519) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1897.081325] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-ef9b69d1-10eb-4f0e-9b49-cbce8a53c4e3 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1897.090063] env[62519]: DEBUG oslo_vmware.api [None req-940b0ecd-a4df-4677-a140-fd29162a361e tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Waiting for the task: (returnval){ [ 1897.090063] env[62519]: value = "task-1803243" [ 1897.090063] env[62519]: _type = "Task" [ 1897.090063] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1897.099610] env[62519]: DEBUG oslo_vmware.api [None req-940b0ecd-a4df-4677-a140-fd29162a361e tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Task: {'id': task-1803243, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1897.337937] env[62519]: DEBUG oslo_concurrency.lockutils [None req-5cca4c34-b05c-4a92-b943-d0f0eb6d26e2 tempest-ServerMetadataTestJSON-420459941 tempest-ServerMetadataTestJSON-420459941-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.858s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1897.341283] env[62519]: DEBUG oslo_concurrency.lockutils [None req-677042db-67a2-490e-9bd6-90a6772c4a22 tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 2.826s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1897.345663] env[62519]: INFO nova.compute.claims [None req-677042db-67a2-490e-9bd6-90a6772c4a22 tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] [instance: 4719e47d-dc12-4f9c-aff3-46b083bd7e48] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1897.367286] env[62519]: INFO nova.scheduler.client.report [None req-5cca4c34-b05c-4a92-b943-d0f0eb6d26e2 tempest-ServerMetadataTestJSON-420459941 tempest-ServerMetadataTestJSON-420459941-project-member] Deleted allocations for instance 71edba06-1628-4749-8a51-5a9bcb003fda [ 1897.401763] env[62519]: DEBUG oslo_concurrency.lockutils [None req-90b160da-5ad5-46c7-9fd2-3632227a7909 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Releasing lock "refresh_cache-31db4b14-0ba3-4159-accc-31c21bd81322" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1897.443391] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a2c018a-8245-4262-a7a4-cf06bf7cf9f4 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1897.476659] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-0f0911e1-0cfa-48f5-a707-d04506894ed0 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] [instance: f3665f89-1747-4567-9e56-c937d4ac81da] Powering off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1897.476877] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-4e295de6-fa75-407b-b98b-5c73b4950167 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1897.486751] env[62519]: DEBUG oslo_vmware.api [None req-0f0911e1-0cfa-48f5-a707-d04506894ed0 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Waiting for the task: (returnval){ [ 1897.486751] env[62519]: value = "task-1803244" [ 1897.486751] env[62519]: _type = "Task" [ 1897.486751] env[62519]: } to complete. 
{{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1897.497077] env[62519]: DEBUG oslo_vmware.api [None req-0f0911e1-0cfa-48f5-a707-d04506894ed0 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Task: {'id': task-1803244, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1897.602939] env[62519]: DEBUG oslo_vmware.api [None req-940b0ecd-a4df-4677-a140-fd29162a361e tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Task: {'id': task-1803243, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.069115} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1897.603281] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-940b0ecd-a4df-4677-a140-fd29162a361e tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] [instance: a0c60d49-83bb-434b-815c-c39e7493cbb7] Extended root virtual disk {{(pid=62519) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1897.604136] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2363edc-fb5a-4fea-9dce-9d423d185c82 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1897.629219] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-940b0ecd-a4df-4677-a140-fd29162a361e tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] [instance: a0c60d49-83bb-434b-815c-c39e7493cbb7] Reconfiguring VM instance instance-0000006b to attach disk [datastore1] a0c60d49-83bb-434b-815c-c39e7493cbb7/a0c60d49-83bb-434b-815c-c39e7493cbb7.vmdk or device None with type sparse {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1897.629514] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-493337d5-c317-497d-a542-81fa8e5c2faa {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1897.652726] env[62519]: DEBUG oslo_vmware.api [None req-940b0ecd-a4df-4677-a140-fd29162a361e tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Waiting for the task: (returnval){ [ 1897.652726] env[62519]: value = "task-1803245" [ 1897.652726] env[62519]: _type = "Task" [ 1897.652726] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1897.664078] env[62519]: DEBUG oslo_vmware.api [None req-940b0ecd-a4df-4677-a140-fd29162a361e tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Task: {'id': task-1803245, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1897.681540] env[62519]: DEBUG nova.network.neutron [-] [instance: 10bfd4ac-6f11-4c96-87a0-ce74bc1193c4] Updating instance_info_cache with network_info: [] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1897.875062] env[62519]: DEBUG oslo_concurrency.lockutils [None req-5cca4c34-b05c-4a92-b943-d0f0eb6d26e2 tempest-ServerMetadataTestJSON-420459941 tempest-ServerMetadataTestJSON-420459941-project-member] Lock "71edba06-1628-4749-8a51-5a9bcb003fda" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 8.096s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1897.893241] env[62519]: DEBUG oslo_concurrency.lockutils [None req-ef66b8f4-b0e5-427c-9948-cfa08fc36f9f tempest-ServerShowV257Test-1662468933 tempest-ServerShowV257Test-1662468933-project-member] Acquiring lock "7bf9ee64-6c45-480e-959f-ff8395b7c446" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1897.893927] env[62519]: DEBUG oslo_concurrency.lockutils [None req-ef66b8f4-b0e5-427c-9948-cfa08fc36f9f tempest-ServerShowV257Test-1662468933 tempest-ServerShowV257Test-1662468933-project-member] Lock "7bf9ee64-6c45-480e-959f-ff8395b7c446" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1897.997259] env[62519]: DEBUG oslo_vmware.api [None req-0f0911e1-0cfa-48f5-a707-d04506894ed0 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Task: {'id': task-1803244, 'name': PowerOffVM_Task, 'duration_secs': 0.252706} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1897.997577] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-0f0911e1-0cfa-48f5-a707-d04506894ed0 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] [instance: f3665f89-1747-4567-9e56-c937d4ac81da] Powered off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1898.002887] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-0f0911e1-0cfa-48f5-a707-d04506894ed0 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] [instance: f3665f89-1747-4567-9e56-c937d4ac81da] Reconfiguring VM instance instance-0000005f to detach disk 2002 {{(pid=62519) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1898.003160] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a43e49c6-cccd-4cf7-b95d-635e839b114b {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1898.031379] env[62519]: DEBUG oslo_vmware.api [None req-0f0911e1-0cfa-48f5-a707-d04506894ed0 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Waiting for the task: (returnval){ [ 1898.031379] env[62519]: value = "task-1803246" [ 1898.031379] env[62519]: _type = "Task" [ 1898.031379] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1898.039721] env[62519]: DEBUG oslo_vmware.api [None req-0f0911e1-0cfa-48f5-a707-d04506894ed0 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Task: {'id': task-1803246, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1898.166304] env[62519]: DEBUG oslo_vmware.api [None req-940b0ecd-a4df-4677-a140-fd29162a361e tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Task: {'id': task-1803245, 'name': ReconfigVM_Task, 'duration_secs': 0.319067} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1898.166555] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-940b0ecd-a4df-4677-a140-fd29162a361e tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] [instance: a0c60d49-83bb-434b-815c-c39e7493cbb7] Reconfigured VM instance instance-0000006b to attach disk [datastore1] a0c60d49-83bb-434b-815c-c39e7493cbb7/a0c60d49-83bb-434b-815c-c39e7493cbb7.vmdk or device None with type sparse {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1898.167410] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-390ed13b-893c-47b2-957e-a7f9d73c72ed {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1898.177335] env[62519]: DEBUG oslo_vmware.api [None req-940b0ecd-a4df-4677-a140-fd29162a361e tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Waiting for the task: (returnval){ [ 1898.177335] env[62519]: value = "task-1803247" [ 1898.177335] env[62519]: _type = "Task" [ 1898.177335] env[62519]: } to complete. 
{{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1898.184174] env[62519]: INFO nova.compute.manager [-] [instance: 10bfd4ac-6f11-4c96-87a0-ce74bc1193c4] Took 1.59 seconds to deallocate network for instance. [ 1898.189971] env[62519]: DEBUG oslo_vmware.api [None req-940b0ecd-a4df-4677-a140-fd29162a361e tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Task: {'id': task-1803247, 'name': Rename_Task} progress is 5%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1898.367948] env[62519]: DEBUG nova.compute.manager [req-6cf5dacc-09fc-4708-8bae-0d6b7f441f6f req-134889fa-371e-4912-96bd-5baee2fdd2ed service nova] [instance: 10bfd4ac-6f11-4c96-87a0-ce74bc1193c4] Received event network-vif-deleted-037e7926-5eac-487b-a2fa-4124ead561df {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1898.397342] env[62519]: DEBUG nova.compute.manager [None req-ef66b8f4-b0e5-427c-9948-cfa08fc36f9f tempest-ServerShowV257Test-1662468933 tempest-ServerShowV257Test-1662468933-project-member] [instance: 7bf9ee64-6c45-480e-959f-ff8395b7c446] Starting instance... {{(pid=62519) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2441}} [ 1898.543072] env[62519]: DEBUG oslo_vmware.api [None req-0f0911e1-0cfa-48f5-a707-d04506894ed0 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Task: {'id': task-1803246, 'name': ReconfigVM_Task, 'duration_secs': 0.46289} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1898.543443] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-0f0911e1-0cfa-48f5-a707-d04506894ed0 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] [instance: f3665f89-1747-4567-9e56-c937d4ac81da] Reconfigured VM instance instance-0000005f to detach disk 2002 {{(pid=62519) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1898.543553] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-0f0911e1-0cfa-48f5-a707-d04506894ed0 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] [instance: f3665f89-1747-4567-9e56-c937d4ac81da] Powering on the VM {{(pid=62519) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1898.543800] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ce7b54a0-7dac-41cd-91a7-170438460d95 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1898.554908] env[62519]: DEBUG oslo_vmware.api [None req-0f0911e1-0cfa-48f5-a707-d04506894ed0 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Waiting for the task: (returnval){ [ 1898.554908] env[62519]: value = "task-1803248" [ 1898.554908] env[62519]: _type = "Task" [ 1898.554908] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1898.569382] env[62519]: DEBUG oslo_vmware.api [None req-0f0911e1-0cfa-48f5-a707-d04506894ed0 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Task: {'id': task-1803248, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1898.612507] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8bca13fa-3613-4aa6-8a07-e16eccfe17dc {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1898.620771] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6ca1735-a8ab-475b-9833-aa2d1bb762f8 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1898.652782] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-611858cc-3bbb-475b-bd80-1b25b4e338b2 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1898.661148] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa696b76-624d-4a07-8ccd-855b9d677fc6 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1898.675246] env[62519]: DEBUG nova.compute.provider_tree [None req-677042db-67a2-490e-9bd6-90a6772c4a22 tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] Inventory has not changed in ProviderTree for provider: f8ca0d98-9158-4b85-ae0e-b106f966dd44 {{(pid=62519) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1898.686837] env[62519]: DEBUG oslo_vmware.api [None req-940b0ecd-a4df-4677-a140-fd29162a361e tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Task: {'id': task-1803247, 'name': Rename_Task, 'duration_secs': 0.160882} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1898.687130] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-940b0ecd-a4df-4677-a140-fd29162a361e tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] [instance: a0c60d49-83bb-434b-815c-c39e7493cbb7] Powering on the VM {{(pid=62519) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1898.687369] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-1e690611-28c7-4cb4-af92-d6af795685b0 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1898.694676] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2b0ebd10-a2b6-4e7f-853d-50efcc4a59c7 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1898.698078] env[62519]: DEBUG oslo_vmware.api [None req-940b0ecd-a4df-4677-a140-fd29162a361e tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Waiting for the task: (returnval){ [ 1898.698078] env[62519]: value = "task-1803249" [ 1898.698078] env[62519]: _type = "Task" [ 1898.698078] env[62519]: } to complete. 
{{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1898.706276] env[62519]: DEBUG oslo_vmware.api [None req-940b0ecd-a4df-4677-a140-fd29162a361e tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Task: {'id': task-1803249, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1898.917485] env[62519]: DEBUG oslo_concurrency.lockutils [None req-ef66b8f4-b0e5-427c-9948-cfa08fc36f9f tempest-ServerShowV257Test-1662468933 tempest-ServerShowV257Test-1662468933-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1898.918958] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f08542ac-4aea-4b95-8eff-0914b553f8e4 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1898.942019] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-90b160da-5ad5-46c7-9fd2-3632227a7909 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] [instance: 31db4b14-0ba3-4159-accc-31c21bd81322] Updating instance '31db4b14-0ba3-4159-accc-31c21bd81322' progress to 0 {{(pid=62519) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1899.066160] env[62519]: DEBUG oslo_vmware.api [None req-0f0911e1-0cfa-48f5-a707-d04506894ed0 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Task: {'id': task-1803248, 'name': PowerOnVM_Task, 'duration_secs': 0.511366} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1899.066633] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-0f0911e1-0cfa-48f5-a707-d04506894ed0 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] [instance: f3665f89-1747-4567-9e56-c937d4ac81da] Powered on the VM {{(pid=62519) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1899.066633] env[62519]: DEBUG nova.compute.manager [None req-0f0911e1-0cfa-48f5-a707-d04506894ed0 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] [instance: f3665f89-1747-4567-9e56-c937d4ac81da] Checking state {{(pid=62519) _get_power_state /opt/stack/nova/nova/compute/manager.py:1799}} [ 1899.067466] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68189bb9-f62a-43b9-ab09-49c9631fb2e1 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1899.178737] env[62519]: DEBUG nova.scheduler.client.report [None req-677042db-67a2-490e-9bd6-90a6772c4a22 tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] Inventory has not changed for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 157, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1899.210572] env[62519]: DEBUG oslo_vmware.api [None req-940b0ecd-a4df-4677-a140-fd29162a361e tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Task: {'id': task-1803249, 'name': PowerOnVM_Task} progress is 100%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1899.448482] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-90b160da-5ad5-46c7-9fd2-3632227a7909 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] [instance: 31db4b14-0ba3-4159-accc-31c21bd81322] Powering off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1899.448781] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-083aa2f5-a016-494a-adba-279c59103b6f {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1899.457463] env[62519]: DEBUG oslo_vmware.api [None req-90b160da-5ad5-46c7-9fd2-3632227a7909 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Waiting for the task: (returnval){ [ 1899.457463] env[62519]: value = "task-1803250" [ 1899.457463] env[62519]: _type = "Task" [ 1899.457463] env[62519]: } to complete. 
{{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1899.467574] env[62519]: DEBUG oslo_vmware.api [None req-90b160da-5ad5-46c7-9fd2-3632227a7909 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Task: {'id': task-1803250, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1899.685656] env[62519]: DEBUG oslo_concurrency.lockutils [None req-677042db-67a2-490e-9bd6-90a6772c4a22 tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.345s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1899.686593] env[62519]: DEBUG nova.compute.manager [None req-677042db-67a2-490e-9bd6-90a6772c4a22 tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] [instance: 4719e47d-dc12-4f9c-aff3-46b083bd7e48] Start building networks asynchronously for instance. {{(pid=62519) _build_resources /opt/stack/nova/nova/compute/manager.py:2838}} [ 1899.690299] env[62519]: DEBUG oslo_concurrency.lockutils [None req-69e7dbb2-bece-4fab-a6ec-da81bb691da7 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 3.736s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1899.712591] env[62519]: DEBUG oslo_vmware.api [None req-940b0ecd-a4df-4677-a140-fd29162a361e tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Task: {'id': task-1803249, 'name': PowerOnVM_Task, 'duration_secs': 0.523468} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1899.712905] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-940b0ecd-a4df-4677-a140-fd29162a361e tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] [instance: a0c60d49-83bb-434b-815c-c39e7493cbb7] Powered on the VM {{(pid=62519) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1899.713147] env[62519]: INFO nova.compute.manager [None req-940b0ecd-a4df-4677-a140-fd29162a361e tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] [instance: a0c60d49-83bb-434b-815c-c39e7493cbb7] Took 7.99 seconds to spawn the instance on the hypervisor. 
[ 1899.713502] env[62519]: DEBUG nova.compute.manager [None req-940b0ecd-a4df-4677-a140-fd29162a361e tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] [instance: a0c60d49-83bb-434b-815c-c39e7493cbb7] Checking state {{(pid=62519) _get_power_state /opt/stack/nova/nova/compute/manager.py:1799}} [ 1899.714181] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f83ab6e-c516-4041-8ce6-b7c9865345c8 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1899.976225] env[62519]: DEBUG oslo_vmware.api [None req-90b160da-5ad5-46c7-9fd2-3632227a7909 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Task: {'id': task-1803250, 'name': PowerOffVM_Task, 'duration_secs': 0.219324} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1899.976322] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-90b160da-5ad5-46c7-9fd2-3632227a7909 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] [instance: 31db4b14-0ba3-4159-accc-31c21bd81322] Powered off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1899.976549] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-90b160da-5ad5-46c7-9fd2-3632227a7909 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] [instance: 31db4b14-0ba3-4159-accc-31c21bd81322] Updating instance '31db4b14-0ba3-4159-accc-31c21bd81322' progress to 17 {{(pid=62519) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1900.197080] env[62519]: DEBUG nova.compute.utils [None req-677042db-67a2-490e-9bd6-90a6772c4a22 tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] Using /dev/sd instead of None {{(pid=62519) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1900.200679] env[62519]: INFO nova.compute.claims [None req-69e7dbb2-bece-4fab-a6ec-da81bb691da7 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] [instance: 4a0f7975-5a07-4593-ae71-cabebdefe0fe] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1900.208020] env[62519]: DEBUG nova.compute.manager [None req-677042db-67a2-490e-9bd6-90a6772c4a22 tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] [instance: 4719e47d-dc12-4f9c-aff3-46b083bd7e48] Allocating IP information in the background. {{(pid=62519) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1989}} [ 1900.208020] env[62519]: DEBUG nova.network.neutron [None req-677042db-67a2-490e-9bd6-90a6772c4a22 tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] [instance: 4719e47d-dc12-4f9c-aff3-46b083bd7e48] allocate_for_instance() {{(pid=62519) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1900.234801] env[62519]: INFO nova.compute.manager [None req-940b0ecd-a4df-4677-a140-fd29162a361e tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] [instance: a0c60d49-83bb-434b-815c-c39e7493cbb7] Took 12.99 seconds to build instance. 
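Editor's note: the build traced above is driven by the invoke-then-poll cycle that oslo_vmware.api logs over and over ("Waiting for the task ... to complete", "progress is 0%", "completed successfully"). The following is a minimal, generic sketch of that polling pattern, not the oslo.vmware implementation; poll_progress, the 0.5 s interval and the timeout are assumptions made only for illustration.

    # Illustrative sketch only; mirrors the wait_for_task/_poll_task
    # sequence in the trace above, it is not oslo.vmware code.
    import time


    class TaskFailed(Exception):
        """Raised when the backend reports the task as failed."""


    def wait_for_task(poll_progress, interval=0.5, timeout=300):
        """Poll a task until it reports completion.

        poll_progress is any callable returning (state, progress),
        where state is one of 'running', 'success', 'error'.
        """
        deadline = time.monotonic() + timeout
        while True:
            state, progress = poll_progress()
            if state == 'success':
                return
            if state == 'error':
                raise TaskFailed('task reported an error at %d%%' % progress)
            if time.monotonic() > deadline:
                raise TimeoutError('task did not complete within %ss' % timeout)
            time.sleep(interval)

In the trace, SearchDatastore_Task, CopyVirtualDisk_Task, ExtendVirtualDisk_Task, ReconfigVM_Task, Rename_Task and PowerOnVM_Task are each waited on in exactly this fashion before the next step of the spawn proceeds.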
[ 1900.288795] env[62519]: DEBUG nova.policy [None req-677042db-67a2-490e-9bd6-90a6772c4a22 tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b94adf2a04874e489cdadc04a95ae3af', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '9808ad7e38e34658aac06ebc932b0e32', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62519) authorize /opt/stack/nova/nova/policy.py:192}} [ 1900.483653] env[62519]: DEBUG nova.virt.hardware [None req-90b160da-5ad5-46c7-9fd2-3632227a7909 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T07:56:15Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1900.483999] env[62519]: DEBUG nova.virt.hardware [None req-90b160da-5ad5-46c7-9fd2-3632227a7909 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Flavor limits 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1900.484214] env[62519]: DEBUG nova.virt.hardware [None req-90b160da-5ad5-46c7-9fd2-3632227a7909 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Image limits 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1900.484413] env[62519]: DEBUG nova.virt.hardware [None req-90b160da-5ad5-46c7-9fd2-3632227a7909 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Flavor pref 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1900.484561] env[62519]: DEBUG nova.virt.hardware [None req-90b160da-5ad5-46c7-9fd2-3632227a7909 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Image pref 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1900.484709] env[62519]: DEBUG nova.virt.hardware [None req-90b160da-5ad5-46c7-9fd2-3632227a7909 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1900.484917] env[62519]: DEBUG nova.virt.hardware [None req-90b160da-5ad5-46c7-9fd2-3632227a7909 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum 
VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1900.485225] env[62519]: DEBUG nova.virt.hardware [None req-90b160da-5ad5-46c7-9fd2-3632227a7909 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62519) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1900.485885] env[62519]: DEBUG nova.virt.hardware [None req-90b160da-5ad5-46c7-9fd2-3632227a7909 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Got 1 possible topologies {{(pid=62519) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1900.486113] env[62519]: DEBUG nova.virt.hardware [None req-90b160da-5ad5-46c7-9fd2-3632227a7909 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1900.486303] env[62519]: DEBUG nova.virt.hardware [None req-90b160da-5ad5-46c7-9fd2-3632227a7909 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1900.492804] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-acf467b7-f84c-4dd7-a7a7-fdd56c844f74 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1900.517790] env[62519]: DEBUG oslo_vmware.api [None req-90b160da-5ad5-46c7-9fd2-3632227a7909 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Waiting for the task: (returnval){ [ 1900.517790] env[62519]: value = "task-1803251" [ 1900.517790] env[62519]: _type = "Task" [ 1900.517790] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1900.530709] env[62519]: DEBUG oslo_vmware.api [None req-90b160da-5ad5-46c7-9fd2-3632227a7909 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Task: {'id': task-1803251, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1900.706016] env[62519]: DEBUG nova.compute.manager [None req-677042db-67a2-490e-9bd6-90a6772c4a22 tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] [instance: 4719e47d-dc12-4f9c-aff3-46b083bd7e48] Start building block device mappings for instance. 
{{(pid=62519) _build_resources /opt/stack/nova/nova/compute/manager.py:2873}} [ 1900.709237] env[62519]: INFO nova.compute.resource_tracker [None req-69e7dbb2-bece-4fab-a6ec-da81bb691da7 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] [instance: 4a0f7975-5a07-4593-ae71-cabebdefe0fe] Updating resource usage from migration e85262b2-652f-424f-b78d-dea43ab76545 [ 1900.915308] env[62519]: DEBUG nova.compute.manager [req-927a0d62-3e6d-4c82-b394-71139a9d98c6 req-23b376a8-be59-48d3-abbf-a91a839ddc86 service nova] [instance: f3665f89-1747-4567-9e56-c937d4ac81da] Received event network-changed-7220339b-d4a7-441b-8227-c956744ce0c0 {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1900.915428] env[62519]: DEBUG nova.compute.manager [req-927a0d62-3e6d-4c82-b394-71139a9d98c6 req-23b376a8-be59-48d3-abbf-a91a839ddc86 service nova] [instance: f3665f89-1747-4567-9e56-c937d4ac81da] Refreshing instance network info cache due to event network-changed-7220339b-d4a7-441b-8227-c956744ce0c0. {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11628}} [ 1900.915537] env[62519]: DEBUG oslo_concurrency.lockutils [req-927a0d62-3e6d-4c82-b394-71139a9d98c6 req-23b376a8-be59-48d3-abbf-a91a839ddc86 service nova] Acquiring lock "refresh_cache-f3665f89-1747-4567-9e56-c937d4ac81da" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1900.916238] env[62519]: DEBUG oslo_concurrency.lockutils [req-927a0d62-3e6d-4c82-b394-71139a9d98c6 req-23b376a8-be59-48d3-abbf-a91a839ddc86 service nova] Acquired lock "refresh_cache-f3665f89-1747-4567-9e56-c937d4ac81da" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1900.916238] env[62519]: DEBUG nova.network.neutron [req-927a0d62-3e6d-4c82-b394-71139a9d98c6 req-23b376a8-be59-48d3-abbf-a91a839ddc86 service nova] [instance: f3665f89-1747-4567-9e56-c937d4ac81da] Refreshing network info cache for port 7220339b-d4a7-441b-8227-c956744ce0c0 {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1901.015268] env[62519]: DEBUG nova.network.neutron [None req-677042db-67a2-490e-9bd6-90a6772c4a22 tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] [instance: 4719e47d-dc12-4f9c-aff3-46b083bd7e48] Successfully created port: d1511a81-0fe4-44f7-b837-a2e080534be6 {{(pid=62519) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1901.033673] env[62519]: DEBUG oslo_vmware.api [None req-90b160da-5ad5-46c7-9fd2-3632227a7909 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Task: {'id': task-1803251, 'name': ReconfigVM_Task, 'duration_secs': 0.329408} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1901.038022] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-90b160da-5ad5-46c7-9fd2-3632227a7909 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] [instance: 31db4b14-0ba3-4159-accc-31c21bd81322] Updating instance '31db4b14-0ba3-4159-accc-31c21bd81322' progress to 33 {{(pid=62519) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1901.110928] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e9dabbc-27e9-4b79-b85e-ecba1ae28a73 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1901.121492] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46284851-4cac-4eaa-b567-0c3f3e84a3ce {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1901.154567] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2de51aff-b0e2-4573-930c-e46e19ad97df {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1901.163933] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-860a1190-e628-4a37-9eb5-c71b5884ed17 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1901.181537] env[62519]: DEBUG nova.compute.provider_tree [None req-69e7dbb2-bece-4fab-a6ec-da81bb691da7 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Inventory has not changed in ProviderTree for provider: f8ca0d98-9158-4b85-ae0e-b106f966dd44 {{(pid=62519) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1901.546088] env[62519]: DEBUG nova.virt.hardware [None req-90b160da-5ad5-46c7-9fd2-3632227a7909 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T07:56:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1901.546455] env[62519]: DEBUG nova.virt.hardware [None req-90b160da-5ad5-46c7-9fd2-3632227a7909 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Flavor limits 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1901.546812] env[62519]: DEBUG nova.virt.hardware [None req-90b160da-5ad5-46c7-9fd2-3632227a7909 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Image limits 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1901.547147] env[62519]: DEBUG nova.virt.hardware [None 
req-90b160da-5ad5-46c7-9fd2-3632227a7909 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Flavor pref 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1901.547436] env[62519]: DEBUG nova.virt.hardware [None req-90b160da-5ad5-46c7-9fd2-3632227a7909 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Image pref 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1901.547634] env[62519]: DEBUG nova.virt.hardware [None req-90b160da-5ad5-46c7-9fd2-3632227a7909 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1901.547926] env[62519]: DEBUG nova.virt.hardware [None req-90b160da-5ad5-46c7-9fd2-3632227a7909 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1901.548262] env[62519]: DEBUG nova.virt.hardware [None req-90b160da-5ad5-46c7-9fd2-3632227a7909 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62519) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1901.548560] env[62519]: DEBUG nova.virt.hardware [None req-90b160da-5ad5-46c7-9fd2-3632227a7909 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Got 1 possible topologies {{(pid=62519) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1901.549106] env[62519]: DEBUG nova.virt.hardware [None req-90b160da-5ad5-46c7-9fd2-3632227a7909 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1901.549140] env[62519]: DEBUG nova.virt.hardware [None req-90b160da-5ad5-46c7-9fd2-3632227a7909 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1901.558299] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-90b160da-5ad5-46c7-9fd2-3632227a7909 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] [instance: 31db4b14-0ba3-4159-accc-31c21bd81322] Reconfiguring VM instance instance-0000006a to detach disk 2000 {{(pid=62519) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1901.562174] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a8450f74-2d07-427a-8e1c-f2d0ae5e3a4c {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1901.589227] env[62519]: DEBUG oslo_vmware.api [None req-90b160da-5ad5-46c7-9fd2-3632227a7909 tempest-ServerActionsTestOtherA-311064147 
tempest-ServerActionsTestOtherA-311064147-project-member] Waiting for the task: (returnval){ [ 1901.589227] env[62519]: value = "task-1803252" [ 1901.589227] env[62519]: _type = "Task" [ 1901.589227] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1901.598198] env[62519]: DEBUG oslo_vmware.api [None req-90b160da-5ad5-46c7-9fd2-3632227a7909 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Task: {'id': task-1803252, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1901.681088] env[62519]: DEBUG nova.network.neutron [req-927a0d62-3e6d-4c82-b394-71139a9d98c6 req-23b376a8-be59-48d3-abbf-a91a839ddc86 service nova] [instance: f3665f89-1747-4567-9e56-c937d4ac81da] Updated VIF entry in instance network info cache for port 7220339b-d4a7-441b-8227-c956744ce0c0. {{(pid=62519) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1901.681491] env[62519]: DEBUG nova.network.neutron [req-927a0d62-3e6d-4c82-b394-71139a9d98c6 req-23b376a8-be59-48d3-abbf-a91a839ddc86 service nova] [instance: f3665f89-1747-4567-9e56-c937d4ac81da] Updating instance_info_cache with network_info: [{"id": "7220339b-d4a7-441b-8227-c956744ce0c0", "address": "fa:16:3e:0f:ad:ae", "network": {"id": "8d760b11-0e90-40f8-a7a6-509485520bf3", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-619921944-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.175", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "448555031bb64aefafd0fcc67f4df10a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4225eb1f-0af4-4ed4-8e3d-de822eb6d4ea", "external-id": "nsx-vlan-transportzone-40", "segmentation_id": 40, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7220339b-d4", "ovs_interfaceid": "7220339b-d4a7-441b-8227-c956744ce0c0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1901.685459] env[62519]: DEBUG nova.scheduler.client.report [None req-69e7dbb2-bece-4fab-a6ec-da81bb691da7 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Inventory has not changed for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 157, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1901.718973] env[62519]: DEBUG nova.compute.manager [None req-677042db-67a2-490e-9bd6-90a6772c4a22 
tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] [instance: 4719e47d-dc12-4f9c-aff3-46b083bd7e48] Start spawning the instance on the hypervisor. {{(pid=62519) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2647}} [ 1901.745380] env[62519]: DEBUG nova.virt.hardware [None req-677042db-67a2-490e-9bd6-90a6772c4a22 tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T07:56:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T07:55:55Z,direct_url=,disk_format='vmdk',id=15793716-f1d9-4a86-9030-717adf498693,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4069337684d348e0a1ab4eb6a1a2b14d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T07:55:56Z,virtual_size=,visibility=), allow threads: False {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1901.745629] env[62519]: DEBUG nova.virt.hardware [None req-677042db-67a2-490e-9bd6-90a6772c4a22 tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] Flavor limits 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1901.745791] env[62519]: DEBUG nova.virt.hardware [None req-677042db-67a2-490e-9bd6-90a6772c4a22 tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] Image limits 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1901.745975] env[62519]: DEBUG nova.virt.hardware [None req-677042db-67a2-490e-9bd6-90a6772c4a22 tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] Flavor pref 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1901.746173] env[62519]: DEBUG nova.virt.hardware [None req-677042db-67a2-490e-9bd6-90a6772c4a22 tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] Image pref 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1901.746337] env[62519]: DEBUG nova.virt.hardware [None req-677042db-67a2-490e-9bd6-90a6772c4a22 tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1901.746544] env[62519]: DEBUG nova.virt.hardware [None req-677042db-67a2-490e-9bd6-90a6772c4a22 tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1901.746703] env[62519]: 
DEBUG nova.virt.hardware [None req-677042db-67a2-490e-9bd6-90a6772c4a22 tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62519) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1901.746872] env[62519]: DEBUG nova.virt.hardware [None req-677042db-67a2-490e-9bd6-90a6772c4a22 tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] Got 1 possible topologies {{(pid=62519) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1901.747054] env[62519]: DEBUG nova.virt.hardware [None req-677042db-67a2-490e-9bd6-90a6772c4a22 tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1901.747256] env[62519]: DEBUG nova.virt.hardware [None req-677042db-67a2-490e-9bd6-90a6772c4a22 tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1901.747975] env[62519]: DEBUG oslo_concurrency.lockutils [None req-940b0ecd-a4df-4677-a140-fd29162a361e tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Lock "a0c60d49-83bb-434b-815c-c39e7493cbb7" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 15.519s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1901.748825] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4595bd74-e69c-4ee4-9a83-4bea23135657 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1901.757162] env[62519]: DEBUG oslo_concurrency.lockutils [None req-3c0e1e7d-568e-474b-93f8-7d01020b9273 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Acquiring lock "a0c60d49-83bb-434b-815c-c39e7493cbb7" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1901.757397] env[62519]: DEBUG oslo_concurrency.lockutils [None req-3c0e1e7d-568e-474b-93f8-7d01020b9273 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Lock "a0c60d49-83bb-434b-815c-c39e7493cbb7" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1901.757573] env[62519]: DEBUG nova.compute.manager [None req-3c0e1e7d-568e-474b-93f8-7d01020b9273 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] [instance: a0c60d49-83bb-434b-815c-c39e7493cbb7] Checking state {{(pid=62519) _get_power_state /opt/stack/nova/nova/compute/manager.py:1799}} [ 1901.758467] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6eae51a-4f54-4810-8725-a88f5fb1e475 {{(pid=62519) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1901.762371] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c8ff1be-fb89-4634-b932-fba8ab0100ab {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1901.771134] env[62519]: DEBUG nova.compute.manager [None req-3c0e1e7d-568e-474b-93f8-7d01020b9273 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] [instance: a0c60d49-83bb-434b-815c-c39e7493cbb7] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=62519) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3408}} [ 1901.771738] env[62519]: DEBUG nova.objects.instance [None req-3c0e1e7d-568e-474b-93f8-7d01020b9273 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Lazy-loading 'flavor' on Instance uuid a0c60d49-83bb-434b-815c-c39e7493cbb7 {{(pid=62519) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1902.099894] env[62519]: DEBUG oslo_vmware.api [None req-90b160da-5ad5-46c7-9fd2-3632227a7909 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Task: {'id': task-1803252, 'name': ReconfigVM_Task, 'duration_secs': 0.351188} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1902.100199] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-90b160da-5ad5-46c7-9fd2-3632227a7909 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] [instance: 31db4b14-0ba3-4159-accc-31c21bd81322] Reconfigured VM instance instance-0000006a to detach disk 2000 {{(pid=62519) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1902.100976] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9ab2c9b-2302-46e1-8de0-ce2301b8f94d {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1902.122915] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-90b160da-5ad5-46c7-9fd2-3632227a7909 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] [instance: 31db4b14-0ba3-4159-accc-31c21bd81322] Reconfiguring VM instance instance-0000006a to attach disk [datastore1] volume-84bda086-0de7-4b24-8a1f-6ff7c11594b6/volume-84bda086-0de7-4b24-8a1f-6ff7c11594b6.vmdk or device None with type thin {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1902.123120] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7d36e5b7-d4bc-4a51-96a9-78925511c9af {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1902.141812] env[62519]: DEBUG oslo_vmware.api [None req-90b160da-5ad5-46c7-9fd2-3632227a7909 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Waiting for the task: (returnval){ [ 1902.141812] env[62519]: value = "task-1803253" [ 1902.141812] env[62519]: _type = "Task" [ 1902.141812] env[62519]: } to complete. 
{{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1902.150543] env[62519]: DEBUG oslo_vmware.api [None req-90b160da-5ad5-46c7-9fd2-3632227a7909 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Task: {'id': task-1803253, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1902.184463] env[62519]: DEBUG oslo_concurrency.lockutils [req-927a0d62-3e6d-4c82-b394-71139a9d98c6 req-23b376a8-be59-48d3-abbf-a91a839ddc86 service nova] Releasing lock "refresh_cache-f3665f89-1747-4567-9e56-c937d4ac81da" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1902.189662] env[62519]: DEBUG oslo_concurrency.lockutils [None req-69e7dbb2-bece-4fab-a6ec-da81bb691da7 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.499s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1902.190017] env[62519]: INFO nova.compute.manager [None req-69e7dbb2-bece-4fab-a6ec-da81bb691da7 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] [instance: 4a0f7975-5a07-4593-ae71-cabebdefe0fe] Migrating [ 1902.197536] env[62519]: DEBUG oslo_concurrency.lockutils [None req-d89ebd38-be78-4944-865d-e148c2f80f82 tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 5.930s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1902.197838] env[62519]: DEBUG nova.objects.instance [None req-d89ebd38-be78-4944-865d-e148c2f80f82 tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Lazy-loading 'resources' on Instance uuid 46b3a0fb-29f6-4b66-a091-2d125b69d109 {{(pid=62519) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1902.527663] env[62519]: DEBUG nova.network.neutron [None req-677042db-67a2-490e-9bd6-90a6772c4a22 tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] [instance: 4719e47d-dc12-4f9c-aff3-46b083bd7e48] Successfully updated port: d1511a81-0fe4-44f7-b837-a2e080534be6 {{(pid=62519) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1902.652422] env[62519]: DEBUG oslo_vmware.api [None req-90b160da-5ad5-46c7-9fd2-3632227a7909 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Task: {'id': task-1803253, 'name': ReconfigVM_Task, 'duration_secs': 0.263341} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1902.652794] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-90b160da-5ad5-46c7-9fd2-3632227a7909 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] [instance: 31db4b14-0ba3-4159-accc-31c21bd81322] Reconfigured VM instance instance-0000006a to attach disk [datastore1] volume-84bda086-0de7-4b24-8a1f-6ff7c11594b6/volume-84bda086-0de7-4b24-8a1f-6ff7c11594b6.vmdk or device None with type thin {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1902.652904] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-90b160da-5ad5-46c7-9fd2-3632227a7909 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] [instance: 31db4b14-0ba3-4159-accc-31c21bd81322] Updating instance '31db4b14-0ba3-4159-accc-31c21bd81322' progress to 50 {{(pid=62519) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1902.709681] env[62519]: DEBUG oslo_concurrency.lockutils [None req-69e7dbb2-bece-4fab-a6ec-da81bb691da7 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Acquiring lock "refresh_cache-4a0f7975-5a07-4593-ae71-cabebdefe0fe" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1902.710081] env[62519]: DEBUG oslo_concurrency.lockutils [None req-69e7dbb2-bece-4fab-a6ec-da81bb691da7 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Acquired lock "refresh_cache-4a0f7975-5a07-4593-ae71-cabebdefe0fe" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1902.710130] env[62519]: DEBUG nova.network.neutron [None req-69e7dbb2-bece-4fab-a6ec-da81bb691da7 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] [instance: 4a0f7975-5a07-4593-ae71-cabebdefe0fe] Building network info cache for instance {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1902.787443] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-3c0e1e7d-568e-474b-93f8-7d01020b9273 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] [instance: a0c60d49-83bb-434b-815c-c39e7493cbb7] Powering off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1902.788296] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e924f3de-253e-4193-8174-9c6aeb0c9dc0 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1902.796393] env[62519]: DEBUG oslo_vmware.api [None req-3c0e1e7d-568e-474b-93f8-7d01020b9273 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Waiting for the task: (returnval){ [ 1902.796393] env[62519]: value = "task-1803254" [ 1902.796393] env[62519]: _type = "Task" [ 1902.796393] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1902.807771] env[62519]: DEBUG oslo_vmware.api [None req-3c0e1e7d-568e-474b-93f8-7d01020b9273 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Task: {'id': task-1803254, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1902.945905] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-215cd76c-98fe-41ce-8b3d-5945087b767f {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1902.951705] env[62519]: DEBUG nova.compute.manager [req-56395a8d-9f26-4dcd-a109-a5723ac7fa08 req-2737c4b4-aca5-4770-b71f-5661d0b0b427 service nova] [instance: f3665f89-1747-4567-9e56-c937d4ac81da] Received event network-changed-7220339b-d4a7-441b-8227-c956744ce0c0 {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1902.951923] env[62519]: DEBUG nova.compute.manager [req-56395a8d-9f26-4dcd-a109-a5723ac7fa08 req-2737c4b4-aca5-4770-b71f-5661d0b0b427 service nova] [instance: f3665f89-1747-4567-9e56-c937d4ac81da] Refreshing instance network info cache due to event network-changed-7220339b-d4a7-441b-8227-c956744ce0c0. {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11628}} [ 1902.952155] env[62519]: DEBUG oslo_concurrency.lockutils [req-56395a8d-9f26-4dcd-a109-a5723ac7fa08 req-2737c4b4-aca5-4770-b71f-5661d0b0b427 service nova] Acquiring lock "refresh_cache-f3665f89-1747-4567-9e56-c937d4ac81da" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1902.952295] env[62519]: DEBUG oslo_concurrency.lockutils [req-56395a8d-9f26-4dcd-a109-a5723ac7fa08 req-2737c4b4-aca5-4770-b71f-5661d0b0b427 service nova] Acquired lock "refresh_cache-f3665f89-1747-4567-9e56-c937d4ac81da" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1902.952454] env[62519]: DEBUG nova.network.neutron [req-56395a8d-9f26-4dcd-a109-a5723ac7fa08 req-2737c4b4-aca5-4770-b71f-5661d0b0b427 service nova] [instance: f3665f89-1747-4567-9e56-c937d4ac81da] Refreshing network info cache for port 7220339b-d4a7-441b-8227-c956744ce0c0 {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1902.959858] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e0b2cf4-f7ca-4c6f-b390-33c67b5f960e {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1902.992831] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50848be2-d10b-4775-bb6b-b45d79e75c5c {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1903.000847] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0a98b8b-0bf0-4b2a-9cc7-beb2bbbf89a8 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1903.015173] env[62519]: DEBUG nova.compute.provider_tree [None req-d89ebd38-be78-4944-865d-e148c2f80f82 tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Updating inventory in ProviderTree for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 
1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1903.031276] env[62519]: DEBUG oslo_concurrency.lockutils [None req-677042db-67a2-490e-9bd6-90a6772c4a22 tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] Acquiring lock "refresh_cache-4719e47d-dc12-4f9c-aff3-46b083bd7e48" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1903.031449] env[62519]: DEBUG oslo_concurrency.lockutils [None req-677042db-67a2-490e-9bd6-90a6772c4a22 tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] Acquired lock "refresh_cache-4719e47d-dc12-4f9c-aff3-46b083bd7e48" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1903.031587] env[62519]: DEBUG nova.network.neutron [None req-677042db-67a2-490e-9bd6-90a6772c4a22 tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] [instance: 4719e47d-dc12-4f9c-aff3-46b083bd7e48] Building network info cache for instance {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1903.160694] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-367c4795-2395-4e93-8914-102fd0073b79 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1903.180728] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d86940e-06cd-4d0b-8c34-0c7d269dafe3 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1903.198629] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-90b160da-5ad5-46c7-9fd2-3632227a7909 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] [instance: 31db4b14-0ba3-4159-accc-31c21bd81322] Updating instance '31db4b14-0ba3-4159-accc-31c21bd81322' progress to 67 {{(pid=62519) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1903.307281] env[62519]: DEBUG oslo_vmware.api [None req-3c0e1e7d-568e-474b-93f8-7d01020b9273 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Task: {'id': task-1803254, 'name': PowerOffVM_Task, 'duration_secs': 0.243435} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1903.309490] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-3c0e1e7d-568e-474b-93f8-7d01020b9273 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] [instance: a0c60d49-83bb-434b-815c-c39e7493cbb7] Powered off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1903.309697] env[62519]: DEBUG nova.compute.manager [None req-3c0e1e7d-568e-474b-93f8-7d01020b9273 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] [instance: a0c60d49-83bb-434b-815c-c39e7493cbb7] Checking state {{(pid=62519) _get_power_state /opt/stack/nova/nova/compute/manager.py:1799}} [ 1903.310494] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91e9e548-0495-4d66-87ee-f5688f526f2b {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1903.420053] env[62519]: DEBUG nova.network.neutron [None req-69e7dbb2-bece-4fab-a6ec-da81bb691da7 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] [instance: 4a0f7975-5a07-4593-ae71-cabebdefe0fe] Updating instance_info_cache with network_info: [{"id": "f8b7229e-2b40-4a1b-9e16-0d01dbaa52f8", "address": "fa:16:3e:6c:b5:c8", "network": {"id": "32c7792c-9705-4830-bdbc-4c4159d566f0", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-374750788-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.241", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d73e71476254453fb23164dce09c6d41", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a0a76279-3c11-4bef-b124-2a2ee13fa377", "external-id": "nsx-vlan-transportzone-738", "segmentation_id": 738, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf8b7229e-2b", "ovs_interfaceid": "f8b7229e-2b40-4a1b-9e16-0d01dbaa52f8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1903.536526] env[62519]: ERROR nova.scheduler.client.report [None req-d89ebd38-be78-4944-865d-e148c2f80f82 tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] [req-43c78152-f8c8-44eb-bd38-7b6ad0e36911] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID f8ca0d98-9158-4b85-ae0e-b106f966dd44. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-43c78152-f8c8-44eb-bd38-7b6ad0e36911"}]} [ 1903.556573] env[62519]: DEBUG nova.scheduler.client.report [None req-d89ebd38-be78-4944-865d-e148c2f80f82 tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Refreshing inventories for resource provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 {{(pid=62519) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 1903.574891] env[62519]: DEBUG nova.scheduler.client.report [None req-d89ebd38-be78-4944-865d-e148c2f80f82 tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Updating ProviderTree inventory for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 157, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 1903.575170] env[62519]: DEBUG nova.compute.provider_tree [None req-d89ebd38-be78-4944-865d-e148c2f80f82 tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Updating inventory in ProviderTree for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 157, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1903.587835] env[62519]: DEBUG nova.scheduler.client.report [None req-d89ebd38-be78-4944-865d-e148c2f80f82 tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Refreshing aggregate associations for resource provider f8ca0d98-9158-4b85-ae0e-b106f966dd44, aggregates: None {{(pid=62519) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 1903.591271] env[62519]: DEBUG nova.network.neutron [None req-677042db-67a2-490e-9bd6-90a6772c4a22 tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] [instance: 4719e47d-dc12-4f9c-aff3-46b083bd7e48] Instance cache missing network info. 
{{(pid=62519) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1903.606875] env[62519]: DEBUG nova.scheduler.client.report [None req-d89ebd38-be78-4944-865d-e148c2f80f82 tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Refreshing trait associations for resource provider f8ca0d98-9158-4b85-ae0e-b106f966dd44, traits: COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NODE,COMPUTE_SAME_HOST_COLD_MIGRATE,HW_ARCH_X86_64,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_VMDK {{(pid=62519) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 1903.746132] env[62519]: DEBUG nova.network.neutron [req-56395a8d-9f26-4dcd-a109-a5723ac7fa08 req-2737c4b4-aca5-4770-b71f-5661d0b0b427 service nova] [instance: f3665f89-1747-4567-9e56-c937d4ac81da] Updated VIF entry in instance network info cache for port 7220339b-d4a7-441b-8227-c956744ce0c0. {{(pid=62519) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1903.746510] env[62519]: DEBUG nova.network.neutron [req-56395a8d-9f26-4dcd-a109-a5723ac7fa08 req-2737c4b4-aca5-4770-b71f-5661d0b0b427 service nova] [instance: f3665f89-1747-4567-9e56-c937d4ac81da] Updating instance_info_cache with network_info: [{"id": "7220339b-d4a7-441b-8227-c956744ce0c0", "address": "fa:16:3e:0f:ad:ae", "network": {"id": "8d760b11-0e90-40f8-a7a6-509485520bf3", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-619921944-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.175", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "448555031bb64aefafd0fcc67f4df10a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4225eb1f-0af4-4ed4-8e3d-de822eb6d4ea", "external-id": "nsx-vlan-transportzone-40", "segmentation_id": 40, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7220339b-d4", "ovs_interfaceid": "7220339b-d4a7-441b-8227-c956744ce0c0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1903.800661] env[62519]: DEBUG nova.network.neutron [None req-677042db-67a2-490e-9bd6-90a6772c4a22 tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] [instance: 4719e47d-dc12-4f9c-aff3-46b083bd7e48] Updating instance_info_cache with network_info: [{"id": "d1511a81-0fe4-44f7-b837-a2e080534be6", "address": "fa:16:3e:3c:a8:38", "network": {"id": "b50c1a89-c14c-481c-b5d9-748f18863a45", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-698120386-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": 
"9808ad7e38e34658aac06ebc932b0e32", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f67a2790-f2b0-4d03-b606-0bfaee7a4229", "external-id": "nsx-vlan-transportzone-187", "segmentation_id": 187, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd1511a81-0f", "ovs_interfaceid": "d1511a81-0fe4-44f7-b837-a2e080534be6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1903.821937] env[62519]: DEBUG oslo_concurrency.lockutils [None req-3c0e1e7d-568e-474b-93f8-7d01020b9273 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Lock "a0c60d49-83bb-434b-815c-c39e7493cbb7" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 2.064s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1903.842708] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79c369cd-2909-49c1-9ecb-0af83f0fce11 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1903.852438] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59c65e42-aa12-42b9-9e0c-7876933f3ad8 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1903.900467] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aed51c6c-082b-4e22-b7cc-4185aebc4f9c {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1903.910594] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8543ff5-bd1c-42b9-85eb-c6e2debd0d00 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1903.925196] env[62519]: DEBUG oslo_concurrency.lockutils [None req-69e7dbb2-bece-4fab-a6ec-da81bb691da7 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Releasing lock "refresh_cache-4a0f7975-5a07-4593-ae71-cabebdefe0fe" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1903.926756] env[62519]: DEBUG nova.compute.provider_tree [None req-d89ebd38-be78-4944-865d-e148c2f80f82 tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Updating inventory in ProviderTree for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1904.251682] env[62519]: DEBUG oslo_concurrency.lockutils [req-56395a8d-9f26-4dcd-a109-a5723ac7fa08 req-2737c4b4-aca5-4770-b71f-5661d0b0b427 service nova] Releasing lock 
"refresh_cache-f3665f89-1747-4567-9e56-c937d4ac81da" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1904.251962] env[62519]: DEBUG nova.compute.manager [req-56395a8d-9f26-4dcd-a109-a5723ac7fa08 req-2737c4b4-aca5-4770-b71f-5661d0b0b427 service nova] [instance: 4719e47d-dc12-4f9c-aff3-46b083bd7e48] Received event network-vif-plugged-d1511a81-0fe4-44f7-b837-a2e080534be6 {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1904.252305] env[62519]: DEBUG oslo_concurrency.lockutils [req-56395a8d-9f26-4dcd-a109-a5723ac7fa08 req-2737c4b4-aca5-4770-b71f-5661d0b0b427 service nova] Acquiring lock "4719e47d-dc12-4f9c-aff3-46b083bd7e48-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1904.252596] env[62519]: DEBUG oslo_concurrency.lockutils [req-56395a8d-9f26-4dcd-a109-a5723ac7fa08 req-2737c4b4-aca5-4770-b71f-5661d0b0b427 service nova] Lock "4719e47d-dc12-4f9c-aff3-46b083bd7e48-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1904.252778] env[62519]: DEBUG oslo_concurrency.lockutils [req-56395a8d-9f26-4dcd-a109-a5723ac7fa08 req-2737c4b4-aca5-4770-b71f-5661d0b0b427 service nova] Lock "4719e47d-dc12-4f9c-aff3-46b083bd7e48-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1904.253043] env[62519]: DEBUG nova.compute.manager [req-56395a8d-9f26-4dcd-a109-a5723ac7fa08 req-2737c4b4-aca5-4770-b71f-5661d0b0b427 service nova] [instance: 4719e47d-dc12-4f9c-aff3-46b083bd7e48] No waiting events found dispatching network-vif-plugged-d1511a81-0fe4-44f7-b837-a2e080534be6 {{(pid=62519) pop_instance_event /opt/stack/nova/nova/compute/manager.py:323}} [ 1904.253249] env[62519]: WARNING nova.compute.manager [req-56395a8d-9f26-4dcd-a109-a5723ac7fa08 req-2737c4b4-aca5-4770-b71f-5661d0b0b427 service nova] [instance: 4719e47d-dc12-4f9c-aff3-46b083bd7e48] Received unexpected event network-vif-plugged-d1511a81-0fe4-44f7-b837-a2e080534be6 for instance with vm_state building and task_state spawning. [ 1904.253423] env[62519]: DEBUG nova.compute.manager [req-56395a8d-9f26-4dcd-a109-a5723ac7fa08 req-2737c4b4-aca5-4770-b71f-5661d0b0b427 service nova] [instance: 4719e47d-dc12-4f9c-aff3-46b083bd7e48] Received event network-changed-d1511a81-0fe4-44f7-b837-a2e080534be6 {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1904.253582] env[62519]: DEBUG nova.compute.manager [req-56395a8d-9f26-4dcd-a109-a5723ac7fa08 req-2737c4b4-aca5-4770-b71f-5661d0b0b427 service nova] [instance: 4719e47d-dc12-4f9c-aff3-46b083bd7e48] Refreshing instance network info cache due to event network-changed-d1511a81-0fe4-44f7-b837-a2e080534be6. 
{{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11628}} [ 1904.253756] env[62519]: DEBUG oslo_concurrency.lockutils [req-56395a8d-9f26-4dcd-a109-a5723ac7fa08 req-2737c4b4-aca5-4770-b71f-5661d0b0b427 service nova] Acquiring lock "refresh_cache-4719e47d-dc12-4f9c-aff3-46b083bd7e48" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1904.303775] env[62519]: DEBUG oslo_concurrency.lockutils [None req-677042db-67a2-490e-9bd6-90a6772c4a22 tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] Releasing lock "refresh_cache-4719e47d-dc12-4f9c-aff3-46b083bd7e48" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1904.304094] env[62519]: DEBUG nova.compute.manager [None req-677042db-67a2-490e-9bd6-90a6772c4a22 tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] [instance: 4719e47d-dc12-4f9c-aff3-46b083bd7e48] Instance network_info: |[{"id": "d1511a81-0fe4-44f7-b837-a2e080534be6", "address": "fa:16:3e:3c:a8:38", "network": {"id": "b50c1a89-c14c-481c-b5d9-748f18863a45", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-698120386-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9808ad7e38e34658aac06ebc932b0e32", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f67a2790-f2b0-4d03-b606-0bfaee7a4229", "external-id": "nsx-vlan-transportzone-187", "segmentation_id": 187, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd1511a81-0f", "ovs_interfaceid": "d1511a81-0fe4-44f7-b837-a2e080534be6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62519) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2004}} [ 1904.304443] env[62519]: DEBUG oslo_concurrency.lockutils [req-56395a8d-9f26-4dcd-a109-a5723ac7fa08 req-2737c4b4-aca5-4770-b71f-5661d0b0b427 service nova] Acquired lock "refresh_cache-4719e47d-dc12-4f9c-aff3-46b083bd7e48" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1904.304626] env[62519]: DEBUG nova.network.neutron [req-56395a8d-9f26-4dcd-a109-a5723ac7fa08 req-2737c4b4-aca5-4770-b71f-5661d0b0b427 service nova] [instance: 4719e47d-dc12-4f9c-aff3-46b083bd7e48] Refreshing network info cache for port d1511a81-0fe4-44f7-b837-a2e080534be6 {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1904.305800] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-677042db-67a2-490e-9bd6-90a6772c4a22 tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] [instance: 4719e47d-dc12-4f9c-aff3-46b083bd7e48] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:3c:a8:38', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f67a2790-f2b0-4d03-b606-0bfaee7a4229', 'network-type': 
'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'd1511a81-0fe4-44f7-b837-a2e080534be6', 'vif_model': 'vmxnet3'}] {{(pid=62519) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1904.313637] env[62519]: DEBUG oslo.service.loopingcall [None req-677042db-67a2-490e-9bd6-90a6772c4a22 tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62519) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1904.314551] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4719e47d-dc12-4f9c-aff3-46b083bd7e48] Creating VM on the ESX host {{(pid=62519) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1904.314780] env[62519]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c284d2bb-2a24-42f9-8d0c-6c7d4294c953 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1904.338576] env[62519]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1904.338576] env[62519]: value = "task-1803255" [ 1904.338576] env[62519]: _type = "Task" [ 1904.338576] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1904.348116] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1803255, 'name': CreateVM_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1904.461175] env[62519]: DEBUG nova.scheduler.client.report [None req-d89ebd38-be78-4944-865d-e148c2f80f82 tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Updated inventory for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 with generation 152 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 1904.461522] env[62519]: DEBUG nova.compute.provider_tree [None req-d89ebd38-be78-4944-865d-e148c2f80f82 tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Updating resource provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 generation from 152 to 153 during operation: update_inventory {{(pid=62519) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1904.461756] env[62519]: DEBUG nova.compute.provider_tree [None req-d89ebd38-be78-4944-865d-e148c2f80f82 tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Updating inventory in ProviderTree for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) update_inventory 
/opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1904.527387] env[62519]: DEBUG oslo_concurrency.lockutils [None req-6c0949ca-4a7f-4f08-b64a-76d5d874c03b tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Acquiring lock "a0c60d49-83bb-434b-815c-c39e7493cbb7" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1904.527794] env[62519]: DEBUG oslo_concurrency.lockutils [None req-6c0949ca-4a7f-4f08-b64a-76d5d874c03b tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Lock "a0c60d49-83bb-434b-815c-c39e7493cbb7" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1904.528086] env[62519]: DEBUG oslo_concurrency.lockutils [None req-6c0949ca-4a7f-4f08-b64a-76d5d874c03b tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Acquiring lock "a0c60d49-83bb-434b-815c-c39e7493cbb7-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1904.528350] env[62519]: DEBUG oslo_concurrency.lockutils [None req-6c0949ca-4a7f-4f08-b64a-76d5d874c03b tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Lock "a0c60d49-83bb-434b-815c-c39e7493cbb7-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1904.528589] env[62519]: DEBUG oslo_concurrency.lockutils [None req-6c0949ca-4a7f-4f08-b64a-76d5d874c03b tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Lock "a0c60d49-83bb-434b-815c-c39e7493cbb7-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1904.530729] env[62519]: INFO nova.compute.manager [None req-6c0949ca-4a7f-4f08-b64a-76d5d874c03b tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] [instance: a0c60d49-83bb-434b-815c-c39e7493cbb7] Terminating instance [ 1904.849798] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1803255, 'name': CreateVM_Task, 'duration_secs': 0.372776} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1904.850186] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4719e47d-dc12-4f9c-aff3-46b083bd7e48] Created VM on the ESX host {{(pid=62519) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1904.850643] env[62519]: DEBUG oslo_concurrency.lockutils [None req-677042db-67a2-490e-9bd6-90a6772c4a22 tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1904.850820] env[62519]: DEBUG oslo_concurrency.lockutils [None req-677042db-67a2-490e-9bd6-90a6772c4a22 tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] Acquired lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1904.851197] env[62519]: DEBUG oslo_concurrency.lockutils [None req-677042db-67a2-490e-9bd6-90a6772c4a22 tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1904.851447] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3f7d77cc-3003-4ed3-8087-ae37fa90375e {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1904.858270] env[62519]: DEBUG oslo_vmware.api [None req-677042db-67a2-490e-9bd6-90a6772c4a22 tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] Waiting for the task: (returnval){ [ 1904.858270] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]525374ee-4ce3-63c1-aa6a-824fd82fa013" [ 1904.858270] env[62519]: _type = "Task" [ 1904.858270] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1904.866997] env[62519]: DEBUG oslo_vmware.api [None req-677042db-67a2-490e-9bd6-90a6772c4a22 tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]525374ee-4ce3-63c1-aa6a-824fd82fa013, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1904.883478] env[62519]: DEBUG nova.network.neutron [None req-90b160da-5ad5-46c7-9fd2-3632227a7909 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] [instance: 31db4b14-0ba3-4159-accc-31c21bd81322] Port 082d375d-5e85-4e5e-a40e-661c492b5f5d binding to destination host cpu-1 is already ACTIVE {{(pid=62519) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 1904.966952] env[62519]: DEBUG oslo_concurrency.lockutils [None req-d89ebd38-be78-4944-865d-e148c2f80f82 tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.769s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1904.968627] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2b0ebd10-a2b6-4e7f-853d-50efcc4a59c7 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 6.274s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1904.968802] env[62519]: DEBUG nova.objects.instance [None req-2b0ebd10-a2b6-4e7f-853d-50efcc4a59c7 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Lazy-loading 'resources' on Instance uuid 10bfd4ac-6f11-4c96-87a0-ce74bc1193c4 {{(pid=62519) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1904.994875] env[62519]: INFO nova.scheduler.client.report [None req-d89ebd38-be78-4944-865d-e148c2f80f82 tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Deleted allocations for instance 46b3a0fb-29f6-4b66-a091-2d125b69d109 [ 1905.035172] env[62519]: DEBUG nova.compute.manager [None req-6c0949ca-4a7f-4f08-b64a-76d5d874c03b tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] [instance: a0c60d49-83bb-434b-815c-c39e7493cbb7] Start destroying the instance on the hypervisor. 
{{(pid=62519) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3166}} [ 1905.035479] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-6c0949ca-4a7f-4f08-b64a-76d5d874c03b tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] [instance: a0c60d49-83bb-434b-815c-c39e7493cbb7] Destroying instance {{(pid=62519) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1905.036411] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c123dfb5-325d-4ef0-80a0-7ebc9257f65e {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1905.045313] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-6c0949ca-4a7f-4f08-b64a-76d5d874c03b tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] [instance: a0c60d49-83bb-434b-815c-c39e7493cbb7] Unregistering the VM {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1905.046205] env[62519]: DEBUG nova.network.neutron [req-56395a8d-9f26-4dcd-a109-a5723ac7fa08 req-2737c4b4-aca5-4770-b71f-5661d0b0b427 service nova] [instance: 4719e47d-dc12-4f9c-aff3-46b083bd7e48] Updated VIF entry in instance network info cache for port d1511a81-0fe4-44f7-b837-a2e080534be6. {{(pid=62519) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1905.046531] env[62519]: DEBUG nova.network.neutron [req-56395a8d-9f26-4dcd-a109-a5723ac7fa08 req-2737c4b4-aca5-4770-b71f-5661d0b0b427 service nova] [instance: 4719e47d-dc12-4f9c-aff3-46b083bd7e48] Updating instance_info_cache with network_info: [{"id": "d1511a81-0fe4-44f7-b837-a2e080534be6", "address": "fa:16:3e:3c:a8:38", "network": {"id": "b50c1a89-c14c-481c-b5d9-748f18863a45", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-698120386-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9808ad7e38e34658aac06ebc932b0e32", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f67a2790-f2b0-4d03-b606-0bfaee7a4229", "external-id": "nsx-vlan-transportzone-187", "segmentation_id": 187, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd1511a81-0f", "ovs_interfaceid": "d1511a81-0fe4-44f7-b837-a2e080534be6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1905.048555] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ced99b88-705a-4bf3-878f-dc0882b8db4e {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1905.141915] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-6c0949ca-4a7f-4f08-b64a-76d5d874c03b tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] [instance: a0c60d49-83bb-434b-815c-c39e7493cbb7] Unregistered the VM {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 
1905.142174] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-6c0949ca-4a7f-4f08-b64a-76d5d874c03b tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] [instance: a0c60d49-83bb-434b-815c-c39e7493cbb7] Deleting contents of the VM from datastore datastore1 {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1905.142367] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-6c0949ca-4a7f-4f08-b64a-76d5d874c03b tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Deleting the datastore file [datastore1] a0c60d49-83bb-434b-815c-c39e7493cbb7 {{(pid=62519) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1905.142631] env[62519]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-575a0d55-eb86-4095-9a89-0df0e54d69e8 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1905.151402] env[62519]: DEBUG oslo_vmware.api [None req-6c0949ca-4a7f-4f08-b64a-76d5d874c03b tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Waiting for the task: (returnval){ [ 1905.151402] env[62519]: value = "task-1803257" [ 1905.151402] env[62519]: _type = "Task" [ 1905.151402] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1905.160981] env[62519]: DEBUG oslo_vmware.api [None req-6c0949ca-4a7f-4f08-b64a-76d5d874c03b tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Task: {'id': task-1803257, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1905.369515] env[62519]: DEBUG oslo_vmware.api [None req-677042db-67a2-490e-9bd6-90a6772c4a22 tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]525374ee-4ce3-63c1-aa6a-824fd82fa013, 'name': SearchDatastore_Task, 'duration_secs': 0.009546} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1905.369791] env[62519]: DEBUG oslo_concurrency.lockutils [None req-677042db-67a2-490e-9bd6-90a6772c4a22 tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] Releasing lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1905.370122] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-677042db-67a2-490e-9bd6-90a6772c4a22 tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] [instance: 4719e47d-dc12-4f9c-aff3-46b083bd7e48] Processing image 15793716-f1d9-4a86-9030-717adf498693 {{(pid=62519) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1905.370406] env[62519]: DEBUG oslo_concurrency.lockutils [None req-677042db-67a2-490e-9bd6-90a6772c4a22 tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1905.370584] env[62519]: DEBUG oslo_concurrency.lockutils [None req-677042db-67a2-490e-9bd6-90a6772c4a22 tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] Acquired lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1905.370788] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-677042db-67a2-490e-9bd6-90a6772c4a22 tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62519) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1905.371097] env[62519]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-13179b6a-bc5e-4d94-beda-62fcabb127dd {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1905.380937] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-677042db-67a2-490e-9bd6-90a6772c4a22 tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62519) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1905.381177] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-677042db-67a2-490e-9bd6-90a6772c4a22 tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62519) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1905.381901] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-06b5eaf9-5925-42de-a06e-0889e6accfc3 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1905.390693] env[62519]: DEBUG oslo_vmware.api [None req-677042db-67a2-490e-9bd6-90a6772c4a22 tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] Waiting for the task: (returnval){ [ 1905.390693] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]52712486-b3de-4420-4dce-6b0913a5e5b1" [ 1905.390693] env[62519]: _type = "Task" [ 1905.390693] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1905.400086] env[62519]: DEBUG oslo_vmware.api [None req-677042db-67a2-490e-9bd6-90a6772c4a22 tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52712486-b3de-4420-4dce-6b0913a5e5b1, 'name': SearchDatastore_Task, 'duration_secs': 0.008714} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1905.400894] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-31e47a5b-2ae5-436e-85c5-bb3176b69cc2 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1905.406448] env[62519]: DEBUG oslo_vmware.api [None req-677042db-67a2-490e-9bd6-90a6772c4a22 tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] Waiting for the task: (returnval){ [ 1905.406448] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]520c904d-3901-c638-8ab8-8a7860165ea9" [ 1905.406448] env[62519]: _type = "Task" [ 1905.406448] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1905.415480] env[62519]: DEBUG oslo_vmware.api [None req-677042db-67a2-490e-9bd6-90a6772c4a22 tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]520c904d-3901-c638-8ab8-8a7860165ea9, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1905.446155] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-add5994b-a6c0-4415-90fa-8bf8d7f36850 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1905.478907] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-69e7dbb2-bece-4fab-a6ec-da81bb691da7 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] [instance: 4a0f7975-5a07-4593-ae71-cabebdefe0fe] Updating instance '4a0f7975-5a07-4593-ae71-cabebdefe0fe' progress to 0 {{(pid=62519) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1905.504031] env[62519]: DEBUG oslo_concurrency.lockutils [None req-d89ebd38-be78-4944-865d-e148c2f80f82 tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Lock "46b3a0fb-29f6-4b66-a091-2d125b69d109" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 13.253s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1905.551506] env[62519]: DEBUG oslo_concurrency.lockutils [req-56395a8d-9f26-4dcd-a109-a5723ac7fa08 req-2737c4b4-aca5-4770-b71f-5661d0b0b427 service nova] Releasing lock "refresh_cache-4719e47d-dc12-4f9c-aff3-46b083bd7e48" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1905.663255] env[62519]: DEBUG oslo_vmware.api [None req-6c0949ca-4a7f-4f08-b64a-76d5d874c03b tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Task: {'id': task-1803257, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.152921} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1905.663519] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-6c0949ca-4a7f-4f08-b64a-76d5d874c03b tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Deleted the datastore file {{(pid=62519) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1905.663708] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-6c0949ca-4a7f-4f08-b64a-76d5d874c03b tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] [instance: a0c60d49-83bb-434b-815c-c39e7493cbb7] Deleted contents of the VM from datastore datastore1 {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1905.663888] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-6c0949ca-4a7f-4f08-b64a-76d5d874c03b tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] [instance: a0c60d49-83bb-434b-815c-c39e7493cbb7] Instance destroyed {{(pid=62519) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1905.664071] env[62519]: INFO nova.compute.manager [None req-6c0949ca-4a7f-4f08-b64a-76d5d874c03b tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] [instance: a0c60d49-83bb-434b-815c-c39e7493cbb7] Took 0.63 seconds to destroy the instance on the hypervisor. 
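The records above and below (CreateVM_Task, DeleteDatastoreFile_Task, the SearchDatastore_Task lookups against the devstack-image-cache_base folder and, further on, CopyVirtualDisk_Task and ExtendVirtualDisk_Task) all follow the same oslo.vmware pattern: the driver invokes a vSphere method through the shared VMwareAPISession, gets back a Task managed-object reference, and wait_for_task polls it until completion, which is what produces the repeated "progress is 0%." and "completed successfully" lines. A minimal sketch of that pattern, assuming a reachable vCenter and purely placeholder values (the endpoint, credentials, vm_folder, res_pool and config_spec below are hypothetical and not taken from this log):

    from oslo_vmware import api as vmware_api

    # Placeholder connection details -- not the vCenter endpoint from this log.
    session = vmware_api.VMwareAPISession(
        'vcenter.example.test', 'user', 'secret',
        api_retry_count=10, task_poll_interval=0.5)

    # vm_folder and res_pool are assumed to be managed-object references and
    # config_spec a VirtualMachineConfigSpec, all obtained elsewhere.
    task = session.invoke_api(session.vim, 'CreateVM_Task',
                              vm_folder, config=config_spec, pool=res_pool)

    # Blocks while polling the task (the 'progress is N%' debug lines above),
    # returning the completed task info or raising if the task failed.
    task_info = session.wait_for_task(task)
    vm_ref = task_info.result  # moref of the created VM on success

The "oslo.vmware-<uuid>" opID attached to every "Invoking ..." line is generated per request under the session's default op_id_prefix, and the session object also serialises login/re-login, which is the "oslo_vmware_api_lock" seen when the driver first connects.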
[ 1905.664319] env[62519]: DEBUG oslo.service.loopingcall [None req-6c0949ca-4a7f-4f08-b64a-76d5d874c03b tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62519) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1905.664507] env[62519]: DEBUG nova.compute.manager [-] [instance: a0c60d49-83bb-434b-815c-c39e7493cbb7] Deallocating network for instance {{(pid=62519) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2297}} [ 1905.664597] env[62519]: DEBUG nova.network.neutron [-] [instance: a0c60d49-83bb-434b-815c-c39e7493cbb7] deallocate_for_instance() {{(pid=62519) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1905.705868] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab4d4ede-415b-4e1b-a660-4e0bb1016dcb {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1905.714301] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6376bc2-95a9-4d60-8c08-4cfadad7ca8d {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1905.747034] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9422cafb-920e-439d-af96-521b05e448d5 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1905.755532] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ab3a6d6-bf11-4b1c-822d-93ee78ef4365 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1905.771433] env[62519]: DEBUG nova.compute.provider_tree [None req-2b0ebd10-a2b6-4e7f-853d-50efcc4a59c7 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Inventory has not changed in ProviderTree for provider: f8ca0d98-9158-4b85-ae0e-b106f966dd44 {{(pid=62519) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1905.907201] env[62519]: DEBUG oslo_concurrency.lockutils [None req-90b160da-5ad5-46c7-9fd2-3632227a7909 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Acquiring lock "31db4b14-0ba3-4159-accc-31c21bd81322-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1905.907455] env[62519]: DEBUG oslo_concurrency.lockutils [None req-90b160da-5ad5-46c7-9fd2-3632227a7909 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Lock "31db4b14-0ba3-4159-accc-31c21bd81322-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1905.907598] env[62519]: DEBUG oslo_concurrency.lockutils [None req-90b160da-5ad5-46c7-9fd2-3632227a7909 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Lock "31db4b14-0ba3-4159-accc-31c21bd81322-events" "released" by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1905.921063] env[62519]: DEBUG oslo_vmware.api [None req-677042db-67a2-490e-9bd6-90a6772c4a22 tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]520c904d-3901-c638-8ab8-8a7860165ea9, 'name': SearchDatastore_Task, 'duration_secs': 0.009417} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1905.921601] env[62519]: DEBUG oslo_concurrency.lockutils [None req-677042db-67a2-490e-9bd6-90a6772c4a22 tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] Releasing lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1905.921851] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-677042db-67a2-490e-9bd6-90a6772c4a22 tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk to [datastore1] 4719e47d-dc12-4f9c-aff3-46b083bd7e48/4719e47d-dc12-4f9c-aff3-46b083bd7e48.vmdk {{(pid=62519) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1905.922144] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-620b6a78-997e-4219-ba2f-4df720cbee09 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1905.930597] env[62519]: DEBUG oslo_vmware.api [None req-677042db-67a2-490e-9bd6-90a6772c4a22 tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] Waiting for the task: (returnval){ [ 1905.930597] env[62519]: value = "task-1803258" [ 1905.930597] env[62519]: _type = "Task" [ 1905.930597] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1905.940302] env[62519]: DEBUG oslo_vmware.api [None req-677042db-67a2-490e-9bd6-90a6772c4a22 tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] Task: {'id': task-1803258, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1905.979441] env[62519]: DEBUG nova.compute.manager [req-a1cdc8c9-18a0-49de-ba83-bec8a6e2d522 req-5b316e15-f637-4136-9256-7cc265600a14 service nova] [instance: a0c60d49-83bb-434b-815c-c39e7493cbb7] Received event network-vif-deleted-421b10d0-d0fc-47f6-b77d-a123639b1c45 {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1905.979441] env[62519]: INFO nova.compute.manager [req-a1cdc8c9-18a0-49de-ba83-bec8a6e2d522 req-5b316e15-f637-4136-9256-7cc265600a14 service nova] [instance: a0c60d49-83bb-434b-815c-c39e7493cbb7] Neutron deleted interface 421b10d0-d0fc-47f6-b77d-a123639b1c45; detaching it from the instance and deleting it from the info cache [ 1905.979720] env[62519]: DEBUG nova.network.neutron [req-a1cdc8c9-18a0-49de-ba83-bec8a6e2d522 req-5b316e15-f637-4136-9256-7cc265600a14 service nova] [instance: a0c60d49-83bb-434b-815c-c39e7493cbb7] Updating instance_info_cache with network_info: [] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1905.988910] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-69e7dbb2-bece-4fab-a6ec-da81bb691da7 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] [instance: 4a0f7975-5a07-4593-ae71-cabebdefe0fe] Powering off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1905.990026] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-0ed24c8c-86a4-41f3-bfd3-06c70fd9b501 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1905.999933] env[62519]: DEBUG oslo_vmware.api [None req-69e7dbb2-bece-4fab-a6ec-da81bb691da7 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Waiting for the task: (returnval){ [ 1905.999933] env[62519]: value = "task-1803259" [ 1905.999933] env[62519]: _type = "Task" [ 1905.999933] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1906.010503] env[62519]: DEBUG oslo_vmware.api [None req-69e7dbb2-bece-4fab-a6ec-da81bb691da7 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Task: {'id': task-1803259, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1906.275067] env[62519]: DEBUG nova.scheduler.client.report [None req-2b0ebd10-a2b6-4e7f-853d-50efcc4a59c7 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Inventory has not changed for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1906.441863] env[62519]: DEBUG oslo_vmware.api [None req-677042db-67a2-490e-9bd6-90a6772c4a22 tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] Task: {'id': task-1803258, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.470026} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1906.441990] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-677042db-67a2-490e-9bd6-90a6772c4a22 tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk to [datastore1] 4719e47d-dc12-4f9c-aff3-46b083bd7e48/4719e47d-dc12-4f9c-aff3-46b083bd7e48.vmdk {{(pid=62519) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1906.442860] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-677042db-67a2-490e-9bd6-90a6772c4a22 tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] [instance: 4719e47d-dc12-4f9c-aff3-46b083bd7e48] Extending root virtual disk to 1048576 {{(pid=62519) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1906.442860] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-3488ce19-047c-42d4-ab7a-ffc77c451814 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1906.450139] env[62519]: DEBUG oslo_vmware.api [None req-677042db-67a2-490e-9bd6-90a6772c4a22 tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] Waiting for the task: (returnval){ [ 1906.450139] env[62519]: value = "task-1803260" [ 1906.450139] env[62519]: _type = "Task" [ 1906.450139] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1906.454185] env[62519]: DEBUG nova.network.neutron [-] [instance: a0c60d49-83bb-434b-815c-c39e7493cbb7] Updating instance_info_cache with network_info: [] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1906.462299] env[62519]: DEBUG oslo_vmware.api [None req-677042db-67a2-490e-9bd6-90a6772c4a22 tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] Task: {'id': task-1803260, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1906.483683] env[62519]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-6a10a8db-eacd-4bf5-a8ae-741f1de56064 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1906.494624] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c3b6b50-e96b-473f-88b8-0477bfed1345 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1906.517570] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-69e7dbb2-bece-4fab-a6ec-da81bb691da7 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] [instance: 4a0f7975-5a07-4593-ae71-cabebdefe0fe] VM already powered off {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1906.517786] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-69e7dbb2-bece-4fab-a6ec-da81bb691da7 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] [instance: 4a0f7975-5a07-4593-ae71-cabebdefe0fe] Updating instance '4a0f7975-5a07-4593-ae71-cabebdefe0fe' progress to 17 {{(pid=62519) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1906.535462] env[62519]: DEBUG nova.compute.manager [req-a1cdc8c9-18a0-49de-ba83-bec8a6e2d522 req-5b316e15-f637-4136-9256-7cc265600a14 service nova] [instance: a0c60d49-83bb-434b-815c-c39e7493cbb7] Detach interface failed, port_id=421b10d0-d0fc-47f6-b77d-a123639b1c45, reason: Instance a0c60d49-83bb-434b-815c-c39e7493cbb7 could not be found. {{(pid=62519) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11455}} [ 1906.780142] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2b0ebd10-a2b6-4e7f-853d-50efcc4a59c7 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.811s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1906.783029] env[62519]: DEBUG oslo_concurrency.lockutils [None req-ef66b8f4-b0e5-427c-9948-cfa08fc36f9f tempest-ServerShowV257Test-1662468933 tempest-ServerShowV257Test-1662468933-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 7.865s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1906.784064] env[62519]: INFO nova.compute.claims [None req-ef66b8f4-b0e5-427c-9948-cfa08fc36f9f tempest-ServerShowV257Test-1662468933 tempest-ServerShowV257Test-1662468933-project-member] [instance: 7bf9ee64-6c45-480e-959f-ff8395b7c446] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1906.797856] env[62519]: INFO nova.scheduler.client.report [None req-2b0ebd10-a2b6-4e7f-853d-50efcc4a59c7 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Deleted allocations for instance 10bfd4ac-6f11-4c96-87a0-ce74bc1193c4 [ 1906.943023] env[62519]: DEBUG oslo_concurrency.lockutils [None req-90b160da-5ad5-46c7-9fd2-3632227a7909 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Acquiring lock 
"refresh_cache-31db4b14-0ba3-4159-accc-31c21bd81322" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1906.943283] env[62519]: DEBUG oslo_concurrency.lockutils [None req-90b160da-5ad5-46c7-9fd2-3632227a7909 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Acquired lock "refresh_cache-31db4b14-0ba3-4159-accc-31c21bd81322" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1906.943571] env[62519]: DEBUG nova.network.neutron [None req-90b160da-5ad5-46c7-9fd2-3632227a7909 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] [instance: 31db4b14-0ba3-4159-accc-31c21bd81322] Building network info cache for instance {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1906.956397] env[62519]: INFO nova.compute.manager [-] [instance: a0c60d49-83bb-434b-815c-c39e7493cbb7] Took 1.29 seconds to deallocate network for instance. [ 1906.961473] env[62519]: DEBUG oslo_vmware.api [None req-677042db-67a2-490e-9bd6-90a6772c4a22 tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] Task: {'id': task-1803260, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.071458} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1906.964075] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-677042db-67a2-490e-9bd6-90a6772c4a22 tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] [instance: 4719e47d-dc12-4f9c-aff3-46b083bd7e48] Extended root virtual disk {{(pid=62519) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1906.964973] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63d82fb6-c429-4825-8527-3bb0973c00b8 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1906.988051] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-677042db-67a2-490e-9bd6-90a6772c4a22 tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] [instance: 4719e47d-dc12-4f9c-aff3-46b083bd7e48] Reconfiguring VM instance instance-0000006c to attach disk [datastore1] 4719e47d-dc12-4f9c-aff3-46b083bd7e48/4719e47d-dc12-4f9c-aff3-46b083bd7e48.vmdk or device None with type sparse {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1906.988369] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-932e88ff-3ba3-47ca-9b48-f337ae0ae52d {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1907.011193] env[62519]: DEBUG oslo_vmware.api [None req-677042db-67a2-490e-9bd6-90a6772c4a22 tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] Waiting for the task: (returnval){ [ 1907.011193] env[62519]: value = "task-1803261" [ 1907.011193] env[62519]: _type = "Task" [ 1907.011193] env[62519]: } to complete. 
{{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1907.024436] env[62519]: DEBUG nova.virt.hardware [None req-69e7dbb2-bece-4fab-a6ec-da81bb691da7 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T07:56:15Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=15793716-f1d9-4a86-9030-717adf498693,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1907.024674] env[62519]: DEBUG nova.virt.hardware [None req-69e7dbb2-bece-4fab-a6ec-da81bb691da7 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Flavor limits 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1907.024832] env[62519]: DEBUG nova.virt.hardware [None req-69e7dbb2-bece-4fab-a6ec-da81bb691da7 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Image limits 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1907.025037] env[62519]: DEBUG nova.virt.hardware [None req-69e7dbb2-bece-4fab-a6ec-da81bb691da7 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Flavor pref 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1907.025191] env[62519]: DEBUG nova.virt.hardware [None req-69e7dbb2-bece-4fab-a6ec-da81bb691da7 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Image pref 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1907.025353] env[62519]: DEBUG nova.virt.hardware [None req-69e7dbb2-bece-4fab-a6ec-da81bb691da7 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1907.025543] env[62519]: DEBUG nova.virt.hardware [None req-69e7dbb2-bece-4fab-a6ec-da81bb691da7 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1907.025726] env[62519]: DEBUG nova.virt.hardware [None req-69e7dbb2-bece-4fab-a6ec-da81bb691da7 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62519) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1907.025864] env[62519]: DEBUG nova.virt.hardware [None req-69e7dbb2-bece-4fab-a6ec-da81bb691da7 tempest-ServerActionsTestOtherB-1162239603 
tempest-ServerActionsTestOtherB-1162239603-project-member] Got 1 possible topologies {{(pid=62519) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1907.026047] env[62519]: DEBUG nova.virt.hardware [None req-69e7dbb2-bece-4fab-a6ec-da81bb691da7 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1907.026224] env[62519]: DEBUG nova.virt.hardware [None req-69e7dbb2-bece-4fab-a6ec-da81bb691da7 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1907.031542] env[62519]: DEBUG oslo_vmware.api [None req-677042db-67a2-490e-9bd6-90a6772c4a22 tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] Task: {'id': task-1803261, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1907.032052] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b46af8d5-909b-465b-a8bf-8868194d86ea {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1907.049214] env[62519]: DEBUG oslo_vmware.api [None req-69e7dbb2-bece-4fab-a6ec-da81bb691da7 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Waiting for the task: (returnval){ [ 1907.049214] env[62519]: value = "task-1803262" [ 1907.049214] env[62519]: _type = "Task" [ 1907.049214] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1907.059316] env[62519]: DEBUG oslo_vmware.api [None req-69e7dbb2-bece-4fab-a6ec-da81bb691da7 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Task: {'id': task-1803262, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1907.251900] env[62519]: DEBUG oslo_concurrency.lockutils [None req-f902135e-725a-40d2-a5a6-ad0367e185a5 tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Acquiring lock "8e77653a-2e04-4ed7-a419-289bd4b899d7" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1907.252111] env[62519]: DEBUG oslo_concurrency.lockutils [None req-f902135e-725a-40d2-a5a6-ad0367e185a5 tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Lock "8e77653a-2e04-4ed7-a419-289bd4b899d7" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1907.305902] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2b0ebd10-a2b6-4e7f-853d-50efcc4a59c7 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Lock "10bfd4ac-6f11-4c96-87a0-ce74bc1193c4" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 12.413s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1907.466101] env[62519]: DEBUG oslo_concurrency.lockutils [None req-6c0949ca-4a7f-4f08-b64a-76d5d874c03b tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1907.527993] env[62519]: DEBUG oslo_vmware.api [None req-677042db-67a2-490e-9bd6-90a6772c4a22 tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] Task: {'id': task-1803261, 'name': ReconfigVM_Task, 'duration_secs': 0.323805} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1907.527993] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-677042db-67a2-490e-9bd6-90a6772c4a22 tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] [instance: 4719e47d-dc12-4f9c-aff3-46b083bd7e48] Reconfigured VM instance instance-0000006c to attach disk [datastore1] 4719e47d-dc12-4f9c-aff3-46b083bd7e48/4719e47d-dc12-4f9c-aff3-46b083bd7e48.vmdk or device None with type sparse {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1907.528524] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-4a262016-29c4-4f2b-aaf5-6d8606b399fd {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1907.536206] env[62519]: DEBUG oslo_vmware.api [None req-677042db-67a2-490e-9bd6-90a6772c4a22 tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] Waiting for the task: (returnval){ [ 1907.536206] env[62519]: value = "task-1803263" [ 1907.536206] env[62519]: _type = "Task" [ 1907.536206] env[62519]: } to complete. 
{{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1907.544455] env[62519]: DEBUG oslo_vmware.api [None req-677042db-67a2-490e-9bd6-90a6772c4a22 tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] Task: {'id': task-1803263, 'name': Rename_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1907.559959] env[62519]: DEBUG oslo_vmware.api [None req-69e7dbb2-bece-4fab-a6ec-da81bb691da7 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Task: {'id': task-1803262, 'name': ReconfigVM_Task, 'duration_secs': 0.175185} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1907.560335] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-69e7dbb2-bece-4fab-a6ec-da81bb691da7 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] [instance: 4a0f7975-5a07-4593-ae71-cabebdefe0fe] Updating instance '4a0f7975-5a07-4593-ae71-cabebdefe0fe' progress to 33 {{(pid=62519) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1907.680160] env[62519]: DEBUG nova.network.neutron [None req-90b160da-5ad5-46c7-9fd2-3632227a7909 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] [instance: 31db4b14-0ba3-4159-accc-31c21bd81322] Updating instance_info_cache with network_info: [{"id": "082d375d-5e85-4e5e-a40e-661c492b5f5d", "address": "fa:16:3e:4a:40:42", "network": {"id": "ddacabe4-9723-4a0b-9642-4c3ca54cd05f", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1355796152-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.215", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "549cc35f5ff249f6bf22c67872883db0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bc1e16db-ad3b-4b7f-ab64-4609c87abac0", "external-id": "nsx-vlan-transportzone-500", "segmentation_id": 500, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap082d375d-5e", "ovs_interfaceid": "082d375d-5e85-4e5e-a40e-661c492b5f5d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1907.754676] env[62519]: DEBUG nova.compute.manager [None req-f902135e-725a-40d2-a5a6-ad0367e185a5 tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] [instance: 8e77653a-2e04-4ed7-a419-289bd4b899d7] Starting instance... 
{{(pid=62519) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2441}} [ 1907.991108] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bbd665fe-ac3f-4c95-b35f-166ca485063f {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1907.999181] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a4fb51a-928c-4e63-8d71-e18310ca6620 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1908.031862] env[62519]: DEBUG oslo_service.periodic_task [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62519) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1908.032472] env[62519]: DEBUG oslo_service.periodic_task [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=62519) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1908.035892] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13174975-9fee-4668-b790-3203d63aefe6 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1908.047039] env[62519]: DEBUG oslo_vmware.api [None req-677042db-67a2-490e-9bd6-90a6772c4a22 tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] Task: {'id': task-1803263, 'name': Rename_Task, 'duration_secs': 0.142479} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1908.049144] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-677042db-67a2-490e-9bd6-90a6772c4a22 tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] [instance: 4719e47d-dc12-4f9c-aff3-46b083bd7e48] Powering on the VM {{(pid=62519) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1908.049383] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-50ef7350-cd4a-40a2-b9d9-7732d03f8616 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1908.051788] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5fe4864b-8e43-4959-a185-aa0344456290 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1908.070697] env[62519]: DEBUG nova.virt.hardware [None req-69e7dbb2-bece-4fab-a6ec-da81bb691da7 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T07:56:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=15793716-f1d9-4a86-9030-717adf498693,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1908.070951] env[62519]: DEBUG nova.virt.hardware [None req-69e7dbb2-bece-4fab-a6ec-da81bb691da7 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Flavor limits 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1908.071131] env[62519]: DEBUG nova.virt.hardware [None req-69e7dbb2-bece-4fab-a6ec-da81bb691da7 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Image limits 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1908.071319] env[62519]: DEBUG nova.virt.hardware [None req-69e7dbb2-bece-4fab-a6ec-da81bb691da7 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Flavor pref 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1908.071464] env[62519]: DEBUG nova.virt.hardware [None req-69e7dbb2-bece-4fab-a6ec-da81bb691da7 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Image pref 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1908.071607] env[62519]: DEBUG nova.virt.hardware [None req-69e7dbb2-bece-4fab-a6ec-da81bb691da7 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 
1908.071804] env[62519]: DEBUG nova.virt.hardware [None req-69e7dbb2-bece-4fab-a6ec-da81bb691da7 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1908.071960] env[62519]: DEBUG nova.virt.hardware [None req-69e7dbb2-bece-4fab-a6ec-da81bb691da7 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62519) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1908.072169] env[62519]: DEBUG nova.virt.hardware [None req-69e7dbb2-bece-4fab-a6ec-da81bb691da7 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Got 1 possible topologies {{(pid=62519) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1908.072302] env[62519]: DEBUG nova.virt.hardware [None req-69e7dbb2-bece-4fab-a6ec-da81bb691da7 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1908.072475] env[62519]: DEBUG nova.virt.hardware [None req-69e7dbb2-bece-4fab-a6ec-da81bb691da7 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1908.077797] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-69e7dbb2-bece-4fab-a6ec-da81bb691da7 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] [instance: 4a0f7975-5a07-4593-ae71-cabebdefe0fe] Reconfiguring VM instance instance-00000056 to detach disk 2000 {{(pid=62519) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1908.078200] env[62519]: DEBUG nova.compute.provider_tree [None req-ef66b8f4-b0e5-427c-9948-cfa08fc36f9f tempest-ServerShowV257Test-1662468933 tempest-ServerShowV257Test-1662468933-project-member] Inventory has not changed in ProviderTree for provider: f8ca0d98-9158-4b85-ae0e-b106f966dd44 {{(pid=62519) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1908.080838] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c786ed11-1f21-4cab-bd6d-52c4a25a5c0c {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1908.098807] env[62519]: DEBUG oslo_vmware.api [None req-677042db-67a2-490e-9bd6-90a6772c4a22 tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] Waiting for the task: (returnval){ [ 1908.098807] env[62519]: value = "task-1803264" [ 1908.098807] env[62519]: _type = "Task" [ 1908.098807] env[62519]: } to complete. 
{{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1908.100405] env[62519]: DEBUG nova.scheduler.client.report [None req-ef66b8f4-b0e5-427c-9948-cfa08fc36f9f tempest-ServerShowV257Test-1662468933 tempest-ServerShowV257Test-1662468933-project-member] Inventory has not changed for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1908.111865] env[62519]: DEBUG oslo_vmware.api [None req-69e7dbb2-bece-4fab-a6ec-da81bb691da7 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Waiting for the task: (returnval){ [ 1908.111865] env[62519]: value = "task-1803265" [ 1908.111865] env[62519]: _type = "Task" [ 1908.111865] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1908.117127] env[62519]: DEBUG oslo_vmware.api [None req-677042db-67a2-490e-9bd6-90a6772c4a22 tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] Task: {'id': task-1803264, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1908.127769] env[62519]: DEBUG oslo_vmware.api [None req-69e7dbb2-bece-4fab-a6ec-da81bb691da7 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Task: {'id': task-1803265, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1908.182996] env[62519]: DEBUG oslo_concurrency.lockutils [None req-90b160da-5ad5-46c7-9fd2-3632227a7909 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Releasing lock "refresh_cache-31db4b14-0ba3-4159-accc-31c21bd81322" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1908.273984] env[62519]: DEBUG oslo_concurrency.lockutils [None req-f902135e-725a-40d2-a5a6-ad0367e185a5 tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1908.545208] env[62519]: DEBUG oslo_service.periodic_task [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62519) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1908.545208] env[62519]: DEBUG nova.compute.manager [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Starting heal instance info cache {{(pid=62519) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10418}} [ 1908.604665] env[62519]: DEBUG oslo_concurrency.lockutils [None req-ef66b8f4-b0e5-427c-9948-cfa08fc36f9f tempest-ServerShowV257Test-1662468933 tempest-ServerShowV257Test-1662468933-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 1.822s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1908.605219] env[62519]: DEBUG nova.compute.manager [None req-ef66b8f4-b0e5-427c-9948-cfa08fc36f9f tempest-ServerShowV257Test-1662468933 tempest-ServerShowV257Test-1662468933-project-member] [instance: 7bf9ee64-6c45-480e-959f-ff8395b7c446] Start building networks asynchronously for instance. {{(pid=62519) _build_resources /opt/stack/nova/nova/compute/manager.py:2838}} [ 1908.611625] env[62519]: DEBUG oslo_concurrency.lockutils [None req-6c0949ca-4a7f-4f08-b64a-76d5d874c03b tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.146s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1908.615022] env[62519]: DEBUG nova.objects.instance [None req-6c0949ca-4a7f-4f08-b64a-76d5d874c03b tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Lazy-loading 'resources' on Instance uuid a0c60d49-83bb-434b-815c-c39e7493cbb7 {{(pid=62519) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1908.621051] env[62519]: DEBUG oslo_vmware.api [None req-677042db-67a2-490e-9bd6-90a6772c4a22 tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] Task: {'id': task-1803264, 'name': PowerOnVM_Task, 'duration_secs': 0.462077} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1908.624831] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-677042db-67a2-490e-9bd6-90a6772c4a22 tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] [instance: 4719e47d-dc12-4f9c-aff3-46b083bd7e48] Powered on the VM {{(pid=62519) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1908.625054] env[62519]: INFO nova.compute.manager [None req-677042db-67a2-490e-9bd6-90a6772c4a22 tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] [instance: 4719e47d-dc12-4f9c-aff3-46b083bd7e48] Took 6.91 seconds to spawn the instance on the hypervisor. [ 1908.625233] env[62519]: DEBUG nova.compute.manager [None req-677042db-67a2-490e-9bd6-90a6772c4a22 tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] [instance: 4719e47d-dc12-4f9c-aff3-46b083bd7e48] Checking state {{(pid=62519) _get_power_state /opt/stack/nova/nova/compute/manager.py:1799}} [ 1908.626322] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60404017-c031-4a1b-a783-27888a0a3fcc {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1908.635235] env[62519]: DEBUG oslo_vmware.api [None req-69e7dbb2-bece-4fab-a6ec-da81bb691da7 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Task: {'id': task-1803265, 'name': ReconfigVM_Task, 'duration_secs': 0.179305} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1908.636821] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-69e7dbb2-bece-4fab-a6ec-da81bb691da7 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] [instance: 4a0f7975-5a07-4593-ae71-cabebdefe0fe] Reconfigured VM instance instance-00000056 to detach disk 2000 {{(pid=62519) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1908.640455] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b59a7a28-d39d-44e4-8af8-a97a2f63c954 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1908.665647] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-69e7dbb2-bece-4fab-a6ec-da81bb691da7 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] [instance: 4a0f7975-5a07-4593-ae71-cabebdefe0fe] Reconfiguring VM instance instance-00000056 to attach disk [datastore1] 4a0f7975-5a07-4593-ae71-cabebdefe0fe/4a0f7975-5a07-4593-ae71-cabebdefe0fe.vmdk or device None with type thin {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1908.666667] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-133174f7-604e-4607-84e4-07a65c8d9fc5 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1908.691169] env[62519]: DEBUG oslo_vmware.api [None req-69e7dbb2-bece-4fab-a6ec-da81bb691da7 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Waiting for the task: (returnval){ [ 
1908.691169] env[62519]: value = "task-1803266" [ 1908.691169] env[62519]: _type = "Task" [ 1908.691169] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1908.692550] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1ba93b5-cb27-4e53-92bc-3e321f3a8bb2 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1908.709657] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e6998eb-a1ad-4e03-9ecc-4c22e218e96f {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1908.712372] env[62519]: DEBUG oslo_vmware.api [None req-69e7dbb2-bece-4fab-a6ec-da81bb691da7 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Task: {'id': task-1803266, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1909.077024] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Acquiring lock "refresh_cache-8b20b91d-d4e7-4ec2-88ec-76b444a8d8a8" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1909.077309] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Acquired lock "refresh_cache-8b20b91d-d4e7-4ec2-88ec-76b444a8d8a8" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1909.077347] env[62519]: DEBUG nova.network.neutron [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] [instance: 8b20b91d-d4e7-4ec2-88ec-76b444a8d8a8] Forcefully refreshing network info cache for instance {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2061}} [ 1909.113472] env[62519]: DEBUG nova.compute.utils [None req-ef66b8f4-b0e5-427c-9948-cfa08fc36f9f tempest-ServerShowV257Test-1662468933 tempest-ServerShowV257Test-1662468933-project-member] Using /dev/sd instead of None {{(pid=62519) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1909.114810] env[62519]: DEBUG nova.compute.manager [None req-ef66b8f4-b0e5-427c-9948-cfa08fc36f9f tempest-ServerShowV257Test-1662468933 tempest-ServerShowV257Test-1662468933-project-member] [instance: 7bf9ee64-6c45-480e-959f-ff8395b7c446] Not allocating networking since 'none' was specified. {{(pid=62519) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1985}} [ 1909.152343] env[62519]: INFO nova.compute.manager [None req-677042db-67a2-490e-9bd6-90a6772c4a22 tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] [instance: 4719e47d-dc12-4f9c-aff3-46b083bd7e48] Took 14.66 seconds to build instance. [ 1909.201472] env[62519]: DEBUG oslo_vmware.api [None req-69e7dbb2-bece-4fab-a6ec-da81bb691da7 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Task: {'id': task-1803266, 'name': ReconfigVM_Task, 'duration_secs': 0.27076} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1909.203972] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-69e7dbb2-bece-4fab-a6ec-da81bb691da7 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] [instance: 4a0f7975-5a07-4593-ae71-cabebdefe0fe] Reconfigured VM instance instance-00000056 to attach disk [datastore1] 4a0f7975-5a07-4593-ae71-cabebdefe0fe/4a0f7975-5a07-4593-ae71-cabebdefe0fe.vmdk or device None with type thin {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1909.204302] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-69e7dbb2-bece-4fab-a6ec-da81bb691da7 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] [instance: 4a0f7975-5a07-4593-ae71-cabebdefe0fe] Updating instance '4a0f7975-5a07-4593-ae71-cabebdefe0fe' progress to 50 {{(pid=62519) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1909.348240] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aeb78c3f-9c46-4ec2-b2b1-75e88957e850 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1909.356610] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6e7ea58-3da7-4000-bb05-5de37ab8baac {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1909.390647] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-85146142-b362-4b94-854a-078a78e19bde {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1909.398536] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5dee0800-b2bf-485f-b49c-0bd23afa62c0 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1909.412187] env[62519]: DEBUG nova.compute.provider_tree [None req-6c0949ca-4a7f-4f08-b64a-76d5d874c03b tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Inventory has not changed in ProviderTree for provider: f8ca0d98-9158-4b85-ae0e-b106f966dd44 {{(pid=62519) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1909.616184] env[62519]: DEBUG nova.compute.manager [None req-ef66b8f4-b0e5-427c-9948-cfa08fc36f9f tempest-ServerShowV257Test-1662468933 tempest-ServerShowV257Test-1662468933-project-member] [instance: 7bf9ee64-6c45-480e-959f-ff8395b7c446] Start building block device mappings for instance. 
{{(pid=62519) _build_resources /opt/stack/nova/nova/compute/manager.py:2873}} [ 1909.715494] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89c8d66f-d264-4edc-b45b-a02d499725d8 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1909.734908] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cfc63b7c-40b0-49d5-b9ef-5cc0fe9c99dc {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1909.758503] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-69e7dbb2-bece-4fab-a6ec-da81bb691da7 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] [instance: 4a0f7975-5a07-4593-ae71-cabebdefe0fe] Updating instance '4a0f7975-5a07-4593-ae71-cabebdefe0fe' progress to 67 {{(pid=62519) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1909.817965] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36162b8f-255a-4421-81fa-697cb5675400 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1909.838639] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3eeeeb6-b822-4349-8912-c55ac80f3d48 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1909.844381] env[62519]: DEBUG oslo_concurrency.lockutils [None req-a0cd48f1-0cb8-4707-94a8-39a1a149cb2f tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Acquiring lock "c884a374-ffb8-48db-97bb-d64a687694d5" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1909.844622] env[62519]: DEBUG oslo_concurrency.lockutils [None req-a0cd48f1-0cb8-4707-94a8-39a1a149cb2f tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Lock "c884a374-ffb8-48db-97bb-d64a687694d5" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1909.850426] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-90b160da-5ad5-46c7-9fd2-3632227a7909 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] [instance: 31db4b14-0ba3-4159-accc-31c21bd81322] Updating instance '31db4b14-0ba3-4159-accc-31c21bd81322' progress to 83 {{(pid=62519) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1909.915561] env[62519]: DEBUG nova.scheduler.client.report [None req-6c0949ca-4a7f-4f08-b64a-76d5d874c03b tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Inventory has not changed for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 
'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1910.174481] env[62519]: DEBUG oslo_concurrency.lockutils [None req-7a2cdfee-09b6-4a59-a4de-69719ec50101 tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] Acquiring lock "4719e47d-dc12-4f9c-aff3-46b083bd7e48" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1910.319284] env[62519]: DEBUG nova.network.neutron [None req-69e7dbb2-bece-4fab-a6ec-da81bb691da7 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] [instance: 4a0f7975-5a07-4593-ae71-cabebdefe0fe] Port f8b7229e-2b40-4a1b-9e16-0d01dbaa52f8 binding to destination host cpu-1 is already ACTIVE {{(pid=62519) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 1910.329959] env[62519]: DEBUG nova.network.neutron [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] [instance: 8b20b91d-d4e7-4ec2-88ec-76b444a8d8a8] Updating instance_info_cache with network_info: [{"id": "c3e2a054-4826-4bd6-8c9e-74005e7912e4", "address": "fa:16:3e:5b:29:03", "network": {"id": "40fc0e4f-fe1a-4b8c-ace4-4612cfc1a8fc", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-364463489-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.170", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "12977ed65a1b410a987b049e9d1dce3e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8e028024-a9c1-4cae-8849-ea770a7ae0e4", "external-id": "nsx-vlan-transportzone-919", "segmentation_id": 919, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc3e2a054-48", "ovs_interfaceid": "c3e2a054-4826-4bd6-8c9e-74005e7912e4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1910.346992] env[62519]: DEBUG nova.compute.manager [None req-a0cd48f1-0cb8-4707-94a8-39a1a149cb2f tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] [instance: c884a374-ffb8-48db-97bb-d64a687694d5] Starting instance... 
{{(pid=62519) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2441}} [ 1910.356698] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-90b160da-5ad5-46c7-9fd2-3632227a7909 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] [instance: 31db4b14-0ba3-4159-accc-31c21bd81322] Powering on the VM {{(pid=62519) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1910.356986] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-12898226-6741-4ea2-a265-d5b2e59fe21c {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1910.366205] env[62519]: DEBUG oslo_vmware.api [None req-90b160da-5ad5-46c7-9fd2-3632227a7909 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Waiting for the task: (returnval){ [ 1910.366205] env[62519]: value = "task-1803267" [ 1910.366205] env[62519]: _type = "Task" [ 1910.366205] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1910.374720] env[62519]: DEBUG oslo_vmware.api [None req-90b160da-5ad5-46c7-9fd2-3632227a7909 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Task: {'id': task-1803267, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1910.422636] env[62519]: DEBUG oslo_concurrency.lockutils [None req-6c0949ca-4a7f-4f08-b64a-76d5d874c03b tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.810s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1910.424649] env[62519]: DEBUG oslo_concurrency.lockutils [None req-f902135e-725a-40d2-a5a6-ad0367e185a5 tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 2.151s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1910.426224] env[62519]: INFO nova.compute.claims [None req-f902135e-725a-40d2-a5a6-ad0367e185a5 tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] [instance: 8e77653a-2e04-4ed7-a419-289bd4b899d7] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1910.443275] env[62519]: INFO nova.scheduler.client.report [None req-6c0949ca-4a7f-4f08-b64a-76d5d874c03b tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Deleted allocations for instance a0c60d49-83bb-434b-815c-c39e7493cbb7 [ 1910.626979] env[62519]: DEBUG nova.compute.manager [None req-ef66b8f4-b0e5-427c-9948-cfa08fc36f9f tempest-ServerShowV257Test-1662468933 tempest-ServerShowV257Test-1662468933-project-member] [instance: 7bf9ee64-6c45-480e-959f-ff8395b7c446] Start spawning the instance on the hypervisor. 
{{(pid=62519) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2647}} [ 1910.653709] env[62519]: DEBUG nova.virt.hardware [None req-ef66b8f4-b0e5-427c-9948-cfa08fc36f9f tempest-ServerShowV257Test-1662468933 tempest-ServerShowV257Test-1662468933-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T07:56:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T07:55:55Z,direct_url=,disk_format='vmdk',id=15793716-f1d9-4a86-9030-717adf498693,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4069337684d348e0a1ab4eb6a1a2b14d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T07:55:56Z,virtual_size=,visibility=), allow threads: False {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1910.653993] env[62519]: DEBUG nova.virt.hardware [None req-ef66b8f4-b0e5-427c-9948-cfa08fc36f9f tempest-ServerShowV257Test-1662468933 tempest-ServerShowV257Test-1662468933-project-member] Flavor limits 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1910.654177] env[62519]: DEBUG nova.virt.hardware [None req-ef66b8f4-b0e5-427c-9948-cfa08fc36f9f tempest-ServerShowV257Test-1662468933 tempest-ServerShowV257Test-1662468933-project-member] Image limits 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1910.654366] env[62519]: DEBUG nova.virt.hardware [None req-ef66b8f4-b0e5-427c-9948-cfa08fc36f9f tempest-ServerShowV257Test-1662468933 tempest-ServerShowV257Test-1662468933-project-member] Flavor pref 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1910.654515] env[62519]: DEBUG nova.virt.hardware [None req-ef66b8f4-b0e5-427c-9948-cfa08fc36f9f tempest-ServerShowV257Test-1662468933 tempest-ServerShowV257Test-1662468933-project-member] Image pref 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1910.654664] env[62519]: DEBUG nova.virt.hardware [None req-ef66b8f4-b0e5-427c-9948-cfa08fc36f9f tempest-ServerShowV257Test-1662468933 tempest-ServerShowV257Test-1662468933-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1910.654873] env[62519]: DEBUG nova.virt.hardware [None req-ef66b8f4-b0e5-427c-9948-cfa08fc36f9f tempest-ServerShowV257Test-1662468933 tempest-ServerShowV257Test-1662468933-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1910.655075] env[62519]: DEBUG nova.virt.hardware [None req-ef66b8f4-b0e5-427c-9948-cfa08fc36f9f tempest-ServerShowV257Test-1662468933 tempest-ServerShowV257Test-1662468933-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62519) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1910.655305] env[62519]: DEBUG nova.virt.hardware [None req-ef66b8f4-b0e5-427c-9948-cfa08fc36f9f 
tempest-ServerShowV257Test-1662468933 tempest-ServerShowV257Test-1662468933-project-member] Got 1 possible topologies {{(pid=62519) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1910.655479] env[62519]: DEBUG nova.virt.hardware [None req-ef66b8f4-b0e5-427c-9948-cfa08fc36f9f tempest-ServerShowV257Test-1662468933 tempest-ServerShowV257Test-1662468933-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1910.655654] env[62519]: DEBUG nova.virt.hardware [None req-ef66b8f4-b0e5-427c-9948-cfa08fc36f9f tempest-ServerShowV257Test-1662468933 tempest-ServerShowV257Test-1662468933-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1910.656732] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa138bd8-c93f-47b1-9497-6b7691280f12 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1910.659984] env[62519]: DEBUG oslo_concurrency.lockutils [None req-677042db-67a2-490e-9bd6-90a6772c4a22 tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] Lock "4719e47d-dc12-4f9c-aff3-46b083bd7e48" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 17.175s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1910.660266] env[62519]: DEBUG oslo_concurrency.lockutils [None req-7a2cdfee-09b6-4a59-a4de-69719ec50101 tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] Lock "4719e47d-dc12-4f9c-aff3-46b083bd7e48" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.486s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1910.660483] env[62519]: DEBUG oslo_concurrency.lockutils [None req-7a2cdfee-09b6-4a59-a4de-69719ec50101 tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] Acquiring lock "4719e47d-dc12-4f9c-aff3-46b083bd7e48-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1910.660685] env[62519]: DEBUG oslo_concurrency.lockutils [None req-7a2cdfee-09b6-4a59-a4de-69719ec50101 tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] Lock "4719e47d-dc12-4f9c-aff3-46b083bd7e48-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1910.660853] env[62519]: DEBUG oslo_concurrency.lockutils [None req-7a2cdfee-09b6-4a59-a4de-69719ec50101 tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] Lock "4719e47d-dc12-4f9c-aff3-46b083bd7e48-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62519) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1910.662974] env[62519]: INFO nova.compute.manager [None req-7a2cdfee-09b6-4a59-a4de-69719ec50101 tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] [instance: 4719e47d-dc12-4f9c-aff3-46b083bd7e48] Terminating instance [ 1910.668625] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c328ad3-2182-436d-91af-216e91784d3a {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1910.684812] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-ef66b8f4-b0e5-427c-9948-cfa08fc36f9f tempest-ServerShowV257Test-1662468933 tempest-ServerShowV257Test-1662468933-project-member] [instance: 7bf9ee64-6c45-480e-959f-ff8395b7c446] Instance VIF info [] {{(pid=62519) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1910.690448] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-ef66b8f4-b0e5-427c-9948-cfa08fc36f9f tempest-ServerShowV257Test-1662468933 tempest-ServerShowV257Test-1662468933-project-member] Creating folder: Project (c26901fb473b4ea5b690c39212f5e4af). Parent ref: group-v373567. {{(pid=62519) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1910.691024] env[62519]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-bf507001-f49c-4a71-a3fe-1eba56ffe40c {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1910.703415] env[62519]: INFO nova.virt.vmwareapi.vm_util [None req-ef66b8f4-b0e5-427c-9948-cfa08fc36f9f tempest-ServerShowV257Test-1662468933 tempest-ServerShowV257Test-1662468933-project-member] Created folder: Project (c26901fb473b4ea5b690c39212f5e4af) in parent group-v373567. [ 1910.703624] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-ef66b8f4-b0e5-427c-9948-cfa08fc36f9f tempest-ServerShowV257Test-1662468933 tempest-ServerShowV257Test-1662468933-project-member] Creating folder: Instances. Parent ref: group-v373850. {{(pid=62519) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1910.703910] env[62519]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c5ec2c23-dc54-4d32-ad45-2ab49add6ec4 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1910.716543] env[62519]: INFO nova.virt.vmwareapi.vm_util [None req-ef66b8f4-b0e5-427c-9948-cfa08fc36f9f tempest-ServerShowV257Test-1662468933 tempest-ServerShowV257Test-1662468933-project-member] Created folder: Instances in parent group-v373850. [ 1910.716868] env[62519]: DEBUG oslo.service.loopingcall [None req-ef66b8f4-b0e5-427c-9948-cfa08fc36f9f tempest-ServerShowV257Test-1662468933 tempest-ServerShowV257Test-1662468933-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62519) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1910.717132] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7bf9ee64-6c45-480e-959f-ff8395b7c446] Creating VM on the ESX host {{(pid=62519) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1910.717380] env[62519]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c1f600b3-5ead-48d7-b92f-0c4cff2c35b3 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1910.737788] env[62519]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1910.737788] env[62519]: value = "task-1803270" [ 1910.737788] env[62519]: _type = "Task" [ 1910.737788] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1910.747472] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1803270, 'name': CreateVM_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1910.832567] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Releasing lock "refresh_cache-8b20b91d-d4e7-4ec2-88ec-76b444a8d8a8" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1910.832781] env[62519]: DEBUG nova.compute.manager [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] [instance: 8b20b91d-d4e7-4ec2-88ec-76b444a8d8a8] Updated the network info_cache for instance {{(pid=62519) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10489}} [ 1910.832980] env[62519]: DEBUG oslo_service.periodic_task [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62519) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1910.833199] env[62519]: DEBUG oslo_service.periodic_task [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62519) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1910.833367] env[62519]: DEBUG oslo_service.periodic_task [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62519) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1910.833519] env[62519]: DEBUG oslo_service.periodic_task [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62519) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1910.833679] env[62519]: DEBUG oslo_service.periodic_task [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62519) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1910.833825] env[62519]: DEBUG oslo_service.periodic_task [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62519) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1910.833954] env[62519]: DEBUG nova.compute.manager [None 
req-2e346fd1-4f93-4335-96df-208436219ecf None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=62519) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11037}} [ 1910.834112] env[62519]: DEBUG oslo_service.periodic_task [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Running periodic task ComputeManager.update_available_resource {{(pid=62519) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1910.872324] env[62519]: DEBUG oslo_concurrency.lockutils [None req-a0cd48f1-0cb8-4707-94a8-39a1a149cb2f tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1910.879095] env[62519]: DEBUG oslo_vmware.api [None req-90b160da-5ad5-46c7-9fd2-3632227a7909 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Task: {'id': task-1803267, 'name': PowerOnVM_Task, 'duration_secs': 0.39136} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1910.879719] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-90b160da-5ad5-46c7-9fd2-3632227a7909 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] [instance: 31db4b14-0ba3-4159-accc-31c21bd81322] Powered on the VM {{(pid=62519) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1910.879955] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-90b160da-5ad5-46c7-9fd2-3632227a7909 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] [instance: 31db4b14-0ba3-4159-accc-31c21bd81322] Updating instance '31db4b14-0ba3-4159-accc-31c21bd81322' progress to 100 {{(pid=62519) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1910.952866] env[62519]: DEBUG oslo_concurrency.lockutils [None req-6c0949ca-4a7f-4f08-b64a-76d5d874c03b tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Lock "a0c60d49-83bb-434b-815c-c39e7493cbb7" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.425s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1911.168443] env[62519]: DEBUG nova.compute.manager [None req-7a2cdfee-09b6-4a59-a4de-69719ec50101 tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] [instance: 4719e47d-dc12-4f9c-aff3-46b083bd7e48] Start destroying the instance on the hypervisor. 
{{(pid=62519) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3166}} [ 1911.168625] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-7a2cdfee-09b6-4a59-a4de-69719ec50101 tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] [instance: 4719e47d-dc12-4f9c-aff3-46b083bd7e48] Destroying instance {{(pid=62519) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1911.169778] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea63110e-914c-479c-b2e4-4638878a9f56 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1911.179847] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-7a2cdfee-09b6-4a59-a4de-69719ec50101 tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] [instance: 4719e47d-dc12-4f9c-aff3-46b083bd7e48] Powering off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1911.180176] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-371f4f2c-a108-476d-9ebf-9c8d5789ad88 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1911.188542] env[62519]: DEBUG oslo_vmware.api [None req-7a2cdfee-09b6-4a59-a4de-69719ec50101 tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] Waiting for the task: (returnval){ [ 1911.188542] env[62519]: value = "task-1803271" [ 1911.188542] env[62519]: _type = "Task" [ 1911.188542] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1911.197312] env[62519]: DEBUG oslo_vmware.api [None req-7a2cdfee-09b6-4a59-a4de-69719ec50101 tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] Task: {'id': task-1803271, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1911.248278] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1803270, 'name': CreateVM_Task, 'duration_secs': 0.280071} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1911.248474] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7bf9ee64-6c45-480e-959f-ff8395b7c446] Created VM on the ESX host {{(pid=62519) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1911.248938] env[62519]: DEBUG oslo_concurrency.lockutils [None req-ef66b8f4-b0e5-427c-9948-cfa08fc36f9f tempest-ServerShowV257Test-1662468933 tempest-ServerShowV257Test-1662468933-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1911.249137] env[62519]: DEBUG oslo_concurrency.lockutils [None req-ef66b8f4-b0e5-427c-9948-cfa08fc36f9f tempest-ServerShowV257Test-1662468933 tempest-ServerShowV257Test-1662468933-project-member] Acquired lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1911.249482] env[62519]: DEBUG oslo_concurrency.lockutils [None req-ef66b8f4-b0e5-427c-9948-cfa08fc36f9f tempest-ServerShowV257Test-1662468933 tempest-ServerShowV257Test-1662468933-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1911.249756] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bf470781-e132-43d1-ac51-dec2d73c836c {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1911.255038] env[62519]: DEBUG oslo_vmware.api [None req-ef66b8f4-b0e5-427c-9948-cfa08fc36f9f tempest-ServerShowV257Test-1662468933 tempest-ServerShowV257Test-1662468933-project-member] Waiting for the task: (returnval){ [ 1911.255038] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]52e007db-f0af-07ae-33f5-8c82da44091e" [ 1911.255038] env[62519]: _type = "Task" [ 1911.255038] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1911.264210] env[62519]: DEBUG oslo_vmware.api [None req-ef66b8f4-b0e5-427c-9948-cfa08fc36f9f tempest-ServerShowV257Test-1662468933 tempest-ServerShowV257Test-1662468933-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52e007db-f0af-07ae-33f5-8c82da44091e, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1911.338690] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1911.345786] env[62519]: DEBUG oslo_concurrency.lockutils [None req-69e7dbb2-bece-4fab-a6ec-da81bb691da7 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Acquiring lock "4a0f7975-5a07-4593-ae71-cabebdefe0fe-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1911.346110] env[62519]: DEBUG oslo_concurrency.lockutils [None req-69e7dbb2-bece-4fab-a6ec-da81bb691da7 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Lock "4a0f7975-5a07-4593-ae71-cabebdefe0fe-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1911.346339] env[62519]: DEBUG oslo_concurrency.lockutils [None req-69e7dbb2-bece-4fab-a6ec-da81bb691da7 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Lock "4a0f7975-5a07-4593-ae71-cabebdefe0fe-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1911.624059] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa778ca3-64c2-4f84-8a3b-3579c85d12ee {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1911.632136] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf62b279-26ad-4403-88d4-391ae0e3165c {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1911.663635] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff9aff2a-9465-4326-9ac2-4d8aad90f068 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1911.667788] env[62519]: DEBUG oslo_concurrency.lockutils [None req-815ba740-1492-49ab-b27d-469d9061ef91 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Acquiring lock "417588f8-6288-4ecd-9764-dbc923549c5d" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1911.668031] env[62519]: DEBUG oslo_concurrency.lockutils [None req-815ba740-1492-49ab-b27d-469d9061ef91 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Lock "417588f8-6288-4ecd-9764-dbc923549c5d" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 
1911.668237] env[62519]: DEBUG oslo_concurrency.lockutils [None req-815ba740-1492-49ab-b27d-469d9061ef91 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Acquiring lock "417588f8-6288-4ecd-9764-dbc923549c5d-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1911.668426] env[62519]: DEBUG oslo_concurrency.lockutils [None req-815ba740-1492-49ab-b27d-469d9061ef91 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Lock "417588f8-6288-4ecd-9764-dbc923549c5d-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1911.668592] env[62519]: DEBUG oslo_concurrency.lockutils [None req-815ba740-1492-49ab-b27d-469d9061ef91 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Lock "417588f8-6288-4ecd-9764-dbc923549c5d-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1911.672370] env[62519]: INFO nova.compute.manager [None req-815ba740-1492-49ab-b27d-469d9061ef91 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] [instance: 417588f8-6288-4ecd-9764-dbc923549c5d] Terminating instance [ 1911.674462] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-793e3f36-37f3-459a-a423-2fdf4592698e {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1911.689007] env[62519]: DEBUG nova.compute.provider_tree [None req-f902135e-725a-40d2-a5a6-ad0367e185a5 tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Inventory has not changed in ProviderTree for provider: f8ca0d98-9158-4b85-ae0e-b106f966dd44 {{(pid=62519) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1911.699319] env[62519]: DEBUG oslo_vmware.api [None req-7a2cdfee-09b6-4a59-a4de-69719ec50101 tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] Task: {'id': task-1803271, 'name': PowerOffVM_Task, 'duration_secs': 0.172733} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1911.699569] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-7a2cdfee-09b6-4a59-a4de-69719ec50101 tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] [instance: 4719e47d-dc12-4f9c-aff3-46b083bd7e48] Powered off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1911.699735] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-7a2cdfee-09b6-4a59-a4de-69719ec50101 tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] [instance: 4719e47d-dc12-4f9c-aff3-46b083bd7e48] Unregistering the VM {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1911.700114] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-294a26ba-7f2b-4b00-9a73-6f3ca6e1ff6d {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1911.765237] env[62519]: DEBUG oslo_vmware.api [None req-ef66b8f4-b0e5-427c-9948-cfa08fc36f9f tempest-ServerShowV257Test-1662468933 tempest-ServerShowV257Test-1662468933-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52e007db-f0af-07ae-33f5-8c82da44091e, 'name': SearchDatastore_Task, 'duration_secs': 0.009195} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1911.766047] env[62519]: DEBUG oslo_concurrency.lockutils [None req-ef66b8f4-b0e5-427c-9948-cfa08fc36f9f tempest-ServerShowV257Test-1662468933 tempest-ServerShowV257Test-1662468933-project-member] Releasing lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1911.766047] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-ef66b8f4-b0e5-427c-9948-cfa08fc36f9f tempest-ServerShowV257Test-1662468933 tempest-ServerShowV257Test-1662468933-project-member] [instance: 7bf9ee64-6c45-480e-959f-ff8395b7c446] Processing image 15793716-f1d9-4a86-9030-717adf498693 {{(pid=62519) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1911.766047] env[62519]: DEBUG oslo_concurrency.lockutils [None req-ef66b8f4-b0e5-427c-9948-cfa08fc36f9f tempest-ServerShowV257Test-1662468933 tempest-ServerShowV257Test-1662468933-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1911.766253] env[62519]: DEBUG oslo_concurrency.lockutils [None req-ef66b8f4-b0e5-427c-9948-cfa08fc36f9f tempest-ServerShowV257Test-1662468933 tempest-ServerShowV257Test-1662468933-project-member] Acquired lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1911.766292] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-ef66b8f4-b0e5-427c-9948-cfa08fc36f9f tempest-ServerShowV257Test-1662468933 tempest-ServerShowV257Test-1662468933-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62519) mkdir 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1911.766822] env[62519]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-31e30d36-47e0-4442-8fba-1476ad0fb75f {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1911.776539] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-ef66b8f4-b0e5-427c-9948-cfa08fc36f9f tempest-ServerShowV257Test-1662468933 tempest-ServerShowV257Test-1662468933-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62519) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1911.776853] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-ef66b8f4-b0e5-427c-9948-cfa08fc36f9f tempest-ServerShowV257Test-1662468933 tempest-ServerShowV257Test-1662468933-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62519) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1911.777693] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b55e872f-464a-48b8-9abd-9e7421d7567d {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1911.784641] env[62519]: DEBUG oslo_vmware.api [None req-ef66b8f4-b0e5-427c-9948-cfa08fc36f9f tempest-ServerShowV257Test-1662468933 tempest-ServerShowV257Test-1662468933-project-member] Waiting for the task: (returnval){ [ 1911.784641] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]5218af01-a26c-ca39-0ceb-14ececdd05c4" [ 1911.784641] env[62519]: _type = "Task" [ 1911.784641] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1911.790189] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-7a2cdfee-09b6-4a59-a4de-69719ec50101 tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] [instance: 4719e47d-dc12-4f9c-aff3-46b083bd7e48] Unregistered the VM {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1911.790488] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-7a2cdfee-09b6-4a59-a4de-69719ec50101 tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] [instance: 4719e47d-dc12-4f9c-aff3-46b083bd7e48] Deleting contents of the VM from datastore datastore1 {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1911.790601] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-7a2cdfee-09b6-4a59-a4de-69719ec50101 tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] Deleting the datastore file [datastore1] 4719e47d-dc12-4f9c-aff3-46b083bd7e48 {{(pid=62519) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1911.793654] env[62519]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-535066d0-f2f4-482a-9c24-8be4b294d358 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1911.795387] env[62519]: DEBUG oslo_vmware.api [None req-ef66b8f4-b0e5-427c-9948-cfa08fc36f9f tempest-ServerShowV257Test-1662468933 tempest-ServerShowV257Test-1662468933-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]5218af01-a26c-ca39-0ceb-14ececdd05c4, 'name': 
SearchDatastore_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1911.800448] env[62519]: DEBUG oslo_vmware.api [None req-7a2cdfee-09b6-4a59-a4de-69719ec50101 tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] Waiting for the task: (returnval){ [ 1911.800448] env[62519]: value = "task-1803273" [ 1911.800448] env[62519]: _type = "Task" [ 1911.800448] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1911.808508] env[62519]: DEBUG oslo_vmware.api [None req-7a2cdfee-09b6-4a59-a4de-69719ec50101 tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] Task: {'id': task-1803273, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1912.180567] env[62519]: DEBUG nova.compute.manager [None req-815ba740-1492-49ab-b27d-469d9061ef91 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] [instance: 417588f8-6288-4ecd-9764-dbc923549c5d] Start destroying the instance on the hypervisor. {{(pid=62519) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3166}} [ 1912.180959] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-815ba740-1492-49ab-b27d-469d9061ef91 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] [instance: 417588f8-6288-4ecd-9764-dbc923549c5d] Destroying instance {{(pid=62519) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1912.181808] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57d03433-c822-4342-8e4b-8d2d95bd575f {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1912.190573] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-815ba740-1492-49ab-b27d-469d9061ef91 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] [instance: 417588f8-6288-4ecd-9764-dbc923549c5d] Powering off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1912.191526] env[62519]: DEBUG nova.scheduler.client.report [None req-f902135e-725a-40d2-a5a6-ad0367e185a5 tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Inventory has not changed for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1912.194648] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-7fe72472-2137-4d6d-8edd-a7fee2438430 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1912.202801] env[62519]: DEBUG oslo_vmware.api [None req-815ba740-1492-49ab-b27d-469d9061ef91 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Waiting for the 
task: (returnval){ [ 1912.202801] env[62519]: value = "task-1803274" [ 1912.202801] env[62519]: _type = "Task" [ 1912.202801] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1912.212370] env[62519]: DEBUG oslo_vmware.api [None req-815ba740-1492-49ab-b27d-469d9061ef91 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Task: {'id': task-1803274, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1912.300399] env[62519]: DEBUG oslo_vmware.api [None req-ef66b8f4-b0e5-427c-9948-cfa08fc36f9f tempest-ServerShowV257Test-1662468933 tempest-ServerShowV257Test-1662468933-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]5218af01-a26c-ca39-0ceb-14ececdd05c4, 'name': SearchDatastore_Task, 'duration_secs': 0.010883} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1912.301307] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6e995890-b66a-4bab-83e9-0e050100a51e {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1912.310104] env[62519]: DEBUG oslo_vmware.api [None req-ef66b8f4-b0e5-427c-9948-cfa08fc36f9f tempest-ServerShowV257Test-1662468933 tempest-ServerShowV257Test-1662468933-project-member] Waiting for the task: (returnval){ [ 1912.310104] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]52d39564-9b15-d038-3104-f8841ec4dcf1" [ 1912.310104] env[62519]: _type = "Task" [ 1912.310104] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1912.313166] env[62519]: DEBUG oslo_vmware.api [None req-7a2cdfee-09b6-4a59-a4de-69719ec50101 tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] Task: {'id': task-1803273, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.129956} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1912.316177] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-7a2cdfee-09b6-4a59-a4de-69719ec50101 tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] Deleted the datastore file {{(pid=62519) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1912.316440] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-7a2cdfee-09b6-4a59-a4de-69719ec50101 tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] [instance: 4719e47d-dc12-4f9c-aff3-46b083bd7e48] Deleted contents of the VM from datastore datastore1 {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1912.316499] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-7a2cdfee-09b6-4a59-a4de-69719ec50101 tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] [instance: 4719e47d-dc12-4f9c-aff3-46b083bd7e48] Instance destroyed {{(pid=62519) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1912.316640] env[62519]: INFO nova.compute.manager [None req-7a2cdfee-09b6-4a59-a4de-69719ec50101 tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] [instance: 4719e47d-dc12-4f9c-aff3-46b083bd7e48] Took 1.15 seconds to destroy the instance on the hypervisor. [ 1912.316878] env[62519]: DEBUG oslo.service.loopingcall [None req-7a2cdfee-09b6-4a59-a4de-69719ec50101 tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62519) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1912.317090] env[62519]: DEBUG nova.compute.manager [-] [instance: 4719e47d-dc12-4f9c-aff3-46b083bd7e48] Deallocating network for instance {{(pid=62519) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2297}} [ 1912.317186] env[62519]: DEBUG nova.network.neutron [-] [instance: 4719e47d-dc12-4f9c-aff3-46b083bd7e48] deallocate_for_instance() {{(pid=62519) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1912.325832] env[62519]: DEBUG oslo_vmware.api [None req-ef66b8f4-b0e5-427c-9948-cfa08fc36f9f tempest-ServerShowV257Test-1662468933 tempest-ServerShowV257Test-1662468933-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52d39564-9b15-d038-3104-f8841ec4dcf1, 'name': SearchDatastore_Task, 'duration_secs': 0.010188} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1912.326145] env[62519]: DEBUG oslo_concurrency.lockutils [None req-ef66b8f4-b0e5-427c-9948-cfa08fc36f9f tempest-ServerShowV257Test-1662468933 tempest-ServerShowV257Test-1662468933-project-member] Releasing lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1912.326426] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-ef66b8f4-b0e5-427c-9948-cfa08fc36f9f tempest-ServerShowV257Test-1662468933 tempest-ServerShowV257Test-1662468933-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk to [datastore1] 7bf9ee64-6c45-480e-959f-ff8395b7c446/7bf9ee64-6c45-480e-959f-ff8395b7c446.vmdk {{(pid=62519) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1912.326705] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-33e0f5e6-b3e6-46d9-893f-64eeb20d1ce2 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1912.334726] env[62519]: DEBUG oslo_vmware.api [None req-ef66b8f4-b0e5-427c-9948-cfa08fc36f9f tempest-ServerShowV257Test-1662468933 tempest-ServerShowV257Test-1662468933-project-member] Waiting for the task: (returnval){ [ 1912.334726] env[62519]: value = "task-1803275" [ 1912.334726] env[62519]: _type = "Task" [ 1912.334726] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1912.344996] env[62519]: DEBUG oslo_vmware.api [None req-ef66b8f4-b0e5-427c-9948-cfa08fc36f9f tempest-ServerShowV257Test-1662468933 tempest-ServerShowV257Test-1662468933-project-member] Task: {'id': task-1803275, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1912.393138] env[62519]: DEBUG oslo_concurrency.lockutils [None req-69e7dbb2-bece-4fab-a6ec-da81bb691da7 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Acquiring lock "refresh_cache-4a0f7975-5a07-4593-ae71-cabebdefe0fe" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1912.393395] env[62519]: DEBUG oslo_concurrency.lockutils [None req-69e7dbb2-bece-4fab-a6ec-da81bb691da7 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Acquired lock "refresh_cache-4a0f7975-5a07-4593-ae71-cabebdefe0fe" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1912.394482] env[62519]: DEBUG nova.network.neutron [None req-69e7dbb2-bece-4fab-a6ec-da81bb691da7 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] [instance: 4a0f7975-5a07-4593-ae71-cabebdefe0fe] Building network info cache for instance {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1912.650756] env[62519]: DEBUG nova.compute.manager [req-e1c0e782-de74-47fb-944b-67f0b19896e8 req-df1c1ce1-65ea-43cd-92ed-0035c5d16f2b service nova] [instance: 4719e47d-dc12-4f9c-aff3-46b083bd7e48] Received event network-vif-deleted-d1511a81-0fe4-44f7-b837-a2e080534be6 {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1912.651157] env[62519]: INFO nova.compute.manager [req-e1c0e782-de74-47fb-944b-67f0b19896e8 req-df1c1ce1-65ea-43cd-92ed-0035c5d16f2b service nova] [instance: 4719e47d-dc12-4f9c-aff3-46b083bd7e48] Neutron deleted interface d1511a81-0fe4-44f7-b837-a2e080534be6; detaching it from the instance and deleting it from the info cache [ 1912.651455] env[62519]: DEBUG nova.network.neutron [req-e1c0e782-de74-47fb-944b-67f0b19896e8 req-df1c1ce1-65ea-43cd-92ed-0035c5d16f2b service nova] [instance: 4719e47d-dc12-4f9c-aff3-46b083bd7e48] Updating instance_info_cache with network_info: [] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1912.699073] env[62519]: DEBUG oslo_concurrency.lockutils [None req-f902135e-725a-40d2-a5a6-ad0367e185a5 tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.274s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1912.699341] env[62519]: DEBUG nova.compute.manager [None req-f902135e-725a-40d2-a5a6-ad0367e185a5 tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] [instance: 8e77653a-2e04-4ed7-a419-289bd4b899d7] Start building networks asynchronously for instance. 
{{(pid=62519) _build_resources /opt/stack/nova/nova/compute/manager.py:2838}} [ 1912.702161] env[62519]: DEBUG oslo_concurrency.lockutils [None req-a0cd48f1-0cb8-4707-94a8-39a1a149cb2f tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 1.830s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1912.703695] env[62519]: INFO nova.compute.claims [None req-a0cd48f1-0cb8-4707-94a8-39a1a149cb2f tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] [instance: c884a374-ffb8-48db-97bb-d64a687694d5] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1912.717196] env[62519]: DEBUG oslo_vmware.api [None req-815ba740-1492-49ab-b27d-469d9061ef91 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Task: {'id': task-1803274, 'name': PowerOffVM_Task, 'duration_secs': 0.222426} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1912.717482] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-815ba740-1492-49ab-b27d-469d9061ef91 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] [instance: 417588f8-6288-4ecd-9764-dbc923549c5d] Powered off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1912.717653] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-815ba740-1492-49ab-b27d-469d9061ef91 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] [instance: 417588f8-6288-4ecd-9764-dbc923549c5d] Unregistering the VM {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1912.717950] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-0e79a221-feec-4808-a033-3700c377f4f9 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1912.845140] env[62519]: DEBUG oslo_vmware.api [None req-ef66b8f4-b0e5-427c-9948-cfa08fc36f9f tempest-ServerShowV257Test-1662468933 tempest-ServerShowV257Test-1662468933-project-member] Task: {'id': task-1803275, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.478961} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1912.845414] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-ef66b8f4-b0e5-427c-9948-cfa08fc36f9f tempest-ServerShowV257Test-1662468933 tempest-ServerShowV257Test-1662468933-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk to [datastore1] 7bf9ee64-6c45-480e-959f-ff8395b7c446/7bf9ee64-6c45-480e-959f-ff8395b7c446.vmdk {{(pid=62519) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1912.845664] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-ef66b8f4-b0e5-427c-9948-cfa08fc36f9f tempest-ServerShowV257Test-1662468933 tempest-ServerShowV257Test-1662468933-project-member] [instance: 7bf9ee64-6c45-480e-959f-ff8395b7c446] Extending root virtual disk to 1048576 {{(pid=62519) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1912.845936] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-56e64425-214f-4c2a-a62e-345017001320 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1912.853929] env[62519]: DEBUG oslo_vmware.api [None req-ef66b8f4-b0e5-427c-9948-cfa08fc36f9f tempest-ServerShowV257Test-1662468933 tempest-ServerShowV257Test-1662468933-project-member] Waiting for the task: (returnval){ [ 1912.853929] env[62519]: value = "task-1803277" [ 1912.853929] env[62519]: _type = "Task" [ 1912.853929] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1912.863253] env[62519]: DEBUG oslo_vmware.api [None req-ef66b8f4-b0e5-427c-9948-cfa08fc36f9f tempest-ServerShowV257Test-1662468933 tempest-ServerShowV257Test-1662468933-project-member] Task: {'id': task-1803277, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1912.911095] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-815ba740-1492-49ab-b27d-469d9061ef91 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] [instance: 417588f8-6288-4ecd-9764-dbc923549c5d] Unregistered the VM {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1912.911420] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-815ba740-1492-49ab-b27d-469d9061ef91 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] [instance: 417588f8-6288-4ecd-9764-dbc923549c5d] Deleting contents of the VM from datastore datastore1 {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1912.911626] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-815ba740-1492-49ab-b27d-469d9061ef91 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Deleting the datastore file [datastore1] 417588f8-6288-4ecd-9764-dbc923549c5d {{(pid=62519) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1912.911939] env[62519]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-327b6cee-cd99-4a97-a640-b1c6a32b9d7d {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1912.923169] env[62519]: DEBUG oslo_vmware.api [None req-815ba740-1492-49ab-b27d-469d9061ef91 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Waiting for the task: (returnval){ [ 1912.923169] env[62519]: value = "task-1803278" [ 1912.923169] env[62519]: _type = "Task" [ 1912.923169] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1912.932406] env[62519]: DEBUG oslo_vmware.api [None req-815ba740-1492-49ab-b27d-469d9061ef91 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Task: {'id': task-1803278, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1912.936559] env[62519]: DEBUG oslo_concurrency.lockutils [None req-808b4306-721b-42f7-8b5b-0d7eb6f573b3 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Acquiring lock "31db4b14-0ba3-4159-accc-31c21bd81322" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1912.936804] env[62519]: DEBUG oslo_concurrency.lockutils [None req-808b4306-721b-42f7-8b5b-0d7eb6f573b3 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Lock "31db4b14-0ba3-4159-accc-31c21bd81322" acquired by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1912.937023] env[62519]: DEBUG nova.compute.manager [None req-808b4306-721b-42f7-8b5b-0d7eb6f573b3 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] [instance: 31db4b14-0ba3-4159-accc-31c21bd81322] Going to confirm migration 6 {{(pid=62519) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:5235}} [ 1913.127462] env[62519]: DEBUG nova.network.neutron [-] [instance: 4719e47d-dc12-4f9c-aff3-46b083bd7e48] Updating instance_info_cache with network_info: [] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1913.135740] env[62519]: DEBUG nova.network.neutron [None req-69e7dbb2-bece-4fab-a6ec-da81bb691da7 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] [instance: 4a0f7975-5a07-4593-ae71-cabebdefe0fe] Updating instance_info_cache with network_info: [{"id": "f8b7229e-2b40-4a1b-9e16-0d01dbaa52f8", "address": "fa:16:3e:6c:b5:c8", "network": {"id": "32c7792c-9705-4830-bdbc-4c4159d566f0", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-374750788-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.241", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d73e71476254453fb23164dce09c6d41", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a0a76279-3c11-4bef-b124-2a2ee13fa377", "external-id": "nsx-vlan-transportzone-738", "segmentation_id": 738, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf8b7229e-2b", "ovs_interfaceid": "f8b7229e-2b40-4a1b-9e16-0d01dbaa52f8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1913.154228] env[62519]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-97839450-df26-4aa8-a0c2-1c148638a44a {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1913.164409] env[62519]: 
DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2830376-4fa3-4281-832e-52d1031ec391 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1913.198354] env[62519]: DEBUG nova.compute.manager [req-e1c0e782-de74-47fb-944b-67f0b19896e8 req-df1c1ce1-65ea-43cd-92ed-0035c5d16f2b service nova] [instance: 4719e47d-dc12-4f9c-aff3-46b083bd7e48] Detach interface failed, port_id=d1511a81-0fe4-44f7-b837-a2e080534be6, reason: Instance 4719e47d-dc12-4f9c-aff3-46b083bd7e48 could not be found. {{(pid=62519) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11455}} [ 1913.210983] env[62519]: DEBUG nova.compute.utils [None req-f902135e-725a-40d2-a5a6-ad0367e185a5 tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Using /dev/sd instead of None {{(pid=62519) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1913.212515] env[62519]: DEBUG nova.compute.manager [None req-f902135e-725a-40d2-a5a6-ad0367e185a5 tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] [instance: 8e77653a-2e04-4ed7-a419-289bd4b899d7] Allocating IP information in the background. {{(pid=62519) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1989}} [ 1913.212515] env[62519]: DEBUG nova.network.neutron [None req-f902135e-725a-40d2-a5a6-ad0367e185a5 tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] [instance: 8e77653a-2e04-4ed7-a419-289bd4b899d7] allocate_for_instance() {{(pid=62519) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1913.255108] env[62519]: DEBUG nova.policy [None req-f902135e-725a-40d2-a5a6-ad0367e185a5 tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b2fdec6fcda84ddeaaa1ee4ba6a58258', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '17cd969b1e7d4bd795748560caf80077', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62519) authorize /opt/stack/nova/nova/policy.py:192}} [ 1913.365817] env[62519]: DEBUG oslo_vmware.api [None req-ef66b8f4-b0e5-427c-9948-cfa08fc36f9f tempest-ServerShowV257Test-1662468933 tempest-ServerShowV257Test-1662468933-project-member] Task: {'id': task-1803277, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.074455} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1913.366770] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-ef66b8f4-b0e5-427c-9948-cfa08fc36f9f tempest-ServerShowV257Test-1662468933 tempest-ServerShowV257Test-1662468933-project-member] [instance: 7bf9ee64-6c45-480e-959f-ff8395b7c446] Extended root virtual disk {{(pid=62519) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1913.367642] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6dff2f7-4038-46a9-afc0-5ca14909ea98 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1913.390080] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-ef66b8f4-b0e5-427c-9948-cfa08fc36f9f tempest-ServerShowV257Test-1662468933 tempest-ServerShowV257Test-1662468933-project-member] [instance: 7bf9ee64-6c45-480e-959f-ff8395b7c446] Reconfiguring VM instance instance-0000006d to attach disk [datastore1] 7bf9ee64-6c45-480e-959f-ff8395b7c446/7bf9ee64-6c45-480e-959f-ff8395b7c446.vmdk or device None with type sparse {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1913.390402] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a8ad4101-2439-4efa-8431-4113c0999816 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1913.412272] env[62519]: DEBUG oslo_vmware.api [None req-ef66b8f4-b0e5-427c-9948-cfa08fc36f9f tempest-ServerShowV257Test-1662468933 tempest-ServerShowV257Test-1662468933-project-member] Waiting for the task: (returnval){ [ 1913.412272] env[62519]: value = "task-1803279" [ 1913.412272] env[62519]: _type = "Task" [ 1913.412272] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1913.422192] env[62519]: DEBUG oslo_vmware.api [None req-ef66b8f4-b0e5-427c-9948-cfa08fc36f9f tempest-ServerShowV257Test-1662468933 tempest-ServerShowV257Test-1662468933-project-member] Task: {'id': task-1803279, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1913.433739] env[62519]: DEBUG oslo_vmware.api [None req-815ba740-1492-49ab-b27d-469d9061ef91 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Task: {'id': task-1803278, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.159003} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1913.433903] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-815ba740-1492-49ab-b27d-469d9061ef91 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Deleted the datastore file {{(pid=62519) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1913.434527] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-815ba740-1492-49ab-b27d-469d9061ef91 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] [instance: 417588f8-6288-4ecd-9764-dbc923549c5d] Deleted contents of the VM from datastore datastore1 {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1913.434527] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-815ba740-1492-49ab-b27d-469d9061ef91 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] [instance: 417588f8-6288-4ecd-9764-dbc923549c5d] Instance destroyed {{(pid=62519) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1913.434527] env[62519]: INFO nova.compute.manager [None req-815ba740-1492-49ab-b27d-469d9061ef91 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] [instance: 417588f8-6288-4ecd-9764-dbc923549c5d] Took 1.25 seconds to destroy the instance on the hypervisor. [ 1913.434786] env[62519]: DEBUG oslo.service.loopingcall [None req-815ba740-1492-49ab-b27d-469d9061ef91 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62519) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1913.434943] env[62519]: DEBUG nova.compute.manager [-] [instance: 417588f8-6288-4ecd-9764-dbc923549c5d] Deallocating network for instance {{(pid=62519) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2297}} [ 1913.435072] env[62519]: DEBUG nova.network.neutron [-] [instance: 417588f8-6288-4ecd-9764-dbc923549c5d] deallocate_for_instance() {{(pid=62519) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1913.581932] env[62519]: DEBUG oslo_concurrency.lockutils [None req-808b4306-721b-42f7-8b5b-0d7eb6f573b3 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Acquiring lock "refresh_cache-31db4b14-0ba3-4159-accc-31c21bd81322" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1913.582176] env[62519]: DEBUG oslo_concurrency.lockutils [None req-808b4306-721b-42f7-8b5b-0d7eb6f573b3 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Acquired lock "refresh_cache-31db4b14-0ba3-4159-accc-31c21bd81322" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1913.582371] env[62519]: DEBUG nova.network.neutron [None req-808b4306-721b-42f7-8b5b-0d7eb6f573b3 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] [instance: 31db4b14-0ba3-4159-accc-31c21bd81322] Building network info cache for instance {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1913.582584] env[62519]: DEBUG nova.objects.instance [None req-808b4306-721b-42f7-8b5b-0d7eb6f573b3 tempest-ServerActionsTestOtherA-311064147 
tempest-ServerActionsTestOtherA-311064147-project-member] Lazy-loading 'info_cache' on Instance uuid 31db4b14-0ba3-4159-accc-31c21bd81322 {{(pid=62519) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1913.630408] env[62519]: INFO nova.compute.manager [-] [instance: 4719e47d-dc12-4f9c-aff3-46b083bd7e48] Took 1.31 seconds to deallocate network for instance. [ 1913.640552] env[62519]: DEBUG oslo_concurrency.lockutils [None req-69e7dbb2-bece-4fab-a6ec-da81bb691da7 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Releasing lock "refresh_cache-4a0f7975-5a07-4593-ae71-cabebdefe0fe" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1913.678826] env[62519]: DEBUG nova.network.neutron [None req-f902135e-725a-40d2-a5a6-ad0367e185a5 tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] [instance: 8e77653a-2e04-4ed7-a419-289bd4b899d7] Successfully created port: 84761f23-74d8-4c26-8600-641c612981dc {{(pid=62519) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1913.718554] env[62519]: DEBUG nova.compute.manager [None req-f902135e-725a-40d2-a5a6-ad0367e185a5 tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] [instance: 8e77653a-2e04-4ed7-a419-289bd4b899d7] Start building block device mappings for instance. {{(pid=62519) _build_resources /opt/stack/nova/nova/compute/manager.py:2873}} [ 1913.924646] env[62519]: DEBUG oslo_vmware.api [None req-ef66b8f4-b0e5-427c-9948-cfa08fc36f9f tempest-ServerShowV257Test-1662468933 tempest-ServerShowV257Test-1662468933-project-member] Task: {'id': task-1803279, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1913.956013] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b93ce5e-457b-4f62-9d01-1922721da75c {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1913.965098] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6cf04aa2-41ec-41b7-8328-6fa29320094d {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1913.996569] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bfd3ab18-d089-4c6a-b76b-e9f015811cfb {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1914.004546] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2862e152-b6d3-47c3-9243-a9957468d4a9 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1914.018093] env[62519]: DEBUG nova.compute.provider_tree [None req-a0cd48f1-0cb8-4707-94a8-39a1a149cb2f tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Inventory has not changed in ProviderTree for provider: f8ca0d98-9158-4b85-ae0e-b106f966dd44 {{(pid=62519) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1914.143102] env[62519]: DEBUG oslo_concurrency.lockutils [None req-7a2cdfee-09b6-4a59-a4de-69719ec50101 tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1914.161446] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bddf57f3-83ba-45b3-a986-96a7df101674 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1914.181800] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7a6f64e-a9dd-4361-a839-e945b2c9f171 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1914.192428] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-69e7dbb2-bece-4fab-a6ec-da81bb691da7 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] [instance: 4a0f7975-5a07-4593-ae71-cabebdefe0fe] Updating instance '4a0f7975-5a07-4593-ae71-cabebdefe0fe' progress to 83 {{(pid=62519) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1914.422733] env[62519]: DEBUG oslo_vmware.api [None req-ef66b8f4-b0e5-427c-9948-cfa08fc36f9f tempest-ServerShowV257Test-1662468933 tempest-ServerShowV257Test-1662468933-project-member] Task: {'id': task-1803279, 'name': ReconfigVM_Task, 'duration_secs': 0.827022} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1914.423061] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-ef66b8f4-b0e5-427c-9948-cfa08fc36f9f tempest-ServerShowV257Test-1662468933 tempest-ServerShowV257Test-1662468933-project-member] [instance: 7bf9ee64-6c45-480e-959f-ff8395b7c446] Reconfigured VM instance instance-0000006d to attach disk [datastore1] 7bf9ee64-6c45-480e-959f-ff8395b7c446/7bf9ee64-6c45-480e-959f-ff8395b7c446.vmdk or device None with type sparse {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1914.423643] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-e09e3b80-590b-4df2-9e68-a7fda1908f93 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1914.432030] env[62519]: DEBUG nova.network.neutron [-] [instance: 417588f8-6288-4ecd-9764-dbc923549c5d] Updating instance_info_cache with network_info: [] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1914.433259] env[62519]: DEBUG oslo_vmware.api [None req-ef66b8f4-b0e5-427c-9948-cfa08fc36f9f tempest-ServerShowV257Test-1662468933 tempest-ServerShowV257Test-1662468933-project-member] Waiting for the task: (returnval){ [ 1914.433259] env[62519]: value = "task-1803280" [ 1914.433259] env[62519]: _type = "Task" [ 1914.433259] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1914.442495] env[62519]: DEBUG oslo_vmware.api [None req-ef66b8f4-b0e5-427c-9948-cfa08fc36f9f tempest-ServerShowV257Test-1662468933 tempest-ServerShowV257Test-1662468933-project-member] Task: {'id': task-1803280, 'name': Rename_Task} progress is 5%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1914.521707] env[62519]: DEBUG nova.scheduler.client.report [None req-a0cd48f1-0cb8-4707-94a8-39a1a149cb2f tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Inventory has not changed for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1914.676157] env[62519]: DEBUG nova.compute.manager [req-c83b8226-3ec3-49eb-9155-e15d16fcef60 req-b4bfee7b-db6b-48d9-aefe-76dcb0a8a34b service nova] [instance: 417588f8-6288-4ecd-9764-dbc923549c5d] Received event network-vif-deleted-f0e6d0f1-55f9-41b4-83ca-fca156a01c8c {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1914.701025] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-69e7dbb2-bece-4fab-a6ec-da81bb691da7 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] [instance: 4a0f7975-5a07-4593-ae71-cabebdefe0fe] Updating instance '4a0f7975-5a07-4593-ae71-cabebdefe0fe' progress to 100 {{(pid=62519) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1914.728308] env[62519]: DEBUG nova.compute.manager [None req-f902135e-725a-40d2-a5a6-ad0367e185a5 tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] [instance: 8e77653a-2e04-4ed7-a419-289bd4b899d7] Start spawning the instance on the hypervisor. 
{{(pid=62519) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2647}} [ 1914.755267] env[62519]: DEBUG nova.virt.hardware [None req-f902135e-725a-40d2-a5a6-ad0367e185a5 tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T07:56:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T07:55:55Z,direct_url=,disk_format='vmdk',id=15793716-f1d9-4a86-9030-717adf498693,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4069337684d348e0a1ab4eb6a1a2b14d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T07:55:56Z,virtual_size=,visibility=), allow threads: False {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1914.755524] env[62519]: DEBUG nova.virt.hardware [None req-f902135e-725a-40d2-a5a6-ad0367e185a5 tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Flavor limits 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1914.755683] env[62519]: DEBUG nova.virt.hardware [None req-f902135e-725a-40d2-a5a6-ad0367e185a5 tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Image limits 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1914.755877] env[62519]: DEBUG nova.virt.hardware [None req-f902135e-725a-40d2-a5a6-ad0367e185a5 tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Flavor pref 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1914.756035] env[62519]: DEBUG nova.virt.hardware [None req-f902135e-725a-40d2-a5a6-ad0367e185a5 tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Image pref 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1914.756190] env[62519]: DEBUG nova.virt.hardware [None req-f902135e-725a-40d2-a5a6-ad0367e185a5 tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1914.756398] env[62519]: DEBUG nova.virt.hardware [None req-f902135e-725a-40d2-a5a6-ad0367e185a5 tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1914.756556] env[62519]: DEBUG nova.virt.hardware [None req-f902135e-725a-40d2-a5a6-ad0367e185a5 tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62519) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1914.756723] env[62519]: DEBUG 
nova.virt.hardware [None req-f902135e-725a-40d2-a5a6-ad0367e185a5 tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Got 1 possible topologies {{(pid=62519) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1914.756882] env[62519]: DEBUG nova.virt.hardware [None req-f902135e-725a-40d2-a5a6-ad0367e185a5 tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1914.757154] env[62519]: DEBUG nova.virt.hardware [None req-f902135e-725a-40d2-a5a6-ad0367e185a5 tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1914.758292] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7fbcb28f-5b81-4a41-820d-17efe4d8e002 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1914.767107] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d9365b3-1ea0-4381-9896-e95c38be6690 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1914.819308] env[62519]: DEBUG nova.network.neutron [None req-808b4306-721b-42f7-8b5b-0d7eb6f573b3 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] [instance: 31db4b14-0ba3-4159-accc-31c21bd81322] Updating instance_info_cache with network_info: [{"id": "082d375d-5e85-4e5e-a40e-661c492b5f5d", "address": "fa:16:3e:4a:40:42", "network": {"id": "ddacabe4-9723-4a0b-9642-4c3ca54cd05f", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1355796152-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.215", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "549cc35f5ff249f6bf22c67872883db0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bc1e16db-ad3b-4b7f-ab64-4609c87abac0", "external-id": "nsx-vlan-transportzone-500", "segmentation_id": 500, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap082d375d-5e", "ovs_interfaceid": "082d375d-5e85-4e5e-a40e-661c492b5f5d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1914.938478] env[62519]: INFO nova.compute.manager [-] [instance: 417588f8-6288-4ecd-9764-dbc923549c5d] Took 1.50 seconds to deallocate network for instance. 
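Editor's note: the nova.virt.hardware entries above record the CPU-topology negotiation for the 1-vCPU m1.nano flavor: with no flavor or image limits (0:0:0) the maximums default to 65536 sockets/cores/threads, so the only topology whose product equals the vCPU count is 1:1:1, which is what gets selected. The standalone Python sketch below illustrates that narrowing step under the same assumptions; the Topology class and possible_topologies helper are illustrative names only, not Nova's nova.virt.hardware implementation.

# Illustrative sketch of the topology narrowing logged above; the names here
# are hypothetical and this is not the nova.virt.hardware code itself.
from dataclasses import dataclass

@dataclass(frozen=True)
class Topology:
    sockets: int
    cores: int
    threads: int

def possible_topologies(vcpus: int, max_sockets: int = 65536,
                        max_cores: int = 65536, max_threads: int = 65536):
    """Yield every sockets/cores/threads split whose product equals vcpus."""
    for sockets in range(1, min(max_sockets, vcpus) + 1):
        if vcpus % sockets:
            continue
        per_socket = vcpus // sockets
        for cores in range(1, min(max_cores, per_socket) + 1):
            if per_socket % cores:
                continue
            threads = per_socket // cores
            if threads <= max_threads:
                yield Topology(sockets, cores, threads)

# For the 1-vCPU flavor in the log the only candidate is 1:1:1:
print(list(possible_topologies(1)))  # [Topology(sockets=1, cores=1, threads=1)]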
[ 1914.946332] env[62519]: DEBUG oslo_vmware.api [None req-ef66b8f4-b0e5-427c-9948-cfa08fc36f9f tempest-ServerShowV257Test-1662468933 tempest-ServerShowV257Test-1662468933-project-member] Task: {'id': task-1803280, 'name': Rename_Task, 'duration_secs': 0.138963} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1914.946790] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-ef66b8f4-b0e5-427c-9948-cfa08fc36f9f tempest-ServerShowV257Test-1662468933 tempest-ServerShowV257Test-1662468933-project-member] [instance: 7bf9ee64-6c45-480e-959f-ff8395b7c446] Powering on the VM {{(pid=62519) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1914.947044] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-033fc777-35e8-4564-8fef-20cc7df0aefe {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1914.954572] env[62519]: DEBUG oslo_vmware.api [None req-ef66b8f4-b0e5-427c-9948-cfa08fc36f9f tempest-ServerShowV257Test-1662468933 tempest-ServerShowV257Test-1662468933-project-member] Waiting for the task: (returnval){ [ 1914.954572] env[62519]: value = "task-1803281" [ 1914.954572] env[62519]: _type = "Task" [ 1914.954572] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1914.964573] env[62519]: DEBUG oslo_vmware.api [None req-ef66b8f4-b0e5-427c-9948-cfa08fc36f9f tempest-ServerShowV257Test-1662468933 tempest-ServerShowV257Test-1662468933-project-member] Task: {'id': task-1803281, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1915.026993] env[62519]: DEBUG oslo_concurrency.lockutils [None req-a0cd48f1-0cb8-4707-94a8-39a1a149cb2f tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.325s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1915.027627] env[62519]: DEBUG nova.compute.manager [None req-a0cd48f1-0cb8-4707-94a8-39a1a149cb2f tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] [instance: c884a374-ffb8-48db-97bb-d64a687694d5] Start building networks asynchronously for instance. 
{{(pid=62519) _build_resources /opt/stack/nova/nova/compute/manager.py:2838}} [ 1915.030394] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 3.692s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1915.030634] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1915.030787] env[62519]: DEBUG nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62519) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1915.031102] env[62519]: DEBUG oslo_concurrency.lockutils [None req-7a2cdfee-09b6-4a59-a4de-69719ec50101 tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.888s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1915.031284] env[62519]: DEBUG nova.objects.instance [None req-7a2cdfee-09b6-4a59-a4de-69719ec50101 tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] Lazy-loading 'resources' on Instance uuid 4719e47d-dc12-4f9c-aff3-46b083bd7e48 {{(pid=62519) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1915.033236] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff2c8819-42d8-4381-98ca-17a458e6bd8c {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1915.042164] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68b9feb5-2274-47ff-84af-5bd990475519 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1915.057482] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e7ec249-710e-4fa7-8cbf-96e43b6a431f {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1915.067576] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c96acd29-ffea-4f18-8199-2b264292a284 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1915.099316] env[62519]: DEBUG nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=179119MB free_disk=158GB free_vcpus=48 pci_devices=None {{(pid=62519) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1915.099508] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Acquiring lock "compute_resources" by 
"nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1915.324180] env[62519]: DEBUG oslo_concurrency.lockutils [None req-808b4306-721b-42f7-8b5b-0d7eb6f573b3 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Releasing lock "refresh_cache-31db4b14-0ba3-4159-accc-31c21bd81322" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1915.324180] env[62519]: DEBUG nova.objects.instance [None req-808b4306-721b-42f7-8b5b-0d7eb6f573b3 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Lazy-loading 'migration_context' on Instance uuid 31db4b14-0ba3-4159-accc-31c21bd81322 {{(pid=62519) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1915.327413] env[62519]: DEBUG nova.network.neutron [None req-f902135e-725a-40d2-a5a6-ad0367e185a5 tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] [instance: 8e77653a-2e04-4ed7-a419-289bd4b899d7] Successfully updated port: 84761f23-74d8-4c26-8600-641c612981dc {{(pid=62519) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1915.448056] env[62519]: DEBUG oslo_concurrency.lockutils [None req-815ba740-1492-49ab-b27d-469d9061ef91 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1915.465170] env[62519]: DEBUG oslo_vmware.api [None req-ef66b8f4-b0e5-427c-9948-cfa08fc36f9f tempest-ServerShowV257Test-1662468933 tempest-ServerShowV257Test-1662468933-project-member] Task: {'id': task-1803281, 'name': PowerOnVM_Task, 'duration_secs': 0.434727} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1915.465441] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-ef66b8f4-b0e5-427c-9948-cfa08fc36f9f tempest-ServerShowV257Test-1662468933 tempest-ServerShowV257Test-1662468933-project-member] [instance: 7bf9ee64-6c45-480e-959f-ff8395b7c446] Powered on the VM {{(pid=62519) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1915.465639] env[62519]: INFO nova.compute.manager [None req-ef66b8f4-b0e5-427c-9948-cfa08fc36f9f tempest-ServerShowV257Test-1662468933 tempest-ServerShowV257Test-1662468933-project-member] [instance: 7bf9ee64-6c45-480e-959f-ff8395b7c446] Took 4.84 seconds to spawn the instance on the hypervisor. 
[ 1915.465814] env[62519]: DEBUG nova.compute.manager [None req-ef66b8f4-b0e5-427c-9948-cfa08fc36f9f tempest-ServerShowV257Test-1662468933 tempest-ServerShowV257Test-1662468933-project-member] [instance: 7bf9ee64-6c45-480e-959f-ff8395b7c446] Checking state {{(pid=62519) _get_power_state /opt/stack/nova/nova/compute/manager.py:1799}} [ 1915.466856] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c79cd456-0854-4dfd-b4f3-4796b2974ef9 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1915.534873] env[62519]: DEBUG nova.compute.utils [None req-a0cd48f1-0cb8-4707-94a8-39a1a149cb2f tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Using /dev/sd instead of None {{(pid=62519) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1915.536241] env[62519]: DEBUG nova.compute.manager [None req-a0cd48f1-0cb8-4707-94a8-39a1a149cb2f tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] [instance: c884a374-ffb8-48db-97bb-d64a687694d5] Allocating IP information in the background. {{(pid=62519) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1989}} [ 1915.536420] env[62519]: DEBUG nova.network.neutron [None req-a0cd48f1-0cb8-4707-94a8-39a1a149cb2f tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] [instance: c884a374-ffb8-48db-97bb-d64a687694d5] allocate_for_instance() {{(pid=62519) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1915.581567] env[62519]: DEBUG nova.policy [None req-a0cd48f1-0cb8-4707-94a8-39a1a149cb2f tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '7eca5c7b079d4785941d68d7c51df5e0', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '63a46158057949478e5c79fbe0d4d5d4', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62519) authorize /opt/stack/nova/nova/policy.py:192}} [ 1915.760746] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-890543bd-3f9a-4782-a846-573d14bf24b0 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1915.770948] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe8a981e-2ff4-408b-9564-be74c8209639 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1915.805148] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-43d9709e-6fc1-4d5f-849a-87c24ad6f2e1 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1915.814009] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-650bc3f5-a2be-4ea3-92c4-781f37216f8c {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1915.828867] env[62519]: DEBUG nova.objects.base 
[None req-808b4306-721b-42f7-8b5b-0d7eb6f573b3 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Object Instance<31db4b14-0ba3-4159-accc-31c21bd81322> lazy-loaded attributes: info_cache,migration_context {{(pid=62519) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1915.829463] env[62519]: DEBUG nova.compute.provider_tree [None req-7a2cdfee-09b6-4a59-a4de-69719ec50101 tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] Inventory has not changed in ProviderTree for provider: f8ca0d98-9158-4b85-ae0e-b106f966dd44 {{(pid=62519) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1915.831067] env[62519]: DEBUG oslo_concurrency.lockutils [None req-f902135e-725a-40d2-a5a6-ad0367e185a5 tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Acquiring lock "refresh_cache-8e77653a-2e04-4ed7-a419-289bd4b899d7" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1915.831690] env[62519]: DEBUG oslo_concurrency.lockutils [None req-f902135e-725a-40d2-a5a6-ad0367e185a5 tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Acquired lock "refresh_cache-8e77653a-2e04-4ed7-a419-289bd4b899d7" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1915.831690] env[62519]: DEBUG nova.network.neutron [None req-f902135e-725a-40d2-a5a6-ad0367e185a5 tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] [instance: 8e77653a-2e04-4ed7-a419-289bd4b899d7] Building network info cache for instance {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1915.833131] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39790777-d3b6-4792-b48b-a2ac62d80c54 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1915.853764] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1baa395d-f55f-4ce7-91b7-b6d8c605551f {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1915.860445] env[62519]: DEBUG oslo_vmware.api [None req-808b4306-721b-42f7-8b5b-0d7eb6f573b3 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Waiting for the task: (returnval){ [ 1915.860445] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]52a94179-dc64-733b-aa69-6060405dcfda" [ 1915.860445] env[62519]: _type = "Task" [ 1915.860445] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1915.869649] env[62519]: DEBUG oslo_vmware.api [None req-808b4306-721b-42f7-8b5b-0d7eb6f573b3 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52a94179-dc64-733b-aa69-6060405dcfda, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1915.877686] env[62519]: DEBUG nova.network.neutron [None req-a0cd48f1-0cb8-4707-94a8-39a1a149cb2f tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] [instance: c884a374-ffb8-48db-97bb-d64a687694d5] Successfully created port: 5a25cf20-af43-4653-8729-93e5e73c5891 {{(pid=62519) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1915.981641] env[62519]: INFO nova.compute.manager [None req-ef66b8f4-b0e5-427c-9948-cfa08fc36f9f tempest-ServerShowV257Test-1662468933 tempest-ServerShowV257Test-1662468933-project-member] [instance: 7bf9ee64-6c45-480e-959f-ff8395b7c446] Took 17.08 seconds to build instance. [ 1916.042324] env[62519]: DEBUG nova.compute.manager [None req-a0cd48f1-0cb8-4707-94a8-39a1a149cb2f tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] [instance: c884a374-ffb8-48db-97bb-d64a687694d5] Start building block device mappings for instance. {{(pid=62519) _build_resources /opt/stack/nova/nova/compute/manager.py:2873}} [ 1916.338750] env[62519]: DEBUG nova.scheduler.client.report [None req-7a2cdfee-09b6-4a59-a4de-69719ec50101 tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] Inventory has not changed for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1916.374746] env[62519]: DEBUG oslo_vmware.api [None req-808b4306-721b-42f7-8b5b-0d7eb6f573b3 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52a94179-dc64-733b-aa69-6060405dcfda, 'name': SearchDatastore_Task, 'duration_secs': 0.012276} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1916.376014] env[62519]: DEBUG nova.network.neutron [None req-f902135e-725a-40d2-a5a6-ad0367e185a5 tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] [instance: 8e77653a-2e04-4ed7-a419-289bd4b899d7] Instance cache missing network info. 
{{(pid=62519) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1916.380054] env[62519]: DEBUG oslo_concurrency.lockutils [None req-808b4306-721b-42f7-8b5b-0d7eb6f573b3 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1916.386319] env[62519]: INFO nova.compute.manager [None req-5ae54b1f-1e4d-44cf-8957-4b8cbc5ab975 tempest-ServerShowV257Test-1662468933 tempest-ServerShowV257Test-1662468933-project-member] [instance: 7bf9ee64-6c45-480e-959f-ff8395b7c446] Rebuilding instance [ 1916.431425] env[62519]: DEBUG nova.compute.manager [None req-5ae54b1f-1e4d-44cf-8957-4b8cbc5ab975 tempest-ServerShowV257Test-1662468933 tempest-ServerShowV257Test-1662468933-project-member] [instance: 7bf9ee64-6c45-480e-959f-ff8395b7c446] Checking state {{(pid=62519) _get_power_state /opt/stack/nova/nova/compute/manager.py:1799}} [ 1916.432505] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2323e2a-5443-4bf2-8fa1-2dd25832148b {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1916.529794] env[62519]: DEBUG nova.network.neutron [None req-f902135e-725a-40d2-a5a6-ad0367e185a5 tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] [instance: 8e77653a-2e04-4ed7-a419-289bd4b899d7] Updating instance_info_cache with network_info: [{"id": "84761f23-74d8-4c26-8600-641c612981dc", "address": "fa:16:3e:c5:1e:8d", "network": {"id": "06bc21fc-5712-4650-9d06-18e9cc6afd29", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1062109402-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "17cd969b1e7d4bd795748560caf80077", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "89f807d9-140f-4a6f-8bce-96795f9482ee", "external-id": "nsx-vlan-transportzone-762", "segmentation_id": 762, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap84761f23-74", "ovs_interfaceid": "84761f23-74d8-4c26-8600-641c612981dc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1916.767488] env[62519]: DEBUG nova.compute.manager [req-3dbff7a8-b917-4b33-9cdf-58b6280867ec req-efa004e4-2fdb-47a1-abd7-ca48deb3b7b8 service nova] [instance: 8e77653a-2e04-4ed7-a419-289bd4b899d7] Received event network-vif-plugged-84761f23-74d8-4c26-8600-641c612981dc {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1916.767719] env[62519]: DEBUG oslo_concurrency.lockutils [req-3dbff7a8-b917-4b33-9cdf-58b6280867ec req-efa004e4-2fdb-47a1-abd7-ca48deb3b7b8 service nova] Acquiring lock "8e77653a-2e04-4ed7-a419-289bd4b899d7-events" by 
"nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1916.767940] env[62519]: DEBUG oslo_concurrency.lockutils [req-3dbff7a8-b917-4b33-9cdf-58b6280867ec req-efa004e4-2fdb-47a1-abd7-ca48deb3b7b8 service nova] Lock "8e77653a-2e04-4ed7-a419-289bd4b899d7-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1916.768135] env[62519]: DEBUG oslo_concurrency.lockutils [req-3dbff7a8-b917-4b33-9cdf-58b6280867ec req-efa004e4-2fdb-47a1-abd7-ca48deb3b7b8 service nova] Lock "8e77653a-2e04-4ed7-a419-289bd4b899d7-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1916.768327] env[62519]: DEBUG nova.compute.manager [req-3dbff7a8-b917-4b33-9cdf-58b6280867ec req-efa004e4-2fdb-47a1-abd7-ca48deb3b7b8 service nova] [instance: 8e77653a-2e04-4ed7-a419-289bd4b899d7] No waiting events found dispatching network-vif-plugged-84761f23-74d8-4c26-8600-641c612981dc {{(pid=62519) pop_instance_event /opt/stack/nova/nova/compute/manager.py:323}} [ 1916.768534] env[62519]: WARNING nova.compute.manager [req-3dbff7a8-b917-4b33-9cdf-58b6280867ec req-efa004e4-2fdb-47a1-abd7-ca48deb3b7b8 service nova] [instance: 8e77653a-2e04-4ed7-a419-289bd4b899d7] Received unexpected event network-vif-plugged-84761f23-74d8-4c26-8600-641c612981dc for instance with vm_state building and task_state spawning. [ 1916.768695] env[62519]: DEBUG nova.compute.manager [req-3dbff7a8-b917-4b33-9cdf-58b6280867ec req-efa004e4-2fdb-47a1-abd7-ca48deb3b7b8 service nova] [instance: 8e77653a-2e04-4ed7-a419-289bd4b899d7] Received event network-changed-84761f23-74d8-4c26-8600-641c612981dc {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1916.768864] env[62519]: DEBUG nova.compute.manager [req-3dbff7a8-b917-4b33-9cdf-58b6280867ec req-efa004e4-2fdb-47a1-abd7-ca48deb3b7b8 service nova] [instance: 8e77653a-2e04-4ed7-a419-289bd4b899d7] Refreshing instance network info cache due to event network-changed-84761f23-74d8-4c26-8600-641c612981dc. 
{{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11628}} [ 1916.769050] env[62519]: DEBUG oslo_concurrency.lockutils [req-3dbff7a8-b917-4b33-9cdf-58b6280867ec req-efa004e4-2fdb-47a1-abd7-ca48deb3b7b8 service nova] Acquiring lock "refresh_cache-8e77653a-2e04-4ed7-a419-289bd4b899d7" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1916.808544] env[62519]: DEBUG oslo_concurrency.lockutils [None req-63cef62c-1e2a-41fc-b23a-b1025187fc04 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Acquiring lock "4a0f7975-5a07-4593-ae71-cabebdefe0fe" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1916.808811] env[62519]: DEBUG oslo_concurrency.lockutils [None req-63cef62c-1e2a-41fc-b23a-b1025187fc04 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Lock "4a0f7975-5a07-4593-ae71-cabebdefe0fe" acquired by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1916.809012] env[62519]: DEBUG nova.compute.manager [None req-63cef62c-1e2a-41fc-b23a-b1025187fc04 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] [instance: 4a0f7975-5a07-4593-ae71-cabebdefe0fe] Going to confirm migration 7 {{(pid=62519) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:5235}} [ 1916.844740] env[62519]: DEBUG oslo_concurrency.lockutils [None req-7a2cdfee-09b6-4a59-a4de-69719ec50101 tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.813s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1916.848013] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 1.748s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1916.867673] env[62519]: INFO nova.scheduler.client.report [None req-7a2cdfee-09b6-4a59-a4de-69719ec50101 tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] Deleted allocations for instance 4719e47d-dc12-4f9c-aff3-46b083bd7e48 [ 1917.033067] env[62519]: DEBUG oslo_concurrency.lockutils [None req-f902135e-725a-40d2-a5a6-ad0367e185a5 tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Releasing lock "refresh_cache-8e77653a-2e04-4ed7-a419-289bd4b899d7" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1917.033067] env[62519]: DEBUG nova.compute.manager [None req-f902135e-725a-40d2-a5a6-ad0367e185a5 tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] [instance: 8e77653a-2e04-4ed7-a419-289bd4b899d7] Instance network_info: |[{"id": "84761f23-74d8-4c26-8600-641c612981dc", "address": "fa:16:3e:c5:1e:8d", "network": {"id": 
"06bc21fc-5712-4650-9d06-18e9cc6afd29", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1062109402-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "17cd969b1e7d4bd795748560caf80077", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "89f807d9-140f-4a6f-8bce-96795f9482ee", "external-id": "nsx-vlan-transportzone-762", "segmentation_id": 762, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap84761f23-74", "ovs_interfaceid": "84761f23-74d8-4c26-8600-641c612981dc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62519) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2004}} [ 1917.033587] env[62519]: DEBUG oslo_concurrency.lockutils [req-3dbff7a8-b917-4b33-9cdf-58b6280867ec req-efa004e4-2fdb-47a1-abd7-ca48deb3b7b8 service nova] Acquired lock "refresh_cache-8e77653a-2e04-4ed7-a419-289bd4b899d7" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1917.033776] env[62519]: DEBUG nova.network.neutron [req-3dbff7a8-b917-4b33-9cdf-58b6280867ec req-efa004e4-2fdb-47a1-abd7-ca48deb3b7b8 service nova] [instance: 8e77653a-2e04-4ed7-a419-289bd4b899d7] Refreshing network info cache for port 84761f23-74d8-4c26-8600-641c612981dc {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1917.035302] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-f902135e-725a-40d2-a5a6-ad0367e185a5 tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] [instance: 8e77653a-2e04-4ed7-a419-289bd4b899d7] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:c5:1e:8d', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '89f807d9-140f-4a6f-8bce-96795f9482ee', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '84761f23-74d8-4c26-8600-641c612981dc', 'vif_model': 'vmxnet3'}] {{(pid=62519) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1917.042743] env[62519]: DEBUG oslo.service.loopingcall [None req-f902135e-725a-40d2-a5a6-ad0367e185a5 tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62519) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1917.045352] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8e77653a-2e04-4ed7-a419-289bd4b899d7] Creating VM on the ESX host {{(pid=62519) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1917.045791] env[62519]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-504afd97-0877-4dff-a8ab-92833b0e72ee {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1917.061378] env[62519]: DEBUG nova.compute.manager [None req-a0cd48f1-0cb8-4707-94a8-39a1a149cb2f tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] [instance: c884a374-ffb8-48db-97bb-d64a687694d5] Start spawning the instance on the hypervisor. {{(pid=62519) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2647}} [ 1917.069744] env[62519]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1917.069744] env[62519]: value = "task-1803282" [ 1917.069744] env[62519]: _type = "Task" [ 1917.069744] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1917.078762] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1803282, 'name': CreateVM_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1917.088726] env[62519]: DEBUG nova.virt.hardware [None req-a0cd48f1-0cb8-4707-94a8-39a1a149cb2f tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T07:56:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T07:55:55Z,direct_url=,disk_format='vmdk',id=15793716-f1d9-4a86-9030-717adf498693,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4069337684d348e0a1ab4eb6a1a2b14d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T07:55:56Z,virtual_size=,visibility=), allow threads: False {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1917.089013] env[62519]: DEBUG nova.virt.hardware [None req-a0cd48f1-0cb8-4707-94a8-39a1a149cb2f tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Flavor limits 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1917.089193] env[62519]: DEBUG nova.virt.hardware [None req-a0cd48f1-0cb8-4707-94a8-39a1a149cb2f tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Image limits 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1917.089379] env[62519]: DEBUG nova.virt.hardware [None req-a0cd48f1-0cb8-4707-94a8-39a1a149cb2f tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Flavor pref 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1917.089524] env[62519]: 
DEBUG nova.virt.hardware [None req-a0cd48f1-0cb8-4707-94a8-39a1a149cb2f tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Image pref 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1917.089668] env[62519]: DEBUG nova.virt.hardware [None req-a0cd48f1-0cb8-4707-94a8-39a1a149cb2f tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1917.089872] env[62519]: DEBUG nova.virt.hardware [None req-a0cd48f1-0cb8-4707-94a8-39a1a149cb2f tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1917.090093] env[62519]: DEBUG nova.virt.hardware [None req-a0cd48f1-0cb8-4707-94a8-39a1a149cb2f tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62519) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1917.090274] env[62519]: DEBUG nova.virt.hardware [None req-a0cd48f1-0cb8-4707-94a8-39a1a149cb2f tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Got 1 possible topologies {{(pid=62519) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1917.090446] env[62519]: DEBUG nova.virt.hardware [None req-a0cd48f1-0cb8-4707-94a8-39a1a149cb2f tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1917.090607] env[62519]: DEBUG nova.virt.hardware [None req-a0cd48f1-0cb8-4707-94a8-39a1a149cb2f tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1917.091721] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-275ffbfa-c859-48de-b225-e1634bc96c64 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1917.104566] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78933b6a-de18-4484-970a-a0721e3dfc81 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1917.375114] env[62519]: DEBUG oslo_concurrency.lockutils [None req-7a2cdfee-09b6-4a59-a4de-69719ec50101 tempest-ImagesOneServerNegativeTestJSON-1458472104 tempest-ImagesOneServerNegativeTestJSON-1458472104-project-member] Lock "4719e47d-dc12-4f9c-aff3-46b083bd7e48" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.715s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1917.417714] env[62519]: DEBUG nova.network.neutron 
[req-3dbff7a8-b917-4b33-9cdf-58b6280867ec req-efa004e4-2fdb-47a1-abd7-ca48deb3b7b8 service nova] [instance: 8e77653a-2e04-4ed7-a419-289bd4b899d7] Updated VIF entry in instance network info cache for port 84761f23-74d8-4c26-8600-641c612981dc. {{(pid=62519) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1917.418147] env[62519]: DEBUG nova.network.neutron [req-3dbff7a8-b917-4b33-9cdf-58b6280867ec req-efa004e4-2fdb-47a1-abd7-ca48deb3b7b8 service nova] [instance: 8e77653a-2e04-4ed7-a419-289bd4b899d7] Updating instance_info_cache with network_info: [{"id": "84761f23-74d8-4c26-8600-641c612981dc", "address": "fa:16:3e:c5:1e:8d", "network": {"id": "06bc21fc-5712-4650-9d06-18e9cc6afd29", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1062109402-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "17cd969b1e7d4bd795748560caf80077", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "89f807d9-140f-4a6f-8bce-96795f9482ee", "external-id": "nsx-vlan-transportzone-762", "segmentation_id": 762, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap84761f23-74", "ovs_interfaceid": "84761f23-74d8-4c26-8600-641c612981dc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1917.448369] env[62519]: DEBUG oslo_concurrency.lockutils [None req-63cef62c-1e2a-41fc-b23a-b1025187fc04 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Acquiring lock "refresh_cache-4a0f7975-5a07-4593-ae71-cabebdefe0fe" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1917.448756] env[62519]: DEBUG oslo_concurrency.lockutils [None req-63cef62c-1e2a-41fc-b23a-b1025187fc04 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Acquired lock "refresh_cache-4a0f7975-5a07-4593-ae71-cabebdefe0fe" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1917.449082] env[62519]: DEBUG nova.network.neutron [None req-63cef62c-1e2a-41fc-b23a-b1025187fc04 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] [instance: 4a0f7975-5a07-4593-ae71-cabebdefe0fe] Building network info cache for instance {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1917.449414] env[62519]: DEBUG nova.objects.instance [None req-63cef62c-1e2a-41fc-b23a-b1025187fc04 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Lazy-loading 'info_cache' on Instance uuid 4a0f7975-5a07-4593-ae71-cabebdefe0fe {{(pid=62519) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1917.455025] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-5ae54b1f-1e4d-44cf-8957-4b8cbc5ab975 tempest-ServerShowV257Test-1662468933 tempest-ServerShowV257Test-1662468933-project-member] 
[instance: 7bf9ee64-6c45-480e-959f-ff8395b7c446] Powering off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1917.455025] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-0495b856-3137-4873-a997-89bdf6b56471 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1917.462744] env[62519]: DEBUG oslo_vmware.api [None req-5ae54b1f-1e4d-44cf-8957-4b8cbc5ab975 tempest-ServerShowV257Test-1662468933 tempest-ServerShowV257Test-1662468933-project-member] Waiting for the task: (returnval){ [ 1917.462744] env[62519]: value = "task-1803283" [ 1917.462744] env[62519]: _type = "Task" [ 1917.462744] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1917.474955] env[62519]: DEBUG oslo_vmware.api [None req-5ae54b1f-1e4d-44cf-8957-4b8cbc5ab975 tempest-ServerShowV257Test-1662468933 tempest-ServerShowV257Test-1662468933-project-member] Task: {'id': task-1803283, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1917.490827] env[62519]: DEBUG oslo_concurrency.lockutils [None req-ef66b8f4-b0e5-427c-9948-cfa08fc36f9f tempest-ServerShowV257Test-1662468933 tempest-ServerShowV257Test-1662468933-project-member] Lock "7bf9ee64-6c45-480e-959f-ff8395b7c446" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 19.597s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1917.580582] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1803282, 'name': CreateVM_Task, 'duration_secs': 0.384944} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1917.580827] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8e77653a-2e04-4ed7-a419-289bd4b899d7] Created VM on the ESX host {{(pid=62519) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1917.581452] env[62519]: DEBUG oslo_concurrency.lockutils [None req-f902135e-725a-40d2-a5a6-ad0367e185a5 tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1917.581539] env[62519]: DEBUG oslo_concurrency.lockutils [None req-f902135e-725a-40d2-a5a6-ad0367e185a5 tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Acquired lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1917.581861] env[62519]: DEBUG oslo_concurrency.lockutils [None req-f902135e-725a-40d2-a5a6-ad0367e185a5 tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1917.582148] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-144e60f3-f6a8-45be-b7cd-6ea90ed37d30 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1917.588819] env[62519]: DEBUG oslo_vmware.api [None req-f902135e-725a-40d2-a5a6-ad0367e185a5 tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Waiting for the task: (returnval){ [ 1917.588819] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]52557c30-2dd3-29c7-e6eb-4f2e6532a9a2" [ 1917.588819] env[62519]: _type = "Task" [ 1917.588819] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1917.598501] env[62519]: DEBUG oslo_vmware.api [None req-f902135e-725a-40d2-a5a6-ad0367e185a5 tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52557c30-2dd3-29c7-e6eb-4f2e6532a9a2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1917.626067] env[62519]: DEBUG nova.network.neutron [None req-a0cd48f1-0cb8-4707-94a8-39a1a149cb2f tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] [instance: c884a374-ffb8-48db-97bb-d64a687694d5] Successfully updated port: 5a25cf20-af43-4653-8729-93e5e73c5891 {{(pid=62519) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1917.862435] env[62519]: DEBUG nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Applying migration context for instance 31db4b14-0ba3-4159-accc-31c21bd81322 as it has an incoming, in-progress migration 298ba8a7-ac5c-49c7-8268-5cab43f13b57. 
Migration status is confirming {{(pid=62519) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1016}} [ 1917.862651] env[62519]: DEBUG nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Applying migration context for instance 4a0f7975-5a07-4593-ae71-cabebdefe0fe as it has an incoming, in-progress migration e85262b2-652f-424f-b78d-dea43ab76545. Migration status is confirming {{(pid=62519) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1016}} [ 1917.864258] env[62519]: INFO nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] [instance: 31db4b14-0ba3-4159-accc-31c21bd81322] Updating resource usage from migration 298ba8a7-ac5c-49c7-8268-5cab43f13b57 [ 1917.864564] env[62519]: INFO nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] [instance: 4a0f7975-5a07-4593-ae71-cabebdefe0fe] Updating resource usage from migration e85262b2-652f-424f-b78d-dea43ab76545 [ 1917.889402] env[62519]: DEBUG nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Instance 11d4a010-959f-4f53-94dc-7499007612ad actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62519) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1917.889556] env[62519]: DEBUG nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Instance 8b20b91d-d4e7-4ec2-88ec-76b444a8d8a8 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62519) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1917.889700] env[62519]: WARNING nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Instance 417588f8-6288-4ecd-9764-dbc923549c5d is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1917.889820] env[62519]: DEBUG nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Instance ee4b10ba-1c56-47cf-a528-d6e65c286ddb actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62519) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1917.889985] env[62519]: DEBUG nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Instance 2ea8304e-5b91-4908-a876-6e2c780b1da9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62519) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1917.890154] env[62519]: DEBUG nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Instance f3665f89-1747-4567-9e56-c937d4ac81da actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62519) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1917.890270] env[62519]: DEBUG nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Instance 646c9dfc-7b78-4cdb-b4f5-480c43af38c4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62519) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1917.890381] env[62519]: DEBUG nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Instance 618a1db6-4056-4380-b5df-395ac14165a7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62519) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1917.890488] env[62519]: DEBUG nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Instance fc3beaba-2ad5-4598-b562-557fdd552b39 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62519) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1917.890599] env[62519]: DEBUG nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Migration 298ba8a7-ac5c-49c7-8268-5cab43f13b57 is active on this compute host and has allocations in placement: {'resources': {'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62519) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1712}} [ 1917.890709] env[62519]: DEBUG nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Instance 31db4b14-0ba3-4159-accc-31c21bd81322 actively managed on this compute host and has allocations in placement: {'resources': {'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=62519) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1917.890816] env[62519]: DEBUG nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Migration e85262b2-652f-424f-b78d-dea43ab76545 is active on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62519) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1712}} [ 1917.890947] env[62519]: DEBUG nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Instance 4a0f7975-5a07-4593-ae71-cabebdefe0fe actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=62519) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1917.891083] env[62519]: DEBUG nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Instance 7bf9ee64-6c45-480e-959f-ff8395b7c446 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62519) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1917.891196] env[62519]: DEBUG nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Instance 8e77653a-2e04-4ed7-a419-289bd4b899d7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62519) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1917.891302] env[62519]: DEBUG nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Instance c884a374-ffb8-48db-97bb-d64a687694d5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62519) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1917.891513] env[62519]: DEBUG nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Total usable vcpus: 48, total allocated vcpus: 15 {{(pid=62519) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1917.891644] env[62519]: DEBUG nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=3520MB phys_disk=200GB used_disk=13GB total_vcpus=48 used_vcpus=15 pci_stats=[] {{(pid=62519) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1917.925051] env[62519]: DEBUG oslo_concurrency.lockutils [req-3dbff7a8-b917-4b33-9cdf-58b6280867ec req-efa004e4-2fdb-47a1-abd7-ca48deb3b7b8 service nova] Releasing lock "refresh_cache-8e77653a-2e04-4ed7-a419-289bd4b899d7" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1917.973764] env[62519]: DEBUG oslo_vmware.api [None req-5ae54b1f-1e4d-44cf-8957-4b8cbc5ab975 tempest-ServerShowV257Test-1662468933 tempest-ServerShowV257Test-1662468933-project-member] Task: {'id': task-1803283, 'name': PowerOffVM_Task, 'duration_secs': 0.123882} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1917.976366] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-5ae54b1f-1e4d-44cf-8957-4b8cbc5ab975 tempest-ServerShowV257Test-1662468933 tempest-ServerShowV257Test-1662468933-project-member] [instance: 7bf9ee64-6c45-480e-959f-ff8395b7c446] Powered off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1917.976622] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-5ae54b1f-1e4d-44cf-8957-4b8cbc5ab975 tempest-ServerShowV257Test-1662468933 tempest-ServerShowV257Test-1662468933-project-member] [instance: 7bf9ee64-6c45-480e-959f-ff8395b7c446] Destroying instance {{(pid=62519) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1917.977711] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7a144dc-970a-4b3d-82fa-61e0af09c7f6 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1917.985580] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-5ae54b1f-1e4d-44cf-8957-4b8cbc5ab975 tempest-ServerShowV257Test-1662468933 tempest-ServerShowV257Test-1662468933-project-member] [instance: 7bf9ee64-6c45-480e-959f-ff8395b7c446] Unregistering the VM {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1917.985580] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-2874f08a-ad71-4c70-854b-a2398e56b0a6 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1918.023577] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-5ae54b1f-1e4d-44cf-8957-4b8cbc5ab975 tempest-ServerShowV257Test-1662468933 tempest-ServerShowV257Test-1662468933-project-member] [instance: 7bf9ee64-6c45-480e-959f-ff8395b7c446] Unregistered the VM {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1918.023761] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-5ae54b1f-1e4d-44cf-8957-4b8cbc5ab975 tempest-ServerShowV257Test-1662468933 tempest-ServerShowV257Test-1662468933-project-member] [instance: 7bf9ee64-6c45-480e-959f-ff8395b7c446] Deleting contents of the VM from datastore datastore1 {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1918.023954] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-5ae54b1f-1e4d-44cf-8957-4b8cbc5ab975 tempest-ServerShowV257Test-1662468933 tempest-ServerShowV257Test-1662468933-project-member] Deleting the datastore file [datastore1] 7bf9ee64-6c45-480e-959f-ff8395b7c446 {{(pid=62519) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1918.024253] env[62519]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ebd9444d-c236-4511-b96c-4e507dd39f15 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1918.032428] env[62519]: DEBUG oslo_vmware.api [None req-5ae54b1f-1e4d-44cf-8957-4b8cbc5ab975 tempest-ServerShowV257Test-1662468933 tempest-ServerShowV257Test-1662468933-project-member] Waiting for the task: (returnval){ [ 1918.032428] env[62519]: value = "task-1803285" [ 1918.032428] env[62519]: _type = "Task" [ 1918.032428] env[62519]: } to complete. 
{{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1918.042316] env[62519]: DEBUG oslo_vmware.api [None req-5ae54b1f-1e4d-44cf-8957-4b8cbc5ab975 tempest-ServerShowV257Test-1662468933 tempest-ServerShowV257Test-1662468933-project-member] Task: {'id': task-1803285, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1918.103211] env[62519]: DEBUG oslo_vmware.api [None req-f902135e-725a-40d2-a5a6-ad0367e185a5 tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52557c30-2dd3-29c7-e6eb-4f2e6532a9a2, 'name': SearchDatastore_Task, 'duration_secs': 0.020168} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1918.103726] env[62519]: DEBUG oslo_concurrency.lockutils [None req-f902135e-725a-40d2-a5a6-ad0367e185a5 tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Releasing lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1918.104000] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-f902135e-725a-40d2-a5a6-ad0367e185a5 tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] [instance: 8e77653a-2e04-4ed7-a419-289bd4b899d7] Processing image 15793716-f1d9-4a86-9030-717adf498693 {{(pid=62519) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1918.104252] env[62519]: DEBUG oslo_concurrency.lockutils [None req-f902135e-725a-40d2-a5a6-ad0367e185a5 tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1918.104401] env[62519]: DEBUG oslo_concurrency.lockutils [None req-f902135e-725a-40d2-a5a6-ad0367e185a5 tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Acquired lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1918.104577] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-f902135e-725a-40d2-a5a6-ad0367e185a5 tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62519) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1918.107310] env[62519]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c2691f2e-7b51-4724-aa0e-b49605b39960 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1918.117299] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-f902135e-725a-40d2-a5a6-ad0367e185a5 tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62519) mkdir 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1918.117481] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-f902135e-725a-40d2-a5a6-ad0367e185a5 tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62519) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1918.118412] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1e86a118-c1c8-4564-b2c7-453290a5adb6 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1918.125684] env[62519]: DEBUG oslo_vmware.api [None req-f902135e-725a-40d2-a5a6-ad0367e185a5 tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Waiting for the task: (returnval){ [ 1918.125684] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]5254fb9d-f598-984c-167c-f1004bceff67" [ 1918.125684] env[62519]: _type = "Task" [ 1918.125684] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1918.129667] env[62519]: DEBUG oslo_concurrency.lockutils [None req-a0cd48f1-0cb8-4707-94a8-39a1a149cb2f tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Acquiring lock "refresh_cache-c884a374-ffb8-48db-97bb-d64a687694d5" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1918.129840] env[62519]: DEBUG oslo_concurrency.lockutils [None req-a0cd48f1-0cb8-4707-94a8-39a1a149cb2f tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Acquired lock "refresh_cache-c884a374-ffb8-48db-97bb-d64a687694d5" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1918.130225] env[62519]: DEBUG nova.network.neutron [None req-a0cd48f1-0cb8-4707-94a8-39a1a149cb2f tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] [instance: c884a374-ffb8-48db-97bb-d64a687694d5] Building network info cache for instance {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1918.137431] env[62519]: DEBUG oslo_vmware.api [None req-f902135e-725a-40d2-a5a6-ad0367e185a5 tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]5254fb9d-f598-984c-167c-f1004bceff67, 'name': SearchDatastore_Task, 'duration_secs': 0.010316} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1918.139774] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eea2e20c-bb6c-4628-b673-cfe8c29b8933 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1918.142764] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-282cfa48-3ece-4b41-ae8e-2a36fdc0d225 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1918.150790] env[62519]: DEBUG oslo_vmware.api [None req-f902135e-725a-40d2-a5a6-ad0367e185a5 tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Waiting for the task: (returnval){ [ 1918.150790] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]52c56b13-4f20-cbe6-017b-2475e9c0a41a" [ 1918.150790] env[62519]: _type = "Task" [ 1918.150790] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1918.152349] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8d637bb-2c87-4ddc-810f-80df3ced9091 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1918.165399] env[62519]: DEBUG oslo_vmware.api [None req-f902135e-725a-40d2-a5a6-ad0367e185a5 tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52c56b13-4f20-cbe6-017b-2475e9c0a41a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1918.195190] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d950a15-7ae1-420e-886e-0eededa118b5 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1918.203638] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c781cf0-3033-49a6-81b3-76013fc9d05d {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1918.219371] env[62519]: DEBUG nova.compute.provider_tree [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Inventory has not changed in ProviderTree for provider: f8ca0d98-9158-4b85-ae0e-b106f966dd44 {{(pid=62519) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1918.544358] env[62519]: DEBUG oslo_vmware.api [None req-5ae54b1f-1e4d-44cf-8957-4b8cbc5ab975 tempest-ServerShowV257Test-1662468933 tempest-ServerShowV257Test-1662468933-project-member] Task: {'id': task-1803285, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.163045} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1918.544678] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-5ae54b1f-1e4d-44cf-8957-4b8cbc5ab975 tempest-ServerShowV257Test-1662468933 tempest-ServerShowV257Test-1662468933-project-member] Deleted the datastore file {{(pid=62519) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1918.544866] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-5ae54b1f-1e4d-44cf-8957-4b8cbc5ab975 tempest-ServerShowV257Test-1662468933 tempest-ServerShowV257Test-1662468933-project-member] [instance: 7bf9ee64-6c45-480e-959f-ff8395b7c446] Deleted contents of the VM from datastore datastore1 {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1918.545051] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-5ae54b1f-1e4d-44cf-8957-4b8cbc5ab975 tempest-ServerShowV257Test-1662468933 tempest-ServerShowV257Test-1662468933-project-member] [instance: 7bf9ee64-6c45-480e-959f-ff8395b7c446] Instance destroyed {{(pid=62519) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1918.666094] env[62519]: DEBUG oslo_vmware.api [None req-f902135e-725a-40d2-a5a6-ad0367e185a5 tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52c56b13-4f20-cbe6-017b-2475e9c0a41a, 'name': SearchDatastore_Task, 'duration_secs': 0.019343} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1918.666597] env[62519]: DEBUG oslo_concurrency.lockutils [None req-f902135e-725a-40d2-a5a6-ad0367e185a5 tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Releasing lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1918.666736] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-f902135e-725a-40d2-a5a6-ad0367e185a5 tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk to [datastore1] 8e77653a-2e04-4ed7-a419-289bd4b899d7/8e77653a-2e04-4ed7-a419-289bd4b899d7.vmdk {{(pid=62519) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1918.666880] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-87bea5b6-e906-4501-b7af-ed8242c0a294 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1918.674919] env[62519]: DEBUG oslo_vmware.api [None req-f902135e-725a-40d2-a5a6-ad0367e185a5 tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Waiting for the task: (returnval){ [ 1918.674919] env[62519]: value = "task-1803286" [ 1918.674919] env[62519]: _type = "Task" [ 1918.674919] env[62519]: } to complete. 
{{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1918.685686] env[62519]: DEBUG oslo_vmware.api [None req-f902135e-725a-40d2-a5a6-ad0367e185a5 tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Task: {'id': task-1803286, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1918.723032] env[62519]: DEBUG nova.scheduler.client.report [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Inventory has not changed for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1918.787510] env[62519]: DEBUG nova.network.neutron [None req-a0cd48f1-0cb8-4707-94a8-39a1a149cb2f tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] [instance: c884a374-ffb8-48db-97bb-d64a687694d5] Instance cache missing network info. {{(pid=62519) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1918.797734] env[62519]: DEBUG nova.compute.manager [req-a1698a2c-bef7-4571-a975-8c94a4625544 req-abedf137-37de-4817-bcd8-5770a211cb22 service nova] [instance: c884a374-ffb8-48db-97bb-d64a687694d5] Received event network-vif-plugged-5a25cf20-af43-4653-8729-93e5e73c5891 {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1918.797734] env[62519]: DEBUG oslo_concurrency.lockutils [req-a1698a2c-bef7-4571-a975-8c94a4625544 req-abedf137-37de-4817-bcd8-5770a211cb22 service nova] Acquiring lock "c884a374-ffb8-48db-97bb-d64a687694d5-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1918.797734] env[62519]: DEBUG oslo_concurrency.lockutils [req-a1698a2c-bef7-4571-a975-8c94a4625544 req-abedf137-37de-4817-bcd8-5770a211cb22 service nova] Lock "c884a374-ffb8-48db-97bb-d64a687694d5-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1918.797984] env[62519]: DEBUG oslo_concurrency.lockutils [req-a1698a2c-bef7-4571-a975-8c94a4625544 req-abedf137-37de-4817-bcd8-5770a211cb22 service nova] Lock "c884a374-ffb8-48db-97bb-d64a687694d5-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1918.798525] env[62519]: DEBUG nova.compute.manager [req-a1698a2c-bef7-4571-a975-8c94a4625544 req-abedf137-37de-4817-bcd8-5770a211cb22 service nova] [instance: c884a374-ffb8-48db-97bb-d64a687694d5] No waiting events found dispatching network-vif-plugged-5a25cf20-af43-4653-8729-93e5e73c5891 {{(pid=62519) pop_instance_event /opt/stack/nova/nova/compute/manager.py:323}} [ 1918.798525] env[62519]: WARNING nova.compute.manager 
[req-a1698a2c-bef7-4571-a975-8c94a4625544 req-abedf137-37de-4817-bcd8-5770a211cb22 service nova] [instance: c884a374-ffb8-48db-97bb-d64a687694d5] Received unexpected event network-vif-plugged-5a25cf20-af43-4653-8729-93e5e73c5891 for instance with vm_state building and task_state spawning. [ 1918.798697] env[62519]: DEBUG nova.compute.manager [req-a1698a2c-bef7-4571-a975-8c94a4625544 req-abedf137-37de-4817-bcd8-5770a211cb22 service nova] [instance: c884a374-ffb8-48db-97bb-d64a687694d5] Received event network-changed-5a25cf20-af43-4653-8729-93e5e73c5891 {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1918.798776] env[62519]: DEBUG nova.compute.manager [req-a1698a2c-bef7-4571-a975-8c94a4625544 req-abedf137-37de-4817-bcd8-5770a211cb22 service nova] [instance: c884a374-ffb8-48db-97bb-d64a687694d5] Refreshing instance network info cache due to event network-changed-5a25cf20-af43-4653-8729-93e5e73c5891. {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11628}} [ 1918.798949] env[62519]: DEBUG oslo_concurrency.lockutils [req-a1698a2c-bef7-4571-a975-8c94a4625544 req-abedf137-37de-4817-bcd8-5770a211cb22 service nova] Acquiring lock "refresh_cache-c884a374-ffb8-48db-97bb-d64a687694d5" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1919.005449] env[62519]: DEBUG nova.network.neutron [None req-63cef62c-1e2a-41fc-b23a-b1025187fc04 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] [instance: 4a0f7975-5a07-4593-ae71-cabebdefe0fe] Updating instance_info_cache with network_info: [{"id": "f8b7229e-2b40-4a1b-9e16-0d01dbaa52f8", "address": "fa:16:3e:6c:b5:c8", "network": {"id": "32c7792c-9705-4830-bdbc-4c4159d566f0", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-374750788-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.241", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d73e71476254453fb23164dce09c6d41", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a0a76279-3c11-4bef-b124-2a2ee13fa377", "external-id": "nsx-vlan-transportzone-738", "segmentation_id": 738, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf8b7229e-2b", "ovs_interfaceid": "f8b7229e-2b40-4a1b-9e16-0d01dbaa52f8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1919.015409] env[62519]: DEBUG nova.network.neutron [None req-a0cd48f1-0cb8-4707-94a8-39a1a149cb2f tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] [instance: c884a374-ffb8-48db-97bb-d64a687694d5] Updating instance_info_cache with network_info: [{"id": "5a25cf20-af43-4653-8729-93e5e73c5891", "address": "fa:16:3e:9b:6d:c4", "network": {"id": "3996e7f6-f093-4152-af91-6fb77f32a1c5", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-2013009403-network", "subnets": 
[{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "63a46158057949478e5c79fbe0d4d5d4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3093647a-bab7-4562-ada0-428725e8c0fc", "external-id": "nsx-vlan-transportzone-660", "segmentation_id": 660, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5a25cf20-af", "ovs_interfaceid": "5a25cf20-af43-4653-8729-93e5e73c5891", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1919.186803] env[62519]: DEBUG oslo_vmware.api [None req-f902135e-725a-40d2-a5a6-ad0367e185a5 tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Task: {'id': task-1803286, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1919.228282] env[62519]: DEBUG nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62519) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1919.228519] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.381s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1919.228972] env[62519]: DEBUG oslo_concurrency.lockutils [None req-815ba740-1492-49ab-b27d-469d9061ef91 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 3.781s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1919.229237] env[62519]: DEBUG oslo_concurrency.lockutils [None req-815ba740-1492-49ab-b27d-469d9061ef91 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1919.231740] env[62519]: DEBUG oslo_concurrency.lockutils [None req-808b4306-721b-42f7-8b5b-0d7eb6f573b3 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 2.852s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1919.262650] env[62519]: INFO nova.scheduler.client.report [None req-815ba740-1492-49ab-b27d-469d9061ef91 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Deleted allocations 
for instance 417588f8-6288-4ecd-9764-dbc923549c5d [ 1919.510262] env[62519]: DEBUG oslo_concurrency.lockutils [None req-63cef62c-1e2a-41fc-b23a-b1025187fc04 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Releasing lock "refresh_cache-4a0f7975-5a07-4593-ae71-cabebdefe0fe" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1919.510262] env[62519]: DEBUG nova.objects.instance [None req-63cef62c-1e2a-41fc-b23a-b1025187fc04 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Lazy-loading 'migration_context' on Instance uuid 4a0f7975-5a07-4593-ae71-cabebdefe0fe {{(pid=62519) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1919.521689] env[62519]: DEBUG oslo_concurrency.lockutils [None req-a0cd48f1-0cb8-4707-94a8-39a1a149cb2f tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Releasing lock "refresh_cache-c884a374-ffb8-48db-97bb-d64a687694d5" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1919.522863] env[62519]: DEBUG nova.compute.manager [None req-a0cd48f1-0cb8-4707-94a8-39a1a149cb2f tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] [instance: c884a374-ffb8-48db-97bb-d64a687694d5] Instance network_info: |[{"id": "5a25cf20-af43-4653-8729-93e5e73c5891", "address": "fa:16:3e:9b:6d:c4", "network": {"id": "3996e7f6-f093-4152-af91-6fb77f32a1c5", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-2013009403-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "63a46158057949478e5c79fbe0d4d5d4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3093647a-bab7-4562-ada0-428725e8c0fc", "external-id": "nsx-vlan-transportzone-660", "segmentation_id": 660, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5a25cf20-af", "ovs_interfaceid": "5a25cf20-af43-4653-8729-93e5e73c5891", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62519) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2004}} [ 1919.522863] env[62519]: DEBUG oslo_concurrency.lockutils [req-a1698a2c-bef7-4571-a975-8c94a4625544 req-abedf137-37de-4817-bcd8-5770a211cb22 service nova] Acquired lock "refresh_cache-c884a374-ffb8-48db-97bb-d64a687694d5" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1919.522863] env[62519]: DEBUG nova.network.neutron [req-a1698a2c-bef7-4571-a975-8c94a4625544 req-abedf137-37de-4817-bcd8-5770a211cb22 service nova] [instance: c884a374-ffb8-48db-97bb-d64a687694d5] Refreshing network info cache for port 5a25cf20-af43-4653-8729-93e5e73c5891 {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1919.523588] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-a0cd48f1-0cb8-4707-94a8-39a1a149cb2f 
tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] [instance: c884a374-ffb8-48db-97bb-d64a687694d5] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:9b:6d:c4', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '3093647a-bab7-4562-ada0-428725e8c0fc', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '5a25cf20-af43-4653-8729-93e5e73c5891', 'vif_model': 'vmxnet3'}] {{(pid=62519) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1919.532957] env[62519]: DEBUG oslo.service.loopingcall [None req-a0cd48f1-0cb8-4707-94a8-39a1a149cb2f tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62519) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1919.535854] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c884a374-ffb8-48db-97bb-d64a687694d5] Creating VM on the ESX host {{(pid=62519) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1919.536367] env[62519]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-9d978110-ae34-4034-9ba7-d1966cd0a41f {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1919.564385] env[62519]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1919.564385] env[62519]: value = "task-1803287" [ 1919.564385] env[62519]: _type = "Task" [ 1919.564385] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1919.577550] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1803287, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1919.591068] env[62519]: DEBUG nova.virt.hardware [None req-5ae54b1f-1e4d-44cf-8957-4b8cbc5ab975 tempest-ServerShowV257Test-1662468933 tempest-ServerShowV257Test-1662468933-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T07:56:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T07:55:55Z,direct_url=,disk_format='vmdk',id=15793716-f1d9-4a86-9030-717adf498693,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4069337684d348e0a1ab4eb6a1a2b14d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T07:55:56Z,virtual_size=,visibility=), allow threads: False {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1919.591365] env[62519]: DEBUG nova.virt.hardware [None req-5ae54b1f-1e4d-44cf-8957-4b8cbc5ab975 tempest-ServerShowV257Test-1662468933 tempest-ServerShowV257Test-1662468933-project-member] Flavor limits 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1919.591563] env[62519]: DEBUG nova.virt.hardware [None req-5ae54b1f-1e4d-44cf-8957-4b8cbc5ab975 tempest-ServerShowV257Test-1662468933 tempest-ServerShowV257Test-1662468933-project-member] Image limits 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1919.591790] env[62519]: DEBUG nova.virt.hardware [None req-5ae54b1f-1e4d-44cf-8957-4b8cbc5ab975 tempest-ServerShowV257Test-1662468933 tempest-ServerShowV257Test-1662468933-project-member] Flavor pref 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1919.591981] env[62519]: DEBUG nova.virt.hardware [None req-5ae54b1f-1e4d-44cf-8957-4b8cbc5ab975 tempest-ServerShowV257Test-1662468933 tempest-ServerShowV257Test-1662468933-project-member] Image pref 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1919.592234] env[62519]: DEBUG nova.virt.hardware [None req-5ae54b1f-1e4d-44cf-8957-4b8cbc5ab975 tempest-ServerShowV257Test-1662468933 tempest-ServerShowV257Test-1662468933-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1919.592567] env[62519]: DEBUG nova.virt.hardware [None req-5ae54b1f-1e4d-44cf-8957-4b8cbc5ab975 tempest-ServerShowV257Test-1662468933 tempest-ServerShowV257Test-1662468933-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1919.592779] env[62519]: DEBUG nova.virt.hardware [None req-5ae54b1f-1e4d-44cf-8957-4b8cbc5ab975 tempest-ServerShowV257Test-1662468933 tempest-ServerShowV257Test-1662468933-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62519) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1919.592993] env[62519]: DEBUG nova.virt.hardware [None 
req-5ae54b1f-1e4d-44cf-8957-4b8cbc5ab975 tempest-ServerShowV257Test-1662468933 tempest-ServerShowV257Test-1662468933-project-member] Got 1 possible topologies {{(pid=62519) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1919.593792] env[62519]: DEBUG nova.virt.hardware [None req-5ae54b1f-1e4d-44cf-8957-4b8cbc5ab975 tempest-ServerShowV257Test-1662468933 tempest-ServerShowV257Test-1662468933-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1919.594167] env[62519]: DEBUG nova.virt.hardware [None req-5ae54b1f-1e4d-44cf-8957-4b8cbc5ab975 tempest-ServerShowV257Test-1662468933 tempest-ServerShowV257Test-1662468933-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1919.595132] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e636560-4dbc-4b9e-9bb5-fe38622902a9 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1919.604634] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb6e2c82-cf46-4425-b7a3-85c7f19431ff {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1919.624765] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-5ae54b1f-1e4d-44cf-8957-4b8cbc5ab975 tempest-ServerShowV257Test-1662468933 tempest-ServerShowV257Test-1662468933-project-member] [instance: 7bf9ee64-6c45-480e-959f-ff8395b7c446] Instance VIF info [] {{(pid=62519) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1919.632474] env[62519]: DEBUG oslo.service.loopingcall [None req-5ae54b1f-1e4d-44cf-8957-4b8cbc5ab975 tempest-ServerShowV257Test-1662468933 tempest-ServerShowV257Test-1662468933-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62519) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1919.632767] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7bf9ee64-6c45-480e-959f-ff8395b7c446] Creating VM on the ESX host {{(pid=62519) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1919.632994] env[62519]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-1110682a-3912-4c0c-8e3a-2c98992d222a {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1919.651851] env[62519]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1919.651851] env[62519]: value = "task-1803288" [ 1919.651851] env[62519]: _type = "Task" [ 1919.651851] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1919.662680] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1803288, 'name': CreateVM_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1919.687361] env[62519]: DEBUG oslo_vmware.api [None req-f902135e-725a-40d2-a5a6-ad0367e185a5 tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Task: {'id': task-1803286, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.833414} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1919.687646] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-f902135e-725a-40d2-a5a6-ad0367e185a5 tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk to [datastore1] 8e77653a-2e04-4ed7-a419-289bd4b899d7/8e77653a-2e04-4ed7-a419-289bd4b899d7.vmdk {{(pid=62519) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1919.687790] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-f902135e-725a-40d2-a5a6-ad0367e185a5 tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] [instance: 8e77653a-2e04-4ed7-a419-289bd4b899d7] Extending root virtual disk to 1048576 {{(pid=62519) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1919.688098] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-be2b297c-9224-4324-9b31-b402127bbfe2 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1919.696903] env[62519]: DEBUG oslo_vmware.api [None req-f902135e-725a-40d2-a5a6-ad0367e185a5 tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Waiting for the task: (returnval){ [ 1919.696903] env[62519]: value = "task-1803289" [ 1919.696903] env[62519]: _type = "Task" [ 1919.696903] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1919.705103] env[62519]: DEBUG oslo_vmware.api [None req-f902135e-725a-40d2-a5a6-ad0367e185a5 tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Task: {'id': task-1803289, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1919.773488] env[62519]: DEBUG oslo_concurrency.lockutils [None req-815ba740-1492-49ab-b27d-469d9061ef91 tempest-ServersTestJSON-810763036 tempest-ServersTestJSON-810763036-project-member] Lock "417588f8-6288-4ecd-9764-dbc923549c5d" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 8.105s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1919.841498] env[62519]: DEBUG nova.network.neutron [req-a1698a2c-bef7-4571-a975-8c94a4625544 req-abedf137-37de-4817-bcd8-5770a211cb22 service nova] [instance: c884a374-ffb8-48db-97bb-d64a687694d5] Updated VIF entry in instance network info cache for port 5a25cf20-af43-4653-8729-93e5e73c5891. 
{{(pid=62519) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1919.841842] env[62519]: DEBUG nova.network.neutron [req-a1698a2c-bef7-4571-a975-8c94a4625544 req-abedf137-37de-4817-bcd8-5770a211cb22 service nova] [instance: c884a374-ffb8-48db-97bb-d64a687694d5] Updating instance_info_cache with network_info: [{"id": "5a25cf20-af43-4653-8729-93e5e73c5891", "address": "fa:16:3e:9b:6d:c4", "network": {"id": "3996e7f6-f093-4152-af91-6fb77f32a1c5", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-2013009403-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "63a46158057949478e5c79fbe0d4d5d4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3093647a-bab7-4562-ada0-428725e8c0fc", "external-id": "nsx-vlan-transportzone-660", "segmentation_id": 660, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5a25cf20-af", "ovs_interfaceid": "5a25cf20-af43-4653-8729-93e5e73c5891", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1919.948235] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-680e7c1d-fed3-417c-81be-a11efac1168e {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1919.956345] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9fb3e83-aa65-44fd-a902-b959b7422d8c {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1919.990096] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-562b4202-4e88-43b0-9e39-6e0b94d5ca29 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1919.998421] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90c5c202-6773-4a2d-a1c0-5f2f8f827aa9 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1920.013751] env[62519]: DEBUG nova.objects.base [None req-63cef62c-1e2a-41fc-b23a-b1025187fc04 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Object Instance<4a0f7975-5a07-4593-ae71-cabebdefe0fe> lazy-loaded attributes: info_cache,migration_context {{(pid=62519) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1920.014319] env[62519]: DEBUG nova.compute.provider_tree [None req-808b4306-721b-42f7-8b5b-0d7eb6f573b3 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Inventory has not changed in ProviderTree for provider: f8ca0d98-9158-4b85-ae0e-b106f966dd44 {{(pid=62519) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1920.016092] env[62519]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7eff29d-7c4e-4b1f-a55a-a4d23aa0bc70 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1920.039027] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2afaad0d-378b-4010-bf3d-48e7af808a7c {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1920.045519] env[62519]: DEBUG oslo_vmware.api [None req-63cef62c-1e2a-41fc-b23a-b1025187fc04 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Waiting for the task: (returnval){ [ 1920.045519] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]52c36ea3-8399-42dd-384f-339b816abeee" [ 1920.045519] env[62519]: _type = "Task" [ 1920.045519] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1920.055631] env[62519]: DEBUG oslo_vmware.api [None req-63cef62c-1e2a-41fc-b23a-b1025187fc04 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52c36ea3-8399-42dd-384f-339b816abeee, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1920.074219] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1803287, 'name': CreateVM_Task} progress is 99%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1920.162366] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1803288, 'name': CreateVM_Task} progress is 25%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1920.208645] env[62519]: DEBUG oslo_vmware.api [None req-f902135e-725a-40d2-a5a6-ad0367e185a5 tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Task: {'id': task-1803289, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.07371} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1920.208996] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-f902135e-725a-40d2-a5a6-ad0367e185a5 tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] [instance: 8e77653a-2e04-4ed7-a419-289bd4b899d7] Extended root virtual disk {{(pid=62519) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1920.209807] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-430b9dd0-1abf-4612-8261-54d06e01e5fa {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1920.235394] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-f902135e-725a-40d2-a5a6-ad0367e185a5 tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] [instance: 8e77653a-2e04-4ed7-a419-289bd4b899d7] Reconfiguring VM instance instance-0000006e to attach disk [datastore1] 8e77653a-2e04-4ed7-a419-289bd4b899d7/8e77653a-2e04-4ed7-a419-289bd4b899d7.vmdk or device None with type sparse {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1920.235724] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-69884297-1f02-4bf4-a508-f8797efc1122 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1920.258577] env[62519]: DEBUG oslo_vmware.api [None req-f902135e-725a-40d2-a5a6-ad0367e185a5 tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Waiting for the task: (returnval){ [ 1920.258577] env[62519]: value = "task-1803290" [ 1920.258577] env[62519]: _type = "Task" [ 1920.258577] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1920.267244] env[62519]: DEBUG oslo_vmware.api [None req-f902135e-725a-40d2-a5a6-ad0367e185a5 tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Task: {'id': task-1803290, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1920.344735] env[62519]: DEBUG oslo_concurrency.lockutils [req-a1698a2c-bef7-4571-a975-8c94a4625544 req-abedf137-37de-4817-bcd8-5770a211cb22 service nova] Releasing lock "refresh_cache-c884a374-ffb8-48db-97bb-d64a687694d5" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1920.520794] env[62519]: DEBUG nova.scheduler.client.report [None req-808b4306-721b-42f7-8b5b-0d7eb6f573b3 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Inventory has not changed for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1920.560233] env[62519]: DEBUG oslo_vmware.api [None req-63cef62c-1e2a-41fc-b23a-b1025187fc04 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52c36ea3-8399-42dd-384f-339b816abeee, 'name': SearchDatastore_Task, 'duration_secs': 0.009507} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1920.560635] env[62519]: DEBUG oslo_concurrency.lockutils [None req-63cef62c-1e2a-41fc-b23a-b1025187fc04 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1920.576239] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1803287, 'name': CreateVM_Task, 'duration_secs': 0.546716} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1920.576464] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c884a374-ffb8-48db-97bb-d64a687694d5] Created VM on the ESX host {{(pid=62519) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1920.577390] env[62519]: DEBUG oslo_concurrency.lockutils [None req-a0cd48f1-0cb8-4707-94a8-39a1a149cb2f tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1920.577608] env[62519]: DEBUG oslo_concurrency.lockutils [None req-a0cd48f1-0cb8-4707-94a8-39a1a149cb2f tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Acquired lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1920.577973] env[62519]: DEBUG oslo_concurrency.lockutils [None req-a0cd48f1-0cb8-4707-94a8-39a1a149cb2f tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1920.578427] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f5d3792d-7692-49e4-9392-89d21047901f {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1920.584283] env[62519]: DEBUG oslo_vmware.api [None req-a0cd48f1-0cb8-4707-94a8-39a1a149cb2f tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Waiting for the task: (returnval){ [ 1920.584283] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]520a8193-3304-aa49-2b3b-a798c000a5c7" [ 1920.584283] env[62519]: _type = "Task" [ 1920.584283] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1920.596282] env[62519]: DEBUG oslo_vmware.api [None req-a0cd48f1-0cb8-4707-94a8-39a1a149cb2f tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]520a8193-3304-aa49-2b3b-a798c000a5c7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1920.664381] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1803288, 'name': CreateVM_Task} progress is 25%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1920.770469] env[62519]: DEBUG oslo_vmware.api [None req-f902135e-725a-40d2-a5a6-ad0367e185a5 tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Task: {'id': task-1803290, 'name': ReconfigVM_Task, 'duration_secs': 0.291141} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1920.770879] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-f902135e-725a-40d2-a5a6-ad0367e185a5 tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] [instance: 8e77653a-2e04-4ed7-a419-289bd4b899d7] Reconfigured VM instance instance-0000006e to attach disk [datastore1] 8e77653a-2e04-4ed7-a419-289bd4b899d7/8e77653a-2e04-4ed7-a419-289bd4b899d7.vmdk or device None with type sparse {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1920.772792] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-6392b337-050e-44ab-939c-d249c9976a1c {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1920.785449] env[62519]: DEBUG oslo_vmware.api [None req-f902135e-725a-40d2-a5a6-ad0367e185a5 tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Waiting for the task: (returnval){ [ 1920.785449] env[62519]: value = "task-1803291" [ 1920.785449] env[62519]: _type = "Task" [ 1920.785449] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1920.792489] env[62519]: DEBUG oslo_vmware.api [None req-f902135e-725a-40d2-a5a6-ad0367e185a5 tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Task: {'id': task-1803291, 'name': Rename_Task} progress is 5%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1921.100063] env[62519]: DEBUG oslo_vmware.api [None req-a0cd48f1-0cb8-4707-94a8-39a1a149cb2f tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]520a8193-3304-aa49-2b3b-a798c000a5c7, 'name': SearchDatastore_Task, 'duration_secs': 0.011894} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1921.100426] env[62519]: DEBUG oslo_concurrency.lockutils [None req-a0cd48f1-0cb8-4707-94a8-39a1a149cb2f tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Releasing lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1921.100665] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-a0cd48f1-0cb8-4707-94a8-39a1a149cb2f tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] [instance: c884a374-ffb8-48db-97bb-d64a687694d5] Processing image 15793716-f1d9-4a86-9030-717adf498693 {{(pid=62519) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1921.100893] env[62519]: DEBUG oslo_concurrency.lockutils [None req-a0cd48f1-0cb8-4707-94a8-39a1a149cb2f tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1921.101046] env[62519]: DEBUG oslo_concurrency.lockutils [None req-a0cd48f1-0cb8-4707-94a8-39a1a149cb2f tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Acquired lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1921.101230] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-a0cd48f1-0cb8-4707-94a8-39a1a149cb2f tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62519) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1921.101496] env[62519]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-fb0c9f21-7cc0-4806-951b-82eefe14422b {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1921.113790] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-a0cd48f1-0cb8-4707-94a8-39a1a149cb2f tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62519) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1921.113983] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-a0cd48f1-0cb8-4707-94a8-39a1a149cb2f tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62519) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1921.115098] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-67113716-37ea-4098-83d8-3b0301ca30d4 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1921.120719] env[62519]: DEBUG oslo_vmware.api [None req-a0cd48f1-0cb8-4707-94a8-39a1a149cb2f tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Waiting for the task: (returnval){ [ 1921.120719] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]52067355-ff04-fc62-cc00-0636f8dc4fb1" [ 1921.120719] env[62519]: _type = "Task" [ 1921.120719] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1921.129202] env[62519]: DEBUG oslo_vmware.api [None req-a0cd48f1-0cb8-4707-94a8-39a1a149cb2f tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52067355-ff04-fc62-cc00-0636f8dc4fb1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1921.165181] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1803288, 'name': CreateVM_Task, 'duration_secs': 1.228692} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1921.165359] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7bf9ee64-6c45-480e-959f-ff8395b7c446] Created VM on the ESX host {{(pid=62519) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1921.165846] env[62519]: DEBUG oslo_concurrency.lockutils [None req-5ae54b1f-1e4d-44cf-8957-4b8cbc5ab975 tempest-ServerShowV257Test-1662468933 tempest-ServerShowV257Test-1662468933-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1921.166015] env[62519]: DEBUG oslo_concurrency.lockutils [None req-5ae54b1f-1e4d-44cf-8957-4b8cbc5ab975 tempest-ServerShowV257Test-1662468933 tempest-ServerShowV257Test-1662468933-project-member] Acquired lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1921.166396] env[62519]: DEBUG oslo_concurrency.lockutils [None req-5ae54b1f-1e4d-44cf-8957-4b8cbc5ab975 tempest-ServerShowV257Test-1662468933 tempest-ServerShowV257Test-1662468933-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1921.166686] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5b71b7f7-1236-4595-8a98-840e1d6577ad {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1921.173230] env[62519]: DEBUG oslo_vmware.api [None req-5ae54b1f-1e4d-44cf-8957-4b8cbc5ab975 tempest-ServerShowV257Test-1662468933 tempest-ServerShowV257Test-1662468933-project-member] Waiting for the task: (returnval){ [ 1921.173230] env[62519]: 
value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]522246a7-7429-39e2-f0f2-2ef65b9f61f6" [ 1921.173230] env[62519]: _type = "Task" [ 1921.173230] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1921.183738] env[62519]: DEBUG oslo_vmware.api [None req-5ae54b1f-1e4d-44cf-8957-4b8cbc5ab975 tempest-ServerShowV257Test-1662468933 tempest-ServerShowV257Test-1662468933-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]522246a7-7429-39e2-f0f2-2ef65b9f61f6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1921.292381] env[62519]: DEBUG oslo_vmware.api [None req-f902135e-725a-40d2-a5a6-ad0367e185a5 tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Task: {'id': task-1803291, 'name': Rename_Task, 'duration_secs': 0.16063} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1921.292789] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-f902135e-725a-40d2-a5a6-ad0367e185a5 tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] [instance: 8e77653a-2e04-4ed7-a419-289bd4b899d7] Powering on the VM {{(pid=62519) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1921.293032] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ca33302d-e550-4561-be0c-5fd90e5d2468 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1921.301140] env[62519]: DEBUG oslo_vmware.api [None req-f902135e-725a-40d2-a5a6-ad0367e185a5 tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Waiting for the task: (returnval){ [ 1921.301140] env[62519]: value = "task-1803292" [ 1921.301140] env[62519]: _type = "Task" [ 1921.301140] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1921.309436] env[62519]: DEBUG oslo_vmware.api [None req-f902135e-725a-40d2-a5a6-ad0367e185a5 tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Task: {'id': task-1803292, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1921.533828] env[62519]: DEBUG oslo_concurrency.lockutils [None req-808b4306-721b-42f7-8b5b-0d7eb6f573b3 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 2.300s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1921.535324] env[62519]: DEBUG oslo_concurrency.lockutils [None req-63cef62c-1e2a-41fc-b23a-b1025187fc04 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 0.975s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1921.632504] env[62519]: DEBUG oslo_vmware.api [None req-a0cd48f1-0cb8-4707-94a8-39a1a149cb2f tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52067355-ff04-fc62-cc00-0636f8dc4fb1, 'name': SearchDatastore_Task, 'duration_secs': 0.030632} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1921.633269] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9d382e46-f73c-4b22-8cf9-df4cdff91744 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1921.641732] env[62519]: DEBUG oslo_vmware.api [None req-a0cd48f1-0cb8-4707-94a8-39a1a149cb2f tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Waiting for the task: (returnval){ [ 1921.641732] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]52d3fb24-d75f-96f8-a1a8-220dc7a6ee68" [ 1921.641732] env[62519]: _type = "Task" [ 1921.641732] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1921.654451] env[62519]: DEBUG oslo_vmware.api [None req-a0cd48f1-0cb8-4707-94a8-39a1a149cb2f tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52d3fb24-d75f-96f8-a1a8-220dc7a6ee68, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1921.684134] env[62519]: DEBUG oslo_vmware.api [None req-5ae54b1f-1e4d-44cf-8957-4b8cbc5ab975 tempest-ServerShowV257Test-1662468933 tempest-ServerShowV257Test-1662468933-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]522246a7-7429-39e2-f0f2-2ef65b9f61f6, 'name': SearchDatastore_Task, 'duration_secs': 0.027752} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1921.684467] env[62519]: DEBUG oslo_concurrency.lockutils [None req-5ae54b1f-1e4d-44cf-8957-4b8cbc5ab975 tempest-ServerShowV257Test-1662468933 tempest-ServerShowV257Test-1662468933-project-member] Releasing lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1921.684707] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-5ae54b1f-1e4d-44cf-8957-4b8cbc5ab975 tempest-ServerShowV257Test-1662468933 tempest-ServerShowV257Test-1662468933-project-member] [instance: 7bf9ee64-6c45-480e-959f-ff8395b7c446] Processing image 15793716-f1d9-4a86-9030-717adf498693 {{(pid=62519) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1921.685115] env[62519]: DEBUG oslo_concurrency.lockutils [None req-5ae54b1f-1e4d-44cf-8957-4b8cbc5ab975 tempest-ServerShowV257Test-1662468933 tempest-ServerShowV257Test-1662468933-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1921.813509] env[62519]: DEBUG oslo_vmware.api [None req-f902135e-725a-40d2-a5a6-ad0367e185a5 tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Task: {'id': task-1803292, 'name': PowerOnVM_Task} progress is 100%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1922.095890] env[62519]: INFO nova.scheduler.client.report [None req-808b4306-721b-42f7-8b5b-0d7eb6f573b3 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Deleted allocation for migration 298ba8a7-ac5c-49c7-8268-5cab43f13b57 [ 1922.154584] env[62519]: DEBUG oslo_vmware.api [None req-a0cd48f1-0cb8-4707-94a8-39a1a149cb2f tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52d3fb24-d75f-96f8-a1a8-220dc7a6ee68, 'name': SearchDatastore_Task, 'duration_secs': 0.012254} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1922.154929] env[62519]: DEBUG oslo_concurrency.lockutils [None req-a0cd48f1-0cb8-4707-94a8-39a1a149cb2f tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Releasing lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1922.155250] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-a0cd48f1-0cb8-4707-94a8-39a1a149cb2f tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk to [datastore1] c884a374-ffb8-48db-97bb-d64a687694d5/c884a374-ffb8-48db-97bb-d64a687694d5.vmdk {{(pid=62519) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1922.155799] env[62519]: DEBUG oslo_concurrency.lockutils [None req-5ae54b1f-1e4d-44cf-8957-4b8cbc5ab975 tempest-ServerShowV257Test-1662468933 tempest-ServerShowV257Test-1662468933-project-member] Acquired lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1922.156058] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-5ae54b1f-1e4d-44cf-8957-4b8cbc5ab975 tempest-ServerShowV257Test-1662468933 tempest-ServerShowV257Test-1662468933-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62519) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1922.156330] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-84a36686-eba1-42b9-9443-09e434aac6c2 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1922.158700] env[62519]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ba827a29-aea7-4ca2-b8ab-b93041ef313e {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1922.167917] env[62519]: DEBUG oslo_vmware.api [None req-a0cd48f1-0cb8-4707-94a8-39a1a149cb2f tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Waiting for the task: (returnval){ [ 1922.167917] env[62519]: value = "task-1803293" [ 1922.167917] env[62519]: _type = "Task" [ 1922.167917] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1922.172234] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-5ae54b1f-1e4d-44cf-8957-4b8cbc5ab975 tempest-ServerShowV257Test-1662468933 tempest-ServerShowV257Test-1662468933-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62519) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1922.172376] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-5ae54b1f-1e4d-44cf-8957-4b8cbc5ab975 tempest-ServerShowV257Test-1662468933 tempest-ServerShowV257Test-1662468933-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62519) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1922.173374] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-349dbc56-37ba-4ced-8735-c47fc00e2701 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1922.180565] env[62519]: DEBUG oslo_vmware.api [None req-a0cd48f1-0cb8-4707-94a8-39a1a149cb2f tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Task: {'id': task-1803293, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1922.185322] env[62519]: DEBUG oslo_vmware.api [None req-5ae54b1f-1e4d-44cf-8957-4b8cbc5ab975 tempest-ServerShowV257Test-1662468933 tempest-ServerShowV257Test-1662468933-project-member] Waiting for the task: (returnval){ [ 1922.185322] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]526ea2c6-4753-8309-2f6d-f2712e7a4fc6" [ 1922.185322] env[62519]: _type = "Task" [ 1922.185322] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1922.196388] env[62519]: DEBUG oslo_vmware.api [None req-5ae54b1f-1e4d-44cf-8957-4b8cbc5ab975 tempest-ServerShowV257Test-1662468933 tempest-ServerShowV257Test-1662468933-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]526ea2c6-4753-8309-2f6d-f2712e7a4fc6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1922.241557] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7154c46f-eb6e-4359-9eba-4544220ba180 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1922.250210] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa810417-368f-4816-a26e-854d700e580b {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1922.280938] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cbf82785-eb32-4649-8952-1e6717351b24 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1922.288694] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2bd2b29c-8a31-4771-bb32-a87e83127337 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1922.303472] env[62519]: DEBUG nova.compute.provider_tree [None req-63cef62c-1e2a-41fc-b23a-b1025187fc04 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Inventory has not changed in ProviderTree for provider: f8ca0d98-9158-4b85-ae0e-b106f966dd44 {{(pid=62519) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1922.312617] env[62519]: DEBUG oslo_vmware.api [None req-f902135e-725a-40d2-a5a6-ad0367e185a5 tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Task: {'id': task-1803292, 'name': PowerOnVM_Task, 'duration_secs': 0.535643} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1922.313434] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-f902135e-725a-40d2-a5a6-ad0367e185a5 tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] [instance: 8e77653a-2e04-4ed7-a419-289bd4b899d7] Powered on the VM {{(pid=62519) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1922.313642] env[62519]: INFO nova.compute.manager [None req-f902135e-725a-40d2-a5a6-ad0367e185a5 tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] [instance: 8e77653a-2e04-4ed7-a419-289bd4b899d7] Took 7.59 seconds to spawn the instance on the hypervisor. [ 1922.313822] env[62519]: DEBUG nova.compute.manager [None req-f902135e-725a-40d2-a5a6-ad0367e185a5 tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] [instance: 8e77653a-2e04-4ed7-a419-289bd4b899d7] Checking state {{(pid=62519) _get_power_state /opt/stack/nova/nova/compute/manager.py:1799}} [ 1922.314584] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b83a1851-4f47-4ee1-a5ab-56dd65e0c5cd {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1922.601828] env[62519]: DEBUG oslo_concurrency.lockutils [None req-808b4306-721b-42f7-8b5b-0d7eb6f573b3 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Lock "31db4b14-0ba3-4159-accc-31c21bd81322" "released" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: held 9.665s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1922.680192] env[62519]: DEBUG oslo_vmware.api [None req-a0cd48f1-0cb8-4707-94a8-39a1a149cb2f tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Task: {'id': task-1803293, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.458171} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1922.680517] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-a0cd48f1-0cb8-4707-94a8-39a1a149cb2f tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk to [datastore1] c884a374-ffb8-48db-97bb-d64a687694d5/c884a374-ffb8-48db-97bb-d64a687694d5.vmdk {{(pid=62519) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1922.680778] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-a0cd48f1-0cb8-4707-94a8-39a1a149cb2f tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] [instance: c884a374-ffb8-48db-97bb-d64a687694d5] Extending root virtual disk to 1048576 {{(pid=62519) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1922.681097] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-3805620e-05fd-4425-b723-061dccd51c79 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1922.690708] env[62519]: DEBUG oslo_vmware.api [None req-a0cd48f1-0cb8-4707-94a8-39a1a149cb2f tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Waiting for the task: (returnval){ [ 1922.690708] env[62519]: value = "task-1803294" [ 1922.690708] env[62519]: _type = "Task" [ 1922.690708] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1922.699420] env[62519]: DEBUG oslo_vmware.api [None req-5ae54b1f-1e4d-44cf-8957-4b8cbc5ab975 tempest-ServerShowV257Test-1662468933 tempest-ServerShowV257Test-1662468933-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]526ea2c6-4753-8309-2f6d-f2712e7a4fc6, 'name': SearchDatastore_Task, 'duration_secs': 0.010213} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1922.700719] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c2eece91-d1ea-403b-8b64-132ab16eb548 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1922.707735] env[62519]: DEBUG oslo_vmware.api [None req-a0cd48f1-0cb8-4707-94a8-39a1a149cb2f tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Task: {'id': task-1803294, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1922.711155] env[62519]: DEBUG oslo_vmware.api [None req-5ae54b1f-1e4d-44cf-8957-4b8cbc5ab975 tempest-ServerShowV257Test-1662468933 tempest-ServerShowV257Test-1662468933-project-member] Waiting for the task: (returnval){ [ 1922.711155] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]521c54e3-9c83-a9ee-2a55-d4dbb4471b0b" [ 1922.711155] env[62519]: _type = "Task" [ 1922.711155] env[62519]: } to complete. 
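The CopyVirtualDisk_Task and "Extending root virtual disk to 1048576" records above are the tail of the image-cache path: the cached base VMDK is copied next to the instance and the copy is then grown to the flavor's root disk size (1048576 KB here, i.e. 1 GiB). A simplified outline of that order; the callables are hypothetical placeholders, not Nova's real methods:

    # Hypothetical outline of the copy-then-extend flow recorded above.
    def provision_root_disk(copy_virtual_disk, extend_virtual_disk,
                            cached_vmdk, instance_vmdk, root_size_kb):
        # CopyVirtualDisk_Task: cached base image -> per-instance directory.
        copy_virtual_disk(cached_vmdk, instance_vmdk)
        # ExtendVirtualDisk_Task: grow the copy to the requested root disk size.
        extend_virtual_disk(instance_vmdk, root_size_kb)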
{{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1922.721299] env[62519]: DEBUG oslo_vmware.api [None req-5ae54b1f-1e4d-44cf-8957-4b8cbc5ab975 tempest-ServerShowV257Test-1662468933 tempest-ServerShowV257Test-1662468933-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]521c54e3-9c83-a9ee-2a55-d4dbb4471b0b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1922.809141] env[62519]: DEBUG nova.scheduler.client.report [None req-63cef62c-1e2a-41fc-b23a-b1025187fc04 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Inventory has not changed for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1922.835591] env[62519]: INFO nova.compute.manager [None req-f902135e-725a-40d2-a5a6-ad0367e185a5 tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] [instance: 8e77653a-2e04-4ed7-a419-289bd4b899d7] Took 14.58 seconds to build instance. [ 1922.991342] env[62519]: INFO nova.compute.manager [None req-bc2e9e4e-a49a-425d-a5a6-092fccc02e2b tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] [instance: 31db4b14-0ba3-4159-accc-31c21bd81322] Get console output [ 1922.991696] env[62519]: WARNING nova.virt.vmwareapi.driver [None req-bc2e9e4e-a49a-425d-a5a6-092fccc02e2b tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] [instance: 31db4b14-0ba3-4159-accc-31c21bd81322] The console log is missing. Check your VSPC configuration [ 1923.201831] env[62519]: DEBUG oslo_vmware.api [None req-a0cd48f1-0cb8-4707-94a8-39a1a149cb2f tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Task: {'id': task-1803294, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.066644} completed successfully. 
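The inventory record above implies the capacity the scheduler can place against, using placement's capacity formula (total - reserved) * allocation_ratio per resource class. A quick worked example with the exact numbers from that record:

    # Capacity implied by the inventory for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44.
    inventory = {
        'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
    }
    for rc, inv in inventory.items():
        capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
        print(rc, capacity)   # VCPU 192.0, MEMORY_MB 196078.0, DISK_GB 400.0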
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1923.203039] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-a0cd48f1-0cb8-4707-94a8-39a1a149cb2f tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] [instance: c884a374-ffb8-48db-97bb-d64a687694d5] Extended root virtual disk {{(pid=62519) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1923.203601] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f26ef86-7404-40bb-b635-e4de0370af4a {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1923.238877] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-a0cd48f1-0cb8-4707-94a8-39a1a149cb2f tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] [instance: c884a374-ffb8-48db-97bb-d64a687694d5] Reconfiguring VM instance instance-0000006f to attach disk [datastore1] c884a374-ffb8-48db-97bb-d64a687694d5/c884a374-ffb8-48db-97bb-d64a687694d5.vmdk or device None with type sparse {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1923.244185] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-dac76a33-b4f7-4f32-9a2e-f93d8a4e67bd {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1923.268942] env[62519]: DEBUG oslo_vmware.api [None req-5ae54b1f-1e4d-44cf-8957-4b8cbc5ab975 tempest-ServerShowV257Test-1662468933 tempest-ServerShowV257Test-1662468933-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]521c54e3-9c83-a9ee-2a55-d4dbb4471b0b, 'name': SearchDatastore_Task, 'duration_secs': 0.009623} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1923.270736] env[62519]: DEBUG oslo_concurrency.lockutils [None req-5ae54b1f-1e4d-44cf-8957-4b8cbc5ab975 tempest-ServerShowV257Test-1662468933 tempest-ServerShowV257Test-1662468933-project-member] Releasing lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1923.271196] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-5ae54b1f-1e4d-44cf-8957-4b8cbc5ab975 tempest-ServerShowV257Test-1662468933 tempest-ServerShowV257Test-1662468933-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk to [datastore1] 7bf9ee64-6c45-480e-959f-ff8395b7c446/7bf9ee64-6c45-480e-959f-ff8395b7c446.vmdk {{(pid=62519) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1923.271565] env[62519]: DEBUG oslo_vmware.api [None req-a0cd48f1-0cb8-4707-94a8-39a1a149cb2f tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Waiting for the task: (returnval){ [ 1923.271565] env[62519]: value = "task-1803295" [ 1923.271565] env[62519]: _type = "Task" [ 1923.271565] env[62519]: } to complete. 
{{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1923.271763] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-26ec1c1c-82f0-4696-9561-81dbc3b41886 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1923.285555] env[62519]: DEBUG oslo_vmware.api [None req-a0cd48f1-0cb8-4707-94a8-39a1a149cb2f tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Task: {'id': task-1803295, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1923.287038] env[62519]: DEBUG oslo_vmware.api [None req-5ae54b1f-1e4d-44cf-8957-4b8cbc5ab975 tempest-ServerShowV257Test-1662468933 tempest-ServerShowV257Test-1662468933-project-member] Waiting for the task: (returnval){ [ 1923.287038] env[62519]: value = "task-1803296" [ 1923.287038] env[62519]: _type = "Task" [ 1923.287038] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1923.296607] env[62519]: DEBUG oslo_vmware.api [None req-5ae54b1f-1e4d-44cf-8957-4b8cbc5ab975 tempest-ServerShowV257Test-1662468933 tempest-ServerShowV257Test-1662468933-project-member] Task: {'id': task-1803296, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1923.353869] env[62519]: DEBUG nova.compute.manager [req-484cd05e-3feb-4e50-9450-2982baa85169 req-7dd56fe3-9457-4f11-83b8-7a73dc329dc4 service nova] [instance: 8e77653a-2e04-4ed7-a419-289bd4b899d7] Received event network-changed-84761f23-74d8-4c26-8600-641c612981dc {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1923.353869] env[62519]: DEBUG nova.compute.manager [req-484cd05e-3feb-4e50-9450-2982baa85169 req-7dd56fe3-9457-4f11-83b8-7a73dc329dc4 service nova] [instance: 8e77653a-2e04-4ed7-a419-289bd4b899d7] Refreshing instance network info cache due to event network-changed-84761f23-74d8-4c26-8600-641c612981dc. 
{{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11628}} [ 1923.353869] env[62519]: DEBUG oslo_concurrency.lockutils [req-484cd05e-3feb-4e50-9450-2982baa85169 req-7dd56fe3-9457-4f11-83b8-7a73dc329dc4 service nova] Acquiring lock "refresh_cache-8e77653a-2e04-4ed7-a419-289bd4b899d7" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1923.353869] env[62519]: DEBUG oslo_concurrency.lockutils [req-484cd05e-3feb-4e50-9450-2982baa85169 req-7dd56fe3-9457-4f11-83b8-7a73dc329dc4 service nova] Acquired lock "refresh_cache-8e77653a-2e04-4ed7-a419-289bd4b899d7" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1923.353869] env[62519]: DEBUG nova.network.neutron [req-484cd05e-3feb-4e50-9450-2982baa85169 req-7dd56fe3-9457-4f11-83b8-7a73dc329dc4 service nova] [instance: 8e77653a-2e04-4ed7-a419-289bd4b899d7] Refreshing network info cache for port 84761f23-74d8-4c26-8600-641c612981dc {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1923.591474] env[62519]: INFO nova.compute.manager [None req-86476e04-2dcf-4ddc-8b50-25318fb79648 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] [instance: 618a1db6-4056-4380-b5df-395ac14165a7] Rebuilding instance [ 1923.671258] env[62519]: DEBUG nova.compute.manager [None req-86476e04-2dcf-4ddc-8b50-25318fb79648 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] [instance: 618a1db6-4056-4380-b5df-395ac14165a7] Checking state {{(pid=62519) _get_power_state /opt/stack/nova/nova/compute/manager.py:1799}} [ 1923.671258] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a58e1444-46c0-48bc-b901-c7376712bf55 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1923.786112] env[62519]: DEBUG oslo_vmware.api [None req-a0cd48f1-0cb8-4707-94a8-39a1a149cb2f tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Task: {'id': task-1803295, 'name': ReconfigVM_Task, 'duration_secs': 0.342635} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1923.786112] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-a0cd48f1-0cb8-4707-94a8-39a1a149cb2f tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] [instance: c884a374-ffb8-48db-97bb-d64a687694d5] Reconfigured VM instance instance-0000006f to attach disk [datastore1] c884a374-ffb8-48db-97bb-d64a687694d5/c884a374-ffb8-48db-97bb-d64a687694d5.vmdk or device None with type sparse {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1923.786112] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-9f21ad44-a612-4ef1-aafb-8873978f3f5a {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1923.793434] env[62519]: DEBUG oslo_vmware.api [None req-a0cd48f1-0cb8-4707-94a8-39a1a149cb2f tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Waiting for the task: (returnval){ [ 1923.793434] env[62519]: value = "task-1803297" [ 1923.793434] env[62519]: _type = "Task" [ 1923.793434] env[62519]: } to complete. 
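The req-484cd05e-... records trace the external-event path: Neutron reports network-changed for port 84761f23-74d8-4c26-8600-641c612981dc, and the compute manager takes the per-instance refresh_cache lock before rebuilding the network info cache. A hypothetical sketch of that reaction; refresh_network_info_cache() is a placeholder, and only the lock-name shape is taken from the log:

    # Hypothetical sketch of the external-event handling recorded above.
    from oslo_concurrency import lockutils

    def handle_network_changed(instance_uuid, port_id, refresh_network_info_cache):
        # Serialize on "refresh_cache-<instance uuid>", as in the log, then refresh.
        with lockutils.lock("refresh_cache-%s" % instance_uuid):
            refresh_network_info_cache(instance_uuid, port_id)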
{{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1923.797674] env[62519]: DEBUG oslo_vmware.api [None req-5ae54b1f-1e4d-44cf-8957-4b8cbc5ab975 tempest-ServerShowV257Test-1662468933 tempest-ServerShowV257Test-1662468933-project-member] Task: {'id': task-1803296, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.487774} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1923.800832] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-5ae54b1f-1e4d-44cf-8957-4b8cbc5ab975 tempest-ServerShowV257Test-1662468933 tempest-ServerShowV257Test-1662468933-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk to [datastore1] 7bf9ee64-6c45-480e-959f-ff8395b7c446/7bf9ee64-6c45-480e-959f-ff8395b7c446.vmdk {{(pid=62519) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1923.801252] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-5ae54b1f-1e4d-44cf-8957-4b8cbc5ab975 tempest-ServerShowV257Test-1662468933 tempest-ServerShowV257Test-1662468933-project-member] [instance: 7bf9ee64-6c45-480e-959f-ff8395b7c446] Extending root virtual disk to 1048576 {{(pid=62519) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1923.801620] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-f17fbf95-2f6b-4775-bf48-33cd58297d44 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1923.810208] env[62519]: DEBUG oslo_vmware.api [None req-a0cd48f1-0cb8-4707-94a8-39a1a149cb2f tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Task: {'id': task-1803297, 'name': Rename_Task} progress is 10%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1923.814164] env[62519]: DEBUG oslo_vmware.api [None req-5ae54b1f-1e4d-44cf-8957-4b8cbc5ab975 tempest-ServerShowV257Test-1662468933 tempest-ServerShowV257Test-1662468933-project-member] Waiting for the task: (returnval){ [ 1923.814164] env[62519]: value = "task-1803298" [ 1923.814164] env[62519]: _type = "Task" [ 1923.814164] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1923.820183] env[62519]: DEBUG oslo_concurrency.lockutils [None req-63cef62c-1e2a-41fc-b23a-b1025187fc04 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 2.285s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1923.821166] env[62519]: DEBUG nova.compute.manager [None req-63cef62c-1e2a-41fc-b23a-b1025187fc04 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] [instance: 4a0f7975-5a07-4593-ae71-cabebdefe0fe] Resized/migrated instance is powered off. Setting vm_state to 'stopped'. 
{{(pid=62519) _confirm_resize /opt/stack/nova/nova/compute/manager.py:5361}} [ 1923.828413] env[62519]: DEBUG oslo_vmware.api [None req-5ae54b1f-1e4d-44cf-8957-4b8cbc5ab975 tempest-ServerShowV257Test-1662468933 tempest-ServerShowV257Test-1662468933-project-member] Task: {'id': task-1803298, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1924.200632] env[62519]: DEBUG nova.network.neutron [req-484cd05e-3feb-4e50-9450-2982baa85169 req-7dd56fe3-9457-4f11-83b8-7a73dc329dc4 service nova] [instance: 8e77653a-2e04-4ed7-a419-289bd4b899d7] Updated VIF entry in instance network info cache for port 84761f23-74d8-4c26-8600-641c612981dc. {{(pid=62519) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1924.201108] env[62519]: DEBUG nova.network.neutron [req-484cd05e-3feb-4e50-9450-2982baa85169 req-7dd56fe3-9457-4f11-83b8-7a73dc329dc4 service nova] [instance: 8e77653a-2e04-4ed7-a419-289bd4b899d7] Updating instance_info_cache with network_info: [{"id": "84761f23-74d8-4c26-8600-641c612981dc", "address": "fa:16:3e:c5:1e:8d", "network": {"id": "06bc21fc-5712-4650-9d06-18e9cc6afd29", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1062109402-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.213", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "17cd969b1e7d4bd795748560caf80077", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "89f807d9-140f-4a6f-8bce-96795f9482ee", "external-id": "nsx-vlan-transportzone-762", "segmentation_id": 762, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap84761f23-74", "ovs_interfaceid": "84761f23-74d8-4c26-8600-641c612981dc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1924.311543] env[62519]: DEBUG oslo_vmware.api [None req-a0cd48f1-0cb8-4707-94a8-39a1a149cb2f tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Task: {'id': task-1803297, 'name': Rename_Task, 'duration_secs': 0.160114} completed successfully. 
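The instance_info_cache update above carries the full VIF description for port 84761f23-74d8-4c26-8600-641c612981dc. An illustrative walk over that structure (trimmed to the fields used here) to pull the MAC, fixed and floating addresses:

    # Walk the network_info entry shown above (trimmed) and print the addresses.
    vif = {
        "id": "84761f23-74d8-4c26-8600-641c612981dc",
        "address": "fa:16:3e:c5:1e:8d",
        "network": {"subnets": [{"ips": [{"address": "192.168.128.9",
                                          "floating_ips": [{"address": "10.180.180.213"}]}]}]},
    }
    for subnet in vif["network"]["subnets"]:
        for ip in subnet["ips"]:
            floating = [f["address"] for f in ip.get("floating_ips", [])]
            print(vif["address"], ip["address"], floating)
    # -> fa:16:3e:c5:1e:8d 192.168.128.9 ['10.180.180.213']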
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1924.311543] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-a0cd48f1-0cb8-4707-94a8-39a1a149cb2f tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] [instance: c884a374-ffb8-48db-97bb-d64a687694d5] Powering on the VM {{(pid=62519) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1924.311543] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-60de2cf7-9c13-4e10-9a44-2f00ab5e1e96 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1924.318951] env[62519]: DEBUG oslo_vmware.api [None req-a0cd48f1-0cb8-4707-94a8-39a1a149cb2f tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Waiting for the task: (returnval){ [ 1924.318951] env[62519]: value = "task-1803299" [ 1924.318951] env[62519]: _type = "Task" [ 1924.318951] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1924.325627] env[62519]: DEBUG oslo_vmware.api [None req-5ae54b1f-1e4d-44cf-8957-4b8cbc5ab975 tempest-ServerShowV257Test-1662468933 tempest-ServerShowV257Test-1662468933-project-member] Task: {'id': task-1803298, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.070639} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1924.326317] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-5ae54b1f-1e4d-44cf-8957-4b8cbc5ab975 tempest-ServerShowV257Test-1662468933 tempest-ServerShowV257Test-1662468933-project-member] [instance: 7bf9ee64-6c45-480e-959f-ff8395b7c446] Extended root virtual disk {{(pid=62519) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1924.327568] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-939fcff3-1766-4307-b60c-dccab3325e3a {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1924.340590] env[62519]: DEBUG oslo_vmware.api [None req-a0cd48f1-0cb8-4707-94a8-39a1a149cb2f tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Task: {'id': task-1803299, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1924.356440] env[62519]: DEBUG oslo_concurrency.lockutils [None req-f902135e-725a-40d2-a5a6-ad0367e185a5 tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Lock "8e77653a-2e04-4ed7-a419-289bd4b899d7" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 17.104s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1924.366870] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-5ae54b1f-1e4d-44cf-8957-4b8cbc5ab975 tempest-ServerShowV257Test-1662468933 tempest-ServerShowV257Test-1662468933-project-member] [instance: 7bf9ee64-6c45-480e-959f-ff8395b7c446] Reconfiguring VM instance instance-0000006d to attach disk [datastore1] 7bf9ee64-6c45-480e-959f-ff8395b7c446/7bf9ee64-6c45-480e-959f-ff8395b7c446.vmdk or device None with type sparse {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1924.367501] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-01fe1dbe-0820-4913-a155-a30fc88d1a51 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1924.392608] env[62519]: DEBUG oslo_vmware.api [None req-5ae54b1f-1e4d-44cf-8957-4b8cbc5ab975 tempest-ServerShowV257Test-1662468933 tempest-ServerShowV257Test-1662468933-project-member] Waiting for the task: (returnval){ [ 1924.392608] env[62519]: value = "task-1803300" [ 1924.392608] env[62519]: _type = "Task" [ 1924.392608] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1924.404234] env[62519]: DEBUG oslo_vmware.api [None req-5ae54b1f-1e4d-44cf-8957-4b8cbc5ab975 tempest-ServerShowV257Test-1662468933 tempest-ServerShowV257Test-1662468933-project-member] Task: {'id': task-1803300, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1924.455960] env[62519]: INFO nova.scheduler.client.report [None req-63cef62c-1e2a-41fc-b23a-b1025187fc04 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Deleted allocation for migration e85262b2-652f-424f-b78d-dea43ab76545 [ 1924.688738] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-86476e04-2dcf-4ddc-8b50-25318fb79648 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] [instance: 618a1db6-4056-4380-b5df-395ac14165a7] Powering off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1924.689066] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-3dca1fc0-ac3b-4325-942b-c4792da98d05 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1924.697302] env[62519]: DEBUG oslo_vmware.api [None req-86476e04-2dcf-4ddc-8b50-25318fb79648 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Waiting for the task: (returnval){ [ 1924.697302] env[62519]: value = "task-1803301" [ 1924.697302] env[62519]: _type = "Task" [ 1924.697302] env[62519]: } to complete. 
{{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1924.704074] env[62519]: DEBUG oslo_concurrency.lockutils [req-484cd05e-3feb-4e50-9450-2982baa85169 req-7dd56fe3-9457-4f11-83b8-7a73dc329dc4 service nova] Releasing lock "refresh_cache-8e77653a-2e04-4ed7-a419-289bd4b899d7" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1924.708854] env[62519]: DEBUG oslo_vmware.api [None req-86476e04-2dcf-4ddc-8b50-25318fb79648 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Task: {'id': task-1803301, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1924.830563] env[62519]: DEBUG oslo_vmware.api [None req-a0cd48f1-0cb8-4707-94a8-39a1a149cb2f tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Task: {'id': task-1803299, 'name': PowerOnVM_Task} progress is 100%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1924.907890] env[62519]: DEBUG oslo_vmware.api [None req-5ae54b1f-1e4d-44cf-8957-4b8cbc5ab975 tempest-ServerShowV257Test-1662468933 tempest-ServerShowV257Test-1662468933-project-member] Task: {'id': task-1803300, 'name': ReconfigVM_Task, 'duration_secs': 0.332799} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1924.909233] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-5ae54b1f-1e4d-44cf-8957-4b8cbc5ab975 tempest-ServerShowV257Test-1662468933 tempest-ServerShowV257Test-1662468933-project-member] [instance: 7bf9ee64-6c45-480e-959f-ff8395b7c446] Reconfigured VM instance instance-0000006d to attach disk [datastore1] 7bf9ee64-6c45-480e-959f-ff8395b7c446/7bf9ee64-6c45-480e-959f-ff8395b7c446.vmdk or device None with type sparse {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1924.912019] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-76bf337e-a597-4897-9f3d-ca288062ffab {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1924.922355] env[62519]: DEBUG oslo_vmware.api [None req-5ae54b1f-1e4d-44cf-8957-4b8cbc5ab975 tempest-ServerShowV257Test-1662468933 tempest-ServerShowV257Test-1662468933-project-member] Waiting for the task: (returnval){ [ 1924.922355] env[62519]: value = "task-1803302" [ 1924.922355] env[62519]: _type = "Task" [ 1924.922355] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1924.936417] env[62519]: DEBUG oslo_vmware.api [None req-5ae54b1f-1e4d-44cf-8957-4b8cbc5ab975 tempest-ServerShowV257Test-1662468933 tempest-ServerShowV257Test-1662468933-project-member] Task: {'id': task-1803302, 'name': Rename_Task} progress is 5%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1924.959863] env[62519]: DEBUG oslo_concurrency.lockutils [None req-63cef62c-1e2a-41fc-b23a-b1025187fc04 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Lock "4a0f7975-5a07-4593-ae71-cabebdefe0fe" "released" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: held 8.151s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1925.209343] env[62519]: DEBUG oslo_vmware.api [None req-86476e04-2dcf-4ddc-8b50-25318fb79648 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Task: {'id': task-1803301, 'name': PowerOffVM_Task, 'duration_secs': 0.354075} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1925.213058] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-86476e04-2dcf-4ddc-8b50-25318fb79648 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] [instance: 618a1db6-4056-4380-b5df-395ac14165a7] Powered off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1925.213058] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-86476e04-2dcf-4ddc-8b50-25318fb79648 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] [instance: 618a1db6-4056-4380-b5df-395ac14165a7] Destroying instance {{(pid=62519) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1925.213058] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f39af709-4cc2-41ca-a552-0c6c89d661ab {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1925.220434] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-86476e04-2dcf-4ddc-8b50-25318fb79648 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] [instance: 618a1db6-4056-4380-b5df-395ac14165a7] Unregistering the VM {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1925.220863] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-bcb48873-f891-4b06-9a88-8f7d55d30482 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1925.250380] env[62519]: DEBUG nova.objects.instance [None req-75af66fe-360a-4d97-a260-cb1f8a6a6103 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Lazy-loading 'flavor' on Instance uuid 4a0f7975-5a07-4593-ae71-cabebdefe0fe {{(pid=62519) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1925.331067] env[62519]: DEBUG oslo_vmware.api [None req-a0cd48f1-0cb8-4707-94a8-39a1a149cb2f tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Task: {'id': task-1803299, 'name': PowerOnVM_Task, 'duration_secs': 0.516036} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1925.331067] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-a0cd48f1-0cb8-4707-94a8-39a1a149cb2f tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] [instance: c884a374-ffb8-48db-97bb-d64a687694d5] Powered on the VM {{(pid=62519) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1925.331067] env[62519]: INFO nova.compute.manager [None req-a0cd48f1-0cb8-4707-94a8-39a1a149cb2f tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] [instance: c884a374-ffb8-48db-97bb-d64a687694d5] Took 8.27 seconds to spawn the instance on the hypervisor. [ 1925.331310] env[62519]: DEBUG nova.compute.manager [None req-a0cd48f1-0cb8-4707-94a8-39a1a149cb2f tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] [instance: c884a374-ffb8-48db-97bb-d64a687694d5] Checking state {{(pid=62519) _get_power_state /opt/stack/nova/nova/compute/manager.py:1799}} [ 1925.331922] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-128bd415-5cff-4239-814c-892cd05fc358 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1925.435920] env[62519]: DEBUG oslo_vmware.api [None req-5ae54b1f-1e4d-44cf-8957-4b8cbc5ab975 tempest-ServerShowV257Test-1662468933 tempest-ServerShowV257Test-1662468933-project-member] Task: {'id': task-1803302, 'name': Rename_Task, 'duration_secs': 0.1679} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1925.436279] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-5ae54b1f-1e4d-44cf-8957-4b8cbc5ab975 tempest-ServerShowV257Test-1662468933 tempest-ServerShowV257Test-1662468933-project-member] [instance: 7bf9ee64-6c45-480e-959f-ff8395b7c446] Powering on the VM {{(pid=62519) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1925.436571] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b4cf1b8b-70ea-4dad-aede-649d21a3ed1e {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1925.444725] env[62519]: DEBUG oslo_vmware.api [None req-5ae54b1f-1e4d-44cf-8957-4b8cbc5ab975 tempest-ServerShowV257Test-1662468933 tempest-ServerShowV257Test-1662468933-project-member] Waiting for the task: (returnval){ [ 1925.444725] env[62519]: value = "task-1803304" [ 1925.444725] env[62519]: _type = "Task" [ 1925.444725] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1925.454145] env[62519]: DEBUG oslo_vmware.api [None req-5ae54b1f-1e4d-44cf-8957-4b8cbc5ab975 tempest-ServerShowV257Test-1662468933 tempest-ServerShowV257Test-1662468933-project-member] Task: {'id': task-1803304, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1925.455900] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-86476e04-2dcf-4ddc-8b50-25318fb79648 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] [instance: 618a1db6-4056-4380-b5df-395ac14165a7] Unregistered the VM {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1925.456252] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-86476e04-2dcf-4ddc-8b50-25318fb79648 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] [instance: 618a1db6-4056-4380-b5df-395ac14165a7] Deleting contents of the VM from datastore datastore1 {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1925.456370] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-86476e04-2dcf-4ddc-8b50-25318fb79648 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Deleting the datastore file [datastore1] 618a1db6-4056-4380-b5df-395ac14165a7 {{(pid=62519) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1925.456686] env[62519]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-62ddf62e-5c90-42b7-86f2-156ee8ff441e {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1925.464333] env[62519]: DEBUG oslo_vmware.api [None req-86476e04-2dcf-4ddc-8b50-25318fb79648 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Waiting for the task: (returnval){ [ 1925.464333] env[62519]: value = "task-1803305" [ 1925.464333] env[62519]: _type = "Task" [ 1925.464333] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1925.475353] env[62519]: DEBUG oslo_vmware.api [None req-86476e04-2dcf-4ddc-8b50-25318fb79648 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Task: {'id': task-1803305, 'name': DeleteDatastoreFile_Task} progress is 0%. 
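The PowerOffVM_Task, UnregisterVM and DeleteDatastoreFile_Task entries above all follow the same shape: the driver submits a vSphere task, logs "Waiting for the task", polls its progress, and eventually logs "completed successfully" with a duration. A minimal poll-until-done sketch of that pattern (illustrative only; poll_fn and its 'state'/'progress' keys are assumptions, not the oslo.vmware implementation):

import time

def wait_for_task(poll_fn, interval=0.5, timeout=300.0):
    # poll_fn is assumed to return a dict like {'state': 'running', 'progress': 40},
    # ending in {'state': 'success', ...} or {'state': 'error', 'message': '...'}.
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        info = poll_fn()
        if info['state'] == 'success':
            return info                      # e.g. carries 'duration_secs'
        if info['state'] == 'error':
            raise RuntimeError(info.get('message', 'task failed'))
        time.sleep(interval)                 # fixed-interval polling, as in the log
    raise TimeoutError('task did not complete within %.0fs' % timeout)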
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1925.754552] env[62519]: DEBUG oslo_concurrency.lockutils [None req-75af66fe-360a-4d97-a260-cb1f8a6a6103 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Acquiring lock "refresh_cache-4a0f7975-5a07-4593-ae71-cabebdefe0fe" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1925.754733] env[62519]: DEBUG oslo_concurrency.lockutils [None req-75af66fe-360a-4d97-a260-cb1f8a6a6103 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Acquired lock "refresh_cache-4a0f7975-5a07-4593-ae71-cabebdefe0fe" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1925.754949] env[62519]: DEBUG nova.network.neutron [None req-75af66fe-360a-4d97-a260-cb1f8a6a6103 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] [instance: 4a0f7975-5a07-4593-ae71-cabebdefe0fe] Building network info cache for instance {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1925.755142] env[62519]: DEBUG nova.objects.instance [None req-75af66fe-360a-4d97-a260-cb1f8a6a6103 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Lazy-loading 'info_cache' on Instance uuid 4a0f7975-5a07-4593-ae71-cabebdefe0fe {{(pid=62519) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1925.773261] env[62519]: DEBUG oslo_concurrency.lockutils [None req-e4590822-f3b2-48ed-94c4-75949302eaea tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] Acquiring lock "fc3beaba-2ad5-4598-b562-557fdd552b39" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1925.773261] env[62519]: DEBUG oslo_concurrency.lockutils [None req-e4590822-f3b2-48ed-94c4-75949302eaea tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] Lock "fc3beaba-2ad5-4598-b562-557fdd552b39" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1925.856183] env[62519]: INFO nova.compute.manager [None req-a0cd48f1-0cb8-4707-94a8-39a1a149cb2f tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] [instance: c884a374-ffb8-48db-97bb-d64a687694d5] Took 15.00 seconds to build instance. [ 1925.955826] env[62519]: DEBUG oslo_vmware.api [None req-5ae54b1f-1e4d-44cf-8957-4b8cbc5ab975 tempest-ServerShowV257Test-1662468933 tempest-ServerShowV257Test-1662468933-project-member] Task: {'id': task-1803304, 'name': PowerOnVM_Task} progress is 94%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1925.976841] env[62519]: DEBUG oslo_vmware.api [None req-86476e04-2dcf-4ddc-8b50-25318fb79648 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Task: {'id': task-1803305, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.254406} completed successfully. 
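The "Acquiring lock ... by ...", "acquired ... waited 0.000s" and ""released" ... held 8.151s" triplets come from oslo.concurrency's lockutils wrapper, which times how long a caller waited for a named lock and how long it held it. A small usage sketch of the two common forms (lock names and function bodies are illustrative, not Nova's code):

from oslo_concurrency import lockutils

@lockutils.synchronized('4a0f7975-5a07-4593-ae71-cabebdefe0fe')
def do_confirm_resize():
    # Only one thread per lock name enters here at a time; lockutils emits
    # the waited/held DEBUG lines seen above around the call.
    pass

# Equivalent context-manager form for ad-hoc critical sections:
with lockutils.lock('compute_resources'):
    pass  # e.g. update the resource tracker's view of the host under the lock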
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1925.977948] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-86476e04-2dcf-4ddc-8b50-25318fb79648 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Deleted the datastore file {{(pid=62519) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1925.978084] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-86476e04-2dcf-4ddc-8b50-25318fb79648 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] [instance: 618a1db6-4056-4380-b5df-395ac14165a7] Deleted contents of the VM from datastore datastore1 {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1925.978327] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-86476e04-2dcf-4ddc-8b50-25318fb79648 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] [instance: 618a1db6-4056-4380-b5df-395ac14165a7] Instance destroyed {{(pid=62519) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1926.258327] env[62519]: DEBUG nova.objects.base [None req-75af66fe-360a-4d97-a260-cb1f8a6a6103 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Object Instance<4a0f7975-5a07-4593-ae71-cabebdefe0fe> lazy-loaded attributes: flavor,info_cache {{(pid=62519) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1926.276849] env[62519]: DEBUG nova.compute.utils [None req-e4590822-f3b2-48ed-94c4-75949302eaea tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] Using /dev/sd instead of None {{(pid=62519) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1926.461181] env[62519]: DEBUG oslo_vmware.api [None req-5ae54b1f-1e4d-44cf-8957-4b8cbc5ab975 tempest-ServerShowV257Test-1662468933 tempest-ServerShowV257Test-1662468933-project-member] Task: {'id': task-1803304, 'name': PowerOnVM_Task, 'duration_secs': 0.554568} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1926.461648] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-5ae54b1f-1e4d-44cf-8957-4b8cbc5ab975 tempest-ServerShowV257Test-1662468933 tempest-ServerShowV257Test-1662468933-project-member] [instance: 7bf9ee64-6c45-480e-959f-ff8395b7c446] Powered on the VM {{(pid=62519) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1926.462054] env[62519]: DEBUG nova.compute.manager [None req-5ae54b1f-1e4d-44cf-8957-4b8cbc5ab975 tempest-ServerShowV257Test-1662468933 tempest-ServerShowV257Test-1662468933-project-member] [instance: 7bf9ee64-6c45-480e-959f-ff8395b7c446] Checking state {{(pid=62519) _get_power_state /opt/stack/nova/nova/compute/manager.py:1799}} [ 1926.464939] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2348076a-b72d-4b0b-87d6-adb2167eb4b3 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1926.785833] env[62519]: DEBUG oslo_concurrency.lockutils [None req-e4590822-f3b2-48ed-94c4-75949302eaea tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] Lock "fc3beaba-2ad5-4598-b562-557fdd552b39" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.011s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1926.989393] env[62519]: DEBUG oslo_concurrency.lockutils [None req-5ae54b1f-1e4d-44cf-8957-4b8cbc5ab975 tempest-ServerShowV257Test-1662468933 tempest-ServerShowV257Test-1662468933-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1926.992683] env[62519]: DEBUG oslo_concurrency.lockutils [None req-5ae54b1f-1e4d-44cf-8957-4b8cbc5ab975 tempest-ServerShowV257Test-1662468933 tempest-ServerShowV257Test-1662468933-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 0.001s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1926.992683] env[62519]: DEBUG nova.objects.instance [None req-5ae54b1f-1e4d-44cf-8957-4b8cbc5ab975 tempest-ServerShowV257Test-1662468933 tempest-ServerShowV257Test-1662468933-project-member] [instance: 7bf9ee64-6c45-480e-959f-ff8395b7c446] Trying to apply a migration context that does not seem to be set for this instance {{(pid=62519) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1927.034412] env[62519]: DEBUG nova.virt.hardware [None req-86476e04-2dcf-4ddc-8b50-25318fb79648 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T07:56:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta 
ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T07:55:55Z,direct_url=,disk_format='vmdk',id=15793716-f1d9-4a86-9030-717adf498693,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4069337684d348e0a1ab4eb6a1a2b14d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T07:55:56Z,virtual_size=,visibility=), allow threads: False {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1927.035061] env[62519]: DEBUG nova.virt.hardware [None req-86476e04-2dcf-4ddc-8b50-25318fb79648 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Flavor limits 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1927.035271] env[62519]: DEBUG nova.virt.hardware [None req-86476e04-2dcf-4ddc-8b50-25318fb79648 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Image limits 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1927.035497] env[62519]: DEBUG nova.virt.hardware [None req-86476e04-2dcf-4ddc-8b50-25318fb79648 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Flavor pref 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1927.035650] env[62519]: DEBUG nova.virt.hardware [None req-86476e04-2dcf-4ddc-8b50-25318fb79648 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Image pref 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1927.035797] env[62519]: DEBUG nova.virt.hardware [None req-86476e04-2dcf-4ddc-8b50-25318fb79648 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1927.036017] env[62519]: DEBUG nova.virt.hardware [None req-86476e04-2dcf-4ddc-8b50-25318fb79648 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1927.036179] env[62519]: DEBUG nova.virt.hardware [None req-86476e04-2dcf-4ddc-8b50-25318fb79648 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62519) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1927.036365] env[62519]: DEBUG nova.virt.hardware [None req-86476e04-2dcf-4ddc-8b50-25318fb79648 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Got 1 possible topologies {{(pid=62519) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1927.037048] env[62519]: DEBUG nova.virt.hardware [None req-86476e04-2dcf-4ddc-8b50-25318fb79648 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1927.037048] env[62519]: DEBUG nova.virt.hardware 
[None req-86476e04-2dcf-4ddc-8b50-25318fb79648 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1927.037575] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b43b96ce-e179-4369-aba9-fa7dbac6c8c6 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1927.049215] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87834a48-c3b8-496e-94fd-76f14392d6e4 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1927.070133] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-86476e04-2dcf-4ddc-8b50-25318fb79648 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] [instance: 618a1db6-4056-4380-b5df-395ac14165a7] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:f1:f5:3b', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '8e028024-a9c1-4cae-8849-ea770a7ae0e4', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '872d6b35-c4a4-4975-8757-3c87471ba5df', 'vif_model': 'vmxnet3'}] {{(pid=62519) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1927.083142] env[62519]: DEBUG oslo.service.loopingcall [None req-86476e04-2dcf-4ddc-8b50-25318fb79648 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62519) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1927.088293] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 618a1db6-4056-4380-b5df-395ac14165a7] Creating VM on the ESX host {{(pid=62519) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1927.089108] env[62519]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-da775cbf-2241-4885-b12a-f5aae200100d {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1927.119214] env[62519]: DEBUG oslo_concurrency.lockutils [None req-8cf07187-c8df-4127-958d-5525e209b684 tempest-ServerShowV257Test-1662468933 tempest-ServerShowV257Test-1662468933-project-member] Acquiring lock "7bf9ee64-6c45-480e-959f-ff8395b7c446" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1927.119357] env[62519]: DEBUG oslo_concurrency.lockutils [None req-8cf07187-c8df-4127-958d-5525e209b684 tempest-ServerShowV257Test-1662468933 tempest-ServerShowV257Test-1662468933-project-member] Lock "7bf9ee64-6c45-480e-959f-ff8395b7c446" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1927.119656] env[62519]: DEBUG oslo_concurrency.lockutils [None req-8cf07187-c8df-4127-958d-5525e209b684 tempest-ServerShowV257Test-1662468933 tempest-ServerShowV257Test-1662468933-project-member] Acquiring lock "7bf9ee64-6c45-480e-959f-ff8395b7c446-events" by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1927.119966] env[62519]: DEBUG oslo_concurrency.lockutils [None req-8cf07187-c8df-4127-958d-5525e209b684 tempest-ServerShowV257Test-1662468933 tempest-ServerShowV257Test-1662468933-project-member] Lock "7bf9ee64-6c45-480e-959f-ff8395b7c446-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1927.120179] env[62519]: DEBUG oslo_concurrency.lockutils [None req-8cf07187-c8df-4127-958d-5525e209b684 tempest-ServerShowV257Test-1662468933 tempest-ServerShowV257Test-1662468933-project-member] Lock "7bf9ee64-6c45-480e-959f-ff8395b7c446-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1927.125760] env[62519]: INFO nova.compute.manager [None req-8cf07187-c8df-4127-958d-5525e209b684 tempest-ServerShowV257Test-1662468933 tempest-ServerShowV257Test-1662468933-project-member] [instance: 7bf9ee64-6c45-480e-959f-ff8395b7c446] Terminating instance [ 1927.135297] env[62519]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1927.135297] env[62519]: value = "task-1803306" [ 1927.135297] env[62519]: _type = "Task" [ 1927.135297] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1927.147391] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1803306, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1927.313523] env[62519]: DEBUG nova.network.neutron [None req-75af66fe-360a-4d97-a260-cb1f8a6a6103 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] [instance: 4a0f7975-5a07-4593-ae71-cabebdefe0fe] Updating instance_info_cache with network_info: [{"id": "f8b7229e-2b40-4a1b-9e16-0d01dbaa52f8", "address": "fa:16:3e:6c:b5:c8", "network": {"id": "32c7792c-9705-4830-bdbc-4c4159d566f0", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-374750788-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.241", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d73e71476254453fb23164dce09c6d41", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a0a76279-3c11-4bef-b124-2a2ee13fa377", "external-id": "nsx-vlan-transportzone-738", "segmentation_id": 738, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf8b7229e-2b", "ovs_interfaceid": "f8b7229e-2b40-4a1b-9e16-0d01dbaa52f8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1927.366347] env[62519]: DEBUG oslo_concurrency.lockutils [None req-a0cd48f1-0cb8-4707-94a8-39a1a149cb2f tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Lock "c884a374-ffb8-48db-97bb-d64a687694d5" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 17.520s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1927.547035] env[62519]: DEBUG nova.compute.manager [req-6d0ddc9e-9ebe-48f0-94f7-d9f9f0b7a61f req-8b181782-23df-4d45-ae55-9de3c735aa6f service nova] [instance: c884a374-ffb8-48db-97bb-d64a687694d5] Received event network-changed-5a25cf20-af43-4653-8729-93e5e73c5891 {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1927.547204] env[62519]: DEBUG nova.compute.manager [req-6d0ddc9e-9ebe-48f0-94f7-d9f9f0b7a61f req-8b181782-23df-4d45-ae55-9de3c735aa6f service nova] [instance: c884a374-ffb8-48db-97bb-d64a687694d5] Refreshing instance network info cache due to event network-changed-5a25cf20-af43-4653-8729-93e5e73c5891. 
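The instance_info_cache update above stores the full network_info blob: per-port id and MAC, the Neutron network with its subnets, fixed IPs and any attached floating IPs, plus the OVS binding details. A small sketch that pulls the fixed and floating addresses out of such a structure (field names taken from the log entry; the helper itself is illustrative):

def addresses(network_info):
    # network_info is the list-of-ports structure logged above, i.e.
    # [{"id": ..., "network": {"subnets": [{"ips": [{"address": ...,
    #   "floating_ips": [{"address": ...}, ...]}, ...]}, ...]}, ...}]
    fixed, floating = [], []
    for port in network_info:
        for subnet in port["network"]["subnets"]:
            for ip in subnet["ips"]:
                fixed.append(ip["address"])
                floating.extend(f["address"] for f in ip.get("floating_ips", []))
    return fixed, floating

# For the cache entry above this yields (['192.168.128.12'], ['10.180.180.241']).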
{{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11628}} [ 1927.547456] env[62519]: DEBUG oslo_concurrency.lockutils [req-6d0ddc9e-9ebe-48f0-94f7-d9f9f0b7a61f req-8b181782-23df-4d45-ae55-9de3c735aa6f service nova] Acquiring lock "refresh_cache-c884a374-ffb8-48db-97bb-d64a687694d5" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1927.547634] env[62519]: DEBUG oslo_concurrency.lockutils [req-6d0ddc9e-9ebe-48f0-94f7-d9f9f0b7a61f req-8b181782-23df-4d45-ae55-9de3c735aa6f service nova] Acquired lock "refresh_cache-c884a374-ffb8-48db-97bb-d64a687694d5" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1927.548489] env[62519]: DEBUG nova.network.neutron [req-6d0ddc9e-9ebe-48f0-94f7-d9f9f0b7a61f req-8b181782-23df-4d45-ae55-9de3c735aa6f service nova] [instance: c884a374-ffb8-48db-97bb-d64a687694d5] Refreshing network info cache for port 5a25cf20-af43-4653-8729-93e5e73c5891 {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1927.631702] env[62519]: DEBUG oslo_concurrency.lockutils [None req-8cf07187-c8df-4127-958d-5525e209b684 tempest-ServerShowV257Test-1662468933 tempest-ServerShowV257Test-1662468933-project-member] Acquiring lock "refresh_cache-7bf9ee64-6c45-480e-959f-ff8395b7c446" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1927.631915] env[62519]: DEBUG oslo_concurrency.lockutils [None req-8cf07187-c8df-4127-958d-5525e209b684 tempest-ServerShowV257Test-1662468933 tempest-ServerShowV257Test-1662468933-project-member] Acquired lock "refresh_cache-7bf9ee64-6c45-480e-959f-ff8395b7c446" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1927.632083] env[62519]: DEBUG nova.network.neutron [None req-8cf07187-c8df-4127-958d-5525e209b684 tempest-ServerShowV257Test-1662468933 tempest-ServerShowV257Test-1662468933-project-member] [instance: 7bf9ee64-6c45-480e-959f-ff8395b7c446] Building network info cache for instance {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1927.648110] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1803306, 'name': CreateVM_Task, 'duration_secs': 0.378438} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1927.648110] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 618a1db6-4056-4380-b5df-395ac14165a7] Created VM on the ESX host {{(pid=62519) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1927.648110] env[62519]: DEBUG oslo_concurrency.lockutils [None req-86476e04-2dcf-4ddc-8b50-25318fb79648 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1927.648110] env[62519]: DEBUG oslo_concurrency.lockutils [None req-86476e04-2dcf-4ddc-8b50-25318fb79648 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Acquired lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1927.648310] env[62519]: DEBUG oslo_concurrency.lockutils [None req-86476e04-2dcf-4ddc-8b50-25318fb79648 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1927.648601] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-64e99628-3f66-4624-a875-e950ba1ff139 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1927.655594] env[62519]: DEBUG oslo_vmware.api [None req-86476e04-2dcf-4ddc-8b50-25318fb79648 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Waiting for the task: (returnval){ [ 1927.655594] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]52c12b2d-1e8f-648d-9f07-61d93d7b5f2c" [ 1927.655594] env[62519]: _type = "Task" [ 1927.655594] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1927.666399] env[62519]: DEBUG oslo_vmware.api [None req-86476e04-2dcf-4ddc-8b50-25318fb79648 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52c12b2d-1e8f-648d-9f07-61d93d7b5f2c, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1927.818075] env[62519]: DEBUG oslo_concurrency.lockutils [None req-75af66fe-360a-4d97-a260-cb1f8a6a6103 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Releasing lock "refresh_cache-4a0f7975-5a07-4593-ae71-cabebdefe0fe" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1927.875191] env[62519]: DEBUG oslo_concurrency.lockutils [None req-e4590822-f3b2-48ed-94c4-75949302eaea tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] Acquiring lock "fc3beaba-2ad5-4598-b562-557fdd552b39" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1927.876066] env[62519]: DEBUG oslo_concurrency.lockutils [None req-e4590822-f3b2-48ed-94c4-75949302eaea tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] Lock "fc3beaba-2ad5-4598-b562-557fdd552b39" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1927.876066] env[62519]: INFO nova.compute.manager [None req-e4590822-f3b2-48ed-94c4-75949302eaea tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] [instance: fc3beaba-2ad5-4598-b562-557fdd552b39] Attaching volume 3d71e984-2dc9-49f0-8bdb-218fb8ee5444 to /dev/sdb [ 1927.914937] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ca25d5a-94ac-437d-a7cf-e049a6e2561f {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1927.923842] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6658ab4-a289-44bf-8572-71a45e52a94f {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1927.941941] env[62519]: DEBUG nova.virt.block_device [None req-e4590822-f3b2-48ed-94c4-75949302eaea tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] [instance: fc3beaba-2ad5-4598-b562-557fdd552b39] Updating existing volume attachment record: 2e826b56-af17-4ae2-8457-83b6d272a49b {{(pid=62519) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1928.015835] env[62519]: DEBUG oslo_concurrency.lockutils [None req-5ae54b1f-1e4d-44cf-8957-4b8cbc5ab975 tempest-ServerShowV257Test-1662468933 tempest-ServerShowV257Test-1662468933-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.026s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1928.160212] env[62519]: DEBUG nova.network.neutron [None req-8cf07187-c8df-4127-958d-5525e209b684 tempest-ServerShowV257Test-1662468933 tempest-ServerShowV257Test-1662468933-project-member] [instance: 7bf9ee64-6c45-480e-959f-ff8395b7c446] Instance cache missing network info. 
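"Using /dev/sd instead of None" is logged by get_next_device_name when the caller did not request a device: Nova falls back to the /dev/sd prefix and picks the next free letter, which is why the subsequent attach of volume 3d71e984-2dc9-49f0-8bdb-218fb8ee5444 targets /dev/sdb while the root disk typically holds /dev/sda. A toy version of that next-free-letter choice (illustrative, not the nova.compute.utils implementation):

import string

def next_device_name(used, prefix='/dev/sd'):
    # Return the first unused device name under the prefix, e.g. with
    # ['/dev/sda'] already taken this returns '/dev/sdb'.
    for letter in string.ascii_lowercase:
        candidate = prefix + letter
        if candidate not in used:
            return candidate
    raise ValueError('no free device names under %s' % prefix)

print(next_device_name(['/dev/sda']))   # /dev/sdb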
{{(pid=62519) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1928.168304] env[62519]: DEBUG oslo_vmware.api [None req-86476e04-2dcf-4ddc-8b50-25318fb79648 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52c12b2d-1e8f-648d-9f07-61d93d7b5f2c, 'name': SearchDatastore_Task, 'duration_secs': 0.01535} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1928.168614] env[62519]: DEBUG oslo_concurrency.lockutils [None req-86476e04-2dcf-4ddc-8b50-25318fb79648 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Releasing lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1928.168854] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-86476e04-2dcf-4ddc-8b50-25318fb79648 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] [instance: 618a1db6-4056-4380-b5df-395ac14165a7] Processing image 15793716-f1d9-4a86-9030-717adf498693 {{(pid=62519) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1928.169100] env[62519]: DEBUG oslo_concurrency.lockutils [None req-86476e04-2dcf-4ddc-8b50-25318fb79648 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1928.169277] env[62519]: DEBUG oslo_concurrency.lockutils [None req-86476e04-2dcf-4ddc-8b50-25318fb79648 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Acquired lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1928.169481] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-86476e04-2dcf-4ddc-8b50-25318fb79648 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62519) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1928.169759] env[62519]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-fd0eadd9-efea-48ce-b712-8abf3d7555a3 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1928.179742] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-86476e04-2dcf-4ddc-8b50-25318fb79648 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62519) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1928.179954] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-86476e04-2dcf-4ddc-8b50-25318fb79648 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62519) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1928.180788] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a30f67f3-4544-45ba-9a98-0964327b352b {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1928.188754] env[62519]: DEBUG oslo_vmware.api [None req-86476e04-2dcf-4ddc-8b50-25318fb79648 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Waiting for the task: (returnval){ [ 1928.188754] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]5247a853-e9ae-783e-213e-5fc03e8b9fc8" [ 1928.188754] env[62519]: _type = "Task" [ 1928.188754] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1928.198520] env[62519]: DEBUG oslo_vmware.api [None req-86476e04-2dcf-4ddc-8b50-25318fb79648 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]5247a853-e9ae-783e-213e-5fc03e8b9fc8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1928.250688] env[62519]: DEBUG nova.network.neutron [None req-8cf07187-c8df-4127-958d-5525e209b684 tempest-ServerShowV257Test-1662468933 tempest-ServerShowV257Test-1662468933-project-member] [instance: 7bf9ee64-6c45-480e-959f-ff8395b7c446] Updating instance_info_cache with network_info: [] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1928.324841] env[62519]: DEBUG nova.network.neutron [req-6d0ddc9e-9ebe-48f0-94f7-d9f9f0b7a61f req-8b181782-23df-4d45-ae55-9de3c735aa6f service nova] [instance: c884a374-ffb8-48db-97bb-d64a687694d5] Updated VIF entry in instance network info cache for port 5a25cf20-af43-4653-8729-93e5e73c5891. 
{{(pid=62519) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1928.325159] env[62519]: DEBUG nova.network.neutron [req-6d0ddc9e-9ebe-48f0-94f7-d9f9f0b7a61f req-8b181782-23df-4d45-ae55-9de3c735aa6f service nova] [instance: c884a374-ffb8-48db-97bb-d64a687694d5] Updating instance_info_cache with network_info: [{"id": "5a25cf20-af43-4653-8729-93e5e73c5891", "address": "fa:16:3e:9b:6d:c4", "network": {"id": "3996e7f6-f093-4152-af91-6fb77f32a1c5", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-2013009403-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.177", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "63a46158057949478e5c79fbe0d4d5d4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3093647a-bab7-4562-ada0-428725e8c0fc", "external-id": "nsx-vlan-transportzone-660", "segmentation_id": 660, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5a25cf20-af", "ovs_interfaceid": "5a25cf20-af43-4653-8729-93e5e73c5891", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1928.396682] env[62519]: DEBUG oslo_concurrency.lockutils [None req-218c7ba7-bbdd-414d-9aa4-4b5c78a99c20 tempest-ServersNegativeTestMultiTenantJSON-1721524338 tempest-ServersNegativeTestMultiTenantJSON-1721524338-project-member] Acquiring lock "884ba724-1203-4513-a196-8af5258ac731" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1928.396928] env[62519]: DEBUG oslo_concurrency.lockutils [None req-218c7ba7-bbdd-414d-9aa4-4b5c78a99c20 tempest-ServersNegativeTestMultiTenantJSON-1721524338 tempest-ServersNegativeTestMultiTenantJSON-1721524338-project-member] Lock "884ba724-1203-4513-a196-8af5258ac731" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1928.700583] env[62519]: DEBUG oslo_vmware.api [None req-86476e04-2dcf-4ddc-8b50-25318fb79648 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]5247a853-e9ae-783e-213e-5fc03e8b9fc8, 'name': SearchDatastore_Task, 'duration_secs': 0.011247} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1928.701550] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-beda67d0-830d-4cfe-91a7-bf0cc8871bae {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1928.708045] env[62519]: DEBUG oslo_vmware.api [None req-86476e04-2dcf-4ddc-8b50-25318fb79648 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Waiting for the task: (returnval){ [ 1928.708045] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]52af4285-c821-6494-03fa-f4f4bbbaf973" [ 1928.708045] env[62519]: _type = "Task" [ 1928.708045] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1928.716752] env[62519]: DEBUG oslo_vmware.api [None req-86476e04-2dcf-4ddc-8b50-25318fb79648 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52af4285-c821-6494-03fa-f4f4bbbaf973, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1928.756604] env[62519]: DEBUG oslo_concurrency.lockutils [None req-8cf07187-c8df-4127-958d-5525e209b684 tempest-ServerShowV257Test-1662468933 tempest-ServerShowV257Test-1662468933-project-member] Releasing lock "refresh_cache-7bf9ee64-6c45-480e-959f-ff8395b7c446" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1928.757063] env[62519]: DEBUG nova.compute.manager [None req-8cf07187-c8df-4127-958d-5525e209b684 tempest-ServerShowV257Test-1662468933 tempest-ServerShowV257Test-1662468933-project-member] [instance: 7bf9ee64-6c45-480e-959f-ff8395b7c446] Start destroying the instance on the hypervisor. 
{{(pid=62519) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3166}} [ 1928.757313] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-8cf07187-c8df-4127-958d-5525e209b684 tempest-ServerShowV257Test-1662468933 tempest-ServerShowV257Test-1662468933-project-member] [instance: 7bf9ee64-6c45-480e-959f-ff8395b7c446] Destroying instance {{(pid=62519) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1928.758377] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e568891-a573-4fdd-886a-84a7f5543587 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1928.766628] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-8cf07187-c8df-4127-958d-5525e209b684 tempest-ServerShowV257Test-1662468933 tempest-ServerShowV257Test-1662468933-project-member] [instance: 7bf9ee64-6c45-480e-959f-ff8395b7c446] Powering off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1928.766892] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-2c527684-1ffd-4fb9-b6d2-913efc90b3a2 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1928.773765] env[62519]: DEBUG oslo_vmware.api [None req-8cf07187-c8df-4127-958d-5525e209b684 tempest-ServerShowV257Test-1662468933 tempest-ServerShowV257Test-1662468933-project-member] Waiting for the task: (returnval){ [ 1928.773765] env[62519]: value = "task-1803308" [ 1928.773765] env[62519]: _type = "Task" [ 1928.773765] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1928.782025] env[62519]: DEBUG oslo_vmware.api [None req-8cf07187-c8df-4127-958d-5525e209b684 tempest-ServerShowV257Test-1662468933 tempest-ServerShowV257Test-1662468933-project-member] Task: {'id': task-1803308, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1928.827773] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-75af66fe-360a-4d97-a260-cb1f8a6a6103 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] [instance: 4a0f7975-5a07-4593-ae71-cabebdefe0fe] Powering on the VM {{(pid=62519) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1928.828419] env[62519]: DEBUG oslo_concurrency.lockutils [req-6d0ddc9e-9ebe-48f0-94f7-d9f9f0b7a61f req-8b181782-23df-4d45-ae55-9de3c735aa6f service nova] Releasing lock "refresh_cache-c884a374-ffb8-48db-97bb-d64a687694d5" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1928.828852] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-1d06e5e8-7b66-47ae-bfdc-5367a6336b95 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1928.837937] env[62519]: DEBUG oslo_vmware.api [None req-75af66fe-360a-4d97-a260-cb1f8a6a6103 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Waiting for the task: (returnval){ [ 1928.837937] env[62519]: value = "task-1803309" [ 1928.837937] env[62519]: _type = "Task" [ 1928.837937] env[62519]: } to complete. 
{{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1928.852118] env[62519]: DEBUG oslo_vmware.api [None req-75af66fe-360a-4d97-a260-cb1f8a6a6103 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Task: {'id': task-1803309, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1928.900042] env[62519]: DEBUG nova.compute.manager [None req-218c7ba7-bbdd-414d-9aa4-4b5c78a99c20 tempest-ServersNegativeTestMultiTenantJSON-1721524338 tempest-ServersNegativeTestMultiTenantJSON-1721524338-project-member] [instance: 884ba724-1203-4513-a196-8af5258ac731] Starting instance... {{(pid=62519) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2441}} [ 1929.219520] env[62519]: DEBUG oslo_vmware.api [None req-86476e04-2dcf-4ddc-8b50-25318fb79648 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52af4285-c821-6494-03fa-f4f4bbbaf973, 'name': SearchDatastore_Task, 'duration_secs': 0.046495} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1929.219520] env[62519]: DEBUG oslo_concurrency.lockutils [None req-86476e04-2dcf-4ddc-8b50-25318fb79648 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Releasing lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1929.219520] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-86476e04-2dcf-4ddc-8b50-25318fb79648 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk to [datastore1] 618a1db6-4056-4380-b5df-395ac14165a7/618a1db6-4056-4380-b5df-395ac14165a7.vmdk {{(pid=62519) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1929.219670] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-33e61e46-9ed3-4fcc-879b-c2e7633ab2f8 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1929.229124] env[62519]: DEBUG oslo_vmware.api [None req-86476e04-2dcf-4ddc-8b50-25318fb79648 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Waiting for the task: (returnval){ [ 1929.229124] env[62519]: value = "task-1803310" [ 1929.229124] env[62519]: _type = "Task" [ 1929.229124] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1929.237991] env[62519]: DEBUG oslo_vmware.api [None req-86476e04-2dcf-4ddc-8b50-25318fb79648 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Task: {'id': task-1803310, 'name': CopyVirtualDisk_Task} progress is 0%. 
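Taken together, the SearchDatastore_Task, MakeDirectory and CopyVirtualDisk_Task entries describe the spawn path for image 15793716-f1d9-4a86-9030-717adf498693: look the base VMDK up in [datastore1] devstack-image-cache_base under a shared lock, create the cache folder if it is missing, copy the cached disk into the instance folder 618a1db6-4056-4380-b5df-395ac14165a7, then extend it to the flavor's root size. In outline (the datastore path strings follow the log; search/copy/extend are placeholder callables, not the vmwareapi driver API):

def spawn_disk_from_cache(image_id, instance_uuid, root_gb,
                          search, copy, extend, datastore='[datastore1]'):
    # search/copy/extend stand in for the datastore-browser, CopyVirtualDisk
    # and ExtendVirtualDisk task calls visible in the log.
    cache_vmdk = '%s devstack-image-cache_base/%s/%s.vmdk' % (datastore, image_id, image_id)
    instance_vmdk = '%s %s/%s.vmdk' % (datastore, instance_uuid, instance_uuid)
    if not search(cache_vmdk):                 # SearchDatastore_Task: is the base image cached?
        raise LookupError('base image %s not found in the cache' % image_id)
    copy(cache_vmdk, instance_vmdk)            # CopyVirtualDisk_Task
    extend(instance_vmdk, root_gb * 1024 * 1024)  # ExtendVirtualDisk_Task; 1 GB root -> 1048576 (KiB), as logged
    return instance_vmdk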
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1929.284439] env[62519]: DEBUG oslo_vmware.api [None req-8cf07187-c8df-4127-958d-5525e209b684 tempest-ServerShowV257Test-1662468933 tempest-ServerShowV257Test-1662468933-project-member] Task: {'id': task-1803308, 'name': PowerOffVM_Task, 'duration_secs': 0.146837} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1929.284775] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-8cf07187-c8df-4127-958d-5525e209b684 tempest-ServerShowV257Test-1662468933 tempest-ServerShowV257Test-1662468933-project-member] [instance: 7bf9ee64-6c45-480e-959f-ff8395b7c446] Powered off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1929.284973] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-8cf07187-c8df-4127-958d-5525e209b684 tempest-ServerShowV257Test-1662468933 tempest-ServerShowV257Test-1662468933-project-member] [instance: 7bf9ee64-6c45-480e-959f-ff8395b7c446] Unregistering the VM {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1929.285252] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-620c7f89-67fa-443e-b9d8-e50f3ff86280 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1929.323663] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-8cf07187-c8df-4127-958d-5525e209b684 tempest-ServerShowV257Test-1662468933 tempest-ServerShowV257Test-1662468933-project-member] [instance: 7bf9ee64-6c45-480e-959f-ff8395b7c446] Unregistered the VM {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1929.323890] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-8cf07187-c8df-4127-958d-5525e209b684 tempest-ServerShowV257Test-1662468933 tempest-ServerShowV257Test-1662468933-project-member] [instance: 7bf9ee64-6c45-480e-959f-ff8395b7c446] Deleting contents of the VM from datastore datastore1 {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1929.324093] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-8cf07187-c8df-4127-958d-5525e209b684 tempest-ServerShowV257Test-1662468933 tempest-ServerShowV257Test-1662468933-project-member] Deleting the datastore file [datastore1] 7bf9ee64-6c45-480e-959f-ff8395b7c446 {{(pid=62519) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1929.324360] env[62519]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a5af4c6d-e18e-42f6-b8f3-2be5b865c856 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1929.331687] env[62519]: DEBUG oslo_vmware.api [None req-8cf07187-c8df-4127-958d-5525e209b684 tempest-ServerShowV257Test-1662468933 tempest-ServerShowV257Test-1662468933-project-member] Waiting for the task: (returnval){ [ 1929.331687] env[62519]: value = "task-1803312" [ 1929.331687] env[62519]: _type = "Task" [ 1929.331687] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1929.339565] env[62519]: DEBUG oslo_vmware.api [None req-8cf07187-c8df-4127-958d-5525e209b684 tempest-ServerShowV257Test-1662468933 tempest-ServerShowV257Test-1662468933-project-member] Task: {'id': task-1803312, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1929.347454] env[62519]: DEBUG oslo_vmware.api [None req-75af66fe-360a-4d97-a260-cb1f8a6a6103 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Task: {'id': task-1803309, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1929.423208] env[62519]: DEBUG oslo_concurrency.lockutils [None req-218c7ba7-bbdd-414d-9aa4-4b5c78a99c20 tempest-ServersNegativeTestMultiTenantJSON-1721524338 tempest-ServersNegativeTestMultiTenantJSON-1721524338-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1929.423560] env[62519]: DEBUG oslo_concurrency.lockutils [None req-218c7ba7-bbdd-414d-9aa4-4b5c78a99c20 tempest-ServersNegativeTestMultiTenantJSON-1721524338 tempest-ServersNegativeTestMultiTenantJSON-1721524338-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1929.425277] env[62519]: INFO nova.compute.claims [None req-218c7ba7-bbdd-414d-9aa4-4b5c78a99c20 tempest-ServersNegativeTestMultiTenantJSON-1721524338 tempest-ServersNegativeTestMultiTenantJSON-1721524338-project-member] [instance: 884ba724-1203-4513-a196-8af5258ac731] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1929.742391] env[62519]: DEBUG oslo_vmware.api [None req-86476e04-2dcf-4ddc-8b50-25318fb79648 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Task: {'id': task-1803310, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1929.843089] env[62519]: DEBUG oslo_vmware.api [None req-8cf07187-c8df-4127-958d-5525e209b684 tempest-ServerShowV257Test-1662468933 tempest-ServerShowV257Test-1662468933-project-member] Task: {'id': task-1803312, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.268369} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1929.845431] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-8cf07187-c8df-4127-958d-5525e209b684 tempest-ServerShowV257Test-1662468933 tempest-ServerShowV257Test-1662468933-project-member] Deleted the datastore file {{(pid=62519) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1929.845622] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-8cf07187-c8df-4127-958d-5525e209b684 tempest-ServerShowV257Test-1662468933 tempest-ServerShowV257Test-1662468933-project-member] [instance: 7bf9ee64-6c45-480e-959f-ff8395b7c446] Deleted contents of the VM from datastore datastore1 {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1929.845797] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-8cf07187-c8df-4127-958d-5525e209b684 tempest-ServerShowV257Test-1662468933 tempest-ServerShowV257Test-1662468933-project-member] [instance: 7bf9ee64-6c45-480e-959f-ff8395b7c446] Instance destroyed {{(pid=62519) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1929.845966] env[62519]: INFO nova.compute.manager [None req-8cf07187-c8df-4127-958d-5525e209b684 tempest-ServerShowV257Test-1662468933 tempest-ServerShowV257Test-1662468933-project-member] [instance: 7bf9ee64-6c45-480e-959f-ff8395b7c446] Took 1.09 seconds to destroy the instance on the hypervisor. [ 1929.846218] env[62519]: DEBUG oslo.service.loopingcall [None req-8cf07187-c8df-4127-958d-5525e209b684 tempest-ServerShowV257Test-1662468933 tempest-ServerShowV257Test-1662468933-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62519) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1929.846419] env[62519]: DEBUG nova.compute.manager [-] [instance: 7bf9ee64-6c45-480e-959f-ff8395b7c446] Deallocating network for instance {{(pid=62519) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2297}} [ 1929.846515] env[62519]: DEBUG nova.network.neutron [-] [instance: 7bf9ee64-6c45-480e-959f-ff8395b7c446] deallocate_for_instance() {{(pid=62519) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1929.852699] env[62519]: DEBUG oslo_vmware.api [None req-75af66fe-360a-4d97-a260-cb1f8a6a6103 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Task: {'id': task-1803309, 'name': PowerOnVM_Task, 'duration_secs': 0.763793} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1929.852929] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-75af66fe-360a-4d97-a260-cb1f8a6a6103 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] [instance: 4a0f7975-5a07-4593-ae71-cabebdefe0fe] Powered on the VM {{(pid=62519) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1929.853140] env[62519]: DEBUG nova.compute.manager [None req-75af66fe-360a-4d97-a260-cb1f8a6a6103 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] [instance: 4a0f7975-5a07-4593-ae71-cabebdefe0fe] Checking state {{(pid=62519) _get_power_state /opt/stack/nova/nova/compute/manager.py:1799}} [ 1929.853877] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dab1cf41-9aee-4229-abb0-151d0f3b149a {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1929.862983] env[62519]: DEBUG nova.network.neutron [-] [instance: 7bf9ee64-6c45-480e-959f-ff8395b7c446] Instance cache missing network info. {{(pid=62519) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1930.242827] env[62519]: DEBUG oslo_vmware.api [None req-86476e04-2dcf-4ddc-8b50-25318fb79648 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Task: {'id': task-1803310, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.515338} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1930.243143] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-86476e04-2dcf-4ddc-8b50-25318fb79648 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk to [datastore1] 618a1db6-4056-4380-b5df-395ac14165a7/618a1db6-4056-4380-b5df-395ac14165a7.vmdk {{(pid=62519) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1930.243449] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-86476e04-2dcf-4ddc-8b50-25318fb79648 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] [instance: 618a1db6-4056-4380-b5df-395ac14165a7] Extending root virtual disk to 1048576 {{(pid=62519) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1930.243738] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-05278e09-379c-4c54-abd0-78e70c5d380d {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1930.250683] env[62519]: DEBUG oslo_vmware.api [None req-86476e04-2dcf-4ddc-8b50-25318fb79648 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Waiting for the task: (returnval){ [ 1930.250683] env[62519]: value = "task-1803314" [ 1930.250683] env[62519]: _type = "Task" [ 1930.250683] env[62519]: } to complete. 
{{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1930.260298] env[62519]: DEBUG oslo_vmware.api [None req-86476e04-2dcf-4ddc-8b50-25318fb79648 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Task: {'id': task-1803314, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1930.366980] env[62519]: DEBUG nova.network.neutron [-] [instance: 7bf9ee64-6c45-480e-959f-ff8395b7c446] Updating instance_info_cache with network_info: [] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1930.589144] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a80be2c5-3368-440f-a27d-8be55790064b {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1930.597070] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ef68d1d-c621-43a1-8b42-1ce24ef3ee3e {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1930.627715] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f41d96cd-4757-4445-b24d-510688449364 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1930.635272] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8da756f-52b1-4cc1-893d-0027c04a4cc2 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1930.648789] env[62519]: DEBUG nova.compute.provider_tree [None req-218c7ba7-bbdd-414d-9aa4-4b5c78a99c20 tempest-ServersNegativeTestMultiTenantJSON-1721524338 tempest-ServersNegativeTestMultiTenantJSON-1721524338-project-member] Updating inventory in ProviderTree for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 157, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1930.761425] env[62519]: DEBUG oslo_vmware.api [None req-86476e04-2dcf-4ddc-8b50-25318fb79648 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Task: {'id': task-1803314, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.085475} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1930.761709] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-86476e04-2dcf-4ddc-8b50-25318fb79648 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] [instance: 618a1db6-4056-4380-b5df-395ac14165a7] Extended root virtual disk {{(pid=62519) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1930.762476] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9323fbdf-01f8-439b-86a2-5003d814ddfd {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1930.784096] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-86476e04-2dcf-4ddc-8b50-25318fb79648 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] [instance: 618a1db6-4056-4380-b5df-395ac14165a7] Reconfiguring VM instance instance-00000067 to attach disk [datastore1] 618a1db6-4056-4380-b5df-395ac14165a7/618a1db6-4056-4380-b5df-395ac14165a7.vmdk or device None with type sparse {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1930.784360] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b7bfdf39-4c1e-4fea-aef7-9c88f9c60bdd {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1930.805462] env[62519]: DEBUG oslo_vmware.api [None req-86476e04-2dcf-4ddc-8b50-25318fb79648 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Waiting for the task: (returnval){ [ 1930.805462] env[62519]: value = "task-1803315" [ 1930.805462] env[62519]: _type = "Task" [ 1930.805462] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1930.813747] env[62519]: DEBUG oslo_vmware.api [None req-86476e04-2dcf-4ddc-8b50-25318fb79648 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Task: {'id': task-1803315, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1930.869603] env[62519]: INFO nova.compute.manager [-] [instance: 7bf9ee64-6c45-480e-959f-ff8395b7c446] Took 1.02 seconds to deallocate network for instance. [ 1931.171324] env[62519]: ERROR nova.scheduler.client.report [None req-218c7ba7-bbdd-414d-9aa4-4b5c78a99c20 tempest-ServersNegativeTestMultiTenantJSON-1721524338 tempest-ServersNegativeTestMultiTenantJSON-1721524338-project-member] [req-7b58d36c-78a4-46a7-afea-2d2c76daf3fe] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 157, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID f8ca0d98-9158-4b85-ae0e-b106f966dd44. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-7b58d36c-78a4-46a7-afea-2d2c76daf3fe"}]} [ 1931.188136] env[62519]: DEBUG nova.scheduler.client.report [None req-218c7ba7-bbdd-414d-9aa4-4b5c78a99c20 tempest-ServersNegativeTestMultiTenantJSON-1721524338 tempest-ServersNegativeTestMultiTenantJSON-1721524338-project-member] Refreshing inventories for resource provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 {{(pid=62519) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 1931.202323] env[62519]: DEBUG nova.scheduler.client.report [None req-218c7ba7-bbdd-414d-9aa4-4b5c78a99c20 tempest-ServersNegativeTestMultiTenantJSON-1721524338 tempest-ServersNegativeTestMultiTenantJSON-1721524338-project-member] Updating ProviderTree inventory for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 1931.202550] env[62519]: DEBUG nova.compute.provider_tree [None req-218c7ba7-bbdd-414d-9aa4-4b5c78a99c20 tempest-ServersNegativeTestMultiTenantJSON-1721524338 tempest-ServersNegativeTestMultiTenantJSON-1721524338-project-member] Updating inventory in ProviderTree for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1931.214289] env[62519]: DEBUG nova.scheduler.client.report [None req-218c7ba7-bbdd-414d-9aa4-4b5c78a99c20 tempest-ServersNegativeTestMultiTenantJSON-1721524338 tempest-ServersNegativeTestMultiTenantJSON-1721524338-project-member] Refreshing aggregate associations for resource provider f8ca0d98-9158-4b85-ae0e-b106f966dd44, aggregates: None {{(pid=62519) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 1931.232223] env[62519]: DEBUG nova.scheduler.client.report [None req-218c7ba7-bbdd-414d-9aa4-4b5c78a99c20 tempest-ServersNegativeTestMultiTenantJSON-1721524338 tempest-ServersNegativeTestMultiTenantJSON-1721524338-project-member] Refreshing trait associations for resource provider f8ca0d98-9158-4b85-ae0e-b106f966dd44, traits: COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NODE,COMPUTE_SAME_HOST_COLD_MIGRATE,HW_ARCH_X86_64,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_VMDK {{(pid=62519) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 1931.315836] env[62519]: DEBUG oslo_vmware.api [None req-86476e04-2dcf-4ddc-8b50-25318fb79648 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Task: {'id': task-1803315, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1931.378122] env[62519]: DEBUG oslo_concurrency.lockutils [None req-8cf07187-c8df-4127-958d-5525e209b684 tempest-ServerShowV257Test-1662468933 tempest-ServerShowV257Test-1662468933-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1931.409430] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a06bbbbb-7343-43ba-8e47-bbf0f716babc {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1931.417905] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8fff5fdd-d5bc-4944-9b93-a6b221b8f921 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1931.449518] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5e80f94-5d63-4772-90a0-fbf6e84e543a {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1931.457659] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09c6bb68-1f6e-43c1-84dd-61920f541a60 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1931.470928] env[62519]: DEBUG nova.compute.provider_tree [None req-218c7ba7-bbdd-414d-9aa4-4b5c78a99c20 tempest-ServersNegativeTestMultiTenantJSON-1721524338 tempest-ServersNegativeTestMultiTenantJSON-1721524338-project-member] Updating inventory in ProviderTree for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 157, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1931.483490] env[62519]: DEBUG oslo_concurrency.lockutils [None req-89837236-916e-48e0-be85-5c1eb0fffb64 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Acquiring lock "4a0f7975-5a07-4593-ae71-cabebdefe0fe" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1931.483605] env[62519]: DEBUG oslo_concurrency.lockutils [None req-89837236-916e-48e0-be85-5c1eb0fffb64 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Lock "4a0f7975-5a07-4593-ae71-cabebdefe0fe" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1931.483716] env[62519]: DEBUG oslo_concurrency.lockutils [None req-89837236-916e-48e0-be85-5c1eb0fffb64 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Acquiring lock "4a0f7975-5a07-4593-ae71-cabebdefe0fe-events" by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1931.483893] env[62519]: DEBUG oslo_concurrency.lockutils [None req-89837236-916e-48e0-be85-5c1eb0fffb64 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Lock "4a0f7975-5a07-4593-ae71-cabebdefe0fe-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1931.484069] env[62519]: DEBUG oslo_concurrency.lockutils [None req-89837236-916e-48e0-be85-5c1eb0fffb64 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Lock "4a0f7975-5a07-4593-ae71-cabebdefe0fe-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1931.486116] env[62519]: INFO nova.compute.manager [None req-89837236-916e-48e0-be85-5c1eb0fffb64 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] [instance: 4a0f7975-5a07-4593-ae71-cabebdefe0fe] Terminating instance [ 1931.816724] env[62519]: DEBUG oslo_vmware.api [None req-86476e04-2dcf-4ddc-8b50-25318fb79648 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Task: {'id': task-1803315, 'name': ReconfigVM_Task, 'duration_secs': 0.518617} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1931.817038] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-86476e04-2dcf-4ddc-8b50-25318fb79648 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] [instance: 618a1db6-4056-4380-b5df-395ac14165a7] Reconfigured VM instance instance-00000067 to attach disk [datastore1] 618a1db6-4056-4380-b5df-395ac14165a7/618a1db6-4056-4380-b5df-395ac14165a7.vmdk or device None with type sparse {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1931.817691] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-61d28ba7-e871-4fae-9501-9a92d490b5d1 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1931.826126] env[62519]: DEBUG oslo_vmware.api [None req-86476e04-2dcf-4ddc-8b50-25318fb79648 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Waiting for the task: (returnval){ [ 1931.826126] env[62519]: value = "task-1803316" [ 1931.826126] env[62519]: _type = "Task" [ 1931.826126] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1931.835606] env[62519]: DEBUG oslo_vmware.api [None req-86476e04-2dcf-4ddc-8b50-25318fb79648 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Task: {'id': task-1803316, 'name': Rename_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1931.989803] env[62519]: DEBUG nova.compute.manager [None req-89837236-916e-48e0-be85-5c1eb0fffb64 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] [instance: 4a0f7975-5a07-4593-ae71-cabebdefe0fe] Start destroying the instance on the hypervisor. {{(pid=62519) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3166}} [ 1931.990293] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-89837236-916e-48e0-be85-5c1eb0fffb64 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] [instance: 4a0f7975-5a07-4593-ae71-cabebdefe0fe] Destroying instance {{(pid=62519) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1931.991307] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cbb4b826-11bc-4a3c-bf3c-0132835e384f {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1931.999969] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-89837236-916e-48e0-be85-5c1eb0fffb64 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] [instance: 4a0f7975-5a07-4593-ae71-cabebdefe0fe] Powering off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1932.001081] env[62519]: DEBUG nova.scheduler.client.report [None req-218c7ba7-bbdd-414d-9aa4-4b5c78a99c20 tempest-ServersNegativeTestMultiTenantJSON-1721524338 tempest-ServersNegativeTestMultiTenantJSON-1721524338-project-member] Updated inventory for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 with generation 158 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 157, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 1932.001255] env[62519]: DEBUG nova.compute.provider_tree [None req-218c7ba7-bbdd-414d-9aa4-4b5c78a99c20 tempest-ServersNegativeTestMultiTenantJSON-1721524338 tempest-ServersNegativeTestMultiTenantJSON-1721524338-project-member] Updating resource provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 generation from 158 to 159 during operation: update_inventory {{(pid=62519) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1932.001443] env[62519]: DEBUG nova.compute.provider_tree [None req-218c7ba7-bbdd-414d-9aa4-4b5c78a99c20 tempest-ServersNegativeTestMultiTenantJSON-1721524338 tempest-ServersNegativeTestMultiTenantJSON-1721524338-project-member] Updating inventory in ProviderTree for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 157, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1932.004614] env[62519]: DEBUG oslo_vmware.service 
[-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-815f04c5-ddf0-400a-957c-e4440d25c75c {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1932.012832] env[62519]: DEBUG oslo_vmware.api [None req-89837236-916e-48e0-be85-5c1eb0fffb64 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Waiting for the task: (returnval){ [ 1932.012832] env[62519]: value = "task-1803317" [ 1932.012832] env[62519]: _type = "Task" [ 1932.012832] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1932.022746] env[62519]: DEBUG oslo_vmware.api [None req-89837236-916e-48e0-be85-5c1eb0fffb64 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Task: {'id': task-1803317, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1932.337109] env[62519]: DEBUG oslo_vmware.api [None req-86476e04-2dcf-4ddc-8b50-25318fb79648 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Task: {'id': task-1803316, 'name': Rename_Task, 'duration_secs': 0.147025} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1932.337339] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-86476e04-2dcf-4ddc-8b50-25318fb79648 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] [instance: 618a1db6-4056-4380-b5df-395ac14165a7] Powering on the VM {{(pid=62519) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1932.337582] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-13234f8c-5d6d-47b6-bb72-4bf539ff4b29 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1932.344807] env[62519]: DEBUG oslo_vmware.api [None req-86476e04-2dcf-4ddc-8b50-25318fb79648 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Waiting for the task: (returnval){ [ 1932.344807] env[62519]: value = "task-1803318" [ 1932.344807] env[62519]: _type = "Task" [ 1932.344807] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1932.354053] env[62519]: DEBUG oslo_vmware.api [None req-86476e04-2dcf-4ddc-8b50-25318fb79648 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Task: {'id': task-1803318, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1932.486595] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-e4590822-f3b2-48ed-94c4-75949302eaea tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] [instance: fc3beaba-2ad5-4598-b562-557fdd552b39] Volume attach. 
Driver type: vmdk {{(pid=62519) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1932.486882] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-e4590822-f3b2-48ed-94c4-75949302eaea tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] [instance: fc3beaba-2ad5-4598-b562-557fdd552b39] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-373857', 'volume_id': '3d71e984-2dc9-49f0-8bdb-218fb8ee5444', 'name': 'volume-3d71e984-2dc9-49f0-8bdb-218fb8ee5444', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'fc3beaba-2ad5-4598-b562-557fdd552b39', 'attached_at': '', 'detached_at': '', 'volume_id': '3d71e984-2dc9-49f0-8bdb-218fb8ee5444', 'serial': '3d71e984-2dc9-49f0-8bdb-218fb8ee5444'} {{(pid=62519) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1932.487836] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eec79966-755b-4a6a-b013-80cb05819c9f {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1932.505497] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ecd728fe-d89a-4d15-884a-f995ca628ebc {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1932.508683] env[62519]: DEBUG oslo_concurrency.lockutils [None req-218c7ba7-bbdd-414d-9aa4-4b5c78a99c20 tempest-ServersNegativeTestMultiTenantJSON-1721524338 tempest-ServersNegativeTestMultiTenantJSON-1721524338-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.085s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1932.509223] env[62519]: DEBUG nova.compute.manager [None req-218c7ba7-bbdd-414d-9aa4-4b5c78a99c20 tempest-ServersNegativeTestMultiTenantJSON-1721524338 tempest-ServersNegativeTestMultiTenantJSON-1721524338-project-member] [instance: 884ba724-1203-4513-a196-8af5258ac731] Start building networks asynchronously for instance. 
{{(pid=62519) _build_resources /opt/stack/nova/nova/compute/manager.py:2838}} [ 1932.512156] env[62519]: DEBUG oslo_concurrency.lockutils [None req-8cf07187-c8df-4127-958d-5525e209b684 tempest-ServerShowV257Test-1662468933 tempest-ServerShowV257Test-1662468933-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.135s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1932.512447] env[62519]: DEBUG nova.objects.instance [None req-8cf07187-c8df-4127-958d-5525e209b684 tempest-ServerShowV257Test-1662468933 tempest-ServerShowV257Test-1662468933-project-member] Lazy-loading 'resources' on Instance uuid 7bf9ee64-6c45-480e-959f-ff8395b7c446 {{(pid=62519) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1932.538070] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-e4590822-f3b2-48ed-94c4-75949302eaea tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] [instance: fc3beaba-2ad5-4598-b562-557fdd552b39] Reconfiguring VM instance instance-00000069 to attach disk [datastore1] volume-3d71e984-2dc9-49f0-8bdb-218fb8ee5444/volume-3d71e984-2dc9-49f0-8bdb-218fb8ee5444.vmdk or device None with type thin {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1932.539546] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8461ea80-5462-4091-8b81-cdcbcbad23bb {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1932.556915] env[62519]: DEBUG oslo_vmware.api [None req-89837236-916e-48e0-be85-5c1eb0fffb64 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Task: {'id': task-1803317, 'name': PowerOffVM_Task, 'duration_secs': 0.240324} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1932.557590] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-89837236-916e-48e0-be85-5c1eb0fffb64 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] [instance: 4a0f7975-5a07-4593-ae71-cabebdefe0fe] Powered off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1932.557802] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-89837236-916e-48e0-be85-5c1eb0fffb64 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] [instance: 4a0f7975-5a07-4593-ae71-cabebdefe0fe] Unregistering the VM {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1932.558106] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-6ca6b4ec-8758-434d-8e6e-065e300080ce {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1932.563073] env[62519]: DEBUG oslo_vmware.api [None req-e4590822-f3b2-48ed-94c4-75949302eaea tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] Waiting for the task: (returnval){ [ 1932.563073] env[62519]: value = "task-1803319" [ 1932.563073] env[62519]: _type = "Task" [ 1932.563073] env[62519]: } to complete. 
{{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1932.571900] env[62519]: DEBUG oslo_vmware.api [None req-e4590822-f3b2-48ed-94c4-75949302eaea tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] Task: {'id': task-1803319, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1932.645162] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-89837236-916e-48e0-be85-5c1eb0fffb64 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] [instance: 4a0f7975-5a07-4593-ae71-cabebdefe0fe] Unregistered the VM {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1932.645543] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-89837236-916e-48e0-be85-5c1eb0fffb64 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] [instance: 4a0f7975-5a07-4593-ae71-cabebdefe0fe] Deleting contents of the VM from datastore datastore1 {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1932.645866] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-89837236-916e-48e0-be85-5c1eb0fffb64 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Deleting the datastore file [datastore1] 4a0f7975-5a07-4593-ae71-cabebdefe0fe {{(pid=62519) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1932.646245] env[62519]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-9db9988a-614c-4dd1-a803-b4c6b6414d44 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1932.655809] env[62519]: DEBUG oslo_vmware.api [None req-89837236-916e-48e0-be85-5c1eb0fffb64 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Waiting for the task: (returnval){ [ 1932.655809] env[62519]: value = "task-1803321" [ 1932.655809] env[62519]: _type = "Task" [ 1932.655809] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1932.664764] env[62519]: DEBUG oslo_vmware.api [None req-89837236-916e-48e0-be85-5c1eb0fffb64 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Task: {'id': task-1803321, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1932.855820] env[62519]: DEBUG oslo_vmware.api [None req-86476e04-2dcf-4ddc-8b50-25318fb79648 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Task: {'id': task-1803318, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1933.016316] env[62519]: DEBUG nova.compute.utils [None req-218c7ba7-bbdd-414d-9aa4-4b5c78a99c20 tempest-ServersNegativeTestMultiTenantJSON-1721524338 tempest-ServersNegativeTestMultiTenantJSON-1721524338-project-member] Using /dev/sd instead of None {{(pid=62519) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1933.019020] env[62519]: DEBUG nova.compute.manager [None req-218c7ba7-bbdd-414d-9aa4-4b5c78a99c20 tempest-ServersNegativeTestMultiTenantJSON-1721524338 tempest-ServersNegativeTestMultiTenantJSON-1721524338-project-member] [instance: 884ba724-1203-4513-a196-8af5258ac731] Allocating IP information in the background. {{(pid=62519) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1989}} [ 1933.019276] env[62519]: DEBUG nova.network.neutron [None req-218c7ba7-bbdd-414d-9aa4-4b5c78a99c20 tempest-ServersNegativeTestMultiTenantJSON-1721524338 tempest-ServersNegativeTestMultiTenantJSON-1721524338-project-member] [instance: 884ba724-1203-4513-a196-8af5258ac731] allocate_for_instance() {{(pid=62519) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1933.075548] env[62519]: DEBUG oslo_vmware.api [None req-e4590822-f3b2-48ed-94c4-75949302eaea tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] Task: {'id': task-1803319, 'name': ReconfigVM_Task, 'duration_secs': 0.390033} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1933.075905] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-e4590822-f3b2-48ed-94c4-75949302eaea tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] [instance: fc3beaba-2ad5-4598-b562-557fdd552b39] Reconfigured VM instance instance-00000069 to attach disk [datastore1] volume-3d71e984-2dc9-49f0-8bdb-218fb8ee5444/volume-3d71e984-2dc9-49f0-8bdb-218fb8ee5444.vmdk or device None with type thin {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1933.080578] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-25018dc7-dbde-4ad5-a39d-3d44888bad96 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1933.092080] env[62519]: DEBUG nova.policy [None req-218c7ba7-bbdd-414d-9aa4-4b5c78a99c20 tempest-ServersNegativeTestMultiTenantJSON-1721524338 tempest-ServersNegativeTestMultiTenantJSON-1721524338-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '64f04c0a7fec4edf926a8ad35f99bc1d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c0915798010d42be92cafbcc159a7dd1', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62519) authorize /opt/stack/nova/nova/policy.py:192}} [ 1933.102025] env[62519]: DEBUG oslo_vmware.api [None req-e4590822-f3b2-48ed-94c4-75949302eaea tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] Waiting for the task: (returnval){ [ 1933.102025] env[62519]: value = "task-1803322" [ 1933.102025] env[62519]: _type = "Task" [ 1933.102025] env[62519]: } to complete. 
{{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1933.114515] env[62519]: DEBUG oslo_vmware.api [None req-e4590822-f3b2-48ed-94c4-75949302eaea tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] Task: {'id': task-1803322, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1933.166135] env[62519]: DEBUG oslo_vmware.api [None req-89837236-916e-48e0-be85-5c1eb0fffb64 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Task: {'id': task-1803321, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.157195} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1933.166434] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-89837236-916e-48e0-be85-5c1eb0fffb64 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Deleted the datastore file {{(pid=62519) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1933.166659] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-89837236-916e-48e0-be85-5c1eb0fffb64 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] [instance: 4a0f7975-5a07-4593-ae71-cabebdefe0fe] Deleted contents of the VM from datastore datastore1 {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1933.166932] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-89837236-916e-48e0-be85-5c1eb0fffb64 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] [instance: 4a0f7975-5a07-4593-ae71-cabebdefe0fe] Instance destroyed {{(pid=62519) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1933.167071] env[62519]: INFO nova.compute.manager [None req-89837236-916e-48e0-be85-5c1eb0fffb64 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] [instance: 4a0f7975-5a07-4593-ae71-cabebdefe0fe] Took 1.18 seconds to destroy the instance on the hypervisor. [ 1933.167389] env[62519]: DEBUG oslo.service.loopingcall [None req-89837236-916e-48e0-be85-5c1eb0fffb64 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62519) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1933.170220] env[62519]: DEBUG nova.compute.manager [-] [instance: 4a0f7975-5a07-4593-ae71-cabebdefe0fe] Deallocating network for instance {{(pid=62519) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2297}} [ 1933.170339] env[62519]: DEBUG nova.network.neutron [-] [instance: 4a0f7975-5a07-4593-ae71-cabebdefe0fe] deallocate_for_instance() {{(pid=62519) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1933.255709] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bdd0f27b-aa88-48e8-96a9-1ccb0120a04c {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1933.263604] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5eb582a-45e9-4c79-97f9-95b67c2fcfbf {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1933.296664] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e78e789-0381-469d-ade7-580930d079d7 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1933.305240] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-85300227-11e7-4ced-8ff3-1dba08b6a1b1 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1933.320908] env[62519]: DEBUG nova.compute.provider_tree [None req-8cf07187-c8df-4127-958d-5525e209b684 tempest-ServerShowV257Test-1662468933 tempest-ServerShowV257Test-1662468933-project-member] Updating inventory in ProviderTree for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1933.355259] env[62519]: DEBUG oslo_vmware.api [None req-86476e04-2dcf-4ddc-8b50-25318fb79648 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Task: {'id': task-1803318, 'name': PowerOnVM_Task, 'duration_secs': 0.721187} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1933.355502] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-86476e04-2dcf-4ddc-8b50-25318fb79648 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] [instance: 618a1db6-4056-4380-b5df-395ac14165a7] Powered on the VM {{(pid=62519) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1933.355832] env[62519]: DEBUG nova.compute.manager [None req-86476e04-2dcf-4ddc-8b50-25318fb79648 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] [instance: 618a1db6-4056-4380-b5df-395ac14165a7] Checking state {{(pid=62519) _get_power_state /opt/stack/nova/nova/compute/manager.py:1799}} [ 1933.357056] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1b5316c-50a4-4ad4-9d7e-b2e92a150fe2 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1933.511346] env[62519]: DEBUG nova.network.neutron [None req-218c7ba7-bbdd-414d-9aa4-4b5c78a99c20 tempest-ServersNegativeTestMultiTenantJSON-1721524338 tempest-ServersNegativeTestMultiTenantJSON-1721524338-project-member] [instance: 884ba724-1203-4513-a196-8af5258ac731] Successfully created port: 0bc29684-3b3d-44d3-9d82-f78933d230f1 {{(pid=62519) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1933.522919] env[62519]: DEBUG nova.compute.manager [None req-218c7ba7-bbdd-414d-9aa4-4b5c78a99c20 tempest-ServersNegativeTestMultiTenantJSON-1721524338 tempest-ServersNegativeTestMultiTenantJSON-1721524338-project-member] [instance: 884ba724-1203-4513-a196-8af5258ac731] Start building block device mappings for instance. {{(pid=62519) _build_resources /opt/stack/nova/nova/compute/manager.py:2873}} [ 1933.612595] env[62519]: DEBUG oslo_vmware.api [None req-e4590822-f3b2-48ed-94c4-75949302eaea tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] Task: {'id': task-1803322, 'name': ReconfigVM_Task, 'duration_secs': 0.154203} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1933.612863] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-e4590822-f3b2-48ed-94c4-75949302eaea tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] [instance: fc3beaba-2ad5-4598-b562-557fdd552b39] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-373857', 'volume_id': '3d71e984-2dc9-49f0-8bdb-218fb8ee5444', 'name': 'volume-3d71e984-2dc9-49f0-8bdb-218fb8ee5444', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'fc3beaba-2ad5-4598-b562-557fdd552b39', 'attached_at': '', 'detached_at': '', 'volume_id': '3d71e984-2dc9-49f0-8bdb-218fb8ee5444', 'serial': '3d71e984-2dc9-49f0-8bdb-218fb8ee5444'} {{(pid=62519) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1933.854240] env[62519]: DEBUG nova.scheduler.client.report [None req-8cf07187-c8df-4127-958d-5525e209b684 tempest-ServerShowV257Test-1662468933 tempest-ServerShowV257Test-1662468933-project-member] Updated inventory for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 with generation 159 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 1933.854603] env[62519]: DEBUG nova.compute.provider_tree [None req-8cf07187-c8df-4127-958d-5525e209b684 tempest-ServerShowV257Test-1662468933 tempest-ServerShowV257Test-1662468933-project-member] Updating resource provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 generation from 159 to 160 during operation: update_inventory {{(pid=62519) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1933.855091] env[62519]: DEBUG nova.compute.provider_tree [None req-8cf07187-c8df-4127-958d-5525e209b684 tempest-ServerShowV257Test-1662468933 tempest-ServerShowV257Test-1662468933-project-member] Updating inventory in ProviderTree for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1933.875286] env[62519]: DEBUG oslo_concurrency.lockutils [None req-86476e04-2dcf-4ddc-8b50-25318fb79648 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1933.956707] env[62519]: DEBUG nova.compute.manager [req-63deae38-313f-45ba-90eb-73f50a923bdb req-2dc93619-d3a3-464a-8f8e-1bbbe7d2d1cd service nova] [instance: 4a0f7975-5a07-4593-ae71-cabebdefe0fe] Received event 
network-vif-deleted-f8b7229e-2b40-4a1b-9e16-0d01dbaa52f8 {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1933.956707] env[62519]: INFO nova.compute.manager [req-63deae38-313f-45ba-90eb-73f50a923bdb req-2dc93619-d3a3-464a-8f8e-1bbbe7d2d1cd service nova] [instance: 4a0f7975-5a07-4593-ae71-cabebdefe0fe] Neutron deleted interface f8b7229e-2b40-4a1b-9e16-0d01dbaa52f8; detaching it from the instance and deleting it from the info cache [ 1933.956707] env[62519]: DEBUG nova.network.neutron [req-63deae38-313f-45ba-90eb-73f50a923bdb req-2dc93619-d3a3-464a-8f8e-1bbbe7d2d1cd service nova] [instance: 4a0f7975-5a07-4593-ae71-cabebdefe0fe] Updating instance_info_cache with network_info: [] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1934.361708] env[62519]: DEBUG oslo_concurrency.lockutils [None req-8cf07187-c8df-4127-958d-5525e209b684 tempest-ServerShowV257Test-1662468933 tempest-ServerShowV257Test-1662468933-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.848s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1934.364748] env[62519]: DEBUG oslo_concurrency.lockutils [None req-86476e04-2dcf-4ddc-8b50-25318fb79648 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 0.489s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1934.364981] env[62519]: DEBUG nova.objects.instance [None req-86476e04-2dcf-4ddc-8b50-25318fb79648 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] [instance: 618a1db6-4056-4380-b5df-395ac14165a7] Trying to apply a migration context that does not seem to be set for this instance {{(pid=62519) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1934.386263] env[62519]: INFO nova.scheduler.client.report [None req-8cf07187-c8df-4127-958d-5525e209b684 tempest-ServerShowV257Test-1662468933 tempest-ServerShowV257Test-1662468933-project-member] Deleted allocations for instance 7bf9ee64-6c45-480e-959f-ff8395b7c446 [ 1934.415314] env[62519]: DEBUG nova.network.neutron [-] [instance: 4a0f7975-5a07-4593-ae71-cabebdefe0fe] Updating instance_info_cache with network_info: [] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1934.460051] env[62519]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-bb0a2468-ff95-43ca-8dbb-29a094c53570 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1934.470843] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bbe7db16-ed45-4b8a-aff3-30de84c0eb97 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1934.506567] env[62519]: DEBUG nova.compute.manager [req-63deae38-313f-45ba-90eb-73f50a923bdb req-2dc93619-d3a3-464a-8f8e-1bbbe7d2d1cd service nova] [instance: 4a0f7975-5a07-4593-ae71-cabebdefe0fe] Detach interface failed, port_id=f8b7229e-2b40-4a1b-9e16-0d01dbaa52f8, reason: Instance 4a0f7975-5a07-4593-ae71-cabebdefe0fe could not be found. 
{{(pid=62519) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11455}} [ 1934.532234] env[62519]: DEBUG nova.compute.manager [None req-218c7ba7-bbdd-414d-9aa4-4b5c78a99c20 tempest-ServersNegativeTestMultiTenantJSON-1721524338 tempest-ServersNegativeTestMultiTenantJSON-1721524338-project-member] [instance: 884ba724-1203-4513-a196-8af5258ac731] Start spawning the instance on the hypervisor. {{(pid=62519) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2647}} [ 1934.569850] env[62519]: DEBUG nova.virt.hardware [None req-218c7ba7-bbdd-414d-9aa4-4b5c78a99c20 tempest-ServersNegativeTestMultiTenantJSON-1721524338 tempest-ServersNegativeTestMultiTenantJSON-1721524338-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T07:56:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T07:55:55Z,direct_url=,disk_format='vmdk',id=15793716-f1d9-4a86-9030-717adf498693,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4069337684d348e0a1ab4eb6a1a2b14d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T07:55:56Z,virtual_size=,visibility=), allow threads: False {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1934.569850] env[62519]: DEBUG nova.virt.hardware [None req-218c7ba7-bbdd-414d-9aa4-4b5c78a99c20 tempest-ServersNegativeTestMultiTenantJSON-1721524338 tempest-ServersNegativeTestMultiTenantJSON-1721524338-project-member] Flavor limits 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1934.569850] env[62519]: DEBUG nova.virt.hardware [None req-218c7ba7-bbdd-414d-9aa4-4b5c78a99c20 tempest-ServersNegativeTestMultiTenantJSON-1721524338 tempest-ServersNegativeTestMultiTenantJSON-1721524338-project-member] Image limits 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1934.570128] env[62519]: DEBUG nova.virt.hardware [None req-218c7ba7-bbdd-414d-9aa4-4b5c78a99c20 tempest-ServersNegativeTestMultiTenantJSON-1721524338 tempest-ServersNegativeTestMultiTenantJSON-1721524338-project-member] Flavor pref 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1934.570245] env[62519]: DEBUG nova.virt.hardware [None req-218c7ba7-bbdd-414d-9aa4-4b5c78a99c20 tempest-ServersNegativeTestMultiTenantJSON-1721524338 tempest-ServersNegativeTestMultiTenantJSON-1721524338-project-member] Image pref 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1934.570453] env[62519]: DEBUG nova.virt.hardware [None req-218c7ba7-bbdd-414d-9aa4-4b5c78a99c20 tempest-ServersNegativeTestMultiTenantJSON-1721524338 tempest-ServersNegativeTestMultiTenantJSON-1721524338-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1934.570615] env[62519]: DEBUG nova.virt.hardware [None req-218c7ba7-bbdd-414d-9aa4-4b5c78a99c20 tempest-ServersNegativeTestMultiTenantJSON-1721524338 tempest-ServersNegativeTestMultiTenantJSON-1721524338-project-member] 
Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1934.570791] env[62519]: DEBUG nova.virt.hardware [None req-218c7ba7-bbdd-414d-9aa4-4b5c78a99c20 tempest-ServersNegativeTestMultiTenantJSON-1721524338 tempest-ServersNegativeTestMultiTenantJSON-1721524338-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62519) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1934.570959] env[62519]: DEBUG nova.virt.hardware [None req-218c7ba7-bbdd-414d-9aa4-4b5c78a99c20 tempest-ServersNegativeTestMultiTenantJSON-1721524338 tempest-ServersNegativeTestMultiTenantJSON-1721524338-project-member] Got 1 possible topologies {{(pid=62519) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1934.571145] env[62519]: DEBUG nova.virt.hardware [None req-218c7ba7-bbdd-414d-9aa4-4b5c78a99c20 tempest-ServersNegativeTestMultiTenantJSON-1721524338 tempest-ServersNegativeTestMultiTenantJSON-1721524338-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1934.571320] env[62519]: DEBUG nova.virt.hardware [None req-218c7ba7-bbdd-414d-9aa4-4b5c78a99c20 tempest-ServersNegativeTestMultiTenantJSON-1721524338 tempest-ServersNegativeTestMultiTenantJSON-1721524338-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1934.572276] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4dc48b15-10da-4664-a093-6290b6a94ad6 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1934.582259] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13ace573-099c-452d-9074-4c8b33194217 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1934.652707] env[62519]: DEBUG nova.objects.instance [None req-e4590822-f3b2-48ed-94c4-75949302eaea tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] Lazy-loading 'flavor' on Instance uuid fc3beaba-2ad5-4598-b562-557fdd552b39 {{(pid=62519) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1934.894204] env[62519]: DEBUG oslo_concurrency.lockutils [None req-8cf07187-c8df-4127-958d-5525e209b684 tempest-ServerShowV257Test-1662468933 tempest-ServerShowV257Test-1662468933-project-member] Lock "7bf9ee64-6c45-480e-959f-ff8395b7c446" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 7.775s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1934.917935] env[62519]: INFO nova.compute.manager [-] [instance: 4a0f7975-5a07-4593-ae71-cabebdefe0fe] Took 1.75 seconds to deallocate network for instance. 
[ 1935.157512] env[62519]: DEBUG oslo_concurrency.lockutils [None req-e4590822-f3b2-48ed-94c4-75949302eaea tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] Lock "fc3beaba-2ad5-4598-b562-557fdd552b39" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.282s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1935.222175] env[62519]: DEBUG nova.network.neutron [None req-218c7ba7-bbdd-414d-9aa4-4b5c78a99c20 tempest-ServersNegativeTestMultiTenantJSON-1721524338 tempest-ServersNegativeTestMultiTenantJSON-1721524338-project-member] [instance: 884ba724-1203-4513-a196-8af5258ac731] Successfully updated port: 0bc29684-3b3d-44d3-9d82-f78933d230f1 {{(pid=62519) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1935.381436] env[62519]: DEBUG oslo_concurrency.lockutils [None req-86476e04-2dcf-4ddc-8b50-25318fb79648 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.017s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1935.425037] env[62519]: DEBUG oslo_concurrency.lockutils [None req-89837236-916e-48e0-be85-5c1eb0fffb64 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1935.425385] env[62519]: DEBUG oslo_concurrency.lockutils [None req-89837236-916e-48e0-be85-5c1eb0fffb64 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1935.425612] env[62519]: DEBUG oslo_concurrency.lockutils [None req-89837236-916e-48e0-be85-5c1eb0fffb64 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1935.459103] env[62519]: INFO nova.scheduler.client.report [None req-89837236-916e-48e0-be85-5c1eb0fffb64 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Deleted allocations for instance 4a0f7975-5a07-4593-ae71-cabebdefe0fe [ 1935.655263] env[62519]: DEBUG oslo_concurrency.lockutils [None req-a68c47f8-30b9-478d-897e-faa39bbdf33b tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Acquiring lock "f3665f89-1747-4567-9e56-c937d4ac81da" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1935.655694] env[62519]: DEBUG oslo_concurrency.lockutils [None req-a68c47f8-30b9-478d-897e-faa39bbdf33b tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Lock 
"f3665f89-1747-4567-9e56-c937d4ac81da" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.001s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1935.725123] env[62519]: DEBUG oslo_concurrency.lockutils [None req-218c7ba7-bbdd-414d-9aa4-4b5c78a99c20 tempest-ServersNegativeTestMultiTenantJSON-1721524338 tempest-ServersNegativeTestMultiTenantJSON-1721524338-project-member] Acquiring lock "refresh_cache-884ba724-1203-4513-a196-8af5258ac731" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1935.725315] env[62519]: DEBUG oslo_concurrency.lockutils [None req-218c7ba7-bbdd-414d-9aa4-4b5c78a99c20 tempest-ServersNegativeTestMultiTenantJSON-1721524338 tempest-ServersNegativeTestMultiTenantJSON-1721524338-project-member] Acquired lock "refresh_cache-884ba724-1203-4513-a196-8af5258ac731" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1935.725466] env[62519]: DEBUG nova.network.neutron [None req-218c7ba7-bbdd-414d-9aa4-4b5c78a99c20 tempest-ServersNegativeTestMultiTenantJSON-1721524338 tempest-ServersNegativeTestMultiTenantJSON-1721524338-project-member] [instance: 884ba724-1203-4513-a196-8af5258ac731] Building network info cache for instance {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1935.970143] env[62519]: DEBUG oslo_concurrency.lockutils [None req-89837236-916e-48e0-be85-5c1eb0fffb64 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Lock "4a0f7975-5a07-4593-ae71-cabebdefe0fe" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 4.486s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1936.000841] env[62519]: DEBUG nova.compute.manager [req-63d8ce42-8934-4eed-b805-bf97134d95e7 req-6fe29ba1-3979-4bf8-a336-26df8f41b0dc service nova] [instance: 884ba724-1203-4513-a196-8af5258ac731] Received event network-vif-plugged-0bc29684-3b3d-44d3-9d82-f78933d230f1 {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1936.001122] env[62519]: DEBUG oslo_concurrency.lockutils [req-63d8ce42-8934-4eed-b805-bf97134d95e7 req-6fe29ba1-3979-4bf8-a336-26df8f41b0dc service nova] Acquiring lock "884ba724-1203-4513-a196-8af5258ac731-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1936.001352] env[62519]: DEBUG oslo_concurrency.lockutils [req-63d8ce42-8934-4eed-b805-bf97134d95e7 req-6fe29ba1-3979-4bf8-a336-26df8f41b0dc service nova] Lock "884ba724-1203-4513-a196-8af5258ac731-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1936.001530] env[62519]: DEBUG oslo_concurrency.lockutils [req-63d8ce42-8934-4eed-b805-bf97134d95e7 req-6fe29ba1-3979-4bf8-a336-26df8f41b0dc service nova] Lock "884ba724-1203-4513-a196-8af5258ac731-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1936.001695] env[62519]: DEBUG nova.compute.manager 
[req-63d8ce42-8934-4eed-b805-bf97134d95e7 req-6fe29ba1-3979-4bf8-a336-26df8f41b0dc service nova] [instance: 884ba724-1203-4513-a196-8af5258ac731] No waiting events found dispatching network-vif-plugged-0bc29684-3b3d-44d3-9d82-f78933d230f1 {{(pid=62519) pop_instance_event /opt/stack/nova/nova/compute/manager.py:323}} [ 1936.001853] env[62519]: WARNING nova.compute.manager [req-63d8ce42-8934-4eed-b805-bf97134d95e7 req-6fe29ba1-3979-4bf8-a336-26df8f41b0dc service nova] [instance: 884ba724-1203-4513-a196-8af5258ac731] Received unexpected event network-vif-plugged-0bc29684-3b3d-44d3-9d82-f78933d230f1 for instance with vm_state building and task_state spawning. [ 1936.002011] env[62519]: DEBUG nova.compute.manager [req-63d8ce42-8934-4eed-b805-bf97134d95e7 req-6fe29ba1-3979-4bf8-a336-26df8f41b0dc service nova] [instance: 884ba724-1203-4513-a196-8af5258ac731] Received event network-changed-0bc29684-3b3d-44d3-9d82-f78933d230f1 {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1936.002248] env[62519]: DEBUG nova.compute.manager [req-63d8ce42-8934-4eed-b805-bf97134d95e7 req-6fe29ba1-3979-4bf8-a336-26df8f41b0dc service nova] [instance: 884ba724-1203-4513-a196-8af5258ac731] Refreshing instance network info cache due to event network-changed-0bc29684-3b3d-44d3-9d82-f78933d230f1. {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11628}} [ 1936.002408] env[62519]: DEBUG oslo_concurrency.lockutils [req-63d8ce42-8934-4eed-b805-bf97134d95e7 req-6fe29ba1-3979-4bf8-a336-26df8f41b0dc service nova] Acquiring lock "refresh_cache-884ba724-1203-4513-a196-8af5258ac731" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1936.025227] env[62519]: DEBUG oslo_concurrency.lockutils [None req-4e989c08-4a0b-495a-846a-60d593f74fdc tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] Acquiring lock "fc3beaba-2ad5-4598-b562-557fdd552b39" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1936.025553] env[62519]: DEBUG oslo_concurrency.lockutils [None req-4e989c08-4a0b-495a-846a-60d593f74fdc tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] Lock "fc3beaba-2ad5-4598-b562-557fdd552b39" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1936.159339] env[62519]: INFO nova.compute.manager [None req-a68c47f8-30b9-478d-897e-faa39bbdf33b tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] [instance: f3665f89-1747-4567-9e56-c937d4ac81da] Detaching volume 6af3bcc3-934f-4c24-b76d-cd93f73935c8 [ 1936.192649] env[62519]: INFO nova.virt.block_device [None req-a68c47f8-30b9-478d-897e-faa39bbdf33b tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] [instance: f3665f89-1747-4567-9e56-c937d4ac81da] Attempting to driver detach volume 6af3bcc3-934f-4c24-b76d-cd93f73935c8 from mountpoint /dev/sdb [ 1936.192886] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-a68c47f8-30b9-478d-897e-faa39bbdf33b tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] [instance: 
f3665f89-1747-4567-9e56-c937d4ac81da] Volume detach. Driver type: vmdk {{(pid=62519) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1936.193108] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-a68c47f8-30b9-478d-897e-faa39bbdf33b tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] [instance: f3665f89-1747-4567-9e56-c937d4ac81da] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-373839', 'volume_id': '6af3bcc3-934f-4c24-b76d-cd93f73935c8', 'name': 'volume-6af3bcc3-934f-4c24-b76d-cd93f73935c8', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'f3665f89-1747-4567-9e56-c937d4ac81da', 'attached_at': '', 'detached_at': '', 'volume_id': '6af3bcc3-934f-4c24-b76d-cd93f73935c8', 'serial': '6af3bcc3-934f-4c24-b76d-cd93f73935c8'} {{(pid=62519) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1936.194240] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4759c7f-1f04-456a-b360-dcc25d54bd06 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1936.217669] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8169d45e-c707-47ca-bf62-2d0b5afeec76 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1936.225928] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bcc7b9fd-9874-4e0a-8471-7c9aec32bfa2 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1936.248017] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02ff6dcc-363c-49fd-9c03-0217c425cecf {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1936.264879] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-a68c47f8-30b9-478d-897e-faa39bbdf33b tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] The volume has not been displaced from its original location: [datastore1] volume-6af3bcc3-934f-4c24-b76d-cd93f73935c8/volume-6af3bcc3-934f-4c24-b76d-cd93f73935c8.vmdk. No consolidation needed. {{(pid=62519) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1936.270373] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-a68c47f8-30b9-478d-897e-faa39bbdf33b tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] [instance: f3665f89-1747-4567-9e56-c937d4ac81da] Reconfiguring VM instance instance-0000005f to detach disk 2001 {{(pid=62519) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1936.271352] env[62519]: DEBUG nova.network.neutron [None req-218c7ba7-bbdd-414d-9aa4-4b5c78a99c20 tempest-ServersNegativeTestMultiTenantJSON-1721524338 tempest-ServersNegativeTestMultiTenantJSON-1721524338-project-member] [instance: 884ba724-1203-4513-a196-8af5258ac731] Instance cache missing network info. 
{{(pid=62519) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1936.273150] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2ac9c7ec-d2be-4c5b-88b2-e6fa7d473d4f {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1936.292856] env[62519]: DEBUG oslo_vmware.api [None req-a68c47f8-30b9-478d-897e-faa39bbdf33b tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Waiting for the task: (returnval){ [ 1936.292856] env[62519]: value = "task-1803323" [ 1936.292856] env[62519]: _type = "Task" [ 1936.292856] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1936.301064] env[62519]: DEBUG oslo_vmware.api [None req-a68c47f8-30b9-478d-897e-faa39bbdf33b tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Task: {'id': task-1803323, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1936.529076] env[62519]: DEBUG nova.compute.utils [None req-4e989c08-4a0b-495a-846a-60d593f74fdc tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] Using /dev/sd instead of None {{(pid=62519) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1936.726756] env[62519]: DEBUG nova.network.neutron [None req-218c7ba7-bbdd-414d-9aa4-4b5c78a99c20 tempest-ServersNegativeTestMultiTenantJSON-1721524338 tempest-ServersNegativeTestMultiTenantJSON-1721524338-project-member] [instance: 884ba724-1203-4513-a196-8af5258ac731] Updating instance_info_cache with network_info: [{"id": "0bc29684-3b3d-44d3-9d82-f78933d230f1", "address": "fa:16:3e:ee:a9:ac", "network": {"id": "6faa2912-8775-49b4-8111-0a60e8be7a9f", "bridge": "br-int", "label": "tempest-ServersNegativeTestMultiTenantJSON-561784215-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c0915798010d42be92cafbcc159a7dd1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "79c2e589-f55b-4843-8d99-2e565be16706", "external-id": "nsx-vlan-transportzone-858", "segmentation_id": 858, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0bc29684-3b", "ovs_interfaceid": "0bc29684-3b3d-44d3-9d82-f78933d230f1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1936.804867] env[62519]: DEBUG oslo_vmware.api [None req-a68c47f8-30b9-478d-897e-faa39bbdf33b tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Task: {'id': task-1803323, 'name': ReconfigVM_Task, 'duration_secs': 0.422631} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1936.805168] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-a68c47f8-30b9-478d-897e-faa39bbdf33b tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] [instance: f3665f89-1747-4567-9e56-c937d4ac81da] Reconfigured VM instance instance-0000005f to detach disk 2001 {{(pid=62519) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1936.809809] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-fb20b25a-8493-45c2-8f18-ef02b1a96b74 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1936.827648] env[62519]: DEBUG oslo_vmware.api [None req-a68c47f8-30b9-478d-897e-faa39bbdf33b tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Waiting for the task: (returnval){ [ 1936.827648] env[62519]: value = "task-1803324" [ 1936.827648] env[62519]: _type = "Task" [ 1936.827648] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1936.839490] env[62519]: DEBUG oslo_vmware.api [None req-a68c47f8-30b9-478d-897e-faa39bbdf33b tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Task: {'id': task-1803324, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1937.032820] env[62519]: DEBUG oslo_concurrency.lockutils [None req-4e989c08-4a0b-495a-846a-60d593f74fdc tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] Lock "fc3beaba-2ad5-4598-b562-557fdd552b39" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.007s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1937.230683] env[62519]: DEBUG oslo_concurrency.lockutils [None req-218c7ba7-bbdd-414d-9aa4-4b5c78a99c20 tempest-ServersNegativeTestMultiTenantJSON-1721524338 tempest-ServersNegativeTestMultiTenantJSON-1721524338-project-member] Releasing lock "refresh_cache-884ba724-1203-4513-a196-8af5258ac731" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1937.231059] env[62519]: DEBUG nova.compute.manager [None req-218c7ba7-bbdd-414d-9aa4-4b5c78a99c20 tempest-ServersNegativeTestMultiTenantJSON-1721524338 tempest-ServersNegativeTestMultiTenantJSON-1721524338-project-member] [instance: 884ba724-1203-4513-a196-8af5258ac731] Instance network_info: |[{"id": "0bc29684-3b3d-44d3-9d82-f78933d230f1", "address": "fa:16:3e:ee:a9:ac", "network": {"id": "6faa2912-8775-49b4-8111-0a60e8be7a9f", "bridge": "br-int", "label": "tempest-ServersNegativeTestMultiTenantJSON-561784215-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c0915798010d42be92cafbcc159a7dd1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", 
"port_filter": true, "nsx-logical-switch-id": "79c2e589-f55b-4843-8d99-2e565be16706", "external-id": "nsx-vlan-transportzone-858", "segmentation_id": 858, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0bc29684-3b", "ovs_interfaceid": "0bc29684-3b3d-44d3-9d82-f78933d230f1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62519) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2004}} [ 1937.231455] env[62519]: DEBUG oslo_concurrency.lockutils [req-63d8ce42-8934-4eed-b805-bf97134d95e7 req-6fe29ba1-3979-4bf8-a336-26df8f41b0dc service nova] Acquired lock "refresh_cache-884ba724-1203-4513-a196-8af5258ac731" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1937.231548] env[62519]: DEBUG nova.network.neutron [req-63d8ce42-8934-4eed-b805-bf97134d95e7 req-6fe29ba1-3979-4bf8-a336-26df8f41b0dc service nova] [instance: 884ba724-1203-4513-a196-8af5258ac731] Refreshing network info cache for port 0bc29684-3b3d-44d3-9d82-f78933d230f1 {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1937.232767] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-218c7ba7-bbdd-414d-9aa4-4b5c78a99c20 tempest-ServersNegativeTestMultiTenantJSON-1721524338 tempest-ServersNegativeTestMultiTenantJSON-1721524338-project-member] [instance: 884ba724-1203-4513-a196-8af5258ac731] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ee:a9:ac', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '79c2e589-f55b-4843-8d99-2e565be16706', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '0bc29684-3b3d-44d3-9d82-f78933d230f1', 'vif_model': 'vmxnet3'}] {{(pid=62519) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1937.241760] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-218c7ba7-bbdd-414d-9aa4-4b5c78a99c20 tempest-ServersNegativeTestMultiTenantJSON-1721524338 tempest-ServersNegativeTestMultiTenantJSON-1721524338-project-member] Creating folder: Project (c0915798010d42be92cafbcc159a7dd1). Parent ref: group-v373567. {{(pid=62519) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1937.242825] env[62519]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-09f3cfaa-1fa2-46a1-9630-93a618af2a7e {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1937.257844] env[62519]: INFO nova.virt.vmwareapi.vm_util [None req-218c7ba7-bbdd-414d-9aa4-4b5c78a99c20 tempest-ServersNegativeTestMultiTenantJSON-1721524338 tempest-ServersNegativeTestMultiTenantJSON-1721524338-project-member] Created folder: Project (c0915798010d42be92cafbcc159a7dd1) in parent group-v373567. [ 1937.258142] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-218c7ba7-bbdd-414d-9aa4-4b5c78a99c20 tempest-ServersNegativeTestMultiTenantJSON-1721524338 tempest-ServersNegativeTestMultiTenantJSON-1721524338-project-member] Creating folder: Instances. Parent ref: group-v373858. 
{{(pid=62519) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1937.258288] env[62519]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-fd5fad2c-d80e-4721-a99a-61cbbd9d73fe {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1937.268370] env[62519]: INFO nova.virt.vmwareapi.vm_util [None req-218c7ba7-bbdd-414d-9aa4-4b5c78a99c20 tempest-ServersNegativeTestMultiTenantJSON-1721524338 tempest-ServersNegativeTestMultiTenantJSON-1721524338-project-member] Created folder: Instances in parent group-v373858. [ 1937.268618] env[62519]: DEBUG oslo.service.loopingcall [None req-218c7ba7-bbdd-414d-9aa4-4b5c78a99c20 tempest-ServersNegativeTestMultiTenantJSON-1721524338 tempest-ServersNegativeTestMultiTenantJSON-1721524338-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62519) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1937.268808] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 884ba724-1203-4513-a196-8af5258ac731] Creating VM on the ESX host {{(pid=62519) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1937.269029] env[62519]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-0effda24-3168-4a3b-a37b-ce6cc68ddcd3 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1937.289362] env[62519]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1937.289362] env[62519]: value = "task-1803327" [ 1937.289362] env[62519]: _type = "Task" [ 1937.289362] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1937.297373] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1803327, 'name': CreateVM_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1937.337242] env[62519]: DEBUG oslo_vmware.api [None req-a68c47f8-30b9-478d-897e-faa39bbdf33b tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Task: {'id': task-1803324, 'name': ReconfigVM_Task, 'duration_secs': 0.227989} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1937.337552] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-a68c47f8-30b9-478d-897e-faa39bbdf33b tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] [instance: f3665f89-1747-4567-9e56-c937d4ac81da] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-373839', 'volume_id': '6af3bcc3-934f-4c24-b76d-cd93f73935c8', 'name': 'volume-6af3bcc3-934f-4c24-b76d-cd93f73935c8', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'f3665f89-1747-4567-9e56-c937d4ac81da', 'attached_at': '', 'detached_at': '', 'volume_id': '6af3bcc3-934f-4c24-b76d-cd93f73935c8', 'serial': '6af3bcc3-934f-4c24-b76d-cd93f73935c8'} {{(pid=62519) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1937.651760] env[62519]: DEBUG oslo_concurrency.lockutils [None req-e853329e-9967-40cb-bffb-428460c69b3a tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Acquiring lock "9bf88b2f-63f9-466b-8669-45f17319055d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1937.651760] env[62519]: DEBUG oslo_concurrency.lockutils [None req-e853329e-9967-40cb-bffb-428460c69b3a tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Lock "9bf88b2f-63f9-466b-8669-45f17319055d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1937.801034] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1803327, 'name': CreateVM_Task, 'duration_secs': 0.414172} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1937.803964] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 884ba724-1203-4513-a196-8af5258ac731] Created VM on the ESX host {{(pid=62519) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1937.804715] env[62519]: DEBUG oslo_concurrency.lockutils [None req-218c7ba7-bbdd-414d-9aa4-4b5c78a99c20 tempest-ServersNegativeTestMultiTenantJSON-1721524338 tempest-ServersNegativeTestMultiTenantJSON-1721524338-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1937.804926] env[62519]: DEBUG oslo_concurrency.lockutils [None req-218c7ba7-bbdd-414d-9aa4-4b5c78a99c20 tempest-ServersNegativeTestMultiTenantJSON-1721524338 tempest-ServersNegativeTestMultiTenantJSON-1721524338-project-member] Acquired lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1937.805235] env[62519]: DEBUG oslo_concurrency.lockutils [None req-218c7ba7-bbdd-414d-9aa4-4b5c78a99c20 tempest-ServersNegativeTestMultiTenantJSON-1721524338 tempest-ServersNegativeTestMultiTenantJSON-1721524338-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1937.805495] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d319a4b2-4bf6-452d-a468-d75b963fc46a {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1937.811570] env[62519]: DEBUG oslo_vmware.api [None req-218c7ba7-bbdd-414d-9aa4-4b5c78a99c20 tempest-ServersNegativeTestMultiTenantJSON-1721524338 tempest-ServersNegativeTestMultiTenantJSON-1721524338-project-member] Waiting for the task: (returnval){ [ 1937.811570] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]52f5ef01-e40a-c52f-3e71-dce8306ac1e2" [ 1937.811570] env[62519]: _type = "Task" [ 1937.811570] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1937.826070] env[62519]: DEBUG oslo_vmware.api [None req-218c7ba7-bbdd-414d-9aa4-4b5c78a99c20 tempest-ServersNegativeTestMultiTenantJSON-1721524338 tempest-ServersNegativeTestMultiTenantJSON-1721524338-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52f5ef01-e40a-c52f-3e71-dce8306ac1e2, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1937.890365] env[62519]: DEBUG nova.objects.instance [None req-a68c47f8-30b9-478d-897e-faa39bbdf33b tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Lazy-loading 'flavor' on Instance uuid f3665f89-1747-4567-9e56-c937d4ac81da {{(pid=62519) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1937.991881] env[62519]: DEBUG nova.network.neutron [req-63d8ce42-8934-4eed-b805-bf97134d95e7 req-6fe29ba1-3979-4bf8-a336-26df8f41b0dc service nova] [instance: 884ba724-1203-4513-a196-8af5258ac731] Updated VIF entry in instance network info cache for port 0bc29684-3b3d-44d3-9d82-f78933d230f1. {{(pid=62519) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1937.992295] env[62519]: DEBUG nova.network.neutron [req-63d8ce42-8934-4eed-b805-bf97134d95e7 req-6fe29ba1-3979-4bf8-a336-26df8f41b0dc service nova] [instance: 884ba724-1203-4513-a196-8af5258ac731] Updating instance_info_cache with network_info: [{"id": "0bc29684-3b3d-44d3-9d82-f78933d230f1", "address": "fa:16:3e:ee:a9:ac", "network": {"id": "6faa2912-8775-49b4-8111-0a60e8be7a9f", "bridge": "br-int", "label": "tempest-ServersNegativeTestMultiTenantJSON-561784215-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c0915798010d42be92cafbcc159a7dd1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "79c2e589-f55b-4843-8d99-2e565be16706", "external-id": "nsx-vlan-transportzone-858", "segmentation_id": 858, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0bc29684-3b", "ovs_interfaceid": "0bc29684-3b3d-44d3-9d82-f78933d230f1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1938.108655] env[62519]: DEBUG oslo_concurrency.lockutils [None req-4e989c08-4a0b-495a-846a-60d593f74fdc tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] Acquiring lock "fc3beaba-2ad5-4598-b562-557fdd552b39" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1938.108885] env[62519]: DEBUG oslo_concurrency.lockutils [None req-4e989c08-4a0b-495a-846a-60d593f74fdc tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] Lock "fc3beaba-2ad5-4598-b562-557fdd552b39" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1938.109091] env[62519]: INFO nova.compute.manager [None req-4e989c08-4a0b-495a-846a-60d593f74fdc tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] [instance: fc3beaba-2ad5-4598-b562-557fdd552b39] Attaching volume 
8a192ee5-7f6b-43f4-baac-71e4e69cc484 to /dev/sdc [ 1938.142979] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d836080f-aa3b-4b3f-8ad6-33a4b24c7ef6 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1938.151248] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c62d84f9-9e85-4478-b0b7-9de303957931 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1938.157177] env[62519]: DEBUG nova.compute.manager [None req-e853329e-9967-40cb-bffb-428460c69b3a tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] [instance: 9bf88b2f-63f9-466b-8669-45f17319055d] Starting instance... {{(pid=62519) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2441}} [ 1938.173019] env[62519]: DEBUG nova.virt.block_device [None req-4e989c08-4a0b-495a-846a-60d593f74fdc tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] [instance: fc3beaba-2ad5-4598-b562-557fdd552b39] Updating existing volume attachment record: d14fa39a-68e0-4091-9cb4-19f1178094b9 {{(pid=62519) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1938.325035] env[62519]: DEBUG oslo_vmware.api [None req-218c7ba7-bbdd-414d-9aa4-4b5c78a99c20 tempest-ServersNegativeTestMultiTenantJSON-1721524338 tempest-ServersNegativeTestMultiTenantJSON-1721524338-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52f5ef01-e40a-c52f-3e71-dce8306ac1e2, 'name': SearchDatastore_Task, 'duration_secs': 0.012433} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1938.325673] env[62519]: DEBUG oslo_concurrency.lockutils [None req-218c7ba7-bbdd-414d-9aa4-4b5c78a99c20 tempest-ServersNegativeTestMultiTenantJSON-1721524338 tempest-ServersNegativeTestMultiTenantJSON-1721524338-project-member] Releasing lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1938.326153] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-218c7ba7-bbdd-414d-9aa4-4b5c78a99c20 tempest-ServersNegativeTestMultiTenantJSON-1721524338 tempest-ServersNegativeTestMultiTenantJSON-1721524338-project-member] [instance: 884ba724-1203-4513-a196-8af5258ac731] Processing image 15793716-f1d9-4a86-9030-717adf498693 {{(pid=62519) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1938.326517] env[62519]: DEBUG oslo_concurrency.lockutils [None req-218c7ba7-bbdd-414d-9aa4-4b5c78a99c20 tempest-ServersNegativeTestMultiTenantJSON-1721524338 tempest-ServersNegativeTestMultiTenantJSON-1721524338-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1938.327928] env[62519]: DEBUG oslo_concurrency.lockutils [None req-218c7ba7-bbdd-414d-9aa4-4b5c78a99c20 tempest-ServersNegativeTestMultiTenantJSON-1721524338 tempest-ServersNegativeTestMultiTenantJSON-1721524338-project-member] Acquired lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" 
{{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1938.327928] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-218c7ba7-bbdd-414d-9aa4-4b5c78a99c20 tempest-ServersNegativeTestMultiTenantJSON-1721524338 tempest-ServersNegativeTestMultiTenantJSON-1721524338-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62519) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1938.327928] env[62519]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-08f1cd6a-d2a8-4d47-8d3a-c3b495fba808 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1938.343672] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-218c7ba7-bbdd-414d-9aa4-4b5c78a99c20 tempest-ServersNegativeTestMultiTenantJSON-1721524338 tempest-ServersNegativeTestMultiTenantJSON-1721524338-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62519) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1938.343864] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-218c7ba7-bbdd-414d-9aa4-4b5c78a99c20 tempest-ServersNegativeTestMultiTenantJSON-1721524338 tempest-ServersNegativeTestMultiTenantJSON-1721524338-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62519) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1938.344618] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0ab23fa3-7364-420e-b842-4e7e69ec265b {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1938.351610] env[62519]: DEBUG oslo_vmware.api [None req-218c7ba7-bbdd-414d-9aa4-4b5c78a99c20 tempest-ServersNegativeTestMultiTenantJSON-1721524338 tempest-ServersNegativeTestMultiTenantJSON-1721524338-project-member] Waiting for the task: (returnval){ [ 1938.351610] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]522d5a8e-aa8f-4f15-36f3-ec3beeee3acc" [ 1938.351610] env[62519]: _type = "Task" [ 1938.351610] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1938.361116] env[62519]: DEBUG oslo_vmware.api [None req-218c7ba7-bbdd-414d-9aa4-4b5c78a99c20 tempest-ServersNegativeTestMultiTenantJSON-1721524338 tempest-ServersNegativeTestMultiTenantJSON-1721524338-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]522d5a8e-aa8f-4f15-36f3-ec3beeee3acc, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1938.500785] env[62519]: DEBUG oslo_concurrency.lockutils [req-63d8ce42-8934-4eed-b805-bf97134d95e7 req-6fe29ba1-3979-4bf8-a336-26df8f41b0dc service nova] Releasing lock "refresh_cache-884ba724-1203-4513-a196-8af5258ac731" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1938.682193] env[62519]: DEBUG oslo_concurrency.lockutils [None req-e853329e-9967-40cb-bffb-428460c69b3a tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1938.682344] env[62519]: DEBUG oslo_concurrency.lockutils [None req-e853329e-9967-40cb-bffb-428460c69b3a tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1938.684144] env[62519]: INFO nova.compute.claims [None req-e853329e-9967-40cb-bffb-428460c69b3a tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] [instance: 9bf88b2f-63f9-466b-8669-45f17319055d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1938.865769] env[62519]: DEBUG oslo_vmware.api [None req-218c7ba7-bbdd-414d-9aa4-4b5c78a99c20 tempest-ServersNegativeTestMultiTenantJSON-1721524338 tempest-ServersNegativeTestMultiTenantJSON-1721524338-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]522d5a8e-aa8f-4f15-36f3-ec3beeee3acc, 'name': SearchDatastore_Task, 'duration_secs': 0.010777} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1938.866720] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6b50da4e-7b63-405b-8eef-ad64bea2c654 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1938.873759] env[62519]: DEBUG oslo_vmware.api [None req-218c7ba7-bbdd-414d-9aa4-4b5c78a99c20 tempest-ServersNegativeTestMultiTenantJSON-1721524338 tempest-ServersNegativeTestMultiTenantJSON-1721524338-project-member] Waiting for the task: (returnval){ [ 1938.873759] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]52f98c70-0713-7326-bdb0-af5cf2e9d22b" [ 1938.873759] env[62519]: _type = "Task" [ 1938.873759] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1938.886922] env[62519]: DEBUG oslo_vmware.api [None req-218c7ba7-bbdd-414d-9aa4-4b5c78a99c20 tempest-ServersNegativeTestMultiTenantJSON-1721524338 tempest-ServersNegativeTestMultiTenantJSON-1721524338-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52f98c70-0713-7326-bdb0-af5cf2e9d22b, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1938.900627] env[62519]: DEBUG oslo_concurrency.lockutils [None req-a68c47f8-30b9-478d-897e-faa39bbdf33b tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Lock "f3665f89-1747-4567-9e56-c937d4ac81da" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.245s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1939.302423] env[62519]: DEBUG oslo_concurrency.lockutils [None req-7bbed6d2-ea5b-4fc9-bed3-5d105e41bd94 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Acquiring lock "47439070-54d8-454c-bf1d-7a2a33d82e9a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1939.302676] env[62519]: DEBUG oslo_concurrency.lockutils [None req-7bbed6d2-ea5b-4fc9-bed3-5d105e41bd94 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Lock "47439070-54d8-454c-bf1d-7a2a33d82e9a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1939.385856] env[62519]: DEBUG oslo_vmware.api [None req-218c7ba7-bbdd-414d-9aa4-4b5c78a99c20 tempest-ServersNegativeTestMultiTenantJSON-1721524338 tempest-ServersNegativeTestMultiTenantJSON-1721524338-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52f98c70-0713-7326-bdb0-af5cf2e9d22b, 'name': SearchDatastore_Task, 'duration_secs': 0.072632} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1939.388245] env[62519]: DEBUG oslo_concurrency.lockutils [None req-218c7ba7-bbdd-414d-9aa4-4b5c78a99c20 tempest-ServersNegativeTestMultiTenantJSON-1721524338 tempest-ServersNegativeTestMultiTenantJSON-1721524338-project-member] Releasing lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1939.388245] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-218c7ba7-bbdd-414d-9aa4-4b5c78a99c20 tempest-ServersNegativeTestMultiTenantJSON-1721524338 tempest-ServersNegativeTestMultiTenantJSON-1721524338-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk to [datastore1] 884ba724-1203-4513-a196-8af5258ac731/884ba724-1203-4513-a196-8af5258ac731.vmdk {{(pid=62519) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1939.388245] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-847b9701-fd96-45ef-b8e3-e9a9da748945 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1939.396133] env[62519]: DEBUG oslo_vmware.api [None req-218c7ba7-bbdd-414d-9aa4-4b5c78a99c20 tempest-ServersNegativeTestMultiTenantJSON-1721524338 tempest-ServersNegativeTestMultiTenantJSON-1721524338-project-member] Waiting for the task: (returnval){ [ 1939.396133] env[62519]: value = "task-1803329" [ 1939.396133] env[62519]: _type = "Task" [ 1939.396133] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1939.405594] env[62519]: DEBUG oslo_vmware.api [None req-218c7ba7-bbdd-414d-9aa4-4b5c78a99c20 tempest-ServersNegativeTestMultiTenantJSON-1721524338 tempest-ServersNegativeTestMultiTenantJSON-1721524338-project-member] Task: {'id': task-1803329, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1939.808423] env[62519]: DEBUG nova.compute.manager [None req-7bbed6d2-ea5b-4fc9-bed3-5d105e41bd94 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] [instance: 47439070-54d8-454c-bf1d-7a2a33d82e9a] Starting instance... {{(pid=62519) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2441}} [ 1939.893372] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-248bd0b5-69c2-4c3b-b70a-4f3ceba47cb2 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1939.909889] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4793f465-02ff-4251-8b97-0414793a19cd {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1939.914138] env[62519]: DEBUG oslo_vmware.api [None req-218c7ba7-bbdd-414d-9aa4-4b5c78a99c20 tempest-ServersNegativeTestMultiTenantJSON-1721524338 tempest-ServersNegativeTestMultiTenantJSON-1721524338-project-member] Task: {'id': task-1803329, 'name': CopyVirtualDisk_Task} progress is 4%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1939.944509] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c75a86e-6e9e-4971-91e4-e9ebf12c6b57 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1939.954024] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c25d3d1-a7cd-44d6-8251-60392cfef836 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1939.970167] env[62519]: DEBUG nova.compute.provider_tree [None req-e853329e-9967-40cb-bffb-428460c69b3a tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Inventory has not changed in ProviderTree for provider: f8ca0d98-9158-4b85-ae0e-b106f966dd44 {{(pid=62519) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1940.025537] env[62519]: DEBUG oslo_concurrency.lockutils [None req-dbaa8fb2-ac82-42c0-a4eb-9cc78c7ddbac tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Acquiring lock "f3665f89-1747-4567-9e56-c937d4ac81da" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1940.025833] env[62519]: DEBUG oslo_concurrency.lockutils [None req-dbaa8fb2-ac82-42c0-a4eb-9cc78c7ddbac tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Lock "f3665f89-1747-4567-9e56-c937d4ac81da" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1940.026058] env[62519]: DEBUG oslo_concurrency.lockutils [None req-dbaa8fb2-ac82-42c0-a4eb-9cc78c7ddbac tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Acquiring lock "f3665f89-1747-4567-9e56-c937d4ac81da-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1940.026246] env[62519]: DEBUG oslo_concurrency.lockutils [None req-dbaa8fb2-ac82-42c0-a4eb-9cc78c7ddbac tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Lock "f3665f89-1747-4567-9e56-c937d4ac81da-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1940.026419] env[62519]: DEBUG oslo_concurrency.lockutils [None req-dbaa8fb2-ac82-42c0-a4eb-9cc78c7ddbac tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Lock "f3665f89-1747-4567-9e56-c937d4ac81da-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1940.028855] env[62519]: INFO nova.compute.manager [None req-dbaa8fb2-ac82-42c0-a4eb-9cc78c7ddbac tempest-ServerRescueNegativeTestJSON-527997597 
tempest-ServerRescueNegativeTestJSON-527997597-project-member] [instance: f3665f89-1747-4567-9e56-c937d4ac81da] Terminating instance [ 1940.327514] env[62519]: DEBUG oslo_concurrency.lockutils [None req-7bbed6d2-ea5b-4fc9-bed3-5d105e41bd94 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1940.408101] env[62519]: DEBUG oslo_vmware.api [None req-218c7ba7-bbdd-414d-9aa4-4b5c78a99c20 tempest-ServersNegativeTestMultiTenantJSON-1721524338 tempest-ServersNegativeTestMultiTenantJSON-1721524338-project-member] Task: {'id': task-1803329, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.745938} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1940.408415] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-218c7ba7-bbdd-414d-9aa4-4b5c78a99c20 tempest-ServersNegativeTestMultiTenantJSON-1721524338 tempest-ServersNegativeTestMultiTenantJSON-1721524338-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk to [datastore1] 884ba724-1203-4513-a196-8af5258ac731/884ba724-1203-4513-a196-8af5258ac731.vmdk {{(pid=62519) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1940.408657] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-218c7ba7-bbdd-414d-9aa4-4b5c78a99c20 tempest-ServersNegativeTestMultiTenantJSON-1721524338 tempest-ServersNegativeTestMultiTenantJSON-1721524338-project-member] [instance: 884ba724-1203-4513-a196-8af5258ac731] Extending root virtual disk to 1048576 {{(pid=62519) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1940.408955] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-2aaa2130-f0cc-41ae-ac76-d910dee87573 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1940.417385] env[62519]: DEBUG oslo_vmware.api [None req-218c7ba7-bbdd-414d-9aa4-4b5c78a99c20 tempest-ServersNegativeTestMultiTenantJSON-1721524338 tempest-ServersNegativeTestMultiTenantJSON-1721524338-project-member] Waiting for the task: (returnval){ [ 1940.417385] env[62519]: value = "task-1803331" [ 1940.417385] env[62519]: _type = "Task" [ 1940.417385] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1940.427069] env[62519]: DEBUG oslo_vmware.api [None req-218c7ba7-bbdd-414d-9aa4-4b5c78a99c20 tempest-ServersNegativeTestMultiTenantJSON-1721524338 tempest-ServersNegativeTestMultiTenantJSON-1721524338-project-member] Task: {'id': task-1803331, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1940.473346] env[62519]: DEBUG nova.scheduler.client.report [None req-e853329e-9967-40cb-bffb-428460c69b3a tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Inventory has not changed for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1940.533084] env[62519]: DEBUG nova.compute.manager [None req-dbaa8fb2-ac82-42c0-a4eb-9cc78c7ddbac tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] [instance: f3665f89-1747-4567-9e56-c937d4ac81da] Start destroying the instance on the hypervisor. {{(pid=62519) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3166}} [ 1940.533319] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-dbaa8fb2-ac82-42c0-a4eb-9cc78c7ddbac tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] [instance: f3665f89-1747-4567-9e56-c937d4ac81da] Destroying instance {{(pid=62519) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1940.534215] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4736d174-af17-4b56-8240-881260c24ccc {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1940.543329] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-dbaa8fb2-ac82-42c0-a4eb-9cc78c7ddbac tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] [instance: f3665f89-1747-4567-9e56-c937d4ac81da] Powering off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1940.543621] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-1a82c1e2-7cbb-40f2-96eb-19422197bba9 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1940.551602] env[62519]: DEBUG oslo_vmware.api [None req-dbaa8fb2-ac82-42c0-a4eb-9cc78c7ddbac tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Waiting for the task: (returnval){ [ 1940.551602] env[62519]: value = "task-1803332" [ 1940.551602] env[62519]: _type = "Task" [ 1940.551602] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1940.562260] env[62519]: DEBUG oslo_vmware.api [None req-dbaa8fb2-ac82-42c0-a4eb-9cc78c7ddbac tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Task: {'id': task-1803332, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1940.928270] env[62519]: DEBUG oslo_vmware.api [None req-218c7ba7-bbdd-414d-9aa4-4b5c78a99c20 tempest-ServersNegativeTestMultiTenantJSON-1721524338 tempest-ServersNegativeTestMultiTenantJSON-1721524338-project-member] Task: {'id': task-1803331, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.064744} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1940.929218] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-218c7ba7-bbdd-414d-9aa4-4b5c78a99c20 tempest-ServersNegativeTestMultiTenantJSON-1721524338 tempest-ServersNegativeTestMultiTenantJSON-1721524338-project-member] [instance: 884ba724-1203-4513-a196-8af5258ac731] Extended root virtual disk {{(pid=62519) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1940.929592] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5fccc9b5-1698-4c75-ab84-a19f2eb8b6a6 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1940.954107] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-218c7ba7-bbdd-414d-9aa4-4b5c78a99c20 tempest-ServersNegativeTestMultiTenantJSON-1721524338 tempest-ServersNegativeTestMultiTenantJSON-1721524338-project-member] [instance: 884ba724-1203-4513-a196-8af5258ac731] Reconfiguring VM instance instance-00000070 to attach disk [datastore1] 884ba724-1203-4513-a196-8af5258ac731/884ba724-1203-4513-a196-8af5258ac731.vmdk or device None with type sparse {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1940.954479] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e8d2d1b9-fce9-4eb0-aafc-dfb486de2f83 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1940.977915] env[62519]: DEBUG oslo_concurrency.lockutils [None req-e853329e-9967-40cb-bffb-428460c69b3a tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.295s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1940.978574] env[62519]: DEBUG nova.compute.manager [None req-e853329e-9967-40cb-bffb-428460c69b3a tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] [instance: 9bf88b2f-63f9-466b-8669-45f17319055d] Start building networks asynchronously for instance. {{(pid=62519) _build_resources /opt/stack/nova/nova/compute/manager.py:2838}} [ 1940.981523] env[62519]: DEBUG oslo_vmware.api [None req-218c7ba7-bbdd-414d-9aa4-4b5c78a99c20 tempest-ServersNegativeTestMultiTenantJSON-1721524338 tempest-ServersNegativeTestMultiTenantJSON-1721524338-project-member] Waiting for the task: (returnval){ [ 1940.981523] env[62519]: value = "task-1803333" [ 1940.981523] env[62519]: _type = "Task" [ 1940.981523] env[62519]: } to complete. 
{{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1940.982039] env[62519]: DEBUG oslo_concurrency.lockutils [None req-7bbed6d2-ea5b-4fc9-bed3-5d105e41bd94 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.655s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1940.983650] env[62519]: INFO nova.compute.claims [None req-7bbed6d2-ea5b-4fc9-bed3-5d105e41bd94 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] [instance: 47439070-54d8-454c-bf1d-7a2a33d82e9a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1940.998982] env[62519]: DEBUG oslo_vmware.api [None req-218c7ba7-bbdd-414d-9aa4-4b5c78a99c20 tempest-ServersNegativeTestMultiTenantJSON-1721524338 tempest-ServersNegativeTestMultiTenantJSON-1721524338-project-member] Task: {'id': task-1803333, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1941.063256] env[62519]: DEBUG oslo_vmware.api [None req-dbaa8fb2-ac82-42c0-a4eb-9cc78c7ddbac tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Task: {'id': task-1803332, 'name': PowerOffVM_Task, 'duration_secs': 0.379625} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1941.063565] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-dbaa8fb2-ac82-42c0-a4eb-9cc78c7ddbac tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] [instance: f3665f89-1747-4567-9e56-c937d4ac81da] Powered off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1941.063801] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-dbaa8fb2-ac82-42c0-a4eb-9cc78c7ddbac tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] [instance: f3665f89-1747-4567-9e56-c937d4ac81da] Unregistering the VM {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1941.064135] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-048fd94a-12b8-4790-a6ca-33d266c6450a {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1941.149249] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-dbaa8fb2-ac82-42c0-a4eb-9cc78c7ddbac tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] [instance: f3665f89-1747-4567-9e56-c937d4ac81da] Unregistered the VM {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1941.149568] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-dbaa8fb2-ac82-42c0-a4eb-9cc78c7ddbac tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] [instance: f3665f89-1747-4567-9e56-c937d4ac81da] Deleting contents of the VM from datastore datastore1 {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1941.149772] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-dbaa8fb2-ac82-42c0-a4eb-9cc78c7ddbac 
tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Deleting the datastore file [datastore1] f3665f89-1747-4567-9e56-c937d4ac81da {{(pid=62519) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1941.150100] env[62519]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-6241132e-ba82-431c-8835-e7bdb38f396e {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1941.157717] env[62519]: DEBUG oslo_vmware.api [None req-dbaa8fb2-ac82-42c0-a4eb-9cc78c7ddbac tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Waiting for the task: (returnval){ [ 1941.157717] env[62519]: value = "task-1803335" [ 1941.157717] env[62519]: _type = "Task" [ 1941.157717] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1941.166160] env[62519]: DEBUG oslo_vmware.api [None req-dbaa8fb2-ac82-42c0-a4eb-9cc78c7ddbac tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Task: {'id': task-1803335, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1941.484586] env[62519]: DEBUG nova.compute.utils [None req-e853329e-9967-40cb-bffb-428460c69b3a tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Using /dev/sd instead of None {{(pid=62519) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1941.485842] env[62519]: DEBUG nova.compute.manager [None req-e853329e-9967-40cb-bffb-428460c69b3a tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] [instance: 9bf88b2f-63f9-466b-8669-45f17319055d] Allocating IP information in the background. {{(pid=62519) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1989}} [ 1941.485990] env[62519]: DEBUG nova.network.neutron [None req-e853329e-9967-40cb-bffb-428460c69b3a tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] [instance: 9bf88b2f-63f9-466b-8669-45f17319055d] allocate_for_instance() {{(pid=62519) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1941.499576] env[62519]: DEBUG oslo_vmware.api [None req-218c7ba7-bbdd-414d-9aa4-4b5c78a99c20 tempest-ServersNegativeTestMultiTenantJSON-1721524338 tempest-ServersNegativeTestMultiTenantJSON-1721524338-project-member] Task: {'id': task-1803333, 'name': ReconfigVM_Task, 'duration_secs': 0.314959} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1941.500113] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-218c7ba7-bbdd-414d-9aa4-4b5c78a99c20 tempest-ServersNegativeTestMultiTenantJSON-1721524338 tempest-ServersNegativeTestMultiTenantJSON-1721524338-project-member] [instance: 884ba724-1203-4513-a196-8af5258ac731] Reconfigured VM instance instance-00000070 to attach disk [datastore1] 884ba724-1203-4513-a196-8af5258ac731/884ba724-1203-4513-a196-8af5258ac731.vmdk or device None with type sparse {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1941.500744] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-ccca65d3-760e-4a87-affd-20d1c9202f50 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1941.508651] env[62519]: DEBUG oslo_vmware.api [None req-218c7ba7-bbdd-414d-9aa4-4b5c78a99c20 tempest-ServersNegativeTestMultiTenantJSON-1721524338 tempest-ServersNegativeTestMultiTenantJSON-1721524338-project-member] Waiting for the task: (returnval){ [ 1941.508651] env[62519]: value = "task-1803336" [ 1941.508651] env[62519]: _type = "Task" [ 1941.508651] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1941.519241] env[62519]: DEBUG oslo_vmware.api [None req-218c7ba7-bbdd-414d-9aa4-4b5c78a99c20 tempest-ServersNegativeTestMultiTenantJSON-1721524338 tempest-ServersNegativeTestMultiTenantJSON-1721524338-project-member] Task: {'id': task-1803336, 'name': Rename_Task} progress is 5%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1941.541149] env[62519]: DEBUG nova.policy [None req-e853329e-9967-40cb-bffb-428460c69b3a tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '158d93cf743840a8be4e192e6bd4b8bf', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'd73e71476254453fb23164dce09c6d41', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62519) authorize /opt/stack/nova/nova/policy.py:192}} [ 1941.669413] env[62519]: DEBUG oslo_vmware.api [None req-dbaa8fb2-ac82-42c0-a4eb-9cc78c7ddbac tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Task: {'id': task-1803335, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.157208} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1941.669643] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-dbaa8fb2-ac82-42c0-a4eb-9cc78c7ddbac tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Deleted the datastore file {{(pid=62519) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1941.669924] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-dbaa8fb2-ac82-42c0-a4eb-9cc78c7ddbac tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] [instance: f3665f89-1747-4567-9e56-c937d4ac81da] Deleted contents of the VM from datastore datastore1 {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1941.670042] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-dbaa8fb2-ac82-42c0-a4eb-9cc78c7ddbac tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] [instance: f3665f89-1747-4567-9e56-c937d4ac81da] Instance destroyed {{(pid=62519) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1941.670261] env[62519]: INFO nova.compute.manager [None req-dbaa8fb2-ac82-42c0-a4eb-9cc78c7ddbac tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] [instance: f3665f89-1747-4567-9e56-c937d4ac81da] Took 1.14 seconds to destroy the instance on the hypervisor. [ 1941.670534] env[62519]: DEBUG oslo.service.loopingcall [None req-dbaa8fb2-ac82-42c0-a4eb-9cc78c7ddbac tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62519) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1941.670763] env[62519]: DEBUG nova.compute.manager [-] [instance: f3665f89-1747-4567-9e56-c937d4ac81da] Deallocating network for instance {{(pid=62519) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2297}} [ 1941.670867] env[62519]: DEBUG nova.network.neutron [-] [instance: f3665f89-1747-4567-9e56-c937d4ac81da] deallocate_for_instance() {{(pid=62519) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1941.940759] env[62519]: DEBUG nova.network.neutron [None req-e853329e-9967-40cb-bffb-428460c69b3a tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] [instance: 9bf88b2f-63f9-466b-8669-45f17319055d] Successfully created port: 2ee072d4-2bdb-4a83-90dd-06086f515634 {{(pid=62519) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1941.988685] env[62519]: DEBUG nova.compute.manager [None req-e853329e-9967-40cb-bffb-428460c69b3a tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] [instance: 9bf88b2f-63f9-466b-8669-45f17319055d] Start building block device mappings for instance. {{(pid=62519) _build_resources /opt/stack/nova/nova/compute/manager.py:2873}} [ 1942.034779] env[62519]: DEBUG oslo_vmware.api [None req-218c7ba7-bbdd-414d-9aa4-4b5c78a99c20 tempest-ServersNegativeTestMultiTenantJSON-1721524338 tempest-ServersNegativeTestMultiTenantJSON-1721524338-project-member] Task: {'id': task-1803336, 'name': Rename_Task, 'duration_secs': 0.201081} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1942.035700] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-218c7ba7-bbdd-414d-9aa4-4b5c78a99c20 tempest-ServersNegativeTestMultiTenantJSON-1721524338 tempest-ServersNegativeTestMultiTenantJSON-1721524338-project-member] [instance: 884ba724-1203-4513-a196-8af5258ac731] Powering on the VM {{(pid=62519) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1942.035956] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f6facff5-293c-4ec7-a8d8-d13174e6bba7 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1942.044906] env[62519]: DEBUG oslo_vmware.api [None req-218c7ba7-bbdd-414d-9aa4-4b5c78a99c20 tempest-ServersNegativeTestMultiTenantJSON-1721524338 tempest-ServersNegativeTestMultiTenantJSON-1721524338-project-member] Waiting for the task: (returnval){ [ 1942.044906] env[62519]: value = "task-1803337" [ 1942.044906] env[62519]: _type = "Task" [ 1942.044906] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1942.060583] env[62519]: DEBUG oslo_vmware.api [None req-218c7ba7-bbdd-414d-9aa4-4b5c78a99c20 tempest-ServersNegativeTestMultiTenantJSON-1721524338 tempest-ServersNegativeTestMultiTenantJSON-1721524338-project-member] Task: {'id': task-1803337, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1942.236338] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-688b380a-26cf-4f07-b913-afa028738f7b {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1942.245136] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d067fa2-4686-4757-9fcb-efe5b7027ccd {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1942.276027] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ffc309b3-6218-4d4b-bc65-72613bbdf7ca {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1942.283921] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4ce3f11-7181-4266-bc55-f5e3dbe525b2 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1942.299484] env[62519]: DEBUG nova.compute.provider_tree [None req-7bbed6d2-ea5b-4fc9-bed3-5d105e41bd94 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Inventory has not changed in ProviderTree for provider: f8ca0d98-9158-4b85-ae0e-b106f966dd44 {{(pid=62519) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1942.429072] env[62519]: DEBUG nova.compute.manager [req-680e2550-ae79-47e3-b37e-ff5cfe039a31 req-7e9332a5-6218-4d1b-bb5d-196c58e7d4a4 service nova] [instance: f3665f89-1747-4567-9e56-c937d4ac81da] Received event network-vif-deleted-7220339b-d4a7-441b-8227-c956744ce0c0 {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1942.429277] env[62519]: INFO nova.compute.manager 
[req-680e2550-ae79-47e3-b37e-ff5cfe039a31 req-7e9332a5-6218-4d1b-bb5d-196c58e7d4a4 service nova] [instance: f3665f89-1747-4567-9e56-c937d4ac81da] Neutron deleted interface 7220339b-d4a7-441b-8227-c956744ce0c0; detaching it from the instance and deleting it from the info cache [ 1942.429821] env[62519]: DEBUG nova.network.neutron [req-680e2550-ae79-47e3-b37e-ff5cfe039a31 req-7e9332a5-6218-4d1b-bb5d-196c58e7d4a4 service nova] [instance: f3665f89-1747-4567-9e56-c937d4ac81da] Updating instance_info_cache with network_info: [] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1942.555671] env[62519]: DEBUG oslo_vmware.api [None req-218c7ba7-bbdd-414d-9aa4-4b5c78a99c20 tempest-ServersNegativeTestMultiTenantJSON-1721524338 tempest-ServersNegativeTestMultiTenantJSON-1721524338-project-member] Task: {'id': task-1803337, 'name': PowerOnVM_Task, 'duration_secs': 0.473691} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1942.555984] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-218c7ba7-bbdd-414d-9aa4-4b5c78a99c20 tempest-ServersNegativeTestMultiTenantJSON-1721524338 tempest-ServersNegativeTestMultiTenantJSON-1721524338-project-member] [instance: 884ba724-1203-4513-a196-8af5258ac731] Powered on the VM {{(pid=62519) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1942.556257] env[62519]: INFO nova.compute.manager [None req-218c7ba7-bbdd-414d-9aa4-4b5c78a99c20 tempest-ServersNegativeTestMultiTenantJSON-1721524338 tempest-ServersNegativeTestMultiTenantJSON-1721524338-project-member] [instance: 884ba724-1203-4513-a196-8af5258ac731] Took 8.02 seconds to spawn the instance on the hypervisor. [ 1942.556470] env[62519]: DEBUG nova.compute.manager [None req-218c7ba7-bbdd-414d-9aa4-4b5c78a99c20 tempest-ServersNegativeTestMultiTenantJSON-1721524338 tempest-ServersNegativeTestMultiTenantJSON-1721524338-project-member] [instance: 884ba724-1203-4513-a196-8af5258ac731] Checking state {{(pid=62519) _get_power_state /opt/stack/nova/nova/compute/manager.py:1799}} [ 1942.557277] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90b02d75-a913-4a17-a522-4ae89263cee6 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1942.716601] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-4e989c08-4a0b-495a-846a-60d593f74fdc tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] [instance: fc3beaba-2ad5-4598-b562-557fdd552b39] Volume attach. 
Driver type: vmdk {{(pid=62519) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1942.716843] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-4e989c08-4a0b-495a-846a-60d593f74fdc tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] [instance: fc3beaba-2ad5-4598-b562-557fdd552b39] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-373861', 'volume_id': '8a192ee5-7f6b-43f4-baac-71e4e69cc484', 'name': 'volume-8a192ee5-7f6b-43f4-baac-71e4e69cc484', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'fc3beaba-2ad5-4598-b562-557fdd552b39', 'attached_at': '', 'detached_at': '', 'volume_id': '8a192ee5-7f6b-43f4-baac-71e4e69cc484', 'serial': '8a192ee5-7f6b-43f4-baac-71e4e69cc484'} {{(pid=62519) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1942.720512] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-baa72cfa-d612-4d42-9b85-2c23190e6fbb {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1942.735408] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d85cff67-1d7e-4ecc-ad67-75f873893c65 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1942.763505] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-4e989c08-4a0b-495a-846a-60d593f74fdc tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] [instance: fc3beaba-2ad5-4598-b562-557fdd552b39] Reconfiguring VM instance instance-00000069 to attach disk [datastore1] volume-8a192ee5-7f6b-43f4-baac-71e4e69cc484/volume-8a192ee5-7f6b-43f4-baac-71e4e69cc484.vmdk or device None with type thin {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1942.763809] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c0a3dc76-73e5-4bb7-82f2-0316db64dd50 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1942.783280] env[62519]: DEBUG oslo_vmware.api [None req-4e989c08-4a0b-495a-846a-60d593f74fdc tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] Waiting for the task: (returnval){ [ 1942.783280] env[62519]: value = "task-1803338" [ 1942.783280] env[62519]: _type = "Task" [ 1942.783280] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1942.792237] env[62519]: DEBUG oslo_vmware.api [None req-4e989c08-4a0b-495a-846a-60d593f74fdc tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] Task: {'id': task-1803338, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1942.802291] env[62519]: DEBUG nova.scheduler.client.report [None req-7bbed6d2-ea5b-4fc9-bed3-5d105e41bd94 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Inventory has not changed for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1942.903374] env[62519]: DEBUG nova.network.neutron [-] [instance: f3665f89-1747-4567-9e56-c937d4ac81da] Updating instance_info_cache with network_info: [] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1942.933615] env[62519]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-81a64825-0ba7-4f88-9e71-c7096f47af60 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1942.946282] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f25f2516-e80c-40ce-a91c-ad3015495fc8 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1942.979479] env[62519]: DEBUG nova.compute.manager [req-680e2550-ae79-47e3-b37e-ff5cfe039a31 req-7e9332a5-6218-4d1b-bb5d-196c58e7d4a4 service nova] [instance: f3665f89-1747-4567-9e56-c937d4ac81da] Detach interface failed, port_id=7220339b-d4a7-441b-8227-c956744ce0c0, reason: Instance f3665f89-1747-4567-9e56-c937d4ac81da could not be found. {{(pid=62519) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11455}} [ 1942.998463] env[62519]: DEBUG nova.compute.manager [None req-e853329e-9967-40cb-bffb-428460c69b3a tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] [instance: 9bf88b2f-63f9-466b-8669-45f17319055d] Start spawning the instance on the hypervisor. 
{{(pid=62519) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2647}} [ 1943.027615] env[62519]: DEBUG nova.virt.hardware [None req-e853329e-9967-40cb-bffb-428460c69b3a tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T07:56:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T07:55:55Z,direct_url=,disk_format='vmdk',id=15793716-f1d9-4a86-9030-717adf498693,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4069337684d348e0a1ab4eb6a1a2b14d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T07:55:56Z,virtual_size=,visibility=), allow threads: False {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1943.027861] env[62519]: DEBUG nova.virt.hardware [None req-e853329e-9967-40cb-bffb-428460c69b3a tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Flavor limits 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1943.028026] env[62519]: DEBUG nova.virt.hardware [None req-e853329e-9967-40cb-bffb-428460c69b3a tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Image limits 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1943.028238] env[62519]: DEBUG nova.virt.hardware [None req-e853329e-9967-40cb-bffb-428460c69b3a tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Flavor pref 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1943.028397] env[62519]: DEBUG nova.virt.hardware [None req-e853329e-9967-40cb-bffb-428460c69b3a tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Image pref 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1943.028545] env[62519]: DEBUG nova.virt.hardware [None req-e853329e-9967-40cb-bffb-428460c69b3a tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1943.028752] env[62519]: DEBUG nova.virt.hardware [None req-e853329e-9967-40cb-bffb-428460c69b3a tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1943.028910] env[62519]: DEBUG nova.virt.hardware [None req-e853329e-9967-40cb-bffb-428460c69b3a tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62519) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1943.029084] env[62519]: DEBUG 
nova.virt.hardware [None req-e853329e-9967-40cb-bffb-428460c69b3a tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Got 1 possible topologies {{(pid=62519) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1943.029247] env[62519]: DEBUG nova.virt.hardware [None req-e853329e-9967-40cb-bffb-428460c69b3a tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1943.029483] env[62519]: DEBUG nova.virt.hardware [None req-e853329e-9967-40cb-bffb-428460c69b3a tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1943.030383] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88c67542-2a74-4fcb-a3b1-9289e5ecac2a {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1943.038129] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f14beafc-43a8-49a6-883b-a8d168d59dac {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1943.078209] env[62519]: INFO nova.compute.manager [None req-218c7ba7-bbdd-414d-9aa4-4b5c78a99c20 tempest-ServersNegativeTestMultiTenantJSON-1721524338 tempest-ServersNegativeTestMultiTenantJSON-1721524338-project-member] [instance: 884ba724-1203-4513-a196-8af5258ac731] Took 13.67 seconds to build instance. [ 1943.293700] env[62519]: DEBUG oslo_vmware.api [None req-4e989c08-4a0b-495a-846a-60d593f74fdc tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] Task: {'id': task-1803338, 'name': ReconfigVM_Task, 'duration_secs': 0.379785} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1943.293997] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-4e989c08-4a0b-495a-846a-60d593f74fdc tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] [instance: fc3beaba-2ad5-4598-b562-557fdd552b39] Reconfigured VM instance instance-00000069 to attach disk [datastore1] volume-8a192ee5-7f6b-43f4-baac-71e4e69cc484/volume-8a192ee5-7f6b-43f4-baac-71e4e69cc484.vmdk or device None with type thin {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1943.298805] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4652b9de-f653-4dc5-ac81-5155b932642a {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1943.309540] env[62519]: DEBUG oslo_concurrency.lockutils [None req-7bbed6d2-ea5b-4fc9-bed3-5d105e41bd94 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.327s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1943.310083] env[62519]: DEBUG nova.compute.manager [None req-7bbed6d2-ea5b-4fc9-bed3-5d105e41bd94 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] [instance: 47439070-54d8-454c-bf1d-7a2a33d82e9a] Start building networks asynchronously for instance. {{(pid=62519) _build_resources /opt/stack/nova/nova/compute/manager.py:2838}} [ 1943.320020] env[62519]: DEBUG oslo_vmware.api [None req-4e989c08-4a0b-495a-846a-60d593f74fdc tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] Waiting for the task: (returnval){ [ 1943.320020] env[62519]: value = "task-1803339" [ 1943.320020] env[62519]: _type = "Task" [ 1943.320020] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1943.328876] env[62519]: DEBUG oslo_vmware.api [None req-4e989c08-4a0b-495a-846a-60d593f74fdc tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] Task: {'id': task-1803339, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1943.405740] env[62519]: INFO nova.compute.manager [-] [instance: f3665f89-1747-4567-9e56-c937d4ac81da] Took 1.73 seconds to deallocate network for instance. 
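The task and lock entries above all follow two library patterns. Every vCenter operation in this section (CopyVirtualDisk_Task, ExtendVirtualDisk_Task, PowerOffVM_Task, ReconfigVM_Task, DeleteDatastoreFile_Task, Rename_Task, PowerOnVM_Task, CreateVM_Task) is invoked through the oslo.vmware session, which returns a vSphere Task reference that the caller then polls with wait_for_task; the "Waiting for the task", "progress is N%" and "completed successfully" lines come from that polling loop. The "compute_resources" lines come from oslo.concurrency's lockutils, which logs acquire/wait/held durations around the resource tracker's claim. Below is a minimal illustrative sketch of both patterns, not Nova's actual driver code; it assumes an already-configured oslo_vmware.api.VMwareAPISession passed in as session, and the disk-manager reference, datastore path and datacenter reference are placeholders.

# Illustrative sketch only (not Nova source). Assumes `session` is an
# existing oslo_vmware.api.VMwareAPISession; `disk_manager`, `vmdk_path`
# and `dc_ref` are placeholder managed-object references / datastore paths.
from oslo_concurrency import lockutils


@lockutils.synchronized("compute_resources")
def claim_resources():
    # lockutils wraps this call and emits the DEBUG lines seen above:
    # 'Acquiring lock "compute_resources" ...', 'acquired ... waited Ns',
    # and 'released ... held Ns'.
    pass


def extend_virtual_disk(session, disk_manager, vmdk_path, dc_ref, new_kb):
    # invoke_api returns a Task managed-object reference; wait_for_task
    # polls it (logging "progress is N%") and raises if the task fails,
    # otherwise returning once the task has completed successfully.
    task = session.invoke_api(session.vim, "ExtendVirtualDisk_Task",
                              disk_manager, name=vmdk_path,
                              datacenter=dc_ref, newCapacityKb=new_kb,
                              eagerZero=False)
    session.wait_for_task(task)

The same invoke_api/wait_for_task shape applies to the other tasks polled in this section; only the method name, the target managed object and the keyword arguments differ.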
[ 1943.669719] env[62519]: DEBUG nova.network.neutron [None req-e853329e-9967-40cb-bffb-428460c69b3a tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] [instance: 9bf88b2f-63f9-466b-8669-45f17319055d] Successfully updated port: 2ee072d4-2bdb-4a83-90dd-06086f515634 {{(pid=62519) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1943.819050] env[62519]: DEBUG nova.compute.utils [None req-7bbed6d2-ea5b-4fc9-bed3-5d105e41bd94 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Using /dev/sd instead of None {{(pid=62519) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1943.820833] env[62519]: DEBUG nova.compute.manager [None req-7bbed6d2-ea5b-4fc9-bed3-5d105e41bd94 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] [instance: 47439070-54d8-454c-bf1d-7a2a33d82e9a] Allocating IP information in the background. {{(pid=62519) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1989}} [ 1943.820833] env[62519]: DEBUG nova.network.neutron [None req-7bbed6d2-ea5b-4fc9-bed3-5d105e41bd94 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] [instance: 47439070-54d8-454c-bf1d-7a2a33d82e9a] allocate_for_instance() {{(pid=62519) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1944.530621] env[62519]: DEBUG nova.policy [None req-7bbed6d2-ea5b-4fc9-bed3-5d105e41bd94 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '4e3681fd31294e5cbdfbfea52a47eec0', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ff4895c6c47e438e8fb9fbc0ffbfdc82', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62519) authorize /opt/stack/nova/nova/policy.py:192}} [ 1944.533590] env[62519]: DEBUG oslo_concurrency.lockutils [None req-dbaa8fb2-ac82-42c0-a4eb-9cc78c7ddbac tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1944.533859] env[62519]: DEBUG oslo_concurrency.lockutils [None req-dbaa8fb2-ac82-42c0-a4eb-9cc78c7ddbac tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1944.534093] env[62519]: DEBUG nova.objects.instance [None req-dbaa8fb2-ac82-42c0-a4eb-9cc78c7ddbac tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Lazy-loading 'resources' on Instance uuid f3665f89-1747-4567-9e56-c937d4ac81da {{(pid=62519) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1944.536110] env[62519]: DEBUG oslo_concurrency.lockutils [None req-e853329e-9967-40cb-bffb-428460c69b3a 
tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Acquiring lock "refresh_cache-9bf88b2f-63f9-466b-8669-45f17319055d" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1944.536242] env[62519]: DEBUG oslo_concurrency.lockutils [None req-e853329e-9967-40cb-bffb-428460c69b3a tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Acquired lock "refresh_cache-9bf88b2f-63f9-466b-8669-45f17319055d" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1944.536376] env[62519]: DEBUG nova.network.neutron [None req-e853329e-9967-40cb-bffb-428460c69b3a tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] [instance: 9bf88b2f-63f9-466b-8669-45f17319055d] Building network info cache for instance {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1944.537484] env[62519]: DEBUG oslo_concurrency.lockutils [None req-f782d2b8-e50d-40ef-8bd5-13bb744038de tempest-ServersNegativeTestMultiTenantJSON-1721524338 tempest-ServersNegativeTestMultiTenantJSON-1721524338-project-member] Acquiring lock "884ba724-1203-4513-a196-8af5258ac731" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1944.537857] env[62519]: DEBUG nova.compute.manager [None req-7bbed6d2-ea5b-4fc9-bed3-5d105e41bd94 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] [instance: 47439070-54d8-454c-bf1d-7a2a33d82e9a] Start building block device mappings for instance. {{(pid=62519) _build_resources /opt/stack/nova/nova/compute/manager.py:2873}} [ 1944.540358] env[62519]: DEBUG oslo_vmware.api [None req-4e989c08-4a0b-495a-846a-60d593f74fdc tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] Task: {'id': task-1803339, 'name': ReconfigVM_Task, 'duration_secs': 0.15234} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1944.541311] env[62519]: DEBUG nova.compute.manager [req-1df02851-f731-49e6-b41d-943cac9d2adc req-f10b3971-1009-4ef1-b60f-e179523120e7 service nova] [instance: 9bf88b2f-63f9-466b-8669-45f17319055d] Received event network-vif-plugged-2ee072d4-2bdb-4a83-90dd-06086f515634 {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1944.541518] env[62519]: DEBUG oslo_concurrency.lockutils [req-1df02851-f731-49e6-b41d-943cac9d2adc req-f10b3971-1009-4ef1-b60f-e179523120e7 service nova] Acquiring lock "9bf88b2f-63f9-466b-8669-45f17319055d-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1944.541705] env[62519]: DEBUG oslo_concurrency.lockutils [req-1df02851-f731-49e6-b41d-943cac9d2adc req-f10b3971-1009-4ef1-b60f-e179523120e7 service nova] Lock "9bf88b2f-63f9-466b-8669-45f17319055d-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1944.541867] env[62519]: DEBUG oslo_concurrency.lockutils [req-1df02851-f731-49e6-b41d-943cac9d2adc req-f10b3971-1009-4ef1-b60f-e179523120e7 service nova] Lock "9bf88b2f-63f9-466b-8669-45f17319055d-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1944.542065] env[62519]: DEBUG nova.compute.manager [req-1df02851-f731-49e6-b41d-943cac9d2adc req-f10b3971-1009-4ef1-b60f-e179523120e7 service nova] [instance: 9bf88b2f-63f9-466b-8669-45f17319055d] No waiting events found dispatching network-vif-plugged-2ee072d4-2bdb-4a83-90dd-06086f515634 {{(pid=62519) pop_instance_event /opt/stack/nova/nova/compute/manager.py:323}} [ 1944.542329] env[62519]: WARNING nova.compute.manager [req-1df02851-f731-49e6-b41d-943cac9d2adc req-f10b3971-1009-4ef1-b60f-e179523120e7 service nova] [instance: 9bf88b2f-63f9-466b-8669-45f17319055d] Received unexpected event network-vif-plugged-2ee072d4-2bdb-4a83-90dd-06086f515634 for instance with vm_state building and task_state spawning. [ 1944.542497] env[62519]: DEBUG nova.compute.manager [req-1df02851-f731-49e6-b41d-943cac9d2adc req-f10b3971-1009-4ef1-b60f-e179523120e7 service nova] [instance: 9bf88b2f-63f9-466b-8669-45f17319055d] Received event network-changed-2ee072d4-2bdb-4a83-90dd-06086f515634 {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1944.542928] env[62519]: DEBUG nova.compute.manager [req-1df02851-f731-49e6-b41d-943cac9d2adc req-f10b3971-1009-4ef1-b60f-e179523120e7 service nova] [instance: 9bf88b2f-63f9-466b-8669-45f17319055d] Refreshing instance network info cache due to event network-changed-2ee072d4-2bdb-4a83-90dd-06086f515634. 
{{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11628}} [ 1944.543110] env[62519]: DEBUG oslo_concurrency.lockutils [req-1df02851-f731-49e6-b41d-943cac9d2adc req-f10b3971-1009-4ef1-b60f-e179523120e7 service nova] Acquiring lock "refresh_cache-9bf88b2f-63f9-466b-8669-45f17319055d" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1944.544195] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-4e989c08-4a0b-495a-846a-60d593f74fdc tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] [instance: fc3beaba-2ad5-4598-b562-557fdd552b39] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-373861', 'volume_id': '8a192ee5-7f6b-43f4-baac-71e4e69cc484', 'name': 'volume-8a192ee5-7f6b-43f4-baac-71e4e69cc484', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'fc3beaba-2ad5-4598-b562-557fdd552b39', 'attached_at': '', 'detached_at': '', 'volume_id': '8a192ee5-7f6b-43f4-baac-71e4e69cc484', 'serial': '8a192ee5-7f6b-43f4-baac-71e4e69cc484'} {{(pid=62519) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1944.805096] env[62519]: DEBUG nova.network.neutron [None req-7bbed6d2-ea5b-4fc9-bed3-5d105e41bd94 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] [instance: 47439070-54d8-454c-bf1d-7a2a33d82e9a] Successfully created port: 9a08cba9-bd48-4013-a6c0-13049c0b5fdb {{(pid=62519) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1945.053824] env[62519]: DEBUG oslo_concurrency.lockutils [None req-218c7ba7-bbdd-414d-9aa4-4b5c78a99c20 tempest-ServersNegativeTestMultiTenantJSON-1721524338 tempest-ServersNegativeTestMultiTenantJSON-1721524338-project-member] Lock "884ba724-1203-4513-a196-8af5258ac731" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 16.657s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1945.056388] env[62519]: DEBUG oslo_concurrency.lockutils [None req-f782d2b8-e50d-40ef-8bd5-13bb744038de tempest-ServersNegativeTestMultiTenantJSON-1721524338 tempest-ServersNegativeTestMultiTenantJSON-1721524338-project-member] Lock "884ba724-1203-4513-a196-8af5258ac731" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.519s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1945.059670] env[62519]: DEBUG oslo_concurrency.lockutils [None req-f782d2b8-e50d-40ef-8bd5-13bb744038de tempest-ServersNegativeTestMultiTenantJSON-1721524338 tempest-ServersNegativeTestMultiTenantJSON-1721524338-project-member] Acquiring lock "884ba724-1203-4513-a196-8af5258ac731-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1945.059670] env[62519]: DEBUG oslo_concurrency.lockutils [None req-f782d2b8-e50d-40ef-8bd5-13bb744038de tempest-ServersNegativeTestMultiTenantJSON-1721524338 tempest-ServersNegativeTestMultiTenantJSON-1721524338-project-member] Lock "884ba724-1203-4513-a196-8af5258ac731-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62519) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1945.059670] env[62519]: DEBUG oslo_concurrency.lockutils [None req-f782d2b8-e50d-40ef-8bd5-13bb744038de tempest-ServersNegativeTestMultiTenantJSON-1721524338 tempest-ServersNegativeTestMultiTenantJSON-1721524338-project-member] Lock "884ba724-1203-4513-a196-8af5258ac731-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1945.059670] env[62519]: INFO nova.compute.manager [None req-f782d2b8-e50d-40ef-8bd5-13bb744038de tempest-ServersNegativeTestMultiTenantJSON-1721524338 tempest-ServersNegativeTestMultiTenantJSON-1721524338-project-member] [instance: 884ba724-1203-4513-a196-8af5258ac731] Terminating instance [ 1945.101574] env[62519]: DEBUG nova.network.neutron [None req-e853329e-9967-40cb-bffb-428460c69b3a tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] [instance: 9bf88b2f-63f9-466b-8669-45f17319055d] Instance cache missing network info. {{(pid=62519) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1945.236081] env[62519]: DEBUG nova.network.neutron [None req-e853329e-9967-40cb-bffb-428460c69b3a tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] [instance: 9bf88b2f-63f9-466b-8669-45f17319055d] Updating instance_info_cache with network_info: [{"id": "2ee072d4-2bdb-4a83-90dd-06086f515634", "address": "fa:16:3e:e6:86:ec", "network": {"id": "32c7792c-9705-4830-bdbc-4c4159d566f0", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-374750788-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d73e71476254453fb23164dce09c6d41", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a0a76279-3c11-4bef-b124-2a2ee13fa377", "external-id": "nsx-vlan-transportzone-738", "segmentation_id": 738, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2ee072d4-2b", "ovs_interfaceid": "2ee072d4-2bdb-4a83-90dd-06086f515634", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1945.254530] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-469795f1-56f8-4e71-b96c-51708330db86 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1945.265402] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b0524cf-0ed1-4cf4-85f5-963fa1ca593f {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1945.299374] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d99ebb74-0c16-4dd1-9d98-f48c6e25e9f0 {{(pid=62519) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1945.308082] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-385dbdcb-d32d-489b-997d-8e19e43edcde {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1945.323900] env[62519]: DEBUG nova.compute.provider_tree [None req-dbaa8fb2-ac82-42c0-a4eb-9cc78c7ddbac tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Inventory has not changed in ProviderTree for provider: f8ca0d98-9158-4b85-ae0e-b106f966dd44 {{(pid=62519) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1945.556570] env[62519]: DEBUG nova.compute.manager [None req-7bbed6d2-ea5b-4fc9-bed3-5d105e41bd94 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] [instance: 47439070-54d8-454c-bf1d-7a2a33d82e9a] Start spawning the instance on the hypervisor. {{(pid=62519) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2647}} [ 1945.563376] env[62519]: DEBUG nova.compute.manager [None req-f782d2b8-e50d-40ef-8bd5-13bb744038de tempest-ServersNegativeTestMultiTenantJSON-1721524338 tempest-ServersNegativeTestMultiTenantJSON-1721524338-project-member] [instance: 884ba724-1203-4513-a196-8af5258ac731] Start destroying the instance on the hypervisor. {{(pid=62519) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3166}} [ 1945.563585] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-f782d2b8-e50d-40ef-8bd5-13bb744038de tempest-ServersNegativeTestMultiTenantJSON-1721524338 tempest-ServersNegativeTestMultiTenantJSON-1721524338-project-member] [instance: 884ba724-1203-4513-a196-8af5258ac731] Destroying instance {{(pid=62519) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1945.564527] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6dee7093-39e5-4091-8819-0e91cb278fa4 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1945.573842] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-f782d2b8-e50d-40ef-8bd5-13bb744038de tempest-ServersNegativeTestMultiTenantJSON-1721524338 tempest-ServersNegativeTestMultiTenantJSON-1721524338-project-member] [instance: 884ba724-1203-4513-a196-8af5258ac731] Powering off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1945.575656] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-84d266b5-f662-4fc9-b187-71fe05159baa {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1945.583381] env[62519]: DEBUG nova.virt.hardware [None req-7bbed6d2-ea5b-4fc9-bed3-5d105e41bd94 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T07:56:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta 
ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T07:55:55Z,direct_url=,disk_format='vmdk',id=15793716-f1d9-4a86-9030-717adf498693,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4069337684d348e0a1ab4eb6a1a2b14d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T07:55:56Z,virtual_size=,visibility=), allow threads: False {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1945.583571] env[62519]: DEBUG nova.virt.hardware [None req-7bbed6d2-ea5b-4fc9-bed3-5d105e41bd94 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Flavor limits 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1945.583775] env[62519]: DEBUG nova.virt.hardware [None req-7bbed6d2-ea5b-4fc9-bed3-5d105e41bd94 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Image limits 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1945.583899] env[62519]: DEBUG nova.virt.hardware [None req-7bbed6d2-ea5b-4fc9-bed3-5d105e41bd94 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Flavor pref 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1945.584054] env[62519]: DEBUG nova.virt.hardware [None req-7bbed6d2-ea5b-4fc9-bed3-5d105e41bd94 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Image pref 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1945.584206] env[62519]: DEBUG nova.virt.hardware [None req-7bbed6d2-ea5b-4fc9-bed3-5d105e41bd94 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1945.584410] env[62519]: DEBUG nova.virt.hardware [None req-7bbed6d2-ea5b-4fc9-bed3-5d105e41bd94 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1945.584564] env[62519]: DEBUG nova.virt.hardware [None req-7bbed6d2-ea5b-4fc9-bed3-5d105e41bd94 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62519) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1945.584725] env[62519]: DEBUG nova.virt.hardware [None req-7bbed6d2-ea5b-4fc9-bed3-5d105e41bd94 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Got 1 possible topologies {{(pid=62519) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1945.584882] env[62519]: DEBUG nova.virt.hardware [None req-7bbed6d2-ea5b-4fc9-bed3-5d105e41bd94 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) 
_get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1945.585065] env[62519]: DEBUG nova.virt.hardware [None req-7bbed6d2-ea5b-4fc9-bed3-5d105e41bd94 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1945.585829] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fab670bf-a479-452a-8567-674817925140 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1945.589701] env[62519]: DEBUG oslo_vmware.api [None req-f782d2b8-e50d-40ef-8bd5-13bb744038de tempest-ServersNegativeTestMultiTenantJSON-1721524338 tempest-ServersNegativeTestMultiTenantJSON-1721524338-project-member] Waiting for the task: (returnval){ [ 1945.589701] env[62519]: value = "task-1803340" [ 1945.589701] env[62519]: _type = "Task" [ 1945.589701] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1945.599816] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0dee607f-d4ab-4b61-81a4-52d71e0f3fdc {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1945.604340] env[62519]: DEBUG oslo_vmware.api [None req-f782d2b8-e50d-40ef-8bd5-13bb744038de tempest-ServersNegativeTestMultiTenantJSON-1721524338 tempest-ServersNegativeTestMultiTenantJSON-1721524338-project-member] Task: {'id': task-1803340, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1945.614580] env[62519]: DEBUG nova.objects.instance [None req-4e989c08-4a0b-495a-846a-60d593f74fdc tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] Lazy-loading 'flavor' on Instance uuid fc3beaba-2ad5-4598-b562-557fdd552b39 {{(pid=62519) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1945.739325] env[62519]: DEBUG oslo_concurrency.lockutils [None req-e853329e-9967-40cb-bffb-428460c69b3a tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Releasing lock "refresh_cache-9bf88b2f-63f9-466b-8669-45f17319055d" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1945.739865] env[62519]: DEBUG nova.compute.manager [None req-e853329e-9967-40cb-bffb-428460c69b3a tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] [instance: 9bf88b2f-63f9-466b-8669-45f17319055d] Instance network_info: |[{"id": "2ee072d4-2bdb-4a83-90dd-06086f515634", "address": "fa:16:3e:e6:86:ec", "network": {"id": "32c7792c-9705-4830-bdbc-4c4159d566f0", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-374750788-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d73e71476254453fb23164dce09c6d41", "mtu": 8950, "physical_network": "default", 
"tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a0a76279-3c11-4bef-b124-2a2ee13fa377", "external-id": "nsx-vlan-transportzone-738", "segmentation_id": 738, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2ee072d4-2b", "ovs_interfaceid": "2ee072d4-2bdb-4a83-90dd-06086f515634", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62519) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2004}} [ 1945.740479] env[62519]: DEBUG oslo_concurrency.lockutils [req-1df02851-f731-49e6-b41d-943cac9d2adc req-f10b3971-1009-4ef1-b60f-e179523120e7 service nova] Acquired lock "refresh_cache-9bf88b2f-63f9-466b-8669-45f17319055d" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1945.740742] env[62519]: DEBUG nova.network.neutron [req-1df02851-f731-49e6-b41d-943cac9d2adc req-f10b3971-1009-4ef1-b60f-e179523120e7 service nova] [instance: 9bf88b2f-63f9-466b-8669-45f17319055d] Refreshing network info cache for port 2ee072d4-2bdb-4a83-90dd-06086f515634 {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1945.742358] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-e853329e-9967-40cb-bffb-428460c69b3a tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] [instance: 9bf88b2f-63f9-466b-8669-45f17319055d] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:e6:86:ec', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a0a76279-3c11-4bef-b124-2a2ee13fa377', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '2ee072d4-2bdb-4a83-90dd-06086f515634', 'vif_model': 'vmxnet3'}] {{(pid=62519) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1945.752497] env[62519]: DEBUG oslo.service.loopingcall [None req-e853329e-9967-40cb-bffb-428460c69b3a tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62519) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1945.753618] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9bf88b2f-63f9-466b-8669-45f17319055d] Creating VM on the ESX host {{(pid=62519) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1945.753870] env[62519]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-57e071e5-f648-41ab-b1f9-3266a2ede719 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1945.776960] env[62519]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1945.776960] env[62519]: value = "task-1803341" [ 1945.776960] env[62519]: _type = "Task" [ 1945.776960] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1945.786538] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1803341, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1945.827694] env[62519]: DEBUG nova.scheduler.client.report [None req-dbaa8fb2-ac82-42c0-a4eb-9cc78c7ddbac tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Inventory has not changed for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1946.100815] env[62519]: DEBUG oslo_vmware.api [None req-f782d2b8-e50d-40ef-8bd5-13bb744038de tempest-ServersNegativeTestMultiTenantJSON-1721524338 tempest-ServersNegativeTestMultiTenantJSON-1721524338-project-member] Task: {'id': task-1803340, 'name': PowerOffVM_Task, 'duration_secs': 0.196369} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1946.101118] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-f782d2b8-e50d-40ef-8bd5-13bb744038de tempest-ServersNegativeTestMultiTenantJSON-1721524338 tempest-ServersNegativeTestMultiTenantJSON-1721524338-project-member] [instance: 884ba724-1203-4513-a196-8af5258ac731] Powered off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1946.101291] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-f782d2b8-e50d-40ef-8bd5-13bb744038de tempest-ServersNegativeTestMultiTenantJSON-1721524338 tempest-ServersNegativeTestMultiTenantJSON-1721524338-project-member] [instance: 884ba724-1203-4513-a196-8af5258ac731] Unregistering the VM {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1946.101663] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-2681e64b-43fa-41ae-8d78-ed47746c3f37 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1946.120766] env[62519]: DEBUG oslo_concurrency.lockutils [None req-4e989c08-4a0b-495a-846a-60d593f74fdc tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] Lock "fc3beaba-2ad5-4598-b562-557fdd552b39" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 8.012s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1946.219530] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-f782d2b8-e50d-40ef-8bd5-13bb744038de tempest-ServersNegativeTestMultiTenantJSON-1721524338 tempest-ServersNegativeTestMultiTenantJSON-1721524338-project-member] [instance: 884ba724-1203-4513-a196-8af5258ac731] Unregistered the VM {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1946.219795] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-f782d2b8-e50d-40ef-8bd5-13bb744038de tempest-ServersNegativeTestMultiTenantJSON-1721524338 tempest-ServersNegativeTestMultiTenantJSON-1721524338-project-member] [instance: 884ba724-1203-4513-a196-8af5258ac731] Deleting contents of the VM from datastore datastore1 {{(pid=62519) _destroy_instance 
/opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1946.220013] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-f782d2b8-e50d-40ef-8bd5-13bb744038de tempest-ServersNegativeTestMultiTenantJSON-1721524338 tempest-ServersNegativeTestMultiTenantJSON-1721524338-project-member] Deleting the datastore file [datastore1] 884ba724-1203-4513-a196-8af5258ac731 {{(pid=62519) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1946.220551] env[62519]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-33f95239-83d3-4611-817e-33f1a212900d {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1946.229194] env[62519]: DEBUG oslo_vmware.api [None req-f782d2b8-e50d-40ef-8bd5-13bb744038de tempest-ServersNegativeTestMultiTenantJSON-1721524338 tempest-ServersNegativeTestMultiTenantJSON-1721524338-project-member] Waiting for the task: (returnval){ [ 1946.229194] env[62519]: value = "task-1803343" [ 1946.229194] env[62519]: _type = "Task" [ 1946.229194] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1946.238807] env[62519]: DEBUG oslo_vmware.api [None req-f782d2b8-e50d-40ef-8bd5-13bb744038de tempest-ServersNegativeTestMultiTenantJSON-1721524338 tempest-ServersNegativeTestMultiTenantJSON-1721524338-project-member] Task: {'id': task-1803343, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1946.293011] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1803341, 'name': CreateVM_Task, 'duration_secs': 0.484159} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1946.293228] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9bf88b2f-63f9-466b-8669-45f17319055d] Created VM on the ESX host {{(pid=62519) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1946.293883] env[62519]: DEBUG oslo_concurrency.lockutils [None req-e853329e-9967-40cb-bffb-428460c69b3a tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1946.294058] env[62519]: DEBUG oslo_concurrency.lockutils [None req-e853329e-9967-40cb-bffb-428460c69b3a tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Acquired lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1946.294435] env[62519]: DEBUG oslo_concurrency.lockutils [None req-e853329e-9967-40cb-bffb-428460c69b3a tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1946.294640] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-be79fc4b-d736-49cf-9644-ddca8c3fd214 {{(pid=62519) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1946.300306] env[62519]: DEBUG oslo_vmware.api [None req-e853329e-9967-40cb-bffb-428460c69b3a tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Waiting for the task: (returnval){ [ 1946.300306] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]520bc1ff-c303-5a71-cdde-34d7d6b43c89" [ 1946.300306] env[62519]: _type = "Task" [ 1946.300306] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1946.313862] env[62519]: DEBUG oslo_vmware.api [None req-e853329e-9967-40cb-bffb-428460c69b3a tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]520bc1ff-c303-5a71-cdde-34d7d6b43c89, 'name': SearchDatastore_Task} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1946.314154] env[62519]: DEBUG oslo_concurrency.lockutils [None req-e853329e-9967-40cb-bffb-428460c69b3a tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Releasing lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1946.314387] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-e853329e-9967-40cb-bffb-428460c69b3a tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] [instance: 9bf88b2f-63f9-466b-8669-45f17319055d] Processing image 15793716-f1d9-4a86-9030-717adf498693 {{(pid=62519) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1946.314615] env[62519]: DEBUG oslo_concurrency.lockutils [None req-e853329e-9967-40cb-bffb-428460c69b3a tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1946.314755] env[62519]: DEBUG oslo_concurrency.lockutils [None req-e853329e-9967-40cb-bffb-428460c69b3a tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Acquired lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1946.314925] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-e853329e-9967-40cb-bffb-428460c69b3a tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62519) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1946.315456] env[62519]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-628bbb63-39d0-4f68-bac9-8489e96c4a3e {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1946.325046] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-e853329e-9967-40cb-bffb-428460c69b3a tempest-ServerActionsTestOtherB-1162239603 
tempest-ServerActionsTestOtherB-1162239603-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62519) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1946.325245] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-e853329e-9967-40cb-bffb-428460c69b3a tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62519) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1946.326549] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f9a82755-7a18-49a8-808c-98a60a742395 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1946.333146] env[62519]: DEBUG oslo_concurrency.lockutils [None req-dbaa8fb2-ac82-42c0-a4eb-9cc78c7ddbac tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.799s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1946.335266] env[62519]: DEBUG oslo_vmware.api [None req-e853329e-9967-40cb-bffb-428460c69b3a tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Waiting for the task: (returnval){ [ 1946.335266] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]5232d1de-e7b5-3239-cd9b-1b413dce0cad" [ 1946.335266] env[62519]: _type = "Task" [ 1946.335266] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1946.339389] env[62519]: DEBUG nova.network.neutron [None req-7bbed6d2-ea5b-4fc9-bed3-5d105e41bd94 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] [instance: 47439070-54d8-454c-bf1d-7a2a33d82e9a] Successfully updated port: 9a08cba9-bd48-4013-a6c0-13049c0b5fdb {{(pid=62519) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1946.348221] env[62519]: DEBUG oslo_vmware.api [None req-e853329e-9967-40cb-bffb-428460c69b3a tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]5232d1de-e7b5-3239-cd9b-1b413dce0cad, 'name': SearchDatastore_Task, 'duration_secs': 0.009866} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1946.349039] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e5049397-448e-4fbe-a634-ab060b48fa3e {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1946.359096] env[62519]: DEBUG oslo_vmware.api [None req-e853329e-9967-40cb-bffb-428460c69b3a tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Waiting for the task: (returnval){ [ 1946.359096] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]5293babf-229b-ff28-0d6d-9bcdb9d47f2d" [ 1946.359096] env[62519]: _type = "Task" [ 1946.359096] env[62519]: } to complete. 
{{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1946.366470] env[62519]: INFO nova.scheduler.client.report [None req-dbaa8fb2-ac82-42c0-a4eb-9cc78c7ddbac tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Deleted allocations for instance f3665f89-1747-4567-9e56-c937d4ac81da [ 1946.373727] env[62519]: DEBUG oslo_vmware.api [None req-e853329e-9967-40cb-bffb-428460c69b3a tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]5293babf-229b-ff28-0d6d-9bcdb9d47f2d, 'name': SearchDatastore_Task, 'duration_secs': 0.00921} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1946.374202] env[62519]: DEBUG oslo_concurrency.lockutils [None req-e853329e-9967-40cb-bffb-428460c69b3a tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Releasing lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1946.374455] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-e853329e-9967-40cb-bffb-428460c69b3a tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk to [datastore1] 9bf88b2f-63f9-466b-8669-45f17319055d/9bf88b2f-63f9-466b-8669-45f17319055d.vmdk {{(pid=62519) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1946.374787] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-190bd9f8-d808-4aec-ae37-6fff08c91da6 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1946.386245] env[62519]: DEBUG oslo_vmware.api [None req-e853329e-9967-40cb-bffb-428460c69b3a tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Waiting for the task: (returnval){ [ 1946.386245] env[62519]: value = "task-1803344" [ 1946.386245] env[62519]: _type = "Task" [ 1946.386245] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1946.395917] env[62519]: DEBUG oslo_vmware.api [None req-e853329e-9967-40cb-bffb-428460c69b3a tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Task: {'id': task-1803344, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1946.489627] env[62519]: DEBUG nova.compute.manager [req-8a60a59f-e504-40dc-baed-3e6f1cbff32f req-074b1db1-3900-43e9-bbce-9cda133713c7 service nova] [instance: 47439070-54d8-454c-bf1d-7a2a33d82e9a] Received event network-vif-plugged-9a08cba9-bd48-4013-a6c0-13049c0b5fdb {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1946.489899] env[62519]: DEBUG oslo_concurrency.lockutils [req-8a60a59f-e504-40dc-baed-3e6f1cbff32f req-074b1db1-3900-43e9-bbce-9cda133713c7 service nova] Acquiring lock "47439070-54d8-454c-bf1d-7a2a33d82e9a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1946.490081] env[62519]: DEBUG oslo_concurrency.lockutils [req-8a60a59f-e504-40dc-baed-3e6f1cbff32f req-074b1db1-3900-43e9-bbce-9cda133713c7 service nova] Lock "47439070-54d8-454c-bf1d-7a2a33d82e9a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1946.490260] env[62519]: DEBUG oslo_concurrency.lockutils [req-8a60a59f-e504-40dc-baed-3e6f1cbff32f req-074b1db1-3900-43e9-bbce-9cda133713c7 service nova] Lock "47439070-54d8-454c-bf1d-7a2a33d82e9a-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1946.490448] env[62519]: DEBUG nova.compute.manager [req-8a60a59f-e504-40dc-baed-3e6f1cbff32f req-074b1db1-3900-43e9-bbce-9cda133713c7 service nova] [instance: 47439070-54d8-454c-bf1d-7a2a33d82e9a] No waiting events found dispatching network-vif-plugged-9a08cba9-bd48-4013-a6c0-13049c0b5fdb {{(pid=62519) pop_instance_event /opt/stack/nova/nova/compute/manager.py:323}} [ 1946.490675] env[62519]: WARNING nova.compute.manager [req-8a60a59f-e504-40dc-baed-3e6f1cbff32f req-074b1db1-3900-43e9-bbce-9cda133713c7 service nova] [instance: 47439070-54d8-454c-bf1d-7a2a33d82e9a] Received unexpected event network-vif-plugged-9a08cba9-bd48-4013-a6c0-13049c0b5fdb for instance with vm_state building and task_state spawning. [ 1946.490858] env[62519]: DEBUG nova.compute.manager [req-8a60a59f-e504-40dc-baed-3e6f1cbff32f req-074b1db1-3900-43e9-bbce-9cda133713c7 service nova] [instance: 47439070-54d8-454c-bf1d-7a2a33d82e9a] Received event network-changed-9a08cba9-bd48-4013-a6c0-13049c0b5fdb {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1946.491019] env[62519]: DEBUG nova.compute.manager [req-8a60a59f-e504-40dc-baed-3e6f1cbff32f req-074b1db1-3900-43e9-bbce-9cda133713c7 service nova] [instance: 47439070-54d8-454c-bf1d-7a2a33d82e9a] Refreshing instance network info cache due to event network-changed-9a08cba9-bd48-4013-a6c0-13049c0b5fdb. 
{{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11628}} [ 1946.491206] env[62519]: DEBUG oslo_concurrency.lockutils [req-8a60a59f-e504-40dc-baed-3e6f1cbff32f req-074b1db1-3900-43e9-bbce-9cda133713c7 service nova] Acquiring lock "refresh_cache-47439070-54d8-454c-bf1d-7a2a33d82e9a" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1946.491363] env[62519]: DEBUG oslo_concurrency.lockutils [req-8a60a59f-e504-40dc-baed-3e6f1cbff32f req-074b1db1-3900-43e9-bbce-9cda133713c7 service nova] Acquired lock "refresh_cache-47439070-54d8-454c-bf1d-7a2a33d82e9a" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1946.491513] env[62519]: DEBUG nova.network.neutron [req-8a60a59f-e504-40dc-baed-3e6f1cbff32f req-074b1db1-3900-43e9-bbce-9cda133713c7 service nova] [instance: 47439070-54d8-454c-bf1d-7a2a33d82e9a] Refreshing network info cache for port 9a08cba9-bd48-4013-a6c0-13049c0b5fdb {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1946.507163] env[62519]: DEBUG nova.network.neutron [req-1df02851-f731-49e6-b41d-943cac9d2adc req-f10b3971-1009-4ef1-b60f-e179523120e7 service nova] [instance: 9bf88b2f-63f9-466b-8669-45f17319055d] Updated VIF entry in instance network info cache for port 2ee072d4-2bdb-4a83-90dd-06086f515634. {{(pid=62519) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1946.507517] env[62519]: DEBUG nova.network.neutron [req-1df02851-f731-49e6-b41d-943cac9d2adc req-f10b3971-1009-4ef1-b60f-e179523120e7 service nova] [instance: 9bf88b2f-63f9-466b-8669-45f17319055d] Updating instance_info_cache with network_info: [{"id": "2ee072d4-2bdb-4a83-90dd-06086f515634", "address": "fa:16:3e:e6:86:ec", "network": {"id": "32c7792c-9705-4830-bdbc-4c4159d566f0", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-374750788-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d73e71476254453fb23164dce09c6d41", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a0a76279-3c11-4bef-b124-2a2ee13fa377", "external-id": "nsx-vlan-transportzone-738", "segmentation_id": 738, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2ee072d4-2b", "ovs_interfaceid": "2ee072d4-2bdb-4a83-90dd-06086f515634", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1946.682862] env[62519]: DEBUG oslo_concurrency.lockutils [None req-20736fb5-73ae-4991-8bae-57131eac8b4c tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] Acquiring lock "fc3beaba-2ad5-4598-b562-557fdd552b39" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1946.683279] env[62519]: DEBUG oslo_concurrency.lockutils [None 
req-20736fb5-73ae-4991-8bae-57131eac8b4c tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] Lock "fc3beaba-2ad5-4598-b562-557fdd552b39" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1946.739823] env[62519]: DEBUG oslo_vmware.api [None req-f782d2b8-e50d-40ef-8bd5-13bb744038de tempest-ServersNegativeTestMultiTenantJSON-1721524338 tempest-ServersNegativeTestMultiTenantJSON-1721524338-project-member] Task: {'id': task-1803343, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.16059} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1946.740104] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-f782d2b8-e50d-40ef-8bd5-13bb744038de tempest-ServersNegativeTestMultiTenantJSON-1721524338 tempest-ServersNegativeTestMultiTenantJSON-1721524338-project-member] Deleted the datastore file {{(pid=62519) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1946.740363] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-f782d2b8-e50d-40ef-8bd5-13bb744038de tempest-ServersNegativeTestMultiTenantJSON-1721524338 tempest-ServersNegativeTestMultiTenantJSON-1721524338-project-member] [instance: 884ba724-1203-4513-a196-8af5258ac731] Deleted contents of the VM from datastore datastore1 {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1946.740602] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-f782d2b8-e50d-40ef-8bd5-13bb744038de tempest-ServersNegativeTestMultiTenantJSON-1721524338 tempest-ServersNegativeTestMultiTenantJSON-1721524338-project-member] [instance: 884ba724-1203-4513-a196-8af5258ac731] Instance destroyed {{(pid=62519) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1946.740867] env[62519]: INFO nova.compute.manager [None req-f782d2b8-e50d-40ef-8bd5-13bb744038de tempest-ServersNegativeTestMultiTenantJSON-1721524338 tempest-ServersNegativeTestMultiTenantJSON-1721524338-project-member] [instance: 884ba724-1203-4513-a196-8af5258ac731] Took 1.18 seconds to destroy the instance on the hypervisor. [ 1946.741229] env[62519]: DEBUG oslo.service.loopingcall [None req-f782d2b8-e50d-40ef-8bd5-13bb744038de tempest-ServersNegativeTestMultiTenantJSON-1721524338 tempest-ServersNegativeTestMultiTenantJSON-1721524338-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62519) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1946.741468] env[62519]: DEBUG nova.compute.manager [-] [instance: 884ba724-1203-4513-a196-8af5258ac731] Deallocating network for instance {{(pid=62519) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2297}} [ 1946.741601] env[62519]: DEBUG nova.network.neutron [-] [instance: 884ba724-1203-4513-a196-8af5258ac731] deallocate_for_instance() {{(pid=62519) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1946.842939] env[62519]: DEBUG oslo_concurrency.lockutils [None req-7bbed6d2-ea5b-4fc9-bed3-5d105e41bd94 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Acquiring lock "refresh_cache-47439070-54d8-454c-bf1d-7a2a33d82e9a" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1946.878665] env[62519]: DEBUG oslo_concurrency.lockutils [None req-dbaa8fb2-ac82-42c0-a4eb-9cc78c7ddbac tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Lock "f3665f89-1747-4567-9e56-c937d4ac81da" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.852s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1946.901671] env[62519]: DEBUG oslo_vmware.api [None req-e853329e-9967-40cb-bffb-428460c69b3a tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Task: {'id': task-1803344, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1947.009566] env[62519]: DEBUG oslo_concurrency.lockutils [req-1df02851-f731-49e6-b41d-943cac9d2adc req-f10b3971-1009-4ef1-b60f-e179523120e7 service nova] Releasing lock "refresh_cache-9bf88b2f-63f9-466b-8669-45f17319055d" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1947.037803] env[62519]: DEBUG nova.network.neutron [req-8a60a59f-e504-40dc-baed-3e6f1cbff32f req-074b1db1-3900-43e9-bbce-9cda133713c7 service nova] [instance: 47439070-54d8-454c-bf1d-7a2a33d82e9a] Instance cache missing network info. 
{{(pid=62519) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1947.117877] env[62519]: DEBUG nova.network.neutron [req-8a60a59f-e504-40dc-baed-3e6f1cbff32f req-074b1db1-3900-43e9-bbce-9cda133713c7 service nova] [instance: 47439070-54d8-454c-bf1d-7a2a33d82e9a] Updating instance_info_cache with network_info: [] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1947.185909] env[62519]: INFO nova.compute.manager [None req-20736fb5-73ae-4991-8bae-57131eac8b4c tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] [instance: fc3beaba-2ad5-4598-b562-557fdd552b39] Detaching volume 3d71e984-2dc9-49f0-8bdb-218fb8ee5444 [ 1947.230737] env[62519]: INFO nova.virt.block_device [None req-20736fb5-73ae-4991-8bae-57131eac8b4c tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] [instance: fc3beaba-2ad5-4598-b562-557fdd552b39] Attempting to driver detach volume 3d71e984-2dc9-49f0-8bdb-218fb8ee5444 from mountpoint /dev/sdb [ 1947.230981] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-20736fb5-73ae-4991-8bae-57131eac8b4c tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] [instance: fc3beaba-2ad5-4598-b562-557fdd552b39] Volume detach. Driver type: vmdk {{(pid=62519) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1947.231197] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-20736fb5-73ae-4991-8bae-57131eac8b4c tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] [instance: fc3beaba-2ad5-4598-b562-557fdd552b39] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-373857', 'volume_id': '3d71e984-2dc9-49f0-8bdb-218fb8ee5444', 'name': 'volume-3d71e984-2dc9-49f0-8bdb-218fb8ee5444', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'fc3beaba-2ad5-4598-b562-557fdd552b39', 'attached_at': '', 'detached_at': '', 'volume_id': '3d71e984-2dc9-49f0-8bdb-218fb8ee5444', 'serial': '3d71e984-2dc9-49f0-8bdb-218fb8ee5444'} {{(pid=62519) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1947.232273] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8127830-df1f-4c6f-8baa-176be5338467 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1947.257967] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83b41938-d849-4311-bc02-9aa18b7d63f7 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1947.267512] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89b5a7ec-6739-4945-b104-8f97dd8a0846 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1947.293801] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71ccf9e9-609f-4a1c-9446-eaf20207097a {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1947.311407] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-20736fb5-73ae-4991-8bae-57131eac8b4c tempest-AttachVolumeTestJSON-1131407372 
tempest-AttachVolumeTestJSON-1131407372-project-member] The volume has not been displaced from its original location: [datastore1] volume-3d71e984-2dc9-49f0-8bdb-218fb8ee5444/volume-3d71e984-2dc9-49f0-8bdb-218fb8ee5444.vmdk. No consolidation needed. {{(pid=62519) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1947.317767] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-20736fb5-73ae-4991-8bae-57131eac8b4c tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] [instance: fc3beaba-2ad5-4598-b562-557fdd552b39] Reconfiguring VM instance instance-00000069 to detach disk 2001 {{(pid=62519) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1947.317767] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-72ec5b86-3dc4-41cf-b65f-b096aac98a7a {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1947.337580] env[62519]: DEBUG oslo_vmware.api [None req-20736fb5-73ae-4991-8bae-57131eac8b4c tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] Waiting for the task: (returnval){ [ 1947.337580] env[62519]: value = "task-1803345" [ 1947.337580] env[62519]: _type = "Task" [ 1947.337580] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1947.349935] env[62519]: DEBUG oslo_vmware.api [None req-20736fb5-73ae-4991-8bae-57131eac8b4c tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] Task: {'id': task-1803345, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1947.402723] env[62519]: DEBUG oslo_vmware.api [None req-e853329e-9967-40cb-bffb-428460c69b3a tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Task: {'id': task-1803344, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.520522} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1947.402781] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-e853329e-9967-40cb-bffb-428460c69b3a tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk to [datastore1] 9bf88b2f-63f9-466b-8669-45f17319055d/9bf88b2f-63f9-466b-8669-45f17319055d.vmdk {{(pid=62519) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1947.402973] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-e853329e-9967-40cb-bffb-428460c69b3a tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] [instance: 9bf88b2f-63f9-466b-8669-45f17319055d] Extending root virtual disk to 1048576 {{(pid=62519) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1947.403271] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-3fc2f349-eebe-48c1-8f12-ee0280121f16 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1947.412887] env[62519]: DEBUG oslo_vmware.api [None req-e853329e-9967-40cb-bffb-428460c69b3a tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Waiting for the task: (returnval){ [ 1947.412887] env[62519]: value = "task-1803346" [ 1947.412887] env[62519]: _type = "Task" [ 1947.412887] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1947.422440] env[62519]: DEBUG oslo_vmware.api [None req-e853329e-9967-40cb-bffb-428460c69b3a tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Task: {'id': task-1803346, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1947.543360] env[62519]: DEBUG nova.network.neutron [-] [instance: 884ba724-1203-4513-a196-8af5258ac731] Updating instance_info_cache with network_info: [] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1947.621311] env[62519]: DEBUG oslo_concurrency.lockutils [req-8a60a59f-e504-40dc-baed-3e6f1cbff32f req-074b1db1-3900-43e9-bbce-9cda133713c7 service nova] Releasing lock "refresh_cache-47439070-54d8-454c-bf1d-7a2a33d82e9a" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1947.622107] env[62519]: DEBUG oslo_concurrency.lockutils [None req-7bbed6d2-ea5b-4fc9-bed3-5d105e41bd94 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Acquired lock "refresh_cache-47439070-54d8-454c-bf1d-7a2a33d82e9a" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1947.622353] env[62519]: DEBUG nova.network.neutron [None req-7bbed6d2-ea5b-4fc9-bed3-5d105e41bd94 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] [instance: 47439070-54d8-454c-bf1d-7a2a33d82e9a] Building network info cache for instance {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1947.850828] env[62519]: DEBUG oslo_vmware.api [None req-20736fb5-73ae-4991-8bae-57131eac8b4c tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] Task: {'id': task-1803345, 'name': ReconfigVM_Task, 'duration_secs': 0.497595} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1947.850828] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-20736fb5-73ae-4991-8bae-57131eac8b4c tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] [instance: fc3beaba-2ad5-4598-b562-557fdd552b39] Reconfigured VM instance instance-00000069 to detach disk 2001 {{(pid=62519) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1947.853967] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-be5606a3-eb81-4704-9e8f-ca5ab8cc0b74 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1947.871141] env[62519]: DEBUG oslo_vmware.api [None req-20736fb5-73ae-4991-8bae-57131eac8b4c tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] Waiting for the task: (returnval){ [ 1947.871141] env[62519]: value = "task-1803348" [ 1947.871141] env[62519]: _type = "Task" [ 1947.871141] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1947.880374] env[62519]: DEBUG oslo_vmware.api [None req-20736fb5-73ae-4991-8bae-57131eac8b4c tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] Task: {'id': task-1803348, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1947.922903] env[62519]: DEBUG oslo_vmware.api [None req-e853329e-9967-40cb-bffb-428460c69b3a tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Task: {'id': task-1803346, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.077049} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1947.923192] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-e853329e-9967-40cb-bffb-428460c69b3a tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] [instance: 9bf88b2f-63f9-466b-8669-45f17319055d] Extended root virtual disk {{(pid=62519) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1947.923971] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70377f1c-a3f7-4afa-a5bb-911c239bfe70 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1947.947689] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-e853329e-9967-40cb-bffb-428460c69b3a tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] [instance: 9bf88b2f-63f9-466b-8669-45f17319055d] Reconfiguring VM instance instance-00000071 to attach disk [datastore1] 9bf88b2f-63f9-466b-8669-45f17319055d/9bf88b2f-63f9-466b-8669-45f17319055d.vmdk or device None with type sparse {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1947.948024] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-23bff52c-8f90-4fe1-b9fb-12122fca5f98 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1947.969187] env[62519]: DEBUG oslo_vmware.api [None req-e853329e-9967-40cb-bffb-428460c69b3a tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Waiting for the task: (returnval){ [ 1947.969187] env[62519]: value = "task-1803349" [ 1947.969187] env[62519]: _type = "Task" [ 1947.969187] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1947.977932] env[62519]: DEBUG oslo_vmware.api [None req-e853329e-9967-40cb-bffb-428460c69b3a tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Task: {'id': task-1803349, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1948.017037] env[62519]: DEBUG oslo_concurrency.lockutils [None req-d63ba5ad-580c-4fa2-8333-4d3b0b757f51 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Acquiring lock "31db4b14-0ba3-4159-accc-31c21bd81322" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1948.017037] env[62519]: DEBUG oslo_concurrency.lockutils [None req-d63ba5ad-580c-4fa2-8333-4d3b0b757f51 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Lock "31db4b14-0ba3-4159-accc-31c21bd81322" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1948.017220] env[62519]: DEBUG oslo_concurrency.lockutils [None req-d63ba5ad-580c-4fa2-8333-4d3b0b757f51 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Acquiring lock "31db4b14-0ba3-4159-accc-31c21bd81322-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1948.017327] env[62519]: DEBUG oslo_concurrency.lockutils [None req-d63ba5ad-580c-4fa2-8333-4d3b0b757f51 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Lock "31db4b14-0ba3-4159-accc-31c21bd81322-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1948.017501] env[62519]: DEBUG oslo_concurrency.lockutils [None req-d63ba5ad-580c-4fa2-8333-4d3b0b757f51 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Lock "31db4b14-0ba3-4159-accc-31c21bd81322-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1948.019825] env[62519]: INFO nova.compute.manager [None req-d63ba5ad-580c-4fa2-8333-4d3b0b757f51 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] [instance: 31db4b14-0ba3-4159-accc-31c21bd81322] Terminating instance [ 1948.045658] env[62519]: INFO nova.compute.manager [-] [instance: 884ba724-1203-4513-a196-8af5258ac731] Took 1.30 seconds to deallocate network for instance. [ 1948.154968] env[62519]: DEBUG nova.network.neutron [None req-7bbed6d2-ea5b-4fc9-bed3-5d105e41bd94 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] [instance: 47439070-54d8-454c-bf1d-7a2a33d82e9a] Instance cache missing network info. 
{{(pid=62519) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1948.293330] env[62519]: DEBUG nova.network.neutron [None req-7bbed6d2-ea5b-4fc9-bed3-5d105e41bd94 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] [instance: 47439070-54d8-454c-bf1d-7a2a33d82e9a] Updating instance_info_cache with network_info: [{"id": "9a08cba9-bd48-4013-a6c0-13049c0b5fdb", "address": "fa:16:3e:82:31:6c", "network": {"id": "1aae0533-b463-4e15-b245-e7c3c91984b5", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1664918037-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ff4895c6c47e438e8fb9fbc0ffbfdc82", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1f762954-6ca5-4da5-bf0a-5d31c51ec570", "external-id": "nsx-vlan-transportzone-930", "segmentation_id": 930, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9a08cba9-bd", "ovs_interfaceid": "9a08cba9-bd48-4013-a6c0-13049c0b5fdb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1948.383433] env[62519]: DEBUG oslo_vmware.api [None req-20736fb5-73ae-4991-8bae-57131eac8b4c tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] Task: {'id': task-1803348, 'name': ReconfigVM_Task, 'duration_secs': 0.249141} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1948.383725] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-20736fb5-73ae-4991-8bae-57131eac8b4c tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] [instance: fc3beaba-2ad5-4598-b562-557fdd552b39] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-373857', 'volume_id': '3d71e984-2dc9-49f0-8bdb-218fb8ee5444', 'name': 'volume-3d71e984-2dc9-49f0-8bdb-218fb8ee5444', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'fc3beaba-2ad5-4598-b562-557fdd552b39', 'attached_at': '', 'detached_at': '', 'volume_id': '3d71e984-2dc9-49f0-8bdb-218fb8ee5444', 'serial': '3d71e984-2dc9-49f0-8bdb-218fb8ee5444'} {{(pid=62519) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1948.479937] env[62519]: DEBUG oslo_vmware.api [None req-e853329e-9967-40cb-bffb-428460c69b3a tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Task: {'id': task-1803349, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1948.514883] env[62519]: DEBUG nova.compute.manager [req-25b8ab8a-51c5-44e3-98a4-aa4c7ef9776d req-2ef8999c-4259-44dc-8c07-69fba4b2ba03 service nova] [instance: 884ba724-1203-4513-a196-8af5258ac731] Received event network-vif-deleted-0bc29684-3b3d-44d3-9d82-f78933d230f1 {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1948.524377] env[62519]: DEBUG nova.compute.manager [None req-d63ba5ad-580c-4fa2-8333-4d3b0b757f51 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] [instance: 31db4b14-0ba3-4159-accc-31c21bd81322] Start destroying the instance on the hypervisor. {{(pid=62519) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3166}} [ 1948.524892] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-d63ba5ad-580c-4fa2-8333-4d3b0b757f51 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] [instance: 31db4b14-0ba3-4159-accc-31c21bd81322] Powering off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1948.526039] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-637c8f33-1e14-4b06-80ee-b39c057b9d81 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1948.534332] env[62519]: DEBUG oslo_vmware.api [None req-d63ba5ad-580c-4fa2-8333-4d3b0b757f51 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Waiting for the task: (returnval){ [ 1948.534332] env[62519]: value = "task-1803350" [ 1948.534332] env[62519]: _type = "Task" [ 1948.534332] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1948.543067] env[62519]: DEBUG oslo_vmware.api [None req-d63ba5ad-580c-4fa2-8333-4d3b0b757f51 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Task: {'id': task-1803350, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1948.551851] env[62519]: DEBUG oslo_concurrency.lockutils [None req-f782d2b8-e50d-40ef-8bd5-13bb744038de tempest-ServersNegativeTestMultiTenantJSON-1721524338 tempest-ServersNegativeTestMultiTenantJSON-1721524338-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1948.552166] env[62519]: DEBUG oslo_concurrency.lockutils [None req-f782d2b8-e50d-40ef-8bd5-13bb744038de tempest-ServersNegativeTestMultiTenantJSON-1721524338 tempest-ServersNegativeTestMultiTenantJSON-1721524338-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1948.552401] env[62519]: DEBUG nova.objects.instance [None req-f782d2b8-e50d-40ef-8bd5-13bb744038de tempest-ServersNegativeTestMultiTenantJSON-1721524338 tempest-ServersNegativeTestMultiTenantJSON-1721524338-project-member] Lazy-loading 'resources' on Instance uuid 884ba724-1203-4513-a196-8af5258ac731 {{(pid=62519) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1948.797067] env[62519]: DEBUG oslo_concurrency.lockutils [None req-7bbed6d2-ea5b-4fc9-bed3-5d105e41bd94 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Releasing lock "refresh_cache-47439070-54d8-454c-bf1d-7a2a33d82e9a" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1948.797067] env[62519]: DEBUG nova.compute.manager [None req-7bbed6d2-ea5b-4fc9-bed3-5d105e41bd94 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] [instance: 47439070-54d8-454c-bf1d-7a2a33d82e9a] Instance network_info: |[{"id": "9a08cba9-bd48-4013-a6c0-13049c0b5fdb", "address": "fa:16:3e:82:31:6c", "network": {"id": "1aae0533-b463-4e15-b245-e7c3c91984b5", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1664918037-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ff4895c6c47e438e8fb9fbc0ffbfdc82", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1f762954-6ca5-4da5-bf0a-5d31c51ec570", "external-id": "nsx-vlan-transportzone-930", "segmentation_id": 930, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9a08cba9-bd", "ovs_interfaceid": "9a08cba9-bd48-4013-a6c0-13049c0b5fdb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62519) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2004}} [ 1948.797506] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-7bbed6d2-ea5b-4fc9-bed3-5d105e41bd94 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] [instance: 
47439070-54d8-454c-bf1d-7a2a33d82e9a] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:82:31:6c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '1f762954-6ca5-4da5-bf0a-5d31c51ec570', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '9a08cba9-bd48-4013-a6c0-13049c0b5fdb', 'vif_model': 'vmxnet3'}] {{(pid=62519) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1948.805279] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-7bbed6d2-ea5b-4fc9-bed3-5d105e41bd94 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Creating folder: Project (ff4895c6c47e438e8fb9fbc0ffbfdc82). Parent ref: group-v373567. {{(pid=62519) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1948.805636] env[62519]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e9479280-c5e3-473d-a5da-25cf83d96364 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1948.818661] env[62519]: INFO nova.virt.vmwareapi.vm_util [None req-7bbed6d2-ea5b-4fc9-bed3-5d105e41bd94 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Created folder: Project (ff4895c6c47e438e8fb9fbc0ffbfdc82) in parent group-v373567. [ 1948.818851] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-7bbed6d2-ea5b-4fc9-bed3-5d105e41bd94 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Creating folder: Instances. Parent ref: group-v373863. {{(pid=62519) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1948.819101] env[62519]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-1854fbe5-2986-49a6-adb8-a7aab0325dcc {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1948.830500] env[62519]: INFO nova.virt.vmwareapi.vm_util [None req-7bbed6d2-ea5b-4fc9-bed3-5d105e41bd94 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Created folder: Instances in parent group-v373863. [ 1948.830839] env[62519]: DEBUG oslo.service.loopingcall [None req-7bbed6d2-ea5b-4fc9-bed3-5d105e41bd94 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62519) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1948.831063] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 47439070-54d8-454c-bf1d-7a2a33d82e9a] Creating VM on the ESX host {{(pid=62519) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1948.831276] env[62519]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-eb743907-8a4f-4b15-8161-500776ea85dc {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1948.850679] env[62519]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1948.850679] env[62519]: value = "task-1803353" [ 1948.850679] env[62519]: _type = "Task" [ 1948.850679] env[62519]: } to complete. 
{{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1948.858706] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1803353, 'name': CreateVM_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1948.932062] env[62519]: DEBUG nova.objects.instance [None req-20736fb5-73ae-4991-8bae-57131eac8b4c tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] Lazy-loading 'flavor' on Instance uuid fc3beaba-2ad5-4598-b562-557fdd552b39 {{(pid=62519) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1948.981212] env[62519]: DEBUG oslo_vmware.api [None req-e853329e-9967-40cb-bffb-428460c69b3a tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Task: {'id': task-1803349, 'name': ReconfigVM_Task, 'duration_secs': 0.694042} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1948.981649] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-e853329e-9967-40cb-bffb-428460c69b3a tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] [instance: 9bf88b2f-63f9-466b-8669-45f17319055d] Reconfigured VM instance instance-00000071 to attach disk [datastore1] 9bf88b2f-63f9-466b-8669-45f17319055d/9bf88b2f-63f9-466b-8669-45f17319055d.vmdk or device None with type sparse {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1948.982353] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-3789621d-08da-4972-ab9f-eb41076cc5e4 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1948.989493] env[62519]: DEBUG oslo_vmware.api [None req-e853329e-9967-40cb-bffb-428460c69b3a tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Waiting for the task: (returnval){ [ 1948.989493] env[62519]: value = "task-1803354" [ 1948.989493] env[62519]: _type = "Task" [ 1948.989493] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1948.998639] env[62519]: DEBUG oslo_vmware.api [None req-e853329e-9967-40cb-bffb-428460c69b3a tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Task: {'id': task-1803354, 'name': Rename_Task} progress is 5%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1949.044790] env[62519]: DEBUG oslo_vmware.api [None req-d63ba5ad-580c-4fa2-8333-4d3b0b757f51 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Task: {'id': task-1803350, 'name': PowerOffVM_Task, 'duration_secs': 0.197763} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1949.045029] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-d63ba5ad-580c-4fa2-8333-4d3b0b757f51 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] [instance: 31db4b14-0ba3-4159-accc-31c21bd81322] Powered off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1949.045187] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-d63ba5ad-580c-4fa2-8333-4d3b0b757f51 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] [instance: 31db4b14-0ba3-4159-accc-31c21bd81322] Volume detach. Driver type: vmdk {{(pid=62519) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1949.045377] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-d63ba5ad-580c-4fa2-8333-4d3b0b757f51 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] [instance: 31db4b14-0ba3-4159-accc-31c21bd81322] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-373835', 'volume_id': '84bda086-0de7-4b24-8a1f-6ff7c11594b6', 'name': 'volume-84bda086-0de7-4b24-8a1f-6ff7c11594b6', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'attaching', 'instance': '31db4b14-0ba3-4159-accc-31c21bd81322', 'attached_at': '2024-12-12T08:19:52.000000', 'detached_at': '', 'volume_id': '84bda086-0de7-4b24-8a1f-6ff7c11594b6', 'serial': '84bda086-0de7-4b24-8a1f-6ff7c11594b6'} {{(pid=62519) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1949.046153] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93b3afdf-79b0-48a1-88a4-0016ddb0d54e {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1949.068609] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-590ec4c9-0087-43ef-9ea1-2d35af9c57c1 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1949.075918] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a75b1f85-96c4-46e6-b05b-880fda84bc8a {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1949.099640] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5bed10d0-4e7f-4d05-99d5-1ebc9a1dfdbb {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1949.115648] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-d63ba5ad-580c-4fa2-8333-4d3b0b757f51 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] The volume has not been displaced from its original location: [datastore1] volume-84bda086-0de7-4b24-8a1f-6ff7c11594b6/volume-84bda086-0de7-4b24-8a1f-6ff7c11594b6.vmdk. No consolidation needed. 
{{(pid=62519) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1949.121039] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-d63ba5ad-580c-4fa2-8333-4d3b0b757f51 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] [instance: 31db4b14-0ba3-4159-accc-31c21bd81322] Reconfiguring VM instance instance-0000006a to detach disk 2000 {{(pid=62519) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1949.123806] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9ceebd5e-7a86-42ce-9be9-62ac795cfd46 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1949.144336] env[62519]: DEBUG oslo_vmware.api [None req-d63ba5ad-580c-4fa2-8333-4d3b0b757f51 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Waiting for the task: (returnval){ [ 1949.144336] env[62519]: value = "task-1803355" [ 1949.144336] env[62519]: _type = "Task" [ 1949.144336] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1949.156158] env[62519]: DEBUG oslo_vmware.api [None req-d63ba5ad-580c-4fa2-8333-4d3b0b757f51 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Task: {'id': task-1803355, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1949.292125] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-befda731-dcd7-4de9-a84c-7e4175d25ac5 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1949.300504] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b371cf92-9827-4950-94bd-e26f6343697e {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1949.332221] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c36a64a5-6b4c-475e-8182-9767f1fde421 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1949.341624] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-141f45de-8e6f-4347-9c44-57387f9631d6 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1949.356667] env[62519]: DEBUG nova.compute.provider_tree [None req-f782d2b8-e50d-40ef-8bd5-13bb744038de tempest-ServersNegativeTestMultiTenantJSON-1721524338 tempest-ServersNegativeTestMultiTenantJSON-1721524338-project-member] Inventory has not changed in ProviderTree for provider: f8ca0d98-9158-4b85-ae0e-b106f966dd44 {{(pid=62519) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1949.368346] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1803353, 'name': CreateVM_Task, 'duration_secs': 0.471404} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1949.369174] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 47439070-54d8-454c-bf1d-7a2a33d82e9a] Created VM on the ESX host {{(pid=62519) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1949.370771] env[62519]: DEBUG oslo_concurrency.lockutils [None req-7bbed6d2-ea5b-4fc9-bed3-5d105e41bd94 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1949.370943] env[62519]: DEBUG oslo_concurrency.lockutils [None req-7bbed6d2-ea5b-4fc9-bed3-5d105e41bd94 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Acquired lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1949.371325] env[62519]: DEBUG oslo_concurrency.lockutils [None req-7bbed6d2-ea5b-4fc9-bed3-5d105e41bd94 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1949.371893] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a4a2c22d-af2f-474d-9936-3e78968366ec {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1949.377166] env[62519]: DEBUG oslo_vmware.api [None req-7bbed6d2-ea5b-4fc9-bed3-5d105e41bd94 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Waiting for the task: (returnval){ [ 1949.377166] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]529aac19-81e0-9893-2875-ef9fbab0b1c3" [ 1949.377166] env[62519]: _type = "Task" [ 1949.377166] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1949.387858] env[62519]: DEBUG oslo_vmware.api [None req-7bbed6d2-ea5b-4fc9-bed3-5d105e41bd94 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]529aac19-81e0-9893-2875-ef9fbab0b1c3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1949.499507] env[62519]: DEBUG oslo_vmware.api [None req-e853329e-9967-40cb-bffb-428460c69b3a tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Task: {'id': task-1803354, 'name': Rename_Task} progress is 99%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1949.654288] env[62519]: DEBUG oslo_vmware.api [None req-d63ba5ad-580c-4fa2-8333-4d3b0b757f51 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Task: {'id': task-1803355, 'name': ReconfigVM_Task, 'duration_secs': 0.198576} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1949.654615] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-d63ba5ad-580c-4fa2-8333-4d3b0b757f51 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] [instance: 31db4b14-0ba3-4159-accc-31c21bd81322] Reconfigured VM instance instance-0000006a to detach disk 2000 {{(pid=62519) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1949.659247] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3a03c5dc-e8df-4b53-9aee-8577e8bc0b22 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1949.675915] env[62519]: DEBUG oslo_vmware.api [None req-d63ba5ad-580c-4fa2-8333-4d3b0b757f51 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Waiting for the task: (returnval){ [ 1949.675915] env[62519]: value = "task-1803356" [ 1949.675915] env[62519]: _type = "Task" [ 1949.675915] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1949.685294] env[62519]: DEBUG oslo_vmware.api [None req-d63ba5ad-580c-4fa2-8333-4d3b0b757f51 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Task: {'id': task-1803356, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1949.863498] env[62519]: DEBUG nova.scheduler.client.report [None req-f782d2b8-e50d-40ef-8bd5-13bb744038de tempest-ServersNegativeTestMultiTenantJSON-1721524338 tempest-ServersNegativeTestMultiTenantJSON-1721524338-project-member] Inventory has not changed for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1949.889015] env[62519]: DEBUG oslo_vmware.api [None req-7bbed6d2-ea5b-4fc9-bed3-5d105e41bd94 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]529aac19-81e0-9893-2875-ef9fbab0b1c3, 'name': SearchDatastore_Task, 'duration_secs': 0.011583} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1949.889363] env[62519]: DEBUG oslo_concurrency.lockutils [None req-7bbed6d2-ea5b-4fc9-bed3-5d105e41bd94 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Releasing lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1949.889675] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-7bbed6d2-ea5b-4fc9-bed3-5d105e41bd94 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] [instance: 47439070-54d8-454c-bf1d-7a2a33d82e9a] Processing image 15793716-f1d9-4a86-9030-717adf498693 {{(pid=62519) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1949.889936] env[62519]: DEBUG oslo_concurrency.lockutils [None req-7bbed6d2-ea5b-4fc9-bed3-5d105e41bd94 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1949.890129] env[62519]: DEBUG oslo_concurrency.lockutils [None req-7bbed6d2-ea5b-4fc9-bed3-5d105e41bd94 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Acquired lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1949.890351] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-7bbed6d2-ea5b-4fc9-bed3-5d105e41bd94 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62519) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1949.890616] env[62519]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-501fc03e-81d4-4342-a0ef-0ea9286b6499 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1949.901048] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-7bbed6d2-ea5b-4fc9-bed3-5d105e41bd94 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62519) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1949.901048] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-7bbed6d2-ea5b-4fc9-bed3-5d105e41bd94 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62519) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1949.901190] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3023c446-abc5-4c22-a546-90d3a7fb098e {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1949.907211] env[62519]: DEBUG oslo_vmware.api [None req-7bbed6d2-ea5b-4fc9-bed3-5d105e41bd94 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Waiting for the task: (returnval){ [ 1949.907211] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]52cbcfe0-da54-14d1-fc70-c13c246e8320" [ 1949.907211] env[62519]: _type = "Task" [ 1949.907211] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1949.915736] env[62519]: DEBUG oslo_vmware.api [None req-7bbed6d2-ea5b-4fc9-bed3-5d105e41bd94 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52cbcfe0-da54-14d1-fc70-c13c246e8320, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1949.939906] env[62519]: DEBUG oslo_concurrency.lockutils [None req-20736fb5-73ae-4991-8bae-57131eac8b4c tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] Lock "fc3beaba-2ad5-4598-b562-557fdd552b39" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.257s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1949.953788] env[62519]: DEBUG oslo_concurrency.lockutils [None req-c46172b1-9fd1-45eb-96d8-79141ef9e7d0 tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] Acquiring lock "fc3beaba-2ad5-4598-b562-557fdd552b39" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1949.954046] env[62519]: DEBUG oslo_concurrency.lockutils [None req-c46172b1-9fd1-45eb-96d8-79141ef9e7d0 tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] Lock "fc3beaba-2ad5-4598-b562-557fdd552b39" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1950.002605] env[62519]: DEBUG oslo_vmware.api [None req-e853329e-9967-40cb-bffb-428460c69b3a tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Task: {'id': task-1803354, 'name': Rename_Task} progress is 99%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1950.186910] env[62519]: DEBUG oslo_vmware.api [None req-d63ba5ad-580c-4fa2-8333-4d3b0b757f51 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Task: {'id': task-1803356, 'name': ReconfigVM_Task, 'duration_secs': 0.27241} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1950.187258] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-d63ba5ad-580c-4fa2-8333-4d3b0b757f51 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] [instance: 31db4b14-0ba3-4159-accc-31c21bd81322] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-373835', 'volume_id': '84bda086-0de7-4b24-8a1f-6ff7c11594b6', 'name': 'volume-84bda086-0de7-4b24-8a1f-6ff7c11594b6', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'attaching', 'instance': '31db4b14-0ba3-4159-accc-31c21bd81322', 'attached_at': '2024-12-12T08:19:52.000000', 'detached_at': '', 'volume_id': '84bda086-0de7-4b24-8a1f-6ff7c11594b6', 'serial': '84bda086-0de7-4b24-8a1f-6ff7c11594b6'} {{(pid=62519) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1950.187539] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-d63ba5ad-580c-4fa2-8333-4d3b0b757f51 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] [instance: 31db4b14-0ba3-4159-accc-31c21bd81322] Destroying instance {{(pid=62519) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1950.188324] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0941442c-6b69-434a-902b-506a8a9bb0bc {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1950.195309] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-d63ba5ad-580c-4fa2-8333-4d3b0b757f51 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] [instance: 31db4b14-0ba3-4159-accc-31c21bd81322] Unregistering the VM {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1950.195949] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-59dea1e3-d11d-44ed-9cb5-8436f919f61d {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1950.355034] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-d63ba5ad-580c-4fa2-8333-4d3b0b757f51 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] [instance: 31db4b14-0ba3-4159-accc-31c21bd81322] Unregistered the VM {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1950.355034] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-d63ba5ad-580c-4fa2-8333-4d3b0b757f51 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] [instance: 31db4b14-0ba3-4159-accc-31c21bd81322] Deleting contents of the VM from datastore datastore1 {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1950.355034] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-d63ba5ad-580c-4fa2-8333-4d3b0b757f51 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Deleting the datastore file [datastore1] 31db4b14-0ba3-4159-accc-31c21bd81322 {{(pid=62519) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1950.355034] env[62519]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-016e6662-81c7-4f0a-83f9-4d59a497f574 {{(pid=62519) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1950.360754] env[62519]: DEBUG oslo_vmware.api [None req-d63ba5ad-580c-4fa2-8333-4d3b0b757f51 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Waiting for the task: (returnval){ [ 1950.360754] env[62519]: value = "task-1803358" [ 1950.360754] env[62519]: _type = "Task" [ 1950.360754] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1950.368854] env[62519]: DEBUG oslo_concurrency.lockutils [None req-f782d2b8-e50d-40ef-8bd5-13bb744038de tempest-ServersNegativeTestMultiTenantJSON-1721524338 tempest-ServersNegativeTestMultiTenantJSON-1721524338-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.817s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1950.370798] env[62519]: DEBUG oslo_vmware.api [None req-d63ba5ad-580c-4fa2-8333-4d3b0b757f51 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Task: {'id': task-1803358, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1950.387120] env[62519]: INFO nova.scheduler.client.report [None req-f782d2b8-e50d-40ef-8bd5-13bb744038de tempest-ServersNegativeTestMultiTenantJSON-1721524338 tempest-ServersNegativeTestMultiTenantJSON-1721524338-project-member] Deleted allocations for instance 884ba724-1203-4513-a196-8af5258ac731 [ 1950.419479] env[62519]: DEBUG oslo_vmware.api [None req-7bbed6d2-ea5b-4fc9-bed3-5d105e41bd94 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52cbcfe0-da54-14d1-fc70-c13c246e8320, 'name': SearchDatastore_Task, 'duration_secs': 0.010007} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1950.420295] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9ff7c280-7d10-4bd0-9714-1e13df55ef19 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1950.426016] env[62519]: DEBUG oslo_vmware.api [None req-7bbed6d2-ea5b-4fc9-bed3-5d105e41bd94 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Waiting for the task: (returnval){ [ 1950.426016] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]527d356c-93e4-912a-7512-a26b6af62056" [ 1950.426016] env[62519]: _type = "Task" [ 1950.426016] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1950.434865] env[62519]: DEBUG oslo_vmware.api [None req-7bbed6d2-ea5b-4fc9-bed3-5d105e41bd94 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]527d356c-93e4-912a-7512-a26b6af62056, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1950.457072] env[62519]: INFO nova.compute.manager [None req-c46172b1-9fd1-45eb-96d8-79141ef9e7d0 tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] [instance: fc3beaba-2ad5-4598-b562-557fdd552b39] Detaching volume 8a192ee5-7f6b-43f4-baac-71e4e69cc484 [ 1950.490869] env[62519]: INFO nova.virt.block_device [None req-c46172b1-9fd1-45eb-96d8-79141ef9e7d0 tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] [instance: fc3beaba-2ad5-4598-b562-557fdd552b39] Attempting to driver detach volume 8a192ee5-7f6b-43f4-baac-71e4e69cc484 from mountpoint /dev/sdc [ 1950.491149] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-c46172b1-9fd1-45eb-96d8-79141ef9e7d0 tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] [instance: fc3beaba-2ad5-4598-b562-557fdd552b39] Volume detach. Driver type: vmdk {{(pid=62519) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1950.491317] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-c46172b1-9fd1-45eb-96d8-79141ef9e7d0 tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] [instance: fc3beaba-2ad5-4598-b562-557fdd552b39] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-373861', 'volume_id': '8a192ee5-7f6b-43f4-baac-71e4e69cc484', 'name': 'volume-8a192ee5-7f6b-43f4-baac-71e4e69cc484', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'fc3beaba-2ad5-4598-b562-557fdd552b39', 'attached_at': '', 'detached_at': '', 'volume_id': '8a192ee5-7f6b-43f4-baac-71e4e69cc484', 'serial': '8a192ee5-7f6b-43f4-baac-71e4e69cc484'} {{(pid=62519) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1950.492225] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07eb4181-f394-4512-bd3d-938f0aa1fda8 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1950.503976] env[62519]: DEBUG oslo_vmware.api [None req-e853329e-9967-40cb-bffb-428460c69b3a tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Task: {'id': task-1803354, 'name': Rename_Task, 'duration_secs': 1.126163} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1950.518879] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-e853329e-9967-40cb-bffb-428460c69b3a tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] [instance: 9bf88b2f-63f9-466b-8669-45f17319055d] Powering on the VM {{(pid=62519) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1950.519518] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-72020faf-293b-4a4b-8ce2-80f2e96bd3fb {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1950.521554] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-558988f1-4c54-4491-909f-8d000468f9b6 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1950.529163] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46bb5af8-9ebd-4bef-bdd0-e75abbff32f5 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1950.532560] env[62519]: DEBUG oslo_vmware.api [None req-e853329e-9967-40cb-bffb-428460c69b3a tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Waiting for the task: (returnval){ [ 1950.532560] env[62519]: value = "task-1803359" [ 1950.532560] env[62519]: _type = "Task" [ 1950.532560] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1950.552335] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af6161e1-d51f-4828-aaca-abec75d5bd92 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1950.557884] env[62519]: DEBUG oslo_vmware.api [None req-e853329e-9967-40cb-bffb-428460c69b3a tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Task: {'id': task-1803359, 'name': PowerOnVM_Task} progress is 33%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1950.572070] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-c46172b1-9fd1-45eb-96d8-79141ef9e7d0 tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] The volume has not been displaced from its original location: [datastore1] volume-8a192ee5-7f6b-43f4-baac-71e4e69cc484/volume-8a192ee5-7f6b-43f4-baac-71e4e69cc484.vmdk. No consolidation needed. 
{{(pid=62519) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1950.577461] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-c46172b1-9fd1-45eb-96d8-79141ef9e7d0 tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] [instance: fc3beaba-2ad5-4598-b562-557fdd552b39] Reconfiguring VM instance instance-00000069 to detach disk 2002 {{(pid=62519) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1950.577786] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d364dabd-48e9-4b59-bac1-b1b091104de8 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1950.596929] env[62519]: DEBUG oslo_vmware.api [None req-c46172b1-9fd1-45eb-96d8-79141ef9e7d0 tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] Waiting for the task: (returnval){ [ 1950.596929] env[62519]: value = "task-1803360" [ 1950.596929] env[62519]: _type = "Task" [ 1950.596929] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1950.608581] env[62519]: DEBUG oslo_vmware.api [None req-c46172b1-9fd1-45eb-96d8-79141ef9e7d0 tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] Task: {'id': task-1803360, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1950.871673] env[62519]: DEBUG oslo_vmware.api [None req-d63ba5ad-580c-4fa2-8333-4d3b0b757f51 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Task: {'id': task-1803358, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.098727} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1950.872011] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-d63ba5ad-580c-4fa2-8333-4d3b0b757f51 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Deleted the datastore file {{(pid=62519) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1950.872126] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-d63ba5ad-580c-4fa2-8333-4d3b0b757f51 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] [instance: 31db4b14-0ba3-4159-accc-31c21bd81322] Deleted contents of the VM from datastore datastore1 {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1950.872300] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-d63ba5ad-580c-4fa2-8333-4d3b0b757f51 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] [instance: 31db4b14-0ba3-4159-accc-31c21bd81322] Instance destroyed {{(pid=62519) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1950.872484] env[62519]: INFO nova.compute.manager [None req-d63ba5ad-580c-4fa2-8333-4d3b0b757f51 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] [instance: 31db4b14-0ba3-4159-accc-31c21bd81322] Took 2.35 seconds to destroy the instance on the hypervisor. 
[ 1950.872732] env[62519]: DEBUG oslo.service.loopingcall [None req-d63ba5ad-580c-4fa2-8333-4d3b0b757f51 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62519) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1950.872925] env[62519]: DEBUG nova.compute.manager [-] [instance: 31db4b14-0ba3-4159-accc-31c21bd81322] Deallocating network for instance {{(pid=62519) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2297}} [ 1950.873028] env[62519]: DEBUG nova.network.neutron [-] [instance: 31db4b14-0ba3-4159-accc-31c21bd81322] deallocate_for_instance() {{(pid=62519) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1950.894864] env[62519]: DEBUG oslo_concurrency.lockutils [None req-f782d2b8-e50d-40ef-8bd5-13bb744038de tempest-ServersNegativeTestMultiTenantJSON-1721524338 tempest-ServersNegativeTestMultiTenantJSON-1721524338-project-member] Lock "884ba724-1203-4513-a196-8af5258ac731" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.838s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1950.939426] env[62519]: DEBUG oslo_vmware.api [None req-7bbed6d2-ea5b-4fc9-bed3-5d105e41bd94 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]527d356c-93e4-912a-7512-a26b6af62056, 'name': SearchDatastore_Task, 'duration_secs': 0.010141} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1950.939761] env[62519]: DEBUG oslo_concurrency.lockutils [None req-7bbed6d2-ea5b-4fc9-bed3-5d105e41bd94 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Releasing lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1950.940088] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-7bbed6d2-ea5b-4fc9-bed3-5d105e41bd94 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk to [datastore1] 47439070-54d8-454c-bf1d-7a2a33d82e9a/47439070-54d8-454c-bf1d-7a2a33d82e9a.vmdk {{(pid=62519) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1950.940306] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-34cee2ba-9dfc-45bc-937b-087662badcc1 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1950.948632] env[62519]: DEBUG oslo_vmware.api [None req-7bbed6d2-ea5b-4fc9-bed3-5d105e41bd94 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Waiting for the task: (returnval){ [ 1950.948632] env[62519]: value = "task-1803361" [ 1950.948632] env[62519]: _type = "Task" [ 1950.948632] env[62519]: } to complete. 
{{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1950.958120] env[62519]: DEBUG oslo_vmware.api [None req-7bbed6d2-ea5b-4fc9-bed3-5d105e41bd94 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Task: {'id': task-1803361, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1951.043949] env[62519]: DEBUG oslo_vmware.api [None req-e853329e-9967-40cb-bffb-428460c69b3a tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Task: {'id': task-1803359, 'name': PowerOnVM_Task, 'duration_secs': 0.485908} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1951.044283] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-e853329e-9967-40cb-bffb-428460c69b3a tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] [instance: 9bf88b2f-63f9-466b-8669-45f17319055d] Powered on the VM {{(pid=62519) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1951.044540] env[62519]: INFO nova.compute.manager [None req-e853329e-9967-40cb-bffb-428460c69b3a tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] [instance: 9bf88b2f-63f9-466b-8669-45f17319055d] Took 8.05 seconds to spawn the instance on the hypervisor. [ 1951.044749] env[62519]: DEBUG nova.compute.manager [None req-e853329e-9967-40cb-bffb-428460c69b3a tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] [instance: 9bf88b2f-63f9-466b-8669-45f17319055d] Checking state {{(pid=62519) _get_power_state /opt/stack/nova/nova/compute/manager.py:1799}} [ 1951.045589] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aecf686d-b350-411b-be28-0635f3d1e4e5 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1951.107092] env[62519]: DEBUG oslo_vmware.api [None req-c46172b1-9fd1-45eb-96d8-79141ef9e7d0 tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] Task: {'id': task-1803360, 'name': ReconfigVM_Task, 'duration_secs': 0.278922} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1951.107402] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-c46172b1-9fd1-45eb-96d8-79141ef9e7d0 tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] [instance: fc3beaba-2ad5-4598-b562-557fdd552b39] Reconfigured VM instance instance-00000069 to detach disk 2002 {{(pid=62519) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1951.112509] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-51affd9b-3f49-4a0b-a470-7c9e71c1b3bc {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1951.132365] env[62519]: DEBUG oslo_vmware.api [None req-c46172b1-9fd1-45eb-96d8-79141ef9e7d0 tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] Waiting for the task: (returnval){ [ 1951.132365] env[62519]: value = "task-1803362" [ 1951.132365] env[62519]: _type = "Task" [ 1951.132365] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1951.142929] env[62519]: DEBUG oslo_vmware.api [None req-c46172b1-9fd1-45eb-96d8-79141ef9e7d0 tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] Task: {'id': task-1803362, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1951.369791] env[62519]: DEBUG nova.compute.manager [req-657cb994-c290-4ccb-b5cf-b36f8b668f83 req-5e967a86-4911-4c0a-b716-7306057828e0 service nova] [instance: 31db4b14-0ba3-4159-accc-31c21bd81322] Received event network-vif-deleted-082d375d-5e85-4e5e-a40e-661c492b5f5d {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1951.369888] env[62519]: INFO nova.compute.manager [req-657cb994-c290-4ccb-b5cf-b36f8b668f83 req-5e967a86-4911-4c0a-b716-7306057828e0 service nova] [instance: 31db4b14-0ba3-4159-accc-31c21bd81322] Neutron deleted interface 082d375d-5e85-4e5e-a40e-661c492b5f5d; detaching it from the instance and deleting it from the info cache [ 1951.370092] env[62519]: DEBUG nova.network.neutron [req-657cb994-c290-4ccb-b5cf-b36f8b668f83 req-5e967a86-4911-4c0a-b716-7306057828e0 service nova] [instance: 31db4b14-0ba3-4159-accc-31c21bd81322] Updating instance_info_cache with network_info: [] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1951.460757] env[62519]: DEBUG oslo_vmware.api [None req-7bbed6d2-ea5b-4fc9-bed3-5d105e41bd94 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Task: {'id': task-1803361, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.472312} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1951.461076] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-7bbed6d2-ea5b-4fc9-bed3-5d105e41bd94 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk to [datastore1] 47439070-54d8-454c-bf1d-7a2a33d82e9a/47439070-54d8-454c-bf1d-7a2a33d82e9a.vmdk {{(pid=62519) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1951.461325] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-7bbed6d2-ea5b-4fc9-bed3-5d105e41bd94 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] [instance: 47439070-54d8-454c-bf1d-7a2a33d82e9a] Extending root virtual disk to 1048576 {{(pid=62519) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1951.461598] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-df04c181-a2b9-4945-ab2d-db249e54a387 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1951.469887] env[62519]: DEBUG oslo_vmware.api [None req-7bbed6d2-ea5b-4fc9-bed3-5d105e41bd94 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Waiting for the task: (returnval){ [ 1951.469887] env[62519]: value = "task-1803363" [ 1951.469887] env[62519]: _type = "Task" [ 1951.469887] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1951.479442] env[62519]: DEBUG oslo_vmware.api [None req-7bbed6d2-ea5b-4fc9-bed3-5d105e41bd94 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Task: {'id': task-1803363, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1951.564760] env[62519]: INFO nova.compute.manager [None req-e853329e-9967-40cb-bffb-428460c69b3a tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] [instance: 9bf88b2f-63f9-466b-8669-45f17319055d] Took 12.90 seconds to build instance. [ 1951.644895] env[62519]: DEBUG oslo_vmware.api [None req-c46172b1-9fd1-45eb-96d8-79141ef9e7d0 tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] Task: {'id': task-1803362, 'name': ReconfigVM_Task, 'duration_secs': 0.346235} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1951.645135] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-c46172b1-9fd1-45eb-96d8-79141ef9e7d0 tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] [instance: fc3beaba-2ad5-4598-b562-557fdd552b39] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-373861', 'volume_id': '8a192ee5-7f6b-43f4-baac-71e4e69cc484', 'name': 'volume-8a192ee5-7f6b-43f4-baac-71e4e69cc484', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'fc3beaba-2ad5-4598-b562-557fdd552b39', 'attached_at': '', 'detached_at': '', 'volume_id': '8a192ee5-7f6b-43f4-baac-71e4e69cc484', 'serial': '8a192ee5-7f6b-43f4-baac-71e4e69cc484'} {{(pid=62519) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1951.841128] env[62519]: DEBUG nova.network.neutron [-] [instance: 31db4b14-0ba3-4159-accc-31c21bd81322] Updating instance_info_cache with network_info: [] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1951.873603] env[62519]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-616b6a8d-67ce-46ae-a0b9-8a5133ba0dbb {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1951.885705] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-787f3018-ed11-4279-a6e7-8b3a0075de77 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1951.922040] env[62519]: DEBUG nova.compute.manager [req-657cb994-c290-4ccb-b5cf-b36f8b668f83 req-5e967a86-4911-4c0a-b716-7306057828e0 service nova] [instance: 31db4b14-0ba3-4159-accc-31c21bd81322] Detach interface failed, port_id=082d375d-5e85-4e5e-a40e-661c492b5f5d, reason: Instance 31db4b14-0ba3-4159-accc-31c21bd81322 could not be found. {{(pid=62519) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11455}} [ 1951.980458] env[62519]: DEBUG oslo_vmware.api [None req-7bbed6d2-ea5b-4fc9-bed3-5d105e41bd94 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Task: {'id': task-1803363, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.080257} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1951.980754] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-7bbed6d2-ea5b-4fc9-bed3-5d105e41bd94 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] [instance: 47439070-54d8-454c-bf1d-7a2a33d82e9a] Extended root virtual disk {{(pid=62519) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1951.981602] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87cdf85f-c206-4229-8e02-332b4dc928a5 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1952.005353] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-7bbed6d2-ea5b-4fc9-bed3-5d105e41bd94 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] [instance: 47439070-54d8-454c-bf1d-7a2a33d82e9a] Reconfiguring VM instance instance-00000072 to attach disk [datastore1] 47439070-54d8-454c-bf1d-7a2a33d82e9a/47439070-54d8-454c-bf1d-7a2a33d82e9a.vmdk or device None with type sparse {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1952.005664] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b6860952-e64d-488d-a6ce-bf978430c9e5 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1952.027591] env[62519]: DEBUG oslo_vmware.api [None req-7bbed6d2-ea5b-4fc9-bed3-5d105e41bd94 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Waiting for the task: (returnval){ [ 1952.027591] env[62519]: value = "task-1803364" [ 1952.027591] env[62519]: _type = "Task" [ 1952.027591] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1952.040855] env[62519]: DEBUG oslo_vmware.api [None req-7bbed6d2-ea5b-4fc9-bed3-5d105e41bd94 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Task: {'id': task-1803364, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1952.188959] env[62519]: DEBUG nova.objects.instance [None req-c46172b1-9fd1-45eb-96d8-79141ef9e7d0 tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] Lazy-loading 'flavor' on Instance uuid fc3beaba-2ad5-4598-b562-557fdd552b39 {{(pid=62519) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1952.344167] env[62519]: INFO nova.compute.manager [-] [instance: 31db4b14-0ba3-4159-accc-31c21bd81322] Took 1.47 seconds to deallocate network for instance. 
[ 1952.369416] env[62519]: DEBUG oslo_concurrency.lockutils [None req-86a74723-e58a-4e33-bc6a-eb9bb560d3e7 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Acquiring lock "2ea8304e-5b91-4908-a876-6e2c780b1da9" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1952.370052] env[62519]: DEBUG oslo_concurrency.lockutils [None req-86a74723-e58a-4e33-bc6a-eb9bb560d3e7 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Lock "2ea8304e-5b91-4908-a876-6e2c780b1da9" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1952.370635] env[62519]: DEBUG oslo_concurrency.lockutils [None req-86a74723-e58a-4e33-bc6a-eb9bb560d3e7 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Acquiring lock "2ea8304e-5b91-4908-a876-6e2c780b1da9-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1952.371148] env[62519]: DEBUG oslo_concurrency.lockutils [None req-86a74723-e58a-4e33-bc6a-eb9bb560d3e7 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Lock "2ea8304e-5b91-4908-a876-6e2c780b1da9-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.001s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1952.371148] env[62519]: DEBUG oslo_concurrency.lockutils [None req-86a74723-e58a-4e33-bc6a-eb9bb560d3e7 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Lock "2ea8304e-5b91-4908-a876-6e2c780b1da9-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1952.373081] env[62519]: INFO nova.compute.manager [None req-86a74723-e58a-4e33-bc6a-eb9bb560d3e7 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] [instance: 2ea8304e-5b91-4908-a876-6e2c780b1da9] Terminating instance [ 1952.538357] env[62519]: DEBUG oslo_vmware.api [None req-7bbed6d2-ea5b-4fc9-bed3-5d105e41bd94 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Task: {'id': task-1803364, 'name': ReconfigVM_Task, 'duration_secs': 0.253128} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1952.538674] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-7bbed6d2-ea5b-4fc9-bed3-5d105e41bd94 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] [instance: 47439070-54d8-454c-bf1d-7a2a33d82e9a] Reconfigured VM instance instance-00000072 to attach disk [datastore1] 47439070-54d8-454c-bf1d-7a2a33d82e9a/47439070-54d8-454c-bf1d-7a2a33d82e9a.vmdk or device None with type sparse {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1952.539315] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-6c33e442-6df4-46bc-be5e-94f19bd93ec8 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1952.547976] env[62519]: DEBUG oslo_vmware.api [None req-7bbed6d2-ea5b-4fc9-bed3-5d105e41bd94 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Waiting for the task: (returnval){ [ 1952.547976] env[62519]: value = "task-1803365" [ 1952.547976] env[62519]: _type = "Task" [ 1952.547976] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1952.558763] env[62519]: DEBUG oslo_vmware.api [None req-7bbed6d2-ea5b-4fc9-bed3-5d105e41bd94 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Task: {'id': task-1803365, 'name': Rename_Task} progress is 5%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1952.876650] env[62519]: DEBUG nova.compute.manager [None req-86a74723-e58a-4e33-bc6a-eb9bb560d3e7 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] [instance: 2ea8304e-5b91-4908-a876-6e2c780b1da9] Start destroying the instance on the hypervisor. 
{{(pid=62519) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3166}} [ 1952.876957] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-86a74723-e58a-4e33-bc6a-eb9bb560d3e7 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] [instance: 2ea8304e-5b91-4908-a876-6e2c780b1da9] Destroying instance {{(pid=62519) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1952.877796] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5402e2a-e696-4ea3-bcdf-ab463762935f {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1952.889350] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-86a74723-e58a-4e33-bc6a-eb9bb560d3e7 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] [instance: 2ea8304e-5b91-4908-a876-6e2c780b1da9] Powering off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1952.889350] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d608aa8d-420d-4c14-bebe-f3409401b855 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1952.895768] env[62519]: DEBUG oslo_vmware.api [None req-86a74723-e58a-4e33-bc6a-eb9bb560d3e7 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Waiting for the task: (returnval){ [ 1952.895768] env[62519]: value = "task-1803366" [ 1952.895768] env[62519]: _type = "Task" [ 1952.895768] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1952.902056] env[62519]: INFO nova.compute.manager [None req-d63ba5ad-580c-4fa2-8333-4d3b0b757f51 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] [instance: 31db4b14-0ba3-4159-accc-31c21bd81322] Took 0.56 seconds to detach 1 volumes for instance. [ 1952.907577] env[62519]: DEBUG nova.compute.manager [None req-d63ba5ad-580c-4fa2-8333-4d3b0b757f51 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] [instance: 31db4b14-0ba3-4159-accc-31c21bd81322] Deleting volume: 84bda086-0de7-4b24-8a1f-6ff7c11594b6 {{(pid=62519) _cleanup_volumes /opt/stack/nova/nova/compute/manager.py:3284}} [ 1952.916049] env[62519]: DEBUG oslo_vmware.api [None req-86a74723-e58a-4e33-bc6a-eb9bb560d3e7 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Task: {'id': task-1803366, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1953.058853] env[62519]: DEBUG oslo_vmware.api [None req-7bbed6d2-ea5b-4fc9-bed3-5d105e41bd94 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Task: {'id': task-1803365, 'name': Rename_Task, 'duration_secs': 0.185386} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1953.059231] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-7bbed6d2-ea5b-4fc9-bed3-5d105e41bd94 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] [instance: 47439070-54d8-454c-bf1d-7a2a33d82e9a] Powering on the VM {{(pid=62519) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1953.059553] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-46736969-58b6-42f0-ad3a-0f0836d13450 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1953.068471] env[62519]: DEBUG oslo_vmware.api [None req-7bbed6d2-ea5b-4fc9-bed3-5d105e41bd94 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Waiting for the task: (returnval){ [ 1953.068471] env[62519]: value = "task-1803368" [ 1953.068471] env[62519]: _type = "Task" [ 1953.068471] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1953.073635] env[62519]: DEBUG oslo_concurrency.lockutils [None req-e853329e-9967-40cb-bffb-428460c69b3a tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Lock "9bf88b2f-63f9-466b-8669-45f17319055d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 15.422s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1953.082394] env[62519]: DEBUG oslo_vmware.api [None req-7bbed6d2-ea5b-4fc9-bed3-5d105e41bd94 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Task: {'id': task-1803368, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1953.198523] env[62519]: DEBUG oslo_concurrency.lockutils [None req-c46172b1-9fd1-45eb-96d8-79141ef9e7d0 tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] Lock "fc3beaba-2ad5-4598-b562-557fdd552b39" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.244s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1953.399238] env[62519]: DEBUG nova.compute.manager [req-a77290ff-410f-4af6-b19c-80990b3d8dab req-98aa5ffb-629f-4925-bdfa-e019deaa2d5a service nova] [instance: 9bf88b2f-63f9-466b-8669-45f17319055d] Received event network-changed-2ee072d4-2bdb-4a83-90dd-06086f515634 {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1953.399238] env[62519]: DEBUG nova.compute.manager [req-a77290ff-410f-4af6-b19c-80990b3d8dab req-98aa5ffb-629f-4925-bdfa-e019deaa2d5a service nova] [instance: 9bf88b2f-63f9-466b-8669-45f17319055d] Refreshing instance network info cache due to event network-changed-2ee072d4-2bdb-4a83-90dd-06086f515634. 
{{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11628}} [ 1953.399415] env[62519]: DEBUG oslo_concurrency.lockutils [req-a77290ff-410f-4af6-b19c-80990b3d8dab req-98aa5ffb-629f-4925-bdfa-e019deaa2d5a service nova] Acquiring lock "refresh_cache-9bf88b2f-63f9-466b-8669-45f17319055d" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1953.399561] env[62519]: DEBUG oslo_concurrency.lockutils [req-a77290ff-410f-4af6-b19c-80990b3d8dab req-98aa5ffb-629f-4925-bdfa-e019deaa2d5a service nova] Acquired lock "refresh_cache-9bf88b2f-63f9-466b-8669-45f17319055d" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1953.399722] env[62519]: DEBUG nova.network.neutron [req-a77290ff-410f-4af6-b19c-80990b3d8dab req-98aa5ffb-629f-4925-bdfa-e019deaa2d5a service nova] [instance: 9bf88b2f-63f9-466b-8669-45f17319055d] Refreshing network info cache for port 2ee072d4-2bdb-4a83-90dd-06086f515634 {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1953.412195] env[62519]: DEBUG oslo_vmware.api [None req-86a74723-e58a-4e33-bc6a-eb9bb560d3e7 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Task: {'id': task-1803366, 'name': PowerOffVM_Task, 'duration_secs': 0.241855} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1953.414815] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-86a74723-e58a-4e33-bc6a-eb9bb560d3e7 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] [instance: 2ea8304e-5b91-4908-a876-6e2c780b1da9] Powered off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1953.414815] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-86a74723-e58a-4e33-bc6a-eb9bb560d3e7 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] [instance: 2ea8304e-5b91-4908-a876-6e2c780b1da9] Unregistering the VM {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1953.414815] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-6ac579c1-b02b-4dcc-828c-b686870882ce {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1953.469505] env[62519]: DEBUG oslo_concurrency.lockutils [None req-d63ba5ad-580c-4fa2-8333-4d3b0b757f51 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1953.469801] env[62519]: DEBUG oslo_concurrency.lockutils [None req-d63ba5ad-580c-4fa2-8333-4d3b0b757f51 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1953.470069] env[62519]: DEBUG oslo_concurrency.lockutils [None req-d63ba5ad-580c-4fa2-8333-4d3b0b757f51 tempest-ServerActionsTestOtherA-311064147 
tempest-ServerActionsTestOtherA-311064147-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1953.498593] env[62519]: INFO nova.scheduler.client.report [None req-d63ba5ad-580c-4fa2-8333-4d3b0b757f51 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Deleted allocations for instance 31db4b14-0ba3-4159-accc-31c21bd81322 [ 1953.520141] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-86a74723-e58a-4e33-bc6a-eb9bb560d3e7 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] [instance: 2ea8304e-5b91-4908-a876-6e2c780b1da9] Unregistered the VM {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1953.520532] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-86a74723-e58a-4e33-bc6a-eb9bb560d3e7 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] [instance: 2ea8304e-5b91-4908-a876-6e2c780b1da9] Deleting contents of the VM from datastore datastore1 {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1953.520866] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-86a74723-e58a-4e33-bc6a-eb9bb560d3e7 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Deleting the datastore file [datastore1] 2ea8304e-5b91-4908-a876-6e2c780b1da9 {{(pid=62519) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1953.521353] env[62519]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-147b5bd1-aa76-45d5-bfe9-b64e49603d05 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1953.529080] env[62519]: DEBUG oslo_vmware.api [None req-86a74723-e58a-4e33-bc6a-eb9bb560d3e7 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Waiting for the task: (returnval){ [ 1953.529080] env[62519]: value = "task-1803370" [ 1953.529080] env[62519]: _type = "Task" [ 1953.529080] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1953.539997] env[62519]: DEBUG oslo_vmware.api [None req-86a74723-e58a-4e33-bc6a-eb9bb560d3e7 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Task: {'id': task-1803370, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1953.581262] env[62519]: DEBUG oslo_vmware.api [None req-7bbed6d2-ea5b-4fc9-bed3-5d105e41bd94 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Task: {'id': task-1803368, 'name': PowerOnVM_Task, 'duration_secs': 0.46626} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1953.581745] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-7bbed6d2-ea5b-4fc9-bed3-5d105e41bd94 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] [instance: 47439070-54d8-454c-bf1d-7a2a33d82e9a] Powered on the VM {{(pid=62519) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1953.582113] env[62519]: INFO nova.compute.manager [None req-7bbed6d2-ea5b-4fc9-bed3-5d105e41bd94 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] [instance: 47439070-54d8-454c-bf1d-7a2a33d82e9a] Took 8.03 seconds to spawn the instance on the hypervisor. [ 1953.582411] env[62519]: DEBUG nova.compute.manager [None req-7bbed6d2-ea5b-4fc9-bed3-5d105e41bd94 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] [instance: 47439070-54d8-454c-bf1d-7a2a33d82e9a] Checking state {{(pid=62519) _get_power_state /opt/stack/nova/nova/compute/manager.py:1799}} [ 1953.583305] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a93deda-dbcc-452c-9144-6b0b738d8744 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1954.010028] env[62519]: DEBUG oslo_concurrency.lockutils [None req-d63ba5ad-580c-4fa2-8333-4d3b0b757f51 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Lock "31db4b14-0ba3-4159-accc-31c21bd81322" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.993s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1954.041421] env[62519]: DEBUG oslo_vmware.api [None req-86a74723-e58a-4e33-bc6a-eb9bb560d3e7 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Task: {'id': task-1803370, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.260857} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1954.041675] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-86a74723-e58a-4e33-bc6a-eb9bb560d3e7 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Deleted the datastore file {{(pid=62519) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1954.041852] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-86a74723-e58a-4e33-bc6a-eb9bb560d3e7 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] [instance: 2ea8304e-5b91-4908-a876-6e2c780b1da9] Deleted contents of the VM from datastore datastore1 {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1954.042040] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-86a74723-e58a-4e33-bc6a-eb9bb560d3e7 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] [instance: 2ea8304e-5b91-4908-a876-6e2c780b1da9] Instance destroyed {{(pid=62519) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1954.042308] env[62519]: INFO nova.compute.manager [None req-86a74723-e58a-4e33-bc6a-eb9bb560d3e7 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] [instance: 2ea8304e-5b91-4908-a876-6e2c780b1da9] Took 1.17 seconds to destroy the instance on the hypervisor. [ 1954.042452] env[62519]: DEBUG oslo.service.loopingcall [None req-86a74723-e58a-4e33-bc6a-eb9bb560d3e7 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62519) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1954.042646] env[62519]: DEBUG nova.compute.manager [-] [instance: 2ea8304e-5b91-4908-a876-6e2c780b1da9] Deallocating network for instance {{(pid=62519) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2297}} [ 1954.042733] env[62519]: DEBUG nova.network.neutron [-] [instance: 2ea8304e-5b91-4908-a876-6e2c780b1da9] deallocate_for_instance() {{(pid=62519) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1954.105208] env[62519]: INFO nova.compute.manager [None req-7bbed6d2-ea5b-4fc9-bed3-5d105e41bd94 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] [instance: 47439070-54d8-454c-bf1d-7a2a33d82e9a] Took 13.79 seconds to build instance. 
[ 1954.375418] env[62519]: DEBUG oslo_concurrency.lockutils [None req-f3b88edf-7ec3-49f6-b135-edfc3b9dd392 tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] Acquiring lock "fc3beaba-2ad5-4598-b562-557fdd552b39" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1954.375819] env[62519]: DEBUG oslo_concurrency.lockutils [None req-f3b88edf-7ec3-49f6-b135-edfc3b9dd392 tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] Lock "fc3beaba-2ad5-4598-b562-557fdd552b39" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1954.376062] env[62519]: DEBUG oslo_concurrency.lockutils [None req-f3b88edf-7ec3-49f6-b135-edfc3b9dd392 tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] Acquiring lock "fc3beaba-2ad5-4598-b562-557fdd552b39-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1954.376310] env[62519]: DEBUG oslo_concurrency.lockutils [None req-f3b88edf-7ec3-49f6-b135-edfc3b9dd392 tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] Lock "fc3beaba-2ad5-4598-b562-557fdd552b39-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1954.377070] env[62519]: DEBUG oslo_concurrency.lockutils [None req-f3b88edf-7ec3-49f6-b135-edfc3b9dd392 tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] Lock "fc3beaba-2ad5-4598-b562-557fdd552b39-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1954.379152] env[62519]: INFO nova.compute.manager [None req-f3b88edf-7ec3-49f6-b135-edfc3b9dd392 tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] [instance: fc3beaba-2ad5-4598-b562-557fdd552b39] Terminating instance [ 1954.405349] env[62519]: DEBUG nova.network.neutron [req-a77290ff-410f-4af6-b19c-80990b3d8dab req-98aa5ffb-629f-4925-bdfa-e019deaa2d5a service nova] [instance: 9bf88b2f-63f9-466b-8669-45f17319055d] Updated VIF entry in instance network info cache for port 2ee072d4-2bdb-4a83-90dd-06086f515634. 
{{(pid=62519) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1954.405349] env[62519]: DEBUG nova.network.neutron [req-a77290ff-410f-4af6-b19c-80990b3d8dab req-98aa5ffb-629f-4925-bdfa-e019deaa2d5a service nova] [instance: 9bf88b2f-63f9-466b-8669-45f17319055d] Updating instance_info_cache with network_info: [{"id": "2ee072d4-2bdb-4a83-90dd-06086f515634", "address": "fa:16:3e:e6:86:ec", "network": {"id": "32c7792c-9705-4830-bdbc-4c4159d566f0", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-374750788-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.241", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d73e71476254453fb23164dce09c6d41", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a0a76279-3c11-4bef-b124-2a2ee13fa377", "external-id": "nsx-vlan-transportzone-738", "segmentation_id": 738, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2ee072d4-2b", "ovs_interfaceid": "2ee072d4-2bdb-4a83-90dd-06086f515634", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1954.883137] env[62519]: DEBUG nova.compute.manager [None req-f3b88edf-7ec3-49f6-b135-edfc3b9dd392 tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] [instance: fc3beaba-2ad5-4598-b562-557fdd552b39] Start destroying the instance on the hypervisor. 
{{(pid=62519) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3166}} [ 1954.883380] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-f3b88edf-7ec3-49f6-b135-edfc3b9dd392 tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] [instance: fc3beaba-2ad5-4598-b562-557fdd552b39] Destroying instance {{(pid=62519) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1954.884295] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3543d650-b530-426a-b920-5f5d2cf1499f {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1954.894851] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-f3b88edf-7ec3-49f6-b135-edfc3b9dd392 tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] [instance: fc3beaba-2ad5-4598-b562-557fdd552b39] Powering off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1954.895155] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-74b17fcc-7390-433c-8c78-cb151c5badd3 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1954.903508] env[62519]: DEBUG oslo_vmware.api [None req-f3b88edf-7ec3-49f6-b135-edfc3b9dd392 tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] Waiting for the task: (returnval){ [ 1954.903508] env[62519]: value = "task-1803371" [ 1954.903508] env[62519]: _type = "Task" [ 1954.903508] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1954.913318] env[62519]: DEBUG oslo_vmware.api [None req-f3b88edf-7ec3-49f6-b135-edfc3b9dd392 tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] Task: {'id': task-1803371, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1954.916191] env[62519]: DEBUG oslo_concurrency.lockutils [req-a77290ff-410f-4af6-b19c-80990b3d8dab req-98aa5ffb-629f-4925-bdfa-e019deaa2d5a service nova] Releasing lock "refresh_cache-9bf88b2f-63f9-466b-8669-45f17319055d" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1954.968186] env[62519]: DEBUG nova.compute.manager [req-31059106-b6d5-4c23-a530-d9af4e961eef req-35906a52-b4ef-46a3-85a0-3fe9a2172de6 service nova] [instance: 47439070-54d8-454c-bf1d-7a2a33d82e9a] Received event network-changed-9a08cba9-bd48-4013-a6c0-13049c0b5fdb {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1954.968412] env[62519]: DEBUG nova.compute.manager [req-31059106-b6d5-4c23-a530-d9af4e961eef req-35906a52-b4ef-46a3-85a0-3fe9a2172de6 service nova] [instance: 47439070-54d8-454c-bf1d-7a2a33d82e9a] Refreshing instance network info cache due to event network-changed-9a08cba9-bd48-4013-a6c0-13049c0b5fdb. 
{{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11628}} [ 1954.968640] env[62519]: DEBUG oslo_concurrency.lockutils [req-31059106-b6d5-4c23-a530-d9af4e961eef req-35906a52-b4ef-46a3-85a0-3fe9a2172de6 service nova] Acquiring lock "refresh_cache-47439070-54d8-454c-bf1d-7a2a33d82e9a" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1954.968787] env[62519]: DEBUG oslo_concurrency.lockutils [req-31059106-b6d5-4c23-a530-d9af4e961eef req-35906a52-b4ef-46a3-85a0-3fe9a2172de6 service nova] Acquired lock "refresh_cache-47439070-54d8-454c-bf1d-7a2a33d82e9a" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1954.968946] env[62519]: DEBUG nova.network.neutron [req-31059106-b6d5-4c23-a530-d9af4e961eef req-35906a52-b4ef-46a3-85a0-3fe9a2172de6 service nova] [instance: 47439070-54d8-454c-bf1d-7a2a33d82e9a] Refreshing network info cache for port 9a08cba9-bd48-4013-a6c0-13049c0b5fdb {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1954.994810] env[62519]: DEBUG oslo_concurrency.lockutils [None req-cf22bf58-f85c-480a-87a7-41b885d7b650 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Acquiring lock "646c9dfc-7b78-4cdb-b4f5-480c43af38c4" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1954.995106] env[62519]: DEBUG oslo_concurrency.lockutils [None req-cf22bf58-f85c-480a-87a7-41b885d7b650 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Lock "646c9dfc-7b78-4cdb-b4f5-480c43af38c4" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1954.995328] env[62519]: DEBUG oslo_concurrency.lockutils [None req-cf22bf58-f85c-480a-87a7-41b885d7b650 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Acquiring lock "646c9dfc-7b78-4cdb-b4f5-480c43af38c4-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1954.995515] env[62519]: DEBUG oslo_concurrency.lockutils [None req-cf22bf58-f85c-480a-87a7-41b885d7b650 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Lock "646c9dfc-7b78-4cdb-b4f5-480c43af38c4-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1954.995679] env[62519]: DEBUG oslo_concurrency.lockutils [None req-cf22bf58-f85c-480a-87a7-41b885d7b650 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Lock "646c9dfc-7b78-4cdb-b4f5-480c43af38c4-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1954.997789] env[62519]: INFO nova.compute.manager [None req-cf22bf58-f85c-480a-87a7-41b885d7b650 
tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] [instance: 646c9dfc-7b78-4cdb-b4f5-480c43af38c4] Terminating instance [ 1955.130211] env[62519]: DEBUG nova.network.neutron [-] [instance: 2ea8304e-5b91-4908-a876-6e2c780b1da9] Updating instance_info_cache with network_info: [] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1955.414637] env[62519]: DEBUG oslo_vmware.api [None req-f3b88edf-7ec3-49f6-b135-edfc3b9dd392 tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] Task: {'id': task-1803371, 'name': PowerOffVM_Task, 'duration_secs': 0.294445} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1955.414871] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-f3b88edf-7ec3-49f6-b135-edfc3b9dd392 tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] [instance: fc3beaba-2ad5-4598-b562-557fdd552b39] Powered off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1955.414972] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-f3b88edf-7ec3-49f6-b135-edfc3b9dd392 tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] [instance: fc3beaba-2ad5-4598-b562-557fdd552b39] Unregistering the VM {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1955.415241] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-7d499210-4d1f-4d4f-ab40-1240ad8fa533 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1955.426050] env[62519]: DEBUG nova.compute.manager [req-2e366d86-bfef-414c-bc65-524a1b0c9a9a req-b4234b8b-462e-4df2-b40b-545e186b6707 service nova] [instance: 2ea8304e-5b91-4908-a876-6e2c780b1da9] Received event network-vif-deleted-224051cd-32a2-49fc-b4a3-523f444e85a2 {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1955.490359] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-f3b88edf-7ec3-49f6-b135-edfc3b9dd392 tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] [instance: fc3beaba-2ad5-4598-b562-557fdd552b39] Unregistered the VM {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1955.490617] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-f3b88edf-7ec3-49f6-b135-edfc3b9dd392 tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] [instance: fc3beaba-2ad5-4598-b562-557fdd552b39] Deleting contents of the VM from datastore datastore1 {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1955.490804] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-f3b88edf-7ec3-49f6-b135-edfc3b9dd392 tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] Deleting the datastore file [datastore1] fc3beaba-2ad5-4598-b562-557fdd552b39 {{(pid=62519) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1955.491496] env[62519]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-05f9df84-1a94-4007-9714-264e2fde604c {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1955.498483] 
env[62519]: DEBUG oslo_vmware.api [None req-f3b88edf-7ec3-49f6-b135-edfc3b9dd392 tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] Waiting for the task: (returnval){ [ 1955.498483] env[62519]: value = "task-1803373" [ 1955.498483] env[62519]: _type = "Task" [ 1955.498483] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1955.504808] env[62519]: DEBUG nova.compute.manager [None req-cf22bf58-f85c-480a-87a7-41b885d7b650 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] [instance: 646c9dfc-7b78-4cdb-b4f5-480c43af38c4] Start destroying the instance on the hypervisor. {{(pid=62519) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3166}} [ 1955.505027] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-cf22bf58-f85c-480a-87a7-41b885d7b650 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] [instance: 646c9dfc-7b78-4cdb-b4f5-480c43af38c4] Destroying instance {{(pid=62519) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1955.505835] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91e81df9-2800-4f01-bf5d-320aa870b272 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1955.514494] env[62519]: DEBUG oslo_vmware.api [None req-f3b88edf-7ec3-49f6-b135-edfc3b9dd392 tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] Task: {'id': task-1803373, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1955.516721] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-cf22bf58-f85c-480a-87a7-41b885d7b650 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] [instance: 646c9dfc-7b78-4cdb-b4f5-480c43af38c4] Powering off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1955.516898] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b76b1ac0-0600-49d8-94f4-6b647b84d2c9 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1955.524082] env[62519]: DEBUG oslo_vmware.api [None req-cf22bf58-f85c-480a-87a7-41b885d7b650 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Waiting for the task: (returnval){ [ 1955.524082] env[62519]: value = "task-1803374" [ 1955.524082] env[62519]: _type = "Task" [ 1955.524082] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1955.532576] env[62519]: DEBUG oslo_vmware.api [None req-cf22bf58-f85c-480a-87a7-41b885d7b650 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Task: {'id': task-1803374, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1955.619441] env[62519]: DEBUG oslo_concurrency.lockutils [None req-7bbed6d2-ea5b-4fc9-bed3-5d105e41bd94 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Lock "47439070-54d8-454c-bf1d-7a2a33d82e9a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 16.315s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1955.634694] env[62519]: INFO nova.compute.manager [-] [instance: 2ea8304e-5b91-4908-a876-6e2c780b1da9] Took 1.59 seconds to deallocate network for instance. [ 1955.737419] env[62519]: DEBUG nova.network.neutron [req-31059106-b6d5-4c23-a530-d9af4e961eef req-35906a52-b4ef-46a3-85a0-3fe9a2172de6 service nova] [instance: 47439070-54d8-454c-bf1d-7a2a33d82e9a] Updated VIF entry in instance network info cache for port 9a08cba9-bd48-4013-a6c0-13049c0b5fdb. {{(pid=62519) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1955.737898] env[62519]: DEBUG nova.network.neutron [req-31059106-b6d5-4c23-a530-d9af4e961eef req-35906a52-b4ef-46a3-85a0-3fe9a2172de6 service nova] [instance: 47439070-54d8-454c-bf1d-7a2a33d82e9a] Updating instance_info_cache with network_info: [{"id": "9a08cba9-bd48-4013-a6c0-13049c0b5fdb", "address": "fa:16:3e:82:31:6c", "network": {"id": "1aae0533-b463-4e15-b245-e7c3c91984b5", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1664918037-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.137", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ff4895c6c47e438e8fb9fbc0ffbfdc82", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1f762954-6ca5-4da5-bf0a-5d31c51ec570", "external-id": "nsx-vlan-transportzone-930", "segmentation_id": 930, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9a08cba9-bd", "ovs_interfaceid": "9a08cba9-bd48-4013-a6c0-13049c0b5fdb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1956.010052] env[62519]: DEBUG oslo_vmware.api [None req-f3b88edf-7ec3-49f6-b135-edfc3b9dd392 tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] Task: {'id': task-1803373, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.193147} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1956.010052] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-f3b88edf-7ec3-49f6-b135-edfc3b9dd392 tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] Deleted the datastore file {{(pid=62519) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1956.010052] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-f3b88edf-7ec3-49f6-b135-edfc3b9dd392 tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] [instance: fc3beaba-2ad5-4598-b562-557fdd552b39] Deleted contents of the VM from datastore datastore1 {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1956.010387] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-f3b88edf-7ec3-49f6-b135-edfc3b9dd392 tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] [instance: fc3beaba-2ad5-4598-b562-557fdd552b39] Instance destroyed {{(pid=62519) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1956.010387] env[62519]: INFO nova.compute.manager [None req-f3b88edf-7ec3-49f6-b135-edfc3b9dd392 tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] [instance: fc3beaba-2ad5-4598-b562-557fdd552b39] Took 1.13 seconds to destroy the instance on the hypervisor. [ 1956.010639] env[62519]: DEBUG oslo.service.loopingcall [None req-f3b88edf-7ec3-49f6-b135-edfc3b9dd392 tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62519) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1956.010838] env[62519]: DEBUG nova.compute.manager [-] [instance: fc3beaba-2ad5-4598-b562-557fdd552b39] Deallocating network for instance {{(pid=62519) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2297}} [ 1956.010931] env[62519]: DEBUG nova.network.neutron [-] [instance: fc3beaba-2ad5-4598-b562-557fdd552b39] deallocate_for_instance() {{(pid=62519) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1956.038795] env[62519]: DEBUG oslo_vmware.api [None req-cf22bf58-f85c-480a-87a7-41b885d7b650 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Task: {'id': task-1803374, 'name': PowerOffVM_Task, 'duration_secs': 0.279379} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1956.039239] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-cf22bf58-f85c-480a-87a7-41b885d7b650 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] [instance: 646c9dfc-7b78-4cdb-b4f5-480c43af38c4] Powered off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1956.039545] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-cf22bf58-f85c-480a-87a7-41b885d7b650 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] [instance: 646c9dfc-7b78-4cdb-b4f5-480c43af38c4] Unregistering the VM {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1956.039930] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-55d8f2bd-a413-4275-8d0a-8675b802b777 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1956.143968] env[62519]: DEBUG oslo_concurrency.lockutils [None req-86a74723-e58a-4e33-bc6a-eb9bb560d3e7 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1956.143968] env[62519]: DEBUG oslo_concurrency.lockutils [None req-86a74723-e58a-4e33-bc6a-eb9bb560d3e7 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1956.143968] env[62519]: DEBUG nova.objects.instance [None req-86a74723-e58a-4e33-bc6a-eb9bb560d3e7 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Lazy-loading 'resources' on Instance uuid 2ea8304e-5b91-4908-a876-6e2c780b1da9 {{(pid=62519) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1956.145425] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-cf22bf58-f85c-480a-87a7-41b885d7b650 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] [instance: 646c9dfc-7b78-4cdb-b4f5-480c43af38c4] Unregistered the VM {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1956.145607] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-cf22bf58-f85c-480a-87a7-41b885d7b650 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] [instance: 646c9dfc-7b78-4cdb-b4f5-480c43af38c4] Deleting contents of the VM from datastore datastore1 {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1956.145806] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-cf22bf58-f85c-480a-87a7-41b885d7b650 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Deleting the datastore file [datastore1] 646c9dfc-7b78-4cdb-b4f5-480c43af38c4 {{(pid=62519) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1956.146121] env[62519]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with 
opID=oslo.vmware-0b240747-666b-48d9-a899-6d754b9fa0ca {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1956.155863] env[62519]: DEBUG oslo_vmware.api [None req-cf22bf58-f85c-480a-87a7-41b885d7b650 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Waiting for the task: (returnval){ [ 1956.155863] env[62519]: value = "task-1803376" [ 1956.155863] env[62519]: _type = "Task" [ 1956.155863] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1956.167686] env[62519]: DEBUG oslo_vmware.api [None req-cf22bf58-f85c-480a-87a7-41b885d7b650 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Task: {'id': task-1803376, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1956.242251] env[62519]: DEBUG oslo_concurrency.lockutils [req-31059106-b6d5-4c23-a530-d9af4e961eef req-35906a52-b4ef-46a3-85a0-3fe9a2172de6 service nova] Releasing lock "refresh_cache-47439070-54d8-454c-bf1d-7a2a33d82e9a" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1956.665692] env[62519]: DEBUG oslo_vmware.api [None req-cf22bf58-f85c-480a-87a7-41b885d7b650 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Task: {'id': task-1803376, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.207767} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1956.666026] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-cf22bf58-f85c-480a-87a7-41b885d7b650 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Deleted the datastore file {{(pid=62519) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1956.666144] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-cf22bf58-f85c-480a-87a7-41b885d7b650 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] [instance: 646c9dfc-7b78-4cdb-b4f5-480c43af38c4] Deleted contents of the VM from datastore datastore1 {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1956.666318] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-cf22bf58-f85c-480a-87a7-41b885d7b650 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] [instance: 646c9dfc-7b78-4cdb-b4f5-480c43af38c4] Instance destroyed {{(pid=62519) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1956.666527] env[62519]: INFO nova.compute.manager [None req-cf22bf58-f85c-480a-87a7-41b885d7b650 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] [instance: 646c9dfc-7b78-4cdb-b4f5-480c43af38c4] Took 1.16 seconds to destroy the instance on the hypervisor. [ 1956.666822] env[62519]: DEBUG oslo.service.loopingcall [None req-cf22bf58-f85c-480a-87a7-41b885d7b650 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62519) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1956.667138] env[62519]: DEBUG nova.compute.manager [-] [instance: 646c9dfc-7b78-4cdb-b4f5-480c43af38c4] Deallocating network for instance {{(pid=62519) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2297}} [ 1956.667254] env[62519]: DEBUG nova.network.neutron [-] [instance: 646c9dfc-7b78-4cdb-b4f5-480c43af38c4] deallocate_for_instance() {{(pid=62519) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1956.814906] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca562ddb-f2b7-45ea-a27e-5f146d5ccf50 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1956.822239] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52865130-baef-4872-b852-f1363262a25d {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1956.862844] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9b7ff22-d11c-4179-92ce-db5dc57e520d {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1956.871643] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cdc6162b-a82c-45ee-bc4c-6633df6eb1dc {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1956.885510] env[62519]: DEBUG nova.compute.provider_tree [None req-86a74723-e58a-4e33-bc6a-eb9bb560d3e7 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Inventory has not changed in ProviderTree for provider: f8ca0d98-9158-4b85-ae0e-b106f966dd44 {{(pid=62519) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1956.978811] env[62519]: DEBUG nova.network.neutron [-] [instance: fc3beaba-2ad5-4598-b562-557fdd552b39] Updating instance_info_cache with network_info: [] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1957.388495] env[62519]: DEBUG nova.scheduler.client.report [None req-86a74723-e58a-4e33-bc6a-eb9bb560d3e7 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Inventory has not changed for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1957.456288] env[62519]: DEBUG nova.compute.manager [req-81414197-c6b0-4213-839e-77f2bb0e6f88 req-bee3528e-6d5e-49a5-9e8c-0e5e0fa6d22e service nova] [instance: fc3beaba-2ad5-4598-b562-557fdd552b39] Received event network-vif-deleted-64341ffd-0da7-4574-9702-3fc0a03eb0e3 {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1957.456504] env[62519]: DEBUG nova.compute.manager [req-81414197-c6b0-4213-839e-77f2bb0e6f88 req-bee3528e-6d5e-49a5-9e8c-0e5e0fa6d22e service nova] 
[instance: 646c9dfc-7b78-4cdb-b4f5-480c43af38c4] Received event network-vif-deleted-cb01cf13-b6c4-4f35-b75b-86f1ba67b87a {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1957.456668] env[62519]: INFO nova.compute.manager [req-81414197-c6b0-4213-839e-77f2bb0e6f88 req-bee3528e-6d5e-49a5-9e8c-0e5e0fa6d22e service nova] [instance: 646c9dfc-7b78-4cdb-b4f5-480c43af38c4] Neutron deleted interface cb01cf13-b6c4-4f35-b75b-86f1ba67b87a; detaching it from the instance and deleting it from the info cache [ 1957.456829] env[62519]: DEBUG nova.network.neutron [req-81414197-c6b0-4213-839e-77f2bb0e6f88 req-bee3528e-6d5e-49a5-9e8c-0e5e0fa6d22e service nova] [instance: 646c9dfc-7b78-4cdb-b4f5-480c43af38c4] Updating instance_info_cache with network_info: [] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1957.482344] env[62519]: INFO nova.compute.manager [-] [instance: fc3beaba-2ad5-4598-b562-557fdd552b39] Took 1.47 seconds to deallocate network for instance. [ 1957.487038] env[62519]: DEBUG nova.network.neutron [-] [instance: 646c9dfc-7b78-4cdb-b4f5-480c43af38c4] Updating instance_info_cache with network_info: [] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1957.893381] env[62519]: DEBUG oslo_concurrency.lockutils [None req-86a74723-e58a-4e33-bc6a-eb9bb560d3e7 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.751s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1957.918947] env[62519]: INFO nova.scheduler.client.report [None req-86a74723-e58a-4e33-bc6a-eb9bb560d3e7 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Deleted allocations for instance 2ea8304e-5b91-4908-a876-6e2c780b1da9 [ 1957.960446] env[62519]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-3e76b3a4-24e1-46b0-a0ab-6758e3c56126 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1957.971206] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-597e6e75-a850-490b-be2e-90ab5e926fea {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1957.989958] env[62519]: DEBUG oslo_concurrency.lockutils [None req-f3b88edf-7ec3-49f6-b135-edfc3b9dd392 tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1957.990232] env[62519]: DEBUG oslo_concurrency.lockutils [None req-f3b88edf-7ec3-49f6-b135-edfc3b9dd392 tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1957.990482] env[62519]: DEBUG nova.objects.instance [None req-f3b88edf-7ec3-49f6-b135-edfc3b9dd392 tempest-AttachVolumeTestJSON-1131407372 
tempest-AttachVolumeTestJSON-1131407372-project-member] Lazy-loading 'resources' on Instance uuid fc3beaba-2ad5-4598-b562-557fdd552b39 {{(pid=62519) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1957.991665] env[62519]: INFO nova.compute.manager [-] [instance: 646c9dfc-7b78-4cdb-b4f5-480c43af38c4] Took 1.32 seconds to deallocate network for instance. [ 1958.002846] env[62519]: DEBUG nova.compute.manager [req-81414197-c6b0-4213-839e-77f2bb0e6f88 req-bee3528e-6d5e-49a5-9e8c-0e5e0fa6d22e service nova] [instance: 646c9dfc-7b78-4cdb-b4f5-480c43af38c4] Detach interface failed, port_id=cb01cf13-b6c4-4f35-b75b-86f1ba67b87a, reason: Instance 646c9dfc-7b78-4cdb-b4f5-480c43af38c4 could not be found. {{(pid=62519) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11455}} [ 1958.426420] env[62519]: DEBUG oslo_concurrency.lockutils [None req-86a74723-e58a-4e33-bc6a-eb9bb560d3e7 tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Lock "2ea8304e-5b91-4908-a876-6e2c780b1da9" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.056s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1958.508178] env[62519]: DEBUG oslo_concurrency.lockutils [None req-cf22bf58-f85c-480a-87a7-41b885d7b650 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1958.627363] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92eb632b-3e2f-45db-9617-5fbb1f912df6 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1958.636080] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e92aad6-f3b1-4e1a-bd4a-39f4ce882024 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1958.670803] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-791fe585-57f6-4349-b583-36b6f3d966fb {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1958.682244] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4e4e34f-9015-423e-a094-582e49eff5b6 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1958.699882] env[62519]: DEBUG nova.compute.provider_tree [None req-f3b88edf-7ec3-49f6-b135-edfc3b9dd392 tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] Inventory has not changed in ProviderTree for provider: f8ca0d98-9158-4b85-ae0e-b106f966dd44 {{(pid=62519) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1959.176891] env[62519]: DEBUG oslo_concurrency.lockutils [None req-f81f7ae0-c2b3-40ab-b9a2-3dd6a5c5d7fc tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Acquiring lock "ee4b10ba-1c56-47cf-a528-d6e65c286ddb" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62519) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1959.177200] env[62519]: DEBUG oslo_concurrency.lockutils [None req-f81f7ae0-c2b3-40ab-b9a2-3dd6a5c5d7fc tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Lock "ee4b10ba-1c56-47cf-a528-d6e65c286ddb" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1959.177424] env[62519]: DEBUG oslo_concurrency.lockutils [None req-f81f7ae0-c2b3-40ab-b9a2-3dd6a5c5d7fc tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Acquiring lock "ee4b10ba-1c56-47cf-a528-d6e65c286ddb-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1959.177609] env[62519]: DEBUG oslo_concurrency.lockutils [None req-f81f7ae0-c2b3-40ab-b9a2-3dd6a5c5d7fc tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Lock "ee4b10ba-1c56-47cf-a528-d6e65c286ddb-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1959.177777] env[62519]: DEBUG oslo_concurrency.lockutils [None req-f81f7ae0-c2b3-40ab-b9a2-3dd6a5c5d7fc tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Lock "ee4b10ba-1c56-47cf-a528-d6e65c286ddb-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1959.180575] env[62519]: INFO nova.compute.manager [None req-f81f7ae0-c2b3-40ab-b9a2-3dd6a5c5d7fc tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] [instance: ee4b10ba-1c56-47cf-a528-d6e65c286ddb] Terminating instance [ 1959.202315] env[62519]: DEBUG nova.scheduler.client.report [None req-f3b88edf-7ec3-49f6-b135-edfc3b9dd392 tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] Inventory has not changed for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1959.684978] env[62519]: DEBUG nova.compute.manager [None req-f81f7ae0-c2b3-40ab-b9a2-3dd6a5c5d7fc tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] [instance: ee4b10ba-1c56-47cf-a528-d6e65c286ddb] Start destroying the instance on the hypervisor. 
{{(pid=62519) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3166}} [ 1959.685353] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-f81f7ae0-c2b3-40ab-b9a2-3dd6a5c5d7fc tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] [instance: ee4b10ba-1c56-47cf-a528-d6e65c286ddb] Destroying instance {{(pid=62519) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1959.686206] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64da1d38-6fd8-4b87-bd75-1ab54d59e1bf {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1959.693992] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-f81f7ae0-c2b3-40ab-b9a2-3dd6a5c5d7fc tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] [instance: ee4b10ba-1c56-47cf-a528-d6e65c286ddb] Powering off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1959.694246] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-4f2bdb49-9272-4b64-a8d5-7872e8fae206 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1959.701269] env[62519]: DEBUG oslo_vmware.api [None req-f81f7ae0-c2b3-40ab-b9a2-3dd6a5c5d7fc tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Waiting for the task: (returnval){ [ 1959.701269] env[62519]: value = "task-1803377" [ 1959.701269] env[62519]: _type = "Task" [ 1959.701269] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1959.706545] env[62519]: DEBUG oslo_concurrency.lockutils [None req-f3b88edf-7ec3-49f6-b135-edfc3b9dd392 tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.716s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1959.712013] env[62519]: DEBUG oslo_concurrency.lockutils [None req-cf22bf58-f85c-480a-87a7-41b885d7b650 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.204s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1959.712252] env[62519]: DEBUG nova.objects.instance [None req-cf22bf58-f85c-480a-87a7-41b885d7b650 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Lazy-loading 'resources' on Instance uuid 646c9dfc-7b78-4cdb-b4f5-480c43af38c4 {{(pid=62519) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1959.713603] env[62519]: DEBUG oslo_vmware.api [None req-f81f7ae0-c2b3-40ab-b9a2-3dd6a5c5d7fc tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Task: {'id': task-1803377, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1959.728610] env[62519]: INFO nova.scheduler.client.report [None req-f3b88edf-7ec3-49f6-b135-edfc3b9dd392 tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] Deleted allocations for instance fc3beaba-2ad5-4598-b562-557fdd552b39 [ 1960.213352] env[62519]: DEBUG oslo_vmware.api [None req-f81f7ae0-c2b3-40ab-b9a2-3dd6a5c5d7fc tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Task: {'id': task-1803377, 'name': PowerOffVM_Task, 'duration_secs': 0.200393} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1960.213620] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-f81f7ae0-c2b3-40ab-b9a2-3dd6a5c5d7fc tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] [instance: ee4b10ba-1c56-47cf-a528-d6e65c286ddb] Powered off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1960.213788] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-f81f7ae0-c2b3-40ab-b9a2-3dd6a5c5d7fc tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] [instance: ee4b10ba-1c56-47cf-a528-d6e65c286ddb] Unregistering the VM {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1960.214045] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-368e2c09-c22f-4211-8fa7-0303003b47f1 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1960.236226] env[62519]: DEBUG oslo_concurrency.lockutils [None req-f3b88edf-7ec3-49f6-b135-edfc3b9dd392 tempest-AttachVolumeTestJSON-1131407372 tempest-AttachVolumeTestJSON-1131407372-project-member] Lock "fc3beaba-2ad5-4598-b562-557fdd552b39" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.860s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1960.314878] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-f81f7ae0-c2b3-40ab-b9a2-3dd6a5c5d7fc tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] [instance: ee4b10ba-1c56-47cf-a528-d6e65c286ddb] Unregistered the VM {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1960.315113] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-f81f7ae0-c2b3-40ab-b9a2-3dd6a5c5d7fc tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] [instance: ee4b10ba-1c56-47cf-a528-d6e65c286ddb] Deleting contents of the VM from datastore datastore1 {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1960.315300] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-f81f7ae0-c2b3-40ab-b9a2-3dd6a5c5d7fc tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Deleting the datastore file [datastore1] ee4b10ba-1c56-47cf-a528-d6e65c286ddb {{(pid=62519) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1960.315577] env[62519]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with 
opID=oslo.vmware-d72d4177-0f9b-4f12-8864-803b85142e71 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1960.325371] env[62519]: DEBUG oslo_vmware.api [None req-f81f7ae0-c2b3-40ab-b9a2-3dd6a5c5d7fc tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Waiting for the task: (returnval){ [ 1960.325371] env[62519]: value = "task-1803379" [ 1960.325371] env[62519]: _type = "Task" [ 1960.325371] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1960.333610] env[62519]: DEBUG oslo_vmware.api [None req-f81f7ae0-c2b3-40ab-b9a2-3dd6a5c5d7fc tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Task: {'id': task-1803379, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1960.345876] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-105dd2fd-d258-464c-82bb-a4a1661f8012 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1960.353009] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5440cdb3-6284-4a95-a530-3afa43881076 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1960.383205] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2056d65c-442b-4691-8794-0b8087bb6245 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1960.391051] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37b7e2eb-dc7b-4885-916e-4058685ec28b {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1960.404270] env[62519]: DEBUG nova.compute.provider_tree [None req-cf22bf58-f85c-480a-87a7-41b885d7b650 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Inventory has not changed in ProviderTree for provider: f8ca0d98-9158-4b85-ae0e-b106f966dd44 {{(pid=62519) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1960.835915] env[62519]: DEBUG oslo_vmware.api [None req-f81f7ae0-c2b3-40ab-b9a2-3dd6a5c5d7fc tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Task: {'id': task-1803379, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.275494} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1960.836195] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-f81f7ae0-c2b3-40ab-b9a2-3dd6a5c5d7fc tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Deleted the datastore file {{(pid=62519) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1960.836379] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-f81f7ae0-c2b3-40ab-b9a2-3dd6a5c5d7fc tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] [instance: ee4b10ba-1c56-47cf-a528-d6e65c286ddb] Deleted contents of the VM from datastore datastore1 {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1960.836559] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-f81f7ae0-c2b3-40ab-b9a2-3dd6a5c5d7fc tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] [instance: ee4b10ba-1c56-47cf-a528-d6e65c286ddb] Instance destroyed {{(pid=62519) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1960.836731] env[62519]: INFO nova.compute.manager [None req-f81f7ae0-c2b3-40ab-b9a2-3dd6a5c5d7fc tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] [instance: ee4b10ba-1c56-47cf-a528-d6e65c286ddb] Took 1.15 seconds to destroy the instance on the hypervisor. [ 1960.836972] env[62519]: DEBUG oslo.service.loopingcall [None req-f81f7ae0-c2b3-40ab-b9a2-3dd6a5c5d7fc tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62519) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1960.837182] env[62519]: DEBUG nova.compute.manager [-] [instance: ee4b10ba-1c56-47cf-a528-d6e65c286ddb] Deallocating network for instance {{(pid=62519) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2297}} [ 1960.837379] env[62519]: DEBUG nova.network.neutron [-] [instance: ee4b10ba-1c56-47cf-a528-d6e65c286ddb] deallocate_for_instance() {{(pid=62519) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1960.907032] env[62519]: DEBUG nova.scheduler.client.report [None req-cf22bf58-f85c-480a-87a7-41b885d7b650 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Inventory has not changed for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1961.094154] env[62519]: DEBUG nova.compute.manager [req-50bbc6b5-22a3-4ae5-a5fc-d406a9f0e9fe req-675f03ed-d0da-4587-89b4-524a1ccab81d service nova] [instance: ee4b10ba-1c56-47cf-a528-d6e65c286ddb] Received event network-vif-deleted-99b49b4e-aba5-450d-a2db-3b35a0313a5b {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1961.094290] env[62519]: INFO nova.compute.manager [req-50bbc6b5-22a3-4ae5-a5fc-d406a9f0e9fe req-675f03ed-d0da-4587-89b4-524a1ccab81d service nova] [instance: ee4b10ba-1c56-47cf-a528-d6e65c286ddb] Neutron deleted interface 99b49b4e-aba5-450d-a2db-3b35a0313a5b; detaching it from the instance and deleting it from the info cache [ 1961.094449] env[62519]: DEBUG nova.network.neutron [req-50bbc6b5-22a3-4ae5-a5fc-d406a9f0e9fe req-675f03ed-d0da-4587-89b4-524a1ccab81d service nova] [instance: ee4b10ba-1c56-47cf-a528-d6e65c286ddb] Updating instance_info_cache with network_info: [] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1961.411729] env[62519]: DEBUG oslo_concurrency.lockutils [None req-cf22bf58-f85c-480a-87a7-41b885d7b650 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.700s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1961.435737] env[62519]: INFO nova.scheduler.client.report [None req-cf22bf58-f85c-480a-87a7-41b885d7b650 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Deleted allocations for instance 646c9dfc-7b78-4cdb-b4f5-480c43af38c4 [ 1961.575069] env[62519]: DEBUG nova.network.neutron [-] [instance: ee4b10ba-1c56-47cf-a528-d6e65c286ddb] Updating instance_info_cache with network_info: [] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1961.596642] env[62519]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-34f62b68-f190-4abb-9219-340414e83a24 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 
1961.608385] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7635dfba-1c6d-470e-b708-560b28e5943a {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1961.639972] env[62519]: DEBUG nova.compute.manager [req-50bbc6b5-22a3-4ae5-a5fc-d406a9f0e9fe req-675f03ed-d0da-4587-89b4-524a1ccab81d service nova] [instance: ee4b10ba-1c56-47cf-a528-d6e65c286ddb] Detach interface failed, port_id=99b49b4e-aba5-450d-a2db-3b35a0313a5b, reason: Instance ee4b10ba-1c56-47cf-a528-d6e65c286ddb could not be found. {{(pid=62519) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11455}} [ 1961.943441] env[62519]: DEBUG oslo_concurrency.lockutils [None req-cf22bf58-f85c-480a-87a7-41b885d7b650 tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Lock "646c9dfc-7b78-4cdb-b4f5-480c43af38c4" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.948s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1962.077441] env[62519]: INFO nova.compute.manager [-] [instance: ee4b10ba-1c56-47cf-a528-d6e65c286ddb] Took 1.24 seconds to deallocate network for instance. [ 1962.585435] env[62519]: DEBUG oslo_concurrency.lockutils [None req-f81f7ae0-c2b3-40ab-b9a2-3dd6a5c5d7fc tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1962.585707] env[62519]: DEBUG oslo_concurrency.lockutils [None req-f81f7ae0-c2b3-40ab-b9a2-3dd6a5c5d7fc tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1962.585928] env[62519]: DEBUG nova.objects.instance [None req-f81f7ae0-c2b3-40ab-b9a2-3dd6a5c5d7fc tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Lazy-loading 'resources' on Instance uuid ee4b10ba-1c56-47cf-a528-d6e65c286ddb {{(pid=62519) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1962.883527] env[62519]: DEBUG oslo_concurrency.lockutils [None req-dcf68f38-5420-4a62-84fb-9f121041d317 tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Acquiring lock "8e77653a-2e04-4ed7-a419-289bd4b899d7" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1962.883779] env[62519]: DEBUG oslo_concurrency.lockutils [None req-dcf68f38-5420-4a62-84fb-9f121041d317 tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Lock "8e77653a-2e04-4ed7-a419-289bd4b899d7" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1963.194991] env[62519]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07d05f3f-35e7-4500-a2ae-fa47c1b4d13a {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1963.202924] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56846902-34d2-49f4-9731-5a04f1c702db {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1963.233498] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8157d52c-a361-4c0e-bc6d-cf98f70ba532 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1963.240958] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-973e7df4-6141-4914-8fb1-c9f6c40b67fb {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1963.253908] env[62519]: DEBUG nova.compute.provider_tree [None req-f81f7ae0-c2b3-40ab-b9a2-3dd6a5c5d7fc tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Inventory has not changed in ProviderTree for provider: f8ca0d98-9158-4b85-ae0e-b106f966dd44 {{(pid=62519) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1963.387636] env[62519]: DEBUG nova.compute.utils [None req-dcf68f38-5420-4a62-84fb-9f121041d317 tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Using /dev/sd instead of None {{(pid=62519) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1963.756616] env[62519]: DEBUG nova.scheduler.client.report [None req-f81f7ae0-c2b3-40ab-b9a2-3dd6a5c5d7fc tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Inventory has not changed for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1963.890779] env[62519]: DEBUG oslo_concurrency.lockutils [None req-dcf68f38-5420-4a62-84fb-9f121041d317 tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Lock "8e77653a-2e04-4ed7-a419-289bd4b899d7" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.007s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1964.261376] env[62519]: DEBUG oslo_concurrency.lockutils [None req-f81f7ae0-c2b3-40ab-b9a2-3dd6a5c5d7fc tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.675s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1964.279662] env[62519]: INFO nova.scheduler.client.report [None req-f81f7ae0-c2b3-40ab-b9a2-3dd6a5c5d7fc tempest-ServerRescueNegativeTestJSON-527997597 
tempest-ServerRescueNegativeTestJSON-527997597-project-member] Deleted allocations for instance ee4b10ba-1c56-47cf-a528-d6e65c286ddb [ 1964.787375] env[62519]: DEBUG oslo_concurrency.lockutils [None req-f81f7ae0-c2b3-40ab-b9a2-3dd6a5c5d7fc tempest-ServerRescueNegativeTestJSON-527997597 tempest-ServerRescueNegativeTestJSON-527997597-project-member] Lock "ee4b10ba-1c56-47cf-a528-d6e65c286ddb" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.610s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1964.937283] env[62519]: DEBUG oslo_concurrency.lockutils [None req-0d27567d-8946-4ac8-a9ea-aba59963787c tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Acquiring lock "interface-c884a374-ffb8-48db-97bb-d64a687694d5-None" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1964.937727] env[62519]: DEBUG oslo_concurrency.lockutils [None req-0d27567d-8946-4ac8-a9ea-aba59963787c tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Lock "interface-c884a374-ffb8-48db-97bb-d64a687694d5-None" acquired by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: waited 0.001s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1964.938276] env[62519]: DEBUG nova.objects.instance [None req-0d27567d-8946-4ac8-a9ea-aba59963787c tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Lazy-loading 'flavor' on Instance uuid c884a374-ffb8-48db-97bb-d64a687694d5 {{(pid=62519) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1964.991138] env[62519]: DEBUG oslo_concurrency.lockutils [None req-dcf68f38-5420-4a62-84fb-9f121041d317 tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Acquiring lock "8e77653a-2e04-4ed7-a419-289bd4b899d7" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1964.991138] env[62519]: DEBUG oslo_concurrency.lockutils [None req-dcf68f38-5420-4a62-84fb-9f121041d317 tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Lock "8e77653a-2e04-4ed7-a419-289bd4b899d7" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1964.991138] env[62519]: INFO nova.compute.manager [None req-dcf68f38-5420-4a62-84fb-9f121041d317 tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] [instance: 8e77653a-2e04-4ed7-a419-289bd4b899d7] Attaching volume 821ba5a3-2f09-4446-beaf-b303397b65cb to /dev/sdb [ 1965.020677] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9516a6e4-71b5-4c15-8cf8-9a84626db81c {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1965.028783] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-b23da2b7-c5d8-426e-87fb-e58d2884a8c0 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1965.046172] env[62519]: DEBUG nova.virt.block_device [None req-dcf68f38-5420-4a62-84fb-9f121041d317 tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] [instance: 8e77653a-2e04-4ed7-a419-289bd4b899d7] Updating existing volume attachment record: f2dc6f55-4d1f-4944-a603-12a78768cace {{(pid=62519) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1965.443474] env[62519]: DEBUG nova.objects.instance [None req-0d27567d-8946-4ac8-a9ea-aba59963787c tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Lazy-loading 'pci_requests' on Instance uuid c884a374-ffb8-48db-97bb-d64a687694d5 {{(pid=62519) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1965.861791] env[62519]: DEBUG oslo_concurrency.lockutils [None req-6db8abcc-c77e-4993-a23a-2c588e65bdae tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Acquiring lock "11d4a010-959f-4f53-94dc-7499007612ad" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1965.862086] env[62519]: DEBUG oslo_concurrency.lockutils [None req-6db8abcc-c77e-4993-a23a-2c588e65bdae tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Lock "11d4a010-959f-4f53-94dc-7499007612ad" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1965.863057] env[62519]: DEBUG oslo_concurrency.lockutils [None req-6db8abcc-c77e-4993-a23a-2c588e65bdae tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Acquiring lock "11d4a010-959f-4f53-94dc-7499007612ad-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1965.863057] env[62519]: DEBUG oslo_concurrency.lockutils [None req-6db8abcc-c77e-4993-a23a-2c588e65bdae tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Lock "11d4a010-959f-4f53-94dc-7499007612ad-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1965.863057] env[62519]: DEBUG oslo_concurrency.lockutils [None req-6db8abcc-c77e-4993-a23a-2c588e65bdae tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Lock "11d4a010-959f-4f53-94dc-7499007612ad-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1965.865645] env[62519]: INFO nova.compute.manager [None req-6db8abcc-c77e-4993-a23a-2c588e65bdae tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] [instance: 11d4a010-959f-4f53-94dc-7499007612ad] Terminating instance [ 1965.946518] env[62519]: 
DEBUG nova.objects.base [None req-0d27567d-8946-4ac8-a9ea-aba59963787c tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Object Instance lazy-loaded attributes: flavor,pci_requests {{(pid=62519) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1965.946755] env[62519]: DEBUG nova.network.neutron [None req-0d27567d-8946-4ac8-a9ea-aba59963787c tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] [instance: c884a374-ffb8-48db-97bb-d64a687694d5] allocate_for_instance() {{(pid=62519) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1966.026016] env[62519]: DEBUG oslo_concurrency.lockutils [None req-0d27567d-8946-4ac8-a9ea-aba59963787c tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Lock "interface-c884a374-ffb8-48db-97bb-d64a687694d5-None" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 1.088s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1966.369376] env[62519]: DEBUG nova.compute.manager [None req-6db8abcc-c77e-4993-a23a-2c588e65bdae tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] [instance: 11d4a010-959f-4f53-94dc-7499007612ad] Start destroying the instance on the hypervisor. {{(pid=62519) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3166}} [ 1966.369614] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-6db8abcc-c77e-4993-a23a-2c588e65bdae tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] [instance: 11d4a010-959f-4f53-94dc-7499007612ad] Destroying instance {{(pid=62519) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1966.370583] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d140f19-a5d8-4633-9bbb-741d6bc6ffc5 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1966.378823] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-6db8abcc-c77e-4993-a23a-2c588e65bdae tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] [instance: 11d4a010-959f-4f53-94dc-7499007612ad] Powering off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1966.379074] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-42155775-2509-4602-ac20-bda5a1f5cc42 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1966.385802] env[62519]: DEBUG oslo_vmware.api [None req-6db8abcc-c77e-4993-a23a-2c588e65bdae tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Waiting for the task: (returnval){ [ 1966.385802] env[62519]: value = "task-1803385" [ 1966.385802] env[62519]: _type = "Task" [ 1966.385802] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1966.395468] env[62519]: DEBUG oslo_vmware.api [None req-6db8abcc-c77e-4993-a23a-2c588e65bdae tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Task: {'id': task-1803385, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1966.896893] env[62519]: DEBUG oslo_vmware.api [None req-6db8abcc-c77e-4993-a23a-2c588e65bdae tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Task: {'id': task-1803385, 'name': PowerOffVM_Task, 'duration_secs': 0.258967} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1966.897678] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-6db8abcc-c77e-4993-a23a-2c588e65bdae tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] [instance: 11d4a010-959f-4f53-94dc-7499007612ad] Powered off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1966.897678] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-6db8abcc-c77e-4993-a23a-2c588e65bdae tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] [instance: 11d4a010-959f-4f53-94dc-7499007612ad] Unregistering the VM {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1966.897678] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-669632f7-5f5e-4665-acd5-fd5bb466a4bf {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1967.039475] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-6db8abcc-c77e-4993-a23a-2c588e65bdae tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] [instance: 11d4a010-959f-4f53-94dc-7499007612ad] Unregistered the VM {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1967.039836] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-6db8abcc-c77e-4993-a23a-2c588e65bdae tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] [instance: 11d4a010-959f-4f53-94dc-7499007612ad] Deleting contents of the VM from datastore datastore1 {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1967.040516] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-6db8abcc-c77e-4993-a23a-2c588e65bdae tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Deleting the datastore file [datastore1] 11d4a010-959f-4f53-94dc-7499007612ad {{(pid=62519) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1967.041203] env[62519]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-397bc4a8-00ca-47b3-9e37-97624fe942ba {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1967.053239] env[62519]: DEBUG oslo_vmware.api [None req-6db8abcc-c77e-4993-a23a-2c588e65bdae tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Waiting for the task: (returnval){ [ 1967.053239] env[62519]: value = "task-1803387" [ 1967.053239] env[62519]: _type = "Task" [ 1967.053239] env[62519]: } to complete. 
{{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1967.063845] env[62519]: DEBUG oslo_vmware.api [None req-6db8abcc-c77e-4993-a23a-2c588e65bdae tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Task: {'id': task-1803387, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1967.564242] env[62519]: DEBUG oslo_vmware.api [None req-6db8abcc-c77e-4993-a23a-2c588e65bdae tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Task: {'id': task-1803387, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.198265} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1967.564497] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-6db8abcc-c77e-4993-a23a-2c588e65bdae tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Deleted the datastore file {{(pid=62519) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1967.564679] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-6db8abcc-c77e-4993-a23a-2c588e65bdae tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] [instance: 11d4a010-959f-4f53-94dc-7499007612ad] Deleted contents of the VM from datastore datastore1 {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1967.564862] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-6db8abcc-c77e-4993-a23a-2c588e65bdae tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] [instance: 11d4a010-959f-4f53-94dc-7499007612ad] Instance destroyed {{(pid=62519) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1967.565052] env[62519]: INFO nova.compute.manager [None req-6db8abcc-c77e-4993-a23a-2c588e65bdae tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] [instance: 11d4a010-959f-4f53-94dc-7499007612ad] Took 1.20 seconds to destroy the instance on the hypervisor. [ 1967.565298] env[62519]: DEBUG oslo.service.loopingcall [None req-6db8abcc-c77e-4993-a23a-2c588e65bdae tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62519) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1967.565486] env[62519]: DEBUG nova.compute.manager [-] [instance: 11d4a010-959f-4f53-94dc-7499007612ad] Deallocating network for instance {{(pid=62519) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2297}} [ 1967.565581] env[62519]: DEBUG nova.network.neutron [-] [instance: 11d4a010-959f-4f53-94dc-7499007612ad] deallocate_for_instance() {{(pid=62519) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1967.836018] env[62519]: DEBUG nova.compute.manager [req-c7b9ae78-540d-4ce3-ad4e-f0dea3699d2b req-f039c5ed-757a-4a7d-8dd8-9f9ddf814a86 service nova] [instance: 11d4a010-959f-4f53-94dc-7499007612ad] Received event network-vif-deleted-ca1a3bbf-3f10-4a96-a67d-b77464ab25e7 {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1967.836272] env[62519]: INFO nova.compute.manager [req-c7b9ae78-540d-4ce3-ad4e-f0dea3699d2b req-f039c5ed-757a-4a7d-8dd8-9f9ddf814a86 service nova] [instance: 11d4a010-959f-4f53-94dc-7499007612ad] Neutron deleted interface ca1a3bbf-3f10-4a96-a67d-b77464ab25e7; detaching it from the instance and deleting it from the info cache [ 1967.836472] env[62519]: DEBUG nova.network.neutron [req-c7b9ae78-540d-4ce3-ad4e-f0dea3699d2b req-f039c5ed-757a-4a7d-8dd8-9f9ddf814a86 service nova] [instance: 11d4a010-959f-4f53-94dc-7499007612ad] Updating instance_info_cache with network_info: [] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1968.308229] env[62519]: DEBUG nova.network.neutron [-] [instance: 11d4a010-959f-4f53-94dc-7499007612ad] Updating instance_info_cache with network_info: [] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1968.340044] env[62519]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-ecb63b2f-3521-4276-9a98-79674daacb95 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1968.351492] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c966817f-0840-4532-a9be-b8d22253f53d {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1968.383012] env[62519]: DEBUG nova.compute.manager [req-c7b9ae78-540d-4ce3-ad4e-f0dea3699d2b req-f039c5ed-757a-4a7d-8dd8-9f9ddf814a86 service nova] [instance: 11d4a010-959f-4f53-94dc-7499007612ad] Detach interface failed, port_id=ca1a3bbf-3f10-4a96-a67d-b77464ab25e7, reason: Instance 11d4a010-959f-4f53-94dc-7499007612ad could not be found. {{(pid=62519) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11455}} [ 1968.810663] env[62519]: INFO nova.compute.manager [-] [instance: 11d4a010-959f-4f53-94dc-7499007612ad] Took 1.24 seconds to deallocate network for instance. 
[ 1968.991658] env[62519]: DEBUG oslo_concurrency.lockutils [None req-0ff2d8d0-26e0-4f5f-a084-e3315728df72 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Acquiring lock "interface-c884a374-ffb8-48db-97bb-d64a687694d5-None" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1968.991933] env[62519]: DEBUG oslo_concurrency.lockutils [None req-0ff2d8d0-26e0-4f5f-a084-e3315728df72 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Lock "interface-c884a374-ffb8-48db-97bb-d64a687694d5-None" acquired by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1968.992296] env[62519]: DEBUG nova.objects.instance [None req-0ff2d8d0-26e0-4f5f-a084-e3315728df72 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Lazy-loading 'flavor' on Instance uuid c884a374-ffb8-48db-97bb-d64a687694d5 {{(pid=62519) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1969.317105] env[62519]: DEBUG oslo_concurrency.lockutils [None req-6db8abcc-c77e-4993-a23a-2c588e65bdae tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1969.318114] env[62519]: DEBUG oslo_concurrency.lockutils [None req-6db8abcc-c77e-4993-a23a-2c588e65bdae tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1969.318114] env[62519]: DEBUG nova.objects.instance [None req-6db8abcc-c77e-4993-a23a-2c588e65bdae tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Lazy-loading 'resources' on Instance uuid 11d4a010-959f-4f53-94dc-7499007612ad {{(pid=62519) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1969.591725] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-dcf68f38-5420-4a62-84fb-9f121041d317 tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] [instance: 8e77653a-2e04-4ed7-a419-289bd4b899d7] Volume attach. 
Driver type: vmdk {{(pid=62519) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1969.591963] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-dcf68f38-5420-4a62-84fb-9f121041d317 tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] [instance: 8e77653a-2e04-4ed7-a419-289bd4b899d7] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-373866', 'volume_id': '821ba5a3-2f09-4446-beaf-b303397b65cb', 'name': 'volume-821ba5a3-2f09-4446-beaf-b303397b65cb', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '8e77653a-2e04-4ed7-a419-289bd4b899d7', 'attached_at': '', 'detached_at': '', 'volume_id': '821ba5a3-2f09-4446-beaf-b303397b65cb', 'serial': '821ba5a3-2f09-4446-beaf-b303397b65cb'} {{(pid=62519) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1969.592852] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89869028-d024-44fa-bc27-1a01c55325a2 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1969.609732] env[62519]: DEBUG nova.objects.instance [None req-0ff2d8d0-26e0-4f5f-a084-e3315728df72 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Lazy-loading 'pci_requests' on Instance uuid c884a374-ffb8-48db-97bb-d64a687694d5 {{(pid=62519) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1969.611245] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf031912-102a-4b10-bae5-6451752483ee {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1969.636353] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-dcf68f38-5420-4a62-84fb-9f121041d317 tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] [instance: 8e77653a-2e04-4ed7-a419-289bd4b899d7] Reconfiguring VM instance instance-0000006e to attach disk [datastore1] volume-821ba5a3-2f09-4446-beaf-b303397b65cb/volume-821ba5a3-2f09-4446-beaf-b303397b65cb.vmdk or device None with type thin {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1969.637196] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2c2b484b-14db-4a4c-82ad-da6cede70a4d {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1969.657043] env[62519]: DEBUG oslo_vmware.api [None req-dcf68f38-5420-4a62-84fb-9f121041d317 tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Waiting for the task: (returnval){ [ 1969.657043] env[62519]: value = "task-1803389" [ 1969.657043] env[62519]: _type = "Task" [ 1969.657043] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1969.664493] env[62519]: DEBUG oslo_vmware.api [None req-dcf68f38-5420-4a62-84fb-9f121041d317 tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Task: {'id': task-1803389, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1969.930845] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9ea4ddd-0ac7-49c2-a016-b98721516db4 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1969.941240] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25058488-d989-42a4-8503-203fa29f0190 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1969.974886] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d42510b7-6ccf-4b3e-b956-dcbfaddc0961 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1969.983117] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69aa5e8a-4fc0-40ba-ae11-48d8a116b0a2 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1969.997443] env[62519]: DEBUG nova.compute.provider_tree [None req-6db8abcc-c77e-4993-a23a-2c588e65bdae tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Inventory has not changed in ProviderTree for provider: f8ca0d98-9158-4b85-ae0e-b106f966dd44 {{(pid=62519) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1970.115363] env[62519]: DEBUG nova.objects.base [None req-0ff2d8d0-26e0-4f5f-a084-e3315728df72 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Object Instance lazy-loaded attributes: flavor,pci_requests {{(pid=62519) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1970.115460] env[62519]: DEBUG nova.network.neutron [None req-0ff2d8d0-26e0-4f5f-a084-e3315728df72 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] [instance: c884a374-ffb8-48db-97bb-d64a687694d5] allocate_for_instance() {{(pid=62519) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1970.156703] env[62519]: DEBUG nova.policy [None req-0ff2d8d0-26e0-4f5f-a084-e3315728df72 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '7eca5c7b079d4785941d68d7c51df5e0', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '63a46158057949478e5c79fbe0d4d5d4', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62519) authorize /opt/stack/nova/nova/policy.py:192}} [ 1970.167854] env[62519]: DEBUG oslo_vmware.api [None req-dcf68f38-5420-4a62-84fb-9f121041d317 tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Task: {'id': task-1803389, 'name': ReconfigVM_Task, 'duration_secs': 0.367618} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1970.168166] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-dcf68f38-5420-4a62-84fb-9f121041d317 tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] [instance: 8e77653a-2e04-4ed7-a419-289bd4b899d7] Reconfigured VM instance instance-0000006e to attach disk [datastore1] volume-821ba5a3-2f09-4446-beaf-b303397b65cb/volume-821ba5a3-2f09-4446-beaf-b303397b65cb.vmdk or device None with type thin {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1970.173097] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-bb3b1368-7567-4771-82f4-b810151c14c4 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1970.189157] env[62519]: DEBUG oslo_vmware.api [None req-dcf68f38-5420-4a62-84fb-9f121041d317 tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Waiting for the task: (returnval){ [ 1970.189157] env[62519]: value = "task-1803390" [ 1970.189157] env[62519]: _type = "Task" [ 1970.189157] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1970.197943] env[62519]: DEBUG oslo_vmware.api [None req-dcf68f38-5420-4a62-84fb-9f121041d317 tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Task: {'id': task-1803390, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1970.394840] env[62519]: DEBUG nova.network.neutron [None req-0ff2d8d0-26e0-4f5f-a084-e3315728df72 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] [instance: c884a374-ffb8-48db-97bb-d64a687694d5] Successfully created port: 0f1a716e-2238-498b-9d31-8516cb0d084d {{(pid=62519) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1970.500334] env[62519]: DEBUG nova.scheduler.client.report [None req-6db8abcc-c77e-4993-a23a-2c588e65bdae tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Inventory has not changed for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1970.699612] env[62519]: DEBUG oslo_vmware.api [None req-dcf68f38-5420-4a62-84fb-9f121041d317 tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Task: {'id': task-1803390, 'name': ReconfigVM_Task, 'duration_secs': 0.144245} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1970.699925] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-dcf68f38-5420-4a62-84fb-9f121041d317 tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] [instance: 8e77653a-2e04-4ed7-a419-289bd4b899d7] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-373866', 'volume_id': '821ba5a3-2f09-4446-beaf-b303397b65cb', 'name': 'volume-821ba5a3-2f09-4446-beaf-b303397b65cb', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '8e77653a-2e04-4ed7-a419-289bd4b899d7', 'attached_at': '', 'detached_at': '', 'volume_id': '821ba5a3-2f09-4446-beaf-b303397b65cb', 'serial': '821ba5a3-2f09-4446-beaf-b303397b65cb'} {{(pid=62519) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1971.005482] env[62519]: DEBUG oslo_concurrency.lockutils [None req-6db8abcc-c77e-4993-a23a-2c588e65bdae tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.688s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1971.029151] env[62519]: INFO nova.scheduler.client.report [None req-6db8abcc-c77e-4993-a23a-2c588e65bdae tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Deleted allocations for instance 11d4a010-959f-4f53-94dc-7499007612ad [ 1971.283956] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2b52c467-6173-43dc-9275-6f8ef384918e tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Acquiring lock "618a1db6-4056-4380-b5df-395ac14165a7" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1971.283956] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2b52c467-6173-43dc-9275-6f8ef384918e tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Lock "618a1db6-4056-4380-b5df-395ac14165a7" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1971.284153] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2b52c467-6173-43dc-9275-6f8ef384918e tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Acquiring lock "618a1db6-4056-4380-b5df-395ac14165a7-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1971.284218] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2b52c467-6173-43dc-9275-6f8ef384918e tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Lock "618a1db6-4056-4380-b5df-395ac14165a7-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1971.284387] env[62519]: DEBUG oslo_concurrency.lockutils [None 
req-2b52c467-6173-43dc-9275-6f8ef384918e tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Lock "618a1db6-4056-4380-b5df-395ac14165a7-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1971.286525] env[62519]: INFO nova.compute.manager [None req-2b52c467-6173-43dc-9275-6f8ef384918e tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] [instance: 618a1db6-4056-4380-b5df-395ac14165a7] Terminating instance [ 1971.536854] env[62519]: DEBUG oslo_concurrency.lockutils [None req-6db8abcc-c77e-4993-a23a-2c588e65bdae tempest-ServerActionsTestOtherA-311064147 tempest-ServerActionsTestOtherA-311064147-project-member] Lock "11d4a010-959f-4f53-94dc-7499007612ad" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.675s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1971.737377] env[62519]: DEBUG nova.objects.instance [None req-dcf68f38-5420-4a62-84fb-9f121041d317 tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Lazy-loading 'flavor' on Instance uuid 8e77653a-2e04-4ed7-a419-289bd4b899d7 {{(pid=62519) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1971.789959] env[62519]: DEBUG nova.compute.manager [None req-2b52c467-6173-43dc-9275-6f8ef384918e tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] [instance: 618a1db6-4056-4380-b5df-395ac14165a7] Start destroying the instance on the hypervisor. 
{{(pid=62519) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3166}} [ 1971.791175] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-2b52c467-6173-43dc-9275-6f8ef384918e tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] [instance: 618a1db6-4056-4380-b5df-395ac14165a7] Destroying instance {{(pid=62519) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1971.791613] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9857344b-a82d-40cd-a167-ec0e073e3553 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1971.798889] env[62519]: DEBUG nova.compute.manager [req-fa33a82c-5a37-493d-b07f-fe568a62fedb req-a3a6a581-dee6-4089-9a71-3377e2adcbc1 service nova] [instance: c884a374-ffb8-48db-97bb-d64a687694d5] Received event network-vif-plugged-0f1a716e-2238-498b-9d31-8516cb0d084d {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1971.799118] env[62519]: DEBUG oslo_concurrency.lockutils [req-fa33a82c-5a37-493d-b07f-fe568a62fedb req-a3a6a581-dee6-4089-9a71-3377e2adcbc1 service nova] Acquiring lock "c884a374-ffb8-48db-97bb-d64a687694d5-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1971.799324] env[62519]: DEBUG oslo_concurrency.lockutils [req-fa33a82c-5a37-493d-b07f-fe568a62fedb req-a3a6a581-dee6-4089-9a71-3377e2adcbc1 service nova] Lock "c884a374-ffb8-48db-97bb-d64a687694d5-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1971.799491] env[62519]: DEBUG oslo_concurrency.lockutils [req-fa33a82c-5a37-493d-b07f-fe568a62fedb req-a3a6a581-dee6-4089-9a71-3377e2adcbc1 service nova] Lock "c884a374-ffb8-48db-97bb-d64a687694d5-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1971.799657] env[62519]: DEBUG nova.compute.manager [req-fa33a82c-5a37-493d-b07f-fe568a62fedb req-a3a6a581-dee6-4089-9a71-3377e2adcbc1 service nova] [instance: c884a374-ffb8-48db-97bb-d64a687694d5] No waiting events found dispatching network-vif-plugged-0f1a716e-2238-498b-9d31-8516cb0d084d {{(pid=62519) pop_instance_event /opt/stack/nova/nova/compute/manager.py:323}} [ 1971.799866] env[62519]: WARNING nova.compute.manager [req-fa33a82c-5a37-493d-b07f-fe568a62fedb req-a3a6a581-dee6-4089-9a71-3377e2adcbc1 service nova] [instance: c884a374-ffb8-48db-97bb-d64a687694d5] Received unexpected event network-vif-plugged-0f1a716e-2238-498b-9d31-8516cb0d084d for instance with vm_state active and task_state None. 
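Editor's note: the records above show nova-compute receiving the external event network-vif-plugged-0f1a716e-2238-498b-9d31-8516cb0d084d from Neutron while nothing is registered to wait for it, so pop_instance_event finds no waiter and the manager emits the "Received unexpected event" warning. Below is a minimal, illustrative sketch of that register/pop pattern using only the standard library; the class and method names echo the log, but this is not Nova's actual implementation.

# Illustrative sketch of the "pop waiting event or warn" pattern seen in the records above.
# Assumption: this is NOT nova.compute.manager's real code, only the shape of the mechanism.
import threading
from collections import defaultdict

class InstanceEvents:
    def __init__(self):
        self._lock = threading.Lock()        # plays the role of the "<uuid>-events" lock in the log
        self._events = defaultdict(dict)     # instance_uuid -> {event_name: threading.Event}

    def prepare_for_instance_event(self, instance_uuid, event_name):
        """Register a waiter before triggering an operation that will produce the event."""
        with self._lock:
            waiter = threading.Event()
            self._events[instance_uuid][event_name] = waiter
            return waiter

    def pop_instance_event(self, instance_uuid, event_name):
        """Return the registered waiter, or None if nobody is waiting (the WARNING case above)."""
        with self._lock:
            return self._events.get(instance_uuid, {}).pop(event_name, None)

def external_instance_event(events, instance_uuid, event_name):
    waiter = events.pop_instance_event(instance_uuid, event_name)
    if waiter is None:
        print(f"WARNING: Received unexpected event {event_name} for instance {instance_uuid}")
    else:
        waiter.set()                          # wake up whoever called prepare_for_instance_event()

if __name__ == "__main__":
    ev = InstanceEvents()
    # No waiter registered -> warning path, matching the log above.
    external_instance_event(ev, "c884a374-ffb8-48db-97bb-d64a687694d5",
                            "network-vif-plugged-0f1a716e-2238-498b-9d31-8516cb0d084d")

In the real service the waiting side is typically a context manager wrapped around the operation that expects the plug; here the instance is already active with no task in flight, so nothing was waiting and the event is simply discarded with the warning.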
[ 1971.802627] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-2b52c467-6173-43dc-9275-6f8ef384918e tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] [instance: 618a1db6-4056-4380-b5df-395ac14165a7] Powering off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1971.802867] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-5aadfc9a-01ab-4570-8d4e-32c65e755aad {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1971.811850] env[62519]: DEBUG oslo_vmware.api [None req-2b52c467-6173-43dc-9275-6f8ef384918e tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Waiting for the task: (returnval){ [ 1971.811850] env[62519]: value = "task-1803391" [ 1971.811850] env[62519]: _type = "Task" [ 1971.811850] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1971.825653] env[62519]: DEBUG oslo_vmware.api [None req-2b52c467-6173-43dc-9275-6f8ef384918e tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Task: {'id': task-1803391, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1971.900925] env[62519]: DEBUG nova.network.neutron [None req-0ff2d8d0-26e0-4f5f-a084-e3315728df72 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] [instance: c884a374-ffb8-48db-97bb-d64a687694d5] Successfully updated port: 0f1a716e-2238-498b-9d31-8516cb0d084d {{(pid=62519) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1972.242557] env[62519]: DEBUG oslo_concurrency.lockutils [None req-dcf68f38-5420-4a62-84fb-9f121041d317 tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Lock "8e77653a-2e04-4ed7-a419-289bd4b899d7" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.252s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1972.325498] env[62519]: DEBUG oslo_vmware.api [None req-2b52c467-6173-43dc-9275-6f8ef384918e tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Task: {'id': task-1803391, 'name': PowerOffVM_Task, 'duration_secs': 0.220188} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1972.325498] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-2b52c467-6173-43dc-9275-6f8ef384918e tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] [instance: 618a1db6-4056-4380-b5df-395ac14165a7] Powered off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1972.325498] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-2b52c467-6173-43dc-9275-6f8ef384918e tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] [instance: 618a1db6-4056-4380-b5df-395ac14165a7] Unregistering the VM {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1972.325498] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ffe8bb13-558b-46d6-a383-86d48c877103 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1972.407025] env[62519]: DEBUG oslo_concurrency.lockutils [None req-0ff2d8d0-26e0-4f5f-a084-e3315728df72 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Acquiring lock "refresh_cache-c884a374-ffb8-48db-97bb-d64a687694d5" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1972.407025] env[62519]: DEBUG oslo_concurrency.lockutils [None req-0ff2d8d0-26e0-4f5f-a084-e3315728df72 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Acquired lock "refresh_cache-c884a374-ffb8-48db-97bb-d64a687694d5" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1972.407025] env[62519]: DEBUG nova.network.neutron [None req-0ff2d8d0-26e0-4f5f-a084-e3315728df72 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] [instance: c884a374-ffb8-48db-97bb-d64a687694d5] Building network info cache for instance {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1972.407970] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-2b52c467-6173-43dc-9275-6f8ef384918e tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] [instance: 618a1db6-4056-4380-b5df-395ac14165a7] Unregistered the VM {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1972.408372] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-2b52c467-6173-43dc-9275-6f8ef384918e tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] [instance: 618a1db6-4056-4380-b5df-395ac14165a7] Deleting contents of the VM from datastore datastore1 {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1972.408856] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-2b52c467-6173-43dc-9275-6f8ef384918e tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Deleting the datastore file [datastore1] 618a1db6-4056-4380-b5df-395ac14165a7 {{(pid=62519) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1972.409640] env[62519]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-48a8ed06-55dc-499e-84b3-3a01a3c19830 {{(pid=62519) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1972.419193] env[62519]: DEBUG oslo_vmware.api [None req-2b52c467-6173-43dc-9275-6f8ef384918e tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Waiting for the task: (returnval){ [ 1972.419193] env[62519]: value = "task-1803393" [ 1972.419193] env[62519]: _type = "Task" [ 1972.419193] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1972.428068] env[62519]: DEBUG oslo_vmware.api [None req-2b52c467-6173-43dc-9275-6f8ef384918e tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Task: {'id': task-1803393, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1972.636933] env[62519]: DEBUG oslo_concurrency.lockutils [None req-f1afa4a3-30ce-45e9-b42e-23d158fc3355 tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Acquiring lock "8e77653a-2e04-4ed7-a419-289bd4b899d7" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1972.637300] env[62519]: DEBUG oslo_concurrency.lockutils [None req-f1afa4a3-30ce-45e9-b42e-23d158fc3355 tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Lock "8e77653a-2e04-4ed7-a419-289bd4b899d7" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1972.929962] env[62519]: DEBUG oslo_vmware.api [None req-2b52c467-6173-43dc-9275-6f8ef384918e tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Task: {'id': task-1803393, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.140236} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1972.930264] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-2b52c467-6173-43dc-9275-6f8ef384918e tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Deleted the datastore file {{(pid=62519) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1972.930476] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-2b52c467-6173-43dc-9275-6f8ef384918e tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] [instance: 618a1db6-4056-4380-b5df-395ac14165a7] Deleted contents of the VM from datastore datastore1 {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1972.930720] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-2b52c467-6173-43dc-9275-6f8ef384918e tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] [instance: 618a1db6-4056-4380-b5df-395ac14165a7] Instance destroyed {{(pid=62519) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1972.930937] env[62519]: INFO nova.compute.manager [None req-2b52c467-6173-43dc-9275-6f8ef384918e tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] [instance: 618a1db6-4056-4380-b5df-395ac14165a7] Took 1.14 seconds to destroy the instance on the hypervisor. [ 1972.931201] env[62519]: DEBUG oslo.service.loopingcall [None req-2b52c467-6173-43dc-9275-6f8ef384918e tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62519) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1972.931393] env[62519]: DEBUG nova.compute.manager [-] [instance: 618a1db6-4056-4380-b5df-395ac14165a7] Deallocating network for instance {{(pid=62519) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2297}} [ 1972.931489] env[62519]: DEBUG nova.network.neutron [-] [instance: 618a1db6-4056-4380-b5df-395ac14165a7] deallocate_for_instance() {{(pid=62519) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1972.955241] env[62519]: WARNING nova.network.neutron [None req-0ff2d8d0-26e0-4f5f-a084-e3315728df72 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] [instance: c884a374-ffb8-48db-97bb-d64a687694d5] 3996e7f6-f093-4152-af91-6fb77f32a1c5 already exists in list: networks containing: ['3996e7f6-f093-4152-af91-6fb77f32a1c5']. 
ignoring it [ 1973.140031] env[62519]: INFO nova.compute.manager [None req-f1afa4a3-30ce-45e9-b42e-23d158fc3355 tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] [instance: 8e77653a-2e04-4ed7-a419-289bd4b899d7] Detaching volume 821ba5a3-2f09-4446-beaf-b303397b65cb [ 1973.188201] env[62519]: INFO nova.virt.block_device [None req-f1afa4a3-30ce-45e9-b42e-23d158fc3355 tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] [instance: 8e77653a-2e04-4ed7-a419-289bd4b899d7] Attempting to driver detach volume 821ba5a3-2f09-4446-beaf-b303397b65cb from mountpoint /dev/sdb [ 1973.188473] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-f1afa4a3-30ce-45e9-b42e-23d158fc3355 tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] [instance: 8e77653a-2e04-4ed7-a419-289bd4b899d7] Volume detach. Driver type: vmdk {{(pid=62519) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1973.188645] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-f1afa4a3-30ce-45e9-b42e-23d158fc3355 tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] [instance: 8e77653a-2e04-4ed7-a419-289bd4b899d7] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-373866', 'volume_id': '821ba5a3-2f09-4446-beaf-b303397b65cb', 'name': 'volume-821ba5a3-2f09-4446-beaf-b303397b65cb', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '8e77653a-2e04-4ed7-a419-289bd4b899d7', 'attached_at': '', 'detached_at': '', 'volume_id': '821ba5a3-2f09-4446-beaf-b303397b65cb', 'serial': '821ba5a3-2f09-4446-beaf-b303397b65cb'} {{(pid=62519) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1973.189539] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-447018f9-2883-4e7e-86b3-0752475d3f6b {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1973.218868] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e5a4cb12-9bf7-4720-acf5-74206fe4e14e {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1973.227830] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf887794-257e-4c7a-87f2-007b97ed056a {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1973.253925] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f01f18ec-79ce-4b46-a65a-70b289fa4041 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1973.275380] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-f1afa4a3-30ce-45e9-b42e-23d158fc3355 tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] The volume has not been displaced from its original location: [datastore1] volume-821ba5a3-2f09-4446-beaf-b303397b65cb/volume-821ba5a3-2f09-4446-beaf-b303397b65cb.vmdk. No consolidation needed. 
{{(pid=62519) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1973.280648] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-f1afa4a3-30ce-45e9-b42e-23d158fc3355 tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] [instance: 8e77653a-2e04-4ed7-a419-289bd4b899d7] Reconfiguring VM instance instance-0000006e to detach disk 2001 {{(pid=62519) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1973.281286] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ba68c82f-a42b-43ba-b5de-3a73e4fdc425 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1973.303803] env[62519]: DEBUG oslo_vmware.api [None req-f1afa4a3-30ce-45e9-b42e-23d158fc3355 tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Waiting for the task: (returnval){ [ 1973.303803] env[62519]: value = "task-1803394" [ 1973.303803] env[62519]: _type = "Task" [ 1973.303803] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1973.314144] env[62519]: DEBUG oslo_vmware.api [None req-f1afa4a3-30ce-45e9-b42e-23d158fc3355 tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Task: {'id': task-1803394, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1973.502769] env[62519]: DEBUG nova.network.neutron [None req-0ff2d8d0-26e0-4f5f-a084-e3315728df72 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] [instance: c884a374-ffb8-48db-97bb-d64a687694d5] Updating instance_info_cache with network_info: [{"id": "5a25cf20-af43-4653-8729-93e5e73c5891", "address": "fa:16:3e:9b:6d:c4", "network": {"id": "3996e7f6-f093-4152-af91-6fb77f32a1c5", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-2013009403-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.177", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "63a46158057949478e5c79fbe0d4d5d4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3093647a-bab7-4562-ada0-428725e8c0fc", "external-id": "nsx-vlan-transportzone-660", "segmentation_id": 660, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5a25cf20-af", "ovs_interfaceid": "5a25cf20-af43-4653-8729-93e5e73c5891", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "0f1a716e-2238-498b-9d31-8516cb0d084d", "address": "fa:16:3e:6b:ae:3d", "network": {"id": "3996e7f6-f093-4152-af91-6fb77f32a1c5", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-2013009403-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": 
"192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "63a46158057949478e5c79fbe0d4d5d4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3093647a-bab7-4562-ada0-428725e8c0fc", "external-id": "nsx-vlan-transportzone-660", "segmentation_id": 660, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0f1a716e-22", "ovs_interfaceid": "0f1a716e-2238-498b-9d31-8516cb0d084d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1973.814575] env[62519]: DEBUG oslo_vmware.api [None req-f1afa4a3-30ce-45e9-b42e-23d158fc3355 tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Task: {'id': task-1803394, 'name': ReconfigVM_Task, 'duration_secs': 0.238809} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1973.814945] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-f1afa4a3-30ce-45e9-b42e-23d158fc3355 tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] [instance: 8e77653a-2e04-4ed7-a419-289bd4b899d7] Reconfigured VM instance instance-0000006e to detach disk 2001 {{(pid=62519) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1973.820965] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-abcc257c-1497-4a15-8c81-9bca08e97bfb {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1973.839989] env[62519]: DEBUG nova.compute.manager [req-8db7c895-bec2-4447-aff5-63760a8402dc req-74a1a838-bccf-445d-8b67-d6ddfb64763e service nova] [instance: c884a374-ffb8-48db-97bb-d64a687694d5] Received event network-changed-0f1a716e-2238-498b-9d31-8516cb0d084d {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1973.840206] env[62519]: DEBUG nova.compute.manager [req-8db7c895-bec2-4447-aff5-63760a8402dc req-74a1a838-bccf-445d-8b67-d6ddfb64763e service nova] [instance: c884a374-ffb8-48db-97bb-d64a687694d5] Refreshing instance network info cache due to event network-changed-0f1a716e-2238-498b-9d31-8516cb0d084d. {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11628}} [ 1973.840401] env[62519]: DEBUG oslo_concurrency.lockutils [req-8db7c895-bec2-4447-aff5-63760a8402dc req-74a1a838-bccf-445d-8b67-d6ddfb64763e service nova] Acquiring lock "refresh_cache-c884a374-ffb8-48db-97bb-d64a687694d5" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1973.842215] env[62519]: DEBUG oslo_vmware.api [None req-f1afa4a3-30ce-45e9-b42e-23d158fc3355 tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Waiting for the task: (returnval){ [ 1973.842215] env[62519]: value = "task-1803395" [ 1973.842215] env[62519]: _type = "Task" [ 1973.842215] env[62519]: } to complete. 
{{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1973.852729] env[62519]: DEBUG oslo_vmware.api [None req-f1afa4a3-30ce-45e9-b42e-23d158fc3355 tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Task: {'id': task-1803395, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1974.006200] env[62519]: DEBUG oslo_concurrency.lockutils [None req-0ff2d8d0-26e0-4f5f-a084-e3315728df72 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Releasing lock "refresh_cache-c884a374-ffb8-48db-97bb-d64a687694d5" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1974.006913] env[62519]: DEBUG oslo_concurrency.lockutils [None req-0ff2d8d0-26e0-4f5f-a084-e3315728df72 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Acquiring lock "c884a374-ffb8-48db-97bb-d64a687694d5" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1974.007093] env[62519]: DEBUG oslo_concurrency.lockutils [None req-0ff2d8d0-26e0-4f5f-a084-e3315728df72 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Acquired lock "c884a374-ffb8-48db-97bb-d64a687694d5" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1974.007384] env[62519]: DEBUG oslo_concurrency.lockutils [req-8db7c895-bec2-4447-aff5-63760a8402dc req-74a1a838-bccf-445d-8b67-d6ddfb64763e service nova] Acquired lock "refresh_cache-c884a374-ffb8-48db-97bb-d64a687694d5" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1974.007567] env[62519]: DEBUG nova.network.neutron [req-8db7c895-bec2-4447-aff5-63760a8402dc req-74a1a838-bccf-445d-8b67-d6ddfb64763e service nova] [instance: c884a374-ffb8-48db-97bb-d64a687694d5] Refreshing network info cache for port 0f1a716e-2238-498b-9d31-8516cb0d084d {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1974.009624] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b56e4276-717f-4b2e-b3bb-23e250c92a77 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1974.027307] env[62519]: DEBUG nova.virt.hardware [None req-0ff2d8d0-26e0-4f5f-a084-e3315728df72 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T07:56:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=,min_ram=,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1974.027541] env[62519]: DEBUG nova.virt.hardware [None req-0ff2d8d0-26e0-4f5f-a084-e3315728df72 
tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Flavor limits 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1974.027698] env[62519]: DEBUG nova.virt.hardware [None req-0ff2d8d0-26e0-4f5f-a084-e3315728df72 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Image limits 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1974.027877] env[62519]: DEBUG nova.virt.hardware [None req-0ff2d8d0-26e0-4f5f-a084-e3315728df72 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Flavor pref 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1974.028026] env[62519]: DEBUG nova.virt.hardware [None req-0ff2d8d0-26e0-4f5f-a084-e3315728df72 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Image pref 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1974.028177] env[62519]: DEBUG nova.virt.hardware [None req-0ff2d8d0-26e0-4f5f-a084-e3315728df72 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1974.028382] env[62519]: DEBUG nova.virt.hardware [None req-0ff2d8d0-26e0-4f5f-a084-e3315728df72 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1974.028555] env[62519]: DEBUG nova.virt.hardware [None req-0ff2d8d0-26e0-4f5f-a084-e3315728df72 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62519) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1974.028729] env[62519]: DEBUG nova.virt.hardware [None req-0ff2d8d0-26e0-4f5f-a084-e3315728df72 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Got 1 possible topologies {{(pid=62519) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1974.028891] env[62519]: DEBUG nova.virt.hardware [None req-0ff2d8d0-26e0-4f5f-a084-e3315728df72 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1974.029073] env[62519]: DEBUG nova.virt.hardware [None req-0ff2d8d0-26e0-4f5f-a084-e3315728df72 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1974.035298] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-0ff2d8d0-26e0-4f5f-a084-e3315728df72 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] [instance: 
c884a374-ffb8-48db-97bb-d64a687694d5] Reconfiguring VM to attach interface {{(pid=62519) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1923}} [ 1974.036151] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-eef70a72-30ce-4f9d-98d6-aa713f3c7c91 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1974.054468] env[62519]: DEBUG oslo_vmware.api [None req-0ff2d8d0-26e0-4f5f-a084-e3315728df72 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Waiting for the task: (returnval){ [ 1974.054468] env[62519]: value = "task-1803396" [ 1974.054468] env[62519]: _type = "Task" [ 1974.054468] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1974.062823] env[62519]: DEBUG oslo_vmware.api [None req-0ff2d8d0-26e0-4f5f-a084-e3315728df72 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Task: {'id': task-1803396, 'name': ReconfigVM_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1974.143243] env[62519]: DEBUG nova.network.neutron [-] [instance: 618a1db6-4056-4380-b5df-395ac14165a7] Updating instance_info_cache with network_info: [] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1974.352722] env[62519]: DEBUG oslo_vmware.api [None req-f1afa4a3-30ce-45e9-b42e-23d158fc3355 tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Task: {'id': task-1803395, 'name': ReconfigVM_Task, 'duration_secs': 0.143928} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1974.353055] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-f1afa4a3-30ce-45e9-b42e-23d158fc3355 tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] [instance: 8e77653a-2e04-4ed7-a419-289bd4b899d7] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-373866', 'volume_id': '821ba5a3-2f09-4446-beaf-b303397b65cb', 'name': 'volume-821ba5a3-2f09-4446-beaf-b303397b65cb', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '8e77653a-2e04-4ed7-a419-289bd4b899d7', 'attached_at': '', 'detached_at': '', 'volume_id': '821ba5a3-2f09-4446-beaf-b303397b65cb', 'serial': '821ba5a3-2f09-4446-beaf-b303397b65cb'} {{(pid=62519) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1974.568192] env[62519]: DEBUG oslo_vmware.api [None req-0ff2d8d0-26e0-4f5f-a084-e3315728df72 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Task: {'id': task-1803396, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1974.647067] env[62519]: INFO nova.compute.manager [-] [instance: 618a1db6-4056-4380-b5df-395ac14165a7] Took 1.72 seconds to deallocate network for instance. 
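Editor's note: many of the records above follow the same shape: a vSphere task is created (ReconfigVM_Task, PowerOffVM_Task, DeleteDatastoreFile_Task), the API layer logs "Waiting for the task", and _poll_task reports progress until "completed successfully". The sketch below is a reduced, stand-alone version of that polling loop; it is illustrative rather than oslo.vmware's actual implementation, and fetch_task_state is a hypothetical stand-in for the PropertyCollector round-trips visible in the log.

# Simplified polling loop in the spirit of the wait_for_task / _poll_task lines above.
# Assumption: fetch_task_state() replaces the real vCenter PropertyCollector queries; the
# state names mirror vSphere TaskInfo.state values, but nothing here talks to a vCenter.
import time

class TaskFailed(Exception):
    pass

def wait_for_task(task_id, fetch_task_state, poll_interval=0.5, timeout=300.0):
    """Poll a task until it reaches 'success' or 'error', echoing the log's progress lines."""
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        state, progress, error = fetch_task_state(task_id)
        if state == "success":
            return
        if state == "error":
            raise TaskFailed(f"Task {task_id} failed: {error}")
        print(f"Task: {{'id': {task_id!r}}} progress is {progress}%.")
        time.sleep(poll_interval)
    raise TimeoutError(f"Task {task_id} did not complete within {timeout}s")

if __name__ == "__main__":
    # Fake task that finishes on the third poll.
    states = iter([("running", 5, None), ("running", 60, None), ("success", 100, None)])
    wait_for_task("task-1803396", lambda _task_id: next(states), poll_interval=0.01)

The driver blocks on these tasks on purpose: an attach, detach, or reconfigure is not considered done (and locks such as the interface or volume lock above are not released) until vCenter reports the task as successful.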
[ 1974.769152] env[62519]: DEBUG nova.network.neutron [req-8db7c895-bec2-4447-aff5-63760a8402dc req-74a1a838-bccf-445d-8b67-d6ddfb64763e service nova] [instance: c884a374-ffb8-48db-97bb-d64a687694d5] Updated VIF entry in instance network info cache for port 0f1a716e-2238-498b-9d31-8516cb0d084d. {{(pid=62519) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1974.769649] env[62519]: DEBUG nova.network.neutron [req-8db7c895-bec2-4447-aff5-63760a8402dc req-74a1a838-bccf-445d-8b67-d6ddfb64763e service nova] [instance: c884a374-ffb8-48db-97bb-d64a687694d5] Updating instance_info_cache with network_info: [{"id": "5a25cf20-af43-4653-8729-93e5e73c5891", "address": "fa:16:3e:9b:6d:c4", "network": {"id": "3996e7f6-f093-4152-af91-6fb77f32a1c5", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-2013009403-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.177", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "63a46158057949478e5c79fbe0d4d5d4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3093647a-bab7-4562-ada0-428725e8c0fc", "external-id": "nsx-vlan-transportzone-660", "segmentation_id": 660, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5a25cf20-af", "ovs_interfaceid": "5a25cf20-af43-4653-8729-93e5e73c5891", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "0f1a716e-2238-498b-9d31-8516cb0d084d", "address": "fa:16:3e:6b:ae:3d", "network": {"id": "3996e7f6-f093-4152-af91-6fb77f32a1c5", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-2013009403-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "63a46158057949478e5c79fbe0d4d5d4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3093647a-bab7-4562-ada0-428725e8c0fc", "external-id": "nsx-vlan-transportzone-660", "segmentation_id": 660, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0f1a716e-22", "ovs_interfaceid": "0f1a716e-2238-498b-9d31-8516cb0d084d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1974.904330] env[62519]: DEBUG nova.objects.instance [None req-f1afa4a3-30ce-45e9-b42e-23d158fc3355 tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Lazy-loading 'flavor' on Instance uuid 8e77653a-2e04-4ed7-a419-289bd4b899d7 {{(pid=62519) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1975.066865] env[62519]: 
DEBUG oslo_vmware.api [None req-0ff2d8d0-26e0-4f5f-a084-e3315728df72 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Task: {'id': task-1803396, 'name': ReconfigVM_Task, 'duration_secs': 0.694254} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1975.067386] env[62519]: DEBUG oslo_concurrency.lockutils [None req-0ff2d8d0-26e0-4f5f-a084-e3315728df72 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Releasing lock "c884a374-ffb8-48db-97bb-d64a687694d5" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1975.067600] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-0ff2d8d0-26e0-4f5f-a084-e3315728df72 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] [instance: c884a374-ffb8-48db-97bb-d64a687694d5] Reconfigured VM to attach interface {{(pid=62519) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1937}} [ 1975.153517] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2b52c467-6173-43dc-9275-6f8ef384918e tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1975.153802] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2b52c467-6173-43dc-9275-6f8ef384918e tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1975.154024] env[62519]: DEBUG nova.objects.instance [None req-2b52c467-6173-43dc-9275-6f8ef384918e tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Lazy-loading 'resources' on Instance uuid 618a1db6-4056-4380-b5df-395ac14165a7 {{(pid=62519) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1975.273242] env[62519]: DEBUG oslo_concurrency.lockutils [req-8db7c895-bec2-4447-aff5-63760a8402dc req-74a1a838-bccf-445d-8b67-d6ddfb64763e service nova] Releasing lock "refresh_cache-c884a374-ffb8-48db-97bb-d64a687694d5" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1975.273624] env[62519]: DEBUG nova.compute.manager [req-8db7c895-bec2-4447-aff5-63760a8402dc req-74a1a838-bccf-445d-8b67-d6ddfb64763e service nova] [instance: 618a1db6-4056-4380-b5df-395ac14165a7] Received event network-vif-deleted-872d6b35-c4a4-4975-8757-3c87471ba5df {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1975.273668] env[62519]: INFO nova.compute.manager [req-8db7c895-bec2-4447-aff5-63760a8402dc req-74a1a838-bccf-445d-8b67-d6ddfb64763e service nova] [instance: 618a1db6-4056-4380-b5df-395ac14165a7] Neutron deleted interface 872d6b35-c4a4-4975-8757-3c87471ba5df; detaching it from the instance and deleting it from the info cache [ 1975.273832] env[62519]: DEBUG nova.network.neutron [req-8db7c895-bec2-4447-aff5-63760a8402dc req-74a1a838-bccf-445d-8b67-d6ddfb64763e service nova] [instance: 618a1db6-4056-4380-b5df-395ac14165a7] Updating 
instance_info_cache with network_info: [] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1975.577809] env[62519]: DEBUG oslo_concurrency.lockutils [None req-0ff2d8d0-26e0-4f5f-a084-e3315728df72 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Lock "interface-c884a374-ffb8-48db-97bb-d64a687694d5-None" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 6.586s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1975.768327] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-088cdadf-e286-4037-bc42-a3cdc83fc524 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1975.776758] env[62519]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-761cbcb7-90b5-456f-ac84-7d9029740721 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1975.781933] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd9027d5-2dff-4db1-acc1-67840941c295 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1975.790018] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2064ba75-910c-446d-8fd6-b15e0cc433f1 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1975.827389] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-101715d8-6874-49c2-a86f-cb133dc22893 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1975.835717] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ddeb0770-5ab9-4495-a254-a23a3ba5c9a2 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1975.848490] env[62519]: DEBUG nova.compute.manager [req-8db7c895-bec2-4447-aff5-63760a8402dc req-74a1a838-bccf-445d-8b67-d6ddfb64763e service nova] [instance: 618a1db6-4056-4380-b5df-395ac14165a7] Detach interface failed, port_id=872d6b35-c4a4-4975-8757-3c87471ba5df, reason: Instance 618a1db6-4056-4380-b5df-395ac14165a7 could not be found. 
{{(pid=62519) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11455}} [ 1975.859208] env[62519]: DEBUG nova.compute.provider_tree [None req-2b52c467-6173-43dc-9275-6f8ef384918e tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Updating inventory in ProviderTree for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 159, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1975.913595] env[62519]: DEBUG oslo_concurrency.lockutils [None req-f1afa4a3-30ce-45e9-b42e-23d158fc3355 tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Lock "8e77653a-2e04-4ed7-a419-289bd4b899d7" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.276s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1976.381386] env[62519]: DEBUG oslo_concurrency.lockutils [None req-ee7e63e1-8123-4346-896d-38126246507f tempest-ServerMetadataNegativeTestJSON-1024005537 tempest-ServerMetadataNegativeTestJSON-1024005537-project-member] Acquiring lock "475a4dce-fa1c-4951-b87c-5b5ba6a0f1b0" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1976.381623] env[62519]: DEBUG oslo_concurrency.lockutils [None req-ee7e63e1-8123-4346-896d-38126246507f tempest-ServerMetadataNegativeTestJSON-1024005537 tempest-ServerMetadataNegativeTestJSON-1024005537-project-member] Lock "475a4dce-fa1c-4951-b87c-5b5ba6a0f1b0" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1976.383473] env[62519]: ERROR nova.scheduler.client.report [None req-2b52c467-6173-43dc-9275-6f8ef384918e tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] [req-bf13becd-ca9f-463a-b589-02026560b048] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 159, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID f8ca0d98-9158-4b85-ae0e-b106f966dd44. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-bf13becd-ca9f-463a-b589-02026560b048"}]} [ 1976.399976] env[62519]: DEBUG nova.scheduler.client.report [None req-2b52c467-6173-43dc-9275-6f8ef384918e tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Refreshing inventories for resource provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 {{(pid=62519) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 1976.414213] env[62519]: DEBUG nova.scheduler.client.report [None req-2b52c467-6173-43dc-9275-6f8ef384918e tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Updating ProviderTree inventory for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 1976.414438] env[62519]: DEBUG nova.compute.provider_tree [None req-2b52c467-6173-43dc-9275-6f8ef384918e tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Updating inventory in ProviderTree for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1976.429030] env[62519]: DEBUG nova.scheduler.client.report [None req-2b52c467-6173-43dc-9275-6f8ef384918e tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Refreshing aggregate associations for resource provider f8ca0d98-9158-4b85-ae0e-b106f966dd44, aggregates: None {{(pid=62519) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 1976.452811] env[62519]: DEBUG nova.scheduler.client.report [None req-2b52c467-6173-43dc-9275-6f8ef384918e tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Refreshing trait associations for resource provider f8ca0d98-9158-4b85-ae0e-b106f966dd44, traits: COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NODE,COMPUTE_SAME_HOST_COLD_MIGRATE,HW_ARCH_X86_64,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_VMDK {{(pid=62519) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 1976.562568] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5cac75d1-a44e-4fd4-b38b-9d370140759c {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1976.570905] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39546465-df76-4370-9a01-7bf21cdf9cd7 {{(pid=62519) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1976.606531] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a356b0d7-520c-46e7-8a00-2995065d5499 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1976.616098] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b095f0c-76ee-4d46-992b-e844e05babd2 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1976.630688] env[62519]: DEBUG nova.compute.provider_tree [None req-2b52c467-6173-43dc-9275-6f8ef384918e tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Updating inventory in ProviderTree for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 159, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1976.887268] env[62519]: DEBUG nova.compute.manager [None req-ee7e63e1-8123-4346-896d-38126246507f tempest-ServerMetadataNegativeTestJSON-1024005537 tempest-ServerMetadataNegativeTestJSON-1024005537-project-member] [instance: 475a4dce-fa1c-4951-b87c-5b5ba6a0f1b0] Starting instance... {{(pid=62519) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2441}} [ 1977.042049] env[62519]: DEBUG oslo_concurrency.lockutils [None req-15266b34-9481-44be-b442-004cf4cd79bf tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Acquiring lock "8e77653a-2e04-4ed7-a419-289bd4b899d7" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1977.042354] env[62519]: DEBUG oslo_concurrency.lockutils [None req-15266b34-9481-44be-b442-004cf4cd79bf tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Lock "8e77653a-2e04-4ed7-a419-289bd4b899d7" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1977.042566] env[62519]: DEBUG oslo_concurrency.lockutils [None req-15266b34-9481-44be-b442-004cf4cd79bf tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Acquiring lock "8e77653a-2e04-4ed7-a419-289bd4b899d7-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1977.042755] env[62519]: DEBUG oslo_concurrency.lockutils [None req-15266b34-9481-44be-b442-004cf4cd79bf tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Lock "8e77653a-2e04-4ed7-a419-289bd4b899d7-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62519) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1977.042926] env[62519]: DEBUG oslo_concurrency.lockutils [None req-15266b34-9481-44be-b442-004cf4cd79bf tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Lock "8e77653a-2e04-4ed7-a419-289bd4b899d7-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1977.045049] env[62519]: INFO nova.compute.manager [None req-15266b34-9481-44be-b442-004cf4cd79bf tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] [instance: 8e77653a-2e04-4ed7-a419-289bd4b899d7] Terminating instance [ 1977.161092] env[62519]: DEBUG nova.scheduler.client.report [None req-2b52c467-6173-43dc-9275-6f8ef384918e tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Updated inventory for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 with generation 163 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 159, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 1977.161365] env[62519]: DEBUG nova.compute.provider_tree [None req-2b52c467-6173-43dc-9275-6f8ef384918e tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Updating resource provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 generation from 163 to 164 during operation: update_inventory {{(pid=62519) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1977.161548] env[62519]: DEBUG nova.compute.provider_tree [None req-2b52c467-6173-43dc-9275-6f8ef384918e tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Updating inventory in ProviderTree for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 159, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1977.407535] env[62519]: DEBUG oslo_concurrency.lockutils [None req-ee7e63e1-8123-4346-896d-38126246507f tempest-ServerMetadataNegativeTestJSON-1024005537 tempest-ServerMetadataNegativeTestJSON-1024005537-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1977.548929] env[62519]: DEBUG nova.compute.manager [None req-15266b34-9481-44be-b442-004cf4cd79bf tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] [instance: 8e77653a-2e04-4ed7-a419-289bd4b899d7] Start destroying the instance on the hypervisor. 
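
The report-client records above show Placement rejecting an inventory write with 409 "placement.concurrent_update" (req-bf13becd-...) because the provider generation moved underneath the writer, after which the client refreshes inventories, aggregates, and traits and retries, bumping provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 from generation 163 to 164. A minimal sketch of that optimistic-concurrency loop against the Placement inventories endpoint follows; the endpoint URL, token, microversion header, and use of requests are assumptions for illustration, not Nova's SchedulerReportClient.

import requests

PLACEMENT_URL = "http://placement.example:8778"            # assumed endpoint
HEADERS = {"X-Auth-Token": "<token>",                      # assumed auth
           "OpenStack-API-Version": "placement 1.26"}      # assumed microversion

def set_inventory(rp_uuid, new_inventories, max_retries=3):
    url = f"{PLACEMENT_URL}/resource_providers/{rp_uuid}/inventories"
    for _ in range(max_retries):
        # Read the current provider generation so the write is conditional on it.
        current = requests.get(url, headers=HEADERS).json()
        payload = {
            "resource_provider_generation": current["resource_provider_generation"],
            "inventories": new_inventories,
        }
        resp = requests.put(url, json=payload, headers=HEADERS)
        if resp.status_code != 409:
            resp.raise_for_status()
            return resp.json()
        # 409 placement.concurrent_update: another writer bumped the generation;
        # re-read and retry, mirroring the refresh-and-retry sequence in the log.
    raise RuntimeError(f"generation conflict persisted for provider {rp_uuid}")

The design point visible in the log is that no lock is held across services: concurrent writers are detected by the generation check and resolved by refreshing and retrying.
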
{{(pid=62519) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3166}} [ 1977.549197] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-15266b34-9481-44be-b442-004cf4cd79bf tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] [instance: 8e77653a-2e04-4ed7-a419-289bd4b899d7] Destroying instance {{(pid=62519) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1977.550156] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2677df8-5882-4ffb-a1ac-41377590aa75 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1977.558259] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-15266b34-9481-44be-b442-004cf4cd79bf tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] [instance: 8e77653a-2e04-4ed7-a419-289bd4b899d7] Powering off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1977.558508] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ceb12f64-bb73-401a-9815-6de1ea3f534d {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1977.565260] env[62519]: DEBUG oslo_vmware.api [None req-15266b34-9481-44be-b442-004cf4cd79bf tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Waiting for the task: (returnval){ [ 1977.565260] env[62519]: value = "task-1803397" [ 1977.565260] env[62519]: _type = "Task" [ 1977.565260] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1977.573846] env[62519]: DEBUG oslo_vmware.api [None req-15266b34-9481-44be-b442-004cf4cd79bf tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Task: {'id': task-1803397, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1977.667132] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2b52c467-6173-43dc-9275-6f8ef384918e tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.513s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1977.669770] env[62519]: DEBUG oslo_concurrency.lockutils [None req-ee7e63e1-8123-4346-896d-38126246507f tempest-ServerMetadataNegativeTestJSON-1024005537 tempest-ServerMetadataNegativeTestJSON-1024005537-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.262s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1977.671530] env[62519]: INFO nova.compute.claims [None req-ee7e63e1-8123-4346-896d-38126246507f tempest-ServerMetadataNegativeTestJSON-1024005537 tempest-ServerMetadataNegativeTestJSON-1024005537-project-member] [instance: 475a4dce-fa1c-4951-b87c-5b5ba6a0f1b0] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1977.695152] env[62519]: INFO nova.scheduler.client.report [None req-2b52c467-6173-43dc-9275-6f8ef384918e tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Deleted allocations for instance 618a1db6-4056-4380-b5df-395ac14165a7 [ 1978.077357] env[62519]: DEBUG oslo_vmware.api [None req-15266b34-9481-44be-b442-004cf4cd79bf tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Task: {'id': task-1803397, 'name': PowerOffVM_Task, 'duration_secs': 0.202027} completed successfully. 
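
The PowerOffVM_Task records (task-1803397) show the submit-then-poll pattern: oslo.vmware invokes the asynchronous vSphere task and _poll_task reports progress until the task completes roughly 0.2 s later. A generic sketch of that wait loop is below; fetch_task_info is an assumed caller-supplied callable returning (state, progress, error), and this is not oslo.vmware's implementation.

import time

def wait_for_task(fetch_task_info, poll_interval=0.5, timeout=300):
    """Poll an asynchronous task until it reaches a terminal state."""
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        state, progress, error = fetch_task_info()
        if state == "success":
            return
        if state == "error":
            raise RuntimeError(f"task failed: {error}")
        # queued/running: report progress and poll again, like _poll_task above.
        print(f"task progress is {progress}%")
        time.sleep(poll_interval)
    raise TimeoutError("task did not complete in time")

A caller would pass a closure that reads the task's current state from the hypervisor and maps it to "success", "error", or a running state with a progress percentage.
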
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1978.077712] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-15266b34-9481-44be-b442-004cf4cd79bf tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] [instance: 8e77653a-2e04-4ed7-a419-289bd4b899d7] Powered off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1978.077812] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-15266b34-9481-44be-b442-004cf4cd79bf tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] [instance: 8e77653a-2e04-4ed7-a419-289bd4b899d7] Unregistering the VM {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1978.078080] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b1ffb4dd-9bb8-40af-b768-49ee4f9d546b {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1978.157076] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-15266b34-9481-44be-b442-004cf4cd79bf tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] [instance: 8e77653a-2e04-4ed7-a419-289bd4b899d7] Unregistered the VM {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1978.157076] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-15266b34-9481-44be-b442-004cf4cd79bf tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] [instance: 8e77653a-2e04-4ed7-a419-289bd4b899d7] Deleting contents of the VM from datastore datastore1 {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1978.157076] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-15266b34-9481-44be-b442-004cf4cd79bf tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Deleting the datastore file [datastore1] 8e77653a-2e04-4ed7-a419-289bd4b899d7 {{(pid=62519) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1978.157336] env[62519]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-2fabe1ad-0140-4c1a-9983-eb8365808ed6 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1978.164786] env[62519]: DEBUG oslo_vmware.api [None req-15266b34-9481-44be-b442-004cf4cd79bf tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Waiting for the task: (returnval){ [ 1978.164786] env[62519]: value = "task-1803399" [ 1978.164786] env[62519]: _type = "Task" [ 1978.164786] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1978.173026] env[62519]: DEBUG oslo_vmware.api [None req-15266b34-9481-44be-b442-004cf4cd79bf tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Task: {'id': task-1803399, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1978.205763] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2b52c467-6173-43dc-9275-6f8ef384918e tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Lock "618a1db6-4056-4380-b5df-395ac14165a7" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.922s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1978.518325] env[62519]: DEBUG oslo_concurrency.lockutils [None req-80de45e2-b315-43ac-8ff1-6a4b5433dfa5 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Acquiring lock "interface-c884a374-ffb8-48db-97bb-d64a687694d5-ebf65ce3-5f98-44e5-95de-e607cb9b75e3" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1978.518577] env[62519]: DEBUG oslo_concurrency.lockutils [None req-80de45e2-b315-43ac-8ff1-6a4b5433dfa5 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Lock "interface-c884a374-ffb8-48db-97bb-d64a687694d5-ebf65ce3-5f98-44e5-95de-e607cb9b75e3" acquired by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1978.518942] env[62519]: DEBUG nova.objects.instance [None req-80de45e2-b315-43ac-8ff1-6a4b5433dfa5 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Lazy-loading 'flavor' on Instance uuid c884a374-ffb8-48db-97bb-d64a687694d5 {{(pid=62519) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1978.676041] env[62519]: DEBUG oslo_vmware.api [None req-15266b34-9481-44be-b442-004cf4cd79bf tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Task: {'id': task-1803399, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.142195} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1978.676219] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-15266b34-9481-44be-b442-004cf4cd79bf tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Deleted the datastore file {{(pid=62519) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1978.676334] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-15266b34-9481-44be-b442-004cf4cd79bf tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] [instance: 8e77653a-2e04-4ed7-a419-289bd4b899d7] Deleted contents of the VM from datastore datastore1 {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1978.676469] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-15266b34-9481-44be-b442-004cf4cd79bf tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] [instance: 8e77653a-2e04-4ed7-a419-289bd4b899d7] Instance destroyed {{(pid=62519) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1978.676640] env[62519]: INFO nova.compute.manager [None req-15266b34-9481-44be-b442-004cf4cd79bf tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] [instance: 8e77653a-2e04-4ed7-a419-289bd4b899d7] Took 1.13 seconds to destroy the instance on the hypervisor. [ 1978.676881] env[62519]: DEBUG oslo.service.loopingcall [None req-15266b34-9481-44be-b442-004cf4cd79bf tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
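
Once the VM is destroyed on the hypervisor, the manager wraps network deallocation in a looping-call wait ("Waiting for function ..._deallocate_network_with_retries to return") so transient failures talking to Neutron are retried before the instance is reported gone. The decorator below is a hypothetical standalone equivalent of that retry wrapper, not oslo.service's API.

import functools
import time

def retry(max_attempts=3, first_delay=1.0, backoff=2.0, retry_on=(Exception,)):
    """Hypothetical retry decorator illustrating the retried-deallocation pattern."""
    def decorator(func):
        @functools.wraps(func)
        def wrapper(*args, **kwargs):
            delay = first_delay
            for attempt in range(1, max_attempts + 1):
                try:
                    return func(*args, **kwargs)
                except retry_on:
                    if attempt == max_attempts:
                        raise
                    time.sleep(delay)          # back off before the next attempt
                    delay *= backoff
        return wrapper
    return decorator

@retry(max_attempts=3, retry_on=(ConnectionError,))
def deallocate_network(instance_uuid):
    # Stand-in for the Neutron call made by deallocate_for_instance() above.
    print(f"deallocating ports for instance {instance_uuid}")
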
{{(pid=62519) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1978.677087] env[62519]: DEBUG nova.compute.manager [-] [instance: 8e77653a-2e04-4ed7-a419-289bd4b899d7] Deallocating network for instance {{(pid=62519) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2297}} [ 1978.677182] env[62519]: DEBUG nova.network.neutron [-] [instance: 8e77653a-2e04-4ed7-a419-289bd4b899d7] deallocate_for_instance() {{(pid=62519) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1978.774946] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7acf7335-8410-41ad-aa7e-ff32f109e0e0 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1978.783088] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c976526-a8ce-4e95-8932-93dbfece791f {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1978.816067] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d497051a-09d2-49e1-99e5-a5a934b48839 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1978.824603] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78b7ce81-4707-4f7a-a676-eb2a35ae3a40 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1978.842045] env[62519]: DEBUG nova.compute.provider_tree [None req-ee7e63e1-8123-4346-896d-38126246507f tempest-ServerMetadataNegativeTestJSON-1024005537 tempest-ServerMetadataNegativeTestJSON-1024005537-project-member] Inventory has not changed in ProviderTree for provider: f8ca0d98-9158-4b85-ae0e-b106f966dd44 {{(pid=62519) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1979.234091] env[62519]: DEBUG oslo_service.periodic_task [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62519) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1979.234394] env[62519]: DEBUG oslo_service.periodic_task [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62519) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1979.234504] env[62519]: DEBUG nova.compute.manager [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Starting heal instance info cache {{(pid=62519) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10418}} [ 1979.238846] env[62519]: DEBUG nova.objects.instance [None req-80de45e2-b315-43ac-8ff1-6a4b5433dfa5 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Lazy-loading 'pci_requests' on Instance uuid c884a374-ffb8-48db-97bb-d64a687694d5 {{(pid=62519) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1979.346614] env[62519]: DEBUG nova.scheduler.client.report [None req-ee7e63e1-8123-4346-896d-38126246507f tempest-ServerMetadataNegativeTestJSON-1024005537 tempest-ServerMetadataNegativeTestJSON-1024005537-project-member] Inventory has not changed for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 based on inventory 
data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 159, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1979.441571] env[62519]: DEBUG nova.compute.manager [req-71b28404-ca98-4189-a4d4-a8c45fbb182d req-aee2584a-c0b7-4999-b907-2c60d36e343a service nova] [instance: 8e77653a-2e04-4ed7-a419-289bd4b899d7] Received event network-vif-deleted-84761f23-74d8-4c26-8600-641c612981dc {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1979.441779] env[62519]: INFO nova.compute.manager [req-71b28404-ca98-4189-a4d4-a8c45fbb182d req-aee2584a-c0b7-4999-b907-2c60d36e343a service nova] [instance: 8e77653a-2e04-4ed7-a419-289bd4b899d7] Neutron deleted interface 84761f23-74d8-4c26-8600-641c612981dc; detaching it from the instance and deleting it from the info cache [ 1979.441949] env[62519]: DEBUG nova.network.neutron [req-71b28404-ca98-4189-a4d4-a8c45fbb182d req-aee2584a-c0b7-4999-b907-2c60d36e343a service nova] [instance: 8e77653a-2e04-4ed7-a419-289bd4b899d7] Updating instance_info_cache with network_info: [] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1979.741579] env[62519]: DEBUG nova.objects.base [None req-80de45e2-b315-43ac-8ff1-6a4b5433dfa5 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Object Instance lazy-loaded attributes: flavor,pci_requests {{(pid=62519) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1979.741817] env[62519]: DEBUG nova.network.neutron [None req-80de45e2-b315-43ac-8ff1-6a4b5433dfa5 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] [instance: c884a374-ffb8-48db-97bb-d64a687694d5] allocate_for_instance() {{(pid=62519) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1979.807582] env[62519]: DEBUG nova.policy [None req-80de45e2-b315-43ac-8ff1-6a4b5433dfa5 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '7eca5c7b079d4785941d68d7c51df5e0', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '63a46158057949478e5c79fbe0d4d5d4', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62519) authorize /opt/stack/nova/nova/policy.py:192}} [ 1979.812975] env[62519]: DEBUG nova.network.neutron [-] [instance: 8e77653a-2e04-4ed7-a419-289bd4b899d7] Updating instance_info_cache with network_info: [] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1979.852852] env[62519]: DEBUG oslo_concurrency.lockutils [None req-ee7e63e1-8123-4346-896d-38126246507f tempest-ServerMetadataNegativeTestJSON-1024005537 tempest-ServerMetadataNegativeTestJSON-1024005537-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.183s {{(pid=62519) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1979.853395] env[62519]: DEBUG nova.compute.manager [None req-ee7e63e1-8123-4346-896d-38126246507f tempest-ServerMetadataNegativeTestJSON-1024005537 tempest-ServerMetadataNegativeTestJSON-1024005537-project-member] [instance: 475a4dce-fa1c-4951-b87c-5b5ba6a0f1b0] Start building networks asynchronously for instance. {{(pid=62519) _build_resources /opt/stack/nova/nova/compute/manager.py:2838}} [ 1979.945173] env[62519]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-68d36d69-83e8-4b8f-b301-46d2e6f9604c {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1979.958050] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ce817f7-00a8-407b-a226-96655f45688c {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1979.986535] env[62519]: DEBUG nova.compute.manager [req-71b28404-ca98-4189-a4d4-a8c45fbb182d req-aee2584a-c0b7-4999-b907-2c60d36e343a service nova] [instance: 8e77653a-2e04-4ed7-a419-289bd4b899d7] Detach interface failed, port_id=84761f23-74d8-4c26-8600-641c612981dc, reason: Instance 8e77653a-2e04-4ed7-a419-289bd4b899d7 could not be found. {{(pid=62519) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11455}} [ 1980.315080] env[62519]: INFO nova.compute.manager [-] [instance: 8e77653a-2e04-4ed7-a419-289bd4b899d7] Took 1.64 seconds to deallocate network for instance. [ 1980.358119] env[62519]: DEBUG nova.compute.utils [None req-ee7e63e1-8123-4346-896d-38126246507f tempest-ServerMetadataNegativeTestJSON-1024005537 tempest-ServerMetadataNegativeTestJSON-1024005537-project-member] Using /dev/sd instead of None {{(pid=62519) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1980.359553] env[62519]: DEBUG nova.compute.manager [None req-ee7e63e1-8123-4346-896d-38126246507f tempest-ServerMetadataNegativeTestJSON-1024005537 tempest-ServerMetadataNegativeTestJSON-1024005537-project-member] [instance: 475a4dce-fa1c-4951-b87c-5b5ba6a0f1b0] Allocating IP information in the background. 
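
The "compute_resources" lock records above (update_usage held 2.513 s, instance_claim waited 0.262 s before "Claim successful on node domain-c8...") show every claim and usage update being serialized on one shared lock. A minimal sketch of that pattern using oslo.concurrency's synchronized decorator; the usage bookkeeping is an illustrative stand-in for Nova's ResourceTracker, with the 48-VCPU and 196590-MB limits taken from the inventory reported above.

from oslo_concurrency import lockutils

_usage = {"vcpus": 0, "memory_mb": 0}

@lockutils.synchronized("compute_resources")
def instance_claim(vcpus, memory_mb, limit_vcpus=48, limit_memory_mb=196590):
    # Check and update usage atomically with respect to other claim/update paths.
    if _usage["vcpus"] + vcpus > limit_vcpus:
        raise RuntimeError("insufficient VCPU for claim")
    if _usage["memory_mb"] + memory_mb > limit_memory_mb:
        raise RuntimeError("insufficient memory for claim")
    _usage["vcpus"] += vcpus
    _usage["memory_mb"] += memory_mb
    return dict(_usage)
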
{{(pid=62519) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1989}} [ 1980.359722] env[62519]: DEBUG nova.network.neutron [None req-ee7e63e1-8123-4346-896d-38126246507f tempest-ServerMetadataNegativeTestJSON-1024005537 tempest-ServerMetadataNegativeTestJSON-1024005537-project-member] [instance: 475a4dce-fa1c-4951-b87c-5b5ba6a0f1b0] allocate_for_instance() {{(pid=62519) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1980.399320] env[62519]: DEBUG nova.policy [None req-ee7e63e1-8123-4346-896d-38126246507f tempest-ServerMetadataNegativeTestJSON-1024005537 tempest-ServerMetadataNegativeTestJSON-1024005537-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '6f710a5f56d8403ca67a2030e2b4266e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e4a74ff4a5334009b5a8d0fe031a4cf2', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62519) authorize /opt/stack/nova/nova/policy.py:192}} [ 1980.641923] env[62519]: DEBUG nova.compute.manager [None req-99c36ef8-ebfa-4cae-b0fe-e8aa228c0e3f tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] [instance: 8b20b91d-d4e7-4ec2-88ec-76b444a8d8a8] Stashing vm_state: active {{(pid=62519) _prep_resize /opt/stack/nova/nova/compute/manager.py:6081}} [ 1980.651097] env[62519]: DEBUG nova.network.neutron [None req-ee7e63e1-8123-4346-896d-38126246507f tempest-ServerMetadataNegativeTestJSON-1024005537 tempest-ServerMetadataNegativeTestJSON-1024005537-project-member] [instance: 475a4dce-fa1c-4951-b87c-5b5ba6a0f1b0] Successfully created port: e05c875b-eb82-4265-a67c-6c626a2365be {{(pid=62519) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1980.822318] env[62519]: DEBUG oslo_concurrency.lockutils [None req-15266b34-9481-44be-b442-004cf4cd79bf tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1980.822318] env[62519]: DEBUG oslo_concurrency.lockutils [None req-15266b34-9481-44be-b442-004cf4cd79bf tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1980.822318] env[62519]: DEBUG nova.objects.instance [None req-15266b34-9481-44be-b442-004cf4cd79bf tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Lazy-loading 'resources' on Instance uuid 8e77653a-2e04-4ed7-a419-289bd4b899d7 {{(pid=62519) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1980.862847] env[62519]: DEBUG nova.compute.manager [None req-ee7e63e1-8123-4346-896d-38126246507f tempest-ServerMetadataNegativeTestJSON-1024005537 tempest-ServerMetadataNegativeTestJSON-1024005537-project-member] [instance: 475a4dce-fa1c-4951-b87c-5b5ba6a0f1b0] Start building block device mappings for instance. 
{{(pid=62519) _build_resources /opt/stack/nova/nova/compute/manager.py:2873}} [ 1981.168302] env[62519]: DEBUG oslo_concurrency.lockutils [None req-99c36ef8-ebfa-4cae-b0fe-e8aa228c0e3f tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1981.275857] env[62519]: DEBUG nova.network.neutron [None req-80de45e2-b315-43ac-8ff1-6a4b5433dfa5 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] [instance: c884a374-ffb8-48db-97bb-d64a687694d5] Successfully updated port: ebf65ce3-5f98-44e5-95de-e607cb9b75e3 {{(pid=62519) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1981.445221] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb3dc876-2e68-401e-9f1b-95d623d34011 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1981.453955] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7572e9e4-cb5d-45df-a361-5b430de3f76a {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1981.486374] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34463b53-29c3-48dd-948c-8ec0147337ae {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1981.492542] env[62519]: DEBUG nova.compute.manager [req-41b3cfae-f926-48c0-b195-75686b000f19 req-7e84a167-3bfc-4227-9cf4-8f0094d37c0c service nova] [instance: c884a374-ffb8-48db-97bb-d64a687694d5] Received event network-vif-plugged-ebf65ce3-5f98-44e5-95de-e607cb9b75e3 {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1981.492676] env[62519]: DEBUG oslo_concurrency.lockutils [req-41b3cfae-f926-48c0-b195-75686b000f19 req-7e84a167-3bfc-4227-9cf4-8f0094d37c0c service nova] Acquiring lock "c884a374-ffb8-48db-97bb-d64a687694d5-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1981.492854] env[62519]: DEBUG oslo_concurrency.lockutils [req-41b3cfae-f926-48c0-b195-75686b000f19 req-7e84a167-3bfc-4227-9cf4-8f0094d37c0c service nova] Lock "c884a374-ffb8-48db-97bb-d64a687694d5-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1981.493034] env[62519]: DEBUG oslo_concurrency.lockutils [req-41b3cfae-f926-48c0-b195-75686b000f19 req-7e84a167-3bfc-4227-9cf4-8f0094d37c0c service nova] Lock "c884a374-ffb8-48db-97bb-d64a687694d5-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1981.493196] env[62519]: DEBUG nova.compute.manager [req-41b3cfae-f926-48c0-b195-75686b000f19 req-7e84a167-3bfc-4227-9cf4-8f0094d37c0c service nova] [instance: c884a374-ffb8-48db-97bb-d64a687694d5] No waiting events found dispatching 
network-vif-plugged-ebf65ce3-5f98-44e5-95de-e607cb9b75e3 {{(pid=62519) pop_instance_event /opt/stack/nova/nova/compute/manager.py:323}} [ 1981.493359] env[62519]: WARNING nova.compute.manager [req-41b3cfae-f926-48c0-b195-75686b000f19 req-7e84a167-3bfc-4227-9cf4-8f0094d37c0c service nova] [instance: c884a374-ffb8-48db-97bb-d64a687694d5] Received unexpected event network-vif-plugged-ebf65ce3-5f98-44e5-95de-e607cb9b75e3 for instance with vm_state active and task_state None. [ 1981.493513] env[62519]: DEBUG nova.compute.manager [req-41b3cfae-f926-48c0-b195-75686b000f19 req-7e84a167-3bfc-4227-9cf4-8f0094d37c0c service nova] [instance: c884a374-ffb8-48db-97bb-d64a687694d5] Received event network-changed-ebf65ce3-5f98-44e5-95de-e607cb9b75e3 {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1981.493661] env[62519]: DEBUG nova.compute.manager [req-41b3cfae-f926-48c0-b195-75686b000f19 req-7e84a167-3bfc-4227-9cf4-8f0094d37c0c service nova] [instance: c884a374-ffb8-48db-97bb-d64a687694d5] Refreshing instance network info cache due to event network-changed-ebf65ce3-5f98-44e5-95de-e607cb9b75e3. {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11628}} [ 1981.493839] env[62519]: DEBUG oslo_concurrency.lockutils [req-41b3cfae-f926-48c0-b195-75686b000f19 req-7e84a167-3bfc-4227-9cf4-8f0094d37c0c service nova] Acquiring lock "refresh_cache-c884a374-ffb8-48db-97bb-d64a687694d5" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1981.493971] env[62519]: DEBUG oslo_concurrency.lockutils [req-41b3cfae-f926-48c0-b195-75686b000f19 req-7e84a167-3bfc-4227-9cf4-8f0094d37c0c service nova] Acquired lock "refresh_cache-c884a374-ffb8-48db-97bb-d64a687694d5" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1981.494139] env[62519]: DEBUG nova.network.neutron [req-41b3cfae-f926-48c0-b195-75686b000f19 req-7e84a167-3bfc-4227-9cf4-8f0094d37c0c service nova] [instance: c884a374-ffb8-48db-97bb-d64a687694d5] Refreshing network info cache for port ebf65ce3-5f98-44e5-95de-e607cb9b75e3 {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1981.499027] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9426be6-fdd8-42cc-825b-081709312455 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1981.513754] env[62519]: DEBUG nova.compute.provider_tree [None req-15266b34-9481-44be-b442-004cf4cd79bf tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Inventory has not changed in ProviderTree for provider: f8ca0d98-9158-4b85-ae0e-b106f966dd44 {{(pid=62519) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1981.780427] env[62519]: DEBUG oslo_concurrency.lockutils [None req-80de45e2-b315-43ac-8ff1-6a4b5433dfa5 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Acquiring lock "refresh_cache-c884a374-ffb8-48db-97bb-d64a687694d5" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1981.874533] env[62519]: DEBUG nova.compute.manager [None req-ee7e63e1-8123-4346-896d-38126246507f tempest-ServerMetadataNegativeTestJSON-1024005537 tempest-ServerMetadataNegativeTestJSON-1024005537-project-member] [instance: 475a4dce-fa1c-4951-b87c-5b5ba6a0f1b0] Start 
spawning the instance on the hypervisor. {{(pid=62519) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2647}} [ 1981.902853] env[62519]: DEBUG nova.virt.hardware [None req-ee7e63e1-8123-4346-896d-38126246507f tempest-ServerMetadataNegativeTestJSON-1024005537 tempest-ServerMetadataNegativeTestJSON-1024005537-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T07:56:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T07:55:55Z,direct_url=,disk_format='vmdk',id=15793716-f1d9-4a86-9030-717adf498693,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4069337684d348e0a1ab4eb6a1a2b14d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T07:55:56Z,virtual_size=,visibility=), allow threads: False {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1981.903088] env[62519]: DEBUG nova.virt.hardware [None req-ee7e63e1-8123-4346-896d-38126246507f tempest-ServerMetadataNegativeTestJSON-1024005537 tempest-ServerMetadataNegativeTestJSON-1024005537-project-member] Flavor limits 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1981.903252] env[62519]: DEBUG nova.virt.hardware [None req-ee7e63e1-8123-4346-896d-38126246507f tempest-ServerMetadataNegativeTestJSON-1024005537 tempest-ServerMetadataNegativeTestJSON-1024005537-project-member] Image limits 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1981.903437] env[62519]: DEBUG nova.virt.hardware [None req-ee7e63e1-8123-4346-896d-38126246507f tempest-ServerMetadataNegativeTestJSON-1024005537 tempest-ServerMetadataNegativeTestJSON-1024005537-project-member] Flavor pref 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1981.903584] env[62519]: DEBUG nova.virt.hardware [None req-ee7e63e1-8123-4346-896d-38126246507f tempest-ServerMetadataNegativeTestJSON-1024005537 tempest-ServerMetadataNegativeTestJSON-1024005537-project-member] Image pref 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1981.903770] env[62519]: DEBUG nova.virt.hardware [None req-ee7e63e1-8123-4346-896d-38126246507f tempest-ServerMetadataNegativeTestJSON-1024005537 tempest-ServerMetadataNegativeTestJSON-1024005537-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1981.903989] env[62519]: DEBUG nova.virt.hardware [None req-ee7e63e1-8123-4346-896d-38126246507f tempest-ServerMetadataNegativeTestJSON-1024005537 tempest-ServerMetadataNegativeTestJSON-1024005537-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1981.904197] env[62519]: DEBUG nova.virt.hardware [None req-ee7e63e1-8123-4346-896d-38126246507f tempest-ServerMetadataNegativeTestJSON-1024005537 tempest-ServerMetadataNegativeTestJSON-1024005537-project-member] 
Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62519) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1981.904366] env[62519]: DEBUG nova.virt.hardware [None req-ee7e63e1-8123-4346-896d-38126246507f tempest-ServerMetadataNegativeTestJSON-1024005537 tempest-ServerMetadataNegativeTestJSON-1024005537-project-member] Got 1 possible topologies {{(pid=62519) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1981.904531] env[62519]: DEBUG nova.virt.hardware [None req-ee7e63e1-8123-4346-896d-38126246507f tempest-ServerMetadataNegativeTestJSON-1024005537 tempest-ServerMetadataNegativeTestJSON-1024005537-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1981.904706] env[62519]: DEBUG nova.virt.hardware [None req-ee7e63e1-8123-4346-896d-38126246507f tempest-ServerMetadataNegativeTestJSON-1024005537 tempest-ServerMetadataNegativeTestJSON-1024005537-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1981.905573] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9edd37ec-c75f-4fac-a5b7-0d506b666c20 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1981.915491] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1650023-bb01-4d9e-99d6-cdd435b29277 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1982.016763] env[62519]: DEBUG nova.scheduler.client.report [None req-15266b34-9481-44be-b442-004cf4cd79bf tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Inventory has not changed for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 159, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1982.420673] env[62519]: DEBUG nova.network.neutron [req-41b3cfae-f926-48c0-b195-75686b000f19 req-7e84a167-3bfc-4227-9cf4-8f0094d37c0c service nova] [instance: c884a374-ffb8-48db-97bb-d64a687694d5] Added VIF to instance network info cache for port ebf65ce3-5f98-44e5-95de-e607cb9b75e3. 
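
The event records for port ebf65ce3-5f98-44e5-95de-e607cb9b75e3 above show the external-event path: Neutron reports network-vif-plugged, the compute manager looks for a waiter registered for that event and, finding none because the instance is already active, logs it as unexpected and falls through to a network-info-cache refresh. Below is a simplified, hypothetical stand-in for that waiting-event bookkeeping (no per-instance events lock), not nova.compute.manager.InstanceEvents itself.

import threading

_waiting_events = {}       # (instance_uuid, event_name) -> threading.Event

def prepare_for_event(instance_uuid, event_name):
    """Register interest in an external event before triggering it."""
    ev = threading.Event()
    _waiting_events[(instance_uuid, event_name)] = ev
    return ev

def pop_instance_event(instance_uuid, event_name):
    """Deliver an external event; events nobody is waiting for are only logged."""
    ev = _waiting_events.pop((instance_uuid, event_name), None)
    if ev is None:
        print(f"No waiting events found dispatching {event_name}")
        return
    ev.set()     # wakes the waiter blocked on ev.wait()
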
{{(pid=62519) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3546}} [ 1982.421090] env[62519]: DEBUG nova.network.neutron [req-41b3cfae-f926-48c0-b195-75686b000f19 req-7e84a167-3bfc-4227-9cf4-8f0094d37c0c service nova] [instance: c884a374-ffb8-48db-97bb-d64a687694d5] Updating instance_info_cache with network_info: [{"id": "5a25cf20-af43-4653-8729-93e5e73c5891", "address": "fa:16:3e:9b:6d:c4", "network": {"id": "3996e7f6-f093-4152-af91-6fb77f32a1c5", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-2013009403-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.177", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "63a46158057949478e5c79fbe0d4d5d4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3093647a-bab7-4562-ada0-428725e8c0fc", "external-id": "nsx-vlan-transportzone-660", "segmentation_id": 660, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5a25cf20-af", "ovs_interfaceid": "5a25cf20-af43-4653-8729-93e5e73c5891", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "0f1a716e-2238-498b-9d31-8516cb0d084d", "address": "fa:16:3e:6b:ae:3d", "network": {"id": "3996e7f6-f093-4152-af91-6fb77f32a1c5", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-2013009403-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "63a46158057949478e5c79fbe0d4d5d4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3093647a-bab7-4562-ada0-428725e8c0fc", "external-id": "nsx-vlan-transportzone-660", "segmentation_id": 660, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0f1a716e-22", "ovs_interfaceid": "0f1a716e-2238-498b-9d31-8516cb0d084d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "ebf65ce3-5f98-44e5-95de-e607cb9b75e3", "address": "fa:16:3e:77:41:2b", "network": {"id": "3996e7f6-f093-4152-af91-6fb77f32a1c5", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-2013009403-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "63a46158057949478e5c79fbe0d4d5d4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, 
"nsx-logical-switch-id": "3093647a-bab7-4562-ada0-428725e8c0fc", "external-id": "nsx-vlan-transportzone-660", "segmentation_id": 660, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapebf65ce3-5f", "ovs_interfaceid": "ebf65ce3-5f98-44e5-95de-e607cb9b75e3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1982.484318] env[62519]: DEBUG nova.network.neutron [None req-ee7e63e1-8123-4346-896d-38126246507f tempest-ServerMetadataNegativeTestJSON-1024005537 tempest-ServerMetadataNegativeTestJSON-1024005537-project-member] [instance: 475a4dce-fa1c-4951-b87c-5b5ba6a0f1b0] Successfully updated port: e05c875b-eb82-4265-a67c-6c626a2365be {{(pid=62519) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1982.521499] env[62519]: DEBUG oslo_concurrency.lockutils [None req-15266b34-9481-44be-b442-004cf4cd79bf tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.700s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1982.524475] env[62519]: DEBUG oslo_concurrency.lockutils [None req-99c36ef8-ebfa-4cae-b0fe-e8aa228c0e3f tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 1.356s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1982.544955] env[62519]: INFO nova.scheduler.client.report [None req-15266b34-9481-44be-b442-004cf4cd79bf tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Deleted allocations for instance 8e77653a-2e04-4ed7-a419-289bd4b899d7 [ 1982.923500] env[62519]: DEBUG oslo_concurrency.lockutils [req-41b3cfae-f926-48c0-b195-75686b000f19 req-7e84a167-3bfc-4227-9cf4-8f0094d37c0c service nova] Releasing lock "refresh_cache-c884a374-ffb8-48db-97bb-d64a687694d5" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1982.923934] env[62519]: DEBUG oslo_concurrency.lockutils [None req-80de45e2-b315-43ac-8ff1-6a4b5433dfa5 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Acquired lock "refresh_cache-c884a374-ffb8-48db-97bb-d64a687694d5" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1982.924148] env[62519]: DEBUG nova.network.neutron [None req-80de45e2-b315-43ac-8ff1-6a4b5433dfa5 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] [instance: c884a374-ffb8-48db-97bb-d64a687694d5] Building network info cache for instance {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1982.987267] env[62519]: DEBUG oslo_concurrency.lockutils [None req-ee7e63e1-8123-4346-896d-38126246507f tempest-ServerMetadataNegativeTestJSON-1024005537 tempest-ServerMetadataNegativeTestJSON-1024005537-project-member] Acquiring lock "refresh_cache-475a4dce-fa1c-4951-b87c-5b5ba6a0f1b0" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1982.987415] 
env[62519]: DEBUG oslo_concurrency.lockutils [None req-ee7e63e1-8123-4346-896d-38126246507f tempest-ServerMetadataNegativeTestJSON-1024005537 tempest-ServerMetadataNegativeTestJSON-1024005537-project-member] Acquired lock "refresh_cache-475a4dce-fa1c-4951-b87c-5b5ba6a0f1b0" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1982.987552] env[62519]: DEBUG nova.network.neutron [None req-ee7e63e1-8123-4346-896d-38126246507f tempest-ServerMetadataNegativeTestJSON-1024005537 tempest-ServerMetadataNegativeTestJSON-1024005537-project-member] [instance: 475a4dce-fa1c-4951-b87c-5b5ba6a0f1b0] Building network info cache for instance {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1983.031975] env[62519]: INFO nova.compute.claims [None req-99c36ef8-ebfa-4cae-b0fe-e8aa228c0e3f tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] [instance: 8b20b91d-d4e7-4ec2-88ec-76b444a8d8a8] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1983.052928] env[62519]: DEBUG oslo_concurrency.lockutils [None req-15266b34-9481-44be-b442-004cf4cd79bf tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Lock "8e77653a-2e04-4ed7-a419-289bd4b899d7" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.010s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1983.462008] env[62519]: WARNING nova.network.neutron [None req-80de45e2-b315-43ac-8ff1-6a4b5433dfa5 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] [instance: c884a374-ffb8-48db-97bb-d64a687694d5] 3996e7f6-f093-4152-af91-6fb77f32a1c5 already exists in list: networks containing: ['3996e7f6-f093-4152-af91-6fb77f32a1c5']. ignoring it [ 1983.462404] env[62519]: WARNING nova.network.neutron [None req-80de45e2-b315-43ac-8ff1-6a4b5433dfa5 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] [instance: c884a374-ffb8-48db-97bb-d64a687694d5] 3996e7f6-f093-4152-af91-6fb77f32a1c5 already exists in list: networks containing: ['3996e7f6-f093-4152-af91-6fb77f32a1c5']. ignoring it [ 1983.462541] env[62519]: WARNING nova.network.neutron [None req-80de45e2-b315-43ac-8ff1-6a4b5433dfa5 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] [instance: c884a374-ffb8-48db-97bb-d64a687694d5] 3996e7f6-f093-4152-af91-6fb77f32a1c5 already exists in list: networks containing: ['3996e7f6-f093-4152-af91-6fb77f32a1c5']. ignoring it [ 1983.463060] env[62519]: WARNING nova.network.neutron [None req-80de45e2-b315-43ac-8ff1-6a4b5433dfa5 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] [instance: c884a374-ffb8-48db-97bb-d64a687694d5] ebf65ce3-5f98-44e5-95de-e607cb9b75e3 already exists in list: port_ids containing: ['ebf65ce3-5f98-44e5-95de-e607cb9b75e3']. ignoring it [ 1983.532563] env[62519]: DEBUG nova.network.neutron [None req-ee7e63e1-8123-4346-896d-38126246507f tempest-ServerMetadataNegativeTestJSON-1024005537 tempest-ServerMetadataNegativeTestJSON-1024005537-project-member] [instance: 475a4dce-fa1c-4951-b87c-5b5ba6a0f1b0] Instance cache missing network info. 
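
Several records above ("Inventory has not changed in ProviderTree", "Inventory has not changed for provider ... based on inventory data") show the report client diffing a proposed inventory against its cached copy and skipping the Placement update when nothing moved. A sketch of that short-circuit, assuming a module-level cache dict and a caller-supplied push function (for example, the set_inventory sketch earlier); it is not the real provider-tree code.

_cached_inventory = {}     # rp_uuid -> last inventory dict sent to Placement

def update_inventory_if_changed(rp_uuid, new_inventory, push):
    """Skip the Placement call when the proposed inventory matches the cache."""
    if _cached_inventory.get(rp_uuid) == new_inventory:
        print(f"Inventory has not changed for provider {rp_uuid}")
        return False
    push(rp_uuid, new_inventory)           # only write when something changed
    _cached_inventory[rp_uuid] = new_inventory
    return True
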
{{(pid=62519) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1983.538041] env[62519]: INFO nova.compute.resource_tracker [None req-99c36ef8-ebfa-4cae-b0fe-e8aa228c0e3f tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] [instance: 8b20b91d-d4e7-4ec2-88ec-76b444a8d8a8] Updating resource usage from migration 167dbeb9-758a-4673-a767-5e06201c0522 [ 1983.561396] env[62519]: DEBUG nova.compute.manager [req-950028f3-e607-4d48-96a0-65d328d8a387 req-e39bc692-3ba4-49d0-b6bd-6b8971cb604a service nova] [instance: 475a4dce-fa1c-4951-b87c-5b5ba6a0f1b0] Received event network-vif-plugged-e05c875b-eb82-4265-a67c-6c626a2365be {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1983.561616] env[62519]: DEBUG oslo_concurrency.lockutils [req-950028f3-e607-4d48-96a0-65d328d8a387 req-e39bc692-3ba4-49d0-b6bd-6b8971cb604a service nova] Acquiring lock "475a4dce-fa1c-4951-b87c-5b5ba6a0f1b0-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1983.561826] env[62519]: DEBUG oslo_concurrency.lockutils [req-950028f3-e607-4d48-96a0-65d328d8a387 req-e39bc692-3ba4-49d0-b6bd-6b8971cb604a service nova] Lock "475a4dce-fa1c-4951-b87c-5b5ba6a0f1b0-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1983.561995] env[62519]: DEBUG oslo_concurrency.lockutils [req-950028f3-e607-4d48-96a0-65d328d8a387 req-e39bc692-3ba4-49d0-b6bd-6b8971cb604a service nova] Lock "475a4dce-fa1c-4951-b87c-5b5ba6a0f1b0-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1983.562180] env[62519]: DEBUG nova.compute.manager [req-950028f3-e607-4d48-96a0-65d328d8a387 req-e39bc692-3ba4-49d0-b6bd-6b8971cb604a service nova] [instance: 475a4dce-fa1c-4951-b87c-5b5ba6a0f1b0] No waiting events found dispatching network-vif-plugged-e05c875b-eb82-4265-a67c-6c626a2365be {{(pid=62519) pop_instance_event /opt/stack/nova/nova/compute/manager.py:323}} [ 1983.562339] env[62519]: WARNING nova.compute.manager [req-950028f3-e607-4d48-96a0-65d328d8a387 req-e39bc692-3ba4-49d0-b6bd-6b8971cb604a service nova] [instance: 475a4dce-fa1c-4951-b87c-5b5ba6a0f1b0] Received unexpected event network-vif-plugged-e05c875b-eb82-4265-a67c-6c626a2365be for instance with vm_state building and task_state spawning. [ 1983.562548] env[62519]: DEBUG nova.compute.manager [req-950028f3-e607-4d48-96a0-65d328d8a387 req-e39bc692-3ba4-49d0-b6bd-6b8971cb604a service nova] [instance: 475a4dce-fa1c-4951-b87c-5b5ba6a0f1b0] Received event network-changed-e05c875b-eb82-4265-a67c-6c626a2365be {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1983.562643] env[62519]: DEBUG nova.compute.manager [req-950028f3-e607-4d48-96a0-65d328d8a387 req-e39bc692-3ba4-49d0-b6bd-6b8971cb604a service nova] [instance: 475a4dce-fa1c-4951-b87c-5b5ba6a0f1b0] Refreshing instance network info cache due to event network-changed-e05c875b-eb82-4265-a67c-6c626a2365be. 
{{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11628}} [ 1983.562828] env[62519]: DEBUG oslo_concurrency.lockutils [req-950028f3-e607-4d48-96a0-65d328d8a387 req-e39bc692-3ba4-49d0-b6bd-6b8971cb604a service nova] Acquiring lock "refresh_cache-475a4dce-fa1c-4951-b87c-5b5ba6a0f1b0" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1983.646542] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19e07e71-2dad-4416-9413-32dedf4829dc {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1983.655030] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30886c0c-f4d4-441a-bd01-e40b79018459 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1983.686799] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-071fbb35-d990-4a15-911f-3159a6fc26c3 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1983.695027] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd5104c4-14f5-4299-9d2f-042734cad7fe {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1983.709542] env[62519]: DEBUG nova.compute.provider_tree [None req-99c36ef8-ebfa-4cae-b0fe-e8aa228c0e3f tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Inventory has not changed in ProviderTree for provider: f8ca0d98-9158-4b85-ae0e-b106f966dd44 {{(pid=62519) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1983.803491] env[62519]: DEBUG nova.network.neutron [None req-ee7e63e1-8123-4346-896d-38126246507f tempest-ServerMetadataNegativeTestJSON-1024005537 tempest-ServerMetadataNegativeTestJSON-1024005537-project-member] [instance: 475a4dce-fa1c-4951-b87c-5b5ba6a0f1b0] Updating instance_info_cache with network_info: [{"id": "e05c875b-eb82-4265-a67c-6c626a2365be", "address": "fa:16:3e:d9:5f:d9", "network": {"id": "13ae16fa-2a78-4463-a3c2-2db28df9d3f4", "bridge": "br-int", "label": "tempest-ServerMetadataNegativeTestJSON-220747137-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e4a74ff4a5334009b5a8d0fe031a4cf2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "09bf081b-cdf0-4977-abe2-2339a87409ab", "external-id": "nsx-vlan-transportzone-378", "segmentation_id": 378, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape05c875b-eb", "ovs_interfaceid": "e05c875b-eb82-4265-a67c-6c626a2365be", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1984.102454] env[62519]: DEBUG nova.network.neutron [None 
req-80de45e2-b315-43ac-8ff1-6a4b5433dfa5 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] [instance: c884a374-ffb8-48db-97bb-d64a687694d5] Updating instance_info_cache with network_info: [{"id": "5a25cf20-af43-4653-8729-93e5e73c5891", "address": "fa:16:3e:9b:6d:c4", "network": {"id": "3996e7f6-f093-4152-af91-6fb77f32a1c5", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-2013009403-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.177", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "63a46158057949478e5c79fbe0d4d5d4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3093647a-bab7-4562-ada0-428725e8c0fc", "external-id": "nsx-vlan-transportzone-660", "segmentation_id": 660, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5a25cf20-af", "ovs_interfaceid": "5a25cf20-af43-4653-8729-93e5e73c5891", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "0f1a716e-2238-498b-9d31-8516cb0d084d", "address": "fa:16:3e:6b:ae:3d", "network": {"id": "3996e7f6-f093-4152-af91-6fb77f32a1c5", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-2013009403-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "63a46158057949478e5c79fbe0d4d5d4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3093647a-bab7-4562-ada0-428725e8c0fc", "external-id": "nsx-vlan-transportzone-660", "segmentation_id": 660, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0f1a716e-22", "ovs_interfaceid": "0f1a716e-2238-498b-9d31-8516cb0d084d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "ebf65ce3-5f98-44e5-95de-e607cb9b75e3", "address": "fa:16:3e:77:41:2b", "network": {"id": "3996e7f6-f093-4152-af91-6fb77f32a1c5", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-2013009403-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "63a46158057949478e5c79fbe0d4d5d4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3093647a-bab7-4562-ada0-428725e8c0fc", "external-id": 
"nsx-vlan-transportzone-660", "segmentation_id": 660, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapebf65ce3-5f", "ovs_interfaceid": "ebf65ce3-5f98-44e5-95de-e607cb9b75e3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1984.212605] env[62519]: DEBUG nova.scheduler.client.report [None req-99c36ef8-ebfa-4cae-b0fe-e8aa228c0e3f tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Inventory has not changed for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 159, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1984.306181] env[62519]: DEBUG oslo_concurrency.lockutils [None req-ee7e63e1-8123-4346-896d-38126246507f tempest-ServerMetadataNegativeTestJSON-1024005537 tempest-ServerMetadataNegativeTestJSON-1024005537-project-member] Releasing lock "refresh_cache-475a4dce-fa1c-4951-b87c-5b5ba6a0f1b0" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1984.306583] env[62519]: DEBUG nova.compute.manager [None req-ee7e63e1-8123-4346-896d-38126246507f tempest-ServerMetadataNegativeTestJSON-1024005537 tempest-ServerMetadataNegativeTestJSON-1024005537-project-member] [instance: 475a4dce-fa1c-4951-b87c-5b5ba6a0f1b0] Instance network_info: |[{"id": "e05c875b-eb82-4265-a67c-6c626a2365be", "address": "fa:16:3e:d9:5f:d9", "network": {"id": "13ae16fa-2a78-4463-a3c2-2db28df9d3f4", "bridge": "br-int", "label": "tempest-ServerMetadataNegativeTestJSON-220747137-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e4a74ff4a5334009b5a8d0fe031a4cf2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "09bf081b-cdf0-4977-abe2-2339a87409ab", "external-id": "nsx-vlan-transportzone-378", "segmentation_id": 378, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape05c875b-eb", "ovs_interfaceid": "e05c875b-eb82-4265-a67c-6c626a2365be", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62519) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2004}} [ 1984.307206] env[62519]: DEBUG oslo_concurrency.lockutils [req-950028f3-e607-4d48-96a0-65d328d8a387 req-e39bc692-3ba4-49d0-b6bd-6b8971cb604a service nova] Acquired lock "refresh_cache-475a4dce-fa1c-4951-b87c-5b5ba6a0f1b0" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1984.307435] env[62519]: DEBUG nova.network.neutron 
[req-950028f3-e607-4d48-96a0-65d328d8a387 req-e39bc692-3ba4-49d0-b6bd-6b8971cb604a service nova] [instance: 475a4dce-fa1c-4951-b87c-5b5ba6a0f1b0] Refreshing network info cache for port e05c875b-eb82-4265-a67c-6c626a2365be {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1984.309218] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-ee7e63e1-8123-4346-896d-38126246507f tempest-ServerMetadataNegativeTestJSON-1024005537 tempest-ServerMetadataNegativeTestJSON-1024005537-project-member] [instance: 475a4dce-fa1c-4951-b87c-5b5ba6a0f1b0] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:d9:5f:d9', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '09bf081b-cdf0-4977-abe2-2339a87409ab', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'e05c875b-eb82-4265-a67c-6c626a2365be', 'vif_model': 'vmxnet3'}] {{(pid=62519) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1984.316400] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-ee7e63e1-8123-4346-896d-38126246507f tempest-ServerMetadataNegativeTestJSON-1024005537 tempest-ServerMetadataNegativeTestJSON-1024005537-project-member] Creating folder: Project (e4a74ff4a5334009b5a8d0fe031a4cf2). Parent ref: group-v373567. {{(pid=62519) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1984.319348] env[62519]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-136db5e2-7c25-4496-861b-35fe4fd4af1f {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1984.333356] env[62519]: INFO nova.virt.vmwareapi.vm_util [None req-ee7e63e1-8123-4346-896d-38126246507f tempest-ServerMetadataNegativeTestJSON-1024005537 tempest-ServerMetadataNegativeTestJSON-1024005537-project-member] Created folder: Project (e4a74ff4a5334009b5a8d0fe031a4cf2) in parent group-v373567. [ 1984.333572] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-ee7e63e1-8123-4346-896d-38126246507f tempest-ServerMetadataNegativeTestJSON-1024005537 tempest-ServerMetadataNegativeTestJSON-1024005537-project-member] Creating folder: Instances. Parent ref: group-v373867. {{(pid=62519) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1984.334073] env[62519]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-edce2af8-c787-43d5-b14b-51ebd5f6f488 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1984.345336] env[62519]: INFO nova.virt.vmwareapi.vm_util [None req-ee7e63e1-8123-4346-896d-38126246507f tempest-ServerMetadataNegativeTestJSON-1024005537 tempest-ServerMetadataNegativeTestJSON-1024005537-project-member] Created folder: Instances in parent group-v373867. [ 1984.345614] env[62519]: DEBUG oslo.service.loopingcall [None req-ee7e63e1-8123-4346-896d-38126246507f tempest-ServerMetadataNegativeTestJSON-1024005537 tempest-ServerMetadataNegativeTestJSON-1024005537-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
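The CreateVM_Task sequence that follows ('Waiting for the task ... to complete', repeated '... progress is N%' polls, then 'completed successfully' with a duration) is driven by oslo.vmware's task waiter, which runs a fixed-interval polling loop via oslo.service. A rough sketch of that poll-until-done shape, assuming a hypothetical fetch_task_state() helper rather than the real oslo.vmware internals:

```python
# Rough sketch of the poll-until-done loop behind the "_poll_task ... progress is N%"
# entries. fetch_task_state() is a hypothetical stand-in for reading task state from
# vCenter; FixedIntervalLoopingCall and LoopingCallDone are real oslo.service names.
from oslo_service import loopingcall


def wait_for_vcenter_task(task_ref, interval=0.5):
    def _poll():
        state, progress = fetch_task_state(task_ref)          # hypothetical lookup
        print(f"Task {task_ref}: progress is {progress}%")    # cf. the polled entries above
        if state == "success":
            raise loopingcall.LoopingCallDone(True)           # stops the loop; wait() returns True
        if state == "error":
            raise RuntimeError(f"Task {task_ref} failed")

    timer = loopingcall.FixedIntervalLoopingCall(_poll)
    return timer.start(interval=interval).wait()


def fetch_task_state(task_ref):
    """Placeholder: a real implementation would read the task's state and progress."""
    return "success", 100
```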
{{(pid=62519) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1984.345829] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 475a4dce-fa1c-4951-b87c-5b5ba6a0f1b0] Creating VM on the ESX host {{(pid=62519) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1984.346139] env[62519]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-288bd402-3697-4183-bbce-f0fa14a8cf2a {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1984.368576] env[62519]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1984.368576] env[62519]: value = "task-1803402" [ 1984.368576] env[62519]: _type = "Task" [ 1984.368576] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1984.377621] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1803402, 'name': CreateVM_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1984.535141] env[62519]: DEBUG nova.network.neutron [req-950028f3-e607-4d48-96a0-65d328d8a387 req-e39bc692-3ba4-49d0-b6bd-6b8971cb604a service nova] [instance: 475a4dce-fa1c-4951-b87c-5b5ba6a0f1b0] Updated VIF entry in instance network info cache for port e05c875b-eb82-4265-a67c-6c626a2365be. {{(pid=62519) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1984.535532] env[62519]: DEBUG nova.network.neutron [req-950028f3-e607-4d48-96a0-65d328d8a387 req-e39bc692-3ba4-49d0-b6bd-6b8971cb604a service nova] [instance: 475a4dce-fa1c-4951-b87c-5b5ba6a0f1b0] Updating instance_info_cache with network_info: [{"id": "e05c875b-eb82-4265-a67c-6c626a2365be", "address": "fa:16:3e:d9:5f:d9", "network": {"id": "13ae16fa-2a78-4463-a3c2-2db28df9d3f4", "bridge": "br-int", "label": "tempest-ServerMetadataNegativeTestJSON-220747137-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e4a74ff4a5334009b5a8d0fe031a4cf2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "09bf081b-cdf0-4977-abe2-2339a87409ab", "external-id": "nsx-vlan-transportzone-378", "segmentation_id": 378, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape05c875b-eb", "ovs_interfaceid": "e05c875b-eb82-4265-a67c-6c626a2365be", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1984.604957] env[62519]: DEBUG oslo_concurrency.lockutils [None req-80de45e2-b315-43ac-8ff1-6a4b5433dfa5 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Releasing lock "refresh_cache-c884a374-ffb8-48db-97bb-d64a687694d5" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1984.605656] env[62519]: DEBUG oslo_concurrency.lockutils [None req-80de45e2-b315-43ac-8ff1-6a4b5433dfa5 
tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Acquiring lock "c884a374-ffb8-48db-97bb-d64a687694d5" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1984.605816] env[62519]: DEBUG oslo_concurrency.lockutils [None req-80de45e2-b315-43ac-8ff1-6a4b5433dfa5 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Acquired lock "c884a374-ffb8-48db-97bb-d64a687694d5" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1984.606905] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2bfbb13-9e79-4c30-b6e8-c8efb2e37f37 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1984.626389] env[62519]: DEBUG nova.virt.hardware [None req-80de45e2-b315-43ac-8ff1-6a4b5433dfa5 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T07:56:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=,min_ram=,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1984.626643] env[62519]: DEBUG nova.virt.hardware [None req-80de45e2-b315-43ac-8ff1-6a4b5433dfa5 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Flavor limits 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1984.626799] env[62519]: DEBUG nova.virt.hardware [None req-80de45e2-b315-43ac-8ff1-6a4b5433dfa5 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Image limits 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1984.626983] env[62519]: DEBUG nova.virt.hardware [None req-80de45e2-b315-43ac-8ff1-6a4b5433dfa5 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Flavor pref 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1984.627153] env[62519]: DEBUG nova.virt.hardware [None req-80de45e2-b315-43ac-8ff1-6a4b5433dfa5 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Image pref 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1984.627413] env[62519]: DEBUG nova.virt.hardware [None req-80de45e2-b315-43ac-8ff1-6a4b5433dfa5 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1984.627506] env[62519]: DEBUG nova.virt.hardware [None req-80de45e2-b315-43ac-8ff1-6a4b5433dfa5 tempest-AttachInterfacesTestJSON-1996275057 
tempest-AttachInterfacesTestJSON-1996275057-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1984.628472] env[62519]: DEBUG nova.virt.hardware [None req-80de45e2-b315-43ac-8ff1-6a4b5433dfa5 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62519) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1984.628472] env[62519]: DEBUG nova.virt.hardware [None req-80de45e2-b315-43ac-8ff1-6a4b5433dfa5 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Got 1 possible topologies {{(pid=62519) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1984.628472] env[62519]: DEBUG nova.virt.hardware [None req-80de45e2-b315-43ac-8ff1-6a4b5433dfa5 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1984.628472] env[62519]: DEBUG nova.virt.hardware [None req-80de45e2-b315-43ac-8ff1-6a4b5433dfa5 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1984.634734] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-80de45e2-b315-43ac-8ff1-6a4b5433dfa5 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] [instance: c884a374-ffb8-48db-97bb-d64a687694d5] Reconfiguring VM to attach interface {{(pid=62519) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1923}} [ 1984.634734] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-898c492f-d6f9-46ee-a176-2920ece6a229 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1984.652785] env[62519]: DEBUG oslo_vmware.api [None req-80de45e2-b315-43ac-8ff1-6a4b5433dfa5 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Waiting for the task: (returnval){ [ 1984.652785] env[62519]: value = "task-1803403" [ 1984.652785] env[62519]: _type = "Task" [ 1984.652785] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1984.660905] env[62519]: DEBUG oslo_vmware.api [None req-80de45e2-b315-43ac-8ff1-6a4b5433dfa5 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Task: {'id': task-1803403, 'name': ReconfigVM_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1984.718400] env[62519]: DEBUG oslo_concurrency.lockutils [None req-99c36ef8-ebfa-4cae-b0fe-e8aa228c0e3f tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.194s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1984.718696] env[62519]: INFO nova.compute.manager [None req-99c36ef8-ebfa-4cae-b0fe-e8aa228c0e3f tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] [instance: 8b20b91d-d4e7-4ec2-88ec-76b444a8d8a8] Migrating [ 1984.878680] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1803402, 'name': CreateVM_Task} progress is 25%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1985.038206] env[62519]: DEBUG oslo_concurrency.lockutils [req-950028f3-e607-4d48-96a0-65d328d8a387 req-e39bc692-3ba4-49d0-b6bd-6b8971cb604a service nova] Releasing lock "refresh_cache-475a4dce-fa1c-4951-b87c-5b5ba6a0f1b0" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1985.163728] env[62519]: DEBUG oslo_vmware.api [None req-80de45e2-b315-43ac-8ff1-6a4b5433dfa5 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Task: {'id': task-1803403, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1985.199924] env[62519]: DEBUG oslo_concurrency.lockutils [None req-65ccd04b-b2b0-4788-8ffd-357ca8d5d18d tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Acquiring lock "c9399643-7d74-4064-a721-e6d038a5cef0" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1985.200170] env[62519]: DEBUG oslo_concurrency.lockutils [None req-65ccd04b-b2b0-4788-8ffd-357ca8d5d18d tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Lock "c9399643-7d74-4064-a721-e6d038a5cef0" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1985.233106] env[62519]: DEBUG oslo_concurrency.lockutils [None req-99c36ef8-ebfa-4cae-b0fe-e8aa228c0e3f tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Acquiring lock "refresh_cache-8b20b91d-d4e7-4ec2-88ec-76b444a8d8a8" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1985.233278] env[62519]: DEBUG oslo_concurrency.lockutils [None req-99c36ef8-ebfa-4cae-b0fe-e8aa228c0e3f tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Acquired lock "refresh_cache-8b20b91d-d4e7-4ec2-88ec-76b444a8d8a8" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1985.233450] env[62519]: DEBUG nova.network.neutron [None req-99c36ef8-ebfa-4cae-b0fe-e8aa228c0e3f tempest-ServerActionsTestJSON-347062103 
tempest-ServerActionsTestJSON-347062103-project-member] [instance: 8b20b91d-d4e7-4ec2-88ec-76b444a8d8a8] Building network info cache for instance {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1985.265419] env[62519]: DEBUG nova.compute.manager [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Didn't find any instances for network info cache update. {{(pid=62519) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10504}} [ 1985.265624] env[62519]: DEBUG oslo_service.periodic_task [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62519) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1985.265784] env[62519]: DEBUG oslo_service.periodic_task [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62519) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1985.265933] env[62519]: DEBUG oslo_service.periodic_task [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62519) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1985.266095] env[62519]: DEBUG oslo_service.periodic_task [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62519) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1985.266245] env[62519]: DEBUG oslo_service.periodic_task [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62519) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1985.266387] env[62519]: DEBUG oslo_service.periodic_task [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62519) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1985.266511] env[62519]: DEBUG nova.compute.manager [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=62519) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11037}} [ 1985.266659] env[62519]: DEBUG oslo_service.periodic_task [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Running periodic task ComputeManager.update_available_resource {{(pid=62519) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1985.378331] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1803402, 'name': CreateVM_Task} progress is 25%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1985.664707] env[62519]: DEBUG oslo_vmware.api [None req-80de45e2-b315-43ac-8ff1-6a4b5433dfa5 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Task: {'id': task-1803403, 'name': ReconfigVM_Task} progress is 14%. 
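The 'Running periodic task ComputeManager._poll_*' entries above are the compute manager's periodic-task loop from oslo.service. A minimal sketch of how such tasks are declared and driven, with hypothetical no-op task bodies; only the decorator and base-class names are the real oslo API:

```python
# Minimal sketch of how periodic tasks like ComputeManager._poll_rebooting_instances
# are declared and driven through oslo.service; the task bodies here are hypothetical
# no-ops standing in for Nova's real checks.
from oslo_config import cfg
from oslo_service import periodic_task

CONF = cfg.CONF


class DemoManager(periodic_task.PeriodicTasks):
    def __init__(self):
        super().__init__(CONF)

    @periodic_task.periodic_task
    def _poll_rebooting_instances(self, context):
        pass  # Nova's version looks for reboots that have timed out

    @periodic_task.periodic_task(spacing=60)
    def update_available_resource(self, context):
        pass  # Nova's version audits hypervisor resources, as in the entries that follow


manager = DemoManager()
manager.run_periodic_tasks(context=None)  # source of the "Running periodic task ..." DEBUG records
```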
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1985.702447] env[62519]: DEBUG nova.compute.manager [None req-65ccd04b-b2b0-4788-8ffd-357ca8d5d18d tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] [instance: c9399643-7d74-4064-a721-e6d038a5cef0] Starting instance... {{(pid=62519) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2441}} [ 1985.769429] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1985.769680] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1985.769680] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1985.769815] env[62519]: DEBUG nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62519) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1985.770774] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4fa064ae-3f71-4ee3-b577-3846bba9f1f6 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1985.781578] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af7b5394-6177-49bc-a40a-1a10afb8769b {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1985.797364] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7356c5ab-49b6-4df5-9d71-5bfd74cf1d92 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1985.805316] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-867cea4a-a557-49f1-beba-9cb9fe7314c1 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1985.835922] env[62519]: DEBUG nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=179612MB free_disk=159GB free_vcpus=48 pci_devices=None {{(pid=62519) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1985.836105] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62519) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1985.836315] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1985.880349] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1803402, 'name': CreateVM_Task, 'duration_secs': 1.359611} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1985.880349] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 475a4dce-fa1c-4951-b87c-5b5ba6a0f1b0] Created VM on the ESX host {{(pid=62519) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1985.889759] env[62519]: DEBUG oslo_concurrency.lockutils [None req-ee7e63e1-8123-4346-896d-38126246507f tempest-ServerMetadataNegativeTestJSON-1024005537 tempest-ServerMetadataNegativeTestJSON-1024005537-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1985.889945] env[62519]: DEBUG oslo_concurrency.lockutils [None req-ee7e63e1-8123-4346-896d-38126246507f tempest-ServerMetadataNegativeTestJSON-1024005537 tempest-ServerMetadataNegativeTestJSON-1024005537-project-member] Acquired lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1985.890426] env[62519]: DEBUG oslo_concurrency.lockutils [None req-ee7e63e1-8123-4346-896d-38126246507f tempest-ServerMetadataNegativeTestJSON-1024005537 tempest-ServerMetadataNegativeTestJSON-1024005537-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1985.890702] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-568d4c03-eca3-4523-882f-2d8b23357294 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1985.896786] env[62519]: DEBUG oslo_vmware.api [None req-ee7e63e1-8123-4346-896d-38126246507f tempest-ServerMetadataNegativeTestJSON-1024005537 tempest-ServerMetadataNegativeTestJSON-1024005537-project-member] Waiting for the task: (returnval){ [ 1985.896786] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]52a59e09-9607-5355-21ac-0529438ac45b" [ 1985.896786] env[62519]: _type = "Task" [ 1985.896786] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1985.908260] env[62519]: DEBUG oslo_vmware.api [None req-ee7e63e1-8123-4346-896d-38126246507f tempest-ServerMetadataNegativeTestJSON-1024005537 tempest-ServerMetadataNegativeTestJSON-1024005537-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52a59e09-9607-5355-21ac-0529438ac45b, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1986.006529] env[62519]: DEBUG nova.network.neutron [None req-99c36ef8-ebfa-4cae-b0fe-e8aa228c0e3f tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] [instance: 8b20b91d-d4e7-4ec2-88ec-76b444a8d8a8] Updating instance_info_cache with network_info: [{"id": "c3e2a054-4826-4bd6-8c9e-74005e7912e4", "address": "fa:16:3e:5b:29:03", "network": {"id": "40fc0e4f-fe1a-4b8c-ace4-4612cfc1a8fc", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-364463489-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.170", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "12977ed65a1b410a987b049e9d1dce3e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8e028024-a9c1-4cae-8849-ea770a7ae0e4", "external-id": "nsx-vlan-transportzone-919", "segmentation_id": 919, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc3e2a054-48", "ovs_interfaceid": "c3e2a054-4826-4bd6-8c9e-74005e7912e4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1986.166626] env[62519]: DEBUG oslo_vmware.api [None req-80de45e2-b315-43ac-8ff1-6a4b5433dfa5 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Task: {'id': task-1803403, 'name': ReconfigVM_Task, 'duration_secs': 1.316694} completed successfully. 
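Each 'Updating instance_info_cache with network_info: [...]' entry above carries the full Neutron view of an instance's ports: port id, MAC, subnets with fixed and floating IPs, the NSX segment details, and the tap device name. A small, hypothetical helper for pulling the commonly needed fields out of one of those blobs (the field layout matches the entries as logged):

```python
# Hypothetical helper for reading the network_info blobs logged above; the field
# layout (id, address, devname, network.subnets[].ips[], details) matches the
# entries as shown, which are themselves valid JSON.
import json


def summarize_network_info(network_info_json):
    vifs = json.loads(network_info_json)
    summary = []
    for vif in vifs:
        fixed_ips = [
            ip["address"]
            for subnet in vif["network"]["subnets"]
            for ip in subnet["ips"]
            if ip["type"] == "fixed"
        ]
        summary.append({
            "port_id": vif["id"],
            "mac": vif["address"],
            "devname": vif.get("devname"),
            "segmentation_id": vif["details"].get("segmentation_id"),
            "fixed_ips": fixed_ips,
        })
    return summary
```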
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1986.167080] env[62519]: DEBUG oslo_concurrency.lockutils [None req-80de45e2-b315-43ac-8ff1-6a4b5433dfa5 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Releasing lock "c884a374-ffb8-48db-97bb-d64a687694d5" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1986.167353] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-80de45e2-b315-43ac-8ff1-6a4b5433dfa5 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] [instance: c884a374-ffb8-48db-97bb-d64a687694d5] Reconfigured VM to attach interface {{(pid=62519) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1937}} [ 1986.226531] env[62519]: DEBUG oslo_concurrency.lockutils [None req-65ccd04b-b2b0-4788-8ffd-357ca8d5d18d tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1986.409862] env[62519]: DEBUG oslo_vmware.api [None req-ee7e63e1-8123-4346-896d-38126246507f tempest-ServerMetadataNegativeTestJSON-1024005537 tempest-ServerMetadataNegativeTestJSON-1024005537-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52a59e09-9607-5355-21ac-0529438ac45b, 'name': SearchDatastore_Task, 'duration_secs': 0.012166} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1986.410210] env[62519]: DEBUG oslo_concurrency.lockutils [None req-ee7e63e1-8123-4346-896d-38126246507f tempest-ServerMetadataNegativeTestJSON-1024005537 tempest-ServerMetadataNegativeTestJSON-1024005537-project-member] Releasing lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1986.410446] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-ee7e63e1-8123-4346-896d-38126246507f tempest-ServerMetadataNegativeTestJSON-1024005537 tempest-ServerMetadataNegativeTestJSON-1024005537-project-member] [instance: 475a4dce-fa1c-4951-b87c-5b5ba6a0f1b0] Processing image 15793716-f1d9-4a86-9030-717adf498693 {{(pid=62519) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1986.410710] env[62519]: DEBUG oslo_concurrency.lockutils [None req-ee7e63e1-8123-4346-896d-38126246507f tempest-ServerMetadataNegativeTestJSON-1024005537 tempest-ServerMetadataNegativeTestJSON-1024005537-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1986.410841] env[62519]: DEBUG oslo_concurrency.lockutils [None req-ee7e63e1-8123-4346-896d-38126246507f tempest-ServerMetadataNegativeTestJSON-1024005537 tempest-ServerMetadataNegativeTestJSON-1024005537-project-member] Acquired lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1986.411027] env[62519]: DEBUG 
nova.virt.vmwareapi.ds_util [None req-ee7e63e1-8123-4346-896d-38126246507f tempest-ServerMetadataNegativeTestJSON-1024005537 tempest-ServerMetadataNegativeTestJSON-1024005537-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62519) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1986.411283] env[62519]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-2fa1277f-1370-405e-8f54-0858d1ccf860 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1986.420494] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-ee7e63e1-8123-4346-896d-38126246507f tempest-ServerMetadataNegativeTestJSON-1024005537 tempest-ServerMetadataNegativeTestJSON-1024005537-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62519) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1986.420659] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-ee7e63e1-8123-4346-896d-38126246507f tempest-ServerMetadataNegativeTestJSON-1024005537 tempest-ServerMetadataNegativeTestJSON-1024005537-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62519) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1986.421386] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-21a159d6-1c65-4845-8616-b364bd0edefb {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1986.427401] env[62519]: DEBUG oslo_vmware.api [None req-ee7e63e1-8123-4346-896d-38126246507f tempest-ServerMetadataNegativeTestJSON-1024005537 tempest-ServerMetadataNegativeTestJSON-1024005537-project-member] Waiting for the task: (returnval){ [ 1986.427401] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]5279a29f-0fb8-f005-810b-e729853ae284" [ 1986.427401] env[62519]: _type = "Task" [ 1986.427401] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1986.434604] env[62519]: DEBUG oslo_vmware.api [None req-ee7e63e1-8123-4346-896d-38126246507f tempest-ServerMetadataNegativeTestJSON-1024005537 tempest-ServerMetadataNegativeTestJSON-1024005537-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]5279a29f-0fb8-f005-810b-e729853ae284, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1986.509252] env[62519]: DEBUG oslo_concurrency.lockutils [None req-99c36ef8-ebfa-4cae-b0fe-e8aa228c0e3f tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Releasing lock "refresh_cache-8b20b91d-d4e7-4ec2-88ec-76b444a8d8a8" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1986.672491] env[62519]: DEBUG oslo_concurrency.lockutils [None req-80de45e2-b315-43ac-8ff1-6a4b5433dfa5 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Lock "interface-c884a374-ffb8-48db-97bb-d64a687694d5-ebf65ce3-5f98-44e5-95de-e607cb9b75e3" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 8.154s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1986.845159] env[62519]: DEBUG nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Applying migration context for instance 8b20b91d-d4e7-4ec2-88ec-76b444a8d8a8 as it has an incoming, in-progress migration 167dbeb9-758a-4673-a767-5e06201c0522. Migration status is pre-migrating {{(pid=62519) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1016}} [ 1986.846091] env[62519]: INFO nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] [instance: 8b20b91d-d4e7-4ec2-88ec-76b444a8d8a8] Updating resource usage from migration 167dbeb9-758a-4673-a767-5e06201c0522 [ 1986.863089] env[62519]: DEBUG nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Instance c884a374-ffb8-48db-97bb-d64a687694d5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62519) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1986.863254] env[62519]: DEBUG nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Instance 9bf88b2f-63f9-466b-8669-45f17319055d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62519) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1986.863377] env[62519]: DEBUG nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Instance 47439070-54d8-454c-bf1d-7a2a33d82e9a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62519) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1986.863494] env[62519]: DEBUG nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Instance 475a4dce-fa1c-4951-b87c-5b5ba6a0f1b0 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62519) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1986.863610] env[62519]: DEBUG nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Migration 167dbeb9-758a-4673-a767-5e06201c0522 is active on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62519) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1712}} [ 1986.863796] env[62519]: DEBUG nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Instance 8b20b91d-d4e7-4ec2-88ec-76b444a8d8a8 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=62519) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1986.943565] env[62519]: DEBUG oslo_vmware.api [None req-ee7e63e1-8123-4346-896d-38126246507f tempest-ServerMetadataNegativeTestJSON-1024005537 tempest-ServerMetadataNegativeTestJSON-1024005537-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]5279a29f-0fb8-f005-810b-e729853ae284, 'name': SearchDatastore_Task, 'duration_secs': 0.009028} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1986.943565] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3660ef96-1391-40be-8424-7b2ac4ecb44e {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1986.949944] env[62519]: DEBUG oslo_vmware.api [None req-ee7e63e1-8123-4346-896d-38126246507f tempest-ServerMetadataNegativeTestJSON-1024005537 tempest-ServerMetadataNegativeTestJSON-1024005537-project-member] Waiting for the task: (returnval){ [ 1986.949944] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]52915d0a-841a-a1c1-3d64-be98fcaa7aaa" [ 1986.949944] env[62519]: _type = "Task" [ 1986.949944] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1986.959258] env[62519]: DEBUG oslo_vmware.api [None req-ee7e63e1-8123-4346-896d-38126246507f tempest-ServerMetadataNegativeTestJSON-1024005537 tempest-ServerMetadataNegativeTestJSON-1024005537-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52915d0a-841a-a1c1-3d64-be98fcaa7aaa, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1987.366650] env[62519]: DEBUG nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Instance c9399643-7d74-4064-a721-e6d038a5cef0 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62519) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1987.366913] env[62519]: DEBUG nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Total usable vcpus: 48, total allocated vcpus: 6 {{(pid=62519) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1987.367123] env[62519]: DEBUG nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1728MB phys_disk=200GB used_disk=6GB total_vcpus=48 used_vcpus=6 pci_stats=[] {{(pid=62519) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1987.460524] env[62519]: DEBUG oslo_vmware.api [None req-ee7e63e1-8123-4346-896d-38126246507f tempest-ServerMetadataNegativeTestJSON-1024005537 tempest-ServerMetadataNegativeTestJSON-1024005537-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52915d0a-841a-a1c1-3d64-be98fcaa7aaa, 'name': SearchDatastore_Task, 'duration_secs': 0.011391} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1987.462123] env[62519]: DEBUG oslo_concurrency.lockutils [None req-ee7e63e1-8123-4346-896d-38126246507f tempest-ServerMetadataNegativeTestJSON-1024005537 tempest-ServerMetadataNegativeTestJSON-1024005537-project-member] Releasing lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1987.462123] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-ee7e63e1-8123-4346-896d-38126246507f tempest-ServerMetadataNegativeTestJSON-1024005537 tempest-ServerMetadataNegativeTestJSON-1024005537-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk to [datastore1] 475a4dce-fa1c-4951-b87c-5b5ba6a0f1b0/475a4dce-fa1c-4951-b87c-5b5ba6a0f1b0.vmdk {{(pid=62519) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1987.462644] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-886d9dd0-95eb-4887-9d3a-045fe417cf64 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1987.465092] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-09411d4f-3c18-44e8-8943-45660dd9040c {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1987.472602] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-882edbc2-79c6-48dc-b1c3-ca3c0cf60f37 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1987.476646] env[62519]: DEBUG oslo_vmware.api [None req-ee7e63e1-8123-4346-896d-38126246507f tempest-ServerMetadataNegativeTestJSON-1024005537 tempest-ServerMetadataNegativeTestJSON-1024005537-project-member] Waiting for the task: (returnval){ [ 1987.476646] env[62519]: value = "task-1803404" [ 1987.476646] env[62519]: _type = "Task" [ 1987.476646] env[62519]: } to complete. 
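The resource-tracker audit above is internally consistent: the 'Final resource view' is the 512 MB host memory reservation from the placement inventory plus the six allocation rows listed before it (five at 192 MB and the 8b20b91d migration target at 256 MB, each claiming 1 GB of disk and 1 VCPU), while the just-scheduled c9399643 instance is skipped as noted. A quick cross-check of that arithmetic:

```python
# Cross-check of the "Final resource view" against the per-allocation rows above:
# five 192 MB allocations (c884a374, 9bf88b2f, 47439070, 475a4dce and migration
# 167dbeb9), one 256 MB allocation (8b20b91d), plus the 512 MB host reservation
# from the placement inventory. The scheduled-but-not-started c9399643 is excluded.
allocations_mb = [192, 192, 192, 192, 192, 256]
reserved_mb = 512

used_ram = reserved_mb + sum(allocations_mb)   # 1728 MB
used_disk_gb = len(allocations_mb) * 1         # each row claims DISK_GB: 1 -> 6 GB
used_vcpus = len(allocations_mb)               # each row claims VCPU: 1 -> 6

assert (used_ram, used_disk_gb, used_vcpus) == (1728, 6, 6)  # matches the logged view
```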
{{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1987.505519] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a9347a9-4cb5-477d-9150-4ae852f8d036 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1987.510896] env[62519]: DEBUG oslo_vmware.api [None req-ee7e63e1-8123-4346-896d-38126246507f tempest-ServerMetadataNegativeTestJSON-1024005537 tempest-ServerMetadataNegativeTestJSON-1024005537-project-member] Task: {'id': task-1803404, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1987.516030] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bac2c150-636d-4bff-903d-e8e8dbcc842f {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1987.531779] env[62519]: DEBUG nova.compute.provider_tree [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Inventory has not changed in ProviderTree for provider: f8ca0d98-9158-4b85-ae0e-b106f966dd44 {{(pid=62519) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1987.987313] env[62519]: DEBUG oslo_vmware.api [None req-ee7e63e1-8123-4346-896d-38126246507f tempest-ServerMetadataNegativeTestJSON-1024005537 tempest-ServerMetadataNegativeTestJSON-1024005537-project-member] Task: {'id': task-1803404, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1988.028177] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30d6765a-5829-40c4-9c3d-1edbcfcd034b {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1988.048218] env[62519]: DEBUG nova.scheduler.client.report [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Inventory has not changed for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 159, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1988.051374] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-99c36ef8-ebfa-4cae-b0fe-e8aa228c0e3f tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] [instance: 8b20b91d-d4e7-4ec2-88ec-76b444a8d8a8] Updating instance '8b20b91d-d4e7-4ec2-88ec-76b444a8d8a8' progress to 0 {{(pid=62519) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1988.359205] env[62519]: DEBUG oslo_concurrency.lockutils [None req-99d38bd8-efaa-4460-9801-774365a42e50 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Acquiring lock "9bf88b2f-63f9-466b-8669-45f17319055d" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1988.359457] env[62519]: DEBUG 
oslo_concurrency.lockutils [None req-99d38bd8-efaa-4460-9801-774365a42e50 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Lock "9bf88b2f-63f9-466b-8669-45f17319055d" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1988.488345] env[62519]: DEBUG oslo_vmware.api [None req-ee7e63e1-8123-4346-896d-38126246507f tempest-ServerMetadataNegativeTestJSON-1024005537 tempest-ServerMetadataNegativeTestJSON-1024005537-project-member] Task: {'id': task-1803404, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.593116} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1988.488602] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-ee7e63e1-8123-4346-896d-38126246507f tempest-ServerMetadataNegativeTestJSON-1024005537 tempest-ServerMetadataNegativeTestJSON-1024005537-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk to [datastore1] 475a4dce-fa1c-4951-b87c-5b5ba6a0f1b0/475a4dce-fa1c-4951-b87c-5b5ba6a0f1b0.vmdk {{(pid=62519) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1988.488800] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-ee7e63e1-8123-4346-896d-38126246507f tempest-ServerMetadataNegativeTestJSON-1024005537 tempest-ServerMetadataNegativeTestJSON-1024005537-project-member] [instance: 475a4dce-fa1c-4951-b87c-5b5ba6a0f1b0] Extending root virtual disk to 1048576 {{(pid=62519) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1988.489058] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-ed3f6592-bb25-4787-bb34-afe9a35a04f1 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1988.495820] env[62519]: DEBUG oslo_vmware.api [None req-ee7e63e1-8123-4346-896d-38126246507f tempest-ServerMetadataNegativeTestJSON-1024005537 tempest-ServerMetadataNegativeTestJSON-1024005537-project-member] Waiting for the task: (returnval){ [ 1988.495820] env[62519]: value = "task-1803405" [ 1988.495820] env[62519]: _type = "Task" [ 1988.495820] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1988.503804] env[62519]: DEBUG oslo_vmware.api [None req-ee7e63e1-8123-4346-896d-38126246507f tempest-ServerMetadataNegativeTestJSON-1024005537 tempest-ServerMetadataNegativeTestJSON-1024005537-project-member] Task: {'id': task-1803405, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1988.555910] env[62519]: DEBUG nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62519) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1988.556145] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.720s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1988.557913] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-99c36ef8-ebfa-4cae-b0fe-e8aa228c0e3f tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] [instance: 8b20b91d-d4e7-4ec2-88ec-76b444a8d8a8] Powering off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1988.558239] env[62519]: DEBUG oslo_concurrency.lockutils [None req-65ccd04b-b2b0-4788-8ffd-357ca8d5d18d tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 2.332s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1988.559695] env[62519]: INFO nova.compute.claims [None req-65ccd04b-b2b0-4788-8ffd-357ca8d5d18d tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] [instance: c9399643-7d74-4064-a721-e6d038a5cef0] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1988.563023] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-5b194b01-f4b1-4aed-b25f-d58b459bc6b1 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1988.570043] env[62519]: DEBUG oslo_vmware.api [None req-99c36ef8-ebfa-4cae-b0fe-e8aa228c0e3f tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Waiting for the task: (returnval){ [ 1988.570043] env[62519]: value = "task-1803406" [ 1988.570043] env[62519]: _type = "Task" [ 1988.570043] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1988.579449] env[62519]: DEBUG oslo_vmware.api [None req-99c36ef8-ebfa-4cae-b0fe-e8aa228c0e3f tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Task: {'id': task-1803406, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1988.862554] env[62519]: DEBUG nova.compute.utils [None req-99d38bd8-efaa-4460-9801-774365a42e50 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Using /dev/sd instead of None {{(pid=62519) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1989.006914] env[62519]: DEBUG oslo_vmware.api [None req-ee7e63e1-8123-4346-896d-38126246507f tempest-ServerMetadataNegativeTestJSON-1024005537 tempest-ServerMetadataNegativeTestJSON-1024005537-project-member] Task: {'id': task-1803405, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.071712} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1989.007210] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-ee7e63e1-8123-4346-896d-38126246507f tempest-ServerMetadataNegativeTestJSON-1024005537 tempest-ServerMetadataNegativeTestJSON-1024005537-project-member] [instance: 475a4dce-fa1c-4951-b87c-5b5ba6a0f1b0] Extended root virtual disk {{(pid=62519) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1989.007961] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31e305e9-a293-494a-8e1c-da1b8ccb6caa {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1989.029980] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-ee7e63e1-8123-4346-896d-38126246507f tempest-ServerMetadataNegativeTestJSON-1024005537 tempest-ServerMetadataNegativeTestJSON-1024005537-project-member] [instance: 475a4dce-fa1c-4951-b87c-5b5ba6a0f1b0] Reconfiguring VM instance instance-00000073 to attach disk [datastore1] 475a4dce-fa1c-4951-b87c-5b5ba6a0f1b0/475a4dce-fa1c-4951-b87c-5b5ba6a0f1b0.vmdk or device None with type sparse {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1989.030541] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0663c85c-e2e0-4dbc-8456-ada89a084f2a {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1989.049890] env[62519]: DEBUG oslo_vmware.api [None req-ee7e63e1-8123-4346-896d-38126246507f tempest-ServerMetadataNegativeTestJSON-1024005537 tempest-ServerMetadataNegativeTestJSON-1024005537-project-member] Waiting for the task: (returnval){ [ 1989.049890] env[62519]: value = "task-1803407" [ 1989.049890] env[62519]: _type = "Task" [ 1989.049890] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1989.057388] env[62519]: DEBUG oslo_vmware.api [None req-ee7e63e1-8123-4346-896d-38126246507f tempest-ServerMetadataNegativeTestJSON-1024005537 tempest-ServerMetadataNegativeTestJSON-1024005537-project-member] Task: {'id': task-1803407, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1989.080948] env[62519]: DEBUG oslo_vmware.api [None req-99c36ef8-ebfa-4cae-b0fe-e8aa228c0e3f tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Task: {'id': task-1803406, 'name': PowerOffVM_Task, 'duration_secs': 0.214702} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1989.081222] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-99c36ef8-ebfa-4cae-b0fe-e8aa228c0e3f tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] [instance: 8b20b91d-d4e7-4ec2-88ec-76b444a8d8a8] Powered off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1989.081449] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-99c36ef8-ebfa-4cae-b0fe-e8aa228c0e3f tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] [instance: 8b20b91d-d4e7-4ec2-88ec-76b444a8d8a8] Updating instance '8b20b91d-d4e7-4ec2-88ec-76b444a8d8a8' progress to 17 {{(pid=62519) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1989.366603] env[62519]: DEBUG oslo_concurrency.lockutils [None req-99d38bd8-efaa-4460-9801-774365a42e50 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Lock "9bf88b2f-63f9-466b-8669-45f17319055d" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.007s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1989.453739] env[62519]: DEBUG oslo_concurrency.lockutils [None req-844373a7-97dd-4804-b9b9-4b3b6986d93f tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Acquiring lock "interface-c884a374-ffb8-48db-97bb-d64a687694d5-0f1a716e-2238-498b-9d31-8516cb0d084d" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1989.453993] env[62519]: DEBUG oslo_concurrency.lockutils [None req-844373a7-97dd-4804-b9b9-4b3b6986d93f tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Lock "interface-c884a374-ffb8-48db-97bb-d64a687694d5-0f1a716e-2238-498b-9d31-8516cb0d084d" acquired by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: waited 0.001s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1989.560638] env[62519]: DEBUG oslo_vmware.api [None req-ee7e63e1-8123-4346-896d-38126246507f tempest-ServerMetadataNegativeTestJSON-1024005537 tempest-ServerMetadataNegativeTestJSON-1024005537-project-member] Task: {'id': task-1803407, 'name': ReconfigVM_Task, 'duration_secs': 0.366909} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1989.560947] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-ee7e63e1-8123-4346-896d-38126246507f tempest-ServerMetadataNegativeTestJSON-1024005537 tempest-ServerMetadataNegativeTestJSON-1024005537-project-member] [instance: 475a4dce-fa1c-4951-b87c-5b5ba6a0f1b0] Reconfigured VM instance instance-00000073 to attach disk [datastore1] 475a4dce-fa1c-4951-b87c-5b5ba6a0f1b0/475a4dce-fa1c-4951-b87c-5b5ba6a0f1b0.vmdk or device None with type sparse {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1989.561712] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-3db04f9e-2d0f-48a8-b172-435cbb7d2239 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1989.568576] env[62519]: DEBUG oslo_vmware.api [None req-ee7e63e1-8123-4346-896d-38126246507f tempest-ServerMetadataNegativeTestJSON-1024005537 tempest-ServerMetadataNegativeTestJSON-1024005537-project-member] Waiting for the task: (returnval){ [ 1989.568576] env[62519]: value = "task-1803408" [ 1989.568576] env[62519]: _type = "Task" [ 1989.568576] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1989.579024] env[62519]: DEBUG oslo_vmware.api [None req-ee7e63e1-8123-4346-896d-38126246507f tempest-ServerMetadataNegativeTestJSON-1024005537 tempest-ServerMetadataNegativeTestJSON-1024005537-project-member] Task: {'id': task-1803408, 'name': Rename_Task} progress is 5%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1989.588146] env[62519]: DEBUG nova.virt.hardware [None req-99c36ef8-ebfa-4cae-b0fe-e8aa228c0e3f tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T07:56:15Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=15793716-f1d9-4a86-9030-717adf498693,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1989.588384] env[62519]: DEBUG nova.virt.hardware [None req-99c36ef8-ebfa-4cae-b0fe-e8aa228c0e3f tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Flavor limits 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1989.588561] env[62519]: DEBUG nova.virt.hardware [None req-99c36ef8-ebfa-4cae-b0fe-e8aa228c0e3f tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Image limits 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1989.588750] env[62519]: DEBUG nova.virt.hardware [None req-99c36ef8-ebfa-4cae-b0fe-e8aa228c0e3f tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Flavor pref 0:0:0 {{(pid=62519) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:392}} [ 1989.588895] env[62519]: DEBUG nova.virt.hardware [None req-99c36ef8-ebfa-4cae-b0fe-e8aa228c0e3f tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Image pref 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1989.589053] env[62519]: DEBUG nova.virt.hardware [None req-99c36ef8-ebfa-4cae-b0fe-e8aa228c0e3f tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1989.589261] env[62519]: DEBUG nova.virt.hardware [None req-99c36ef8-ebfa-4cae-b0fe-e8aa228c0e3f tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1989.589415] env[62519]: DEBUG nova.virt.hardware [None req-99c36ef8-ebfa-4cae-b0fe-e8aa228c0e3f tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62519) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1989.589578] env[62519]: DEBUG nova.virt.hardware [None req-99c36ef8-ebfa-4cae-b0fe-e8aa228c0e3f tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Got 1 possible topologies {{(pid=62519) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1989.589738] env[62519]: DEBUG nova.virt.hardware [None req-99c36ef8-ebfa-4cae-b0fe-e8aa228c0e3f tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1989.589910] env[62519]: DEBUG nova.virt.hardware [None req-99c36ef8-ebfa-4cae-b0fe-e8aa228c0e3f tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1989.597565] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2b1ccd87-b05e-44f7-a30f-b2d1e5e1d733 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1989.614018] env[62519]: DEBUG oslo_vmware.api [None req-99c36ef8-ebfa-4cae-b0fe-e8aa228c0e3f tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Waiting for the task: (returnval){ [ 1989.614018] env[62519]: value = "task-1803409" [ 1989.614018] env[62519]: _type = "Task" [ 1989.614018] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1989.625619] env[62519]: DEBUG oslo_vmware.api [None req-99c36ef8-ebfa-4cae-b0fe-e8aa228c0e3f tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Task: {'id': task-1803409, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1989.693104] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62f09009-1071-423a-9945-d4fe6c453580 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1989.701164] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79285fae-5ceb-442e-af01-d5a00952ac42 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1989.732298] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2350b5c0-6756-44a3-b617-9382e6b318d7 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1989.741931] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b748f00b-bc5a-483d-91c2-5ffc2cf01514 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1989.755961] env[62519]: DEBUG nova.compute.provider_tree [None req-65ccd04b-b2b0-4788-8ffd-357ca8d5d18d tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Inventory has not changed in ProviderTree for provider: f8ca0d98-9158-4b85-ae0e-b106f966dd44 {{(pid=62519) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1989.956911] env[62519]: DEBUG oslo_concurrency.lockutils [None req-844373a7-97dd-4804-b9b9-4b3b6986d93f tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Acquiring lock "c884a374-ffb8-48db-97bb-d64a687694d5" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1989.957148] env[62519]: DEBUG oslo_concurrency.lockutils [None req-844373a7-97dd-4804-b9b9-4b3b6986d93f tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Acquired lock "c884a374-ffb8-48db-97bb-d64a687694d5" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1989.958457] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9dd54b94-834d-4893-a7ff-822f22df36dd {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1989.977082] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc13de9e-0baf-4911-98fe-c9a63a8eee3a {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1990.006959] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-844373a7-97dd-4804-b9b9-4b3b6986d93f tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] [instance: c884a374-ffb8-48db-97bb-d64a687694d5] Reconfiguring VM to detach interface {{(pid=62519) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1968}} [ 1990.007246] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-06029a83-5023-4767-95ae-e9de3e6bd064 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1990.033174] env[62519]: DEBUG oslo_vmware.api 
[None req-844373a7-97dd-4804-b9b9-4b3b6986d93f tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Waiting for the task: (returnval){ [ 1990.033174] env[62519]: value = "task-1803410" [ 1990.033174] env[62519]: _type = "Task" [ 1990.033174] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1990.041653] env[62519]: DEBUG oslo_vmware.api [None req-844373a7-97dd-4804-b9b9-4b3b6986d93f tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Task: {'id': task-1803410, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1990.080860] env[62519]: DEBUG oslo_vmware.api [None req-ee7e63e1-8123-4346-896d-38126246507f tempest-ServerMetadataNegativeTestJSON-1024005537 tempest-ServerMetadataNegativeTestJSON-1024005537-project-member] Task: {'id': task-1803408, 'name': Rename_Task, 'duration_secs': 0.214524} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1990.081160] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-ee7e63e1-8123-4346-896d-38126246507f tempest-ServerMetadataNegativeTestJSON-1024005537 tempest-ServerMetadataNegativeTestJSON-1024005537-project-member] [instance: 475a4dce-fa1c-4951-b87c-5b5ba6a0f1b0] Powering on the VM {{(pid=62519) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1990.081407] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-390ece8f-2ea2-4ce8-9a1c-fd0fbcd43175 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1990.088040] env[62519]: DEBUG oslo_vmware.api [None req-ee7e63e1-8123-4346-896d-38126246507f tempest-ServerMetadataNegativeTestJSON-1024005537 tempest-ServerMetadataNegativeTestJSON-1024005537-project-member] Waiting for the task: (returnval){ [ 1990.088040] env[62519]: value = "task-1803411" [ 1990.088040] env[62519]: _type = "Task" [ 1990.088040] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1990.096273] env[62519]: DEBUG oslo_vmware.api [None req-ee7e63e1-8123-4346-896d-38126246507f tempest-ServerMetadataNegativeTestJSON-1024005537 tempest-ServerMetadataNegativeTestJSON-1024005537-project-member] Task: {'id': task-1803411, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1990.123116] env[62519]: DEBUG oslo_vmware.api [None req-99c36ef8-ebfa-4cae-b0fe-e8aa228c0e3f tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Task: {'id': task-1803409, 'name': ReconfigVM_Task, 'duration_secs': 0.264758} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1990.123373] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-99c36ef8-ebfa-4cae-b0fe-e8aa228c0e3f tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] [instance: 8b20b91d-d4e7-4ec2-88ec-76b444a8d8a8] Updating instance '8b20b91d-d4e7-4ec2-88ec-76b444a8d8a8' progress to 33 {{(pid=62519) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1990.258916] env[62519]: DEBUG nova.scheduler.client.report [None req-65ccd04b-b2b0-4788-8ffd-357ca8d5d18d tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Inventory has not changed for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 159, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1990.425124] env[62519]: DEBUG oslo_concurrency.lockutils [None req-99d38bd8-efaa-4460-9801-774365a42e50 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Acquiring lock "9bf88b2f-63f9-466b-8669-45f17319055d" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1990.425404] env[62519]: DEBUG oslo_concurrency.lockutils [None req-99d38bd8-efaa-4460-9801-774365a42e50 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Lock "9bf88b2f-63f9-466b-8669-45f17319055d" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1990.425647] env[62519]: INFO nova.compute.manager [None req-99d38bd8-efaa-4460-9801-774365a42e50 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] [instance: 9bf88b2f-63f9-466b-8669-45f17319055d] Attaching volume a0f01d64-f203-4e5a-88dd-8cca1d0cd19a to /dev/sdb [ 1990.456152] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e14791b-623d-47c9-88d6-eb217ed98e1f {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1990.463764] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a899568-8e51-4416-b8c1-99032743ba80 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1990.478114] env[62519]: DEBUG nova.virt.block_device [None req-99d38bd8-efaa-4460-9801-774365a42e50 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] [instance: 9bf88b2f-63f9-466b-8669-45f17319055d] Updating existing volume attachment record: aad8ae54-ef51-4426-83f9-cb4895d25f86 {{(pid=62519) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1990.542676] env[62519]: DEBUG oslo_vmware.api [None req-844373a7-97dd-4804-b9b9-4b3b6986d93f 
tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Task: {'id': task-1803410, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1990.597790] env[62519]: DEBUG oslo_vmware.api [None req-ee7e63e1-8123-4346-896d-38126246507f tempest-ServerMetadataNegativeTestJSON-1024005537 tempest-ServerMetadataNegativeTestJSON-1024005537-project-member] Task: {'id': task-1803411, 'name': PowerOnVM_Task} progress is 33%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1990.630364] env[62519]: DEBUG nova.virt.hardware [None req-99c36ef8-ebfa-4cae-b0fe-e8aa228c0e3f tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T07:56:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=15793716-f1d9-4a86-9030-717adf498693,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1990.630687] env[62519]: DEBUG nova.virt.hardware [None req-99c36ef8-ebfa-4cae-b0fe-e8aa228c0e3f tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Flavor limits 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1990.630897] env[62519]: DEBUG nova.virt.hardware [None req-99c36ef8-ebfa-4cae-b0fe-e8aa228c0e3f tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Image limits 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1990.631133] env[62519]: DEBUG nova.virt.hardware [None req-99c36ef8-ebfa-4cae-b0fe-e8aa228c0e3f tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Flavor pref 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1990.631351] env[62519]: DEBUG nova.virt.hardware [None req-99c36ef8-ebfa-4cae-b0fe-e8aa228c0e3f tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Image pref 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1990.631640] env[62519]: DEBUG nova.virt.hardware [None req-99c36ef8-ebfa-4cae-b0fe-e8aa228c0e3f tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1990.631911] env[62519]: DEBUG nova.virt.hardware [None req-99c36ef8-ebfa-4cae-b0fe-e8aa228c0e3f tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62519) _get_desirable_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:573}} [ 1990.632094] env[62519]: DEBUG nova.virt.hardware [None req-99c36ef8-ebfa-4cae-b0fe-e8aa228c0e3f tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62519) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1990.632268] env[62519]: DEBUG nova.virt.hardware [None req-99c36ef8-ebfa-4cae-b0fe-e8aa228c0e3f tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Got 1 possible topologies {{(pid=62519) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1990.632430] env[62519]: DEBUG nova.virt.hardware [None req-99c36ef8-ebfa-4cae-b0fe-e8aa228c0e3f tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1990.632599] env[62519]: DEBUG nova.virt.hardware [None req-99c36ef8-ebfa-4cae-b0fe-e8aa228c0e3f tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1990.637972] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-99c36ef8-ebfa-4cae-b0fe-e8aa228c0e3f tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] [instance: 8b20b91d-d4e7-4ec2-88ec-76b444a8d8a8] Reconfiguring VM instance instance-00000034 to detach disk 2000 {{(pid=62519) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1990.638259] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-669731bf-678a-41d1-b131-b8bf191ee1f6 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1990.656625] env[62519]: DEBUG oslo_vmware.api [None req-99c36ef8-ebfa-4cae-b0fe-e8aa228c0e3f tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Waiting for the task: (returnval){ [ 1990.656625] env[62519]: value = "task-1803413" [ 1990.656625] env[62519]: _type = "Task" [ 1990.656625] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1990.665390] env[62519]: DEBUG oslo_vmware.api [None req-99c36ef8-ebfa-4cae-b0fe-e8aa228c0e3f tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Task: {'id': task-1803413, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1990.763666] env[62519]: DEBUG oslo_concurrency.lockutils [None req-65ccd04b-b2b0-4788-8ffd-357ca8d5d18d tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.205s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1990.764169] env[62519]: DEBUG nova.compute.manager [None req-65ccd04b-b2b0-4788-8ffd-357ca8d5d18d tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] [instance: c9399643-7d74-4064-a721-e6d038a5cef0] Start building networks asynchronously for instance. {{(pid=62519) _build_resources /opt/stack/nova/nova/compute/manager.py:2838}} [ 1991.044120] env[62519]: DEBUG oslo_vmware.api [None req-844373a7-97dd-4804-b9b9-4b3b6986d93f tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Task: {'id': task-1803410, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1991.099505] env[62519]: DEBUG oslo_vmware.api [None req-ee7e63e1-8123-4346-896d-38126246507f tempest-ServerMetadataNegativeTestJSON-1024005537 tempest-ServerMetadataNegativeTestJSON-1024005537-project-member] Task: {'id': task-1803411, 'name': PowerOnVM_Task} progress is 33%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1991.167013] env[62519]: DEBUG oslo_vmware.api [None req-99c36ef8-ebfa-4cae-b0fe-e8aa228c0e3f tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Task: {'id': task-1803413, 'name': ReconfigVM_Task, 'duration_secs': 0.209087} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1991.167308] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-99c36ef8-ebfa-4cae-b0fe-e8aa228c0e3f tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] [instance: 8b20b91d-d4e7-4ec2-88ec-76b444a8d8a8] Reconfigured VM instance instance-00000034 to detach disk 2000 {{(pid=62519) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1991.168082] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3aa1a4b0-a5a1-4021-97eb-3d32c2db2bc8 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1991.190251] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-99c36ef8-ebfa-4cae-b0fe-e8aa228c0e3f tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] [instance: 8b20b91d-d4e7-4ec2-88ec-76b444a8d8a8] Reconfiguring VM instance instance-00000034 to attach disk [datastore1] 8b20b91d-d4e7-4ec2-88ec-76b444a8d8a8/8b20b91d-d4e7-4ec2-88ec-76b444a8d8a8.vmdk or device None with type thin {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1991.190551] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-15d2b0a0-4b66-4f1c-a7a4-98598deb8f55 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1991.208335] env[62519]: DEBUG oslo_vmware.api [None req-99c36ef8-ebfa-4cae-b0fe-e8aa228c0e3f tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Waiting for the task: (returnval){ [ 1991.208335] env[62519]: value = "task-1803416" [ 1991.208335] env[62519]: _type = "Task" [ 1991.208335] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1991.216864] env[62519]: DEBUG oslo_vmware.api [None req-99c36ef8-ebfa-4cae-b0fe-e8aa228c0e3f tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Task: {'id': task-1803416, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1991.269524] env[62519]: DEBUG nova.compute.utils [None req-65ccd04b-b2b0-4788-8ffd-357ca8d5d18d tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Using /dev/sd instead of None {{(pid=62519) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1991.271171] env[62519]: DEBUG nova.compute.manager [None req-65ccd04b-b2b0-4788-8ffd-357ca8d5d18d tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] [instance: c9399643-7d74-4064-a721-e6d038a5cef0] Allocating IP information in the background. 
{{(pid=62519) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1989}} [ 1991.271411] env[62519]: DEBUG nova.network.neutron [None req-65ccd04b-b2b0-4788-8ffd-357ca8d5d18d tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] [instance: c9399643-7d74-4064-a721-e6d038a5cef0] allocate_for_instance() {{(pid=62519) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1991.313162] env[62519]: DEBUG nova.policy [None req-65ccd04b-b2b0-4788-8ffd-357ca8d5d18d tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b2fdec6fcda84ddeaaa1ee4ba6a58258', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '17cd969b1e7d4bd795748560caf80077', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62519) authorize /opt/stack/nova/nova/policy.py:192}} [ 1991.545515] env[62519]: DEBUG oslo_vmware.api [None req-844373a7-97dd-4804-b9b9-4b3b6986d93f tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Task: {'id': task-1803410, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1991.595793] env[62519]: DEBUG nova.network.neutron [None req-65ccd04b-b2b0-4788-8ffd-357ca8d5d18d tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] [instance: c9399643-7d74-4064-a721-e6d038a5cef0] Successfully created port: a8915627-fabb-4472-92ee-8c09b3c07a92 {{(pid=62519) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1991.601342] env[62519]: DEBUG oslo_vmware.api [None req-ee7e63e1-8123-4346-896d-38126246507f tempest-ServerMetadataNegativeTestJSON-1024005537 tempest-ServerMetadataNegativeTestJSON-1024005537-project-member] Task: {'id': task-1803411, 'name': PowerOnVM_Task, 'duration_secs': 1.470213} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1991.601342] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-ee7e63e1-8123-4346-896d-38126246507f tempest-ServerMetadataNegativeTestJSON-1024005537 tempest-ServerMetadataNegativeTestJSON-1024005537-project-member] [instance: 475a4dce-fa1c-4951-b87c-5b5ba6a0f1b0] Powered on the VM {{(pid=62519) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1991.601475] env[62519]: INFO nova.compute.manager [None req-ee7e63e1-8123-4346-896d-38126246507f tempest-ServerMetadataNegativeTestJSON-1024005537 tempest-ServerMetadataNegativeTestJSON-1024005537-project-member] [instance: 475a4dce-fa1c-4951-b87c-5b5ba6a0f1b0] Took 9.73 seconds to spawn the instance on the hypervisor. 
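The build that just finished above, from CopyVirtualDisk_Task through ExtendVirtualDisk_Task, ReconfigVM_Task, Rename_Task and finally PowerOnVM_Task, follows the same poll-until-done shape each time: a vSphere task is invoked, wait_for_task reports "progress is N%" at intervals, and _poll_task eventually logs "completed successfully" with a duration_secs. The Python sketch below only illustrates that generic pattern; it is not oslo.vmware's wait_for_task implementation, and the TaskInfo fields, the poll_task callable, and the interval/timeout parameters are assumptions made for the example.

import time
from dataclasses import dataclass
from typing import Callable

# Hypothetical snapshot of a task's state; the real vSphere TaskInfo
# object carries many more fields than the two used here.
@dataclass
class TaskInfo:
    state: str      # "running", "success" or "error"
    progress: int   # 0-100

def wait_for_task(poll_task: Callable[[], TaskInfo],
                  interval: float = 0.5,
                  timeout: float = 300.0) -> TaskInfo:
    """Poll `poll_task` until it reports success, mimicking the
    'progress is N%' / 'completed successfully' lines in this log."""
    start = time.monotonic()
    while True:
        info = poll_task()
        elapsed = time.monotonic() - start
        if info.state == "success":
            print(f"Task completed successfully. duration_secs={elapsed:.6f}")
            return info
        if info.state == "error":
            raise RuntimeError("task failed")
        if elapsed > timeout:
            raise TimeoutError("task did not complete in time")
        print(f"Task progress is {info.progress}%.")
        time.sleep(interval)

# Example: a fake task that succeeds on the third poll.
_states = iter([TaskInfo("running", 0), TaskInfo("running", 51), TaskInfo("success", 100)])
wait_for_task(lambda: next(_states), interval=0.01)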
[ 1991.601578] env[62519]: DEBUG nova.compute.manager [None req-ee7e63e1-8123-4346-896d-38126246507f tempest-ServerMetadataNegativeTestJSON-1024005537 tempest-ServerMetadataNegativeTestJSON-1024005537-project-member] [instance: 475a4dce-fa1c-4951-b87c-5b5ba6a0f1b0] Checking state {{(pid=62519) _get_power_state /opt/stack/nova/nova/compute/manager.py:1799}} [ 1991.602424] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e258bc98-72be-47b8-b21c-122db1cc90fb {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1991.718899] env[62519]: DEBUG oslo_vmware.api [None req-99c36ef8-ebfa-4cae-b0fe-e8aa228c0e3f tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Task: {'id': task-1803416, 'name': ReconfigVM_Task, 'duration_secs': 0.287049} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1991.719197] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-99c36ef8-ebfa-4cae-b0fe-e8aa228c0e3f tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] [instance: 8b20b91d-d4e7-4ec2-88ec-76b444a8d8a8] Reconfigured VM instance instance-00000034 to attach disk [datastore1] 8b20b91d-d4e7-4ec2-88ec-76b444a8d8a8/8b20b91d-d4e7-4ec2-88ec-76b444a8d8a8.vmdk or device None with type thin {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1991.719477] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-99c36ef8-ebfa-4cae-b0fe-e8aa228c0e3f tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] [instance: 8b20b91d-d4e7-4ec2-88ec-76b444a8d8a8] Updating instance '8b20b91d-d4e7-4ec2-88ec-76b444a8d8a8' progress to 50 {{(pid=62519) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1991.774739] env[62519]: DEBUG nova.compute.manager [None req-65ccd04b-b2b0-4788-8ffd-357ca8d5d18d tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] [instance: c9399643-7d74-4064-a721-e6d038a5cef0] Start building block device mappings for instance. {{(pid=62519) _build_resources /opt/stack/nova/nova/compute/manager.py:2873}} [ 1992.044887] env[62519]: DEBUG oslo_vmware.api [None req-844373a7-97dd-4804-b9b9-4b3b6986d93f tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Task: {'id': task-1803410, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1992.118716] env[62519]: INFO nova.compute.manager [None req-ee7e63e1-8123-4346-896d-38126246507f tempest-ServerMetadataNegativeTestJSON-1024005537 tempest-ServerMetadataNegativeTestJSON-1024005537-project-member] [instance: 475a4dce-fa1c-4951-b87c-5b5ba6a0f1b0] Took 14.73 seconds to build instance. 
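A second pattern that recurs around these entries is the lockutils triplet: 'Acquiring lock X by Y', 'Lock X acquired by Y :: waited N s', and 'Lock X "released" by Y :: held M s' (for example the "compute_resources" lock released after being held 2.720s earlier in this section). The snippet below is a minimal, standard-library-only sketch of a named lock that logs in that shape; it is not oslo.concurrency's lockutils (which also supports fair and inter-process file locks), and the synchronized() helper and its arguments are invented for illustration.

import threading
import time
from contextlib import contextmanager

# One process-local lock per name; real deployments may also need
# inter-process (file-based) locking, which this sketch does not attempt.
_locks: dict[str, threading.Lock] = {}
_registry_guard = threading.Lock()

@contextmanager
def synchronized(name: str, caller: str):
    """Log 'Acquiring / acquired (waited) / released (held)' messages
    in the same shape as the lockutils entries in this log."""
    with _registry_guard:
        lock = _locks.setdefault(name, threading.Lock())
    print(f'Acquiring lock "{name}" by "{caller}"')
    wait_start = time.monotonic()
    lock.acquire()
    hold_start = time.monotonic()
    print(f'Lock "{name}" acquired by "{caller}" :: waited {hold_start - wait_start:.3f}s')
    try:
        yield
    finally:
        held = time.monotonic() - hold_start
        lock.release()
        print(f'Lock "{name}" "released" by "{caller}" :: held {held:.3f}s')

# Usage in the spirit of the resource tracker entries in this section:
with synchronized("compute_resources", "ResourceTracker._update_available_resource"):
    pass  # critical section (e.g. updating the final resource view)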
[ 1992.225928] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08d1a58f-f2e6-42fd-880a-ef47a02bbe82 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1992.246523] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87ed3010-1a5b-47d7-bda5-91959484adb6 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1992.263840] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-99c36ef8-ebfa-4cae-b0fe-e8aa228c0e3f tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] [instance: 8b20b91d-d4e7-4ec2-88ec-76b444a8d8a8] Updating instance '8b20b91d-d4e7-4ec2-88ec-76b444a8d8a8' progress to 67 {{(pid=62519) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1992.545714] env[62519]: DEBUG oslo_vmware.api [None req-844373a7-97dd-4804-b9b9-4b3b6986d93f tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Task: {'id': task-1803410, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1992.783543] env[62519]: DEBUG nova.compute.manager [None req-65ccd04b-b2b0-4788-8ffd-357ca8d5d18d tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] [instance: c9399643-7d74-4064-a721-e6d038a5cef0] Start spawning the instance on the hypervisor. {{(pid=62519) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2647}} [ 1992.806226] env[62519]: DEBUG nova.network.neutron [None req-99c36ef8-ebfa-4cae-b0fe-e8aa228c0e3f tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] [instance: 8b20b91d-d4e7-4ec2-88ec-76b444a8d8a8] Port c3e2a054-4826-4bd6-8c9e-74005e7912e4 binding to destination host cpu-1 is already ACTIVE {{(pid=62519) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 1992.810054] env[62519]: DEBUG nova.virt.hardware [None req-65ccd04b-b2b0-4788-8ffd-357ca8d5d18d tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T07:56:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T07:55:55Z,direct_url=,disk_format='vmdk',id=15793716-f1d9-4a86-9030-717adf498693,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4069337684d348e0a1ab4eb6a1a2b14d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T07:55:56Z,virtual_size=,visibility=), allow threads: False {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1992.810336] env[62519]: DEBUG nova.virt.hardware [None req-65ccd04b-b2b0-4788-8ffd-357ca8d5d18d tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Flavor limits 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1992.810510] 
env[62519]: DEBUG nova.virt.hardware [None req-65ccd04b-b2b0-4788-8ffd-357ca8d5d18d tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Image limits 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1992.810872] env[62519]: DEBUG nova.virt.hardware [None req-65ccd04b-b2b0-4788-8ffd-357ca8d5d18d tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Flavor pref 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1992.811095] env[62519]: DEBUG nova.virt.hardware [None req-65ccd04b-b2b0-4788-8ffd-357ca8d5d18d tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Image pref 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1992.811369] env[62519]: DEBUG nova.virt.hardware [None req-65ccd04b-b2b0-4788-8ffd-357ca8d5d18d tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1992.811515] env[62519]: DEBUG nova.virt.hardware [None req-65ccd04b-b2b0-4788-8ffd-357ca8d5d18d tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1992.811779] env[62519]: DEBUG nova.virt.hardware [None req-65ccd04b-b2b0-4788-8ffd-357ca8d5d18d tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62519) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1992.812013] env[62519]: DEBUG nova.virt.hardware [None req-65ccd04b-b2b0-4788-8ffd-357ca8d5d18d tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Got 1 possible topologies {{(pid=62519) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1992.812201] env[62519]: DEBUG nova.virt.hardware [None req-65ccd04b-b2b0-4788-8ffd-357ca8d5d18d tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1992.812381] env[62519]: DEBUG nova.virt.hardware [None req-65ccd04b-b2b0-4788-8ffd-357ca8d5d18d tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1992.813268] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d096b66-dcb6-4422-b042-87897d680175 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1992.822079] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c77d679e-af60-4a85-a6e7-b7e8f35c0bcb {{(pid=62519) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1992.992312] env[62519]: DEBUG nova.compute.manager [req-e2003ec8-6b7a-4bb7-bd55-1fa9bfe769bd req-4cf09992-bd5c-4fb8-a81f-3deae4598c9e service nova] [instance: c9399643-7d74-4064-a721-e6d038a5cef0] Received event network-vif-plugged-a8915627-fabb-4472-92ee-8c09b3c07a92 {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1992.992525] env[62519]: DEBUG oslo_concurrency.lockutils [req-e2003ec8-6b7a-4bb7-bd55-1fa9bfe769bd req-4cf09992-bd5c-4fb8-a81f-3deae4598c9e service nova] Acquiring lock "c9399643-7d74-4064-a721-e6d038a5cef0-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1992.992823] env[62519]: DEBUG oslo_concurrency.lockutils [req-e2003ec8-6b7a-4bb7-bd55-1fa9bfe769bd req-4cf09992-bd5c-4fb8-a81f-3deae4598c9e service nova] Lock "c9399643-7d74-4064-a721-e6d038a5cef0-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1992.996614] env[62519]: DEBUG oslo_concurrency.lockutils [req-e2003ec8-6b7a-4bb7-bd55-1fa9bfe769bd req-4cf09992-bd5c-4fb8-a81f-3deae4598c9e service nova] Lock "c9399643-7d74-4064-a721-e6d038a5cef0-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1992.996614] env[62519]: DEBUG nova.compute.manager [req-e2003ec8-6b7a-4bb7-bd55-1fa9bfe769bd req-4cf09992-bd5c-4fb8-a81f-3deae4598c9e service nova] [instance: c9399643-7d74-4064-a721-e6d038a5cef0] No waiting events found dispatching network-vif-plugged-a8915627-fabb-4472-92ee-8c09b3c07a92 {{(pid=62519) pop_instance_event /opt/stack/nova/nova/compute/manager.py:323}} [ 1992.996614] env[62519]: WARNING nova.compute.manager [req-e2003ec8-6b7a-4bb7-bd55-1fa9bfe769bd req-4cf09992-bd5c-4fb8-a81f-3deae4598c9e service nova] [instance: c9399643-7d74-4064-a721-e6d038a5cef0] Received unexpected event network-vif-plugged-a8915627-fabb-4472-92ee-8c09b3c07a92 for instance with vm_state building and task_state spawning. [ 1993.047407] env[62519]: DEBUG oslo_vmware.api [None req-844373a7-97dd-4804-b9b9-4b3b6986d93f tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Task: {'id': task-1803410, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1993.086954] env[62519]: DEBUG nova.network.neutron [None req-65ccd04b-b2b0-4788-8ffd-357ca8d5d18d tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] [instance: c9399643-7d74-4064-a721-e6d038a5cef0] Successfully updated port: a8915627-fabb-4472-92ee-8c09b3c07a92 {{(pid=62519) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1993.499646] env[62519]: DEBUG oslo_concurrency.lockutils [None req-50cc0e8a-f13a-4a85-abd1-026e9f33ded0 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Acquiring lock "47439070-54d8-454c-bf1d-7a2a33d82e9a" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1993.499934] env[62519]: DEBUG oslo_concurrency.lockutils [None req-50cc0e8a-f13a-4a85-abd1-026e9f33ded0 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Lock "47439070-54d8-454c-bf1d-7a2a33d82e9a" acquired by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: waited 0.001s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1993.500147] env[62519]: INFO nova.compute.manager [None req-50cc0e8a-f13a-4a85-abd1-026e9f33ded0 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] [instance: 47439070-54d8-454c-bf1d-7a2a33d82e9a] Shelving [ 1993.546758] env[62519]: DEBUG oslo_vmware.api [None req-844373a7-97dd-4804-b9b9-4b3b6986d93f tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Task: {'id': task-1803410, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1993.589741] env[62519]: DEBUG oslo_concurrency.lockutils [None req-65ccd04b-b2b0-4788-8ffd-357ca8d5d18d tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Acquiring lock "refresh_cache-c9399643-7d74-4064-a721-e6d038a5cef0" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1993.589880] env[62519]: DEBUG oslo_concurrency.lockutils [None req-65ccd04b-b2b0-4788-8ffd-357ca8d5d18d tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Acquired lock "refresh_cache-c9399643-7d74-4064-a721-e6d038a5cef0" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1993.590042] env[62519]: DEBUG nova.network.neutron [None req-65ccd04b-b2b0-4788-8ffd-357ca8d5d18d tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] [instance: c9399643-7d74-4064-a721-e6d038a5cef0] Building network info cache for instance {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1993.626284] env[62519]: DEBUG oslo_concurrency.lockutils [None req-ee7e63e1-8123-4346-896d-38126246507f tempest-ServerMetadataNegativeTestJSON-1024005537 tempest-ServerMetadataNegativeTestJSON-1024005537-project-member] Lock "475a4dce-fa1c-4951-b87c-5b5ba6a0f1b0" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 17.245s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1993.750117] env[62519]: DEBUG oslo_concurrency.lockutils [None req-ff17eaa8-c2af-45d2-af04-7fec7fbff74c tempest-ServerMetadataNegativeTestJSON-1024005537 tempest-ServerMetadataNegativeTestJSON-1024005537-project-member] Acquiring lock "475a4dce-fa1c-4951-b87c-5b5ba6a0f1b0" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1993.750422] env[62519]: DEBUG oslo_concurrency.lockutils [None req-ff17eaa8-c2af-45d2-af04-7fec7fbff74c tempest-ServerMetadataNegativeTestJSON-1024005537 tempest-ServerMetadataNegativeTestJSON-1024005537-project-member] Lock "475a4dce-fa1c-4951-b87c-5b5ba6a0f1b0" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1993.750687] env[62519]: DEBUG oslo_concurrency.lockutils [None req-ff17eaa8-c2af-45d2-af04-7fec7fbff74c tempest-ServerMetadataNegativeTestJSON-1024005537 tempest-ServerMetadataNegativeTestJSON-1024005537-project-member] Acquiring lock "475a4dce-fa1c-4951-b87c-5b5ba6a0f1b0-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1993.750887] env[62519]: DEBUG oslo_concurrency.lockutils [None req-ff17eaa8-c2af-45d2-af04-7fec7fbff74c tempest-ServerMetadataNegativeTestJSON-1024005537 tempest-ServerMetadataNegativeTestJSON-1024005537-project-member] Lock "475a4dce-fa1c-4951-b87c-5b5ba6a0f1b0-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62519) 
inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1993.751071] env[62519]: DEBUG oslo_concurrency.lockutils [None req-ff17eaa8-c2af-45d2-af04-7fec7fbff74c tempest-ServerMetadataNegativeTestJSON-1024005537 tempest-ServerMetadataNegativeTestJSON-1024005537-project-member] Lock "475a4dce-fa1c-4951-b87c-5b5ba6a0f1b0-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1993.753416] env[62519]: INFO nova.compute.manager [None req-ff17eaa8-c2af-45d2-af04-7fec7fbff74c tempest-ServerMetadataNegativeTestJSON-1024005537 tempest-ServerMetadataNegativeTestJSON-1024005537-project-member] [instance: 475a4dce-fa1c-4951-b87c-5b5ba6a0f1b0] Terminating instance [ 1993.828282] env[62519]: DEBUG oslo_concurrency.lockutils [None req-99c36ef8-ebfa-4cae-b0fe-e8aa228c0e3f tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Acquiring lock "8b20b91d-d4e7-4ec2-88ec-76b444a8d8a8-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1993.828456] env[62519]: DEBUG oslo_concurrency.lockutils [None req-99c36ef8-ebfa-4cae-b0fe-e8aa228c0e3f tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Lock "8b20b91d-d4e7-4ec2-88ec-76b444a8d8a8-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1993.828627] env[62519]: DEBUG oslo_concurrency.lockutils [None req-99c36ef8-ebfa-4cae-b0fe-e8aa228c0e3f tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Lock "8b20b91d-d4e7-4ec2-88ec-76b444a8d8a8-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1994.047607] env[62519]: DEBUG oslo_vmware.api [None req-844373a7-97dd-4804-b9b9-4b3b6986d93f tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Task: {'id': task-1803410, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1994.121972] env[62519]: DEBUG nova.network.neutron [None req-65ccd04b-b2b0-4788-8ffd-357ca8d5d18d tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] [instance: c9399643-7d74-4064-a721-e6d038a5cef0] Instance cache missing network info. {{(pid=62519) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1994.257476] env[62519]: DEBUG nova.compute.manager [None req-ff17eaa8-c2af-45d2-af04-7fec7fbff74c tempest-ServerMetadataNegativeTestJSON-1024005537 tempest-ServerMetadataNegativeTestJSON-1024005537-project-member] [instance: 475a4dce-fa1c-4951-b87c-5b5ba6a0f1b0] Start destroying the instance on the hypervisor. 
{{(pid=62519) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3166}} [ 1994.257718] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-ff17eaa8-c2af-45d2-af04-7fec7fbff74c tempest-ServerMetadataNegativeTestJSON-1024005537 tempest-ServerMetadataNegativeTestJSON-1024005537-project-member] [instance: 475a4dce-fa1c-4951-b87c-5b5ba6a0f1b0] Destroying instance {{(pid=62519) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1994.258724] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15337145-3b54-4caf-89b4-812c9f66eb9f {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1994.266703] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-ff17eaa8-c2af-45d2-af04-7fec7fbff74c tempest-ServerMetadataNegativeTestJSON-1024005537 tempest-ServerMetadataNegativeTestJSON-1024005537-project-member] [instance: 475a4dce-fa1c-4951-b87c-5b5ba6a0f1b0] Powering off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1994.267734] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-32cc4137-03b8-4d45-a981-c3ab4fa13e2b {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1994.269130] env[62519]: DEBUG nova.network.neutron [None req-65ccd04b-b2b0-4788-8ffd-357ca8d5d18d tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] [instance: c9399643-7d74-4064-a721-e6d038a5cef0] Updating instance_info_cache with network_info: [{"id": "a8915627-fabb-4472-92ee-8c09b3c07a92", "address": "fa:16:3e:55:40:95", "network": {"id": "06bc21fc-5712-4650-9d06-18e9cc6afd29", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1062109402-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "17cd969b1e7d4bd795748560caf80077", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "89f807d9-140f-4a6f-8bce-96795f9482ee", "external-id": "nsx-vlan-transportzone-762", "segmentation_id": 762, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa8915627-fa", "ovs_interfaceid": "a8915627-fabb-4472-92ee-8c09b3c07a92", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1994.275627] env[62519]: DEBUG oslo_vmware.api [None req-ff17eaa8-c2af-45d2-af04-7fec7fbff74c tempest-ServerMetadataNegativeTestJSON-1024005537 tempest-ServerMetadataNegativeTestJSON-1024005537-project-member] Waiting for the task: (returnval){ [ 1994.275627] env[62519]: value = "task-1803418" [ 1994.275627] env[62519]: _type = "Task" [ 1994.275627] env[62519]: } to complete. 
{{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1994.284189] env[62519]: DEBUG oslo_vmware.api [None req-ff17eaa8-c2af-45d2-af04-7fec7fbff74c tempest-ServerMetadataNegativeTestJSON-1024005537 tempest-ServerMetadataNegativeTestJSON-1024005537-project-member] Task: {'id': task-1803418, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1994.511035] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-50cc0e8a-f13a-4a85-abd1-026e9f33ded0 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] [instance: 47439070-54d8-454c-bf1d-7a2a33d82e9a] Powering off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1994.511035] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a2b60870-32e6-4e03-9735-c710a427c216 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1994.517971] env[62519]: DEBUG oslo_vmware.api [None req-50cc0e8a-f13a-4a85-abd1-026e9f33ded0 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Waiting for the task: (returnval){ [ 1994.517971] env[62519]: value = "task-1803419" [ 1994.517971] env[62519]: _type = "Task" [ 1994.517971] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1994.526442] env[62519]: DEBUG oslo_vmware.api [None req-50cc0e8a-f13a-4a85-abd1-026e9f33ded0 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Task: {'id': task-1803419, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1994.546880] env[62519]: DEBUG oslo_vmware.api [None req-844373a7-97dd-4804-b9b9-4b3b6986d93f tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Task: {'id': task-1803410, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1994.772190] env[62519]: DEBUG oslo_concurrency.lockutils [None req-65ccd04b-b2b0-4788-8ffd-357ca8d5d18d tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Releasing lock "refresh_cache-c9399643-7d74-4064-a721-e6d038a5cef0" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1994.772551] env[62519]: DEBUG nova.compute.manager [None req-65ccd04b-b2b0-4788-8ffd-357ca8d5d18d tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] [instance: c9399643-7d74-4064-a721-e6d038a5cef0] Instance network_info: |[{"id": "a8915627-fabb-4472-92ee-8c09b3c07a92", "address": "fa:16:3e:55:40:95", "network": {"id": "06bc21fc-5712-4650-9d06-18e9cc6afd29", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1062109402-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "17cd969b1e7d4bd795748560caf80077", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "89f807d9-140f-4a6f-8bce-96795f9482ee", "external-id": "nsx-vlan-transportzone-762", "segmentation_id": 762, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa8915627-fa", "ovs_interfaceid": "a8915627-fabb-4472-92ee-8c09b3c07a92", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62519) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2004}} [ 1994.773019] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-65ccd04b-b2b0-4788-8ffd-357ca8d5d18d tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] [instance: c9399643-7d74-4064-a721-e6d038a5cef0] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:55:40:95', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '89f807d9-140f-4a6f-8bce-96795f9482ee', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'a8915627-fabb-4472-92ee-8c09b3c07a92', 'vif_model': 'vmxnet3'}] {{(pid=62519) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1994.781327] env[62519]: DEBUG oslo.service.loopingcall [None req-65ccd04b-b2b0-4788-8ffd-357ca8d5d18d tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62519) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1994.781664] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c9399643-7d74-4064-a721-e6d038a5cef0] Creating VM on the ESX host {{(pid=62519) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1994.785581] env[62519]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-58fff0f4-855b-4e0e-8a9d-2f0a9c49425b {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1994.806375] env[62519]: DEBUG oslo_vmware.api [None req-ff17eaa8-c2af-45d2-af04-7fec7fbff74c tempest-ServerMetadataNegativeTestJSON-1024005537 tempest-ServerMetadataNegativeTestJSON-1024005537-project-member] Task: {'id': task-1803418, 'name': PowerOffVM_Task, 'duration_secs': 0.18901} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1994.807594] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-ff17eaa8-c2af-45d2-af04-7fec7fbff74c tempest-ServerMetadataNegativeTestJSON-1024005537 tempest-ServerMetadataNegativeTestJSON-1024005537-project-member] [instance: 475a4dce-fa1c-4951-b87c-5b5ba6a0f1b0] Powered off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1994.807787] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-ff17eaa8-c2af-45d2-af04-7fec7fbff74c tempest-ServerMetadataNegativeTestJSON-1024005537 tempest-ServerMetadataNegativeTestJSON-1024005537-project-member] [instance: 475a4dce-fa1c-4951-b87c-5b5ba6a0f1b0] Unregistering the VM {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1994.808016] env[62519]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1994.808016] env[62519]: value = "task-1803420" [ 1994.808016] env[62519]: _type = "Task" [ 1994.808016] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1994.808184] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-21a22b3f-9555-4fb9-9561-33241bd52069 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1994.817197] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1803420, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1994.864039] env[62519]: DEBUG oslo_concurrency.lockutils [None req-99c36ef8-ebfa-4cae-b0fe-e8aa228c0e3f tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Acquiring lock "refresh_cache-8b20b91d-d4e7-4ec2-88ec-76b444a8d8a8" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1994.864221] env[62519]: DEBUG oslo_concurrency.lockutils [None req-99c36ef8-ebfa-4cae-b0fe-e8aa228c0e3f tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Acquired lock "refresh_cache-8b20b91d-d4e7-4ec2-88ec-76b444a8d8a8" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1994.864397] env[62519]: DEBUG nova.network.neutron [None req-99c36ef8-ebfa-4cae-b0fe-e8aa228c0e3f tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] [instance: 8b20b91d-d4e7-4ec2-88ec-76b444a8d8a8] Building network info cache for instance {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1994.892471] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-ff17eaa8-c2af-45d2-af04-7fec7fbff74c tempest-ServerMetadataNegativeTestJSON-1024005537 tempest-ServerMetadataNegativeTestJSON-1024005537-project-member] [instance: 475a4dce-fa1c-4951-b87c-5b5ba6a0f1b0] Unregistered the VM {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1994.892729] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-ff17eaa8-c2af-45d2-af04-7fec7fbff74c tempest-ServerMetadataNegativeTestJSON-1024005537 tempest-ServerMetadataNegativeTestJSON-1024005537-project-member] [instance: 475a4dce-fa1c-4951-b87c-5b5ba6a0f1b0] Deleting contents of the VM from datastore datastore1 {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1994.892986] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-ff17eaa8-c2af-45d2-af04-7fec7fbff74c tempest-ServerMetadataNegativeTestJSON-1024005537 tempest-ServerMetadataNegativeTestJSON-1024005537-project-member] Deleting the datastore file [datastore1] 475a4dce-fa1c-4951-b87c-5b5ba6a0f1b0 {{(pid=62519) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1994.893269] env[62519]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d9ae0227-6a21-4ab2-9243-763a172713af {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1994.899461] env[62519]: DEBUG oslo_vmware.api [None req-ff17eaa8-c2af-45d2-af04-7fec7fbff74c tempest-ServerMetadataNegativeTestJSON-1024005537 tempest-ServerMetadataNegativeTestJSON-1024005537-project-member] Waiting for the task: (returnval){ [ 1994.899461] env[62519]: value = "task-1803422" [ 1994.899461] env[62519]: _type = "Task" [ 1994.899461] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1994.906990] env[62519]: DEBUG oslo_vmware.api [None req-ff17eaa8-c2af-45d2-af04-7fec7fbff74c tempest-ServerMetadataNegativeTestJSON-1024005537 tempest-ServerMetadataNegativeTestJSON-1024005537-project-member] Task: {'id': task-1803422, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1995.024495] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-99d38bd8-efaa-4460-9801-774365a42e50 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] [instance: 9bf88b2f-63f9-466b-8669-45f17319055d] Volume attach. Driver type: vmdk {{(pid=62519) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1995.024786] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-99d38bd8-efaa-4460-9801-774365a42e50 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] [instance: 9bf88b2f-63f9-466b-8669-45f17319055d] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-373871', 'volume_id': 'a0f01d64-f203-4e5a-88dd-8cca1d0cd19a', 'name': 'volume-a0f01d64-f203-4e5a-88dd-8cca1d0cd19a', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '9bf88b2f-63f9-466b-8669-45f17319055d', 'attached_at': '', 'detached_at': '', 'volume_id': 'a0f01d64-f203-4e5a-88dd-8cca1d0cd19a', 'serial': 'a0f01d64-f203-4e5a-88dd-8cca1d0cd19a'} {{(pid=62519) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1995.026028] env[62519]: DEBUG nova.compute.manager [req-b84dfd07-d8aa-43da-a7f8-43a8af019a33 req-2be115df-4f29-4a7d-871a-e1ecebfb1423 service nova] [instance: c9399643-7d74-4064-a721-e6d038a5cef0] Received event network-changed-a8915627-fabb-4472-92ee-8c09b3c07a92 {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1995.026252] env[62519]: DEBUG nova.compute.manager [req-b84dfd07-d8aa-43da-a7f8-43a8af019a33 req-2be115df-4f29-4a7d-871a-e1ecebfb1423 service nova] [instance: c9399643-7d74-4064-a721-e6d038a5cef0] Refreshing instance network info cache due to event network-changed-a8915627-fabb-4472-92ee-8c09b3c07a92. 
{{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11628}} [ 1995.026470] env[62519]: DEBUG oslo_concurrency.lockutils [req-b84dfd07-d8aa-43da-a7f8-43a8af019a33 req-2be115df-4f29-4a7d-871a-e1ecebfb1423 service nova] Acquiring lock "refresh_cache-c9399643-7d74-4064-a721-e6d038a5cef0" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1995.026614] env[62519]: DEBUG oslo_concurrency.lockutils [req-b84dfd07-d8aa-43da-a7f8-43a8af019a33 req-2be115df-4f29-4a7d-871a-e1ecebfb1423 service nova] Acquired lock "refresh_cache-c9399643-7d74-4064-a721-e6d038a5cef0" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1995.026799] env[62519]: DEBUG nova.network.neutron [req-b84dfd07-d8aa-43da-a7f8-43a8af019a33 req-2be115df-4f29-4a7d-871a-e1ecebfb1423 service nova] [instance: c9399643-7d74-4064-a721-e6d038a5cef0] Refreshing network info cache for port a8915627-fabb-4472-92ee-8c09b3c07a92 {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1995.029141] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d96f9418-a0de-4944-8af9-187d2c1bdc17 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1995.038625] env[62519]: DEBUG oslo_vmware.api [None req-50cc0e8a-f13a-4a85-abd1-026e9f33ded0 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Task: {'id': task-1803419, 'name': PowerOffVM_Task, 'duration_secs': 0.137844} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1995.050064] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-50cc0e8a-f13a-4a85-abd1-026e9f33ded0 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] [instance: 47439070-54d8-454c-bf1d-7a2a33d82e9a] Powered off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1995.051721] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96132c09-a4a3-4be7-9d3f-6d9d477704b7 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1995.057728] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ffb04b7c-e603-40f1-8efd-f56414535b29 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1995.066312] env[62519]: DEBUG oslo_vmware.api [None req-844373a7-97dd-4804-b9b9-4b3b6986d93f tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Task: {'id': task-1803410, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1995.102827] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-99d38bd8-efaa-4460-9801-774365a42e50 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] [instance: 9bf88b2f-63f9-466b-8669-45f17319055d] Reconfiguring VM instance instance-00000071 to attach disk [datastore1] volume-a0f01d64-f203-4e5a-88dd-8cca1d0cd19a/volume-a0f01d64-f203-4e5a-88dd-8cca1d0cd19a.vmdk or device None with type thin {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1995.103321] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f506344a-7db3-4a4d-b854-9fb8841c717d {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1995.116916] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6a18edd-5354-4bd9-a7fa-c4cadee95e60 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1995.128692] env[62519]: DEBUG oslo_vmware.api [None req-99d38bd8-efaa-4460-9801-774365a42e50 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Waiting for the task: (returnval){ [ 1995.128692] env[62519]: value = "task-1803423" [ 1995.128692] env[62519]: _type = "Task" [ 1995.128692] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1995.137175] env[62519]: DEBUG oslo_vmware.api [None req-99d38bd8-efaa-4460-9801-774365a42e50 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Task: {'id': task-1803423, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1995.319909] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1803420, 'name': CreateVM_Task, 'duration_secs': 0.332392} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1995.320156] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c9399643-7d74-4064-a721-e6d038a5cef0] Created VM on the ESX host {{(pid=62519) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1995.321168] env[62519]: DEBUG oslo_concurrency.lockutils [None req-65ccd04b-b2b0-4788-8ffd-357ca8d5d18d tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1995.322028] env[62519]: DEBUG oslo_concurrency.lockutils [None req-65ccd04b-b2b0-4788-8ffd-357ca8d5d18d tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Acquired lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1995.322028] env[62519]: DEBUG oslo_concurrency.lockutils [None req-65ccd04b-b2b0-4788-8ffd-357ca8d5d18d tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1995.322625] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4fbcd926-05d9-4b4c-b0f8-efeb7454f8ea {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1995.328840] env[62519]: DEBUG oslo_vmware.api [None req-65ccd04b-b2b0-4788-8ffd-357ca8d5d18d tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Waiting for the task: (returnval){ [ 1995.328840] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]52a4e91c-1a3a-7446-1a4d-69627c3396f0" [ 1995.328840] env[62519]: _type = "Task" [ 1995.328840] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1995.337730] env[62519]: DEBUG oslo_vmware.api [None req-65ccd04b-b2b0-4788-8ffd-357ca8d5d18d tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52a4e91c-1a3a-7446-1a4d-69627c3396f0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1995.409809] env[62519]: DEBUG oslo_vmware.api [None req-ff17eaa8-c2af-45d2-af04-7fec7fbff74c tempest-ServerMetadataNegativeTestJSON-1024005537 tempest-ServerMetadataNegativeTestJSON-1024005537-project-member] Task: {'id': task-1803422, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.149891} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1995.410089] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-ff17eaa8-c2af-45d2-af04-7fec7fbff74c tempest-ServerMetadataNegativeTestJSON-1024005537 tempest-ServerMetadataNegativeTestJSON-1024005537-project-member] Deleted the datastore file {{(pid=62519) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1995.410281] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-ff17eaa8-c2af-45d2-af04-7fec7fbff74c tempest-ServerMetadataNegativeTestJSON-1024005537 tempest-ServerMetadataNegativeTestJSON-1024005537-project-member] [instance: 475a4dce-fa1c-4951-b87c-5b5ba6a0f1b0] Deleted contents of the VM from datastore datastore1 {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1995.410456] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-ff17eaa8-c2af-45d2-af04-7fec7fbff74c tempest-ServerMetadataNegativeTestJSON-1024005537 tempest-ServerMetadataNegativeTestJSON-1024005537-project-member] [instance: 475a4dce-fa1c-4951-b87c-5b5ba6a0f1b0] Instance destroyed {{(pid=62519) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1995.410629] env[62519]: INFO nova.compute.manager [None req-ff17eaa8-c2af-45d2-af04-7fec7fbff74c tempest-ServerMetadataNegativeTestJSON-1024005537 tempest-ServerMetadataNegativeTestJSON-1024005537-project-member] [instance: 475a4dce-fa1c-4951-b87c-5b5ba6a0f1b0] Took 1.15 seconds to destroy the instance on the hypervisor. [ 1995.411092] env[62519]: DEBUG oslo.service.loopingcall [None req-ff17eaa8-c2af-45d2-af04-7fec7fbff74c tempest-ServerMetadataNegativeTestJSON-1024005537 tempest-ServerMetadataNegativeTestJSON-1024005537-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62519) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1995.411321] env[62519]: DEBUG nova.compute.manager [-] [instance: 475a4dce-fa1c-4951-b87c-5b5ba6a0f1b0] Deallocating network for instance {{(pid=62519) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2297}} [ 1995.411419] env[62519]: DEBUG nova.network.neutron [-] [instance: 475a4dce-fa1c-4951-b87c-5b5ba6a0f1b0] deallocate_for_instance() {{(pid=62519) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1995.560760] env[62519]: DEBUG oslo_vmware.api [None req-844373a7-97dd-4804-b9b9-4b3b6986d93f tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Task: {'id': task-1803410, 'name': ReconfigVM_Task} progress is 18%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1995.630717] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-50cc0e8a-f13a-4a85-abd1-026e9f33ded0 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] [instance: 47439070-54d8-454c-bf1d-7a2a33d82e9a] Creating Snapshot of the VM instance {{(pid=62519) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1995.631084] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-ba4f0be0-9e0d-42aa-8452-2691cb324566 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1995.642246] env[62519]: DEBUG oslo_vmware.api [None req-99d38bd8-efaa-4460-9801-774365a42e50 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Task: {'id': task-1803423, 'name': ReconfigVM_Task, 'duration_secs': 0.367706} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1995.643542] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-99d38bd8-efaa-4460-9801-774365a42e50 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] [instance: 9bf88b2f-63f9-466b-8669-45f17319055d] Reconfigured VM instance instance-00000071 to attach disk [datastore1] volume-a0f01d64-f203-4e5a-88dd-8cca1d0cd19a/volume-a0f01d64-f203-4e5a-88dd-8cca1d0cd19a.vmdk or device None with type thin {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1995.648706] env[62519]: DEBUG oslo_vmware.api [None req-50cc0e8a-f13a-4a85-abd1-026e9f33ded0 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Waiting for the task: (returnval){ [ 1995.648706] env[62519]: value = "task-1803424" [ 1995.648706] env[62519]: _type = "Task" [ 1995.648706] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1995.648910] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1279a228-1d1b-454d-b72c-b9b0a25a0f82 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1995.667897] env[62519]: DEBUG oslo_vmware.api [None req-50cc0e8a-f13a-4a85-abd1-026e9f33ded0 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Task: {'id': task-1803424, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1995.669489] env[62519]: DEBUG oslo_vmware.api [None req-99d38bd8-efaa-4460-9801-774365a42e50 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Waiting for the task: (returnval){ [ 1995.669489] env[62519]: value = "task-1803425" [ 1995.669489] env[62519]: _type = "Task" [ 1995.669489] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1995.678566] env[62519]: DEBUG oslo_vmware.api [None req-99d38bd8-efaa-4460-9801-774365a42e50 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Task: {'id': task-1803425, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1995.813849] env[62519]: DEBUG nova.network.neutron [None req-99c36ef8-ebfa-4cae-b0fe-e8aa228c0e3f tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] [instance: 8b20b91d-d4e7-4ec2-88ec-76b444a8d8a8] Updating instance_info_cache with network_info: [{"id": "c3e2a054-4826-4bd6-8c9e-74005e7912e4", "address": "fa:16:3e:5b:29:03", "network": {"id": "40fc0e4f-fe1a-4b8c-ace4-4612cfc1a8fc", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-364463489-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.170", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "12977ed65a1b410a987b049e9d1dce3e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8e028024-a9c1-4cae-8849-ea770a7ae0e4", "external-id": "nsx-vlan-transportzone-919", "segmentation_id": 919, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc3e2a054-48", "ovs_interfaceid": "c3e2a054-4826-4bd6-8c9e-74005e7912e4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1995.840152] env[62519]: DEBUG oslo_vmware.api [None req-65ccd04b-b2b0-4788-8ffd-357ca8d5d18d tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52a4e91c-1a3a-7446-1a4d-69627c3396f0, 'name': SearchDatastore_Task, 'duration_secs': 0.010976} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1995.840362] env[62519]: DEBUG oslo_concurrency.lockutils [None req-65ccd04b-b2b0-4788-8ffd-357ca8d5d18d tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Releasing lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1995.840852] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-65ccd04b-b2b0-4788-8ffd-357ca8d5d18d tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] [instance: c9399643-7d74-4064-a721-e6d038a5cef0] Processing image 15793716-f1d9-4a86-9030-717adf498693 {{(pid=62519) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1995.840852] env[62519]: DEBUG oslo_concurrency.lockutils [None req-65ccd04b-b2b0-4788-8ffd-357ca8d5d18d tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1995.841010] env[62519]: DEBUG oslo_concurrency.lockutils [None req-65ccd04b-b2b0-4788-8ffd-357ca8d5d18d tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Acquired lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1995.841202] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-65ccd04b-b2b0-4788-8ffd-357ca8d5d18d tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62519) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1995.841463] env[62519]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-8f54a92e-b592-4550-a2ff-4911dbf2693e {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1995.851507] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-65ccd04b-b2b0-4788-8ffd-357ca8d5d18d tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62519) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1995.851507] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-65ccd04b-b2b0-4788-8ffd-357ca8d5d18d tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62519) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1995.852228] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3b59b016-f711-4335-a04b-e9c2497fb3b6 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1995.860566] env[62519]: DEBUG oslo_vmware.api [None req-65ccd04b-b2b0-4788-8ffd-357ca8d5d18d tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Waiting for the task: (returnval){ [ 1995.860566] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]52422219-430d-c0fb-2029-e30a79ab5f76" [ 1995.860566] env[62519]: _type = "Task" [ 1995.860566] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1995.869550] env[62519]: DEBUG oslo_vmware.api [None req-65ccd04b-b2b0-4788-8ffd-357ca8d5d18d tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52422219-430d-c0fb-2029-e30a79ab5f76, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1995.948021] env[62519]: DEBUG nova.network.neutron [req-b84dfd07-d8aa-43da-a7f8-43a8af019a33 req-2be115df-4f29-4a7d-871a-e1ecebfb1423 service nova] [instance: c9399643-7d74-4064-a721-e6d038a5cef0] Updated VIF entry in instance network info cache for port a8915627-fabb-4472-92ee-8c09b3c07a92. {{(pid=62519) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1995.948400] env[62519]: DEBUG nova.network.neutron [req-b84dfd07-d8aa-43da-a7f8-43a8af019a33 req-2be115df-4f29-4a7d-871a-e1ecebfb1423 service nova] [instance: c9399643-7d74-4064-a721-e6d038a5cef0] Updating instance_info_cache with network_info: [{"id": "a8915627-fabb-4472-92ee-8c09b3c07a92", "address": "fa:16:3e:55:40:95", "network": {"id": "06bc21fc-5712-4650-9d06-18e9cc6afd29", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1062109402-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "17cd969b1e7d4bd795748560caf80077", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "89f807d9-140f-4a6f-8bce-96795f9482ee", "external-id": "nsx-vlan-transportzone-762", "segmentation_id": 762, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa8915627-fa", "ovs_interfaceid": "a8915627-fabb-4472-92ee-8c09b3c07a92", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1996.061642] env[62519]: DEBUG oslo_vmware.api [None req-844373a7-97dd-4804-b9b9-4b3b6986d93f tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Task: {'id': task-1803410, 'name': ReconfigVM_Task, 'duration_secs': 5.888818} completed 
successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1996.062047] env[62519]: DEBUG oslo_concurrency.lockutils [None req-844373a7-97dd-4804-b9b9-4b3b6986d93f tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Releasing lock "c884a374-ffb8-48db-97bb-d64a687694d5" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1996.062170] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-844373a7-97dd-4804-b9b9-4b3b6986d93f tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] [instance: c884a374-ffb8-48db-97bb-d64a687694d5] Reconfigured VM to detach interface {{(pid=62519) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1978}} [ 1996.170758] env[62519]: DEBUG oslo_vmware.api [None req-50cc0e8a-f13a-4a85-abd1-026e9f33ded0 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Task: {'id': task-1803424, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1996.178913] env[62519]: DEBUG oslo_vmware.api [None req-99d38bd8-efaa-4460-9801-774365a42e50 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Task: {'id': task-1803425, 'name': ReconfigVM_Task, 'duration_secs': 0.144884} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1996.179230] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-99d38bd8-efaa-4460-9801-774365a42e50 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] [instance: 9bf88b2f-63f9-466b-8669-45f17319055d] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-373871', 'volume_id': 'a0f01d64-f203-4e5a-88dd-8cca1d0cd19a', 'name': 'volume-a0f01d64-f203-4e5a-88dd-8cca1d0cd19a', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '9bf88b2f-63f9-466b-8669-45f17319055d', 'attached_at': '', 'detached_at': '', 'volume_id': 'a0f01d64-f203-4e5a-88dd-8cca1d0cd19a', 'serial': 'a0f01d64-f203-4e5a-88dd-8cca1d0cd19a'} {{(pid=62519) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1996.317528] env[62519]: DEBUG oslo_concurrency.lockutils [None req-99c36ef8-ebfa-4cae-b0fe-e8aa228c0e3f tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Releasing lock "refresh_cache-8b20b91d-d4e7-4ec2-88ec-76b444a8d8a8" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1996.372048] env[62519]: DEBUG oslo_vmware.api [None req-65ccd04b-b2b0-4788-8ffd-357ca8d5d18d tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52422219-430d-c0fb-2029-e30a79ab5f76, 'name': SearchDatastore_Task, 'duration_secs': 0.009851} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1996.374011] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-53433128-8ad4-4976-8a03-0a89107def8c {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1996.380292] env[62519]: DEBUG oslo_vmware.api [None req-65ccd04b-b2b0-4788-8ffd-357ca8d5d18d tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Waiting for the task: (returnval){ [ 1996.380292] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]525be05b-b235-6a0f-fb0a-dbccc4000d3a" [ 1996.380292] env[62519]: _type = "Task" [ 1996.380292] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1996.388736] env[62519]: DEBUG oslo_vmware.api [None req-65ccd04b-b2b0-4788-8ffd-357ca8d5d18d tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]525be05b-b235-6a0f-fb0a-dbccc4000d3a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1996.441654] env[62519]: DEBUG nova.network.neutron [-] [instance: 475a4dce-fa1c-4951-b87c-5b5ba6a0f1b0] Updating instance_info_cache with network_info: [] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1996.454883] env[62519]: DEBUG oslo_concurrency.lockutils [req-b84dfd07-d8aa-43da-a7f8-43a8af019a33 req-2be115df-4f29-4a7d-871a-e1ecebfb1423 service nova] Releasing lock "refresh_cache-c9399643-7d74-4064-a721-e6d038a5cef0" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1996.670913] env[62519]: DEBUG oslo_vmware.api [None req-50cc0e8a-f13a-4a85-abd1-026e9f33ded0 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Task: {'id': task-1803424, 'name': CreateSnapshot_Task, 'duration_secs': 0.811523} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1996.671195] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-50cc0e8a-f13a-4a85-abd1-026e9f33ded0 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] [instance: 47439070-54d8-454c-bf1d-7a2a33d82e9a] Created Snapshot of the VM instance {{(pid=62519) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1996.672144] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-408b5591-b740-4ef0-a1e4-858493b747fe {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1996.843706] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a8e9175-4363-43b6-8f7b-b97c97d60ced {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1996.864973] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa5b5ece-4c0b-44ca-982e-81d35cdef397 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1996.872342] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-99c36ef8-ebfa-4cae-b0fe-e8aa228c0e3f tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] [instance: 8b20b91d-d4e7-4ec2-88ec-76b444a8d8a8] Updating instance '8b20b91d-d4e7-4ec2-88ec-76b444a8d8a8' progress to 83 {{(pid=62519) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1996.890821] env[62519]: DEBUG oslo_vmware.api [None req-65ccd04b-b2b0-4788-8ffd-357ca8d5d18d tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]525be05b-b235-6a0f-fb0a-dbccc4000d3a, 'name': SearchDatastore_Task, 'duration_secs': 0.010918} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1996.890821] env[62519]: DEBUG oslo_concurrency.lockutils [None req-65ccd04b-b2b0-4788-8ffd-357ca8d5d18d tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Releasing lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1996.890821] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-65ccd04b-b2b0-4788-8ffd-357ca8d5d18d tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk to [datastore1] c9399643-7d74-4064-a721-e6d038a5cef0/c9399643-7d74-4064-a721-e6d038a5cef0.vmdk {{(pid=62519) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1996.891098] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-3e3a67f1-bd37-4f88-8a34-6b6c38ecf6c5 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1996.898283] env[62519]: DEBUG oslo_vmware.api [None req-65ccd04b-b2b0-4788-8ffd-357ca8d5d18d tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Waiting for the task: (returnval){ [ 1996.898283] env[62519]: value = "task-1803426" [ 1996.898283] env[62519]: _type = "Task" [ 1996.898283] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1996.906270] env[62519]: DEBUG oslo_vmware.api [None req-65ccd04b-b2b0-4788-8ffd-357ca8d5d18d tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Task: {'id': task-1803426, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1996.926207] env[62519]: DEBUG nova.compute.manager [req-aa329333-9c37-4307-bf53-a07595865c7e req-d1019c25-93c0-404f-abcc-17309b5d628c service nova] [instance: c884a374-ffb8-48db-97bb-d64a687694d5] Received event network-vif-deleted-ebf65ce3-5f98-44e5-95de-e607cb9b75e3 {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1996.927242] env[62519]: INFO nova.compute.manager [req-aa329333-9c37-4307-bf53-a07595865c7e req-d1019c25-93c0-404f-abcc-17309b5d628c service nova] [instance: c884a374-ffb8-48db-97bb-d64a687694d5] Neutron deleted interface ebf65ce3-5f98-44e5-95de-e607cb9b75e3; detaching it from the instance and deleting it from the info cache [ 1996.927582] env[62519]: DEBUG nova.network.neutron [req-aa329333-9c37-4307-bf53-a07595865c7e req-d1019c25-93c0-404f-abcc-17309b5d628c service nova] [instance: c884a374-ffb8-48db-97bb-d64a687694d5] Updating instance_info_cache with network_info: [{"id": "5a25cf20-af43-4653-8729-93e5e73c5891", "address": "fa:16:3e:9b:6d:c4", "network": {"id": "3996e7f6-f093-4152-af91-6fb77f32a1c5", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-2013009403-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.177", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "63a46158057949478e5c79fbe0d4d5d4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3093647a-bab7-4562-ada0-428725e8c0fc", "external-id": "nsx-vlan-transportzone-660", "segmentation_id": 660, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5a25cf20-af", "ovs_interfaceid": "5a25cf20-af43-4653-8729-93e5e73c5891", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "0f1a716e-2238-498b-9d31-8516cb0d084d", "address": "fa:16:3e:6b:ae:3d", "network": {"id": "3996e7f6-f093-4152-af91-6fb77f32a1c5", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-2013009403-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "63a46158057949478e5c79fbe0d4d5d4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3093647a-bab7-4562-ada0-428725e8c0fc", "external-id": "nsx-vlan-transportzone-660", "segmentation_id": 660, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0f1a716e-22", "ovs_interfaceid": "0f1a716e-2238-498b-9d31-8516cb0d084d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info 
/opt/stack/nova/nova/network/neutron.py:116}} [ 1996.944388] env[62519]: INFO nova.compute.manager [-] [instance: 475a4dce-fa1c-4951-b87c-5b5ba6a0f1b0] Took 1.53 seconds to deallocate network for instance. [ 1997.050926] env[62519]: DEBUG nova.compute.manager [req-87eeb391-24fe-4d95-ad48-b1ec19fea958 req-d30527b6-7875-4c07-bfaa-1b0d86a3c9a2 service nova] [instance: 475a4dce-fa1c-4951-b87c-5b5ba6a0f1b0] Received event network-vif-deleted-e05c875b-eb82-4265-a67c-6c626a2365be {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1997.051165] env[62519]: DEBUG nova.compute.manager [req-87eeb391-24fe-4d95-ad48-b1ec19fea958 req-d30527b6-7875-4c07-bfaa-1b0d86a3c9a2 service nova] [instance: c884a374-ffb8-48db-97bb-d64a687694d5] Received event network-vif-deleted-0f1a716e-2238-498b-9d31-8516cb0d084d {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 1997.051334] env[62519]: INFO nova.compute.manager [req-87eeb391-24fe-4d95-ad48-b1ec19fea958 req-d30527b6-7875-4c07-bfaa-1b0d86a3c9a2 service nova] [instance: c884a374-ffb8-48db-97bb-d64a687694d5] Neutron deleted interface 0f1a716e-2238-498b-9d31-8516cb0d084d; detaching it from the instance and deleting it from the info cache [ 1997.051575] env[62519]: DEBUG nova.network.neutron [req-87eeb391-24fe-4d95-ad48-b1ec19fea958 req-d30527b6-7875-4c07-bfaa-1b0d86a3c9a2 service nova] [instance: c884a374-ffb8-48db-97bb-d64a687694d5] Updating instance_info_cache with network_info: [{"id": "5a25cf20-af43-4653-8729-93e5e73c5891", "address": "fa:16:3e:9b:6d:c4", "network": {"id": "3996e7f6-f093-4152-af91-6fb77f32a1c5", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-2013009403-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.177", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "63a46158057949478e5c79fbe0d4d5d4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3093647a-bab7-4562-ada0-428725e8c0fc", "external-id": "nsx-vlan-transportzone-660", "segmentation_id": 660, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5a25cf20-af", "ovs_interfaceid": "5a25cf20-af43-4653-8729-93e5e73c5891", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1997.192966] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-50cc0e8a-f13a-4a85-abd1-026e9f33ded0 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] [instance: 47439070-54d8-454c-bf1d-7a2a33d82e9a] Creating linked-clone VM from snapshot {{(pid=62519) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1997.193842] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-0b2ce953-7455-44c5-8104-dd5270d2a9e5 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1997.205556] env[62519]: DEBUG 
oslo_vmware.api [None req-50cc0e8a-f13a-4a85-abd1-026e9f33ded0 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Waiting for the task: (returnval){ [ 1997.205556] env[62519]: value = "task-1803427" [ 1997.205556] env[62519]: _type = "Task" [ 1997.205556] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1997.214722] env[62519]: DEBUG oslo_vmware.api [None req-50cc0e8a-f13a-4a85-abd1-026e9f33ded0 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Task: {'id': task-1803427, 'name': CloneVM_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1997.216745] env[62519]: DEBUG nova.objects.instance [None req-99d38bd8-efaa-4460-9801-774365a42e50 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Lazy-loading 'flavor' on Instance uuid 9bf88b2f-63f9-466b-8669-45f17319055d {{(pid=62519) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1997.278010] env[62519]: DEBUG oslo_concurrency.lockutils [None req-844373a7-97dd-4804-b9b9-4b3b6986d93f tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Acquiring lock "refresh_cache-c884a374-ffb8-48db-97bb-d64a687694d5" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1997.278227] env[62519]: DEBUG oslo_concurrency.lockutils [None req-844373a7-97dd-4804-b9b9-4b3b6986d93f tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Acquired lock "refresh_cache-c884a374-ffb8-48db-97bb-d64a687694d5" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1997.278409] env[62519]: DEBUG nova.network.neutron [None req-844373a7-97dd-4804-b9b9-4b3b6986d93f tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] [instance: c884a374-ffb8-48db-97bb-d64a687694d5] Building network info cache for instance {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1997.381276] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-99c36ef8-ebfa-4cae-b0fe-e8aa228c0e3f tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] [instance: 8b20b91d-d4e7-4ec2-88ec-76b444a8d8a8] Powering on the VM {{(pid=62519) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1997.381657] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-3402f1e5-9c0d-476d-bcbd-5cc33dfca828 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1997.391702] env[62519]: DEBUG oslo_vmware.api [None req-99c36ef8-ebfa-4cae-b0fe-e8aa228c0e3f tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Waiting for the task: (returnval){ [ 1997.391702] env[62519]: value = "task-1803428" [ 1997.391702] env[62519]: _type = "Task" [ 1997.391702] env[62519]: } to complete. 
{{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1997.400229] env[62519]: DEBUG oslo_vmware.api [None req-99c36ef8-ebfa-4cae-b0fe-e8aa228c0e3f tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Task: {'id': task-1803428, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1997.409013] env[62519]: DEBUG oslo_vmware.api [None req-65ccd04b-b2b0-4788-8ffd-357ca8d5d18d tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Task: {'id': task-1803426, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.489017} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1997.409553] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-65ccd04b-b2b0-4788-8ffd-357ca8d5d18d tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk to [datastore1] c9399643-7d74-4064-a721-e6d038a5cef0/c9399643-7d74-4064-a721-e6d038a5cef0.vmdk {{(pid=62519) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1997.409553] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-65ccd04b-b2b0-4788-8ffd-357ca8d5d18d tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] [instance: c9399643-7d74-4064-a721-e6d038a5cef0] Extending root virtual disk to 1048576 {{(pid=62519) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1997.409808] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-aa94b3fa-ee35-4d40-9184-9d05a999c007 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1997.416427] env[62519]: DEBUG oslo_vmware.api [None req-65ccd04b-b2b0-4788-8ffd-357ca8d5d18d tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Waiting for the task: (returnval){ [ 1997.416427] env[62519]: value = "task-1803429" [ 1997.416427] env[62519]: _type = "Task" [ 1997.416427] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1997.426647] env[62519]: DEBUG oslo_vmware.api [None req-65ccd04b-b2b0-4788-8ffd-357ca8d5d18d tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Task: {'id': task-1803429, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1997.430374] env[62519]: DEBUG oslo_concurrency.lockutils [req-aa329333-9c37-4307-bf53-a07595865c7e req-d1019c25-93c0-404f-abcc-17309b5d628c service nova] Acquiring lock "c884a374-ffb8-48db-97bb-d64a687694d5" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1997.430525] env[62519]: DEBUG oslo_concurrency.lockutils [req-aa329333-9c37-4307-bf53-a07595865c7e req-d1019c25-93c0-404f-abcc-17309b5d628c service nova] Acquired lock "c884a374-ffb8-48db-97bb-d64a687694d5" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1997.431353] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-423fcacc-e87a-4875-93ff-898332bad59d {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1997.450512] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1878f857-54ff-45b9-822d-6b8eef619393 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1997.453958] env[62519]: DEBUG oslo_concurrency.lockutils [None req-ff17eaa8-c2af-45d2-af04-7fec7fbff74c tempest-ServerMetadataNegativeTestJSON-1024005537 tempest-ServerMetadataNegativeTestJSON-1024005537-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1997.454235] env[62519]: DEBUG oslo_concurrency.lockutils [None req-ff17eaa8-c2af-45d2-af04-7fec7fbff74c tempest-ServerMetadataNegativeTestJSON-1024005537 tempest-ServerMetadataNegativeTestJSON-1024005537-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1997.454473] env[62519]: DEBUG nova.objects.instance [None req-ff17eaa8-c2af-45d2-af04-7fec7fbff74c tempest-ServerMetadataNegativeTestJSON-1024005537 tempest-ServerMetadataNegativeTestJSON-1024005537-project-member] Lazy-loading 'resources' on Instance uuid 475a4dce-fa1c-4951-b87c-5b5ba6a0f1b0 {{(pid=62519) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1997.482191] env[62519]: DEBUG nova.virt.vmwareapi.vmops [req-aa329333-9c37-4307-bf53-a07595865c7e req-d1019c25-93c0-404f-abcc-17309b5d628c service nova] [instance: c884a374-ffb8-48db-97bb-d64a687694d5] Reconfiguring VM to detach interface {{(pid=62519) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1968}} [ 1997.483411] env[62519]: DEBUG oslo_concurrency.lockutils [None req-75a4d82d-ad50-423c-a7c8-9f45482e2140 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Acquiring lock "c884a374-ffb8-48db-97bb-d64a687694d5" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1997.483656] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2d120c8f-6ad8-4e3d-bc30-c39a9e49a6c8 {{(pid=62519) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1997.504957] env[62519]: DEBUG oslo_vmware.api [req-aa329333-9c37-4307-bf53-a07595865c7e req-d1019c25-93c0-404f-abcc-17309b5d628c service nova] Waiting for the task: (returnval){ [ 1997.504957] env[62519]: value = "task-1803430" [ 1997.504957] env[62519]: _type = "Task" [ 1997.504957] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1997.514666] env[62519]: DEBUG oslo_vmware.api [req-aa329333-9c37-4307-bf53-a07595865c7e req-d1019c25-93c0-404f-abcc-17309b5d628c service nova] Task: {'id': task-1803430, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1997.554338] env[62519]: DEBUG oslo_concurrency.lockutils [req-87eeb391-24fe-4d95-ad48-b1ec19fea958 req-d30527b6-7875-4c07-bfaa-1b0d86a3c9a2 service nova] Acquiring lock "c884a374-ffb8-48db-97bb-d64a687694d5" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1997.717122] env[62519]: DEBUG oslo_vmware.api [None req-50cc0e8a-f13a-4a85-abd1-026e9f33ded0 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Task: {'id': task-1803427, 'name': CloneVM_Task} progress is 94%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1997.721823] env[62519]: DEBUG oslo_concurrency.lockutils [None req-99d38bd8-efaa-4460-9801-774365a42e50 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Lock "9bf88b2f-63f9-466b-8669-45f17319055d" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.296s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1997.902872] env[62519]: DEBUG oslo_vmware.api [None req-99c36ef8-ebfa-4cae-b0fe-e8aa228c0e3f tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Task: {'id': task-1803428, 'name': PowerOnVM_Task, 'duration_secs': 0.419173} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1997.903180] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-99c36ef8-ebfa-4cae-b0fe-e8aa228c0e3f tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] [instance: 8b20b91d-d4e7-4ec2-88ec-76b444a8d8a8] Powered on the VM {{(pid=62519) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1997.903371] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-99c36ef8-ebfa-4cae-b0fe-e8aa228c0e3f tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] [instance: 8b20b91d-d4e7-4ec2-88ec-76b444a8d8a8] Updating instance '8b20b91d-d4e7-4ec2-88ec-76b444a8d8a8' progress to 100 {{(pid=62519) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1997.925790] env[62519]: DEBUG oslo_vmware.api [None req-65ccd04b-b2b0-4788-8ffd-357ca8d5d18d tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Task: {'id': task-1803429, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.083578} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1997.926065] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-65ccd04b-b2b0-4788-8ffd-357ca8d5d18d tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] [instance: c9399643-7d74-4064-a721-e6d038a5cef0] Extended root virtual disk {{(pid=62519) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1997.926994] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cafdb0c3-7fd7-4b8b-90ce-b11a4ec81d01 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1997.950612] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-65ccd04b-b2b0-4788-8ffd-357ca8d5d18d tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] [instance: c9399643-7d74-4064-a721-e6d038a5cef0] Reconfiguring VM instance instance-00000074 to attach disk [datastore1] c9399643-7d74-4064-a721-e6d038a5cef0/c9399643-7d74-4064-a721-e6d038a5cef0.vmdk or device None with type sparse {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1997.953341] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b5f91f76-0116-4edb-a4eb-15a748f6d2bd {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1997.978615] env[62519]: DEBUG oslo_vmware.api [None req-65ccd04b-b2b0-4788-8ffd-357ca8d5d18d tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Waiting for the task: (returnval){ [ 1997.978615] env[62519]: value = "task-1803431" [ 1997.978615] env[62519]: _type = "Task" [ 1997.978615] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1997.989990] env[62519]: DEBUG oslo_vmware.api [None req-65ccd04b-b2b0-4788-8ffd-357ca8d5d18d tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Task: {'id': task-1803431, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1998.018362] env[62519]: DEBUG oslo_vmware.api [req-aa329333-9c37-4307-bf53-a07595865c7e req-d1019c25-93c0-404f-abcc-17309b5d628c service nova] Task: {'id': task-1803430, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1998.044183] env[62519]: DEBUG nova.network.neutron [None req-844373a7-97dd-4804-b9b9-4b3b6986d93f tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] [instance: c884a374-ffb8-48db-97bb-d64a687694d5] Updating instance_info_cache with network_info: [{"id": "5a25cf20-af43-4653-8729-93e5e73c5891", "address": "fa:16:3e:9b:6d:c4", "network": {"id": "3996e7f6-f093-4152-af91-6fb77f32a1c5", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-2013009403-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.177", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "63a46158057949478e5c79fbe0d4d5d4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3093647a-bab7-4562-ada0-428725e8c0fc", "external-id": "nsx-vlan-transportzone-660", "segmentation_id": 660, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5a25cf20-af", "ovs_interfaceid": "5a25cf20-af43-4653-8729-93e5e73c5891", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1998.086188] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f42b8121-b7e2-4136-9da6-d020ec317537 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1998.095043] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d614633-4247-4580-92da-ca1dcfda9711 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1998.127612] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e203255-445d-415c-a610-55e9bfda5c2e {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1998.135642] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d207703-42d8-403f-a91a-15521d2925fb {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1998.149498] env[62519]: DEBUG nova.compute.provider_tree [None req-ff17eaa8-c2af-45d2-af04-7fec7fbff74c tempest-ServerMetadataNegativeTestJSON-1024005537 tempest-ServerMetadataNegativeTestJSON-1024005537-project-member] Inventory has not changed in ProviderTree for provider: f8ca0d98-9158-4b85-ae0e-b106f966dd44 {{(pid=62519) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1998.216617] env[62519]: DEBUG oslo_vmware.api [None req-50cc0e8a-f13a-4a85-abd1-026e9f33ded0 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Task: {'id': task-1803427, 'name': CloneVM_Task} progress is 94%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1998.490833] env[62519]: DEBUG oslo_vmware.api [None req-65ccd04b-b2b0-4788-8ffd-357ca8d5d18d tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Task: {'id': task-1803431, 'name': ReconfigVM_Task, 'duration_secs': 0.445117} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1998.491177] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-65ccd04b-b2b0-4788-8ffd-357ca8d5d18d tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] [instance: c9399643-7d74-4064-a721-e6d038a5cef0] Reconfigured VM instance instance-00000074 to attach disk [datastore1] c9399643-7d74-4064-a721-e6d038a5cef0/c9399643-7d74-4064-a721-e6d038a5cef0.vmdk or device None with type sparse {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1998.491865] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-6ba4ca9b-e734-4f06-8bd6-146c5f96263a {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1998.500135] env[62519]: DEBUG oslo_vmware.api [None req-65ccd04b-b2b0-4788-8ffd-357ca8d5d18d tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Waiting for the task: (returnval){ [ 1998.500135] env[62519]: value = "task-1803432" [ 1998.500135] env[62519]: _type = "Task" [ 1998.500135] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1998.511321] env[62519]: DEBUG oslo_vmware.api [None req-65ccd04b-b2b0-4788-8ffd-357ca8d5d18d tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Task: {'id': task-1803432, 'name': Rename_Task} progress is 5%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1998.520533] env[62519]: DEBUG oslo_vmware.api [req-aa329333-9c37-4307-bf53-a07595865c7e req-d1019c25-93c0-404f-abcc-17309b5d628c service nova] Task: {'id': task-1803430, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1998.548068] env[62519]: DEBUG oslo_concurrency.lockutils [None req-844373a7-97dd-4804-b9b9-4b3b6986d93f tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Releasing lock "refresh_cache-c884a374-ffb8-48db-97bb-d64a687694d5" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1998.565881] env[62519]: DEBUG nova.compute.manager [None req-610bfd91-d15f-42f9-8b6d-10d9ffd4a84b tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] [instance: 9bf88b2f-63f9-466b-8669-45f17319055d] Stashing vm_state: active {{(pid=62519) _prep_resize /opt/stack/nova/nova/compute/manager.py:6081}} [ 1998.653274] env[62519]: DEBUG nova.scheduler.client.report [None req-ff17eaa8-c2af-45d2-af04-7fec7fbff74c tempest-ServerMetadataNegativeTestJSON-1024005537 tempest-ServerMetadataNegativeTestJSON-1024005537-project-member] Inventory has not changed for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 159, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1998.716874] env[62519]: DEBUG oslo_vmware.api [None req-50cc0e8a-f13a-4a85-abd1-026e9f33ded0 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Task: {'id': task-1803427, 'name': CloneVM_Task, 'duration_secs': 1.416099} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1998.717243] env[62519]: INFO nova.virt.vmwareapi.vmops [None req-50cc0e8a-f13a-4a85-abd1-026e9f33ded0 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] [instance: 47439070-54d8-454c-bf1d-7a2a33d82e9a] Created linked-clone VM from snapshot [ 1998.718038] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-342962e2-dce8-4640-bb97-88241ea16084 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1998.725485] env[62519]: DEBUG nova.virt.vmwareapi.images [None req-50cc0e8a-f13a-4a85-abd1-026e9f33ded0 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] [instance: 47439070-54d8-454c-bf1d-7a2a33d82e9a] Uploading image f3e835e2-df44-417d-9a40-88b6c8346b2f {{(pid=62519) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1998.754500] env[62519]: DEBUG oslo_vmware.rw_handles [None req-50cc0e8a-f13a-4a85-abd1-026e9f33ded0 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1998.754500] env[62519]: value = "vm-373874" [ 1998.754500] env[62519]: _type = "VirtualMachine" [ 1998.754500] env[62519]: }. 
{{(pid=62519) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1998.754777] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-fe6ebc81-f289-4b23-8a1e-403a7128417f {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1998.762812] env[62519]: DEBUG oslo_vmware.rw_handles [None req-50cc0e8a-f13a-4a85-abd1-026e9f33ded0 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Lease: (returnval){ [ 1998.762812] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]5256ccdc-0bf6-df4a-247c-8c23f44612b0" [ 1998.762812] env[62519]: _type = "HttpNfcLease" [ 1998.762812] env[62519]: } obtained for exporting VM: (result){ [ 1998.762812] env[62519]: value = "vm-373874" [ 1998.762812] env[62519]: _type = "VirtualMachine" [ 1998.762812] env[62519]: }. {{(pid=62519) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1998.763147] env[62519]: DEBUG oslo_vmware.api [None req-50cc0e8a-f13a-4a85-abd1-026e9f33ded0 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Waiting for the lease: (returnval){ [ 1998.763147] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]5256ccdc-0bf6-df4a-247c-8c23f44612b0" [ 1998.763147] env[62519]: _type = "HttpNfcLease" [ 1998.763147] env[62519]: } to be ready. {{(pid=62519) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1998.770341] env[62519]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1998.770341] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]5256ccdc-0bf6-df4a-247c-8c23f44612b0" [ 1998.770341] env[62519]: _type = "HttpNfcLease" [ 1998.770341] env[62519]: } is initializing. {{(pid=62519) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1999.010947] env[62519]: DEBUG oslo_vmware.api [None req-65ccd04b-b2b0-4788-8ffd-357ca8d5d18d tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Task: {'id': task-1803432, 'name': Rename_Task, 'duration_secs': 0.162859} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1999.011333] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-65ccd04b-b2b0-4788-8ffd-357ca8d5d18d tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] [instance: c9399643-7d74-4064-a721-e6d038a5cef0] Powering on the VM {{(pid=62519) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1999.014219] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-201da14e-0ea1-4cfa-b2e9-b5c32cd44c5b {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1999.020853] env[62519]: DEBUG oslo_vmware.api [req-aa329333-9c37-4307-bf53-a07595865c7e req-d1019c25-93c0-404f-abcc-17309b5d628c service nova] Task: {'id': task-1803430, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1999.022067] env[62519]: DEBUG oslo_vmware.api [None req-65ccd04b-b2b0-4788-8ffd-357ca8d5d18d tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Waiting for the task: (returnval){ [ 1999.022067] env[62519]: value = "task-1803434" [ 1999.022067] env[62519]: _type = "Task" [ 1999.022067] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1999.029269] env[62519]: DEBUG oslo_vmware.api [None req-65ccd04b-b2b0-4788-8ffd-357ca8d5d18d tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Task: {'id': task-1803434, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1999.051656] env[62519]: DEBUG oslo_concurrency.lockutils [None req-844373a7-97dd-4804-b9b9-4b3b6986d93f tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Lock "interface-c884a374-ffb8-48db-97bb-d64a687694d5-0f1a716e-2238-498b-9d31-8516cb0d084d" "released" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: held 9.598s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1999.085403] env[62519]: DEBUG oslo_concurrency.lockutils [None req-610bfd91-d15f-42f9-8b6d-10d9ffd4a84b tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1999.159251] env[62519]: DEBUG oslo_concurrency.lockutils [None req-ff17eaa8-c2af-45d2-af04-7fec7fbff74c tempest-ServerMetadataNegativeTestJSON-1024005537 tempest-ServerMetadataNegativeTestJSON-1024005537-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.705s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1999.161719] env[62519]: DEBUG oslo_concurrency.lockutils [None req-610bfd91-d15f-42f9-8b6d-10d9ffd4a84b tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 0.076s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1999.178817] env[62519]: INFO nova.scheduler.client.report [None req-ff17eaa8-c2af-45d2-af04-7fec7fbff74c tempest-ServerMetadataNegativeTestJSON-1024005537 tempest-ServerMetadataNegativeTestJSON-1024005537-project-member] Deleted allocations for instance 475a4dce-fa1c-4951-b87c-5b5ba6a0f1b0 [ 1999.273085] env[62519]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1999.273085] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]5256ccdc-0bf6-df4a-247c-8c23f44612b0" [ 1999.273085] env[62519]: _type = "HttpNfcLease" [ 1999.273085] env[62519]: } is ready. 
{{(pid=62519) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1999.273500] env[62519]: DEBUG oslo_vmware.rw_handles [None req-50cc0e8a-f13a-4a85-abd1-026e9f33ded0 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1999.273500] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]5256ccdc-0bf6-df4a-247c-8c23f44612b0" [ 1999.273500] env[62519]: _type = "HttpNfcLease" [ 1999.273500] env[62519]: }. {{(pid=62519) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1999.274207] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48087d0f-beb2-44df-9a56-4886b5652820 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1999.281854] env[62519]: DEBUG oslo_vmware.rw_handles [None req-50cc0e8a-f13a-4a85-abd1-026e9f33ded0 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/522cc263-d041-1ada-bd90-589b29845fa3/disk-0.vmdk from lease info. {{(pid=62519) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1999.282068] env[62519]: DEBUG oslo_vmware.rw_handles [None req-50cc0e8a-f13a-4a85-abd1-026e9f33ded0 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Opening URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/522cc263-d041-1ada-bd90-589b29845fa3/disk-0.vmdk for reading. {{(pid=62519) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1999.371072] env[62519]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-4a8facba-2849-4d90-bef2-8d870f4ea7ae {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1999.521624] env[62519]: DEBUG oslo_vmware.api [req-aa329333-9c37-4307-bf53-a07595865c7e req-d1019c25-93c0-404f-abcc-17309b5d628c service nova] Task: {'id': task-1803430, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1999.531921] env[62519]: DEBUG oslo_vmware.api [None req-65ccd04b-b2b0-4788-8ffd-357ca8d5d18d tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Task: {'id': task-1803434, 'name': PowerOnVM_Task} progress is 33%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1999.667456] env[62519]: INFO nova.compute.claims [None req-610bfd91-d15f-42f9-8b6d-10d9ffd4a84b tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] [instance: 9bf88b2f-63f9-466b-8669-45f17319055d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1999.685873] env[62519]: DEBUG oslo_concurrency.lockutils [None req-ff17eaa8-c2af-45d2-af04-7fec7fbff74c tempest-ServerMetadataNegativeTestJSON-1024005537 tempest-ServerMetadataNegativeTestJSON-1024005537-project-member] Lock "475a4dce-fa1c-4951-b87c-5b5ba6a0f1b0" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.935s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2000.030391] env[62519]: DEBUG oslo_vmware.api [req-aa329333-9c37-4307-bf53-a07595865c7e req-d1019c25-93c0-404f-abcc-17309b5d628c service nova] Task: {'id': task-1803430, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2000.036938] env[62519]: DEBUG oslo_vmware.api [None req-65ccd04b-b2b0-4788-8ffd-357ca8d5d18d tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Task: {'id': task-1803434, 'name': PowerOnVM_Task} progress is 33%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2000.174169] env[62519]: INFO nova.compute.resource_tracker [None req-610bfd91-d15f-42f9-8b6d-10d9ffd4a84b tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] [instance: 9bf88b2f-63f9-466b-8669-45f17319055d] Updating resource usage from migration ecebd4e9-7aad-4915-8ba8-7e59c0759308 [ 2000.306828] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d021865a-2353-40f4-b436-27d9fe14eacc {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2000.316407] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f282794-b49f-4a9b-b216-e2a067f1abf7 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2000.352305] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1eebda7-e1a8-4093-a49b-d1e64e80cfcb {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2000.362350] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a662be5b-e894-4ca3-92f6-cd1c570f8420 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2000.382932] env[62519]: DEBUG nova.compute.provider_tree [None req-610bfd91-d15f-42f9-8b6d-10d9ffd4a84b tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Inventory has not changed in ProviderTree for provider: f8ca0d98-9158-4b85-ae0e-b106f966dd44 {{(pid=62519) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2000.522460] env[62519]: DEBUG oslo_vmware.api [req-aa329333-9c37-4307-bf53-a07595865c7e req-d1019c25-93c0-404f-abcc-17309b5d628c service nova] Task: 
{'id': task-1803430, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2000.532822] env[62519]: DEBUG oslo_vmware.api [None req-65ccd04b-b2b0-4788-8ffd-357ca8d5d18d tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Task: {'id': task-1803434, 'name': PowerOnVM_Task} progress is 88%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2000.622783] env[62519]: DEBUG oslo_concurrency.lockutils [None req-993607c4-9323-4c27-98aa-b4453ca6fe97 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Acquiring lock "8b20b91d-d4e7-4ec2-88ec-76b444a8d8a8" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2000.623062] env[62519]: DEBUG oslo_concurrency.lockutils [None req-993607c4-9323-4c27-98aa-b4453ca6fe97 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Lock "8b20b91d-d4e7-4ec2-88ec-76b444a8d8a8" acquired by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2000.623342] env[62519]: DEBUG nova.compute.manager [None req-993607c4-9323-4c27-98aa-b4453ca6fe97 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] [instance: 8b20b91d-d4e7-4ec2-88ec-76b444a8d8a8] Going to confirm migration 8 {{(pid=62519) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:5235}} [ 2000.886517] env[62519]: DEBUG nova.scheduler.client.report [None req-610bfd91-d15f-42f9-8b6d-10d9ffd4a84b tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Inventory has not changed for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 159, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2001.033479] env[62519]: DEBUG oslo_vmware.api [req-aa329333-9c37-4307-bf53-a07595865c7e req-d1019c25-93c0-404f-abcc-17309b5d628c service nova] Task: {'id': task-1803430, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2001.040177] env[62519]: DEBUG oslo_vmware.api [None req-65ccd04b-b2b0-4788-8ffd-357ca8d5d18d tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Task: {'id': task-1803434, 'name': PowerOnVM_Task, 'duration_secs': 1.682082} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2001.040697] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-65ccd04b-b2b0-4788-8ffd-357ca8d5d18d tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] [instance: c9399643-7d74-4064-a721-e6d038a5cef0] Powered on the VM {{(pid=62519) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2001.040993] env[62519]: INFO nova.compute.manager [None req-65ccd04b-b2b0-4788-8ffd-357ca8d5d18d tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] [instance: c9399643-7d74-4064-a721-e6d038a5cef0] Took 8.26 seconds to spawn the instance on the hypervisor. [ 2001.041237] env[62519]: DEBUG nova.compute.manager [None req-65ccd04b-b2b0-4788-8ffd-357ca8d5d18d tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] [instance: c9399643-7d74-4064-a721-e6d038a5cef0] Checking state {{(pid=62519) _get_power_state /opt/stack/nova/nova/compute/manager.py:1799}} [ 2001.042495] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6d8725e-4178-4085-adee-f605a6d47281 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2001.238773] env[62519]: DEBUG oslo_concurrency.lockutils [None req-993607c4-9323-4c27-98aa-b4453ca6fe97 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Acquiring lock "refresh_cache-8b20b91d-d4e7-4ec2-88ec-76b444a8d8a8" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2001.239082] env[62519]: DEBUG oslo_concurrency.lockutils [None req-993607c4-9323-4c27-98aa-b4453ca6fe97 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Acquired lock "refresh_cache-8b20b91d-d4e7-4ec2-88ec-76b444a8d8a8" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2001.239321] env[62519]: DEBUG nova.network.neutron [None req-993607c4-9323-4c27-98aa-b4453ca6fe97 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] [instance: 8b20b91d-d4e7-4ec2-88ec-76b444a8d8a8] Building network info cache for instance {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2001.239651] env[62519]: DEBUG nova.objects.instance [None req-993607c4-9323-4c27-98aa-b4453ca6fe97 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Lazy-loading 'info_cache' on Instance uuid 8b20b91d-d4e7-4ec2-88ec-76b444a8d8a8 {{(pid=62519) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2001.392059] env[62519]: DEBUG oslo_concurrency.lockutils [None req-610bfd91-d15f-42f9-8b6d-10d9ffd4a84b tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.230s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2001.392399] env[62519]: INFO nova.compute.manager [None req-610bfd91-d15f-42f9-8b6d-10d9ffd4a84b tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] [instance: 9bf88b2f-63f9-466b-8669-45f17319055d] 
Migrating [ 2001.527194] env[62519]: DEBUG oslo_vmware.api [req-aa329333-9c37-4307-bf53-a07595865c7e req-d1019c25-93c0-404f-abcc-17309b5d628c service nova] Task: {'id': task-1803430, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2001.570970] env[62519]: INFO nova.compute.manager [None req-65ccd04b-b2b0-4788-8ffd-357ca8d5d18d tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] [instance: c9399643-7d74-4064-a721-e6d038a5cef0] Took 15.36 seconds to build instance. [ 2001.914292] env[62519]: DEBUG oslo_concurrency.lockutils [None req-610bfd91-d15f-42f9-8b6d-10d9ffd4a84b tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Acquiring lock "refresh_cache-9bf88b2f-63f9-466b-8669-45f17319055d" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2001.914538] env[62519]: DEBUG oslo_concurrency.lockutils [None req-610bfd91-d15f-42f9-8b6d-10d9ffd4a84b tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Acquired lock "refresh_cache-9bf88b2f-63f9-466b-8669-45f17319055d" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2001.914730] env[62519]: DEBUG nova.network.neutron [None req-610bfd91-d15f-42f9-8b6d-10d9ffd4a84b tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] [instance: 9bf88b2f-63f9-466b-8669-45f17319055d] Building network info cache for instance {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2001.992964] env[62519]: DEBUG nova.compute.manager [req-295aceab-bf02-41fd-9b21-f4c9749c2c42 req-f8a17d3a-6992-4808-b4b7-36dfdb6c3ea3 service nova] [instance: c9399643-7d74-4064-a721-e6d038a5cef0] Received event network-changed-a8915627-fabb-4472-92ee-8c09b3c07a92 {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 2001.993328] env[62519]: DEBUG nova.compute.manager [req-295aceab-bf02-41fd-9b21-f4c9749c2c42 req-f8a17d3a-6992-4808-b4b7-36dfdb6c3ea3 service nova] [instance: c9399643-7d74-4064-a721-e6d038a5cef0] Refreshing instance network info cache due to event network-changed-a8915627-fabb-4472-92ee-8c09b3c07a92. 
{{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11628}} [ 2001.993455] env[62519]: DEBUG oslo_concurrency.lockutils [req-295aceab-bf02-41fd-9b21-f4c9749c2c42 req-f8a17d3a-6992-4808-b4b7-36dfdb6c3ea3 service nova] Acquiring lock "refresh_cache-c9399643-7d74-4064-a721-e6d038a5cef0" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2001.993598] env[62519]: DEBUG oslo_concurrency.lockutils [req-295aceab-bf02-41fd-9b21-f4c9749c2c42 req-f8a17d3a-6992-4808-b4b7-36dfdb6c3ea3 service nova] Acquired lock "refresh_cache-c9399643-7d74-4064-a721-e6d038a5cef0" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2001.993761] env[62519]: DEBUG nova.network.neutron [req-295aceab-bf02-41fd-9b21-f4c9749c2c42 req-f8a17d3a-6992-4808-b4b7-36dfdb6c3ea3 service nova] [instance: c9399643-7d74-4064-a721-e6d038a5cef0] Refreshing network info cache for port a8915627-fabb-4472-92ee-8c09b3c07a92 {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2002.031737] env[62519]: DEBUG oslo_vmware.api [req-aa329333-9c37-4307-bf53-a07595865c7e req-d1019c25-93c0-404f-abcc-17309b5d628c service nova] Task: {'id': task-1803430, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2002.526519] env[62519]: DEBUG oslo_vmware.api [req-aa329333-9c37-4307-bf53-a07595865c7e req-d1019c25-93c0-404f-abcc-17309b5d628c service nova] Task: {'id': task-1803430, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2002.747011] env[62519]: DEBUG nova.network.neutron [None req-993607c4-9323-4c27-98aa-b4453ca6fe97 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] [instance: 8b20b91d-d4e7-4ec2-88ec-76b444a8d8a8] Updating instance_info_cache with network_info: [{"id": "c3e2a054-4826-4bd6-8c9e-74005e7912e4", "address": "fa:16:3e:5b:29:03", "network": {"id": "40fc0e4f-fe1a-4b8c-ace4-4612cfc1a8fc", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-364463489-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.170", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "12977ed65a1b410a987b049e9d1dce3e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8e028024-a9c1-4cae-8849-ea770a7ae0e4", "external-id": "nsx-vlan-transportzone-919", "segmentation_id": 919, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc3e2a054-48", "ovs_interfaceid": "c3e2a054-4826-4bd6-8c9e-74005e7912e4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2003.022836] env[62519]: DEBUG nova.network.neutron [None req-610bfd91-d15f-42f9-8b6d-10d9ffd4a84b tempest-ServerActionsTestOtherB-1162239603 
tempest-ServerActionsTestOtherB-1162239603-project-member] [instance: 9bf88b2f-63f9-466b-8669-45f17319055d] Updating instance_info_cache with network_info: [{"id": "2ee072d4-2bdb-4a83-90dd-06086f515634", "address": "fa:16:3e:e6:86:ec", "network": {"id": "32c7792c-9705-4830-bdbc-4c4159d566f0", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-374750788-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.241", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d73e71476254453fb23164dce09c6d41", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a0a76279-3c11-4bef-b124-2a2ee13fa377", "external-id": "nsx-vlan-transportzone-738", "segmentation_id": 738, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2ee072d4-2b", "ovs_interfaceid": "2ee072d4-2bdb-4a83-90dd-06086f515634", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2003.027684] env[62519]: DEBUG oslo_vmware.api [req-aa329333-9c37-4307-bf53-a07595865c7e req-d1019c25-93c0-404f-abcc-17309b5d628c service nova] Task: {'id': task-1803430, 'name': ReconfigVM_Task} progress is 18%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2003.029130] env[62519]: DEBUG nova.network.neutron [req-295aceab-bf02-41fd-9b21-f4c9749c2c42 req-f8a17d3a-6992-4808-b4b7-36dfdb6c3ea3 service nova] [instance: c9399643-7d74-4064-a721-e6d038a5cef0] Updated VIF entry in instance network info cache for port a8915627-fabb-4472-92ee-8c09b3c07a92. 
{{(pid=62519) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2003.029459] env[62519]: DEBUG nova.network.neutron [req-295aceab-bf02-41fd-9b21-f4c9749c2c42 req-f8a17d3a-6992-4808-b4b7-36dfdb6c3ea3 service nova] [instance: c9399643-7d74-4064-a721-e6d038a5cef0] Updating instance_info_cache with network_info: [{"id": "a8915627-fabb-4472-92ee-8c09b3c07a92", "address": "fa:16:3e:55:40:95", "network": {"id": "06bc21fc-5712-4650-9d06-18e9cc6afd29", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1062109402-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.167", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "17cd969b1e7d4bd795748560caf80077", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "89f807d9-140f-4a6f-8bce-96795f9482ee", "external-id": "nsx-vlan-transportzone-762", "segmentation_id": 762, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa8915627-fa", "ovs_interfaceid": "a8915627-fabb-4472-92ee-8c09b3c07a92", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2003.077793] env[62519]: DEBUG oslo_concurrency.lockutils [None req-65ccd04b-b2b0-4788-8ffd-357ca8d5d18d tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Lock "c9399643-7d74-4064-a721-e6d038a5cef0" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 17.877s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2003.254799] env[62519]: DEBUG oslo_concurrency.lockutils [None req-993607c4-9323-4c27-98aa-b4453ca6fe97 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Releasing lock "refresh_cache-8b20b91d-d4e7-4ec2-88ec-76b444a8d8a8" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2003.255112] env[62519]: DEBUG nova.objects.instance [None req-993607c4-9323-4c27-98aa-b4453ca6fe97 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Lazy-loading 'migration_context' on Instance uuid 8b20b91d-d4e7-4ec2-88ec-76b444a8d8a8 {{(pid=62519) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2003.526192] env[62519]: DEBUG oslo_vmware.api [req-aa329333-9c37-4307-bf53-a07595865c7e req-d1019c25-93c0-404f-abcc-17309b5d628c service nova] Task: {'id': task-1803430, 'name': ReconfigVM_Task, 'duration_secs': 5.812288} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2003.526811] env[62519]: DEBUG oslo_concurrency.lockutils [req-aa329333-9c37-4307-bf53-a07595865c7e req-d1019c25-93c0-404f-abcc-17309b5d628c service nova] Releasing lock "c884a374-ffb8-48db-97bb-d64a687694d5" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2003.527090] env[62519]: DEBUG nova.virt.vmwareapi.vmops [req-aa329333-9c37-4307-bf53-a07595865c7e req-d1019c25-93c0-404f-abcc-17309b5d628c service nova] [instance: c884a374-ffb8-48db-97bb-d64a687694d5] Reconfigured VM to detach interface {{(pid=62519) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1978}} [ 2003.527533] env[62519]: DEBUG oslo_concurrency.lockutils [None req-75a4d82d-ad50-423c-a7c8-9f45482e2140 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Lock "c884a374-ffb8-48db-97bb-d64a687694d5" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 6.044s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2003.527854] env[62519]: DEBUG oslo_concurrency.lockutils [None req-75a4d82d-ad50-423c-a7c8-9f45482e2140 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Acquiring lock "c884a374-ffb8-48db-97bb-d64a687694d5-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2003.527957] env[62519]: DEBUG oslo_concurrency.lockutils [None req-75a4d82d-ad50-423c-a7c8-9f45482e2140 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Lock "c884a374-ffb8-48db-97bb-d64a687694d5-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2003.528139] env[62519]: DEBUG oslo_concurrency.lockutils [None req-75a4d82d-ad50-423c-a7c8-9f45482e2140 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Lock "c884a374-ffb8-48db-97bb-d64a687694d5-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2003.529911] env[62519]: DEBUG oslo_concurrency.lockutils [None req-610bfd91-d15f-42f9-8b6d-10d9ffd4a84b tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Releasing lock "refresh_cache-9bf88b2f-63f9-466b-8669-45f17319055d" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2003.531325] env[62519]: INFO nova.compute.manager [None req-75a4d82d-ad50-423c-a7c8-9f45482e2140 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] [instance: c884a374-ffb8-48db-97bb-d64a687694d5] Terminating instance [ 2003.532820] env[62519]: DEBUG oslo_concurrency.lockutils [req-295aceab-bf02-41fd-9b21-f4c9749c2c42 req-f8a17d3a-6992-4808-b4b7-36dfdb6c3ea3 service nova] Releasing lock "refresh_cache-c9399643-7d74-4064-a721-e6d038a5cef0" {{(pid=62519) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2003.758709] env[62519]: DEBUG nova.objects.base [None req-993607c4-9323-4c27-98aa-b4453ca6fe97 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Object Instance<8b20b91d-d4e7-4ec2-88ec-76b444a8d8a8> lazy-loaded attributes: info_cache,migration_context {{(pid=62519) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 2003.759691] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-789be5dd-31b5-411b-b59c-380415e18d63 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2003.779996] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c62d096b-dc85-48cc-8d79-42686c8c93d7 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2003.786340] env[62519]: DEBUG oslo_vmware.api [None req-993607c4-9323-4c27-98aa-b4453ca6fe97 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Waiting for the task: (returnval){ [ 2003.786340] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]52cd4263-05a0-19ce-ab68-20ad5793f2db" [ 2003.786340] env[62519]: _type = "Task" [ 2003.786340] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2003.795559] env[62519]: DEBUG oslo_vmware.api [None req-993607c4-9323-4c27-98aa-b4453ca6fe97 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52cd4263-05a0-19ce-ab68-20ad5793f2db, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2004.037742] env[62519]: DEBUG nova.compute.manager [None req-75a4d82d-ad50-423c-a7c8-9f45482e2140 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] [instance: c884a374-ffb8-48db-97bb-d64a687694d5] Start destroying the instance on the hypervisor. 
{{(pid=62519) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3166}} [ 2004.038016] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-75a4d82d-ad50-423c-a7c8-9f45482e2140 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] [instance: c884a374-ffb8-48db-97bb-d64a687694d5] Destroying instance {{(pid=62519) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2004.039159] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0457992e-13bc-42f0-950a-7288bdfec6b9 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2004.047371] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-75a4d82d-ad50-423c-a7c8-9f45482e2140 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] [instance: c884a374-ffb8-48db-97bb-d64a687694d5] Powering off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2004.047654] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-6bb76cd8-0ae3-4787-9fd4-1a75cc5f04a8 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2004.055725] env[62519]: DEBUG oslo_vmware.api [None req-75a4d82d-ad50-423c-a7c8-9f45482e2140 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Waiting for the task: (returnval){ [ 2004.055725] env[62519]: value = "task-1803435" [ 2004.055725] env[62519]: _type = "Task" [ 2004.055725] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2004.065821] env[62519]: DEBUG oslo_vmware.api [None req-75a4d82d-ad50-423c-a7c8-9f45482e2140 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Task: {'id': task-1803435, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2004.299173] env[62519]: DEBUG oslo_vmware.api [None req-993607c4-9323-4c27-98aa-b4453ca6fe97 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52cd4263-05a0-19ce-ab68-20ad5793f2db, 'name': SearchDatastore_Task, 'duration_secs': 0.010057} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2004.299173] env[62519]: DEBUG oslo_concurrency.lockutils [None req-993607c4-9323-4c27-98aa-b4453ca6fe97 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2004.299173] env[62519]: DEBUG oslo_concurrency.lockutils [None req-993607c4-9323-4c27-98aa-b4453ca6fe97 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2004.569438] env[62519]: DEBUG oslo_vmware.api [None req-75a4d82d-ad50-423c-a7c8-9f45482e2140 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Task: {'id': task-1803435, 'name': PowerOffVM_Task, 'duration_secs': 0.403626} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2004.570042] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-75a4d82d-ad50-423c-a7c8-9f45482e2140 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] [instance: c884a374-ffb8-48db-97bb-d64a687694d5] Powered off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2004.570233] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-75a4d82d-ad50-423c-a7c8-9f45482e2140 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] [instance: c884a374-ffb8-48db-97bb-d64a687694d5] Unregistering the VM {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2004.570504] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-56edf916-e511-4dc8-9ebd-ecf6422e7c78 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2004.667700] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-75a4d82d-ad50-423c-a7c8-9f45482e2140 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] [instance: c884a374-ffb8-48db-97bb-d64a687694d5] Unregistered the VM {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2004.668209] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-75a4d82d-ad50-423c-a7c8-9f45482e2140 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] [instance: c884a374-ffb8-48db-97bb-d64a687694d5] Deleting contents of the VM from datastore datastore1 {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2004.668467] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-75a4d82d-ad50-423c-a7c8-9f45482e2140 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Deleting the datastore file [datastore1] c884a374-ffb8-48db-97bb-d64a687694d5 {{(pid=62519) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2004.668911] env[62519]: DEBUG oslo_vmware.service [-] Invoking 
FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ebbf8b0c-3b95-4fd7-8290-afcfa28345a0 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2004.675956] env[62519]: DEBUG oslo_vmware.api [None req-75a4d82d-ad50-423c-a7c8-9f45482e2140 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Waiting for the task: (returnval){ [ 2004.675956] env[62519]: value = "task-1803437" [ 2004.675956] env[62519]: _type = "Task" [ 2004.675956] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2004.685231] env[62519]: DEBUG oslo_vmware.api [None req-75a4d82d-ad50-423c-a7c8-9f45482e2140 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Task: {'id': task-1803437, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2004.926609] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a6c3820-b95d-4b0e-ac32-bad1ad483509 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2004.934945] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f605a8de-9057-4c94-aa8f-fc8fd0fc54c8 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2004.969138] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b11ee12-471d-4d1f-a6cd-120f4b0307fe {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2004.981053] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-145a21ea-c61a-48be-9491-296ad3cd6da4 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2005.001023] env[62519]: DEBUG nova.compute.provider_tree [None req-993607c4-9323-4c27-98aa-b4453ca6fe97 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Inventory has not changed in ProviderTree for provider: f8ca0d98-9158-4b85-ae0e-b106f966dd44 {{(pid=62519) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2005.048832] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5437bc46-2ea4-4af6-bdb6-434877808e62 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2005.072045] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-610bfd91-d15f-42f9-8b6d-10d9ffd4a84b tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] [instance: 9bf88b2f-63f9-466b-8669-45f17319055d] Updating instance '9bf88b2f-63f9-466b-8669-45f17319055d' progress to 0 {{(pid=62519) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2005.187585] env[62519]: DEBUG oslo_vmware.api [None req-75a4d82d-ad50-423c-a7c8-9f45482e2140 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Task: {'id': task-1803437, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.353762} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2005.188782] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-75a4d82d-ad50-423c-a7c8-9f45482e2140 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Deleted the datastore file {{(pid=62519) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2005.188782] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-75a4d82d-ad50-423c-a7c8-9f45482e2140 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] [instance: c884a374-ffb8-48db-97bb-d64a687694d5] Deleted contents of the VM from datastore datastore1 {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2005.188782] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-75a4d82d-ad50-423c-a7c8-9f45482e2140 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] [instance: c884a374-ffb8-48db-97bb-d64a687694d5] Instance destroyed {{(pid=62519) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2005.188782] env[62519]: INFO nova.compute.manager [None req-75a4d82d-ad50-423c-a7c8-9f45482e2140 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] [instance: c884a374-ffb8-48db-97bb-d64a687694d5] Took 1.15 seconds to destroy the instance on the hypervisor. [ 2005.188782] env[62519]: DEBUG oslo.service.loopingcall [None req-75a4d82d-ad50-423c-a7c8-9f45482e2140 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62519) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2005.189197] env[62519]: DEBUG nova.compute.manager [-] [instance: c884a374-ffb8-48db-97bb-d64a687694d5] Deallocating network for instance {{(pid=62519) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2297}} [ 2005.189197] env[62519]: DEBUG nova.network.neutron [-] [instance: c884a374-ffb8-48db-97bb-d64a687694d5] deallocate_for_instance() {{(pid=62519) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2005.504586] env[62519]: DEBUG nova.scheduler.client.report [None req-993607c4-9323-4c27-98aa-b4453ca6fe97 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Inventory has not changed for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 159, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2005.578627] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-610bfd91-d15f-42f9-8b6d-10d9ffd4a84b tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] [instance: 9bf88b2f-63f9-466b-8669-45f17319055d] Powering off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2005.578981] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e2b2e2ae-5763-4d2c-a584-1776bdb2e4f2 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2005.587622] env[62519]: DEBUG oslo_vmware.api [None req-610bfd91-d15f-42f9-8b6d-10d9ffd4a84b tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Waiting for the task: (returnval){ [ 2005.587622] env[62519]: value = "task-1803438" [ 2005.587622] env[62519]: _type = "Task" [ 2005.587622] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2005.598350] env[62519]: DEBUG oslo_vmware.api [None req-610bfd91-d15f-42f9-8b6d-10d9ffd4a84b tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Task: {'id': task-1803438, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2005.758056] env[62519]: DEBUG nova.compute.manager [req-8bdffe07-4d1d-437f-bd88-e15bad9e360b req-c9affb49-f3a4-4f4d-83e8-2e0599a18457 service nova] [instance: c884a374-ffb8-48db-97bb-d64a687694d5] Received event network-vif-deleted-5a25cf20-af43-4653-8729-93e5e73c5891 {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 2005.758286] env[62519]: INFO nova.compute.manager [req-8bdffe07-4d1d-437f-bd88-e15bad9e360b req-c9affb49-f3a4-4f4d-83e8-2e0599a18457 service nova] [instance: c884a374-ffb8-48db-97bb-d64a687694d5] Neutron deleted interface 5a25cf20-af43-4653-8729-93e5e73c5891; detaching it from the instance and deleting it from the info cache [ 2005.758472] env[62519]: DEBUG nova.network.neutron [req-8bdffe07-4d1d-437f-bd88-e15bad9e360b req-c9affb49-f3a4-4f4d-83e8-2e0599a18457 service nova] [instance: c884a374-ffb8-48db-97bb-d64a687694d5] Updating instance_info_cache with network_info: [] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2005.850612] env[62519]: DEBUG oslo_concurrency.lockutils [None req-d524302e-bbec-49be-af91-3aad1c2e6617 tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] Acquiring lock "90cdb4a1-6192-42e8-ad30-90faeefa4d18" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2005.850612] env[62519]: DEBUG oslo_concurrency.lockutils [None req-d524302e-bbec-49be-af91-3aad1c2e6617 tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] Lock "90cdb4a1-6192-42e8-ad30-90faeefa4d18" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2006.101240] env[62519]: DEBUG oslo_vmware.api [None req-610bfd91-d15f-42f9-8b6d-10d9ffd4a84b tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Task: {'id': task-1803438, 'name': PowerOffVM_Task, 'duration_secs': 0.222664} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2006.101240] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-610bfd91-d15f-42f9-8b6d-10d9ffd4a84b tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] [instance: 9bf88b2f-63f9-466b-8669-45f17319055d] Powered off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2006.101240] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-610bfd91-d15f-42f9-8b6d-10d9ffd4a84b tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] [instance: 9bf88b2f-63f9-466b-8669-45f17319055d] Updating instance '9bf88b2f-63f9-466b-8669-45f17319055d' progress to 17 {{(pid=62519) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2006.159023] env[62519]: DEBUG nova.network.neutron [-] [instance: c884a374-ffb8-48db-97bb-d64a687694d5] Updating instance_info_cache with network_info: [] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2006.262507] env[62519]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-863ab5b0-72b1-4bb4-9955-4aceea40ef5d {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2006.275077] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91431e44-ca66-4a8f-ba8f-c1d571900c2d {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2006.312917] env[62519]: DEBUG nova.compute.manager [req-8bdffe07-4d1d-437f-bd88-e15bad9e360b req-c9affb49-f3a4-4f4d-83e8-2e0599a18457 service nova] [instance: c884a374-ffb8-48db-97bb-d64a687694d5] Detach interface failed, port_id=5a25cf20-af43-4653-8729-93e5e73c5891, reason: Instance c884a374-ffb8-48db-97bb-d64a687694d5 could not be found. {{(pid=62519) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11455}} [ 2006.352248] env[62519]: DEBUG nova.compute.manager [None req-d524302e-bbec-49be-af91-3aad1c2e6617 tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] [instance: 90cdb4a1-6192-42e8-ad30-90faeefa4d18] Starting instance... 
{{(pid=62519) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2441}} [ 2006.517099] env[62519]: DEBUG oslo_concurrency.lockutils [None req-993607c4-9323-4c27-98aa-b4453ca6fe97 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 2.219s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2006.606806] env[62519]: DEBUG nova.virt.hardware [None req-610bfd91-d15f-42f9-8b6d-10d9ffd4a84b tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T07:56:15Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=15793716-f1d9-4a86-9030-717adf498693,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 2006.607307] env[62519]: DEBUG nova.virt.hardware [None req-610bfd91-d15f-42f9-8b6d-10d9ffd4a84b tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Flavor limits 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2006.607413] env[62519]: DEBUG nova.virt.hardware [None req-610bfd91-d15f-42f9-8b6d-10d9ffd4a84b tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Image limits 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 2006.607752] env[62519]: DEBUG nova.virt.hardware [None req-610bfd91-d15f-42f9-8b6d-10d9ffd4a84b tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Flavor pref 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2006.608086] env[62519]: DEBUG nova.virt.hardware [None req-610bfd91-d15f-42f9-8b6d-10d9ffd4a84b tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Image pref 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 2006.608319] env[62519]: DEBUG nova.virt.hardware [None req-610bfd91-d15f-42f9-8b6d-10d9ffd4a84b tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 2006.608579] env[62519]: DEBUG nova.virt.hardware [None req-610bfd91-d15f-42f9-8b6d-10d9ffd4a84b tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 2006.608813] env[62519]: DEBUG nova.virt.hardware [None 
req-610bfd91-d15f-42f9-8b6d-10d9ffd4a84b tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62519) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 2006.609059] env[62519]: DEBUG nova.virt.hardware [None req-610bfd91-d15f-42f9-8b6d-10d9ffd4a84b tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Got 1 possible topologies {{(pid=62519) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 2006.609255] env[62519]: DEBUG nova.virt.hardware [None req-610bfd91-d15f-42f9-8b6d-10d9ffd4a84b tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 2006.609528] env[62519]: DEBUG nova.virt.hardware [None req-610bfd91-d15f-42f9-8b6d-10d9ffd4a84b tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 2006.614751] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-10c41026-7c55-4c53-a5a9-fd2eb259265c {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2006.632933] env[62519]: DEBUG oslo_vmware.api [None req-610bfd91-d15f-42f9-8b6d-10d9ffd4a84b tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Waiting for the task: (returnval){ [ 2006.632933] env[62519]: value = "task-1803439" [ 2006.632933] env[62519]: _type = "Task" [ 2006.632933] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2006.644801] env[62519]: DEBUG oslo_vmware.api [None req-610bfd91-d15f-42f9-8b6d-10d9ffd4a84b tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Task: {'id': task-1803439, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2006.661662] env[62519]: INFO nova.compute.manager [-] [instance: c884a374-ffb8-48db-97bb-d64a687694d5] Took 1.47 seconds to deallocate network for instance. 
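
Editor's note: the entries above trace the recurring vmwareapi pattern in this log. The driver invokes a vCenter operation (PowerOffVM_Task, UnregisterVM, DeleteDatastoreFile_Task, ReconfigVM_Task), then wait_for_task polls it, emitting the "progress is N%" lines until the task reports "completed successfully", after which Nova deallocates the instance's network ("Took 1.47 seconds to deallocate network for instance"). The sketch below is a minimal, standard-library-only illustration of that poll-then-destroy flow, not the actual oslo.vmware or Nova code; FakeVCenterTask, wait_for_task, and destroy_instance are illustrative stand-ins chosen to mirror the log messages.

import random
import time


class FakeVCenterTask:
    """Illustrative stand-in for a vCenter task handle (not the real oslo.vmware API)."""

    def __init__(self, name):
        self.name = name
        self.progress = 0

    def poll(self):
        # Each poll advances progress, mimicking the "progress is N%" log lines.
        self.progress = min(100, self.progress + random.randint(10, 40))
        return "success" if self.progress >= 100 else "running"


def wait_for_task(task, interval=0.5):
    """Poll a task until it finishes, logging progress the way _poll_task does."""
    start = time.monotonic()
    while True:
        state = task.poll()
        if state == "running":
            print("Task: %s progress is %d%%." % (task.name, task.progress))
            time.sleep(interval)
            continue
        duration = time.monotonic() - start
        print("Task: %s completed successfully in %.3fs." % (task.name, duration))
        return


def destroy_instance(instance_uuid):
    """Sketch of the destroy sequence traced above: power off the VM,
    unregister it, delete its datastore directory, then deallocate networking."""
    wait_for_task(FakeVCenterTask("PowerOffVM_Task"))
    # UnregisterVM is invoked directly in the log, with no task to poll.
    print("Invoking VirtualMachine.UnregisterVM")
    wait_for_task(FakeVCenterTask("DeleteDatastoreFile_Task"))
    print("Deallocating network for instance %s" % instance_uuid)


if __name__ == "__main__":
    destroy_instance("c884a374-ffb8-48db-97bb-d64a687694d5")

The same wait_for_task loop accounts for the other polled operations in this section (SearchDatastore_Task, the ReconfigVM_Task used for the interface detach and the resize disk detach/attach); only the task name and duration differ.
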
[ 2006.879742] env[62519]: DEBUG oslo_concurrency.lockutils [None req-d524302e-bbec-49be-af91-3aad1c2e6617 tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2006.880043] env[62519]: DEBUG oslo_concurrency.lockutils [None req-d524302e-bbec-49be-af91-3aad1c2e6617 tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2006.881940] env[62519]: INFO nova.compute.claims [None req-d524302e-bbec-49be-af91-3aad1c2e6617 tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] [instance: 90cdb4a1-6192-42e8-ad30-90faeefa4d18] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2007.092376] env[62519]: INFO nova.scheduler.client.report [None req-993607c4-9323-4c27-98aa-b4453ca6fe97 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Deleted allocation for migration 167dbeb9-758a-4673-a767-5e06201c0522 [ 2007.143998] env[62519]: DEBUG oslo_vmware.api [None req-610bfd91-d15f-42f9-8b6d-10d9ffd4a84b tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Task: {'id': task-1803439, 'name': ReconfigVM_Task, 'duration_secs': 0.288815} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2007.144341] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-610bfd91-d15f-42f9-8b6d-10d9ffd4a84b tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] [instance: 9bf88b2f-63f9-466b-8669-45f17319055d] Updating instance '9bf88b2f-63f9-466b-8669-45f17319055d' progress to 33 {{(pid=62519) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2007.169291] env[62519]: DEBUG oslo_concurrency.lockutils [None req-75a4d82d-ad50-423c-a7c8-9f45482e2140 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2007.598489] env[62519]: DEBUG oslo_concurrency.lockutils [None req-993607c4-9323-4c27-98aa-b4453ca6fe97 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Lock "8b20b91d-d4e7-4ec2-88ec-76b444a8d8a8" "released" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: held 6.975s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2007.650920] env[62519]: DEBUG nova.virt.hardware [None req-610bfd91-d15f-42f9-8b6d-10d9ffd4a84b tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Getting desirable topologies for flavor 
Flavor(created_at=2024-12-12T07:56:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=15793716-f1d9-4a86-9030-717adf498693,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 2007.651318] env[62519]: DEBUG nova.virt.hardware [None req-610bfd91-d15f-42f9-8b6d-10d9ffd4a84b tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Flavor limits 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2007.651422] env[62519]: DEBUG nova.virt.hardware [None req-610bfd91-d15f-42f9-8b6d-10d9ffd4a84b tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Image limits 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 2007.651658] env[62519]: DEBUG nova.virt.hardware [None req-610bfd91-d15f-42f9-8b6d-10d9ffd4a84b tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Flavor pref 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2007.651896] env[62519]: DEBUG nova.virt.hardware [None req-610bfd91-d15f-42f9-8b6d-10d9ffd4a84b tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Image pref 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 2007.652131] env[62519]: DEBUG nova.virt.hardware [None req-610bfd91-d15f-42f9-8b6d-10d9ffd4a84b tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 2007.652402] env[62519]: DEBUG nova.virt.hardware [None req-610bfd91-d15f-42f9-8b6d-10d9ffd4a84b tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 2007.653027] env[62519]: DEBUG nova.virt.hardware [None req-610bfd91-d15f-42f9-8b6d-10d9ffd4a84b tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62519) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 2007.653027] env[62519]: DEBUG nova.virt.hardware [None req-610bfd91-d15f-42f9-8b6d-10d9ffd4a84b tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Got 1 possible topologies {{(pid=62519) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 2007.653027] env[62519]: DEBUG nova.virt.hardware [None req-610bfd91-d15f-42f9-8b6d-10d9ffd4a84b tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Possible 
topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 2007.653397] env[62519]: DEBUG nova.virt.hardware [None req-610bfd91-d15f-42f9-8b6d-10d9ffd4a84b tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 2007.659811] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-610bfd91-d15f-42f9-8b6d-10d9ffd4a84b tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] [instance: 9bf88b2f-63f9-466b-8669-45f17319055d] Reconfiguring VM instance instance-00000071 to detach disk 2000 {{(pid=62519) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 2007.660632] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d6a4439c-97e1-4944-a3d3-3517ae349f90 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2007.680699] env[62519]: DEBUG oslo_vmware.api [None req-610bfd91-d15f-42f9-8b6d-10d9ffd4a84b tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Waiting for the task: (returnval){ [ 2007.680699] env[62519]: value = "task-1803440" [ 2007.680699] env[62519]: _type = "Task" [ 2007.680699] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2007.689473] env[62519]: DEBUG oslo_vmware.api [None req-610bfd91-d15f-42f9-8b6d-10d9ffd4a84b tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Task: {'id': task-1803440, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2007.987407] env[62519]: DEBUG oslo_concurrency.lockutils [None req-527ed473-2d7b-4c86-9940-20784498d6ea tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Acquiring lock "8b20b91d-d4e7-4ec2-88ec-76b444a8d8a8" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2007.987836] env[62519]: DEBUG oslo_concurrency.lockutils [None req-527ed473-2d7b-4c86-9940-20784498d6ea tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Lock "8b20b91d-d4e7-4ec2-88ec-76b444a8d8a8" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2007.988089] env[62519]: DEBUG oslo_concurrency.lockutils [None req-527ed473-2d7b-4c86-9940-20784498d6ea tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Acquiring lock "8b20b91d-d4e7-4ec2-88ec-76b444a8d8a8-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2007.988284] env[62519]: DEBUG oslo_concurrency.lockutils [None req-527ed473-2d7b-4c86-9940-20784498d6ea tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Lock "8b20b91d-d4e7-4ec2-88ec-76b444a8d8a8-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2007.988452] env[62519]: DEBUG oslo_concurrency.lockutils [None req-527ed473-2d7b-4c86-9940-20784498d6ea tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Lock "8b20b91d-d4e7-4ec2-88ec-76b444a8d8a8-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2007.990740] env[62519]: INFO nova.compute.manager [None req-527ed473-2d7b-4c86-9940-20784498d6ea tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] [instance: 8b20b91d-d4e7-4ec2-88ec-76b444a8d8a8] Terminating instance [ 2008.006111] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e724972d-2a2f-4992-a931-b2e191ba4f29 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2008.015070] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e727bd6-b117-4062-985f-daae11518f20 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2008.046876] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3048a7b6-dc8f-462d-a83c-d14d8fe67d2c {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2008.058134] env[62519]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c240d7d5-d5e8-4e7c-b431-1c82986734d8 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2008.074757] env[62519]: DEBUG nova.compute.provider_tree [None req-d524302e-bbec-49be-af91-3aad1c2e6617 tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] Inventory has not changed in ProviderTree for provider: f8ca0d98-9158-4b85-ae0e-b106f966dd44 {{(pid=62519) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2008.191720] env[62519]: DEBUG oslo_vmware.api [None req-610bfd91-d15f-42f9-8b6d-10d9ffd4a84b tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Task: {'id': task-1803440, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2008.495147] env[62519]: DEBUG nova.compute.manager [None req-527ed473-2d7b-4c86-9940-20784498d6ea tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] [instance: 8b20b91d-d4e7-4ec2-88ec-76b444a8d8a8] Start destroying the instance on the hypervisor. {{(pid=62519) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3166}} [ 2008.495405] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-527ed473-2d7b-4c86-9940-20784498d6ea tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] [instance: 8b20b91d-d4e7-4ec2-88ec-76b444a8d8a8] Destroying instance {{(pid=62519) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2008.496361] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1afe0252-10ec-41bf-9bd3-dddfcee9ce5e {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2008.505663] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-527ed473-2d7b-4c86-9940-20784498d6ea tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] [instance: 8b20b91d-d4e7-4ec2-88ec-76b444a8d8a8] Powering off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2008.505953] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-256a0433-486f-43d1-a3de-5ba1f809d914 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2008.513982] env[62519]: DEBUG oslo_vmware.api [None req-527ed473-2d7b-4c86-9940-20784498d6ea tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Waiting for the task: (returnval){ [ 2008.513982] env[62519]: value = "task-1803441" [ 2008.513982] env[62519]: _type = "Task" [ 2008.513982] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2008.523171] env[62519]: DEBUG oslo_vmware.api [None req-527ed473-2d7b-4c86-9940-20784498d6ea tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Task: {'id': task-1803441, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2008.577941] env[62519]: DEBUG nova.scheduler.client.report [None req-d524302e-bbec-49be-af91-3aad1c2e6617 tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] Inventory has not changed for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 159, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2008.696135] env[62519]: DEBUG oslo_vmware.api [None req-610bfd91-d15f-42f9-8b6d-10d9ffd4a84b tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Task: {'id': task-1803440, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2009.024927] env[62519]: DEBUG oslo_vmware.api [None req-527ed473-2d7b-4c86-9940-20784498d6ea tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Task: {'id': task-1803441, 'name': PowerOffVM_Task, 'duration_secs': 0.274163} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2009.025182] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-527ed473-2d7b-4c86-9940-20784498d6ea tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] [instance: 8b20b91d-d4e7-4ec2-88ec-76b444a8d8a8] Powered off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2009.025365] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-527ed473-2d7b-4c86-9940-20784498d6ea tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] [instance: 8b20b91d-d4e7-4ec2-88ec-76b444a8d8a8] Unregistering the VM {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2009.025609] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e72b54a3-043c-49ee-bf2a-973d4f8b8317 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2009.085615] env[62519]: DEBUG oslo_concurrency.lockutils [None req-d524302e-bbec-49be-af91-3aad1c2e6617 tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.205s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2009.086242] env[62519]: DEBUG nova.compute.manager [None req-d524302e-bbec-49be-af91-3aad1c2e6617 tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] [instance: 90cdb4a1-6192-42e8-ad30-90faeefa4d18] Start building networks asynchronously for instance. 
{{(pid=62519) _build_resources /opt/stack/nova/nova/compute/manager.py:2838}} [ 2009.089237] env[62519]: DEBUG oslo_concurrency.lockutils [None req-75a4d82d-ad50-423c-a7c8-9f45482e2140 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.920s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2009.089498] env[62519]: DEBUG nova.objects.instance [None req-75a4d82d-ad50-423c-a7c8-9f45482e2140 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Lazy-loading 'resources' on Instance uuid c884a374-ffb8-48db-97bb-d64a687694d5 {{(pid=62519) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2009.181656] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-527ed473-2d7b-4c86-9940-20784498d6ea tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] [instance: 8b20b91d-d4e7-4ec2-88ec-76b444a8d8a8] Unregistered the VM {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2009.181935] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-527ed473-2d7b-4c86-9940-20784498d6ea tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] [instance: 8b20b91d-d4e7-4ec2-88ec-76b444a8d8a8] Deleting contents of the VM from datastore datastore1 {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2009.182152] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-527ed473-2d7b-4c86-9940-20784498d6ea tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Deleting the datastore file [datastore1] 8b20b91d-d4e7-4ec2-88ec-76b444a8d8a8 {{(pid=62519) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2009.182512] env[62519]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-46e1ea55-6e71-4179-8b62-64a0aa4b9437 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2009.193700] env[62519]: DEBUG oslo_vmware.api [None req-610bfd91-d15f-42f9-8b6d-10d9ffd4a84b tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Task: {'id': task-1803440, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2009.195155] env[62519]: DEBUG oslo_vmware.api [None req-527ed473-2d7b-4c86-9940-20784498d6ea tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Waiting for the task: (returnval){ [ 2009.195155] env[62519]: value = "task-1803443" [ 2009.195155] env[62519]: _type = "Task" [ 2009.195155] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2009.204067] env[62519]: DEBUG oslo_vmware.api [None req-527ed473-2d7b-4c86-9940-20784498d6ea tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Task: {'id': task-1803443, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2009.593193] env[62519]: DEBUG nova.compute.utils [None req-d524302e-bbec-49be-af91-3aad1c2e6617 tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] Using /dev/sd instead of None {{(pid=62519) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2009.598297] env[62519]: DEBUG nova.compute.manager [None req-d524302e-bbec-49be-af91-3aad1c2e6617 tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] [instance: 90cdb4a1-6192-42e8-ad30-90faeefa4d18] Allocating IP information in the background. {{(pid=62519) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1989}} [ 2009.598733] env[62519]: DEBUG nova.network.neutron [None req-d524302e-bbec-49be-af91-3aad1c2e6617 tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] [instance: 90cdb4a1-6192-42e8-ad30-90faeefa4d18] allocate_for_instance() {{(pid=62519) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 2009.638976] env[62519]: DEBUG nova.policy [None req-d524302e-bbec-49be-af91-3aad1c2e6617 tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'eca1da9825a74bb6acb5695149518624', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '8eb0014c990b48ba8f9cfc5629d72f78', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62519) authorize /opt/stack/nova/nova/policy.py:192}} [ 2009.695739] env[62519]: DEBUG oslo_vmware.api [None req-610bfd91-d15f-42f9-8b6d-10d9ffd4a84b tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Task: {'id': task-1803440, 'name': ReconfigVM_Task, 'duration_secs': 1.526501} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2009.695739] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-610bfd91-d15f-42f9-8b6d-10d9ffd4a84b tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] [instance: 9bf88b2f-63f9-466b-8669-45f17319055d] Reconfigured VM instance instance-00000071 to detach disk 2000 {{(pid=62519) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 2009.696290] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5ee029f-4cb1-490e-8393-5cb7769a67d5 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2009.703166] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f979a42-7a79-47ba-b793-6b6d1a12e92c {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2009.728989] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-610bfd91-d15f-42f9-8b6d-10d9ffd4a84b tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] [instance: 9bf88b2f-63f9-466b-8669-45f17319055d] Reconfiguring VM instance instance-00000071 to attach disk [datastore1] 9bf88b2f-63f9-466b-8669-45f17319055d/9bf88b2f-63f9-466b-8669-45f17319055d.vmdk or device None with type thin {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2009.733983] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f192c82f-7c17-47c2-922d-943d76663297 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2009.746859] env[62519]: DEBUG oslo_vmware.api [None req-527ed473-2d7b-4c86-9940-20784498d6ea tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Task: {'id': task-1803443, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.195319} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2009.747915] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-527ed473-2d7b-4c86-9940-20784498d6ea tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Deleted the datastore file {{(pid=62519) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2009.748451] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-527ed473-2d7b-4c86-9940-20784498d6ea tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] [instance: 8b20b91d-d4e7-4ec2-88ec-76b444a8d8a8] Deleted contents of the VM from datastore datastore1 {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2009.748710] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-527ed473-2d7b-4c86-9940-20784498d6ea tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] [instance: 8b20b91d-d4e7-4ec2-88ec-76b444a8d8a8] Instance destroyed {{(pid=62519) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2009.748922] env[62519]: INFO nova.compute.manager [None req-527ed473-2d7b-4c86-9940-20784498d6ea tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] [instance: 8b20b91d-d4e7-4ec2-88ec-76b444a8d8a8] Took 1.25 seconds to destroy the instance on the hypervisor. [ 2009.749234] env[62519]: DEBUG oslo.service.loopingcall [None req-527ed473-2d7b-4c86-9940-20784498d6ea tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62519) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2009.750414] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ce0e6ef-246d-477f-a69d-050fb67016d7 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2009.753892] env[62519]: DEBUG nova.compute.manager [-] [instance: 8b20b91d-d4e7-4ec2-88ec-76b444a8d8a8] Deallocating network for instance {{(pid=62519) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2297}} [ 2009.754032] env[62519]: DEBUG nova.network.neutron [-] [instance: 8b20b91d-d4e7-4ec2-88ec-76b444a8d8a8] deallocate_for_instance() {{(pid=62519) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2009.787465] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ee5c730-2f2c-4d84-a7db-cd5a05fc389a {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2009.790404] env[62519]: DEBUG oslo_vmware.api [None req-610bfd91-d15f-42f9-8b6d-10d9ffd4a84b tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Waiting for the task: (returnval){ [ 2009.790404] env[62519]: value = "task-1803444" [ 2009.790404] env[62519]: _type = "Task" [ 2009.790404] env[62519]: } to complete. 
{{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2009.800230] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5aa1579b-581a-4542-8c50-1adc1bb82549 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2009.804921] env[62519]: DEBUG oslo_vmware.api [None req-610bfd91-d15f-42f9-8b6d-10d9ffd4a84b tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Task: {'id': task-1803444, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2009.815296] env[62519]: DEBUG nova.compute.provider_tree [None req-75a4d82d-ad50-423c-a7c8-9f45482e2140 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Inventory has not changed in ProviderTree for provider: f8ca0d98-9158-4b85-ae0e-b106f966dd44 {{(pid=62519) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2009.844582] env[62519]: DEBUG oslo_vmware.rw_handles [None req-50cc0e8a-f13a-4a85-abd1-026e9f33ded0 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/522cc263-d041-1ada-bd90-589b29845fa3/disk-0.vmdk. {{(pid=62519) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 2009.845499] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1950b9a7-0236-41b0-9ffb-57d310953e5c {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2009.852623] env[62519]: DEBUG oslo_vmware.rw_handles [None req-50cc0e8a-f13a-4a85-abd1-026e9f33ded0 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/522cc263-d041-1ada-bd90-589b29845fa3/disk-0.vmdk is in state: ready. {{(pid=62519) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 2009.852786] env[62519]: ERROR oslo_vmware.rw_handles [None req-50cc0e8a-f13a-4a85-abd1-026e9f33ded0 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Aborting lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/522cc263-d041-1ada-bd90-589b29845fa3/disk-0.vmdk due to incomplete transfer. [ 2009.853016] env[62519]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-73a0460f-d506-4e53-bc73-aae55a732092 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2009.861174] env[62519]: DEBUG oslo_vmware.rw_handles [None req-50cc0e8a-f13a-4a85-abd1-026e9f33ded0 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Closed VMDK read handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/522cc263-d041-1ada-bd90-589b29845fa3/disk-0.vmdk. 
{{(pid=62519) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 2009.861374] env[62519]: DEBUG nova.virt.vmwareapi.images [None req-50cc0e8a-f13a-4a85-abd1-026e9f33ded0 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] [instance: 47439070-54d8-454c-bf1d-7a2a33d82e9a] Uploaded image f3e835e2-df44-417d-9a40-88b6c8346b2f to the Glance image server {{(pid=62519) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 2009.864257] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-50cc0e8a-f13a-4a85-abd1-026e9f33ded0 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] [instance: 47439070-54d8-454c-bf1d-7a2a33d82e9a] Destroying the VM {{(pid=62519) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 2009.864257] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-7bc317b3-2d99-41ad-a926-c7fc4135ed25 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2009.875018] env[62519]: DEBUG oslo_vmware.api [None req-50cc0e8a-f13a-4a85-abd1-026e9f33ded0 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Waiting for the task: (returnval){ [ 2009.875018] env[62519]: value = "task-1803445" [ 2009.875018] env[62519]: _type = "Task" [ 2009.875018] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2009.880467] env[62519]: DEBUG oslo_vmware.api [None req-50cc0e8a-f13a-4a85-abd1-026e9f33ded0 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Task: {'id': task-1803445, 'name': Destroy_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2010.021690] env[62519]: DEBUG nova.network.neutron [None req-d524302e-bbec-49be-af91-3aad1c2e6617 tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] [instance: 90cdb4a1-6192-42e8-ad30-90faeefa4d18] Successfully created port: 9c28f995-e2b2-4556-a305-8b9194fd6e11 {{(pid=62519) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2010.099345] env[62519]: DEBUG nova.compute.manager [None req-d524302e-bbec-49be-af91-3aad1c2e6617 tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] [instance: 90cdb4a1-6192-42e8-ad30-90faeefa4d18] Start building block device mappings for instance. {{(pid=62519) _build_resources /opt/stack/nova/nova/compute/manager.py:2873}} [ 2010.300633] env[62519]: DEBUG oslo_vmware.api [None req-610bfd91-d15f-42f9-8b6d-10d9ffd4a84b tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Task: {'id': task-1803444, 'name': ReconfigVM_Task, 'duration_secs': 0.327351} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2010.300928] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-610bfd91-d15f-42f9-8b6d-10d9ffd4a84b tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] [instance: 9bf88b2f-63f9-466b-8669-45f17319055d] Reconfigured VM instance instance-00000071 to attach disk [datastore1] 9bf88b2f-63f9-466b-8669-45f17319055d/9bf88b2f-63f9-466b-8669-45f17319055d.vmdk or device None with type thin {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2010.301257] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-610bfd91-d15f-42f9-8b6d-10d9ffd4a84b tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] [instance: 9bf88b2f-63f9-466b-8669-45f17319055d] Updating instance '9bf88b2f-63f9-466b-8669-45f17319055d' progress to 50 {{(pid=62519) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2010.318987] env[62519]: DEBUG nova.scheduler.client.report [None req-75a4d82d-ad50-423c-a7c8-9f45482e2140 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Inventory has not changed for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 159, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2010.387442] env[62519]: DEBUG oslo_vmware.api [None req-50cc0e8a-f13a-4a85-abd1-026e9f33ded0 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Task: {'id': task-1803445, 'name': Destroy_Task} progress is 33%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2010.616208] env[62519]: DEBUG nova.compute.manager [req-91fac35b-8b02-42d0-b71e-d676179d32f2 req-e033bf6c-ec2a-48a4-8d54-40f001f760ab service nova] [instance: 8b20b91d-d4e7-4ec2-88ec-76b444a8d8a8] Received event network-vif-deleted-c3e2a054-4826-4bd6-8c9e-74005e7912e4 {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 2010.616208] env[62519]: INFO nova.compute.manager [req-91fac35b-8b02-42d0-b71e-d676179d32f2 req-e033bf6c-ec2a-48a4-8d54-40f001f760ab service nova] [instance: 8b20b91d-d4e7-4ec2-88ec-76b444a8d8a8] Neutron deleted interface c3e2a054-4826-4bd6-8c9e-74005e7912e4; detaching it from the instance and deleting it from the info cache [ 2010.616208] env[62519]: DEBUG nova.network.neutron [req-91fac35b-8b02-42d0-b71e-d676179d32f2 req-e033bf6c-ec2a-48a4-8d54-40f001f760ab service nova] [instance: 8b20b91d-d4e7-4ec2-88ec-76b444a8d8a8] Updating instance_info_cache with network_info: [] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2010.808593] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf0e1273-1c9e-478d-a005-95e5d9419a8e {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2010.832047] env[62519]: DEBUG oslo_concurrency.lockutils [None req-75a4d82d-ad50-423c-a7c8-9f45482e2140 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.743s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2010.834804] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b77a72dd-a351-46d5-8ee8-868ef449bbb2 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2010.856324] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-610bfd91-d15f-42f9-8b6d-10d9ffd4a84b tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] [instance: 9bf88b2f-63f9-466b-8669-45f17319055d] Updating instance '9bf88b2f-63f9-466b-8669-45f17319055d' progress to 67 {{(pid=62519) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2010.860515] env[62519]: INFO nova.scheduler.client.report [None req-75a4d82d-ad50-423c-a7c8-9f45482e2140 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Deleted allocations for instance c884a374-ffb8-48db-97bb-d64a687694d5 [ 2010.883434] env[62519]: DEBUG oslo_vmware.api [None req-50cc0e8a-f13a-4a85-abd1-026e9f33ded0 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Task: {'id': task-1803445, 'name': Destroy_Task, 'duration_secs': 0.821178} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2010.883434] env[62519]: INFO nova.virt.vmwareapi.vm_util [None req-50cc0e8a-f13a-4a85-abd1-026e9f33ded0 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] [instance: 47439070-54d8-454c-bf1d-7a2a33d82e9a] Destroyed the VM [ 2010.883434] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-50cc0e8a-f13a-4a85-abd1-026e9f33ded0 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] [instance: 47439070-54d8-454c-bf1d-7a2a33d82e9a] Deleting Snapshot of the VM instance {{(pid=62519) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 2010.883434] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-bd5edc02-4df9-4e50-8443-b7088b1d46aa {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2010.891064] env[62519]: DEBUG oslo_vmware.api [None req-50cc0e8a-f13a-4a85-abd1-026e9f33ded0 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Waiting for the task: (returnval){ [ 2010.891064] env[62519]: value = "task-1803446" [ 2010.891064] env[62519]: _type = "Task" [ 2010.891064] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2010.899299] env[62519]: DEBUG oslo_vmware.api [None req-50cc0e8a-f13a-4a85-abd1-026e9f33ded0 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Task: {'id': task-1803446, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2010.981472] env[62519]: DEBUG nova.network.neutron [-] [instance: 8b20b91d-d4e7-4ec2-88ec-76b444a8d8a8] Updating instance_info_cache with network_info: [] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2011.111079] env[62519]: DEBUG nova.compute.manager [None req-d524302e-bbec-49be-af91-3aad1c2e6617 tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] [instance: 90cdb4a1-6192-42e8-ad30-90faeefa4d18] Start spawning the instance on the hypervisor. 
{{(pid=62519) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2647}} [ 2011.120452] env[62519]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-2b944d99-54b6-4e12-8072-dc8b93f53b64 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2011.132118] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37af3d72-e045-412b-83a0-659473d63ca8 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2011.152083] env[62519]: DEBUG nova.virt.hardware [None req-d524302e-bbec-49be-af91-3aad1c2e6617 tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T07:56:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T07:55:55Z,direct_url=,disk_format='vmdk',id=15793716-f1d9-4a86-9030-717adf498693,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4069337684d348e0a1ab4eb6a1a2b14d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T07:55:56Z,virtual_size=,visibility=), allow threads: False {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 2011.152363] env[62519]: DEBUG nova.virt.hardware [None req-d524302e-bbec-49be-af91-3aad1c2e6617 tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] Flavor limits 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2011.152519] env[62519]: DEBUG nova.virt.hardware [None req-d524302e-bbec-49be-af91-3aad1c2e6617 tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] Image limits 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 2011.152701] env[62519]: DEBUG nova.virt.hardware [None req-d524302e-bbec-49be-af91-3aad1c2e6617 tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] Flavor pref 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2011.152846] env[62519]: DEBUG nova.virt.hardware [None req-d524302e-bbec-49be-af91-3aad1c2e6617 tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] Image pref 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 2011.152991] env[62519]: DEBUG nova.virt.hardware [None req-d524302e-bbec-49be-af91-3aad1c2e6617 tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 2011.153217] env[62519]: DEBUG nova.virt.hardware [None req-d524302e-bbec-49be-af91-3aad1c2e6617 tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum 
VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 2011.153375] env[62519]: DEBUG nova.virt.hardware [None req-d524302e-bbec-49be-af91-3aad1c2e6617 tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62519) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 2011.153537] env[62519]: DEBUG nova.virt.hardware [None req-d524302e-bbec-49be-af91-3aad1c2e6617 tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] Got 1 possible topologies {{(pid=62519) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 2011.153698] env[62519]: DEBUG nova.virt.hardware [None req-d524302e-bbec-49be-af91-3aad1c2e6617 tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 2011.153877] env[62519]: DEBUG nova.virt.hardware [None req-d524302e-bbec-49be-af91-3aad1c2e6617 tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 2011.154735] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-484b445d-312f-4b9e-abf4-71407e0d7317 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2011.164969] env[62519]: DEBUG nova.compute.manager [req-91fac35b-8b02-42d0-b71e-d676179d32f2 req-e033bf6c-ec2a-48a4-8d54-40f001f760ab service nova] [instance: 8b20b91d-d4e7-4ec2-88ec-76b444a8d8a8] Detach interface failed, port_id=c3e2a054-4826-4bd6-8c9e-74005e7912e4, reason: Instance 8b20b91d-d4e7-4ec2-88ec-76b444a8d8a8 could not be found. 
{{(pid=62519) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11455}} [ 2011.170608] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dde9791c-b553-438f-851e-5f8f1be700a6 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2011.368326] env[62519]: DEBUG oslo_concurrency.lockutils [None req-75a4d82d-ad50-423c-a7c8-9f45482e2140 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Lock "c884a374-ffb8-48db-97bb-d64a687694d5" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 7.841s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2011.369342] env[62519]: DEBUG oslo_concurrency.lockutils [req-87eeb391-24fe-4d95-ad48-b1ec19fea958 req-d30527b6-7875-4c07-bfaa-1b0d86a3c9a2 service nova] Acquired lock "c884a374-ffb8-48db-97bb-d64a687694d5" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2011.372599] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-337a6df1-be80-4df8-a2cd-de5dca88af43 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2011.382178] env[62519]: WARNING suds.client [-] Web service reported a SOAP processing fault using an unexpected HTTP status code 200. Reporting as an internal server error. [ 2011.382350] env[62519]: DEBUG oslo_vmware.api [-] Fault list: [ManagedObjectNotFound] {{(pid=62519) _invoke_api /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:337}} [ 2011.382723] env[62519]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-5a467243-116f-480c-bba1-4d4f75989cbb {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2011.393087] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-437e1182-97bf-406c-84ba-10be9a6e9572 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2011.413375] env[62519]: DEBUG oslo_vmware.api [None req-50cc0e8a-f13a-4a85-abd1-026e9f33ded0 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Task: {'id': task-1803446, 'name': RemoveSnapshot_Task, 'duration_secs': 0.472896} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2011.413638] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-50cc0e8a-f13a-4a85-abd1-026e9f33ded0 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] [instance: 47439070-54d8-454c-bf1d-7a2a33d82e9a] Deleted Snapshot of the VM instance {{(pid=62519) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 2011.413966] env[62519]: DEBUG nova.compute.manager [None req-50cc0e8a-f13a-4a85-abd1-026e9f33ded0 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] [instance: 47439070-54d8-454c-bf1d-7a2a33d82e9a] Checking state {{(pid=62519) _get_power_state /opt/stack/nova/nova/compute/manager.py:1799}} [ 2011.414746] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a65b24d5-df63-4c11-9f48-2284a0469145 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2011.424775] env[62519]: ERROR root [req-87eeb391-24fe-4d95-ad48-b1ec19fea958 req-d30527b6-7875-4c07-bfaa-1b0d86a3c9a2 service nova] Original exception being dropped: ['Traceback (most recent call last):\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py", line 377, in request_handler\n response = request(managed_object, **kwargs)\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/suds/client.py", line 586, in __call__\n return client.invoke(args, kwargs)\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/suds/client.py", line 728, in invoke\n result = self.send(soapenv, timeout=timeout)\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/suds/client.py", line 777, in send\n return self.process_reply(reply.message, None, None)\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/suds/client.py", line 840, in process_reply\n raise WebFault(fault, replyroot)\n', "suds.WebFault: Server raised fault: 'The object 'vim.VirtualMachine:vm-373854' has already been deleted or has not been completely created'\n", '\nDuring handling of the above exception, another exception occurred:\n\n', 'Traceback (most recent call last):\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 301, in _invoke_api\n return api_method(*args, **kwargs)\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/vim_util.py", line 480, in get_object_property\n props = get_object_properties(vim, moref, [property_name],\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/vim_util.py", line 360, in get_object_properties\n retrieve_result = vim.RetrievePropertiesEx(\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py", line 413, in request_handler\n raise exceptions.VimFaultException(fault_list, fault_string,\n', "oslo_vmware.exceptions.VimFaultException: The object 'vim.VirtualMachine:vm-373854' has already been deleted or has not been completely created\nCause: Server raised fault: 'The object 'vim.VirtualMachine:vm-373854' has already been deleted or has not been completely created'\nFaults: [ManagedObjectNotFound]\nDetails: {'obj': 'vm-373854'}\n", '\nDuring handling of the above exception, another exception occurred:\n\n', 'Traceback (most recent call last):\n', ' File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 123, in _call_method\n return 
self.invoke_api(module, method, self.vim, *args,\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 358, in invoke_api\n return _invoke_api(module, method, *args, **kwargs)\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 122, in func\n return evt.wait()\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait\n result = hub.switch()\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch\n return self.greenlet.switch()\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 122, in _inner\n idle = self.f(*self.args, **self.kw)\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 96, in _func\n result = f(*args, **kwargs)\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 341, in _invoke_api\n raise clazz(str(excep),\n', "oslo_vmware.exceptions.ManagedObjectNotFoundException: The object 'vim.VirtualMachine:vm-373854' has already been deleted or has not been completely created\nCause: Server raised fault: 'The object 'vim.VirtualMachine:vm-373854' has already been deleted or has not been completely created'\nFaults: [ManagedObjectNotFound]\nDetails: {'obj': 'vm-373854'}\n"]: nova.exception.InstanceNotFound: Instance c884a374-ffb8-48db-97bb-d64a687694d5 could not be found. [ 2011.424982] env[62519]: DEBUG oslo_concurrency.lockutils [req-87eeb391-24fe-4d95-ad48-b1ec19fea958 req-d30527b6-7875-4c07-bfaa-1b0d86a3c9a2 service nova] Releasing lock "c884a374-ffb8-48db-97bb-d64a687694d5" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2011.425185] env[62519]: DEBUG nova.compute.manager [req-87eeb391-24fe-4d95-ad48-b1ec19fea958 req-d30527b6-7875-4c07-bfaa-1b0d86a3c9a2 service nova] [instance: c884a374-ffb8-48db-97bb-d64a687694d5] Detach interface failed, port_id=0f1a716e-2238-498b-9d31-8516cb0d084d, reason: Instance c884a374-ffb8-48db-97bb-d64a687694d5 could not be found. {{(pid=62519) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11455}} [ 2011.484863] env[62519]: INFO nova.compute.manager [-] [instance: 8b20b91d-d4e7-4ec2-88ec-76b444a8d8a8] Took 1.73 seconds to deallocate network for instance. 
[ 2011.716571] env[62519]: DEBUG nova.network.neutron [None req-d524302e-bbec-49be-af91-3aad1c2e6617 tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] [instance: 90cdb4a1-6192-42e8-ad30-90faeefa4d18] Successfully updated port: 9c28f995-e2b2-4556-a305-8b9194fd6e11 {{(pid=62519) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2011.937353] env[62519]: INFO nova.compute.manager [None req-50cc0e8a-f13a-4a85-abd1-026e9f33ded0 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] [instance: 47439070-54d8-454c-bf1d-7a2a33d82e9a] Shelve offloading [ 2011.991401] env[62519]: DEBUG oslo_concurrency.lockutils [None req-527ed473-2d7b-4c86-9940-20784498d6ea tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2011.991677] env[62519]: DEBUG oslo_concurrency.lockutils [None req-527ed473-2d7b-4c86-9940-20784498d6ea tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2011.991899] env[62519]: DEBUG oslo_concurrency.lockutils [None req-527ed473-2d7b-4c86-9940-20784498d6ea tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2012.014752] env[62519]: INFO nova.scheduler.client.report [None req-527ed473-2d7b-4c86-9940-20784498d6ea tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Deleted allocations for instance 8b20b91d-d4e7-4ec2-88ec-76b444a8d8a8 [ 2012.222982] env[62519]: DEBUG oslo_concurrency.lockutils [None req-d524302e-bbec-49be-af91-3aad1c2e6617 tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] Acquiring lock "refresh_cache-90cdb4a1-6192-42e8-ad30-90faeefa4d18" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2012.223067] env[62519]: DEBUG oslo_concurrency.lockutils [None req-d524302e-bbec-49be-af91-3aad1c2e6617 tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] Acquired lock "refresh_cache-90cdb4a1-6192-42e8-ad30-90faeefa4d18" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2012.223291] env[62519]: DEBUG nova.network.neutron [None req-d524302e-bbec-49be-af91-3aad1c2e6617 tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] [instance: 90cdb4a1-6192-42e8-ad30-90faeefa4d18] Building network info cache for instance {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2012.442083] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-50cc0e8a-f13a-4a85-abd1-026e9f33ded0 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] [instance: 
47439070-54d8-454c-bf1d-7a2a33d82e9a] Powering off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2012.442400] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d9abc9c5-ea3c-4944-861f-d3c47ff00323 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2012.451917] env[62519]: DEBUG oslo_vmware.api [None req-50cc0e8a-f13a-4a85-abd1-026e9f33ded0 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Waiting for the task: (returnval){ [ 2012.451917] env[62519]: value = "task-1803447" [ 2012.451917] env[62519]: _type = "Task" [ 2012.451917] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2012.462014] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-50cc0e8a-f13a-4a85-abd1-026e9f33ded0 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] [instance: 47439070-54d8-454c-bf1d-7a2a33d82e9a] VM already powered off {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 2012.462295] env[62519]: DEBUG nova.compute.manager [None req-50cc0e8a-f13a-4a85-abd1-026e9f33ded0 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] [instance: 47439070-54d8-454c-bf1d-7a2a33d82e9a] Checking state {{(pid=62519) _get_power_state /opt/stack/nova/nova/compute/manager.py:1799}} [ 2012.463135] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bceccd5d-2718-4fd7-adc5-43ddec229007 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2012.469245] env[62519]: DEBUG oslo_concurrency.lockutils [None req-50cc0e8a-f13a-4a85-abd1-026e9f33ded0 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Acquiring lock "refresh_cache-47439070-54d8-454c-bf1d-7a2a33d82e9a" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2012.469516] env[62519]: DEBUG oslo_concurrency.lockutils [None req-50cc0e8a-f13a-4a85-abd1-026e9f33ded0 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Acquired lock "refresh_cache-47439070-54d8-454c-bf1d-7a2a33d82e9a" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2012.469731] env[62519]: DEBUG nova.network.neutron [None req-50cc0e8a-f13a-4a85-abd1-026e9f33ded0 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] [instance: 47439070-54d8-454c-bf1d-7a2a33d82e9a] Building network info cache for instance {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2012.521329] env[62519]: DEBUG oslo_concurrency.lockutils [None req-527ed473-2d7b-4c86-9940-20784498d6ea tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Lock "8b20b91d-d4e7-4ec2-88ec-76b444a8d8a8" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 4.534s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2012.533956] env[62519]: DEBUG 
nova.network.neutron [None req-610bfd91-d15f-42f9-8b6d-10d9ffd4a84b tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] [instance: 9bf88b2f-63f9-466b-8669-45f17319055d] Port 2ee072d4-2bdb-4a83-90dd-06086f515634 binding to destination host cpu-1 is already ACTIVE {{(pid=62519) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 2012.652596] env[62519]: DEBUG nova.compute.manager [req-aef074c0-d088-43f1-b000-24c97ae0f9d3 req-bee8ebc8-d03e-453f-b31e-654bf1629e0b service nova] [instance: 90cdb4a1-6192-42e8-ad30-90faeefa4d18] Received event network-vif-plugged-9c28f995-e2b2-4556-a305-8b9194fd6e11 {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 2012.652799] env[62519]: DEBUG oslo_concurrency.lockutils [req-aef074c0-d088-43f1-b000-24c97ae0f9d3 req-bee8ebc8-d03e-453f-b31e-654bf1629e0b service nova] Acquiring lock "90cdb4a1-6192-42e8-ad30-90faeefa4d18-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2012.653154] env[62519]: DEBUG oslo_concurrency.lockutils [req-aef074c0-d088-43f1-b000-24c97ae0f9d3 req-bee8ebc8-d03e-453f-b31e-654bf1629e0b service nova] Lock "90cdb4a1-6192-42e8-ad30-90faeefa4d18-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2012.653260] env[62519]: DEBUG oslo_concurrency.lockutils [req-aef074c0-d088-43f1-b000-24c97ae0f9d3 req-bee8ebc8-d03e-453f-b31e-654bf1629e0b service nova] Lock "90cdb4a1-6192-42e8-ad30-90faeefa4d18-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2012.653435] env[62519]: DEBUG nova.compute.manager [req-aef074c0-d088-43f1-b000-24c97ae0f9d3 req-bee8ebc8-d03e-453f-b31e-654bf1629e0b service nova] [instance: 90cdb4a1-6192-42e8-ad30-90faeefa4d18] No waiting events found dispatching network-vif-plugged-9c28f995-e2b2-4556-a305-8b9194fd6e11 {{(pid=62519) pop_instance_event /opt/stack/nova/nova/compute/manager.py:323}} [ 2012.653597] env[62519]: WARNING nova.compute.manager [req-aef074c0-d088-43f1-b000-24c97ae0f9d3 req-bee8ebc8-d03e-453f-b31e-654bf1629e0b service nova] [instance: 90cdb4a1-6192-42e8-ad30-90faeefa4d18] Received unexpected event network-vif-plugged-9c28f995-e2b2-4556-a305-8b9194fd6e11 for instance with vm_state building and task_state spawning. [ 2012.653748] env[62519]: DEBUG nova.compute.manager [req-aef074c0-d088-43f1-b000-24c97ae0f9d3 req-bee8ebc8-d03e-453f-b31e-654bf1629e0b service nova] [instance: 90cdb4a1-6192-42e8-ad30-90faeefa4d18] Received event network-changed-9c28f995-e2b2-4556-a305-8b9194fd6e11 {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 2012.653896] env[62519]: DEBUG nova.compute.manager [req-aef074c0-d088-43f1-b000-24c97ae0f9d3 req-bee8ebc8-d03e-453f-b31e-654bf1629e0b service nova] [instance: 90cdb4a1-6192-42e8-ad30-90faeefa4d18] Refreshing instance network info cache due to event network-changed-9c28f995-e2b2-4556-a305-8b9194fd6e11. 
{{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11628}} [ 2012.654080] env[62519]: DEBUG oslo_concurrency.lockutils [req-aef074c0-d088-43f1-b000-24c97ae0f9d3 req-bee8ebc8-d03e-453f-b31e-654bf1629e0b service nova] Acquiring lock "refresh_cache-90cdb4a1-6192-42e8-ad30-90faeefa4d18" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2012.758657] env[62519]: DEBUG nova.network.neutron [None req-d524302e-bbec-49be-af91-3aad1c2e6617 tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] [instance: 90cdb4a1-6192-42e8-ad30-90faeefa4d18] Instance cache missing network info. {{(pid=62519) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2012.875333] env[62519]: DEBUG nova.network.neutron [None req-d524302e-bbec-49be-af91-3aad1c2e6617 tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] [instance: 90cdb4a1-6192-42e8-ad30-90faeefa4d18] Updating instance_info_cache with network_info: [{"id": "9c28f995-e2b2-4556-a305-8b9194fd6e11", "address": "fa:16:3e:eb:ed:d5", "network": {"id": "8bab7e32-d01d-4dff-8b50-f49f6a04314c", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-1759123998-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "8eb0014c990b48ba8f9cfc5629d72f78", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "61a172ee-af3f-473e-b12a-3fee5bf39c8d", "external-id": "nsx-vlan-transportzone-997", "segmentation_id": 997, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9c28f995-e2", "ovs_interfaceid": "9c28f995-e2b2-4556-a305-8b9194fd6e11", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2013.190608] env[62519]: DEBUG nova.network.neutron [None req-50cc0e8a-f13a-4a85-abd1-026e9f33ded0 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] [instance: 47439070-54d8-454c-bf1d-7a2a33d82e9a] Updating instance_info_cache with network_info: [{"id": "9a08cba9-bd48-4013-a6c0-13049c0b5fdb", "address": "fa:16:3e:82:31:6c", "network": {"id": "1aae0533-b463-4e15-b245-e7c3c91984b5", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1664918037-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.137", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ff4895c6c47e438e8fb9fbc0ffbfdc82", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1f762954-6ca5-4da5-bf0a-5d31c51ec570", "external-id": 
"nsx-vlan-transportzone-930", "segmentation_id": 930, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9a08cba9-bd", "ovs_interfaceid": "9a08cba9-bd48-4013-a6c0-13049c0b5fdb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2013.377893] env[62519]: DEBUG oslo_concurrency.lockutils [None req-d524302e-bbec-49be-af91-3aad1c2e6617 tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] Releasing lock "refresh_cache-90cdb4a1-6192-42e8-ad30-90faeefa4d18" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2013.378275] env[62519]: DEBUG nova.compute.manager [None req-d524302e-bbec-49be-af91-3aad1c2e6617 tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] [instance: 90cdb4a1-6192-42e8-ad30-90faeefa4d18] Instance network_info: |[{"id": "9c28f995-e2b2-4556-a305-8b9194fd6e11", "address": "fa:16:3e:eb:ed:d5", "network": {"id": "8bab7e32-d01d-4dff-8b50-f49f6a04314c", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-1759123998-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "8eb0014c990b48ba8f9cfc5629d72f78", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "61a172ee-af3f-473e-b12a-3fee5bf39c8d", "external-id": "nsx-vlan-transportzone-997", "segmentation_id": 997, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9c28f995-e2", "ovs_interfaceid": "9c28f995-e2b2-4556-a305-8b9194fd6e11", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62519) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2004}} [ 2013.378582] env[62519]: DEBUG oslo_concurrency.lockutils [req-aef074c0-d088-43f1-b000-24c97ae0f9d3 req-bee8ebc8-d03e-453f-b31e-654bf1629e0b service nova] Acquired lock "refresh_cache-90cdb4a1-6192-42e8-ad30-90faeefa4d18" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2013.378788] env[62519]: DEBUG nova.network.neutron [req-aef074c0-d088-43f1-b000-24c97ae0f9d3 req-bee8ebc8-d03e-453f-b31e-654bf1629e0b service nova] [instance: 90cdb4a1-6192-42e8-ad30-90faeefa4d18] Refreshing network info cache for port 9c28f995-e2b2-4556-a305-8b9194fd6e11 {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2013.380034] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-d524302e-bbec-49be-af91-3aad1c2e6617 tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] [instance: 90cdb4a1-6192-42e8-ad30-90faeefa4d18] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:eb:ed:d5', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '61a172ee-af3f-473e-b12a-3fee5bf39c8d', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '9c28f995-e2b2-4556-a305-8b9194fd6e11', 
'vif_model': 'vmxnet3'}] {{(pid=62519) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2013.387469] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-d524302e-bbec-49be-af91-3aad1c2e6617 tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] Creating folder: Project (8eb0014c990b48ba8f9cfc5629d72f78). Parent ref: group-v373567. {{(pid=62519) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 2013.388463] env[62519]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-2a86e96f-2942-444f-b9f6-6cd1dbeb0402 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2013.403788] env[62519]: INFO nova.virt.vmwareapi.vm_util [None req-d524302e-bbec-49be-af91-3aad1c2e6617 tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] Created folder: Project (8eb0014c990b48ba8f9cfc5629d72f78) in parent group-v373567. [ 2013.403788] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-d524302e-bbec-49be-af91-3aad1c2e6617 tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] Creating folder: Instances. Parent ref: group-v373875. {{(pid=62519) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 2013.404081] env[62519]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-608755da-0e89-4bbf-9878-4fb7a9bae97d {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2013.416422] env[62519]: INFO nova.virt.vmwareapi.vm_util [None req-d524302e-bbec-49be-af91-3aad1c2e6617 tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] Created folder: Instances in parent group-v373875. [ 2013.416767] env[62519]: DEBUG oslo.service.loopingcall [None req-d524302e-bbec-49be-af91-3aad1c2e6617 tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62519) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2013.416979] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 90cdb4a1-6192-42e8-ad30-90faeefa4d18] Creating VM on the ESX host {{(pid=62519) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2013.417210] env[62519]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-4f79acc3-c3f9-4cb6-a1ff-8bef7e6f1aae {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2013.438015] env[62519]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2013.438015] env[62519]: value = "task-1803450" [ 2013.438015] env[62519]: _type = "Task" [ 2013.438015] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2013.446023] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1803450, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2013.558355] env[62519]: DEBUG oslo_concurrency.lockutils [None req-610bfd91-d15f-42f9-8b6d-10d9ffd4a84b tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Acquiring lock "9bf88b2f-63f9-466b-8669-45f17319055d-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2013.558573] env[62519]: DEBUG oslo_concurrency.lockutils [None req-610bfd91-d15f-42f9-8b6d-10d9ffd4a84b tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Lock "9bf88b2f-63f9-466b-8669-45f17319055d-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2013.558743] env[62519]: DEBUG oslo_concurrency.lockutils [None req-610bfd91-d15f-42f9-8b6d-10d9ffd4a84b tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Lock "9bf88b2f-63f9-466b-8669-45f17319055d-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2013.671580] env[62519]: DEBUG oslo_concurrency.lockutils [None req-b1c3f0b7-e2d2-4451-8963-33b35525f2de tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Acquiring lock "a18af9b7-4548-42d1-8459-508298cb96dc" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2013.671816] env[62519]: DEBUG oslo_concurrency.lockutils [None req-b1c3f0b7-e2d2-4451-8963-33b35525f2de tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Lock "a18af9b7-4548-42d1-8459-508298cb96dc" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2013.693316] env[62519]: DEBUG oslo_concurrency.lockutils [None req-50cc0e8a-f13a-4a85-abd1-026e9f33ded0 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Releasing lock "refresh_cache-47439070-54d8-454c-bf1d-7a2a33d82e9a" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2013.949214] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1803450, 'name': CreateVM_Task, 'duration_secs': 0.372608} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2013.949384] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 90cdb4a1-6192-42e8-ad30-90faeefa4d18] Created VM on the ESX host {{(pid=62519) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2013.950493] env[62519]: DEBUG oslo_concurrency.lockutils [None req-d524302e-bbec-49be-af91-3aad1c2e6617 tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2013.950654] env[62519]: DEBUG oslo_concurrency.lockutils [None req-d524302e-bbec-49be-af91-3aad1c2e6617 tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] Acquired lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2013.950996] env[62519]: DEBUG oslo_concurrency.lockutils [None req-d524302e-bbec-49be-af91-3aad1c2e6617 tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2013.952263] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cdcbc3dc-04be-4114-bd52-370cc46654b2 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2013.956542] env[62519]: DEBUG oslo_vmware.api [None req-d524302e-bbec-49be-af91-3aad1c2e6617 tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] Waiting for the task: (returnval){ [ 2013.956542] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]5245e66e-fb59-7f54-adc8-43a86a237762" [ 2013.956542] env[62519]: _type = "Task" [ 2013.956542] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2013.965431] env[62519]: DEBUG oslo_vmware.api [None req-d524302e-bbec-49be-af91-3aad1c2e6617 tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]5245e66e-fb59-7f54-adc8-43a86a237762, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2013.988934] env[62519]: DEBUG oslo_concurrency.lockutils [None req-63001563-3bba-4db1-8613-bfc57ecd26c3 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Acquiring lock "4049de7f-4ace-4017-8f9a-63817de5f81c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2013.989183] env[62519]: DEBUG oslo_concurrency.lockutils [None req-63001563-3bba-4db1-8613-bfc57ecd26c3 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Lock "4049de7f-4ace-4017-8f9a-63817de5f81c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2014.158765] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-50cc0e8a-f13a-4a85-abd1-026e9f33ded0 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] [instance: 47439070-54d8-454c-bf1d-7a2a33d82e9a] Destroying instance {{(pid=62519) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2014.159721] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8f0e320-ed90-47bc-8f60-9e6cce0456a1 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2014.168352] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-50cc0e8a-f13a-4a85-abd1-026e9f33ded0 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] [instance: 47439070-54d8-454c-bf1d-7a2a33d82e9a] Unregistering the VM {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2014.169171] env[62519]: DEBUG nova.network.neutron [req-aef074c0-d088-43f1-b000-24c97ae0f9d3 req-bee8ebc8-d03e-453f-b31e-654bf1629e0b service nova] [instance: 90cdb4a1-6192-42e8-ad30-90faeefa4d18] Updated VIF entry in instance network info cache for port 9c28f995-e2b2-4556-a305-8b9194fd6e11. 
{{(pid=62519) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2014.169500] env[62519]: DEBUG nova.network.neutron [req-aef074c0-d088-43f1-b000-24c97ae0f9d3 req-bee8ebc8-d03e-453f-b31e-654bf1629e0b service nova] [instance: 90cdb4a1-6192-42e8-ad30-90faeefa4d18] Updating instance_info_cache with network_info: [{"id": "9c28f995-e2b2-4556-a305-8b9194fd6e11", "address": "fa:16:3e:eb:ed:d5", "network": {"id": "8bab7e32-d01d-4dff-8b50-f49f6a04314c", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-1759123998-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "8eb0014c990b48ba8f9cfc5629d72f78", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "61a172ee-af3f-473e-b12a-3fee5bf39c8d", "external-id": "nsx-vlan-transportzone-997", "segmentation_id": 997, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9c28f995-e2", "ovs_interfaceid": "9c28f995-e2b2-4556-a305-8b9194fd6e11", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2014.170592] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f3597320-c553-4541-882e-70f269be961e {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2014.173959] env[62519]: DEBUG nova.compute.manager [None req-b1c3f0b7-e2d2-4451-8963-33b35525f2de tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] [instance: a18af9b7-4548-42d1-8459-508298cb96dc] Starting instance... 
{{(pid=62519) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2441}} [ 2014.259334] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-50cc0e8a-f13a-4a85-abd1-026e9f33ded0 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] [instance: 47439070-54d8-454c-bf1d-7a2a33d82e9a] Unregistered the VM {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2014.259612] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-50cc0e8a-f13a-4a85-abd1-026e9f33ded0 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] [instance: 47439070-54d8-454c-bf1d-7a2a33d82e9a] Deleting contents of the VM from datastore datastore1 {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2014.259732] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-50cc0e8a-f13a-4a85-abd1-026e9f33ded0 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Deleting the datastore file [datastore1] 47439070-54d8-454c-bf1d-7a2a33d82e9a {{(pid=62519) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2014.260187] env[62519]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-9ad048ea-2ef9-4e60-8fec-80c6502d70e7 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2014.268277] env[62519]: DEBUG oslo_vmware.api [None req-50cc0e8a-f13a-4a85-abd1-026e9f33ded0 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Waiting for the task: (returnval){ [ 2014.268277] env[62519]: value = "task-1803452" [ 2014.268277] env[62519]: _type = "Task" [ 2014.268277] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2014.277908] env[62519]: DEBUG oslo_vmware.api [None req-50cc0e8a-f13a-4a85-abd1-026e9f33ded0 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Task: {'id': task-1803452, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2014.467266] env[62519]: DEBUG oslo_vmware.api [None req-d524302e-bbec-49be-af91-3aad1c2e6617 tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]5245e66e-fb59-7f54-adc8-43a86a237762, 'name': SearchDatastore_Task, 'duration_secs': 0.013424} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2014.467610] env[62519]: DEBUG oslo_concurrency.lockutils [None req-d524302e-bbec-49be-af91-3aad1c2e6617 tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] Releasing lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2014.468240] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-d524302e-bbec-49be-af91-3aad1c2e6617 tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] [instance: 90cdb4a1-6192-42e8-ad30-90faeefa4d18] Processing image 15793716-f1d9-4a86-9030-717adf498693 {{(pid=62519) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2014.468240] env[62519]: DEBUG oslo_concurrency.lockutils [None req-d524302e-bbec-49be-af91-3aad1c2e6617 tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2014.468240] env[62519]: DEBUG oslo_concurrency.lockutils [None req-d524302e-bbec-49be-af91-3aad1c2e6617 tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] Acquired lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2014.468403] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-d524302e-bbec-49be-af91-3aad1c2e6617 tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62519) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2014.469013] env[62519]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-9bfafcc8-4ae9-4bbf-b2a2-4d49f823fbec {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2014.477481] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-d524302e-bbec-49be-af91-3aad1c2e6617 tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62519) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2014.477748] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-d524302e-bbec-49be-af91-3aad1c2e6617 tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62519) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2014.478407] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0e5ea186-93fd-4e29-b22b-d2ecbe5d8688 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2014.483792] env[62519]: DEBUG oslo_vmware.api [None req-d524302e-bbec-49be-af91-3aad1c2e6617 tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] Waiting for the task: (returnval){ [ 2014.483792] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]5282e20d-1f6f-902f-0f42-403b094e6e1e" [ 2014.483792] env[62519]: _type = "Task" [ 2014.483792] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2014.493577] env[62519]: DEBUG nova.compute.manager [None req-63001563-3bba-4db1-8613-bfc57ecd26c3 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] [instance: 4049de7f-4ace-4017-8f9a-63817de5f81c] Starting instance... {{(pid=62519) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2441}} [ 2014.496191] env[62519]: DEBUG oslo_vmware.api [None req-d524302e-bbec-49be-af91-3aad1c2e6617 tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]5282e20d-1f6f-902f-0f42-403b094e6e1e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2014.603009] env[62519]: DEBUG oslo_concurrency.lockutils [None req-610bfd91-d15f-42f9-8b6d-10d9ffd4a84b tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Acquiring lock "refresh_cache-9bf88b2f-63f9-466b-8669-45f17319055d" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2014.603243] env[62519]: DEBUG oslo_concurrency.lockutils [None req-610bfd91-d15f-42f9-8b6d-10d9ffd4a84b tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Acquired lock "refresh_cache-9bf88b2f-63f9-466b-8669-45f17319055d" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2014.603449] env[62519]: DEBUG nova.network.neutron [None req-610bfd91-d15f-42f9-8b6d-10d9ffd4a84b tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] [instance: 9bf88b2f-63f9-466b-8669-45f17319055d] Building network info cache for instance {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2014.673212] env[62519]: DEBUG oslo_concurrency.lockutils [req-aef074c0-d088-43f1-b000-24c97ae0f9d3 req-bee8ebc8-d03e-453f-b31e-654bf1629e0b service nova] Releasing lock "refresh_cache-90cdb4a1-6192-42e8-ad30-90faeefa4d18" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2014.680710] env[62519]: DEBUG nova.compute.manager [req-2d4934d6-27ef-4b6a-8428-5107f6a08401 req-37143325-8137-41e8-80c9-7304ffe33fb7 service nova] [instance: 47439070-54d8-454c-bf1d-7a2a33d82e9a] Received event network-vif-unplugged-9a08cba9-bd48-4013-a6c0-13049c0b5fdb {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 2014.680915] env[62519]: DEBUG 
oslo_concurrency.lockutils [req-2d4934d6-27ef-4b6a-8428-5107f6a08401 req-37143325-8137-41e8-80c9-7304ffe33fb7 service nova] Acquiring lock "47439070-54d8-454c-bf1d-7a2a33d82e9a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2014.681152] env[62519]: DEBUG oslo_concurrency.lockutils [req-2d4934d6-27ef-4b6a-8428-5107f6a08401 req-37143325-8137-41e8-80c9-7304ffe33fb7 service nova] Lock "47439070-54d8-454c-bf1d-7a2a33d82e9a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2014.681320] env[62519]: DEBUG oslo_concurrency.lockutils [req-2d4934d6-27ef-4b6a-8428-5107f6a08401 req-37143325-8137-41e8-80c9-7304ffe33fb7 service nova] Lock "47439070-54d8-454c-bf1d-7a2a33d82e9a-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2014.681487] env[62519]: DEBUG nova.compute.manager [req-2d4934d6-27ef-4b6a-8428-5107f6a08401 req-37143325-8137-41e8-80c9-7304ffe33fb7 service nova] [instance: 47439070-54d8-454c-bf1d-7a2a33d82e9a] No waiting events found dispatching network-vif-unplugged-9a08cba9-bd48-4013-a6c0-13049c0b5fdb {{(pid=62519) pop_instance_event /opt/stack/nova/nova/compute/manager.py:323}} [ 2014.681654] env[62519]: WARNING nova.compute.manager [req-2d4934d6-27ef-4b6a-8428-5107f6a08401 req-37143325-8137-41e8-80c9-7304ffe33fb7 service nova] [instance: 47439070-54d8-454c-bf1d-7a2a33d82e9a] Received unexpected event network-vif-unplugged-9a08cba9-bd48-4013-a6c0-13049c0b5fdb for instance with vm_state shelved and task_state shelving_offloading. [ 2014.681859] env[62519]: DEBUG nova.compute.manager [req-2d4934d6-27ef-4b6a-8428-5107f6a08401 req-37143325-8137-41e8-80c9-7304ffe33fb7 service nova] [instance: 47439070-54d8-454c-bf1d-7a2a33d82e9a] Received event network-changed-9a08cba9-bd48-4013-a6c0-13049c0b5fdb {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 2014.682052] env[62519]: DEBUG nova.compute.manager [req-2d4934d6-27ef-4b6a-8428-5107f6a08401 req-37143325-8137-41e8-80c9-7304ffe33fb7 service nova] [instance: 47439070-54d8-454c-bf1d-7a2a33d82e9a] Refreshing instance network info cache due to event network-changed-9a08cba9-bd48-4013-a6c0-13049c0b5fdb. 
{{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11628}} [ 2014.682253] env[62519]: DEBUG oslo_concurrency.lockutils [req-2d4934d6-27ef-4b6a-8428-5107f6a08401 req-37143325-8137-41e8-80c9-7304ffe33fb7 service nova] Acquiring lock "refresh_cache-47439070-54d8-454c-bf1d-7a2a33d82e9a" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2014.682389] env[62519]: DEBUG oslo_concurrency.lockutils [req-2d4934d6-27ef-4b6a-8428-5107f6a08401 req-37143325-8137-41e8-80c9-7304ffe33fb7 service nova] Acquired lock "refresh_cache-47439070-54d8-454c-bf1d-7a2a33d82e9a" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2014.682541] env[62519]: DEBUG nova.network.neutron [req-2d4934d6-27ef-4b6a-8428-5107f6a08401 req-37143325-8137-41e8-80c9-7304ffe33fb7 service nova] [instance: 47439070-54d8-454c-bf1d-7a2a33d82e9a] Refreshing network info cache for port 9a08cba9-bd48-4013-a6c0-13049c0b5fdb {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2014.697652] env[62519]: DEBUG oslo_concurrency.lockutils [None req-b1c3f0b7-e2d2-4451-8963-33b35525f2de tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2014.697880] env[62519]: DEBUG oslo_concurrency.lockutils [None req-b1c3f0b7-e2d2-4451-8963-33b35525f2de tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2014.699360] env[62519]: INFO nova.compute.claims [None req-b1c3f0b7-e2d2-4451-8963-33b35525f2de tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] [instance: a18af9b7-4548-42d1-8459-508298cb96dc] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2014.778133] env[62519]: DEBUG oslo_vmware.api [None req-50cc0e8a-f13a-4a85-abd1-026e9f33ded0 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Task: {'id': task-1803452, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.136469} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2014.778376] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-50cc0e8a-f13a-4a85-abd1-026e9f33ded0 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Deleted the datastore file {{(pid=62519) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2014.778555] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-50cc0e8a-f13a-4a85-abd1-026e9f33ded0 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] [instance: 47439070-54d8-454c-bf1d-7a2a33d82e9a] Deleted contents of the VM from datastore datastore1 {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2014.778760] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-50cc0e8a-f13a-4a85-abd1-026e9f33ded0 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] [instance: 47439070-54d8-454c-bf1d-7a2a33d82e9a] Instance destroyed {{(pid=62519) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2014.796339] env[62519]: INFO nova.scheduler.client.report [None req-50cc0e8a-f13a-4a85-abd1-026e9f33ded0 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Deleted allocations for instance 47439070-54d8-454c-bf1d-7a2a33d82e9a [ 2014.995164] env[62519]: DEBUG oslo_vmware.api [None req-d524302e-bbec-49be-af91-3aad1c2e6617 tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]5282e20d-1f6f-902f-0f42-403b094e6e1e, 'name': SearchDatastore_Task, 'duration_secs': 0.010275} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2014.996086] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c2c26358-26a5-4f8c-94c3-3f67f5d99a99 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2015.004805] env[62519]: DEBUG oslo_vmware.api [None req-d524302e-bbec-49be-af91-3aad1c2e6617 tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] Waiting for the task: (returnval){ [ 2015.004805] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]52b52520-82c6-d359-8d1e-176672d0718c" [ 2015.004805] env[62519]: _type = "Task" [ 2015.004805] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2015.013120] env[62519]: DEBUG oslo_vmware.api [None req-d524302e-bbec-49be-af91-3aad1c2e6617 tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52b52520-82c6-d359-8d1e-176672d0718c, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2015.014667] env[62519]: DEBUG oslo_concurrency.lockutils [None req-63001563-3bba-4db1-8613-bfc57ecd26c3 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2015.300538] env[62519]: DEBUG oslo_concurrency.lockutils [None req-50cc0e8a-f13a-4a85-abd1-026e9f33ded0 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2015.472133] env[62519]: DEBUG nova.network.neutron [None req-610bfd91-d15f-42f9-8b6d-10d9ffd4a84b tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] [instance: 9bf88b2f-63f9-466b-8669-45f17319055d] Updating instance_info_cache with network_info: [{"id": "2ee072d4-2bdb-4a83-90dd-06086f515634", "address": "fa:16:3e:e6:86:ec", "network": {"id": "32c7792c-9705-4830-bdbc-4c4159d566f0", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-374750788-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.241", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d73e71476254453fb23164dce09c6d41", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a0a76279-3c11-4bef-b124-2a2ee13fa377", "external-id": "nsx-vlan-transportzone-738", "segmentation_id": 738, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2ee072d4-2b", "ovs_interfaceid": "2ee072d4-2bdb-4a83-90dd-06086f515634", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2015.517549] env[62519]: DEBUG oslo_vmware.api [None req-d524302e-bbec-49be-af91-3aad1c2e6617 tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52b52520-82c6-d359-8d1e-176672d0718c, 'name': SearchDatastore_Task, 'duration_secs': 0.011013} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2015.518207] env[62519]: DEBUG oslo_concurrency.lockutils [None req-d524302e-bbec-49be-af91-3aad1c2e6617 tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] Releasing lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2015.518476] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-d524302e-bbec-49be-af91-3aad1c2e6617 tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk to [datastore1] 90cdb4a1-6192-42e8-ad30-90faeefa4d18/90cdb4a1-6192-42e8-ad30-90faeefa4d18.vmdk {{(pid=62519) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2015.518758] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-23f46882-0cc9-4109-b8e8-c4f10ccf2967 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2015.522299] env[62519]: DEBUG nova.network.neutron [req-2d4934d6-27ef-4b6a-8428-5107f6a08401 req-37143325-8137-41e8-80c9-7304ffe33fb7 service nova] [instance: 47439070-54d8-454c-bf1d-7a2a33d82e9a] Updated VIF entry in instance network info cache for port 9a08cba9-bd48-4013-a6c0-13049c0b5fdb. {{(pid=62519) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2015.525864] env[62519]: DEBUG nova.network.neutron [req-2d4934d6-27ef-4b6a-8428-5107f6a08401 req-37143325-8137-41e8-80c9-7304ffe33fb7 service nova] [instance: 47439070-54d8-454c-bf1d-7a2a33d82e9a] Updating instance_info_cache with network_info: [{"id": "9a08cba9-bd48-4013-a6c0-13049c0b5fdb", "address": "fa:16:3e:82:31:6c", "network": {"id": "1aae0533-b463-4e15-b245-e7c3c91984b5", "bridge": null, "label": "tempest-AttachVolumeShelveTestJSON-1664918037-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.137", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ff4895c6c47e438e8fb9fbc0ffbfdc82", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "unbound", "details": {}, "devname": "tap9a08cba9-bd", "ovs_interfaceid": null, "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2015.526890] env[62519]: DEBUG oslo_vmware.api [None req-d524302e-bbec-49be-af91-3aad1c2e6617 tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] Waiting for the task: (returnval){ [ 2015.526890] env[62519]: value = "task-1803453" [ 2015.526890] env[62519]: _type = "Task" [ 2015.526890] env[62519]: } to complete. 
{{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2015.535846] env[62519]: DEBUG oslo_vmware.api [None req-d524302e-bbec-49be-af91-3aad1c2e6617 tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] Task: {'id': task-1803453, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2015.813489] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f1345f1-9088-4861-8fe4-3df48bb8822b {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2015.822433] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-137a6c7c-2776-4718-98a9-58be2c070a92 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2015.854795] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df832529-9b66-4248-880c-61ebfebc2e2e {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2015.863996] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc112791-e055-452d-9144-a17e28ecbfb1 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2015.879241] env[62519]: DEBUG nova.compute.provider_tree [None req-b1c3f0b7-e2d2-4451-8963-33b35525f2de tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Inventory has not changed in ProviderTree for provider: f8ca0d98-9158-4b85-ae0e-b106f966dd44 {{(pid=62519) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2015.973826] env[62519]: DEBUG oslo_concurrency.lockutils [None req-610bfd91-d15f-42f9-8b6d-10d9ffd4a84b tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Releasing lock "refresh_cache-9bf88b2f-63f9-466b-8669-45f17319055d" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2016.027936] env[62519]: DEBUG oslo_concurrency.lockutils [req-2d4934d6-27ef-4b6a-8428-5107f6a08401 req-37143325-8137-41e8-80c9-7304ffe33fb7 service nova] Releasing lock "refresh_cache-47439070-54d8-454c-bf1d-7a2a33d82e9a" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2016.038103] env[62519]: DEBUG oslo_vmware.api [None req-d524302e-bbec-49be-af91-3aad1c2e6617 tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] Task: {'id': task-1803453, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.486146} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2016.038368] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-d524302e-bbec-49be-af91-3aad1c2e6617 tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk to [datastore1] 90cdb4a1-6192-42e8-ad30-90faeefa4d18/90cdb4a1-6192-42e8-ad30-90faeefa4d18.vmdk {{(pid=62519) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2016.038581] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-d524302e-bbec-49be-af91-3aad1c2e6617 tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] [instance: 90cdb4a1-6192-42e8-ad30-90faeefa4d18] Extending root virtual disk to 1048576 {{(pid=62519) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2016.038840] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-d0bd377d-4ee0-48c1-8e6f-e2e4120ba574 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2016.047605] env[62519]: DEBUG oslo_vmware.api [None req-d524302e-bbec-49be-af91-3aad1c2e6617 tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] Waiting for the task: (returnval){ [ 2016.047605] env[62519]: value = "task-1803454" [ 2016.047605] env[62519]: _type = "Task" [ 2016.047605] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2016.054695] env[62519]: DEBUG oslo_vmware.api [None req-d524302e-bbec-49be-af91-3aad1c2e6617 tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] Task: {'id': task-1803454, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2016.382940] env[62519]: DEBUG nova.scheduler.client.report [None req-b1c3f0b7-e2d2-4451-8963-33b35525f2de tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Inventory has not changed for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 159, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2016.482345] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df551d21-a2d8-459c-9726-86dc0bde4be6 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2016.489650] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf8bad03-1295-4b75-a4ed-829991cee166 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2016.556195] env[62519]: DEBUG oslo_vmware.api [None req-d524302e-bbec-49be-af91-3aad1c2e6617 tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] Task: {'id': task-1803454, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.062069} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2016.556537] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-d524302e-bbec-49be-af91-3aad1c2e6617 tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] [instance: 90cdb4a1-6192-42e8-ad30-90faeefa4d18] Extended root virtual disk {{(pid=62519) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2016.557348] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-979525d5-5ab8-4bd0-8813-6c8a489a7bf4 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2016.581050] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-d524302e-bbec-49be-af91-3aad1c2e6617 tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] [instance: 90cdb4a1-6192-42e8-ad30-90faeefa4d18] Reconfiguring VM instance instance-00000075 to attach disk [datastore1] 90cdb4a1-6192-42e8-ad30-90faeefa4d18/90cdb4a1-6192-42e8-ad30-90faeefa4d18.vmdk or device None with type sparse {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2016.581296] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-fd081016-a05a-4949-8f54-b389e9e83733 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2016.595779] env[62519]: DEBUG oslo_concurrency.lockutils [None req-5505890f-601f-4728-a979-16c777543556 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Acquiring lock "47439070-54d8-454c-bf1d-7a2a33d82e9a" by 
"nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2016.602301] env[62519]: DEBUG oslo_vmware.api [None req-d524302e-bbec-49be-af91-3aad1c2e6617 tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] Waiting for the task: (returnval){ [ 2016.602301] env[62519]: value = "task-1803455" [ 2016.602301] env[62519]: _type = "Task" [ 2016.602301] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2016.611580] env[62519]: DEBUG oslo_vmware.api [None req-d524302e-bbec-49be-af91-3aad1c2e6617 tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] Task: {'id': task-1803455, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2016.887790] env[62519]: DEBUG oslo_concurrency.lockutils [None req-b1c3f0b7-e2d2-4451-8963-33b35525f2de tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.190s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2016.888299] env[62519]: DEBUG nova.compute.manager [None req-b1c3f0b7-e2d2-4451-8963-33b35525f2de tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] [instance: a18af9b7-4548-42d1-8459-508298cb96dc] Start building networks asynchronously for instance. {{(pid=62519) _build_resources /opt/stack/nova/nova/compute/manager.py:2838}} [ 2016.891269] env[62519]: DEBUG oslo_concurrency.lockutils [None req-63001563-3bba-4db1-8613-bfc57ecd26c3 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 1.877s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2016.892939] env[62519]: INFO nova.compute.claims [None req-63001563-3bba-4db1-8613-bfc57ecd26c3 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] [instance: 4049de7f-4ace-4017-8f9a-63817de5f81c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2017.112565] env[62519]: DEBUG oslo_vmware.api [None req-d524302e-bbec-49be-af91-3aad1c2e6617 tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] Task: {'id': task-1803455, 'name': ReconfigVM_Task, 'duration_secs': 0.315483} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2017.112868] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-d524302e-bbec-49be-af91-3aad1c2e6617 tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] [instance: 90cdb4a1-6192-42e8-ad30-90faeefa4d18] Reconfigured VM instance instance-00000075 to attach disk [datastore1] 90cdb4a1-6192-42e8-ad30-90faeefa4d18/90cdb4a1-6192-42e8-ad30-90faeefa4d18.vmdk or device None with type sparse {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2017.113555] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-e67fb0d4-3bc0-41b3-9537-75febb11126c {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2017.120969] env[62519]: DEBUG oslo_vmware.api [None req-d524302e-bbec-49be-af91-3aad1c2e6617 tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] Waiting for the task: (returnval){ [ 2017.120969] env[62519]: value = "task-1803456" [ 2017.120969] env[62519]: _type = "Task" [ 2017.120969] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2017.133419] env[62519]: DEBUG oslo_vmware.api [None req-d524302e-bbec-49be-af91-3aad1c2e6617 tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] Task: {'id': task-1803456, 'name': Rename_Task} progress is 5%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2017.398071] env[62519]: DEBUG nova.compute.utils [None req-b1c3f0b7-e2d2-4451-8963-33b35525f2de tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Using /dev/sd instead of None {{(pid=62519) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2017.401396] env[62519]: DEBUG nova.compute.manager [None req-b1c3f0b7-e2d2-4451-8963-33b35525f2de tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] [instance: a18af9b7-4548-42d1-8459-508298cb96dc] Allocating IP information in the background. 
{{(pid=62519) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1989}} [ 2017.401556] env[62519]: DEBUG nova.network.neutron [None req-b1c3f0b7-e2d2-4451-8963-33b35525f2de tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] [instance: a18af9b7-4548-42d1-8459-508298cb96dc] allocate_for_instance() {{(pid=62519) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 2017.439802] env[62519]: DEBUG nova.policy [None req-b1c3f0b7-e2d2-4451-8963-33b35525f2de tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '7eca5c7b079d4785941d68d7c51df5e0', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '63a46158057949478e5c79fbe0d4d5d4', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62519) authorize /opt/stack/nova/nova/policy.py:192}} [ 2017.591679] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e15698ed-3fbf-4d9b-a916-1768ce89432e {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2017.616750] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e86b6e5-b58d-4d66-ba06-dd1f1bbbe684 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2017.627306] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-610bfd91-d15f-42f9-8b6d-10d9ffd4a84b tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] [instance: 9bf88b2f-63f9-466b-8669-45f17319055d] Updating instance '9bf88b2f-63f9-466b-8669-45f17319055d' progress to 83 {{(pid=62519) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2017.636133] env[62519]: DEBUG oslo_vmware.api [None req-d524302e-bbec-49be-af91-3aad1c2e6617 tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] Task: {'id': task-1803456, 'name': Rename_Task, 'duration_secs': 0.146876} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2017.636390] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-d524302e-bbec-49be-af91-3aad1c2e6617 tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] [instance: 90cdb4a1-6192-42e8-ad30-90faeefa4d18] Powering on the VM {{(pid=62519) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2017.636615] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-4b200f89-355a-445d-871c-60b6fbebf90a {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2017.642919] env[62519]: DEBUG oslo_vmware.api [None req-d524302e-bbec-49be-af91-3aad1c2e6617 tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] Waiting for the task: (returnval){ [ 2017.642919] env[62519]: value = "task-1803457" [ 2017.642919] env[62519]: _type = "Task" [ 2017.642919] env[62519]: } to complete. 
{{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2017.650985] env[62519]: DEBUG oslo_vmware.api [None req-d524302e-bbec-49be-af91-3aad1c2e6617 tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] Task: {'id': task-1803457, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2017.706729] env[62519]: DEBUG nova.network.neutron [None req-b1c3f0b7-e2d2-4451-8963-33b35525f2de tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] [instance: a18af9b7-4548-42d1-8459-508298cb96dc] Successfully created port: 7b3a7ba6-9ee9-40fb-94c8-f7593ffc1ca2 {{(pid=62519) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2017.902651] env[62519]: DEBUG nova.compute.manager [None req-b1c3f0b7-e2d2-4451-8963-33b35525f2de tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] [instance: a18af9b7-4548-42d1-8459-508298cb96dc] Start building block device mappings for instance. {{(pid=62519) _build_resources /opt/stack/nova/nova/compute/manager.py:2873}} [ 2018.007156] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3de94b61-1b97-46f1-8c5b-d4723d0e7da2 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2018.016231] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8837efbd-8dd8-4a5c-a31e-a5e1a0a7fab7 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2018.052976] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9d13d0e-e927-4b79-88c3-9885d81302fa {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2018.062050] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3233c130-e119-4a31-9329-d4a3a264a32d {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2018.078041] env[62519]: DEBUG nova.compute.provider_tree [None req-63001563-3bba-4db1-8613-bfc57ecd26c3 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Inventory has not changed in ProviderTree for provider: f8ca0d98-9158-4b85-ae0e-b106f966dd44 {{(pid=62519) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2018.134137] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-610bfd91-d15f-42f9-8b6d-10d9ffd4a84b tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] [instance: 9bf88b2f-63f9-466b-8669-45f17319055d] Powering on the VM {{(pid=62519) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2018.134431] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e90cc29b-d8c3-4ff3-9129-a681a72fe7c8 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2018.143078] env[62519]: DEBUG oslo_vmware.api [None req-610bfd91-d15f-42f9-8b6d-10d9ffd4a84b tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Waiting for 
the task: (returnval){ [ 2018.143078] env[62519]: value = "task-1803458" [ 2018.143078] env[62519]: _type = "Task" [ 2018.143078] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2018.157261] env[62519]: DEBUG oslo_vmware.api [None req-d524302e-bbec-49be-af91-3aad1c2e6617 tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] Task: {'id': task-1803457, 'name': PowerOnVM_Task, 'duration_secs': 0.428815} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2018.160196] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-d524302e-bbec-49be-af91-3aad1c2e6617 tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] [instance: 90cdb4a1-6192-42e8-ad30-90faeefa4d18] Powered on the VM {{(pid=62519) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2018.160462] env[62519]: INFO nova.compute.manager [None req-d524302e-bbec-49be-af91-3aad1c2e6617 tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] [instance: 90cdb4a1-6192-42e8-ad30-90faeefa4d18] Took 7.05 seconds to spawn the instance on the hypervisor. [ 2018.160695] env[62519]: DEBUG nova.compute.manager [None req-d524302e-bbec-49be-af91-3aad1c2e6617 tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] [instance: 90cdb4a1-6192-42e8-ad30-90faeefa4d18] Checking state {{(pid=62519) _get_power_state /opt/stack/nova/nova/compute/manager.py:1799}} [ 2018.160999] env[62519]: DEBUG oslo_vmware.api [None req-610bfd91-d15f-42f9-8b6d-10d9ffd4a84b tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Task: {'id': task-1803458, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2018.161796] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9868e3e4-f1a6-4424-868b-3fa1f36dbafc {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2018.581959] env[62519]: DEBUG nova.scheduler.client.report [None req-63001563-3bba-4db1-8613-bfc57ecd26c3 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Inventory has not changed for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 159, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2018.654186] env[62519]: DEBUG oslo_vmware.api [None req-610bfd91-d15f-42f9-8b6d-10d9ffd4a84b tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Task: {'id': task-1803458, 'name': PowerOnVM_Task, 'duration_secs': 0.397354} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2018.654468] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-610bfd91-d15f-42f9-8b6d-10d9ffd4a84b tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] [instance: 9bf88b2f-63f9-466b-8669-45f17319055d] Powered on the VM {{(pid=62519) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2018.654655] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-610bfd91-d15f-42f9-8b6d-10d9ffd4a84b tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] [instance: 9bf88b2f-63f9-466b-8669-45f17319055d] Updating instance '9bf88b2f-63f9-466b-8669-45f17319055d' progress to 100 {{(pid=62519) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2018.678930] env[62519]: INFO nova.compute.manager [None req-d524302e-bbec-49be-af91-3aad1c2e6617 tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] [instance: 90cdb4a1-6192-42e8-ad30-90faeefa4d18] Took 11.82 seconds to build instance. [ 2018.916179] env[62519]: DEBUG nova.compute.manager [None req-b1c3f0b7-e2d2-4451-8963-33b35525f2de tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] [instance: a18af9b7-4548-42d1-8459-508298cb96dc] Start spawning the instance on the hypervisor. {{(pid=62519) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2647}} [ 2018.942840] env[62519]: DEBUG nova.virt.hardware [None req-b1c3f0b7-e2d2-4451-8963-33b35525f2de tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T07:56:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T07:55:55Z,direct_url=,disk_format='vmdk',id=15793716-f1d9-4a86-9030-717adf498693,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4069337684d348e0a1ab4eb6a1a2b14d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T07:55:56Z,virtual_size=,visibility=), allow threads: False {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 2018.943392] env[62519]: DEBUG nova.virt.hardware [None req-b1c3f0b7-e2d2-4451-8963-33b35525f2de tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Flavor limits 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2018.943392] env[62519]: DEBUG nova.virt.hardware [None req-b1c3f0b7-e2d2-4451-8963-33b35525f2de tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Image limits 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 2018.943513] env[62519]: DEBUG nova.virt.hardware [None req-b1c3f0b7-e2d2-4451-8963-33b35525f2de tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Flavor pref 0:0:0 {{(pid=62519) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:392}} [ 2018.943650] env[62519]: DEBUG nova.virt.hardware [None req-b1c3f0b7-e2d2-4451-8963-33b35525f2de tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Image pref 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 2018.943756] env[62519]: DEBUG nova.virt.hardware [None req-b1c3f0b7-e2d2-4451-8963-33b35525f2de tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 2018.943957] env[62519]: DEBUG nova.virt.hardware [None req-b1c3f0b7-e2d2-4451-8963-33b35525f2de tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 2018.944249] env[62519]: DEBUG nova.virt.hardware [None req-b1c3f0b7-e2d2-4451-8963-33b35525f2de tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62519) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 2018.944474] env[62519]: DEBUG nova.virt.hardware [None req-b1c3f0b7-e2d2-4451-8963-33b35525f2de tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Got 1 possible topologies {{(pid=62519) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 2018.944653] env[62519]: DEBUG nova.virt.hardware [None req-b1c3f0b7-e2d2-4451-8963-33b35525f2de tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 2018.944814] env[62519]: DEBUG nova.virt.hardware [None req-b1c3f0b7-e2d2-4451-8963-33b35525f2de tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 2018.945747] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5138e481-47b9-4db2-a51e-244ce08983ac {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2018.954370] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57d05f44-b136-4f58-939b-6e9556975110 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2019.087837] env[62519]: DEBUG oslo_concurrency.lockutils [None req-63001563-3bba-4db1-8613-bfc57ecd26c3 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.196s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2019.088596] env[62519]: DEBUG nova.compute.manager [None 
req-63001563-3bba-4db1-8613-bfc57ecd26c3 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] [instance: 4049de7f-4ace-4017-8f9a-63817de5f81c] Start building networks asynchronously for instance. {{(pid=62519) _build_resources /opt/stack/nova/nova/compute/manager.py:2838}} [ 2019.093584] env[62519]: DEBUG oslo_concurrency.lockutils [None req-50cc0e8a-f13a-4a85-abd1-026e9f33ded0 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 3.793s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2019.093864] env[62519]: DEBUG nova.objects.instance [None req-50cc0e8a-f13a-4a85-abd1-026e9f33ded0 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Lazy-loading 'resources' on Instance uuid 47439070-54d8-454c-bf1d-7a2a33d82e9a {{(pid=62519) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2019.096035] env[62519]: DEBUG nova.compute.manager [req-565c9abb-3474-44ff-8665-114e1fdc3ec6 req-0f0d471a-b5c2-4714-887b-a78219f572c7 service nova] [instance: a18af9b7-4548-42d1-8459-508298cb96dc] Received event network-vif-plugged-7b3a7ba6-9ee9-40fb-94c8-f7593ffc1ca2 {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 2019.096303] env[62519]: DEBUG oslo_concurrency.lockutils [req-565c9abb-3474-44ff-8665-114e1fdc3ec6 req-0f0d471a-b5c2-4714-887b-a78219f572c7 service nova] Acquiring lock "a18af9b7-4548-42d1-8459-508298cb96dc-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2019.096654] env[62519]: DEBUG oslo_concurrency.lockutils [req-565c9abb-3474-44ff-8665-114e1fdc3ec6 req-0f0d471a-b5c2-4714-887b-a78219f572c7 service nova] Lock "a18af9b7-4548-42d1-8459-508298cb96dc-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2019.097391] env[62519]: DEBUG oslo_concurrency.lockutils [req-565c9abb-3474-44ff-8665-114e1fdc3ec6 req-0f0d471a-b5c2-4714-887b-a78219f572c7 service nova] Lock "a18af9b7-4548-42d1-8459-508298cb96dc-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2019.097391] env[62519]: DEBUG nova.compute.manager [req-565c9abb-3474-44ff-8665-114e1fdc3ec6 req-0f0d471a-b5c2-4714-887b-a78219f572c7 service nova] [instance: a18af9b7-4548-42d1-8459-508298cb96dc] No waiting events found dispatching network-vif-plugged-7b3a7ba6-9ee9-40fb-94c8-f7593ffc1ca2 {{(pid=62519) pop_instance_event /opt/stack/nova/nova/compute/manager.py:323}} [ 2019.097391] env[62519]: WARNING nova.compute.manager [req-565c9abb-3474-44ff-8665-114e1fdc3ec6 req-0f0d471a-b5c2-4714-887b-a78219f572c7 service nova] [instance: a18af9b7-4548-42d1-8459-508298cb96dc] Received unexpected event network-vif-plugged-7b3a7ba6-9ee9-40fb-94c8-f7593ffc1ca2 for instance with vm_state building and task_state spawning. 
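The lockutils entries above show how Nova serializes per-instance external events: the handler takes a named "<instance-uuid>-events" lock, pops any registered waiter for the event, and logs the event as unexpected when nothing is waiting yet (the network-vif-plugged WARNING), which is normal while the instance is still building and spawning. Below is a minimal, hedged sketch of that named-lock pattern using oslo.concurrency; the function, lock-name format and waiter table are illustrative placeholders, not Nova's actual implementation.

    # Minimal sketch of the named-lock pattern behind the
    # 'Acquiring lock "<uuid>-events" ... acquired ... released' lines above.
    # The waiter table and return value are illustrative placeholders.
    from oslo_concurrency import lockutils

    def pop_instance_event(instance_uuid, event_name, waiters):
        # Serialize access to this instance's event table, mirroring the
        # "<uuid>-events" lock recorded in the log.
        with lockutils.lock('%s-events' % instance_uuid):
            handler = waiters.pop(event_name, None)
        if handler is None:
            # No registered waiter: the event arrived early, analogous to the
            # "Received unexpected event ..." WARNING while spawning.
            return None
        return handler

    # Example: no waiter registered yet, so the event is treated as unexpected.
    print(pop_instance_event('a18af9b7-4548-42d1-8459-508298cb96dc',
                             'network-vif-plugged', {}))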
[ 2019.113685] env[62519]: INFO nova.compute.manager [None req-6d2f73aa-413e-48ec-b00a-311d813d3466 tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] [instance: 90cdb4a1-6192-42e8-ad30-90faeefa4d18] Rescuing [ 2019.113971] env[62519]: DEBUG oslo_concurrency.lockutils [None req-6d2f73aa-413e-48ec-b00a-311d813d3466 tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] Acquiring lock "refresh_cache-90cdb4a1-6192-42e8-ad30-90faeefa4d18" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2019.114454] env[62519]: DEBUG oslo_concurrency.lockutils [None req-6d2f73aa-413e-48ec-b00a-311d813d3466 tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] Acquired lock "refresh_cache-90cdb4a1-6192-42e8-ad30-90faeefa4d18" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2019.114454] env[62519]: DEBUG nova.network.neutron [None req-6d2f73aa-413e-48ec-b00a-311d813d3466 tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] [instance: 90cdb4a1-6192-42e8-ad30-90faeefa4d18] Building network info cache for instance {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2019.189969] env[62519]: DEBUG nova.network.neutron [None req-b1c3f0b7-e2d2-4451-8963-33b35525f2de tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] [instance: a18af9b7-4548-42d1-8459-508298cb96dc] Successfully updated port: 7b3a7ba6-9ee9-40fb-94c8-f7593ffc1ca2 {{(pid=62519) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2019.594062] env[62519]: DEBUG nova.compute.utils [None req-63001563-3bba-4db1-8613-bfc57ecd26c3 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Using /dev/sd instead of None {{(pid=62519) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2019.595486] env[62519]: DEBUG nova.compute.manager [None req-63001563-3bba-4db1-8613-bfc57ecd26c3 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] [instance: 4049de7f-4ace-4017-8f9a-63817de5f81c] Allocating IP information in the background. 
{{(pid=62519) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1989}} [ 2019.595751] env[62519]: DEBUG nova.network.neutron [None req-63001563-3bba-4db1-8613-bfc57ecd26c3 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] [instance: 4049de7f-4ace-4017-8f9a-63817de5f81c] allocate_for_instance() {{(pid=62519) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 2019.599404] env[62519]: DEBUG nova.objects.instance [None req-50cc0e8a-f13a-4a85-abd1-026e9f33ded0 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Lazy-loading 'numa_topology' on Instance uuid 47439070-54d8-454c-bf1d-7a2a33d82e9a {{(pid=62519) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2019.652997] env[62519]: DEBUG nova.policy [None req-63001563-3bba-4db1-8613-bfc57ecd26c3 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c586ba4158b947b0a25d1614c17ebb51', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '12977ed65a1b410a987b049e9d1dce3e', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62519) authorize /opt/stack/nova/nova/policy.py:192}} [ 2019.689306] env[62519]: DEBUG oslo_concurrency.lockutils [None req-b1c3f0b7-e2d2-4451-8963-33b35525f2de tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Acquiring lock "refresh_cache-a18af9b7-4548-42d1-8459-508298cb96dc" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2019.689466] env[62519]: DEBUG oslo_concurrency.lockutils [None req-b1c3f0b7-e2d2-4451-8963-33b35525f2de tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Acquired lock "refresh_cache-a18af9b7-4548-42d1-8459-508298cb96dc" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2019.689602] env[62519]: DEBUG nova.network.neutron [None req-b1c3f0b7-e2d2-4451-8963-33b35525f2de tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] [instance: a18af9b7-4548-42d1-8459-508298cb96dc] Building network info cache for instance {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2019.924214] env[62519]: DEBUG nova.network.neutron [None req-6d2f73aa-413e-48ec-b00a-311d813d3466 tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] [instance: 90cdb4a1-6192-42e8-ad30-90faeefa4d18] Updating instance_info_cache with network_info: [{"id": "9c28f995-e2b2-4556-a305-8b9194fd6e11", "address": "fa:16:3e:eb:ed:d5", "network": {"id": "8bab7e32-d01d-4dff-8b50-f49f6a04314c", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-1759123998-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": 
"8eb0014c990b48ba8f9cfc5629d72f78", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "61a172ee-af3f-473e-b12a-3fee5bf39c8d", "external-id": "nsx-vlan-transportzone-997", "segmentation_id": 997, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9c28f995-e2", "ovs_interfaceid": "9c28f995-e2b2-4556-a305-8b9194fd6e11", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2020.023833] env[62519]: DEBUG nova.network.neutron [None req-63001563-3bba-4db1-8613-bfc57ecd26c3 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] [instance: 4049de7f-4ace-4017-8f9a-63817de5f81c] Successfully created port: 55b99762-526d-4033-9eed-24af176c71e4 {{(pid=62519) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2020.099180] env[62519]: DEBUG nova.compute.manager [None req-63001563-3bba-4db1-8613-bfc57ecd26c3 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] [instance: 4049de7f-4ace-4017-8f9a-63817de5f81c] Start building block device mappings for instance. {{(pid=62519) _build_resources /opt/stack/nova/nova/compute/manager.py:2873}} [ 2020.102347] env[62519]: DEBUG nova.objects.base [None req-50cc0e8a-f13a-4a85-abd1-026e9f33ded0 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Object Instance<47439070-54d8-454c-bf1d-7a2a33d82e9a> lazy-loaded attributes: resources,numa_topology {{(pid=62519) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 2020.185771] env[62519]: DEBUG oslo_concurrency.lockutils [None req-d524302e-bbec-49be-af91-3aad1c2e6617 tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] Lock "90cdb4a1-6192-42e8-ad30-90faeefa4d18" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 14.336s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2020.215521] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a38ebaf6-8acd-48da-83ec-fc8f86f1e5c4 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2020.225142] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2d96564-2aed-4773-b90c-886efe8af836 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2020.229445] env[62519]: DEBUG nova.network.neutron [None req-b1c3f0b7-e2d2-4451-8963-33b35525f2de tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] [instance: a18af9b7-4548-42d1-8459-508298cb96dc] Instance cache missing network info. 
{{(pid=62519) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2020.260265] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76e03c07-efba-49c8-bdeb-f38e753e4055 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2020.268690] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ea493ea-59da-4e0f-b532-159457f4fdd2 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2020.283572] env[62519]: DEBUG nova.compute.provider_tree [None req-50cc0e8a-f13a-4a85-abd1-026e9f33ded0 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Inventory has not changed in ProviderTree for provider: f8ca0d98-9158-4b85-ae0e-b106f966dd44 {{(pid=62519) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2020.427600] env[62519]: DEBUG oslo_concurrency.lockutils [None req-6d2f73aa-413e-48ec-b00a-311d813d3466 tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] Releasing lock "refresh_cache-90cdb4a1-6192-42e8-ad30-90faeefa4d18" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2020.590778] env[62519]: DEBUG nova.network.neutron [None req-b1c3f0b7-e2d2-4451-8963-33b35525f2de tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] [instance: a18af9b7-4548-42d1-8459-508298cb96dc] Updating instance_info_cache with network_info: [{"id": "7b3a7ba6-9ee9-40fb-94c8-f7593ffc1ca2", "address": "fa:16:3e:4a:4f:32", "network": {"id": "3996e7f6-f093-4152-af91-6fb77f32a1c5", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-2013009403-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "63a46158057949478e5c79fbe0d4d5d4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3093647a-bab7-4562-ada0-428725e8c0fc", "external-id": "nsx-vlan-transportzone-660", "segmentation_id": 660, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7b3a7ba6-9e", "ovs_interfaceid": "7b3a7ba6-9ee9-40fb-94c8-f7593ffc1ca2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2020.786966] env[62519]: DEBUG nova.scheduler.client.report [None req-50cc0e8a-f13a-4a85-abd1-026e9f33ded0 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Inventory has not changed for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 
400, 'reserved': 0, 'min_unit': 1, 'max_unit': 159, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2021.094394] env[62519]: DEBUG oslo_concurrency.lockutils [None req-b1c3f0b7-e2d2-4451-8963-33b35525f2de tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Releasing lock "refresh_cache-a18af9b7-4548-42d1-8459-508298cb96dc" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2021.094394] env[62519]: DEBUG nova.compute.manager [None req-b1c3f0b7-e2d2-4451-8963-33b35525f2de tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] [instance: a18af9b7-4548-42d1-8459-508298cb96dc] Instance network_info: |[{"id": "7b3a7ba6-9ee9-40fb-94c8-f7593ffc1ca2", "address": "fa:16:3e:4a:4f:32", "network": {"id": "3996e7f6-f093-4152-af91-6fb77f32a1c5", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-2013009403-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "63a46158057949478e5c79fbe0d4d5d4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3093647a-bab7-4562-ada0-428725e8c0fc", "external-id": "nsx-vlan-transportzone-660", "segmentation_id": 660, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7b3a7ba6-9e", "ovs_interfaceid": "7b3a7ba6-9ee9-40fb-94c8-f7593ffc1ca2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62519) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2004}} [ 2021.094394] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-b1c3f0b7-e2d2-4451-8963-33b35525f2de tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] [instance: a18af9b7-4548-42d1-8459-508298cb96dc] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:4a:4f:32', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '3093647a-bab7-4562-ada0-428725e8c0fc', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '7b3a7ba6-9ee9-40fb-94c8-f7593ffc1ca2', 'vif_model': 'vmxnet3'}] {{(pid=62519) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2021.102299] env[62519]: DEBUG oslo.service.loopingcall [None req-b1c3f0b7-e2d2-4451-8963-33b35525f2de tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62519) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2021.102922] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a18af9b7-4548-42d1-8459-508298cb96dc] Creating VM on the ESX host {{(pid=62519) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2021.103228] env[62519]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-5b5e6bdf-8a5b-407a-91e9-02879058ca97 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2021.120677] env[62519]: DEBUG nova.compute.manager [None req-63001563-3bba-4db1-8613-bfc57ecd26c3 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] [instance: 4049de7f-4ace-4017-8f9a-63817de5f81c] Start spawning the instance on the hypervisor. {{(pid=62519) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2647}} [ 2021.124060] env[62519]: DEBUG nova.compute.manager [req-dcb06bc9-01b4-438d-90c4-6e8e4941688f req-938d4996-51a9-4956-96d5-3ac0b37a255a service nova] [instance: a18af9b7-4548-42d1-8459-508298cb96dc] Received event network-changed-7b3a7ba6-9ee9-40fb-94c8-f7593ffc1ca2 {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 2021.124060] env[62519]: DEBUG nova.compute.manager [req-dcb06bc9-01b4-438d-90c4-6e8e4941688f req-938d4996-51a9-4956-96d5-3ac0b37a255a service nova] [instance: a18af9b7-4548-42d1-8459-508298cb96dc] Refreshing instance network info cache due to event network-changed-7b3a7ba6-9ee9-40fb-94c8-f7593ffc1ca2. {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11628}} [ 2021.124295] env[62519]: DEBUG oslo_concurrency.lockutils [req-dcb06bc9-01b4-438d-90c4-6e8e4941688f req-938d4996-51a9-4956-96d5-3ac0b37a255a service nova] Acquiring lock "refresh_cache-a18af9b7-4548-42d1-8459-508298cb96dc" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2021.124475] env[62519]: DEBUG oslo_concurrency.lockutils [req-dcb06bc9-01b4-438d-90c4-6e8e4941688f req-938d4996-51a9-4956-96d5-3ac0b37a255a service nova] Acquired lock "refresh_cache-a18af9b7-4548-42d1-8459-508298cb96dc" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2021.124733] env[62519]: DEBUG nova.network.neutron [req-dcb06bc9-01b4-438d-90c4-6e8e4941688f req-938d4996-51a9-4956-96d5-3ac0b37a255a service nova] [instance: a18af9b7-4548-42d1-8459-508298cb96dc] Refreshing network info cache for port 7b3a7ba6-9ee9-40fb-94c8-f7593ffc1ca2 {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2021.134089] env[62519]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2021.134089] env[62519]: value = "task-1803459" [ 2021.134089] env[62519]: _type = "Task" [ 2021.134089] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2021.144834] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1803459, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2021.153738] env[62519]: DEBUG nova.virt.hardware [None req-63001563-3bba-4db1-8613-bfc57ecd26c3 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T07:56:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T07:55:55Z,direct_url=,disk_format='vmdk',id=15793716-f1d9-4a86-9030-717adf498693,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4069337684d348e0a1ab4eb6a1a2b14d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T07:55:56Z,virtual_size=,visibility=), allow threads: False {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 2021.153972] env[62519]: DEBUG nova.virt.hardware [None req-63001563-3bba-4db1-8613-bfc57ecd26c3 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Flavor limits 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2021.154151] env[62519]: DEBUG nova.virt.hardware [None req-63001563-3bba-4db1-8613-bfc57ecd26c3 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Image limits 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 2021.154336] env[62519]: DEBUG nova.virt.hardware [None req-63001563-3bba-4db1-8613-bfc57ecd26c3 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Flavor pref 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2021.154478] env[62519]: DEBUG nova.virt.hardware [None req-63001563-3bba-4db1-8613-bfc57ecd26c3 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Image pref 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 2021.154624] env[62519]: DEBUG nova.virt.hardware [None req-63001563-3bba-4db1-8613-bfc57ecd26c3 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 2021.154828] env[62519]: DEBUG nova.virt.hardware [None req-63001563-3bba-4db1-8613-bfc57ecd26c3 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 2021.155180] env[62519]: DEBUG nova.virt.hardware [None req-63001563-3bba-4db1-8613-bfc57ecd26c3 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62519) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 2021.155445] env[62519]: DEBUG nova.virt.hardware [None 
req-63001563-3bba-4db1-8613-bfc57ecd26c3 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Got 1 possible topologies {{(pid=62519) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 2021.155634] env[62519]: DEBUG nova.virt.hardware [None req-63001563-3bba-4db1-8613-bfc57ecd26c3 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 2021.155806] env[62519]: DEBUG nova.virt.hardware [None req-63001563-3bba-4db1-8613-bfc57ecd26c3 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 2021.156653] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-187b7fd1-4331-4032-9190-e74800664da7 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2021.168524] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e110624-bff0-48ce-b610-190e2823db5e {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2021.265225] env[62519]: DEBUG nova.network.neutron [None req-1c1de668-722b-4bef-8122-fb6596c6c4bb tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] [instance: 9bf88b2f-63f9-466b-8669-45f17319055d] Port 2ee072d4-2bdb-4a83-90dd-06086f515634 binding to destination host cpu-1 is already ACTIVE {{(pid=62519) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 2021.265552] env[62519]: DEBUG oslo_concurrency.lockutils [None req-1c1de668-722b-4bef-8122-fb6596c6c4bb tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Acquiring lock "refresh_cache-9bf88b2f-63f9-466b-8669-45f17319055d" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2021.265711] env[62519]: DEBUG oslo_concurrency.lockutils [None req-1c1de668-722b-4bef-8122-fb6596c6c4bb tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Acquired lock "refresh_cache-9bf88b2f-63f9-466b-8669-45f17319055d" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2021.265876] env[62519]: DEBUG nova.network.neutron [None req-1c1de668-722b-4bef-8122-fb6596c6c4bb tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] [instance: 9bf88b2f-63f9-466b-8669-45f17319055d] Building network info cache for instance {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2021.291979] env[62519]: DEBUG oslo_concurrency.lockutils [None req-50cc0e8a-f13a-4a85-abd1-026e9f33ded0 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.198s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2021.645964] env[62519]: DEBUG 
oslo_vmware.api [-] Task: {'id': task-1803459, 'name': CreateVM_Task, 'duration_secs': 0.440939} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2021.646517] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a18af9b7-4548-42d1-8459-508298cb96dc] Created VM on the ESX host {{(pid=62519) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2021.647621] env[62519]: DEBUG oslo_concurrency.lockutils [None req-b1c3f0b7-e2d2-4451-8963-33b35525f2de tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2021.647817] env[62519]: DEBUG oslo_concurrency.lockutils [None req-b1c3f0b7-e2d2-4451-8963-33b35525f2de tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Acquired lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2021.648147] env[62519]: DEBUG oslo_concurrency.lockutils [None req-b1c3f0b7-e2d2-4451-8963-33b35525f2de tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2021.648445] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d444301e-3025-43b9-8c72-25eae977a695 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2021.654410] env[62519]: DEBUG oslo_vmware.api [None req-b1c3f0b7-e2d2-4451-8963-33b35525f2de tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Waiting for the task: (returnval){ [ 2021.654410] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]52db7182-cc8d-23f1-e014-9b70b9173cef" [ 2021.654410] env[62519]: _type = "Task" [ 2021.654410] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2021.663658] env[62519]: DEBUG oslo_vmware.api [None req-b1c3f0b7-e2d2-4451-8963-33b35525f2de tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52db7182-cc8d-23f1-e014-9b70b9173cef, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2021.802979] env[62519]: DEBUG oslo_concurrency.lockutils [None req-50cc0e8a-f13a-4a85-abd1-026e9f33ded0 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Lock "47439070-54d8-454c-bf1d-7a2a33d82e9a" "released" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: held 28.303s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2021.803969] env[62519]: DEBUG oslo_concurrency.lockutils [None req-5505890f-601f-4728-a979-16c777543556 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Lock "47439070-54d8-454c-bf1d-7a2a33d82e9a" acquired by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: waited 5.208s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2021.804183] env[62519]: INFO nova.compute.manager [None req-5505890f-601f-4728-a979-16c777543556 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] [instance: 47439070-54d8-454c-bf1d-7a2a33d82e9a] Unshelving [ 2021.869047] env[62519]: DEBUG nova.network.neutron [None req-63001563-3bba-4db1-8613-bfc57ecd26c3 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] [instance: 4049de7f-4ace-4017-8f9a-63817de5f81c] Successfully updated port: 55b99762-526d-4033-9eed-24af176c71e4 {{(pid=62519) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2021.970063] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-6d2f73aa-413e-48ec-b00a-311d813d3466 tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] [instance: 90cdb4a1-6192-42e8-ad30-90faeefa4d18] Powering off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2021.970381] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c32c008f-6ee4-4fa5-bf4d-e4dc783aab7e {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2021.976751] env[62519]: DEBUG nova.network.neutron [req-dcb06bc9-01b4-438d-90c4-6e8e4941688f req-938d4996-51a9-4956-96d5-3ac0b37a255a service nova] [instance: a18af9b7-4548-42d1-8459-508298cb96dc] Updated VIF entry in instance network info cache for port 7b3a7ba6-9ee9-40fb-94c8-f7593ffc1ca2. 
{{(pid=62519) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2021.977176] env[62519]: DEBUG nova.network.neutron [req-dcb06bc9-01b4-438d-90c4-6e8e4941688f req-938d4996-51a9-4956-96d5-3ac0b37a255a service nova] [instance: a18af9b7-4548-42d1-8459-508298cb96dc] Updating instance_info_cache with network_info: [{"id": "7b3a7ba6-9ee9-40fb-94c8-f7593ffc1ca2", "address": "fa:16:3e:4a:4f:32", "network": {"id": "3996e7f6-f093-4152-af91-6fb77f32a1c5", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-2013009403-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "63a46158057949478e5c79fbe0d4d5d4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3093647a-bab7-4562-ada0-428725e8c0fc", "external-id": "nsx-vlan-transportzone-660", "segmentation_id": 660, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7b3a7ba6-9e", "ovs_interfaceid": "7b3a7ba6-9ee9-40fb-94c8-f7593ffc1ca2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2021.979980] env[62519]: DEBUG oslo_vmware.api [None req-6d2f73aa-413e-48ec-b00a-311d813d3466 tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] Waiting for the task: (returnval){ [ 2021.979980] env[62519]: value = "task-1803460" [ 2021.979980] env[62519]: _type = "Task" [ 2021.979980] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2021.990068] env[62519]: DEBUG oslo_vmware.api [None req-6d2f73aa-413e-48ec-b00a-311d813d3466 tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] Task: {'id': task-1803460, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2022.091872] env[62519]: DEBUG nova.network.neutron [None req-1c1de668-722b-4bef-8122-fb6596c6c4bb tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] [instance: 9bf88b2f-63f9-466b-8669-45f17319055d] Updating instance_info_cache with network_info: [{"id": "2ee072d4-2bdb-4a83-90dd-06086f515634", "address": "fa:16:3e:e6:86:ec", "network": {"id": "32c7792c-9705-4830-bdbc-4c4159d566f0", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-374750788-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.241", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d73e71476254453fb23164dce09c6d41", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a0a76279-3c11-4bef-b124-2a2ee13fa377", "external-id": "nsx-vlan-transportzone-738", "segmentation_id": 738, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2ee072d4-2b", "ovs_interfaceid": "2ee072d4-2bdb-4a83-90dd-06086f515634", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2022.166594] env[62519]: DEBUG oslo_vmware.api [None req-b1c3f0b7-e2d2-4451-8963-33b35525f2de tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52db7182-cc8d-23f1-e014-9b70b9173cef, 'name': SearchDatastore_Task, 'duration_secs': 0.033698} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2022.166957] env[62519]: DEBUG oslo_concurrency.lockutils [None req-b1c3f0b7-e2d2-4451-8963-33b35525f2de tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Releasing lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2022.167223] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-b1c3f0b7-e2d2-4451-8963-33b35525f2de tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] [instance: a18af9b7-4548-42d1-8459-508298cb96dc] Processing image 15793716-f1d9-4a86-9030-717adf498693 {{(pid=62519) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2022.167501] env[62519]: DEBUG oslo_concurrency.lockutils [None req-b1c3f0b7-e2d2-4451-8963-33b35525f2de tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2022.167661] env[62519]: DEBUG oslo_concurrency.lockutils [None req-b1c3f0b7-e2d2-4451-8963-33b35525f2de tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Acquired lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2022.167915] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-b1c3f0b7-e2d2-4451-8963-33b35525f2de tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62519) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2022.168535] env[62519]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a2130bcd-eb17-4d2d-9279-fa87467a42b9 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2022.179245] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-b1c3f0b7-e2d2-4451-8963-33b35525f2de tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62519) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2022.179447] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-b1c3f0b7-e2d2-4451-8963-33b35525f2de tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62519) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2022.180194] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-47c6624a-0463-4f46-b45b-8cd2e1d9e170 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2022.186714] env[62519]: DEBUG oslo_vmware.api [None req-b1c3f0b7-e2d2-4451-8963-33b35525f2de tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Waiting for the task: (returnval){ [ 2022.186714] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]52741106-0aab-f782-2330-6aed34bcad54" [ 2022.186714] env[62519]: _type = "Task" [ 2022.186714] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2022.195857] env[62519]: DEBUG oslo_vmware.api [None req-b1c3f0b7-e2d2-4451-8963-33b35525f2de tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52741106-0aab-f782-2330-6aed34bcad54, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2022.372416] env[62519]: DEBUG oslo_concurrency.lockutils [None req-63001563-3bba-4db1-8613-bfc57ecd26c3 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Acquiring lock "refresh_cache-4049de7f-4ace-4017-8f9a-63817de5f81c" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2022.372416] env[62519]: DEBUG oslo_concurrency.lockutils [None req-63001563-3bba-4db1-8613-bfc57ecd26c3 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Acquired lock "refresh_cache-4049de7f-4ace-4017-8f9a-63817de5f81c" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2022.372416] env[62519]: DEBUG nova.network.neutron [None req-63001563-3bba-4db1-8613-bfc57ecd26c3 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] [instance: 4049de7f-4ace-4017-8f9a-63817de5f81c] Building network info cache for instance {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2022.481444] env[62519]: DEBUG oslo_concurrency.lockutils [req-dcb06bc9-01b4-438d-90c4-6e8e4941688f req-938d4996-51a9-4956-96d5-3ac0b37a255a service nova] Releasing lock "refresh_cache-a18af9b7-4548-42d1-8459-508298cb96dc" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2022.490728] env[62519]: DEBUG oslo_vmware.api [None req-6d2f73aa-413e-48ec-b00a-311d813d3466 tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] Task: {'id': task-1803460, 'name': PowerOffVM_Task, 'duration_secs': 0.262187} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2022.491062] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-6d2f73aa-413e-48ec-b00a-311d813d3466 tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] [instance: 90cdb4a1-6192-42e8-ad30-90faeefa4d18] Powered off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2022.491842] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d24f0846-26c5-4c6b-9b15-2862095b9560 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2022.510283] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34c1e623-3b17-4389-bf22-0d9b5a6f92e9 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2022.542453] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-6d2f73aa-413e-48ec-b00a-311d813d3466 tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] [instance: 90cdb4a1-6192-42e8-ad30-90faeefa4d18] Powering off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2022.542712] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b76e7720-586d-4c67-864a-4564a695838e {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2022.550108] env[62519]: DEBUG oslo_vmware.api [None req-6d2f73aa-413e-48ec-b00a-311d813d3466 tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] Waiting for the task: (returnval){ [ 2022.550108] env[62519]: value = "task-1803461" [ 2022.550108] env[62519]: _type = "Task" [ 2022.550108] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2022.557398] env[62519]: DEBUG oslo_vmware.api [None req-6d2f73aa-413e-48ec-b00a-311d813d3466 tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] Task: {'id': task-1803461, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2022.594148] env[62519]: DEBUG oslo_concurrency.lockutils [None req-1c1de668-722b-4bef-8122-fb6596c6c4bb tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Releasing lock "refresh_cache-9bf88b2f-63f9-466b-8669-45f17319055d" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2022.697197] env[62519]: DEBUG oslo_vmware.api [None req-b1c3f0b7-e2d2-4451-8963-33b35525f2de tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52741106-0aab-f782-2330-6aed34bcad54, 'name': SearchDatastore_Task, 'duration_secs': 0.011614} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2022.697989] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-91768863-d25a-49b9-93fe-191c9a0e16aa {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2022.703341] env[62519]: DEBUG oslo_vmware.api [None req-b1c3f0b7-e2d2-4451-8963-33b35525f2de tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Waiting for the task: (returnval){ [ 2022.703341] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]52870dcc-ec7d-766b-e294-944b9298e3e8" [ 2022.703341] env[62519]: _type = "Task" [ 2022.703341] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2022.710986] env[62519]: DEBUG oslo_vmware.api [None req-b1c3f0b7-e2d2-4451-8963-33b35525f2de tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52870dcc-ec7d-766b-e294-944b9298e3e8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2022.814995] env[62519]: DEBUG nova.compute.utils [None req-5505890f-601f-4728-a979-16c777543556 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Using /dev/sd instead of None {{(pid=62519) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2022.901935] env[62519]: DEBUG nova.network.neutron [None req-63001563-3bba-4db1-8613-bfc57ecd26c3 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] [instance: 4049de7f-4ace-4017-8f9a-63817de5f81c] Instance cache missing network info. 
{{(pid=62519) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2023.024607] env[62519]: DEBUG nova.network.neutron [None req-63001563-3bba-4db1-8613-bfc57ecd26c3 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] [instance: 4049de7f-4ace-4017-8f9a-63817de5f81c] Updating instance_info_cache with network_info: [{"id": "55b99762-526d-4033-9eed-24af176c71e4", "address": "fa:16:3e:d8:0e:f7", "network": {"id": "40fc0e4f-fe1a-4b8c-ace4-4612cfc1a8fc", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-364463489-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "12977ed65a1b410a987b049e9d1dce3e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8e028024-a9c1-4cae-8849-ea770a7ae0e4", "external-id": "nsx-vlan-transportzone-919", "segmentation_id": 919, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap55b99762-52", "ovs_interfaceid": "55b99762-526d-4033-9eed-24af176c71e4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2023.063625] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-6d2f73aa-413e-48ec-b00a-311d813d3466 tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] [instance: 90cdb4a1-6192-42e8-ad30-90faeefa4d18] VM already powered off {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 2023.063926] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-6d2f73aa-413e-48ec-b00a-311d813d3466 tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] [instance: 90cdb4a1-6192-42e8-ad30-90faeefa4d18] Processing image 15793716-f1d9-4a86-9030-717adf498693 {{(pid=62519) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2023.064238] env[62519]: DEBUG oslo_concurrency.lockutils [None req-6d2f73aa-413e-48ec-b00a-311d813d3466 tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2023.097277] env[62519]: DEBUG nova.compute.manager [None req-1c1de668-722b-4bef-8122-fb6596c6c4bb tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] [instance: 9bf88b2f-63f9-466b-8669-45f17319055d] Hypervisor driver does not support instance shared storage check, assuming it's not on shared storage {{(pid=62519) _is_instance_storage_shared /opt/stack/nova/nova/compute/manager.py:902}} [ 2023.141876] env[62519]: DEBUG nova.compute.manager [req-e380a0f1-ea96-4695-9f41-05e0d28a5226 req-9938efba-c2e5-43be-8f23-2ecfafcc3104 service nova] [instance: 4049de7f-4ace-4017-8f9a-63817de5f81c] Received event 
network-vif-plugged-55b99762-526d-4033-9eed-24af176c71e4 {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 2023.142885] env[62519]: DEBUG oslo_concurrency.lockutils [req-e380a0f1-ea96-4695-9f41-05e0d28a5226 req-9938efba-c2e5-43be-8f23-2ecfafcc3104 service nova] Acquiring lock "4049de7f-4ace-4017-8f9a-63817de5f81c-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2023.142885] env[62519]: DEBUG oslo_concurrency.lockutils [req-e380a0f1-ea96-4695-9f41-05e0d28a5226 req-9938efba-c2e5-43be-8f23-2ecfafcc3104 service nova] Lock "4049de7f-4ace-4017-8f9a-63817de5f81c-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2023.142885] env[62519]: DEBUG oslo_concurrency.lockutils [req-e380a0f1-ea96-4695-9f41-05e0d28a5226 req-9938efba-c2e5-43be-8f23-2ecfafcc3104 service nova] Lock "4049de7f-4ace-4017-8f9a-63817de5f81c-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2023.142885] env[62519]: DEBUG nova.compute.manager [req-e380a0f1-ea96-4695-9f41-05e0d28a5226 req-9938efba-c2e5-43be-8f23-2ecfafcc3104 service nova] [instance: 4049de7f-4ace-4017-8f9a-63817de5f81c] No waiting events found dispatching network-vif-plugged-55b99762-526d-4033-9eed-24af176c71e4 {{(pid=62519) pop_instance_event /opt/stack/nova/nova/compute/manager.py:323}} [ 2023.142885] env[62519]: WARNING nova.compute.manager [req-e380a0f1-ea96-4695-9f41-05e0d28a5226 req-9938efba-c2e5-43be-8f23-2ecfafcc3104 service nova] [instance: 4049de7f-4ace-4017-8f9a-63817de5f81c] Received unexpected event network-vif-plugged-55b99762-526d-4033-9eed-24af176c71e4 for instance with vm_state building and task_state spawning. [ 2023.143465] env[62519]: DEBUG nova.compute.manager [req-e380a0f1-ea96-4695-9f41-05e0d28a5226 req-9938efba-c2e5-43be-8f23-2ecfafcc3104 service nova] [instance: 4049de7f-4ace-4017-8f9a-63817de5f81c] Received event network-changed-55b99762-526d-4033-9eed-24af176c71e4 {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 2023.143465] env[62519]: DEBUG nova.compute.manager [req-e380a0f1-ea96-4695-9f41-05e0d28a5226 req-9938efba-c2e5-43be-8f23-2ecfafcc3104 service nova] [instance: 4049de7f-4ace-4017-8f9a-63817de5f81c] Refreshing instance network info cache due to event network-changed-55b99762-526d-4033-9eed-24af176c71e4. {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11628}} [ 2023.143465] env[62519]: DEBUG oslo_concurrency.lockutils [req-e380a0f1-ea96-4695-9f41-05e0d28a5226 req-9938efba-c2e5-43be-8f23-2ecfafcc3104 service nova] Acquiring lock "refresh_cache-4049de7f-4ace-4017-8f9a-63817de5f81c" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2023.214012] env[62519]: DEBUG oslo_vmware.api [None req-b1c3f0b7-e2d2-4451-8963-33b35525f2de tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52870dcc-ec7d-766b-e294-944b9298e3e8, 'name': SearchDatastore_Task, 'duration_secs': 0.041939} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2023.214296] env[62519]: DEBUG oslo_concurrency.lockutils [None req-b1c3f0b7-e2d2-4451-8963-33b35525f2de tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Releasing lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2023.214549] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-b1c3f0b7-e2d2-4451-8963-33b35525f2de tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk to [datastore1] a18af9b7-4548-42d1-8459-508298cb96dc/a18af9b7-4548-42d1-8459-508298cb96dc.vmdk {{(pid=62519) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2023.214824] env[62519]: DEBUG oslo_concurrency.lockutils [None req-6d2f73aa-413e-48ec-b00a-311d813d3466 tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] Acquired lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2023.215019] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-6d2f73aa-413e-48ec-b00a-311d813d3466 tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62519) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2023.215231] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-8cf3f3f7-3cb6-457e-ada6-7c6dba56f7ce {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2023.217076] env[62519]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-dcff9167-d478-4876-972d-bfa71fde6a62 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2023.226561] env[62519]: DEBUG oslo_vmware.api [None req-b1c3f0b7-e2d2-4451-8963-33b35525f2de tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Waiting for the task: (returnval){ [ 2023.226561] env[62519]: value = "task-1803462" [ 2023.226561] env[62519]: _type = "Task" [ 2023.226561] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2023.227631] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-6d2f73aa-413e-48ec-b00a-311d813d3466 tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62519) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2023.227801] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-6d2f73aa-413e-48ec-b00a-311d813d3466 tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62519) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2023.231163] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1c5a66ec-37dd-4096-ad0f-8783d04ab4c8 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2023.236834] env[62519]: DEBUG oslo_vmware.api [None req-6d2f73aa-413e-48ec-b00a-311d813d3466 tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] Waiting for the task: (returnval){ [ 2023.236834] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]52db830e-621c-2e2a-d8a0-67d976c4cc51" [ 2023.236834] env[62519]: _type = "Task" [ 2023.236834] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2023.240131] env[62519]: DEBUG oslo_vmware.api [None req-b1c3f0b7-e2d2-4451-8963-33b35525f2de tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Task: {'id': task-1803462, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2023.248160] env[62519]: DEBUG oslo_vmware.api [None req-6d2f73aa-413e-48ec-b00a-311d813d3466 tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52db830e-621c-2e2a-d8a0-67d976c4cc51, 'name': SearchDatastore_Task, 'duration_secs': 0.009099} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2023.249031] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-83bcbd09-df11-4143-83aa-cd2cf2493aff {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2023.254414] env[62519]: DEBUG oslo_vmware.api [None req-6d2f73aa-413e-48ec-b00a-311d813d3466 tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] Waiting for the task: (returnval){ [ 2023.254414] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]529c7d5c-ea30-fff9-b9c2-6c85a5a7b5d1" [ 2023.254414] env[62519]: _type = "Task" [ 2023.254414] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2023.262078] env[62519]: DEBUG oslo_vmware.api [None req-6d2f73aa-413e-48ec-b00a-311d813d3466 tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]529c7d5c-ea30-fff9-b9c2-6c85a5a7b5d1, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2023.317803] env[62519]: INFO nova.virt.block_device [None req-5505890f-601f-4728-a979-16c777543556 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] [instance: 47439070-54d8-454c-bf1d-7a2a33d82e9a] Booting with volume 64f7dc0b-1b91-40f1-85b7-8256d2cde712 at /dev/sdb [ 2023.348888] env[62519]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-40afdcb0-90dd-48cc-a9d1-a7ed3b90e788 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2023.358468] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40b6e066-7152-4f5b-987d-63c5e8f2cf3e {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2023.386038] env[62519]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-1d03d1f4-b591-45f1-bcea-e15d2d5f0e49 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2023.395611] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-192e79d0-e221-4144-9df8-17a03d4a4432 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2023.425612] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-020d4ce6-dae2-4b61-b7f6-e17255ba02ad {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2023.434120] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b58322cf-85c8-45e1-ad1c-6814d01fd0a1 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2023.450665] env[62519]: DEBUG nova.virt.block_device [None req-5505890f-601f-4728-a979-16c777543556 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] [instance: 47439070-54d8-454c-bf1d-7a2a33d82e9a] Updating existing volume attachment record: d6e286e4-a7e2-48a0-a24d-650edfbc37f8 {{(pid=62519) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 2023.527893] env[62519]: DEBUG oslo_concurrency.lockutils [None req-63001563-3bba-4db1-8613-bfc57ecd26c3 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Releasing lock "refresh_cache-4049de7f-4ace-4017-8f9a-63817de5f81c" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2023.528288] env[62519]: DEBUG nova.compute.manager [None req-63001563-3bba-4db1-8613-bfc57ecd26c3 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] [instance: 4049de7f-4ace-4017-8f9a-63817de5f81c] Instance network_info: |[{"id": "55b99762-526d-4033-9eed-24af176c71e4", "address": "fa:16:3e:d8:0e:f7", "network": {"id": "40fc0e4f-fe1a-4b8c-ace4-4612cfc1a8fc", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-364463489-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, 
"floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "12977ed65a1b410a987b049e9d1dce3e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8e028024-a9c1-4cae-8849-ea770a7ae0e4", "external-id": "nsx-vlan-transportzone-919", "segmentation_id": 919, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap55b99762-52", "ovs_interfaceid": "55b99762-526d-4033-9eed-24af176c71e4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62519) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2004}} [ 2023.528629] env[62519]: DEBUG oslo_concurrency.lockutils [req-e380a0f1-ea96-4695-9f41-05e0d28a5226 req-9938efba-c2e5-43be-8f23-2ecfafcc3104 service nova] Acquired lock "refresh_cache-4049de7f-4ace-4017-8f9a-63817de5f81c" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2023.528837] env[62519]: DEBUG nova.network.neutron [req-e380a0f1-ea96-4695-9f41-05e0d28a5226 req-9938efba-c2e5-43be-8f23-2ecfafcc3104 service nova] [instance: 4049de7f-4ace-4017-8f9a-63817de5f81c] Refreshing network info cache for port 55b99762-526d-4033-9eed-24af176c71e4 {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2023.530300] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-63001563-3bba-4db1-8613-bfc57ecd26c3 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] [instance: 4049de7f-4ace-4017-8f9a-63817de5f81c] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:d8:0e:f7', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '8e028024-a9c1-4cae-8849-ea770a7ae0e4', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '55b99762-526d-4033-9eed-24af176c71e4', 'vif_model': 'vmxnet3'}] {{(pid=62519) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2023.542283] env[62519]: DEBUG oslo.service.loopingcall [None req-63001563-3bba-4db1-8613-bfc57ecd26c3 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62519) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2023.546591] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4049de7f-4ace-4017-8f9a-63817de5f81c] Creating VM on the ESX host {{(pid=62519) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2023.547362] env[62519]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-69d6c7d1-09a0-4554-a085-bd2dbd3b3f23 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2023.574687] env[62519]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2023.574687] env[62519]: value = "task-1803463" [ 2023.574687] env[62519]: _type = "Task" [ 2023.574687] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2023.585941] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1803463, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2023.738866] env[62519]: DEBUG oslo_vmware.api [None req-b1c3f0b7-e2d2-4451-8963-33b35525f2de tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Task: {'id': task-1803462, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.424228} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2023.739097] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-b1c3f0b7-e2d2-4451-8963-33b35525f2de tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk to [datastore1] a18af9b7-4548-42d1-8459-508298cb96dc/a18af9b7-4548-42d1-8459-508298cb96dc.vmdk {{(pid=62519) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2023.739350] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-b1c3f0b7-e2d2-4451-8963-33b35525f2de tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] [instance: a18af9b7-4548-42d1-8459-508298cb96dc] Extending root virtual disk to 1048576 {{(pid=62519) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2023.739557] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-84c4b0db-3c04-4089-989f-147b598a1e31 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2023.746363] env[62519]: DEBUG oslo_vmware.api [None req-b1c3f0b7-e2d2-4451-8963-33b35525f2de tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Waiting for the task: (returnval){ [ 2023.746363] env[62519]: value = "task-1803465" [ 2023.746363] env[62519]: _type = "Task" [ 2023.746363] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2023.754301] env[62519]: DEBUG oslo_vmware.api [None req-b1c3f0b7-e2d2-4451-8963-33b35525f2de tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Task: {'id': task-1803465, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2023.767408] env[62519]: DEBUG oslo_vmware.api [None req-6d2f73aa-413e-48ec-b00a-311d813d3466 tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]529c7d5c-ea30-fff9-b9c2-6c85a5a7b5d1, 'name': SearchDatastore_Task, 'duration_secs': 0.008684} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2023.767709] env[62519]: DEBUG oslo_concurrency.lockutils [None req-6d2f73aa-413e-48ec-b00a-311d813d3466 tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] Releasing lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2023.767963] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-6d2f73aa-413e-48ec-b00a-311d813d3466 tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] Copying virtual disk from [datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk to [datastore1] 90cdb4a1-6192-42e8-ad30-90faeefa4d18/15793716-f1d9-4a86-9030-717adf498693-rescue.vmdk. {{(pid=62519) disk_copy /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:281}} [ 2023.768246] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-1a97f618-c67b-4216-9957-95d7144484c7 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2023.775885] env[62519]: DEBUG oslo_vmware.api [None req-6d2f73aa-413e-48ec-b00a-311d813d3466 tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] Waiting for the task: (returnval){ [ 2023.775885] env[62519]: value = "task-1803466" [ 2023.775885] env[62519]: _type = "Task" [ 2023.775885] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2023.785494] env[62519]: DEBUG oslo_vmware.api [None req-6d2f73aa-413e-48ec-b00a-311d813d3466 tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] Task: {'id': task-1803466, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2023.797820] env[62519]: DEBUG nova.network.neutron [req-e380a0f1-ea96-4695-9f41-05e0d28a5226 req-9938efba-c2e5-43be-8f23-2ecfafcc3104 service nova] [instance: 4049de7f-4ace-4017-8f9a-63817de5f81c] Updated VIF entry in instance network info cache for port 55b99762-526d-4033-9eed-24af176c71e4. 
{{(pid=62519) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2023.798205] env[62519]: DEBUG nova.network.neutron [req-e380a0f1-ea96-4695-9f41-05e0d28a5226 req-9938efba-c2e5-43be-8f23-2ecfafcc3104 service nova] [instance: 4049de7f-4ace-4017-8f9a-63817de5f81c] Updating instance_info_cache with network_info: [{"id": "55b99762-526d-4033-9eed-24af176c71e4", "address": "fa:16:3e:d8:0e:f7", "network": {"id": "40fc0e4f-fe1a-4b8c-ace4-4612cfc1a8fc", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-364463489-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "12977ed65a1b410a987b049e9d1dce3e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8e028024-a9c1-4cae-8849-ea770a7ae0e4", "external-id": "nsx-vlan-transportzone-919", "segmentation_id": 919, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap55b99762-52", "ovs_interfaceid": "55b99762-526d-4033-9eed-24af176c71e4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2024.084783] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1803463, 'name': CreateVM_Task} progress is 25%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2024.205148] env[62519]: DEBUG oslo_concurrency.lockutils [None req-1c1de668-722b-4bef-8122-fb6596c6c4bb tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2024.205502] env[62519]: DEBUG oslo_concurrency.lockutils [None req-1c1de668-722b-4bef-8122-fb6596c6c4bb tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2024.256055] env[62519]: DEBUG oslo_vmware.api [None req-b1c3f0b7-e2d2-4451-8963-33b35525f2de tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Task: {'id': task-1803465, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.082352} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2024.256312] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-b1c3f0b7-e2d2-4451-8963-33b35525f2de tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] [instance: a18af9b7-4548-42d1-8459-508298cb96dc] Extended root virtual disk {{(pid=62519) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2024.257073] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d2189bd-c13f-4d78-b178-cc9c179f0108 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2024.278976] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-b1c3f0b7-e2d2-4451-8963-33b35525f2de tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] [instance: a18af9b7-4548-42d1-8459-508298cb96dc] Reconfiguring VM instance instance-00000076 to attach disk [datastore1] a18af9b7-4548-42d1-8459-508298cb96dc/a18af9b7-4548-42d1-8459-508298cb96dc.vmdk or device None with type sparse {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2024.279308] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-533dde69-563c-4af2-a143-2733a6eb3b89 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2024.300929] env[62519]: DEBUG oslo_concurrency.lockutils [req-e380a0f1-ea96-4695-9f41-05e0d28a5226 req-9938efba-c2e5-43be-8f23-2ecfafcc3104 service nova] Releasing lock "refresh_cache-4049de7f-4ace-4017-8f9a-63817de5f81c" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2024.304037] env[62519]: DEBUG oslo_vmware.api [None req-6d2f73aa-413e-48ec-b00a-311d813d3466 tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] Task: {'id': task-1803466, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2024.305369] env[62519]: DEBUG oslo_vmware.api [None req-b1c3f0b7-e2d2-4451-8963-33b35525f2de tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Waiting for the task: (returnval){ [ 2024.305369] env[62519]: value = "task-1803469" [ 2024.305369] env[62519]: _type = "Task" [ 2024.305369] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2024.313275] env[62519]: DEBUG oslo_vmware.api [None req-b1c3f0b7-e2d2-4451-8963-33b35525f2de tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Task: {'id': task-1803469, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2024.585673] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1803463, 'name': CreateVM_Task} progress is 25%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2024.708582] env[62519]: DEBUG nova.objects.instance [None req-1c1de668-722b-4bef-8122-fb6596c6c4bb tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Lazy-loading 'migration_context' on Instance uuid 9bf88b2f-63f9-466b-8669-45f17319055d {{(pid=62519) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2024.789735] env[62519]: DEBUG oslo_vmware.api [None req-6d2f73aa-413e-48ec-b00a-311d813d3466 tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] Task: {'id': task-1803466, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.949322} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2024.790015] env[62519]: INFO nova.virt.vmwareapi.ds_util [None req-6d2f73aa-413e-48ec-b00a-311d813d3466 tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] Copied virtual disk from [datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk to [datastore1] 90cdb4a1-6192-42e8-ad30-90faeefa4d18/15793716-f1d9-4a86-9030-717adf498693-rescue.vmdk. [ 2024.790792] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-645e5c39-fd18-4503-b2da-8b6b649de377 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2024.818603] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-6d2f73aa-413e-48ec-b00a-311d813d3466 tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] [instance: 90cdb4a1-6192-42e8-ad30-90faeefa4d18] Reconfiguring VM instance instance-00000075 to attach disk [datastore1] 90cdb4a1-6192-42e8-ad30-90faeefa4d18/15793716-f1d9-4a86-9030-717adf498693-rescue.vmdk or device None with type thin {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2024.821995] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-24f7d88e-2a72-480c-a93b-153e098351b2 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2024.842082] env[62519]: DEBUG oslo_vmware.api [None req-b1c3f0b7-e2d2-4451-8963-33b35525f2de tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Task: {'id': task-1803469, 'name': ReconfigVM_Task, 'duration_secs': 0.406906} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2024.843362] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-b1c3f0b7-e2d2-4451-8963-33b35525f2de tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] [instance: a18af9b7-4548-42d1-8459-508298cb96dc] Reconfigured VM instance instance-00000076 to attach disk [datastore1] a18af9b7-4548-42d1-8459-508298cb96dc/a18af9b7-4548-42d1-8459-508298cb96dc.vmdk or device None with type sparse {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2024.844092] env[62519]: DEBUG oslo_vmware.api [None req-6d2f73aa-413e-48ec-b00a-311d813d3466 tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] Waiting for the task: (returnval){ [ 2024.844092] env[62519]: value = "task-1803470" [ 2024.844092] env[62519]: _type = "Task" [ 2024.844092] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2024.844308] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-11e4749e-58bb-4a93-b79f-c922cbc31b99 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2024.855817] env[62519]: DEBUG oslo_vmware.api [None req-6d2f73aa-413e-48ec-b00a-311d813d3466 tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] Task: {'id': task-1803470, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2024.859274] env[62519]: DEBUG oslo_vmware.api [None req-b1c3f0b7-e2d2-4451-8963-33b35525f2de tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Waiting for the task: (returnval){ [ 2024.859274] env[62519]: value = "task-1803471" [ 2024.859274] env[62519]: _type = "Task" [ 2024.859274] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2024.867638] env[62519]: DEBUG oslo_vmware.api [None req-b1c3f0b7-e2d2-4451-8963-33b35525f2de tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Task: {'id': task-1803471, 'name': Rename_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2025.085583] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1803463, 'name': CreateVM_Task, 'duration_secs': 1.443273} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2025.085993] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4049de7f-4ace-4017-8f9a-63817de5f81c] Created VM on the ESX host {{(pid=62519) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2025.086498] env[62519]: DEBUG oslo_concurrency.lockutils [None req-63001563-3bba-4db1-8613-bfc57ecd26c3 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2025.086670] env[62519]: DEBUG oslo_concurrency.lockutils [None req-63001563-3bba-4db1-8613-bfc57ecd26c3 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Acquired lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2025.086991] env[62519]: DEBUG oslo_concurrency.lockutils [None req-63001563-3bba-4db1-8613-bfc57ecd26c3 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2025.087282] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8836773c-0a22-4f0e-bdd3-1d17fc61d288 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2025.093467] env[62519]: DEBUG oslo_vmware.api [None req-63001563-3bba-4db1-8613-bfc57ecd26c3 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Waiting for the task: (returnval){ [ 2025.093467] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]52fc543c-6a0d-6cdf-8c7f-22e3969afc59" [ 2025.093467] env[62519]: _type = "Task" [ 2025.093467] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2025.102838] env[62519]: DEBUG oslo_vmware.api [None req-63001563-3bba-4db1-8613-bfc57ecd26c3 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52fc543c-6a0d-6cdf-8c7f-22e3969afc59, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2025.306016] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70787e9d-7d1d-4f9b-b693-4d8e33ae6df4 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2025.312655] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-689dc7b5-b98a-4ec9-9fdc-3ea54d2a092f {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2025.342330] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-950ce2a9-b890-44db-84ae-38b83ca15c8d {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2025.352319] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cebb057d-0506-466a-bf7e-0dd753fce47a {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2025.360686] env[62519]: DEBUG oslo_vmware.api [None req-6d2f73aa-413e-48ec-b00a-311d813d3466 tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] Task: {'id': task-1803470, 'name': ReconfigVM_Task, 'duration_secs': 0.321269} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2025.363864] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-6d2f73aa-413e-48ec-b00a-311d813d3466 tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] [instance: 90cdb4a1-6192-42e8-ad30-90faeefa4d18] Reconfigured VM instance instance-00000075 to attach disk [datastore1] 90cdb4a1-6192-42e8-ad30-90faeefa4d18/15793716-f1d9-4a86-9030-717adf498693-rescue.vmdk or device None with type thin {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2025.372031] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d379b13b-1189-4265-88d9-462e1f1be49f {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2025.374949] env[62519]: DEBUG nova.compute.provider_tree [None req-1c1de668-722b-4bef-8122-fb6596c6c4bb tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Inventory has not changed in ProviderTree for provider: f8ca0d98-9158-4b85-ae0e-b106f966dd44 {{(pid=62519) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2025.380998] env[62519]: DEBUG oslo_vmware.api [None req-b1c3f0b7-e2d2-4451-8963-33b35525f2de tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Task: {'id': task-1803471, 'name': Rename_Task, 'duration_secs': 0.156786} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2025.397067] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-b1c3f0b7-e2d2-4451-8963-33b35525f2de tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] [instance: a18af9b7-4548-42d1-8459-508298cb96dc] Powering on the VM {{(pid=62519) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2025.401971] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b3bbdb05-ccc8-4f45-8b25-14c6a38191e8 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2025.403519] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-67607c07-6122-4222-a2f8-45c2d9beec4a {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2025.421241] env[62519]: DEBUG oslo_vmware.api [None req-6d2f73aa-413e-48ec-b00a-311d813d3466 tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] Waiting for the task: (returnval){ [ 2025.421241] env[62519]: value = "task-1803473" [ 2025.421241] env[62519]: _type = "Task" [ 2025.421241] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2025.422782] env[62519]: DEBUG oslo_vmware.api [None req-b1c3f0b7-e2d2-4451-8963-33b35525f2de tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Waiting for the task: (returnval){ [ 2025.422782] env[62519]: value = "task-1803472" [ 2025.422782] env[62519]: _type = "Task" [ 2025.422782] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2025.436986] env[62519]: DEBUG oslo_vmware.api [None req-6d2f73aa-413e-48ec-b00a-311d813d3466 tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] Task: {'id': task-1803473, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2025.439878] env[62519]: DEBUG oslo_vmware.api [None req-b1c3f0b7-e2d2-4451-8963-33b35525f2de tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Task: {'id': task-1803472, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2025.603440] env[62519]: DEBUG oslo_vmware.api [None req-63001563-3bba-4db1-8613-bfc57ecd26c3 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52fc543c-6a0d-6cdf-8c7f-22e3969afc59, 'name': SearchDatastore_Task, 'duration_secs': 0.01195} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2025.603619] env[62519]: DEBUG oslo_concurrency.lockutils [None req-63001563-3bba-4db1-8613-bfc57ecd26c3 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Releasing lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2025.603798] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-63001563-3bba-4db1-8613-bfc57ecd26c3 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] [instance: 4049de7f-4ace-4017-8f9a-63817de5f81c] Processing image 15793716-f1d9-4a86-9030-717adf498693 {{(pid=62519) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2025.604096] env[62519]: DEBUG oslo_concurrency.lockutils [None req-63001563-3bba-4db1-8613-bfc57ecd26c3 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2025.604195] env[62519]: DEBUG oslo_concurrency.lockutils [None req-63001563-3bba-4db1-8613-bfc57ecd26c3 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Acquired lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2025.604466] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-63001563-3bba-4db1-8613-bfc57ecd26c3 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62519) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2025.604627] env[62519]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-8f268292-feb6-402c-aa84-37628fbdfd0c {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2025.614125] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-63001563-3bba-4db1-8613-bfc57ecd26c3 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62519) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2025.614327] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-63001563-3bba-4db1-8613-bfc57ecd26c3 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62519) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2025.615050] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-51ea91c0-5e1a-4c9c-8c7c-db7c09e13f88 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2025.620982] env[62519]: DEBUG oslo_vmware.api [None req-63001563-3bba-4db1-8613-bfc57ecd26c3 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Waiting for the task: (returnval){ [ 2025.620982] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]526f9f31-7784-3d76-41b4-c815c9cdaa97" [ 2025.620982] env[62519]: _type = "Task" [ 2025.620982] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2025.628621] env[62519]: DEBUG oslo_vmware.api [None req-63001563-3bba-4db1-8613-bfc57ecd26c3 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]526f9f31-7784-3d76-41b4-c815c9cdaa97, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2025.879337] env[62519]: DEBUG nova.scheduler.client.report [None req-1c1de668-722b-4bef-8122-fb6596c6c4bb tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Inventory has not changed for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 159, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2025.935526] env[62519]: DEBUG oslo_vmware.api [None req-6d2f73aa-413e-48ec-b00a-311d813d3466 tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] Task: {'id': task-1803473, 'name': ReconfigVM_Task, 'duration_secs': 0.171676} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2025.938838] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-6d2f73aa-413e-48ec-b00a-311d813d3466 tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] [instance: 90cdb4a1-6192-42e8-ad30-90faeefa4d18] Powering on the VM {{(pid=62519) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2025.939242] env[62519]: DEBUG oslo_vmware.api [None req-b1c3f0b7-e2d2-4451-8963-33b35525f2de tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Task: {'id': task-1803472, 'name': PowerOnVM_Task, 'duration_secs': 0.505873} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2025.939670] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-aa48f0a6-6bf0-4e55-94e3-5e6a3f3d46be {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2025.941359] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-b1c3f0b7-e2d2-4451-8963-33b35525f2de tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] [instance: a18af9b7-4548-42d1-8459-508298cb96dc] Powered on the VM {{(pid=62519) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2025.941572] env[62519]: INFO nova.compute.manager [None req-b1c3f0b7-e2d2-4451-8963-33b35525f2de tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] [instance: a18af9b7-4548-42d1-8459-508298cb96dc] Took 7.03 seconds to spawn the instance on the hypervisor. [ 2025.941775] env[62519]: DEBUG nova.compute.manager [None req-b1c3f0b7-e2d2-4451-8963-33b35525f2de tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] [instance: a18af9b7-4548-42d1-8459-508298cb96dc] Checking state {{(pid=62519) _get_power_state /opt/stack/nova/nova/compute/manager.py:1799}} [ 2025.942564] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a89eab66-0b29-4be4-9b7d-8cfca1582041 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2025.953271] env[62519]: DEBUG oslo_vmware.api [None req-6d2f73aa-413e-48ec-b00a-311d813d3466 tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] Waiting for the task: (returnval){ [ 2025.953271] env[62519]: value = "task-1803474" [ 2025.953271] env[62519]: _type = "Task" [ 2025.953271] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2025.960996] env[62519]: DEBUG oslo_vmware.api [None req-6d2f73aa-413e-48ec-b00a-311d813d3466 tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] Task: {'id': task-1803474, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2026.132337] env[62519]: DEBUG oslo_vmware.api [None req-63001563-3bba-4db1-8613-bfc57ecd26c3 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]526f9f31-7784-3d76-41b4-c815c9cdaa97, 'name': SearchDatastore_Task, 'duration_secs': 0.034941} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2026.133267] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4c5b14a4-1b47-4dec-9987-472ecb0fd4f9 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2026.139831] env[62519]: DEBUG oslo_vmware.api [None req-63001563-3bba-4db1-8613-bfc57ecd26c3 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Waiting for the task: (returnval){ [ 2026.139831] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]52bd080f-8d94-78dd-4017-7bb7974fc205" [ 2026.139831] env[62519]: _type = "Task" [ 2026.139831] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2026.153061] env[62519]: DEBUG oslo_vmware.api [None req-63001563-3bba-4db1-8613-bfc57ecd26c3 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52bd080f-8d94-78dd-4017-7bb7974fc205, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2026.465162] env[62519]: INFO nova.compute.manager [None req-b1c3f0b7-e2d2-4451-8963-33b35525f2de tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] [instance: a18af9b7-4548-42d1-8459-508298cb96dc] Took 11.79 seconds to build instance. [ 2026.470802] env[62519]: DEBUG oslo_vmware.api [None req-6d2f73aa-413e-48ec-b00a-311d813d3466 tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] Task: {'id': task-1803474, 'name': PowerOnVM_Task, 'duration_secs': 0.43803} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2026.470802] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-6d2f73aa-413e-48ec-b00a-311d813d3466 tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] [instance: 90cdb4a1-6192-42e8-ad30-90faeefa4d18] Powered on the VM {{(pid=62519) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2026.473671] env[62519]: DEBUG nova.compute.manager [None req-6d2f73aa-413e-48ec-b00a-311d813d3466 tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] [instance: 90cdb4a1-6192-42e8-ad30-90faeefa4d18] Checking state {{(pid=62519) _get_power_state /opt/stack/nova/nova/compute/manager.py:1799}} [ 2026.474489] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75b3b68f-cd14-4401-888a-16efb080fd8b {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2026.652123] env[62519]: DEBUG oslo_vmware.api [None req-63001563-3bba-4db1-8613-bfc57ecd26c3 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52bd080f-8d94-78dd-4017-7bb7974fc205, 'name': SearchDatastore_Task, 'duration_secs': 0.015204} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2026.652123] env[62519]: DEBUG oslo_concurrency.lockutils [None req-63001563-3bba-4db1-8613-bfc57ecd26c3 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Releasing lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2026.652123] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-63001563-3bba-4db1-8613-bfc57ecd26c3 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk to [datastore1] 4049de7f-4ace-4017-8f9a-63817de5f81c/4049de7f-4ace-4017-8f9a-63817de5f81c.vmdk {{(pid=62519) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2026.652416] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-236b3104-5c6c-4a8a-b50a-85f8ea720ab2 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2026.659480] env[62519]: DEBUG oslo_vmware.api [None req-63001563-3bba-4db1-8613-bfc57ecd26c3 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Waiting for the task: (returnval){ [ 2026.659480] env[62519]: value = "task-1803476" [ 2026.659480] env[62519]: _type = "Task" [ 2026.659480] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2026.667587] env[62519]: DEBUG oslo_vmware.api [None req-63001563-3bba-4db1-8613-bfc57ecd26c3 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Task: {'id': task-1803476, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2026.892697] env[62519]: DEBUG oslo_concurrency.lockutils [None req-1c1de668-722b-4bef-8122-fb6596c6c4bb tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" :: held 2.687s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2027.084214] env[62519]: DEBUG oslo_service.periodic_task [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62519) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2027.084524] env[62519]: DEBUG oslo_service.periodic_task [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=62519) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2027.175722] env[62519]: DEBUG oslo_vmware.api [None req-63001563-3bba-4db1-8613-bfc57ecd26c3 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Task: {'id': task-1803476, 'name': CopyVirtualDisk_Task} progress is 25%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2027.591318] env[62519]: DEBUG oslo_service.periodic_task [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62519) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2027.591318] env[62519]: DEBUG nova.compute.manager [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Starting heal instance info cache {{(pid=62519) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10418}} [ 2027.591318] env[62519]: DEBUG nova.compute.manager [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Rebuilding the list of instances to heal {{(pid=62519) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10422}} [ 2027.599993] env[62519]: DEBUG nova.compute.manager [req-4b5157eb-3193-42a3-8db3-245f17b5026c req-b764e490-3ba5-439a-b149-280a6dc6428f service nova] [instance: a18af9b7-4548-42d1-8459-508298cb96dc] Received event network-changed-7b3a7ba6-9ee9-40fb-94c8-f7593ffc1ca2 {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 2027.600665] env[62519]: DEBUG nova.compute.manager [req-4b5157eb-3193-42a3-8db3-245f17b5026c req-b764e490-3ba5-439a-b149-280a6dc6428f service nova] [instance: a18af9b7-4548-42d1-8459-508298cb96dc] Refreshing instance network info cache due to event network-changed-7b3a7ba6-9ee9-40fb-94c8-f7593ffc1ca2. {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11628}} [ 2027.601088] env[62519]: DEBUG oslo_concurrency.lockutils [req-4b5157eb-3193-42a3-8db3-245f17b5026c req-b764e490-3ba5-439a-b149-280a6dc6428f service nova] Acquiring lock "refresh_cache-a18af9b7-4548-42d1-8459-508298cb96dc" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2027.601559] env[62519]: DEBUG oslo_concurrency.lockutils [req-4b5157eb-3193-42a3-8db3-245f17b5026c req-b764e490-3ba5-439a-b149-280a6dc6428f service nova] Acquired lock "refresh_cache-a18af9b7-4548-42d1-8459-508298cb96dc" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2027.602077] env[62519]: DEBUG nova.network.neutron [req-4b5157eb-3193-42a3-8db3-245f17b5026c req-b764e490-3ba5-439a-b149-280a6dc6428f service nova] [instance: a18af9b7-4548-42d1-8459-508298cb96dc] Refreshing network info cache for port 7b3a7ba6-9ee9-40fb-94c8-f7593ffc1ca2 {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2027.670183] env[62519]: DEBUG oslo_vmware.api [None req-63001563-3bba-4db1-8613-bfc57ecd26c3 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Task: {'id': task-1803476, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.661813} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2027.670498] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-63001563-3bba-4db1-8613-bfc57ecd26c3 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk to [datastore1] 4049de7f-4ace-4017-8f9a-63817de5f81c/4049de7f-4ace-4017-8f9a-63817de5f81c.vmdk {{(pid=62519) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2027.670760] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-63001563-3bba-4db1-8613-bfc57ecd26c3 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] [instance: 4049de7f-4ace-4017-8f9a-63817de5f81c] Extending root virtual disk to 1048576 {{(pid=62519) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2027.671090] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-5533a634-e599-464d-aaa8-5c3a093019b2 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2027.678057] env[62519]: DEBUG oslo_vmware.api [None req-63001563-3bba-4db1-8613-bfc57ecd26c3 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Waiting for the task: (returnval){ [ 2027.678057] env[62519]: value = "task-1803477" [ 2027.678057] env[62519]: _type = "Task" [ 2027.678057] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2027.686852] env[62519]: DEBUG oslo_vmware.api [None req-63001563-3bba-4db1-8613-bfc57ecd26c3 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Task: {'id': task-1803477, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2027.974540] env[62519]: DEBUG oslo_concurrency.lockutils [None req-b1c3f0b7-e2d2-4451-8963-33b35525f2de tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Lock "a18af9b7-4548-42d1-8459-508298cb96dc" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 14.303s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2028.062050] env[62519]: DEBUG oslo_concurrency.lockutils [None req-91a48691-0c95-42ef-bfb3-588e4d1b1e4c tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] Acquiring lock "d921e3c0-cc52-4c5a-90bc-4059ba499ff1" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2028.062294] env[62519]: DEBUG oslo_concurrency.lockutils [None req-91a48691-0c95-42ef-bfb3-588e4d1b1e4c tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] Lock "d921e3c0-cc52-4c5a-90bc-4059ba499ff1" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2028.099921] env[62519]: DEBUG nova.compute.manager [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] [instance: 4049de7f-4ace-4017-8f9a-63817de5f81c] Skipping network cache update for instance because it is Building. {{(pid=62519) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10431}} [ 2028.100656] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Acquiring lock "refresh_cache-9bf88b2f-63f9-466b-8669-45f17319055d" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2028.100656] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Acquired lock "refresh_cache-9bf88b2f-63f9-466b-8669-45f17319055d" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2028.100656] env[62519]: DEBUG nova.network.neutron [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] [instance: 9bf88b2f-63f9-466b-8669-45f17319055d] Forcefully refreshing network info cache for instance {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2061}} [ 2028.101029] env[62519]: DEBUG nova.objects.instance [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Lazy-loading 'info_cache' on Instance uuid 9bf88b2f-63f9-466b-8669-45f17319055d {{(pid=62519) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2028.187994] env[62519]: DEBUG oslo_vmware.api [None req-63001563-3bba-4db1-8613-bfc57ecd26c3 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Task: {'id': task-1803477, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.063134} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2028.188297] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-63001563-3bba-4db1-8613-bfc57ecd26c3 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] [instance: 4049de7f-4ace-4017-8f9a-63817de5f81c] Extended root virtual disk {{(pid=62519) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2028.189121] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f00a2ac5-b6a4-4e4f-8d3a-e47324ca44d4 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2028.211425] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-63001563-3bba-4db1-8613-bfc57ecd26c3 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] [instance: 4049de7f-4ace-4017-8f9a-63817de5f81c] Reconfiguring VM instance instance-00000077 to attach disk [datastore1] 4049de7f-4ace-4017-8f9a-63817de5f81c/4049de7f-4ace-4017-8f9a-63817de5f81c.vmdk or device None with type sparse {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2028.213639] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e88f56a1-013e-4fee-9759-bcaf7fc3c5e2 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2028.233475] env[62519]: DEBUG oslo_vmware.api [None req-63001563-3bba-4db1-8613-bfc57ecd26c3 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Waiting for the task: (returnval){ [ 2028.233475] env[62519]: value = "task-1803478" [ 2028.233475] env[62519]: _type = "Task" [ 2028.233475] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2028.241398] env[62519]: DEBUG oslo_vmware.api [None req-63001563-3bba-4db1-8613-bfc57ecd26c3 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Task: {'id': task-1803478, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2028.344989] env[62519]: DEBUG nova.network.neutron [req-4b5157eb-3193-42a3-8db3-245f17b5026c req-b764e490-3ba5-439a-b149-280a6dc6428f service nova] [instance: a18af9b7-4548-42d1-8459-508298cb96dc] Updated VIF entry in instance network info cache for port 7b3a7ba6-9ee9-40fb-94c8-f7593ffc1ca2. 
{{(pid=62519) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2028.345413] env[62519]: DEBUG nova.network.neutron [req-4b5157eb-3193-42a3-8db3-245f17b5026c req-b764e490-3ba5-439a-b149-280a6dc6428f service nova] [instance: a18af9b7-4548-42d1-8459-508298cb96dc] Updating instance_info_cache with network_info: [{"id": "7b3a7ba6-9ee9-40fb-94c8-f7593ffc1ca2", "address": "fa:16:3e:4a:4f:32", "network": {"id": "3996e7f6-f093-4152-af91-6fb77f32a1c5", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-2013009403-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.142", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "63a46158057949478e5c79fbe0d4d5d4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3093647a-bab7-4562-ada0-428725e8c0fc", "external-id": "nsx-vlan-transportzone-660", "segmentation_id": 660, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7b3a7ba6-9e", "ovs_interfaceid": "7b3a7ba6-9ee9-40fb-94c8-f7593ffc1ca2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2028.402698] env[62519]: DEBUG oslo_concurrency.lockutils [None req-e5503d07-b0ac-465c-85d9-1210feeacb67 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Acquiring lock "39c292a3-7032-4845-9ae5-2a41d13db305" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2028.402933] env[62519]: DEBUG oslo_concurrency.lockutils [None req-e5503d07-b0ac-465c-85d9-1210feeacb67 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Lock "39c292a3-7032-4845-9ae5-2a41d13db305" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2028.434274] env[62519]: INFO nova.compute.manager [None req-1c1de668-722b-4bef-8122-fb6596c6c4bb tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] [instance: 9bf88b2f-63f9-466b-8669-45f17319055d] Swapping old allocation on dict_keys(['f8ca0d98-9158-4b85-ae0e-b106f966dd44']) held by migration ecebd4e9-7aad-4915-8ba8-7e59c0759308 for instance [ 2028.460782] env[62519]: DEBUG nova.scheduler.client.report [None req-1c1de668-722b-4bef-8122-fb6596c6c4bb tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Overwriting current allocation {'allocations': {'f8ca0d98-9158-4b85-ae0e-b106f966dd44': {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}, 'generation': 176}}, 'project_id': 'd73e71476254453fb23164dce09c6d41', 'user_id': '158d93cf743840a8be4e192e6bd4b8bf', 'consumer_generation': 1} on 
consumer 9bf88b2f-63f9-466b-8669-45f17319055d {{(pid=62519) move_allocations /opt/stack/nova/nova/scheduler/client/report.py:2036}} [ 2028.556353] env[62519]: DEBUG oslo_concurrency.lockutils [None req-1c1de668-722b-4bef-8122-fb6596c6c4bb tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Acquiring lock "refresh_cache-9bf88b2f-63f9-466b-8669-45f17319055d" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2028.564906] env[62519]: DEBUG nova.compute.manager [None req-91a48691-0c95-42ef-bfb3-588e4d1b1e4c tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] [instance: d921e3c0-cc52-4c5a-90bc-4059ba499ff1] Starting instance... {{(pid=62519) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2441}} [ 2028.744316] env[62519]: DEBUG oslo_vmware.api [None req-63001563-3bba-4db1-8613-bfc57ecd26c3 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Task: {'id': task-1803478, 'name': ReconfigVM_Task, 'duration_secs': 0.329596} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2028.744566] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-63001563-3bba-4db1-8613-bfc57ecd26c3 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] [instance: 4049de7f-4ace-4017-8f9a-63817de5f81c] Reconfigured VM instance instance-00000077 to attach disk [datastore1] 4049de7f-4ace-4017-8f9a-63817de5f81c/4049de7f-4ace-4017-8f9a-63817de5f81c.vmdk or device None with type sparse {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2028.745226] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-80138488-e8bd-4454-9948-2de532291b10 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2028.751797] env[62519]: DEBUG oslo_vmware.api [None req-63001563-3bba-4db1-8613-bfc57ecd26c3 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Waiting for the task: (returnval){ [ 2028.751797] env[62519]: value = "task-1803479" [ 2028.751797] env[62519]: _type = "Task" [ 2028.751797] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2028.759659] env[62519]: DEBUG oslo_vmware.api [None req-63001563-3bba-4db1-8613-bfc57ecd26c3 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Task: {'id': task-1803479, 'name': Rename_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2028.847766] env[62519]: DEBUG oslo_concurrency.lockutils [req-4b5157eb-3193-42a3-8db3-245f17b5026c req-b764e490-3ba5-439a-b149-280a6dc6428f service nova] Releasing lock "refresh_cache-a18af9b7-4548-42d1-8459-508298cb96dc" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2028.904904] env[62519]: DEBUG nova.compute.manager [None req-e5503d07-b0ac-465c-85d9-1210feeacb67 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] [instance: 39c292a3-7032-4845-9ae5-2a41d13db305] Starting instance... 
{{(pid=62519) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2441}} [ 2029.084044] env[62519]: DEBUG oslo_concurrency.lockutils [None req-91a48691-0c95-42ef-bfb3-588e4d1b1e4c tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2029.084388] env[62519]: DEBUG oslo_concurrency.lockutils [None req-91a48691-0c95-42ef-bfb3-588e4d1b1e4c tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2029.085918] env[62519]: INFO nova.compute.claims [None req-91a48691-0c95-42ef-bfb3-588e4d1b1e4c tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] [instance: d921e3c0-cc52-4c5a-90bc-4059ba499ff1] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2029.261650] env[62519]: DEBUG oslo_vmware.api [None req-63001563-3bba-4db1-8613-bfc57ecd26c3 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Task: {'id': task-1803479, 'name': Rename_Task, 'duration_secs': 0.143341} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2029.261940] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-63001563-3bba-4db1-8613-bfc57ecd26c3 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] [instance: 4049de7f-4ace-4017-8f9a-63817de5f81c] Powering on the VM {{(pid=62519) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2029.262212] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-4ccf349e-8e22-423e-b50d-9e89fac1d5ba {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2029.268993] env[62519]: DEBUG oslo_vmware.api [None req-63001563-3bba-4db1-8613-bfc57ecd26c3 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Waiting for the task: (returnval){ [ 2029.268993] env[62519]: value = "task-1803480" [ 2029.268993] env[62519]: _type = "Task" [ 2029.268993] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2029.279080] env[62519]: DEBUG oslo_vmware.api [None req-63001563-3bba-4db1-8613-bfc57ecd26c3 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Task: {'id': task-1803480, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2029.427333] env[62519]: DEBUG oslo_concurrency.lockutils [None req-e5503d07-b0ac-465c-85d9-1210feeacb67 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2029.556772] env[62519]: DEBUG oslo_concurrency.lockutils [None req-5505890f-601f-4728-a979-16c777543556 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2029.780652] env[62519]: DEBUG oslo_vmware.api [None req-63001563-3bba-4db1-8613-bfc57ecd26c3 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Task: {'id': task-1803480, 'name': PowerOnVM_Task, 'duration_secs': 0.444003} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2029.780928] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-63001563-3bba-4db1-8613-bfc57ecd26c3 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] [instance: 4049de7f-4ace-4017-8f9a-63817de5f81c] Powered on the VM {{(pid=62519) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2029.781470] env[62519]: INFO nova.compute.manager [None req-63001563-3bba-4db1-8613-bfc57ecd26c3 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] [instance: 4049de7f-4ace-4017-8f9a-63817de5f81c] Took 8.66 seconds to spawn the instance on the hypervisor. 
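The CopyVirtualDisk_Task, ExtendVirtualDisk_Task, ReconfigVM_Task, Rename_Task and PowerOnVM_Task entries above all follow the same oslo.vmware pattern: the driver invokes a vSphere *_Task SOAP method and then polls the returned task reference, which is what produces the repeated "progress is N%" and "... completed successfully" lines from wait_for_task/_poll_task. The sketch below shows that pattern against oslo.vmware directly; the vCenter endpoint, credentials and VMDK path are placeholders, not values taken from this run.

# Minimal sketch of the oslo.vmware task pattern seen above: invoke a *_Task
# SOAP method, then poll the returned task until it finishes. The endpoint,
# credentials and datastore path below are placeholders, not from this log.
from oslo_vmware import api

session = api.VMwareAPISession(
    'vcenter.example.org', 'admin', 'secret',   # placeholder endpoint/creds
    api_retry_count=10, task_poll_interval=0.5)

disk_mgr = session.vim.service_content.virtualDiskManager

# invoke_api() issues the SOAP request and returns a Task managed object
# reference; wait_for_task() polls TaskInfo, logging "progress is N%" until
# the task reaches 'success' (or raising if it ends in 'error').
task = session.invoke_api(
    session.vim, 'ExtendVirtualDisk_Task', disk_mgr,
    name='[datastore1] example/example.vmdk',    # hypothetical VMDK path
    newCapacityKb=1048576, eagerZero=False)      # 1048576 KB = 1 GiB root disk
task_info = session.wait_for_task(task)

Nova's vm_util helpers (_extend_virtual_disk, copy_virtual_disk, power_on_instance) wrap exactly this invoke-then-wait sequence, which is why each task in the log appears first as an "Invoking ..." service call and later as a polled Task dict with a duration_secs field.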
[ 2029.781470] env[62519]: DEBUG nova.compute.manager [None req-63001563-3bba-4db1-8613-bfc57ecd26c3 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] [instance: 4049de7f-4ace-4017-8f9a-63817de5f81c] Checking state {{(pid=62519) _get_power_state /opt/stack/nova/nova/compute/manager.py:1799}} [ 2029.782223] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48f87a94-01da-4717-a3d5-0fcbf096dbd4 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2029.847783] env[62519]: DEBUG nova.network.neutron [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] [instance: 9bf88b2f-63f9-466b-8669-45f17319055d] Updating instance_info_cache with network_info: [{"id": "2ee072d4-2bdb-4a83-90dd-06086f515634", "address": "fa:16:3e:e6:86:ec", "network": {"id": "32c7792c-9705-4830-bdbc-4c4159d566f0", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-374750788-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.241", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d73e71476254453fb23164dce09c6d41", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a0a76279-3c11-4bef-b124-2a2ee13fa377", "external-id": "nsx-vlan-transportzone-738", "segmentation_id": 738, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2ee072d4-2b", "ovs_interfaceid": "2ee072d4-2bdb-4a83-90dd-06086f515634", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2030.187337] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d7f9459-6086-4830-9f41-23060bd67850 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2030.196400] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29861704-a7b6-46b7-9341-44be80bceeb3 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2030.225974] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-318888bb-c9c9-4991-b3f7-0b41689b2876 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2030.233628] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4a774a1-ba8a-4579-8f14-843d66192ca1 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2030.246431] env[62519]: DEBUG nova.compute.provider_tree [None req-91a48691-0c95-42ef-bfb3-588e4d1b1e4c tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] Inventory has not changed in ProviderTree for provider: 
f8ca0d98-9158-4b85-ae0e-b106f966dd44 {{(pid=62519) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2030.297904] env[62519]: INFO nova.compute.manager [None req-63001563-3bba-4db1-8613-bfc57ecd26c3 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] [instance: 4049de7f-4ace-4017-8f9a-63817de5f81c] Took 15.30 seconds to build instance. [ 2030.350610] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Releasing lock "refresh_cache-9bf88b2f-63f9-466b-8669-45f17319055d" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2030.350880] env[62519]: DEBUG nova.compute.manager [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] [instance: 9bf88b2f-63f9-466b-8669-45f17319055d] Updated the network info_cache for instance {{(pid=62519) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10489}} [ 2030.351141] env[62519]: DEBUG oslo_concurrency.lockutils [None req-1c1de668-722b-4bef-8122-fb6596c6c4bb tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Acquired lock "refresh_cache-9bf88b2f-63f9-466b-8669-45f17319055d" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2030.351332] env[62519]: DEBUG nova.network.neutron [None req-1c1de668-722b-4bef-8122-fb6596c6c4bb tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] [instance: 9bf88b2f-63f9-466b-8669-45f17319055d] Building network info cache for instance {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2030.352481] env[62519]: DEBUG oslo_service.periodic_task [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62519) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2030.354409] env[62519]: DEBUG oslo_service.periodic_task [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62519) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2030.354409] env[62519]: DEBUG oslo_service.periodic_task [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62519) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2030.354409] env[62519]: DEBUG oslo_service.periodic_task [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62519) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2030.354409] env[62519]: DEBUG oslo_service.periodic_task [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62519) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2030.354409] env[62519]: DEBUG oslo_service.periodic_task [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62519) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2030.354409] 
env[62519]: DEBUG nova.compute.manager [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=62519) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11037}} [ 2030.354409] env[62519]: DEBUG oslo_service.periodic_task [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Running periodic task ComputeManager.update_available_resource {{(pid=62519) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2030.752024] env[62519]: DEBUG nova.scheduler.client.report [None req-91a48691-0c95-42ef-bfb3-588e4d1b1e4c tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] Inventory has not changed for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 159, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2030.838696] env[62519]: DEBUG nova.compute.manager [req-3cd873a5-b92f-41a1-b1eb-c42805d8849f req-0405de31-1228-494a-9a3a-ef62d43b18d5 service nova] [instance: 4049de7f-4ace-4017-8f9a-63817de5f81c] Received event network-changed-55b99762-526d-4033-9eed-24af176c71e4 {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 2030.838917] env[62519]: DEBUG nova.compute.manager [req-3cd873a5-b92f-41a1-b1eb-c42805d8849f req-0405de31-1228-494a-9a3a-ef62d43b18d5 service nova] [instance: 4049de7f-4ace-4017-8f9a-63817de5f81c] Refreshing instance network info cache due to event network-changed-55b99762-526d-4033-9eed-24af176c71e4. 
{{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11628}} [ 2030.839174] env[62519]: DEBUG oslo_concurrency.lockutils [req-3cd873a5-b92f-41a1-b1eb-c42805d8849f req-0405de31-1228-494a-9a3a-ef62d43b18d5 service nova] Acquiring lock "refresh_cache-4049de7f-4ace-4017-8f9a-63817de5f81c" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2030.839318] env[62519]: DEBUG oslo_concurrency.lockutils [req-3cd873a5-b92f-41a1-b1eb-c42805d8849f req-0405de31-1228-494a-9a3a-ef62d43b18d5 service nova] Acquired lock "refresh_cache-4049de7f-4ace-4017-8f9a-63817de5f81c" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2030.839475] env[62519]: DEBUG nova.network.neutron [req-3cd873a5-b92f-41a1-b1eb-c42805d8849f req-0405de31-1228-494a-9a3a-ef62d43b18d5 service nova] [instance: 4049de7f-4ace-4017-8f9a-63817de5f81c] Refreshing network info cache for port 55b99762-526d-4033-9eed-24af176c71e4 {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2030.856201] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2031.098664] env[62519]: DEBUG nova.network.neutron [None req-1c1de668-722b-4bef-8122-fb6596c6c4bb tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] [instance: 9bf88b2f-63f9-466b-8669-45f17319055d] Updating instance_info_cache with network_info: [{"id": "2ee072d4-2bdb-4a83-90dd-06086f515634", "address": "fa:16:3e:e6:86:ec", "network": {"id": "32c7792c-9705-4830-bdbc-4c4159d566f0", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-374750788-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.241", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d73e71476254453fb23164dce09c6d41", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a0a76279-3c11-4bef-b124-2a2ee13fa377", "external-id": "nsx-vlan-transportzone-738", "segmentation_id": 738, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2ee072d4-2b", "ovs_interfaceid": "2ee072d4-2bdb-4a83-90dd-06086f515634", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2031.254400] env[62519]: DEBUG oslo_concurrency.lockutils [None req-91a48691-0c95-42ef-bfb3-588e4d1b1e4c tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.170s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2031.254911] env[62519]: DEBUG 
nova.compute.manager [None req-91a48691-0c95-42ef-bfb3-588e4d1b1e4c tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] [instance: d921e3c0-cc52-4c5a-90bc-4059ba499ff1] Start building networks asynchronously for instance. {{(pid=62519) _build_resources /opt/stack/nova/nova/compute/manager.py:2838}} [ 2031.257825] env[62519]: DEBUG oslo_concurrency.lockutils [None req-e5503d07-b0ac-465c-85d9-1210feeacb67 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 1.831s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2031.259298] env[62519]: INFO nova.compute.claims [None req-e5503d07-b0ac-465c-85d9-1210feeacb67 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] [instance: 39c292a3-7032-4845-9ae5-2a41d13db305] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2031.552112] env[62519]: DEBUG nova.network.neutron [req-3cd873a5-b92f-41a1-b1eb-c42805d8849f req-0405de31-1228-494a-9a3a-ef62d43b18d5 service nova] [instance: 4049de7f-4ace-4017-8f9a-63817de5f81c] Updated VIF entry in instance network info cache for port 55b99762-526d-4033-9eed-24af176c71e4. {{(pid=62519) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2031.552547] env[62519]: DEBUG nova.network.neutron [req-3cd873a5-b92f-41a1-b1eb-c42805d8849f req-0405de31-1228-494a-9a3a-ef62d43b18d5 service nova] [instance: 4049de7f-4ace-4017-8f9a-63817de5f81c] Updating instance_info_cache with network_info: [{"id": "55b99762-526d-4033-9eed-24af176c71e4", "address": "fa:16:3e:d8:0e:f7", "network": {"id": "40fc0e4f-fe1a-4b8c-ace4-4612cfc1a8fc", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-364463489-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.170", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "12977ed65a1b410a987b049e9d1dce3e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8e028024-a9c1-4cae-8849-ea770a7ae0e4", "external-id": "nsx-vlan-transportzone-919", "segmentation_id": 919, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap55b99762-52", "ovs_interfaceid": "55b99762-526d-4033-9eed-24af176c71e4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2031.601810] env[62519]: DEBUG oslo_concurrency.lockutils [None req-1c1de668-722b-4bef-8122-fb6596c6c4bb tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Releasing lock "refresh_cache-9bf88b2f-63f9-466b-8669-45f17319055d" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2031.603402] env[62519]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9ad4285-db1c-4417-99c9-173a0366af40 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2031.610445] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-43381ab2-c149-492a-9f82-e7d3f1560b53 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2031.765905] env[62519]: DEBUG nova.compute.utils [None req-91a48691-0c95-42ef-bfb3-588e4d1b1e4c tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] Using /dev/sd instead of None {{(pid=62519) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2031.767495] env[62519]: DEBUG nova.compute.manager [None req-91a48691-0c95-42ef-bfb3-588e4d1b1e4c tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] [instance: d921e3c0-cc52-4c5a-90bc-4059ba499ff1] Allocating IP information in the background. {{(pid=62519) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1989}} [ 2031.767495] env[62519]: DEBUG nova.network.neutron [None req-91a48691-0c95-42ef-bfb3-588e4d1b1e4c tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] [instance: d921e3c0-cc52-4c5a-90bc-4059ba499ff1] allocate_for_instance() {{(pid=62519) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 2031.803519] env[62519]: DEBUG oslo_concurrency.lockutils [None req-63001563-3bba-4db1-8613-bfc57ecd26c3 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Lock "4049de7f-4ace-4017-8f9a-63817de5f81c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 17.814s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2031.805134] env[62519]: DEBUG nova.policy [None req-91a48691-0c95-42ef-bfb3-588e4d1b1e4c tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'eca1da9825a74bb6acb5695149518624', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '8eb0014c990b48ba8f9cfc5629d72f78', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62519) authorize /opt/stack/nova/nova/policy.py:192}} [ 2032.051237] env[62519]: DEBUG nova.network.neutron [None req-91a48691-0c95-42ef-bfb3-588e4d1b1e4c tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] [instance: d921e3c0-cc52-4c5a-90bc-4059ba499ff1] Successfully created port: c0b67f8d-3fcb-4162-bf30-5b5b4ce5f5a6 {{(pid=62519) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2032.054915] env[62519]: DEBUG oslo_concurrency.lockutils [req-3cd873a5-b92f-41a1-b1eb-c42805d8849f req-0405de31-1228-494a-9a3a-ef62d43b18d5 service nova] Releasing lock "refresh_cache-4049de7f-4ace-4017-8f9a-63817de5f81c" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2032.271262] env[62519]: DEBUG nova.compute.manager [None req-91a48691-0c95-42ef-bfb3-588e4d1b1e4c 
tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] [instance: d921e3c0-cc52-4c5a-90bc-4059ba499ff1] Start building block device mappings for instance. {{(pid=62519) _build_resources /opt/stack/nova/nova/compute/manager.py:2873}} [ 2032.379600] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb7ba4f3-b181-4160-98b3-33ed0f2fe950 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2032.387542] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8c2feab-d7ec-4f79-a678-92a07664dee7 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2032.419192] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b6fd573-130c-4f9d-a57f-40b3bbe4c520 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2032.426748] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12c60dd3-8f74-4c5a-a81b-d90f02cbd88f {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2032.440094] env[62519]: DEBUG nova.compute.provider_tree [None req-e5503d07-b0ac-465c-85d9-1210feeacb67 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Inventory has not changed in ProviderTree for provider: f8ca0d98-9158-4b85-ae0e-b106f966dd44 {{(pid=62519) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2032.691565] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-1c1de668-722b-4bef-8122-fb6596c6c4bb tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] [instance: 9bf88b2f-63f9-466b-8669-45f17319055d] Powering off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2032.691897] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e840edd6-e6ec-478e-97fe-1e5eb29e84b2 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2032.699095] env[62519]: DEBUG oslo_vmware.api [None req-1c1de668-722b-4bef-8122-fb6596c6c4bb tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Waiting for the task: (returnval){ [ 2032.699095] env[62519]: value = "task-1803481" [ 2032.699095] env[62519]: _type = "Task" [ 2032.699095] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2032.707863] env[62519]: DEBUG oslo_vmware.api [None req-1c1de668-722b-4bef-8122-fb6596c6c4bb tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Task: {'id': task-1803481, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2032.943998] env[62519]: DEBUG nova.scheduler.client.report [None req-e5503d07-b0ac-465c-85d9-1210feeacb67 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Inventory has not changed for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 159, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2033.208939] env[62519]: DEBUG oslo_vmware.api [None req-1c1de668-722b-4bef-8122-fb6596c6c4bb tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Task: {'id': task-1803481, 'name': PowerOffVM_Task, 'duration_secs': 0.411486} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2033.209280] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-1c1de668-722b-4bef-8122-fb6596c6c4bb tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] [instance: 9bf88b2f-63f9-466b-8669-45f17319055d] Powered off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2033.209963] env[62519]: DEBUG nova.virt.hardware [None req-1c1de668-722b-4bef-8122-fb6596c6c4bb tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T07:56:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=15793716-f1d9-4a86-9030-717adf498693,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 2033.210189] env[62519]: DEBUG nova.virt.hardware [None req-1c1de668-722b-4bef-8122-fb6596c6c4bb tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Flavor limits 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2033.210347] env[62519]: DEBUG nova.virt.hardware [None req-1c1de668-722b-4bef-8122-fb6596c6c4bb tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Image limits 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 2033.210533] env[62519]: DEBUG nova.virt.hardware [None req-1c1de668-722b-4bef-8122-fb6596c6c4bb tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Flavor pref 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2033.210683] env[62519]: DEBUG nova.virt.hardware [None 
req-1c1de668-722b-4bef-8122-fb6596c6c4bb tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Image pref 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 2033.210832] env[62519]: DEBUG nova.virt.hardware [None req-1c1de668-722b-4bef-8122-fb6596c6c4bb tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 2033.211087] env[62519]: DEBUG nova.virt.hardware [None req-1c1de668-722b-4bef-8122-fb6596c6c4bb tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 2033.211258] env[62519]: DEBUG nova.virt.hardware [None req-1c1de668-722b-4bef-8122-fb6596c6c4bb tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62519) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 2033.211441] env[62519]: DEBUG nova.virt.hardware [None req-1c1de668-722b-4bef-8122-fb6596c6c4bb tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Got 1 possible topologies {{(pid=62519) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 2033.211673] env[62519]: DEBUG nova.virt.hardware [None req-1c1de668-722b-4bef-8122-fb6596c6c4bb tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 2033.211796] env[62519]: DEBUG nova.virt.hardware [None req-1c1de668-722b-4bef-8122-fb6596c6c4bb tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 2033.216717] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e3d9b970-04e7-4b84-9581-3aaf27305001 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2033.233233] env[62519]: DEBUG oslo_vmware.api [None req-1c1de668-722b-4bef-8122-fb6596c6c4bb tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Waiting for the task: (returnval){ [ 2033.233233] env[62519]: value = "task-1803482" [ 2033.233233] env[62519]: _type = "Task" [ 2033.233233] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2033.241441] env[62519]: DEBUG oslo_vmware.api [None req-1c1de668-722b-4bef-8122-fb6596c6c4bb tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Task: {'id': task-1803482, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2033.281498] env[62519]: DEBUG nova.compute.manager [None req-91a48691-0c95-42ef-bfb3-588e4d1b1e4c tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] [instance: d921e3c0-cc52-4c5a-90bc-4059ba499ff1] Start spawning the instance on the hypervisor. {{(pid=62519) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2647}} [ 2033.311448] env[62519]: DEBUG nova.virt.hardware [None req-91a48691-0c95-42ef-bfb3-588e4d1b1e4c tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T07:56:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T07:55:55Z,direct_url=,disk_format='vmdk',id=15793716-f1d9-4a86-9030-717adf498693,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4069337684d348e0a1ab4eb6a1a2b14d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T07:55:56Z,virtual_size=,visibility=), allow threads: False {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 2033.311692] env[62519]: DEBUG nova.virt.hardware [None req-91a48691-0c95-42ef-bfb3-588e4d1b1e4c tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] Flavor limits 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2033.311848] env[62519]: DEBUG nova.virt.hardware [None req-91a48691-0c95-42ef-bfb3-588e4d1b1e4c tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] Image limits 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 2033.312041] env[62519]: DEBUG nova.virt.hardware [None req-91a48691-0c95-42ef-bfb3-588e4d1b1e4c tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] Flavor pref 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2033.312195] env[62519]: DEBUG nova.virt.hardware [None req-91a48691-0c95-42ef-bfb3-588e4d1b1e4c tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] Image pref 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 2033.312343] env[62519]: DEBUG nova.virt.hardware [None req-91a48691-0c95-42ef-bfb3-588e4d1b1e4c tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 2033.312549] env[62519]: DEBUG nova.virt.hardware [None req-91a48691-0c95-42ef-bfb3-588e4d1b1e4c tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 
2033.312704] env[62519]: DEBUG nova.virt.hardware [None req-91a48691-0c95-42ef-bfb3-588e4d1b1e4c tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62519) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 2033.312868] env[62519]: DEBUG nova.virt.hardware [None req-91a48691-0c95-42ef-bfb3-588e4d1b1e4c tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] Got 1 possible topologies {{(pid=62519) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 2033.313039] env[62519]: DEBUG nova.virt.hardware [None req-91a48691-0c95-42ef-bfb3-588e4d1b1e4c tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 2033.313228] env[62519]: DEBUG nova.virt.hardware [None req-91a48691-0c95-42ef-bfb3-588e4d1b1e4c tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 2033.314104] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84bf394e-2181-4412-a65f-6fe4bf5c1a65 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2033.322706] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d563e1b-6035-47a3-8c0a-7d346c61658c {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2033.449015] env[62519]: DEBUG oslo_concurrency.lockutils [None req-e5503d07-b0ac-465c-85d9-1210feeacb67 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.191s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2033.449605] env[62519]: DEBUG nova.compute.manager [None req-e5503d07-b0ac-465c-85d9-1210feeacb67 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] [instance: 39c292a3-7032-4845-9ae5-2a41d13db305] Start building networks asynchronously for instance. 
{{(pid=62519) _build_resources /opt/stack/nova/nova/compute/manager.py:2838}} [ 2033.452487] env[62519]: DEBUG oslo_concurrency.lockutils [None req-5505890f-601f-4728-a979-16c777543556 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 3.896s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2033.452710] env[62519]: DEBUG nova.objects.instance [None req-5505890f-601f-4728-a979-16c777543556 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Lazy-loading 'pci_requests' on Instance uuid 47439070-54d8-454c-bf1d-7a2a33d82e9a {{(pid=62519) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2033.482350] env[62519]: DEBUG nova.compute.manager [req-bd821eb8-5eae-46d4-baa2-3d02ad4b572a req-dcba5373-0ee3-4ea1-8355-27e2e25a6069 service nova] [instance: d921e3c0-cc52-4c5a-90bc-4059ba499ff1] Received event network-vif-plugged-c0b67f8d-3fcb-4162-bf30-5b5b4ce5f5a6 {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 2033.482581] env[62519]: DEBUG oslo_concurrency.lockutils [req-bd821eb8-5eae-46d4-baa2-3d02ad4b572a req-dcba5373-0ee3-4ea1-8355-27e2e25a6069 service nova] Acquiring lock "d921e3c0-cc52-4c5a-90bc-4059ba499ff1-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2033.482872] env[62519]: DEBUG oslo_concurrency.lockutils [req-bd821eb8-5eae-46d4-baa2-3d02ad4b572a req-dcba5373-0ee3-4ea1-8355-27e2e25a6069 service nova] Lock "d921e3c0-cc52-4c5a-90bc-4059ba499ff1-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2033.482965] env[62519]: DEBUG oslo_concurrency.lockutils [req-bd821eb8-5eae-46d4-baa2-3d02ad4b572a req-dcba5373-0ee3-4ea1-8355-27e2e25a6069 service nova] Lock "d921e3c0-cc52-4c5a-90bc-4059ba499ff1-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2033.483303] env[62519]: DEBUG nova.compute.manager [req-bd821eb8-5eae-46d4-baa2-3d02ad4b572a req-dcba5373-0ee3-4ea1-8355-27e2e25a6069 service nova] [instance: d921e3c0-cc52-4c5a-90bc-4059ba499ff1] No waiting events found dispatching network-vif-plugged-c0b67f8d-3fcb-4162-bf30-5b5b4ce5f5a6 {{(pid=62519) pop_instance_event /opt/stack/nova/nova/compute/manager.py:323}} [ 2033.483559] env[62519]: WARNING nova.compute.manager [req-bd821eb8-5eae-46d4-baa2-3d02ad4b572a req-dcba5373-0ee3-4ea1-8355-27e2e25a6069 service nova] [instance: d921e3c0-cc52-4c5a-90bc-4059ba499ff1] Received unexpected event network-vif-plugged-c0b67f8d-3fcb-4162-bf30-5b5b4ce5f5a6 for instance with vm_state building and task_state spawning. 
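[editor's note] The req-bd821eb8 entries just above show Neutron's network-vif-plugged-c0b67f8d-3fcb-4162-bf30-5b5b4ce5f5a6 event arriving while instance d921e3c0-cc52-4c5a-90bc-4059ba499ff1 is still building/spawning: the handler briefly takes the per-instance "<uuid>-events" lock, finds no registered waiter, and therefore logs the event as unexpected. The snippet below is only a minimal, hypothetical sketch of that waiter-registry pattern using plain threading (names such as SimpleInstanceEvents and dispatch are invented for illustration); it is not Nova's actual nova.compute.manager.InstanceEvents implementation.

    # Illustrative sketch only -- NOT Nova's real code.
    import threading

    class SimpleInstanceEvents:
        def __init__(self):
            self._lock = threading.Lock()   # plays the role of the "<uuid>-events" lock in the log
            self._waiters = {}              # {(instance_uuid, event_name): threading.Event}

        def prepare_for_event(self, instance_uuid, event_name):
            """Registered by the spawning thread before it blocks waiting for the event."""
            ev = threading.Event()
            with self._lock:
                self._waiters[(instance_uuid, event_name)] = ev
            return ev

        def pop_instance_event(self, instance_uuid, event_name):
            """Called by the external-event handler; mirrors the acquire/release pair above."""
            with self._lock:                # "acquired ... held 0.000s" in the log
                return self._waiters.pop((instance_uuid, event_name), None)

    def dispatch(events, instance_uuid, event_name):
        waiter = events.pop_instance_event(instance_uuid, event_name)
        if waiter is None:
            # Corresponds to the WARNING above: no one is waiting yet because the
            # instance is still building/spawning, so the event is "unexpected".
            print("Received unexpected event %s for instance %s" % (event_name, instance_uuid))
        else:
            waiter.set()                    # wake the thread blocked on this event

Under this sketch, the warning simply means the event raced ahead of the spawn path registering its waiter; the actual driver later re-checks port state, so the message is informational here, not an error.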
[ 2033.577284] env[62519]: DEBUG nova.network.neutron [None req-91a48691-0c95-42ef-bfb3-588e4d1b1e4c tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] [instance: d921e3c0-cc52-4c5a-90bc-4059ba499ff1] Successfully updated port: c0b67f8d-3fcb-4162-bf30-5b5b4ce5f5a6 {{(pid=62519) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2033.745243] env[62519]: DEBUG oslo_vmware.api [None req-1c1de668-722b-4bef-8122-fb6596c6c4bb tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Task: {'id': task-1803482, 'name': ReconfigVM_Task, 'duration_secs': 0.281275} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2033.746364] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4bc5452b-c24d-41cc-869b-1419edbcf377 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2033.768074] env[62519]: DEBUG nova.virt.hardware [None req-1c1de668-722b-4bef-8122-fb6596c6c4bb tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T07:56:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=15793716-f1d9-4a86-9030-717adf498693,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 2033.768598] env[62519]: DEBUG nova.virt.hardware [None req-1c1de668-722b-4bef-8122-fb6596c6c4bb tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Flavor limits 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2033.769404] env[62519]: DEBUG nova.virt.hardware [None req-1c1de668-722b-4bef-8122-fb6596c6c4bb tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Image limits 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 2033.769404] env[62519]: DEBUG nova.virt.hardware [None req-1c1de668-722b-4bef-8122-fb6596c6c4bb tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Flavor pref 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2033.769404] env[62519]: DEBUG nova.virt.hardware [None req-1c1de668-722b-4bef-8122-fb6596c6c4bb tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Image pref 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 2033.769404] env[62519]: DEBUG nova.virt.hardware [None req-1c1de668-722b-4bef-8122-fb6596c6c4bb tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62519) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:434}} [ 2033.769682] env[62519]: DEBUG nova.virt.hardware [None req-1c1de668-722b-4bef-8122-fb6596c6c4bb tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 2033.769742] env[62519]: DEBUG nova.virt.hardware [None req-1c1de668-722b-4bef-8122-fb6596c6c4bb tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62519) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 2033.769884] env[62519]: DEBUG nova.virt.hardware [None req-1c1de668-722b-4bef-8122-fb6596c6c4bb tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Got 1 possible topologies {{(pid=62519) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 2033.770063] env[62519]: DEBUG nova.virt.hardware [None req-1c1de668-722b-4bef-8122-fb6596c6c4bb tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 2033.770245] env[62519]: DEBUG nova.virt.hardware [None req-1c1de668-722b-4bef-8122-fb6596c6c4bb tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 2033.771071] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c49ebe05-50cf-494d-9661-6adb60492f7c {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2033.776724] env[62519]: DEBUG oslo_vmware.api [None req-1c1de668-722b-4bef-8122-fb6596c6c4bb tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Waiting for the task: (returnval){ [ 2033.776724] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]524131e8-f8ae-bf71-0010-18c332b8da15" [ 2033.776724] env[62519]: _type = "Task" [ 2033.776724] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2033.784748] env[62519]: DEBUG oslo_vmware.api [None req-1c1de668-722b-4bef-8122-fb6596c6c4bb tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]524131e8-f8ae-bf71-0010-18c332b8da15, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2033.956294] env[62519]: DEBUG nova.compute.utils [None req-e5503d07-b0ac-465c-85d9-1210feeacb67 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Using /dev/sd instead of None {{(pid=62519) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2033.958984] env[62519]: DEBUG nova.objects.instance [None req-5505890f-601f-4728-a979-16c777543556 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Lazy-loading 'numa_topology' on Instance uuid 47439070-54d8-454c-bf1d-7a2a33d82e9a {{(pid=62519) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2033.960107] env[62519]: DEBUG nova.compute.manager [None req-e5503d07-b0ac-465c-85d9-1210feeacb67 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] [instance: 39c292a3-7032-4845-9ae5-2a41d13db305] Allocating IP information in the background. {{(pid=62519) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1989}} [ 2033.960275] env[62519]: DEBUG nova.network.neutron [None req-e5503d07-b0ac-465c-85d9-1210feeacb67 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] [instance: 39c292a3-7032-4845-9ae5-2a41d13db305] allocate_for_instance() {{(pid=62519) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 2033.998770] env[62519]: DEBUG nova.policy [None req-e5503d07-b0ac-465c-85d9-1210feeacb67 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '7eca5c7b079d4785941d68d7c51df5e0', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '63a46158057949478e5c79fbe0d4d5d4', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62519) authorize /opt/stack/nova/nova/policy.py:192}} [ 2034.080559] env[62519]: DEBUG oslo_concurrency.lockutils [None req-91a48691-0c95-42ef-bfb3-588e4d1b1e4c tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] Acquiring lock "refresh_cache-d921e3c0-cc52-4c5a-90bc-4059ba499ff1" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2034.080677] env[62519]: DEBUG oslo_concurrency.lockutils [None req-91a48691-0c95-42ef-bfb3-588e4d1b1e4c tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] Acquired lock "refresh_cache-d921e3c0-cc52-4c5a-90bc-4059ba499ff1" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2034.080955] env[62519]: DEBUG nova.network.neutron [None req-91a48691-0c95-42ef-bfb3-588e4d1b1e4c tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] [instance: d921e3c0-cc52-4c5a-90bc-4059ba499ff1] Building network info cache for instance {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2034.258860] env[62519]: DEBUG nova.network.neutron [None req-e5503d07-b0ac-465c-85d9-1210feeacb67 tempest-AttachInterfacesTestJSON-1996275057 
tempest-AttachInterfacesTestJSON-1996275057-project-member] [instance: 39c292a3-7032-4845-9ae5-2a41d13db305] Successfully created port: 03d52b0a-655f-4661-87b2-35d1b1af19a4 {{(pid=62519) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2034.287171] env[62519]: DEBUG oslo_vmware.api [None req-1c1de668-722b-4bef-8122-fb6596c6c4bb tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]524131e8-f8ae-bf71-0010-18c332b8da15, 'name': SearchDatastore_Task, 'duration_secs': 0.01141} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2034.292634] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-1c1de668-722b-4bef-8122-fb6596c6c4bb tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] [instance: 9bf88b2f-63f9-466b-8669-45f17319055d] Reconfiguring VM instance instance-00000071 to detach disk 2000 {{(pid=62519) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 2034.292910] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1228942c-653f-44aa-9988-4fd31966e09c {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2034.311817] env[62519]: DEBUG oslo_vmware.api [None req-1c1de668-722b-4bef-8122-fb6596c6c4bb tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Waiting for the task: (returnval){ [ 2034.311817] env[62519]: value = "task-1803483" [ 2034.311817] env[62519]: _type = "Task" [ 2034.311817] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2034.319604] env[62519]: DEBUG oslo_vmware.api [None req-1c1de668-722b-4bef-8122-fb6596c6c4bb tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Task: {'id': task-1803483, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2034.461151] env[62519]: DEBUG nova.compute.manager [None req-e5503d07-b0ac-465c-85d9-1210feeacb67 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] [instance: 39c292a3-7032-4845-9ae5-2a41d13db305] Start building block device mappings for instance. {{(pid=62519) _build_resources /opt/stack/nova/nova/compute/manager.py:2873}} [ 2034.465076] env[62519]: INFO nova.compute.claims [None req-5505890f-601f-4728-a979-16c777543556 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] [instance: 47439070-54d8-454c-bf1d-7a2a33d82e9a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2034.617447] env[62519]: DEBUG nova.network.neutron [None req-91a48691-0c95-42ef-bfb3-588e4d1b1e4c tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] [instance: d921e3c0-cc52-4c5a-90bc-4059ba499ff1] Instance cache missing network info. 
{{(pid=62519) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2034.725356] env[62519]: DEBUG nova.network.neutron [None req-91a48691-0c95-42ef-bfb3-588e4d1b1e4c tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] [instance: d921e3c0-cc52-4c5a-90bc-4059ba499ff1] Updating instance_info_cache with network_info: [{"id": "c0b67f8d-3fcb-4162-bf30-5b5b4ce5f5a6", "address": "fa:16:3e:a2:ac:b7", "network": {"id": "8bab7e32-d01d-4dff-8b50-f49f6a04314c", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-1759123998-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "8eb0014c990b48ba8f9cfc5629d72f78", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "61a172ee-af3f-473e-b12a-3fee5bf39c8d", "external-id": "nsx-vlan-transportzone-997", "segmentation_id": 997, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc0b67f8d-3f", "ovs_interfaceid": "c0b67f8d-3fcb-4162-bf30-5b5b4ce5f5a6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2034.823722] env[62519]: DEBUG oslo_vmware.api [None req-1c1de668-722b-4bef-8122-fb6596c6c4bb tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Task: {'id': task-1803483, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2035.228022] env[62519]: DEBUG oslo_concurrency.lockutils [None req-91a48691-0c95-42ef-bfb3-588e4d1b1e4c tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] Releasing lock "refresh_cache-d921e3c0-cc52-4c5a-90bc-4059ba499ff1" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2035.228120] env[62519]: DEBUG nova.compute.manager [None req-91a48691-0c95-42ef-bfb3-588e4d1b1e4c tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] [instance: d921e3c0-cc52-4c5a-90bc-4059ba499ff1] Instance network_info: |[{"id": "c0b67f8d-3fcb-4162-bf30-5b5b4ce5f5a6", "address": "fa:16:3e:a2:ac:b7", "network": {"id": "8bab7e32-d01d-4dff-8b50-f49f6a04314c", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-1759123998-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "8eb0014c990b48ba8f9cfc5629d72f78", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "61a172ee-af3f-473e-b12a-3fee5bf39c8d", "external-id": "nsx-vlan-transportzone-997", "segmentation_id": 997, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc0b67f8d-3f", "ovs_interfaceid": "c0b67f8d-3fcb-4162-bf30-5b5b4ce5f5a6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62519) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2004}} [ 2035.228560] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-91a48691-0c95-42ef-bfb3-588e4d1b1e4c tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] [instance: d921e3c0-cc52-4c5a-90bc-4059ba499ff1] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:a2:ac:b7', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '61a172ee-af3f-473e-b12a-3fee5bf39c8d', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'c0b67f8d-3fcb-4162-bf30-5b5b4ce5f5a6', 'vif_model': 'vmxnet3'}] {{(pid=62519) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2035.236499] env[62519]: DEBUG oslo.service.loopingcall [None req-91a48691-0c95-42ef-bfb3-588e4d1b1e4c tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62519) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2035.236743] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d921e3c0-cc52-4c5a-90bc-4059ba499ff1] Creating VM on the ESX host {{(pid=62519) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2035.236884] env[62519]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-af2089c0-6998-4e06-8eb9-db3feedca478 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2035.258491] env[62519]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2035.258491] env[62519]: value = "task-1803484" [ 2035.258491] env[62519]: _type = "Task" [ 2035.258491] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2035.266822] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1803484, 'name': CreateVM_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2035.322170] env[62519]: DEBUG oslo_vmware.api [None req-1c1de668-722b-4bef-8122-fb6596c6c4bb tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Task: {'id': task-1803483, 'name': ReconfigVM_Task, 'duration_secs': 0.586382} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2035.322462] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-1c1de668-722b-4bef-8122-fb6596c6c4bb tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] [instance: 9bf88b2f-63f9-466b-8669-45f17319055d] Reconfigured VM instance instance-00000071 to detach disk 2000 {{(pid=62519) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 2035.323251] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a3d93ff2-c362-4812-9938-ebd16b3b9b93 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2035.348756] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-1c1de668-722b-4bef-8122-fb6596c6c4bb tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] [instance: 9bf88b2f-63f9-466b-8669-45f17319055d] Reconfiguring VM instance instance-00000071 to attach disk [datastore1] 9bf88b2f-63f9-466b-8669-45f17319055d/9bf88b2f-63f9-466b-8669-45f17319055d.vmdk or device None with type thin {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2035.349077] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f37cab06-25a1-493b-8a61-009760093a69 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2035.366923] env[62519]: DEBUG oslo_vmware.api [None req-1c1de668-722b-4bef-8122-fb6596c6c4bb tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Waiting for the task: (returnval){ [ 2035.366923] env[62519]: value = "task-1803485" [ 2035.366923] env[62519]: _type = "Task" [ 2035.366923] env[62519]: } to complete. 
{{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2035.377224] env[62519]: DEBUG oslo_vmware.api [None req-1c1de668-722b-4bef-8122-fb6596c6c4bb tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Task: {'id': task-1803485, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2035.476146] env[62519]: DEBUG nova.compute.manager [None req-e5503d07-b0ac-465c-85d9-1210feeacb67 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] [instance: 39c292a3-7032-4845-9ae5-2a41d13db305] Start spawning the instance on the hypervisor. {{(pid=62519) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2647}} [ 2035.507878] env[62519]: DEBUG nova.virt.hardware [None req-e5503d07-b0ac-465c-85d9-1210feeacb67 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T07:56:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T07:55:55Z,direct_url=,disk_format='vmdk',id=15793716-f1d9-4a86-9030-717adf498693,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4069337684d348e0a1ab4eb6a1a2b14d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T07:55:56Z,virtual_size=,visibility=), allow threads: False {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 2035.508145] env[62519]: DEBUG nova.virt.hardware [None req-e5503d07-b0ac-465c-85d9-1210feeacb67 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Flavor limits 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2035.508328] env[62519]: DEBUG nova.virt.hardware [None req-e5503d07-b0ac-465c-85d9-1210feeacb67 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Image limits 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 2035.508524] env[62519]: DEBUG nova.virt.hardware [None req-e5503d07-b0ac-465c-85d9-1210feeacb67 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Flavor pref 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2035.508686] env[62519]: DEBUG nova.virt.hardware [None req-e5503d07-b0ac-465c-85d9-1210feeacb67 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Image pref 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 2035.508859] env[62519]: DEBUG nova.virt.hardware [None req-e5503d07-b0ac-465c-85d9-1210feeacb67 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62519) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:434}} [ 2035.509179] env[62519]: DEBUG nova.virt.hardware [None req-e5503d07-b0ac-465c-85d9-1210feeacb67 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 2035.509284] env[62519]: DEBUG nova.virt.hardware [None req-e5503d07-b0ac-465c-85d9-1210feeacb67 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62519) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 2035.509458] env[62519]: DEBUG nova.virt.hardware [None req-e5503d07-b0ac-465c-85d9-1210feeacb67 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Got 1 possible topologies {{(pid=62519) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 2035.509632] env[62519]: DEBUG nova.virt.hardware [None req-e5503d07-b0ac-465c-85d9-1210feeacb67 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 2035.509824] env[62519]: DEBUG nova.virt.hardware [None req-e5503d07-b0ac-465c-85d9-1210feeacb67 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 2035.510800] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-508c932a-9e67-4c77-8dac-19969e5454be {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2035.518281] env[62519]: DEBUG nova.compute.manager [req-4ef61264-ebb5-4397-a355-f753e56a8c33 req-65b91aab-6ef3-4f57-b4aa-88ace978e886 service nova] [instance: d921e3c0-cc52-4c5a-90bc-4059ba499ff1] Received event network-changed-c0b67f8d-3fcb-4162-bf30-5b5b4ce5f5a6 {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 2035.518569] env[62519]: DEBUG nova.compute.manager [req-4ef61264-ebb5-4397-a355-f753e56a8c33 req-65b91aab-6ef3-4f57-b4aa-88ace978e886 service nova] [instance: d921e3c0-cc52-4c5a-90bc-4059ba499ff1] Refreshing instance network info cache due to event network-changed-c0b67f8d-3fcb-4162-bf30-5b5b4ce5f5a6. 
{{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11628}} [ 2035.518692] env[62519]: DEBUG oslo_concurrency.lockutils [req-4ef61264-ebb5-4397-a355-f753e56a8c33 req-65b91aab-6ef3-4f57-b4aa-88ace978e886 service nova] Acquiring lock "refresh_cache-d921e3c0-cc52-4c5a-90bc-4059ba499ff1" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2035.518845] env[62519]: DEBUG oslo_concurrency.lockutils [req-4ef61264-ebb5-4397-a355-f753e56a8c33 req-65b91aab-6ef3-4f57-b4aa-88ace978e886 service nova] Acquired lock "refresh_cache-d921e3c0-cc52-4c5a-90bc-4059ba499ff1" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2035.519018] env[62519]: DEBUG nova.network.neutron [req-4ef61264-ebb5-4397-a355-f753e56a8c33 req-65b91aab-6ef3-4f57-b4aa-88ace978e886 service nova] [instance: d921e3c0-cc52-4c5a-90bc-4059ba499ff1] Refreshing network info cache for port c0b67f8d-3fcb-4162-bf30-5b5b4ce5f5a6 {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2035.528242] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-efe6d062-f046-4a5d-b4c8-565a64e3a3bd {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2035.613682] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd19042f-69e1-4a74-9a72-2a54adf79b80 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2035.624562] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d0eeee9-4d51-4e01-94f7-1cd541d57b4d {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2035.681302] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b01169e5-27cd-447f-afd3-2167ccde8ba3 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2035.692680] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f50bb84-96a0-48df-8cbc-5f66ae4737e8 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2035.709252] env[62519]: DEBUG nova.compute.provider_tree [None req-5505890f-601f-4728-a979-16c777543556 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Inventory has not changed in ProviderTree for provider: f8ca0d98-9158-4b85-ae0e-b106f966dd44 {{(pid=62519) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2035.755241] env[62519]: DEBUG nova.network.neutron [None req-e5503d07-b0ac-465c-85d9-1210feeacb67 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] [instance: 39c292a3-7032-4845-9ae5-2a41d13db305] Successfully updated port: 03d52b0a-655f-4661-87b2-35d1b1af19a4 {{(pid=62519) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2035.769526] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1803484, 'name': CreateVM_Task, 'duration_secs': 0.362129} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2035.769692] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d921e3c0-cc52-4c5a-90bc-4059ba499ff1] Created VM on the ESX host {{(pid=62519) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2035.770371] env[62519]: DEBUG oslo_concurrency.lockutils [None req-91a48691-0c95-42ef-bfb3-588e4d1b1e4c tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2035.770542] env[62519]: DEBUG oslo_concurrency.lockutils [None req-91a48691-0c95-42ef-bfb3-588e4d1b1e4c tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] Acquired lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2035.770933] env[62519]: DEBUG oslo_concurrency.lockutils [None req-91a48691-0c95-42ef-bfb3-588e4d1b1e4c tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2035.771221] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4d3b5c89-a88f-4e35-b3db-991eea74bfda {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2035.775857] env[62519]: DEBUG oslo_vmware.api [None req-91a48691-0c95-42ef-bfb3-588e4d1b1e4c tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] Waiting for the task: (returnval){ [ 2035.775857] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]52e31bd8-fe65-b4c9-7975-1bb94f51eab8" [ 2035.775857] env[62519]: _type = "Task" [ 2035.775857] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2035.783970] env[62519]: DEBUG oslo_vmware.api [None req-91a48691-0c95-42ef-bfb3-588e4d1b1e4c tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52e31bd8-fe65-b4c9-7975-1bb94f51eab8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2035.876680] env[62519]: DEBUG oslo_vmware.api [None req-1c1de668-722b-4bef-8122-fb6596c6c4bb tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Task: {'id': task-1803485, 'name': ReconfigVM_Task, 'duration_secs': 0.483031} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2035.876988] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-1c1de668-722b-4bef-8122-fb6596c6c4bb tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] [instance: 9bf88b2f-63f9-466b-8669-45f17319055d] Reconfigured VM instance instance-00000071 to attach disk [datastore1] 9bf88b2f-63f9-466b-8669-45f17319055d/9bf88b2f-63f9-466b-8669-45f17319055d.vmdk or device None with type thin {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2035.877829] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3bf83939-842e-4daf-a4ff-dc201360889c {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2035.898722] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93730520-8fa5-4ca0-81fc-71aa041c434e {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2035.919321] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce30060c-fdf5-406e-8412-2484fd6b3256 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2035.941321] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-082a422a-5012-49e9-8151-446277ef524d {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2035.948508] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-1c1de668-722b-4bef-8122-fb6596c6c4bb tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] [instance: 9bf88b2f-63f9-466b-8669-45f17319055d] Powering on the VM {{(pid=62519) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2035.948757] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-2b44f894-44ad-4eda-b7c6-ce410f2e8ccd {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2035.954652] env[62519]: DEBUG oslo_vmware.api [None req-1c1de668-722b-4bef-8122-fb6596c6c4bb tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Waiting for the task: (returnval){ [ 2035.954652] env[62519]: value = "task-1803486" [ 2035.954652] env[62519]: _type = "Task" [ 2035.954652] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2035.962098] env[62519]: DEBUG oslo_vmware.api [None req-1c1de668-722b-4bef-8122-fb6596c6c4bb tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Task: {'id': task-1803486, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2036.209679] env[62519]: DEBUG nova.network.neutron [req-4ef61264-ebb5-4397-a355-f753e56a8c33 req-65b91aab-6ef3-4f57-b4aa-88ace978e886 service nova] [instance: d921e3c0-cc52-4c5a-90bc-4059ba499ff1] Updated VIF entry in instance network info cache for port c0b67f8d-3fcb-4162-bf30-5b5b4ce5f5a6. 
{{(pid=62519) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2036.210129] env[62519]: DEBUG nova.network.neutron [req-4ef61264-ebb5-4397-a355-f753e56a8c33 req-65b91aab-6ef3-4f57-b4aa-88ace978e886 service nova] [instance: d921e3c0-cc52-4c5a-90bc-4059ba499ff1] Updating instance_info_cache with network_info: [{"id": "c0b67f8d-3fcb-4162-bf30-5b5b4ce5f5a6", "address": "fa:16:3e:a2:ac:b7", "network": {"id": "8bab7e32-d01d-4dff-8b50-f49f6a04314c", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-1759123998-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "8eb0014c990b48ba8f9cfc5629d72f78", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "61a172ee-af3f-473e-b12a-3fee5bf39c8d", "external-id": "nsx-vlan-transportzone-997", "segmentation_id": 997, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc0b67f8d-3f", "ovs_interfaceid": "c0b67f8d-3fcb-4162-bf30-5b5b4ce5f5a6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2036.212692] env[62519]: DEBUG nova.scheduler.client.report [None req-5505890f-601f-4728-a979-16c777543556 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Inventory has not changed for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 159, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2036.257636] env[62519]: DEBUG oslo_concurrency.lockutils [None req-e5503d07-b0ac-465c-85d9-1210feeacb67 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Acquiring lock "refresh_cache-39c292a3-7032-4845-9ae5-2a41d13db305" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2036.257762] env[62519]: DEBUG oslo_concurrency.lockutils [None req-e5503d07-b0ac-465c-85d9-1210feeacb67 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Acquired lock "refresh_cache-39c292a3-7032-4845-9ae5-2a41d13db305" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2036.257907] env[62519]: DEBUG nova.network.neutron [None req-e5503d07-b0ac-465c-85d9-1210feeacb67 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] [instance: 39c292a3-7032-4845-9ae5-2a41d13db305] Building network info cache for instance {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2036.287032] env[62519]: DEBUG oslo_vmware.api [None 
req-91a48691-0c95-42ef-bfb3-588e4d1b1e4c tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52e31bd8-fe65-b4c9-7975-1bb94f51eab8, 'name': SearchDatastore_Task, 'duration_secs': 0.009369} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2036.287224] env[62519]: DEBUG oslo_concurrency.lockutils [None req-91a48691-0c95-42ef-bfb3-588e4d1b1e4c tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] Releasing lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2036.287460] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-91a48691-0c95-42ef-bfb3-588e4d1b1e4c tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] [instance: d921e3c0-cc52-4c5a-90bc-4059ba499ff1] Processing image 15793716-f1d9-4a86-9030-717adf498693 {{(pid=62519) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2036.287691] env[62519]: DEBUG oslo_concurrency.lockutils [None req-91a48691-0c95-42ef-bfb3-588e4d1b1e4c tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2036.287836] env[62519]: DEBUG oslo_concurrency.lockutils [None req-91a48691-0c95-42ef-bfb3-588e4d1b1e4c tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] Acquired lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2036.288119] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-91a48691-0c95-42ef-bfb3-588e4d1b1e4c tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62519) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2036.288344] env[62519]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-3bd38572-a3d8-40af-98db-276bab4050f7 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2036.296712] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-91a48691-0c95-42ef-bfb3-588e4d1b1e4c tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62519) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2036.296891] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-91a48691-0c95-42ef-bfb3-588e4d1b1e4c tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62519) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2036.297570] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-aec8d150-8465-4f6a-8b0e-cb5be9c62136 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2036.303305] env[62519]: DEBUG oslo_vmware.api [None req-91a48691-0c95-42ef-bfb3-588e4d1b1e4c tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] Waiting for the task: (returnval){ [ 2036.303305] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]52351dd9-5c5f-82b6-234a-acd0574faf81" [ 2036.303305] env[62519]: _type = "Task" [ 2036.303305] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2036.310810] env[62519]: DEBUG oslo_vmware.api [None req-91a48691-0c95-42ef-bfb3-588e4d1b1e4c tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52351dd9-5c5f-82b6-234a-acd0574faf81, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2036.464542] env[62519]: DEBUG oslo_vmware.api [None req-1c1de668-722b-4bef-8122-fb6596c6c4bb tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Task: {'id': task-1803486, 'name': PowerOnVM_Task} progress is 88%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2036.718608] env[62519]: DEBUG oslo_concurrency.lockutils [None req-5505890f-601f-4728-a979-16c777543556 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.266s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2036.720900] env[62519]: DEBUG oslo_concurrency.lockutils [req-4ef61264-ebb5-4397-a355-f753e56a8c33 req-65b91aab-6ef3-4f57-b4aa-88ace978e886 service nova] Releasing lock "refresh_cache-d921e3c0-cc52-4c5a-90bc-4059ba499ff1" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2036.721359] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 5.865s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2036.721568] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2036.721747] env[62519]: DEBUG nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62519) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2036.722605] env[62519]: DEBUG oslo_vmware.service [-] 
Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b82d42f-a6b0-434c-ad1e-98446da1555b {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2036.730403] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e154afa-f15a-49ec-971c-4b31e011ecc2 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2036.744580] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8cff8778-ece4-4445-8916-0f875cd3ef49 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2036.750744] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b92d519-0706-4b7a-a77b-918f409b2e4f {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2036.782898] env[62519]: DEBUG nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181126MB free_disk=159GB free_vcpus=48 pci_devices=None {{(pid=62519) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2036.783109] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2036.783317] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2036.785643] env[62519]: INFO nova.network.neutron [None req-5505890f-601f-4728-a979-16c777543556 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] [instance: 47439070-54d8-454c-bf1d-7a2a33d82e9a] Updating port 9a08cba9-bd48-4013-a6c0-13049c0b5fdb with attributes {'binding:host_id': 'cpu-1', 'device_owner': 'compute:nova'} [ 2036.813254] env[62519]: DEBUG oslo_vmware.api [None req-91a48691-0c95-42ef-bfb3-588e4d1b1e4c tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52351dd9-5c5f-82b6-234a-acd0574faf81, 'name': SearchDatastore_Task, 'duration_secs': 0.009614} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2036.814039] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-dff54e71-42c6-4a68-ada6-d71fcacd3910 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2036.819130] env[62519]: DEBUG oslo_vmware.api [None req-91a48691-0c95-42ef-bfb3-588e4d1b1e4c tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] Waiting for the task: (returnval){ [ 2036.819130] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]523f5fee-65cc-3185-60a4-25b6cd0ce342" [ 2036.819130] env[62519]: _type = "Task" [ 2036.819130] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2036.827817] env[62519]: DEBUG oslo_vmware.api [None req-91a48691-0c95-42ef-bfb3-588e4d1b1e4c tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]523f5fee-65cc-3185-60a4-25b6cd0ce342, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2036.831764] env[62519]: DEBUG nova.network.neutron [None req-e5503d07-b0ac-465c-85d9-1210feeacb67 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] [instance: 39c292a3-7032-4845-9ae5-2a41d13db305] Instance cache missing network info. {{(pid=62519) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2036.965259] env[62519]: DEBUG oslo_vmware.api [None req-1c1de668-722b-4bef-8122-fb6596c6c4bb tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Task: {'id': task-1803486, 'name': PowerOnVM_Task, 'duration_secs': 0.591866} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2036.965588] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-1c1de668-722b-4bef-8122-fb6596c6c4bb tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] [instance: 9bf88b2f-63f9-466b-8669-45f17319055d] Powered on the VM {{(pid=62519) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2037.042668] env[62519]: DEBUG nova.network.neutron [None req-e5503d07-b0ac-465c-85d9-1210feeacb67 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] [instance: 39c292a3-7032-4845-9ae5-2a41d13db305] Updating instance_info_cache with network_info: [{"id": "03d52b0a-655f-4661-87b2-35d1b1af19a4", "address": "fa:16:3e:3c:7b:8d", "network": {"id": "3996e7f6-f093-4152-af91-6fb77f32a1c5", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-2013009403-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "63a46158057949478e5c79fbe0d4d5d4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3093647a-bab7-4562-ada0-428725e8c0fc", "external-id": "nsx-vlan-transportzone-660", "segmentation_id": 660, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap03d52b0a-65", "ovs_interfaceid": "03d52b0a-655f-4661-87b2-35d1b1af19a4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2037.330078] env[62519]: DEBUG oslo_vmware.api [None req-91a48691-0c95-42ef-bfb3-588e4d1b1e4c tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]523f5fee-65cc-3185-60a4-25b6cd0ce342, 'name': SearchDatastore_Task, 'duration_secs': 0.00904} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2037.330078] env[62519]: DEBUG oslo_concurrency.lockutils [None req-91a48691-0c95-42ef-bfb3-588e4d1b1e4c tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] Releasing lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2037.330078] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-91a48691-0c95-42ef-bfb3-588e4d1b1e4c tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk to [datastore1] d921e3c0-cc52-4c5a-90bc-4059ba499ff1/d921e3c0-cc52-4c5a-90bc-4059ba499ff1.vmdk {{(pid=62519) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2037.330078] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-6bb68207-4839-47a7-9852-2b07cb6bbe26 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2037.336455] env[62519]: DEBUG oslo_vmware.api [None req-91a48691-0c95-42ef-bfb3-588e4d1b1e4c tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] Waiting for the task: (returnval){ [ 2037.336455] env[62519]: value = "task-1803487" [ 2037.336455] env[62519]: _type = "Task" [ 2037.336455] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2037.343802] env[62519]: DEBUG oslo_vmware.api [None req-91a48691-0c95-42ef-bfb3-588e4d1b1e4c tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] Task: {'id': task-1803487, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2037.546043] env[62519]: DEBUG oslo_concurrency.lockutils [None req-e5503d07-b0ac-465c-85d9-1210feeacb67 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Releasing lock "refresh_cache-39c292a3-7032-4845-9ae5-2a41d13db305" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2037.546043] env[62519]: DEBUG nova.compute.manager [None req-e5503d07-b0ac-465c-85d9-1210feeacb67 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] [instance: 39c292a3-7032-4845-9ae5-2a41d13db305] Instance network_info: |[{"id": "03d52b0a-655f-4661-87b2-35d1b1af19a4", "address": "fa:16:3e:3c:7b:8d", "network": {"id": "3996e7f6-f093-4152-af91-6fb77f32a1c5", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-2013009403-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "63a46158057949478e5c79fbe0d4d5d4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3093647a-bab7-4562-ada0-428725e8c0fc", "external-id": "nsx-vlan-transportzone-660", "segmentation_id": 660, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap03d52b0a-65", "ovs_interfaceid": "03d52b0a-655f-4661-87b2-35d1b1af19a4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62519) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2004}} [ 2037.546513] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-e5503d07-b0ac-465c-85d9-1210feeacb67 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] [instance: 39c292a3-7032-4845-9ae5-2a41d13db305] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:3c:7b:8d', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '3093647a-bab7-4562-ada0-428725e8c0fc', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '03d52b0a-655f-4661-87b2-35d1b1af19a4', 'vif_model': 'vmxnet3'}] {{(pid=62519) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2037.555315] env[62519]: DEBUG oslo.service.loopingcall [None req-e5503d07-b0ac-465c-85d9-1210feeacb67 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62519) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2037.555899] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 39c292a3-7032-4845-9ae5-2a41d13db305] Creating VM on the ESX host {{(pid=62519) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2037.556167] env[62519]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-946ea104-ce35-4a1e-acd0-c878316c32d7 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2037.578080] env[62519]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2037.578080] env[62519]: value = "task-1803488" [ 2037.578080] env[62519]: _type = "Task" [ 2037.578080] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2037.583445] env[62519]: DEBUG nova.compute.manager [req-bac4a26d-a201-4ebe-968d-8107a36c686b req-e846e7a0-91a6-4bd7-aa71-e3dc68f70f88 service nova] [instance: 39c292a3-7032-4845-9ae5-2a41d13db305] Received event network-vif-plugged-03d52b0a-655f-4661-87b2-35d1b1af19a4 {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 2037.583650] env[62519]: DEBUG oslo_concurrency.lockutils [req-bac4a26d-a201-4ebe-968d-8107a36c686b req-e846e7a0-91a6-4bd7-aa71-e3dc68f70f88 service nova] Acquiring lock "39c292a3-7032-4845-9ae5-2a41d13db305-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2037.583877] env[62519]: DEBUG oslo_concurrency.lockutils [req-bac4a26d-a201-4ebe-968d-8107a36c686b req-e846e7a0-91a6-4bd7-aa71-e3dc68f70f88 service nova] Lock "39c292a3-7032-4845-9ae5-2a41d13db305-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2037.584048] env[62519]: DEBUG oslo_concurrency.lockutils [req-bac4a26d-a201-4ebe-968d-8107a36c686b req-e846e7a0-91a6-4bd7-aa71-e3dc68f70f88 service nova] Lock "39c292a3-7032-4845-9ae5-2a41d13db305-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2037.584272] env[62519]: DEBUG nova.compute.manager [req-bac4a26d-a201-4ebe-968d-8107a36c686b req-e846e7a0-91a6-4bd7-aa71-e3dc68f70f88 service nova] [instance: 39c292a3-7032-4845-9ae5-2a41d13db305] No waiting events found dispatching network-vif-plugged-03d52b0a-655f-4661-87b2-35d1b1af19a4 {{(pid=62519) pop_instance_event /opt/stack/nova/nova/compute/manager.py:323}} [ 2037.584370] env[62519]: WARNING nova.compute.manager [req-bac4a26d-a201-4ebe-968d-8107a36c686b req-e846e7a0-91a6-4bd7-aa71-e3dc68f70f88 service nova] [instance: 39c292a3-7032-4845-9ae5-2a41d13db305] Received unexpected event network-vif-plugged-03d52b0a-655f-4661-87b2-35d1b1af19a4 for instance with vm_state building and task_state spawning. 
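The records just above ("No waiting events found dispatching network-vif-plugged-…" followed by the WARNING about an unexpected event for an instance still in task_state spawning) reflect the compute manager's external-event handshake: the spawn path registers a waiter for a named per-instance event, and the Neutron-triggered callback pops that waiter and signals it, falling back to a warning when nothing is registered yet. Below is a minimal sketch of that latch pattern built on plain threading primitives; it is an illustration under assumed names, not Nova's InstanceEvents implementation.

```python
# Simplified illustration of the "wait for network-vif-plugged" handshake seen
# in the log above. This is NOT Nova's InstanceEvents code; all names here are
# illustrative assumptions.
import logging
import threading

LOG = logging.getLogger(__name__)


class EventLatchRegistry:
    """Map (instance_uuid, event_name) -> threading.Event latches."""

    def __init__(self):
        self._lock = threading.Lock()
        self._latches = {}

    def prepare(self, instance_uuid, event_name):
        """Called by the spawn path before it starts waiting."""
        latch = threading.Event()
        with self._lock:
            self._latches[(instance_uuid, event_name)] = latch
        return latch

    def dispatch(self, instance_uuid, event_name):
        """Called when an external event (e.g. network-vif-plugged) arrives."""
        with self._lock:
            latch = self._latches.pop((instance_uuid, event_name), None)
        if latch is None:
            # Mirrors the WARNING in the log: nobody was waiting for this event.
            LOG.warning("Received unexpected event %s for instance %s",
                        event_name, instance_uuid)
            return False
        latch.set()
        return True


# Usage: the spawner prepares the latch, plugs the VIF, then waits; the event
# handler dispatches when Neutron reports the port is up. Values below are
# taken from the log records above purely as an example.
registry = EventLatchRegistry()
latch = registry.prepare("39c292a3-7032-4845-9ae5-2a41d13db305",
                         "network-vif-plugged-03d52b0a-655f-4661-87b2-35d1b1af19a4")
registry.dispatch("39c292a3-7032-4845-9ae5-2a41d13db305",
                  "network-vif-plugged-03d52b0a-655f-4661-87b2-35d1b1af19a4")
latch.wait(timeout=300)  # the real code times out and decides whether to fail the build
```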
[ 2037.584513] env[62519]: DEBUG nova.compute.manager [req-bac4a26d-a201-4ebe-968d-8107a36c686b req-e846e7a0-91a6-4bd7-aa71-e3dc68f70f88 service nova] [instance: 39c292a3-7032-4845-9ae5-2a41d13db305] Received event network-changed-03d52b0a-655f-4661-87b2-35d1b1af19a4 {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 2037.584665] env[62519]: DEBUG nova.compute.manager [req-bac4a26d-a201-4ebe-968d-8107a36c686b req-e846e7a0-91a6-4bd7-aa71-e3dc68f70f88 service nova] [instance: 39c292a3-7032-4845-9ae5-2a41d13db305] Refreshing instance network info cache due to event network-changed-03d52b0a-655f-4661-87b2-35d1b1af19a4. {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11628}} [ 2037.584838] env[62519]: DEBUG oslo_concurrency.lockutils [req-bac4a26d-a201-4ebe-968d-8107a36c686b req-e846e7a0-91a6-4bd7-aa71-e3dc68f70f88 service nova] Acquiring lock "refresh_cache-39c292a3-7032-4845-9ae5-2a41d13db305" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2037.584970] env[62519]: DEBUG oslo_concurrency.lockutils [req-bac4a26d-a201-4ebe-968d-8107a36c686b req-e846e7a0-91a6-4bd7-aa71-e3dc68f70f88 service nova] Acquired lock "refresh_cache-39c292a3-7032-4845-9ae5-2a41d13db305" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2037.585140] env[62519]: DEBUG nova.network.neutron [req-bac4a26d-a201-4ebe-968d-8107a36c686b req-e846e7a0-91a6-4bd7-aa71-e3dc68f70f88 service nova] [instance: 39c292a3-7032-4845-9ae5-2a41d13db305] Refreshing network info cache for port 03d52b0a-655f-4661-87b2-35d1b1af19a4 {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2037.593923] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1803488, 'name': CreateVM_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2037.818818] env[62519]: DEBUG nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Instance c9399643-7d74-4064-a721-e6d038a5cef0 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62519) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2037.819009] env[62519]: DEBUG nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Instance 9bf88b2f-63f9-466b-8669-45f17319055d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62519) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2037.819170] env[62519]: DEBUG nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Instance 90cdb4a1-6192-42e8-ad30-90faeefa4d18 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62519) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2037.819303] env[62519]: DEBUG nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Instance a18af9b7-4548-42d1-8459-508298cb96dc actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62519) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2037.819420] env[62519]: DEBUG nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Instance 4049de7f-4ace-4017-8f9a-63817de5f81c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62519) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2037.819534] env[62519]: DEBUG nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Instance 47439070-54d8-454c-bf1d-7a2a33d82e9a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62519) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2037.819667] env[62519]: DEBUG nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Instance d921e3c0-cc52-4c5a-90bc-4059ba499ff1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62519) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2037.819790] env[62519]: DEBUG nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Instance 39c292a3-7032-4845-9ae5-2a41d13db305 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62519) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2037.819987] env[62519]: DEBUG nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Total usable vcpus: 48, total allocated vcpus: 8 {{(pid=62519) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2037.820139] env[62519]: DEBUG nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=2048MB phys_disk=200GB used_disk=8GB total_vcpus=48 used_vcpus=8 pci_stats=[] {{(pid=62519) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2037.851839] env[62519]: DEBUG oslo_vmware.api [None req-91a48691-0c95-42ef-bfb3-588e4d1b1e4c tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] Task: {'id': task-1803487, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.493299} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2037.852615] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-91a48691-0c95-42ef-bfb3-588e4d1b1e4c tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk to [datastore1] d921e3c0-cc52-4c5a-90bc-4059ba499ff1/d921e3c0-cc52-4c5a-90bc-4059ba499ff1.vmdk {{(pid=62519) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2037.852836] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-91a48691-0c95-42ef-bfb3-588e4d1b1e4c tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] [instance: d921e3c0-cc52-4c5a-90bc-4059ba499ff1] Extending root virtual disk to 1048576 {{(pid=62519) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2037.853101] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-b6b77c76-cf3d-47ae-aa75-6af64ae789e8 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2037.860652] env[62519]: DEBUG oslo_vmware.api [None req-91a48691-0c95-42ef-bfb3-588e4d1b1e4c tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] Waiting for the task: (returnval){ [ 2037.860652] env[62519]: value = "task-1803489" [ 2037.860652] env[62519]: _type = "Task" [ 2037.860652] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2037.870964] env[62519]: DEBUG oslo_vmware.api [None req-91a48691-0c95-42ef-bfb3-588e4d1b1e4c tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] Task: {'id': task-1803489, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2037.940074] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7edd2ecd-9817-4457-a305-72375567b6a2 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2037.949796] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2b0f1bb-b9f7-43cd-8f46-f46b007e95f6 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2037.984116] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7bb628ca-e3f0-4568-a4b0-af82aa582eab {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2037.992007] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35177437-33e1-40c7-99e2-ab5bb5827c28 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2038.005839] env[62519]: DEBUG nova.compute.provider_tree [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Updating inventory in ProviderTree for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2038.016075] env[62519]: INFO nova.compute.manager [None req-1c1de668-722b-4bef-8122-fb6596c6c4bb tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] [instance: 9bf88b2f-63f9-466b-8669-45f17319055d] Updating instance to original state: 'active' [ 2038.089802] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1803488, 'name': CreateVM_Task, 'duration_secs': 0.432272} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2038.089959] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 39c292a3-7032-4845-9ae5-2a41d13db305] Created VM on the ESX host {{(pid=62519) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2038.090843] env[62519]: DEBUG oslo_concurrency.lockutils [None req-e5503d07-b0ac-465c-85d9-1210feeacb67 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2038.091011] env[62519]: DEBUG oslo_concurrency.lockutils [None req-e5503d07-b0ac-465c-85d9-1210feeacb67 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Acquired lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2038.091357] env[62519]: DEBUG oslo_concurrency.lockutils [None req-e5503d07-b0ac-465c-85d9-1210feeacb67 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2038.091608] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5211826f-5f98-445b-8719-0cf511e9ed12 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2038.096015] env[62519]: DEBUG oslo_vmware.api [None req-e5503d07-b0ac-465c-85d9-1210feeacb67 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Waiting for the task: (returnval){ [ 2038.096015] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]52d6cc06-9218-fdca-309d-68bc5bbfc2e5" [ 2038.096015] env[62519]: _type = "Task" [ 2038.096015] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2038.103393] env[62519]: DEBUG oslo_vmware.api [None req-e5503d07-b0ac-465c-85d9-1210feeacb67 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52d6cc06-9218-fdca-309d-68bc5bbfc2e5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2038.370089] env[62519]: DEBUG oslo_vmware.api [None req-91a48691-0c95-42ef-bfb3-588e4d1b1e4c tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] Task: {'id': task-1803489, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.070436} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2038.370426] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-91a48691-0c95-42ef-bfb3-588e4d1b1e4c tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] [instance: d921e3c0-cc52-4c5a-90bc-4059ba499ff1] Extended root virtual disk {{(pid=62519) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2038.371190] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8efdfd99-6ea0-438b-acd8-112925759f52 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2038.394295] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-91a48691-0c95-42ef-bfb3-588e4d1b1e4c tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] [instance: d921e3c0-cc52-4c5a-90bc-4059ba499ff1] Reconfiguring VM instance instance-00000078 to attach disk [datastore1] d921e3c0-cc52-4c5a-90bc-4059ba499ff1/d921e3c0-cc52-4c5a-90bc-4059ba499ff1.vmdk or device None with type sparse {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2038.394587] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-dbb0d288-51bc-4bf0-9491-38b63ed3f8fc {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2038.409376] env[62519]: DEBUG nova.network.neutron [req-bac4a26d-a201-4ebe-968d-8107a36c686b req-e846e7a0-91a6-4bd7-aa71-e3dc68f70f88 service nova] [instance: 39c292a3-7032-4845-9ae5-2a41d13db305] Updated VIF entry in instance network info cache for port 03d52b0a-655f-4661-87b2-35d1b1af19a4. 
{{(pid=62519) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2038.409690] env[62519]: DEBUG nova.network.neutron [req-bac4a26d-a201-4ebe-968d-8107a36c686b req-e846e7a0-91a6-4bd7-aa71-e3dc68f70f88 service nova] [instance: 39c292a3-7032-4845-9ae5-2a41d13db305] Updating instance_info_cache with network_info: [{"id": "03d52b0a-655f-4661-87b2-35d1b1af19a4", "address": "fa:16:3e:3c:7b:8d", "network": {"id": "3996e7f6-f093-4152-af91-6fb77f32a1c5", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-2013009403-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "63a46158057949478e5c79fbe0d4d5d4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3093647a-bab7-4562-ada0-428725e8c0fc", "external-id": "nsx-vlan-transportzone-660", "segmentation_id": 660, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap03d52b0a-65", "ovs_interfaceid": "03d52b0a-655f-4661-87b2-35d1b1af19a4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2038.416980] env[62519]: DEBUG oslo_vmware.api [None req-91a48691-0c95-42ef-bfb3-588e4d1b1e4c tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] Waiting for the task: (returnval){ [ 2038.416980] env[62519]: value = "task-1803490" [ 2038.416980] env[62519]: _type = "Task" [ 2038.416980] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2038.426358] env[62519]: DEBUG oslo_vmware.api [None req-91a48691-0c95-42ef-bfb3-588e4d1b1e4c tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] Task: {'id': task-1803490, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2038.513193] env[62519]: DEBUG oslo_concurrency.lockutils [None req-5505890f-601f-4728-a979-16c777543556 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Acquiring lock "refresh_cache-47439070-54d8-454c-bf1d-7a2a33d82e9a" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2038.513193] env[62519]: DEBUG oslo_concurrency.lockutils [None req-5505890f-601f-4728-a979-16c777543556 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Acquired lock "refresh_cache-47439070-54d8-454c-bf1d-7a2a33d82e9a" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2038.513193] env[62519]: DEBUG nova.network.neutron [None req-5505890f-601f-4728-a979-16c777543556 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] [instance: 47439070-54d8-454c-bf1d-7a2a33d82e9a] Building network info cache for instance {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2038.528800] env[62519]: ERROR nova.scheduler.client.report [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] [req-899bdef0-c4b0-4430-901e-c8c32c70086a] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID f8ca0d98-9158-4b85-ae0e-b106f966dd44. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-899bdef0-c4b0-4430-901e-c8c32c70086a"}]} [ 2038.548778] env[62519]: DEBUG nova.scheduler.client.report [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Refreshing inventories for resource provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 {{(pid=62519) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 2038.563332] env[62519]: DEBUG nova.scheduler.client.report [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Updating ProviderTree inventory for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 159, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 2038.563576] env[62519]: DEBUG nova.compute.provider_tree [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Updating inventory in ProviderTree for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 159, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2038.576218] env[62519]: DEBUG nova.scheduler.client.report [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Refreshing aggregate associations for resource provider f8ca0d98-9158-4b85-ae0e-b106f966dd44, aggregates: None {{(pid=62519) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 2038.596079] env[62519]: DEBUG nova.scheduler.client.report [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Refreshing trait associations for resource provider f8ca0d98-9158-4b85-ae0e-b106f966dd44, traits: COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NODE,COMPUTE_SAME_HOST_COLD_MIGRATE,HW_ARCH_X86_64,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_VMDK {{(pid=62519) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 2038.608243] env[62519]: DEBUG oslo_vmware.api [None req-e5503d07-b0ac-465c-85d9-1210feeacb67 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52d6cc06-9218-fdca-309d-68bc5bbfc2e5, 'name': SearchDatastore_Task, 'duration_secs': 0.039687} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2038.608638] env[62519]: DEBUG oslo_concurrency.lockutils [None req-e5503d07-b0ac-465c-85d9-1210feeacb67 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Releasing lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2038.608902] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-e5503d07-b0ac-465c-85d9-1210feeacb67 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] [instance: 39c292a3-7032-4845-9ae5-2a41d13db305] Processing image 15793716-f1d9-4a86-9030-717adf498693 {{(pid=62519) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2038.609787] env[62519]: DEBUG oslo_concurrency.lockutils [None req-e5503d07-b0ac-465c-85d9-1210feeacb67 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2038.609970] env[62519]: DEBUG oslo_concurrency.lockutils [None req-e5503d07-b0ac-465c-85d9-1210feeacb67 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Acquired lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2038.610498] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-e5503d07-b0ac-465c-85d9-1210feeacb67 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62519) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2038.610791] env[62519]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-bce814ea-8cd5-4c60-9f20-f07d729bc2fb {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2038.622114] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-e5503d07-b0ac-465c-85d9-1210feeacb67 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62519) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2038.622411] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-e5503d07-b0ac-465c-85d9-1210feeacb67 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62519) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2038.623064] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f7ade753-9b37-445f-9bfe-967ffe92302f {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2038.628601] env[62519]: DEBUG oslo_vmware.api [None req-e5503d07-b0ac-465c-85d9-1210feeacb67 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Waiting for the task: (returnval){ [ 2038.628601] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]52dc88ca-e8fd-cb0a-f3cf-0b47c4269da6" [ 2038.628601] env[62519]: _type = "Task" [ 2038.628601] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2038.638760] env[62519]: DEBUG oslo_vmware.api [None req-e5503d07-b0ac-465c-85d9-1210feeacb67 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52dc88ca-e8fd-cb0a-f3cf-0b47c4269da6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2038.704922] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b36fb09f-7467-4fad-affb-e5b5428e9edb {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2038.712588] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-546fab2a-65d4-44b1-9f33-d865d70c9bf2 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2038.743606] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-607335c0-a886-4e64-b45a-9a32feb10fea {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2038.751445] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f849f55a-52cc-4446-8d2e-e25c162b70c7 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2038.766609] env[62519]: DEBUG nova.compute.provider_tree [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Updating inventory in ProviderTree for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2038.912372] env[62519]: DEBUG oslo_concurrency.lockutils [req-bac4a26d-a201-4ebe-968d-8107a36c686b req-e846e7a0-91a6-4bd7-aa71-e3dc68f70f88 service nova] Releasing lock "refresh_cache-39c292a3-7032-4845-9ae5-2a41d13db305" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2038.927168] env[62519]: DEBUG oslo_vmware.api [None req-91a48691-0c95-42ef-bfb3-588e4d1b1e4c 
tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] Task: {'id': task-1803490, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2039.141399] env[62519]: DEBUG oslo_vmware.api [None req-e5503d07-b0ac-465c-85d9-1210feeacb67 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52dc88ca-e8fd-cb0a-f3cf-0b47c4269da6, 'name': SearchDatastore_Task, 'duration_secs': 0.025824} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2039.144374] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-63a29e5e-78ab-4c6b-a020-3ffd2f4e2665 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2039.150321] env[62519]: DEBUG oslo_vmware.api [None req-e5503d07-b0ac-465c-85d9-1210feeacb67 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Waiting for the task: (returnval){ [ 2039.150321] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]522229b5-a6ba-5887-8e51-bbb63e30f566" [ 2039.150321] env[62519]: _type = "Task" [ 2039.150321] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2039.158372] env[62519]: DEBUG oslo_vmware.api [None req-e5503d07-b0ac-465c-85d9-1210feeacb67 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]522229b5-a6ba-5887-8e51-bbb63e30f566, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2039.242290] env[62519]: DEBUG nova.network.neutron [None req-5505890f-601f-4728-a979-16c777543556 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] [instance: 47439070-54d8-454c-bf1d-7a2a33d82e9a] Updating instance_info_cache with network_info: [{"id": "9a08cba9-bd48-4013-a6c0-13049c0b5fdb", "address": "fa:16:3e:82:31:6c", "network": {"id": "1aae0533-b463-4e15-b245-e7c3c91984b5", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1664918037-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.137", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ff4895c6c47e438e8fb9fbc0ffbfdc82", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1f762954-6ca5-4da5-bf0a-5d31c51ec570", "external-id": "nsx-vlan-transportzone-930", "segmentation_id": 930, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9a08cba9-bd", "ovs_interfaceid": "9a08cba9-bd48-4013-a6c0-13049c0b5fdb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2039.295255] env[62519]: DEBUG nova.scheduler.client.report [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Updated inventory for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 with generation 177 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 2039.295496] env[62519]: DEBUG nova.compute.provider_tree [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Updating resource provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 generation from 177 to 178 during operation: update_inventory {{(pid=62519) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 2039.295647] env[62519]: DEBUG nova.compute.provider_tree [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Updating inventory in ProviderTree for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2039.427431] env[62519]: DEBUG oslo_vmware.api [None req-91a48691-0c95-42ef-bfb3-588e4d1b1e4c 
tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] Task: {'id': task-1803490, 'name': ReconfigVM_Task, 'duration_secs': 0.605537} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2039.427710] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-91a48691-0c95-42ef-bfb3-588e4d1b1e4c tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] [instance: d921e3c0-cc52-4c5a-90bc-4059ba499ff1] Reconfigured VM instance instance-00000078 to attach disk [datastore1] d921e3c0-cc52-4c5a-90bc-4059ba499ff1/d921e3c0-cc52-4c5a-90bc-4059ba499ff1.vmdk or device None with type sparse {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2039.428352] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-37ab8c5a-2242-4417-9c57-f2c37dea0f8a {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2039.434975] env[62519]: DEBUG oslo_vmware.api [None req-91a48691-0c95-42ef-bfb3-588e4d1b1e4c tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] Waiting for the task: (returnval){ [ 2039.434975] env[62519]: value = "task-1803491" [ 2039.434975] env[62519]: _type = "Task" [ 2039.434975] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2039.440486] env[62519]: DEBUG oslo_concurrency.lockutils [None req-06c34f56-a070-4eff-9572-e8e4c978f58d tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Acquiring lock "c9399643-7d74-4064-a721-e6d038a5cef0" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2039.440712] env[62519]: DEBUG oslo_concurrency.lockutils [None req-06c34f56-a070-4eff-9572-e8e4c978f58d tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Lock "c9399643-7d74-4064-a721-e6d038a5cef0" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.001s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2039.446563] env[62519]: DEBUG oslo_vmware.api [None req-91a48691-0c95-42ef-bfb3-588e4d1b1e4c tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] Task: {'id': task-1803491, 'name': Rename_Task} progress is 5%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2039.496989] env[62519]: DEBUG oslo_concurrency.lockutils [None req-f6541386-e97b-4411-bfc8-9c9090757e57 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Acquiring lock "9bf88b2f-63f9-466b-8669-45f17319055d" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2039.497358] env[62519]: DEBUG oslo_concurrency.lockutils [None req-f6541386-e97b-4411-bfc8-9c9090757e57 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Lock "9bf88b2f-63f9-466b-8669-45f17319055d" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2039.497584] env[62519]: DEBUG oslo_concurrency.lockutils [None req-f6541386-e97b-4411-bfc8-9c9090757e57 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Acquiring lock "9bf88b2f-63f9-466b-8669-45f17319055d-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2039.497792] env[62519]: DEBUG oslo_concurrency.lockutils [None req-f6541386-e97b-4411-bfc8-9c9090757e57 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Lock "9bf88b2f-63f9-466b-8669-45f17319055d-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2039.498031] env[62519]: DEBUG oslo_concurrency.lockutils [None req-f6541386-e97b-4411-bfc8-9c9090757e57 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Lock "9bf88b2f-63f9-466b-8669-45f17319055d-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2039.500168] env[62519]: INFO nova.compute.manager [None req-f6541386-e97b-4411-bfc8-9c9090757e57 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] [instance: 9bf88b2f-63f9-466b-8669-45f17319055d] Terminating instance [ 2039.612316] env[62519]: DEBUG nova.compute.manager [req-9f3ea9d1-5f10-483a-b8f0-9bdddbf30cf3 req-76f00587-a64b-414e-8ec6-33b3e4727278 service nova] [instance: 47439070-54d8-454c-bf1d-7a2a33d82e9a] Received event network-vif-plugged-9a08cba9-bd48-4013-a6c0-13049c0b5fdb {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 2039.612580] env[62519]: DEBUG oslo_concurrency.lockutils [req-9f3ea9d1-5f10-483a-b8f0-9bdddbf30cf3 req-76f00587-a64b-414e-8ec6-33b3e4727278 service nova] Acquiring lock "47439070-54d8-454c-bf1d-7a2a33d82e9a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2039.612792] env[62519]: DEBUG oslo_concurrency.lockutils [req-9f3ea9d1-5f10-483a-b8f0-9bdddbf30cf3 
req-76f00587-a64b-414e-8ec6-33b3e4727278 service nova] Lock "47439070-54d8-454c-bf1d-7a2a33d82e9a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2039.612962] env[62519]: DEBUG oslo_concurrency.lockutils [req-9f3ea9d1-5f10-483a-b8f0-9bdddbf30cf3 req-76f00587-a64b-414e-8ec6-33b3e4727278 service nova] Lock "47439070-54d8-454c-bf1d-7a2a33d82e9a-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2039.613143] env[62519]: DEBUG nova.compute.manager [req-9f3ea9d1-5f10-483a-b8f0-9bdddbf30cf3 req-76f00587-a64b-414e-8ec6-33b3e4727278 service nova] [instance: 47439070-54d8-454c-bf1d-7a2a33d82e9a] No waiting events found dispatching network-vif-plugged-9a08cba9-bd48-4013-a6c0-13049c0b5fdb {{(pid=62519) pop_instance_event /opt/stack/nova/nova/compute/manager.py:323}} [ 2039.613311] env[62519]: WARNING nova.compute.manager [req-9f3ea9d1-5f10-483a-b8f0-9bdddbf30cf3 req-76f00587-a64b-414e-8ec6-33b3e4727278 service nova] [instance: 47439070-54d8-454c-bf1d-7a2a33d82e9a] Received unexpected event network-vif-plugged-9a08cba9-bd48-4013-a6c0-13049c0b5fdb for instance with vm_state shelved_offloaded and task_state spawning. [ 2039.613496] env[62519]: DEBUG nova.compute.manager [req-9f3ea9d1-5f10-483a-b8f0-9bdddbf30cf3 req-76f00587-a64b-414e-8ec6-33b3e4727278 service nova] [instance: 47439070-54d8-454c-bf1d-7a2a33d82e9a] Received event network-changed-9a08cba9-bd48-4013-a6c0-13049c0b5fdb {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 2039.613686] env[62519]: DEBUG nova.compute.manager [req-9f3ea9d1-5f10-483a-b8f0-9bdddbf30cf3 req-76f00587-a64b-414e-8ec6-33b3e4727278 service nova] [instance: 47439070-54d8-454c-bf1d-7a2a33d82e9a] Refreshing instance network info cache due to event network-changed-9a08cba9-bd48-4013-a6c0-13049c0b5fdb. {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11628}} [ 2039.613865] env[62519]: DEBUG oslo_concurrency.lockutils [req-9f3ea9d1-5f10-483a-b8f0-9bdddbf30cf3 req-76f00587-a64b-414e-8ec6-33b3e4727278 service nova] Acquiring lock "refresh_cache-47439070-54d8-454c-bf1d-7a2a33d82e9a" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2039.661895] env[62519]: DEBUG oslo_vmware.api [None req-e5503d07-b0ac-465c-85d9-1210feeacb67 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]522229b5-a6ba-5887-8e51-bbb63e30f566, 'name': SearchDatastore_Task, 'duration_secs': 0.016115} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2039.662243] env[62519]: DEBUG oslo_concurrency.lockutils [None req-e5503d07-b0ac-465c-85d9-1210feeacb67 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Releasing lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2039.662551] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-e5503d07-b0ac-465c-85d9-1210feeacb67 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk to [datastore1] 39c292a3-7032-4845-9ae5-2a41d13db305/39c292a3-7032-4845-9ae5-2a41d13db305.vmdk {{(pid=62519) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2039.662849] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-530441ce-f1b2-46d9-9287-0467cac7ca0e {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2039.673085] env[62519]: DEBUG oslo_vmware.api [None req-e5503d07-b0ac-465c-85d9-1210feeacb67 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Waiting for the task: (returnval){ [ 2039.673085] env[62519]: value = "task-1803492" [ 2039.673085] env[62519]: _type = "Task" [ 2039.673085] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2039.685520] env[62519]: DEBUG oslo_vmware.api [None req-e5503d07-b0ac-465c-85d9-1210feeacb67 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Task: {'id': task-1803492, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2039.745162] env[62519]: DEBUG oslo_concurrency.lockutils [None req-5505890f-601f-4728-a979-16c777543556 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Releasing lock "refresh_cache-47439070-54d8-454c-bf1d-7a2a33d82e9a" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2039.747696] env[62519]: DEBUG oslo_concurrency.lockutils [req-9f3ea9d1-5f10-483a-b8f0-9bdddbf30cf3 req-76f00587-a64b-414e-8ec6-33b3e4727278 service nova] Acquired lock "refresh_cache-47439070-54d8-454c-bf1d-7a2a33d82e9a" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2039.747929] env[62519]: DEBUG nova.network.neutron [req-9f3ea9d1-5f10-483a-b8f0-9bdddbf30cf3 req-76f00587-a64b-414e-8ec6-33b3e4727278 service nova] [instance: 47439070-54d8-454c-bf1d-7a2a33d82e9a] Refreshing network info cache for port 9a08cba9-bd48-4013-a6c0-13049c0b5fdb {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2039.774015] env[62519]: DEBUG nova.virt.hardware [None req-5505890f-601f-4728-a979-16c777543556 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T07:56:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='b28cf9842573a1b04100e8429c2e4834',container_format='bare',created_at=2024-12-12T08:21:16Z,direct_url=,disk_format='vmdk',id=f3e835e2-df44-417d-9a40-88b6c8346b2f,min_disk=1,min_ram=0,name='tempest-AttachVolumeShelveTestJSON-server-1060464150-shelved',owner='ff4895c6c47e438e8fb9fbc0ffbfdc82',properties=ImageMetaProps,protected=,size=31664640,status='active',tags=,updated_at=2024-12-12T08:21:33Z,virtual_size=,visibility=), allow threads: False {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 2039.774304] env[62519]: DEBUG nova.virt.hardware [None req-5505890f-601f-4728-a979-16c777543556 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Flavor limits 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2039.774525] env[62519]: DEBUG nova.virt.hardware [None req-5505890f-601f-4728-a979-16c777543556 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Image limits 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 2039.774730] env[62519]: DEBUG nova.virt.hardware [None req-5505890f-601f-4728-a979-16c777543556 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Flavor pref 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2039.774881] env[62519]: DEBUG nova.virt.hardware [None req-5505890f-601f-4728-a979-16c777543556 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Image pref 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 2039.775046] 
env[62519]: DEBUG nova.virt.hardware [None req-5505890f-601f-4728-a979-16c777543556 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 2039.775289] env[62519]: DEBUG nova.virt.hardware [None req-5505890f-601f-4728-a979-16c777543556 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 2039.775466] env[62519]: DEBUG nova.virt.hardware [None req-5505890f-601f-4728-a979-16c777543556 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62519) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 2039.775666] env[62519]: DEBUG nova.virt.hardware [None req-5505890f-601f-4728-a979-16c777543556 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Got 1 possible topologies {{(pid=62519) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 2039.775832] env[62519]: DEBUG nova.virt.hardware [None req-5505890f-601f-4728-a979-16c777543556 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 2039.776016] env[62519]: DEBUG nova.virt.hardware [None req-5505890f-601f-4728-a979-16c777543556 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 2039.777147] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a417fa3-b2f9-4c9a-80df-c73ce1272f76 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2039.785259] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24cf62d9-e78c-4f22-bc96-b9bc7362776d {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2039.799327] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-5505890f-601f-4728-a979-16c777543556 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] [instance: 47439070-54d8-454c-bf1d-7a2a33d82e9a] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:82:31:6c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '1f762954-6ca5-4da5-bf0a-5d31c51ec570', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '9a08cba9-bd48-4013-a6c0-13049c0b5fdb', 'vif_model': 'vmxnet3'}] {{(pid=62519) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2039.806600] env[62519]: DEBUG oslo.service.loopingcall [None req-5505890f-601f-4728-a979-16c777543556 tempest-AttachVolumeShelveTestJSON-1488627157 
tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62519) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2039.807231] env[62519]: DEBUG nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62519) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2039.807413] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 3.024s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2039.807602] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 47439070-54d8-454c-bf1d-7a2a33d82e9a] Creating VM on the ESX host {{(pid=62519) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2039.807892] env[62519]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-114904de-5497-4c6d-a524-c00baaed222f {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2039.827625] env[62519]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2039.827625] env[62519]: value = "task-1803493" [ 2039.827625] env[62519]: _type = "Task" [ 2039.827625] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2039.835013] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1803493, 'name': CreateVM_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2039.946214] env[62519]: DEBUG nova.compute.utils [None req-06c34f56-a070-4eff-9572-e8e4c978f58d tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Using /dev/sd instead of None {{(pid=62519) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2039.947550] env[62519]: DEBUG oslo_vmware.api [None req-91a48691-0c95-42ef-bfb3-588e4d1b1e4c tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] Task: {'id': task-1803491, 'name': Rename_Task, 'duration_secs': 0.265862} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2039.948036] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-91a48691-0c95-42ef-bfb3-588e4d1b1e4c tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] [instance: d921e3c0-cc52-4c5a-90bc-4059ba499ff1] Powering on the VM {{(pid=62519) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2039.948297] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-bfa04447-eea9-4139-a350-eb5d7227abe8 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2039.954831] env[62519]: DEBUG oslo_vmware.api [None req-91a48691-0c95-42ef-bfb3-588e4d1b1e4c tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] Waiting for the task: (returnval){ [ 2039.954831] env[62519]: value = "task-1803494" [ 2039.954831] env[62519]: _type = "Task" [ 2039.954831] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2039.965027] env[62519]: DEBUG oslo_vmware.api [None req-91a48691-0c95-42ef-bfb3-588e4d1b1e4c tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] Task: {'id': task-1803494, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2040.004431] env[62519]: DEBUG nova.compute.manager [None req-f6541386-e97b-4411-bfc8-9c9090757e57 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] [instance: 9bf88b2f-63f9-466b-8669-45f17319055d] Start destroying the instance on the hypervisor. {{(pid=62519) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3166}} [ 2040.004711] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-f6541386-e97b-4411-bfc8-9c9090757e57 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] [instance: 9bf88b2f-63f9-466b-8669-45f17319055d] Powering off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2040.005020] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-0f2136db-b26e-4f8b-8f58-b851e548429d {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2040.013592] env[62519]: DEBUG oslo_vmware.api [None req-f6541386-e97b-4411-bfc8-9c9090757e57 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Waiting for the task: (returnval){ [ 2040.013592] env[62519]: value = "task-1803495" [ 2040.013592] env[62519]: _type = "Task" [ 2040.013592] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2040.022425] env[62519]: DEBUG oslo_vmware.api [None req-f6541386-e97b-4411-bfc8-9c9090757e57 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Task: {'id': task-1803495, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2040.183474] env[62519]: DEBUG oslo_vmware.api [None req-e5503d07-b0ac-465c-85d9-1210feeacb67 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Task: {'id': task-1803492, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2040.338705] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1803493, 'name': CreateVM_Task} progress is 99%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2040.450868] env[62519]: DEBUG oslo_concurrency.lockutils [None req-06c34f56-a070-4eff-9572-e8e4c978f58d tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Lock "c9399643-7d74-4064-a721-e6d038a5cef0" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name.<locals>.do_reserve" :: held 1.010s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2040.465359] env[62519]: DEBUG oslo_vmware.api [None req-91a48691-0c95-42ef-bfb3-588e4d1b1e4c tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] Task: {'id': task-1803494, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2040.490099] env[62519]: DEBUG nova.network.neutron [req-9f3ea9d1-5f10-483a-b8f0-9bdddbf30cf3 req-76f00587-a64b-414e-8ec6-33b3e4727278 service nova] [instance: 47439070-54d8-454c-bf1d-7a2a33d82e9a] Updated VIF entry in instance network info cache for port 9a08cba9-bd48-4013-a6c0-13049c0b5fdb. 
{{(pid=62519) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2040.490476] env[62519]: DEBUG nova.network.neutron [req-9f3ea9d1-5f10-483a-b8f0-9bdddbf30cf3 req-76f00587-a64b-414e-8ec6-33b3e4727278 service nova] [instance: 47439070-54d8-454c-bf1d-7a2a33d82e9a] Updating instance_info_cache with network_info: [{"id": "9a08cba9-bd48-4013-a6c0-13049c0b5fdb", "address": "fa:16:3e:82:31:6c", "network": {"id": "1aae0533-b463-4e15-b245-e7c3c91984b5", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1664918037-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.137", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ff4895c6c47e438e8fb9fbc0ffbfdc82", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1f762954-6ca5-4da5-bf0a-5d31c51ec570", "external-id": "nsx-vlan-transportzone-930", "segmentation_id": 930, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9a08cba9-bd", "ovs_interfaceid": "9a08cba9-bd48-4013-a6c0-13049c0b5fdb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2040.525627] env[62519]: DEBUG oslo_vmware.api [None req-f6541386-e97b-4411-bfc8-9c9090757e57 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Task: {'id': task-1803495, 'name': PowerOffVM_Task, 'duration_secs': 0.395789} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2040.526051] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-f6541386-e97b-4411-bfc8-9c9090757e57 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] [instance: 9bf88b2f-63f9-466b-8669-45f17319055d] Powered off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2040.526323] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-f6541386-e97b-4411-bfc8-9c9090757e57 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] [instance: 9bf88b2f-63f9-466b-8669-45f17319055d] Volume detach. 
Driver type: vmdk {{(pid=62519) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 2040.526596] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-f6541386-e97b-4411-bfc8-9c9090757e57 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] [instance: 9bf88b2f-63f9-466b-8669-45f17319055d] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-373871', 'volume_id': 'a0f01d64-f203-4e5a-88dd-8cca1d0cd19a', 'name': 'volume-a0f01d64-f203-4e5a-88dd-8cca1d0cd19a', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'attaching', 'instance': '9bf88b2f-63f9-466b-8669-45f17319055d', 'attached_at': '2024-12-12T08:21:55.000000', 'detached_at': '', 'volume_id': 'a0f01d64-f203-4e5a-88dd-8cca1d0cd19a', 'serial': 'a0f01d64-f203-4e5a-88dd-8cca1d0cd19a'} {{(pid=62519) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 2040.527430] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60d78d8d-49da-41ba-ab91-30c65b0fcc18 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2040.550200] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a248d76-8091-4ead-9b41-a1876764d6e2 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2040.557033] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-efce7356-16ab-4866-bc71-6452e60a1101 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2040.578147] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-994e6d02-84e5-4299-83ab-9963a1b684b2 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2040.593283] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-f6541386-e97b-4411-bfc8-9c9090757e57 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] The volume has not been displaced from its original location: [datastore1] volume-a0f01d64-f203-4e5a-88dd-8cca1d0cd19a/volume-a0f01d64-f203-4e5a-88dd-8cca1d0cd19a.vmdk. No consolidation needed. 
{{(pid=62519) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 2040.598451] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-f6541386-e97b-4411-bfc8-9c9090757e57 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] [instance: 9bf88b2f-63f9-466b-8669-45f17319055d] Reconfiguring VM instance instance-00000071 to detach disk 2001 {{(pid=62519) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 2040.598845] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1398e39c-03bb-419b-b09c-6a64457cabbf {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2040.615831] env[62519]: DEBUG oslo_vmware.api [None req-f6541386-e97b-4411-bfc8-9c9090757e57 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Waiting for the task: (returnval){ [ 2040.615831] env[62519]: value = "task-1803496" [ 2040.615831] env[62519]: _type = "Task" [ 2040.615831] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2040.623691] env[62519]: DEBUG oslo_vmware.api [None req-f6541386-e97b-4411-bfc8-9c9090757e57 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Task: {'id': task-1803496, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2040.683993] env[62519]: DEBUG oslo_vmware.api [None req-e5503d07-b0ac-465c-85d9-1210feeacb67 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Task: {'id': task-1803492, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.630526} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2040.684266] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-e5503d07-b0ac-465c-85d9-1210feeacb67 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk to [datastore1] 39c292a3-7032-4845-9ae5-2a41d13db305/39c292a3-7032-4845-9ae5-2a41d13db305.vmdk {{(pid=62519) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2040.684479] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-e5503d07-b0ac-465c-85d9-1210feeacb67 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] [instance: 39c292a3-7032-4845-9ae5-2a41d13db305] Extending root virtual disk to 1048576 {{(pid=62519) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2040.684721] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-86935215-b41c-4621-a1d4-6ac9fc663c2b {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2040.691226] env[62519]: DEBUG oslo_vmware.api [None req-e5503d07-b0ac-465c-85d9-1210feeacb67 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Waiting for the task: (returnval){ [ 2040.691226] env[62519]: value = "task-1803497" [ 2040.691226] env[62519]: _type = "Task" [ 2040.691226] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2040.698442] env[62519]: DEBUG oslo_vmware.api [None req-e5503d07-b0ac-465c-85d9-1210feeacb67 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Task: {'id': task-1803497, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2040.837577] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1803493, 'name': CreateVM_Task, 'duration_secs': 0.558534} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2040.838989] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 47439070-54d8-454c-bf1d-7a2a33d82e9a] Created VM on the ESX host {{(pid=62519) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2040.838989] env[62519]: DEBUG oslo_concurrency.lockutils [None req-5505890f-601f-4728-a979-16c777543556 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f3e835e2-df44-417d-9a40-88b6c8346b2f" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2040.838989] env[62519]: DEBUG oslo_concurrency.lockutils [None req-5505890f-601f-4728-a979-16c777543556 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f3e835e2-df44-417d-9a40-88b6c8346b2f" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2040.839203] env[62519]: DEBUG oslo_concurrency.lockutils [None req-5505890f-601f-4728-a979-16c777543556 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f3e835e2-df44-417d-9a40-88b6c8346b2f" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2040.839465] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-51f713ad-a428-4019-ab21-52ae8d11d601 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2040.844440] env[62519]: DEBUG oslo_vmware.api [None req-5505890f-601f-4728-a979-16c777543556 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Waiting for the task: (returnval){ [ 2040.844440] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]524bb289-f0bb-b5cb-d561-89535b374b37" [ 2040.844440] env[62519]: _type = "Task" [ 2040.844440] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2040.851869] env[62519]: DEBUG oslo_vmware.api [None req-5505890f-601f-4728-a979-16c777543556 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]524bb289-f0bb-b5cb-d561-89535b374b37, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2040.970106] env[62519]: DEBUG oslo_vmware.api [None req-91a48691-0c95-42ef-bfb3-588e4d1b1e4c tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] Task: {'id': task-1803494, 'name': PowerOnVM_Task, 'duration_secs': 0.657438} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2040.970412] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-91a48691-0c95-42ef-bfb3-588e4d1b1e4c tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] [instance: d921e3c0-cc52-4c5a-90bc-4059ba499ff1] Powered on the VM {{(pid=62519) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2040.970669] env[62519]: INFO nova.compute.manager [None req-91a48691-0c95-42ef-bfb3-588e4d1b1e4c tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] [instance: d921e3c0-cc52-4c5a-90bc-4059ba499ff1] Took 7.69 seconds to spawn the instance on the hypervisor. [ 2040.971302] env[62519]: DEBUG nova.compute.manager [None req-91a48691-0c95-42ef-bfb3-588e4d1b1e4c tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] [instance: d921e3c0-cc52-4c5a-90bc-4059ba499ff1] Checking state {{(pid=62519) _get_power_state /opt/stack/nova/nova/compute/manager.py:1799}} [ 2040.971731] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38e03a82-2017-46dc-8519-2342cc98dd9a {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2040.993516] env[62519]: DEBUG oslo_concurrency.lockutils [req-9f3ea9d1-5f10-483a-b8f0-9bdddbf30cf3 req-76f00587-a64b-414e-8ec6-33b3e4727278 service nova] Releasing lock "refresh_cache-47439070-54d8-454c-bf1d-7a2a33d82e9a" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2041.127249] env[62519]: DEBUG oslo_vmware.api [None req-f6541386-e97b-4411-bfc8-9c9090757e57 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Task: {'id': task-1803496, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2041.201022] env[62519]: DEBUG oslo_vmware.api [None req-e5503d07-b0ac-465c-85d9-1210feeacb67 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Task: {'id': task-1803497, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.066935} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2041.201414] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-e5503d07-b0ac-465c-85d9-1210feeacb67 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] [instance: 39c292a3-7032-4845-9ae5-2a41d13db305] Extended root virtual disk {{(pid=62519) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2041.202114] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04d4884f-6c3e-4938-a75d-39592914a6c3 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2041.229929] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-e5503d07-b0ac-465c-85d9-1210feeacb67 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] [instance: 39c292a3-7032-4845-9ae5-2a41d13db305] Reconfiguring VM instance instance-00000079 to attach disk [datastore1] 39c292a3-7032-4845-9ae5-2a41d13db305/39c292a3-7032-4845-9ae5-2a41d13db305.vmdk or device None with type sparse {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2041.230298] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ab7204db-e2c4-4c82-9fc3-89da5580d25b {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2041.261035] env[62519]: DEBUG oslo_vmware.api [None req-e5503d07-b0ac-465c-85d9-1210feeacb67 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Waiting for the task: (returnval){ [ 2041.261035] env[62519]: value = "task-1803498" [ 2041.261035] env[62519]: _type = "Task" [ 2041.261035] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2041.268751] env[62519]: DEBUG oslo_vmware.api [None req-e5503d07-b0ac-465c-85d9-1210feeacb67 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Task: {'id': task-1803498, 'name': ReconfigVM_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2041.353947] env[62519]: DEBUG oslo_concurrency.lockutils [None req-5505890f-601f-4728-a979-16c777543556 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f3e835e2-df44-417d-9a40-88b6c8346b2f" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2041.354229] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-5505890f-601f-4728-a979-16c777543556 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] [instance: 47439070-54d8-454c-bf1d-7a2a33d82e9a] Processing image f3e835e2-df44-417d-9a40-88b6c8346b2f {{(pid=62519) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2041.354471] env[62519]: DEBUG oslo_concurrency.lockutils [None req-5505890f-601f-4728-a979-16c777543556 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f3e835e2-df44-417d-9a40-88b6c8346b2f/f3e835e2-df44-417d-9a40-88b6c8346b2f.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2041.354616] env[62519]: DEBUG oslo_concurrency.lockutils [None req-5505890f-601f-4728-a979-16c777543556 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f3e835e2-df44-417d-9a40-88b6c8346b2f/f3e835e2-df44-417d-9a40-88b6c8346b2f.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2041.354801] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-5505890f-601f-4728-a979-16c777543556 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62519) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2041.355048] env[62519]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-5703201a-a13e-49e2-851c-49518ed3bc0c {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2041.363288] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-5505890f-601f-4728-a979-16c777543556 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62519) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2041.363460] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-5505890f-601f-4728-a979-16c777543556 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62519) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2041.364126] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e7625bcc-c7a5-4a00-9176-1b32cddeaf25 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2041.368887] env[62519]: DEBUG oslo_vmware.api [None req-5505890f-601f-4728-a979-16c777543556 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Waiting for the task: (returnval){ [ 2041.368887] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]5240bc38-7401-0017-1032-9d288b73edf7" [ 2041.368887] env[62519]: _type = "Task" [ 2041.368887] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2041.376293] env[62519]: DEBUG oslo_vmware.api [None req-5505890f-601f-4728-a979-16c777543556 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]5240bc38-7401-0017-1032-9d288b73edf7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2041.490639] env[62519]: INFO nova.compute.manager [None req-91a48691-0c95-42ef-bfb3-588e4d1b1e4c tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] [instance: d921e3c0-cc52-4c5a-90bc-4059ba499ff1] Took 12.42 seconds to build instance. [ 2041.514190] env[62519]: DEBUG oslo_concurrency.lockutils [None req-06c34f56-a070-4eff-9572-e8e4c978f58d tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Acquiring lock "c9399643-7d74-4064-a721-e6d038a5cef0" by "nova.compute.manager.ComputeManager.attach_volume.<locals>.do_attach_volume" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2041.514438] env[62519]: DEBUG oslo_concurrency.lockutils [None req-06c34f56-a070-4eff-9572-e8e4c978f58d tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Lock "c9399643-7d74-4064-a721-e6d038a5cef0" acquired by "nova.compute.manager.ComputeManager.attach_volume.<locals>.do_attach_volume" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2041.514707] env[62519]: INFO nova.compute.manager [None req-06c34f56-a070-4eff-9572-e8e4c978f58d tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] [instance: c9399643-7d74-4064-a721-e6d038a5cef0] Attaching volume d1f2e525-3506-4f2f-9d8b-8880ea1acbdc to /dev/sdb [ 2041.544753] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e676fce6-d109-4364-b4c7-ec84938d6791 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2041.551225] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5742f39-78db-4b28-a35c-d1d2e474d80a {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2041.564745] env[62519]: DEBUG nova.virt.block_device [None req-06c34f56-a070-4eff-9572-e8e4c978f58d tempest-AttachVolumeNegativeTest-889445199 
tempest-AttachVolumeNegativeTest-889445199-project-member] [instance: c9399643-7d74-4064-a721-e6d038a5cef0] Updating existing volume attachment record: 0235f50e-fc58-4686-9b6f-6f175bd3d2bb {{(pid=62519) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 2041.626430] env[62519]: DEBUG oslo_vmware.api [None req-f6541386-e97b-4411-bfc8-9c9090757e57 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Task: {'id': task-1803496, 'name': ReconfigVM_Task, 'duration_secs': 0.686522} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2041.626719] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-f6541386-e97b-4411-bfc8-9c9090757e57 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] [instance: 9bf88b2f-63f9-466b-8669-45f17319055d] Reconfigured VM instance instance-00000071 to detach disk 2001 {{(pid=62519) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 2041.631508] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1a572554-f595-4967-8583-1f777f276bda {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2041.648811] env[62519]: DEBUG oslo_vmware.api [None req-f6541386-e97b-4411-bfc8-9c9090757e57 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Waiting for the task: (returnval){ [ 2041.648811] env[62519]: value = "task-1803499" [ 2041.648811] env[62519]: _type = "Task" [ 2041.648811] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2041.658987] env[62519]: DEBUG oslo_vmware.api [None req-f6541386-e97b-4411-bfc8-9c9090757e57 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Task: {'id': task-1803499, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2041.770750] env[62519]: DEBUG oslo_vmware.api [None req-e5503d07-b0ac-465c-85d9-1210feeacb67 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Task: {'id': task-1803498, 'name': ReconfigVM_Task, 'duration_secs': 0.294258} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2041.771080] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-e5503d07-b0ac-465c-85d9-1210feeacb67 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] [instance: 39c292a3-7032-4845-9ae5-2a41d13db305] Reconfigured VM instance instance-00000079 to attach disk [datastore1] 39c292a3-7032-4845-9ae5-2a41d13db305/39c292a3-7032-4845-9ae5-2a41d13db305.vmdk or device None with type sparse {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2041.771774] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-22317723-7c8d-4fc3-9f58-53f747b871e4 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2041.777734] env[62519]: DEBUG oslo_vmware.api [None req-e5503d07-b0ac-465c-85d9-1210feeacb67 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Waiting for the task: (returnval){ [ 2041.777734] env[62519]: value = "task-1803501" [ 2041.777734] env[62519]: _type = "Task" [ 2041.777734] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2041.785922] env[62519]: DEBUG oslo_vmware.api [None req-e5503d07-b0ac-465c-85d9-1210feeacb67 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Task: {'id': task-1803501, 'name': Rename_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2041.879490] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-5505890f-601f-4728-a979-16c777543556 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] [instance: 47439070-54d8-454c-bf1d-7a2a33d82e9a] Preparing fetch location {{(pid=62519) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2041.879802] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-5505890f-601f-4728-a979-16c777543556 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] [instance: 47439070-54d8-454c-bf1d-7a2a33d82e9a] Fetch image to [datastore1] OSTACK_IMG_5c6189da-0c78-44a3-ab73-d18ac363ed5e/OSTACK_IMG_5c6189da-0c78-44a3-ab73-d18ac363ed5e.vmdk {{(pid=62519) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2041.879991] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-5505890f-601f-4728-a979-16c777543556 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] [instance: 47439070-54d8-454c-bf1d-7a2a33d82e9a] Downloading stream optimized image f3e835e2-df44-417d-9a40-88b6c8346b2f to [datastore1] OSTACK_IMG_5c6189da-0c78-44a3-ab73-d18ac363ed5e/OSTACK_IMG_5c6189da-0c78-44a3-ab73-d18ac363ed5e.vmdk on the data store datastore1 as vApp {{(pid=62519) _fetch_image_as_vapp /opt/stack/nova/nova/virt/vmwareapi/vmops.py:437}} [ 2041.880105] env[62519]: DEBUG nova.virt.vmwareapi.images [None req-5505890f-601f-4728-a979-16c777543556 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] [instance: 47439070-54d8-454c-bf1d-7a2a33d82e9a] Downloading image file data f3e835e2-df44-417d-9a40-88b6c8346b2f to the ESX as VM named 
'OSTACK_IMG_5c6189da-0c78-44a3-ab73-d18ac363ed5e' {{(pid=62519) fetch_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:336}} [ 2041.975089] env[62519]: DEBUG oslo_vmware.rw_handles [None req-5505890f-601f-4728-a979-16c777543556 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Creating HttpNfcLease lease for vApp import into resource pool: (val){ [ 2041.975089] env[62519]: value = "resgroup-9" [ 2041.975089] env[62519]: _type = "ResourcePool" [ 2041.975089] env[62519]: }. {{(pid=62519) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:453}} [ 2041.975426] env[62519]: DEBUG oslo_vmware.service [-] Invoking ResourcePool.ImportVApp with opID=oslo.vmware-aef62112-7cae-4dff-9e65-a3999c649fed {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2041.997162] env[62519]: DEBUG oslo_vmware.rw_handles [None req-5505890f-601f-4728-a979-16c777543556 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Lease: (returnval){ [ 2041.997162] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]52e06c8d-f393-5e81-459e-32d7c10f7ec2" [ 2041.997162] env[62519]: _type = "HttpNfcLease" [ 2041.997162] env[62519]: } obtained for vApp import into resource pool (val){ [ 2041.997162] env[62519]: value = "resgroup-9" [ 2041.997162] env[62519]: _type = "ResourcePool" [ 2041.997162] env[62519]: }. {{(pid=62519) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:461}} [ 2041.997493] env[62519]: DEBUG oslo_vmware.api [None req-5505890f-601f-4728-a979-16c777543556 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Waiting for the lease: (returnval){ [ 2041.997493] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]52e06c8d-f393-5e81-459e-32d7c10f7ec2" [ 2041.997493] env[62519]: _type = "HttpNfcLease" [ 2041.997493] env[62519]: } to be ready. {{(pid=62519) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 2042.004574] env[62519]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 2042.004574] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]52e06c8d-f393-5e81-459e-32d7c10f7ec2" [ 2042.004574] env[62519]: _type = "HttpNfcLease" [ 2042.004574] env[62519]: } is initializing. {{(pid=62519) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 2042.159121] env[62519]: DEBUG oslo_vmware.api [None req-f6541386-e97b-4411-bfc8-9c9090757e57 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Task: {'id': task-1803499, 'name': ReconfigVM_Task, 'duration_secs': 0.248759} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2042.159486] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-f6541386-e97b-4411-bfc8-9c9090757e57 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] [instance: 9bf88b2f-63f9-466b-8669-45f17319055d] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-373871', 'volume_id': 'a0f01d64-f203-4e5a-88dd-8cca1d0cd19a', 'name': 'volume-a0f01d64-f203-4e5a-88dd-8cca1d0cd19a', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'attaching', 'instance': '9bf88b2f-63f9-466b-8669-45f17319055d', 'attached_at': '2024-12-12T08:21:55.000000', 'detached_at': '', 'volume_id': 'a0f01d64-f203-4e5a-88dd-8cca1d0cd19a', 'serial': 'a0f01d64-f203-4e5a-88dd-8cca1d0cd19a'} {{(pid=62519) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 2042.159831] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-f6541386-e97b-4411-bfc8-9c9090757e57 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] [instance: 9bf88b2f-63f9-466b-8669-45f17319055d] Destroying instance {{(pid=62519) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2042.160644] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-529c1c56-1d56-41c3-9010-20ec8ded59ad {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2042.167508] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-f6541386-e97b-4411-bfc8-9c9090757e57 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] [instance: 9bf88b2f-63f9-466b-8669-45f17319055d] Unregistering the VM {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2042.167749] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-bfcc7a4c-ac6e-43a3-afa9-64fb84fc4ea2 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2042.243272] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-f6541386-e97b-4411-bfc8-9c9090757e57 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] [instance: 9bf88b2f-63f9-466b-8669-45f17319055d] Unregistered the VM {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2042.243634] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-f6541386-e97b-4411-bfc8-9c9090757e57 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] [instance: 9bf88b2f-63f9-466b-8669-45f17319055d] Deleting contents of the VM from datastore datastore1 {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2042.243821] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-f6541386-e97b-4411-bfc8-9c9090757e57 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Deleting the datastore file [datastore1] 9bf88b2f-63f9-466b-8669-45f17319055d {{(pid=62519) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2042.244162] env[62519]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f35b05df-b666-4b89-9f1a-70771ac80a8a {{(pid=62519) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2042.250752] env[62519]: DEBUG oslo_vmware.api [None req-f6541386-e97b-4411-bfc8-9c9090757e57 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Waiting for the task: (returnval){ [ 2042.250752] env[62519]: value = "task-1803504" [ 2042.250752] env[62519]: _type = "Task" [ 2042.250752] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2042.259795] env[62519]: DEBUG oslo_vmware.api [None req-f6541386-e97b-4411-bfc8-9c9090757e57 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Task: {'id': task-1803504, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2042.286692] env[62519]: DEBUG oslo_vmware.api [None req-e5503d07-b0ac-465c-85d9-1210feeacb67 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Task: {'id': task-1803501, 'name': Rename_Task, 'duration_secs': 0.139771} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2042.286945] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-e5503d07-b0ac-465c-85d9-1210feeacb67 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] [instance: 39c292a3-7032-4845-9ae5-2a41d13db305] Powering on the VM {{(pid=62519) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2042.287200] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-fb8e41db-fb0e-4141-9801-cb2dc935fc39 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2042.293484] env[62519]: DEBUG oslo_vmware.api [None req-e5503d07-b0ac-465c-85d9-1210feeacb67 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Waiting for the task: (returnval){ [ 2042.293484] env[62519]: value = "task-1803505" [ 2042.293484] env[62519]: _type = "Task" [ 2042.293484] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2042.302190] env[62519]: DEBUG oslo_vmware.api [None req-e5503d07-b0ac-465c-85d9-1210feeacb67 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Task: {'id': task-1803505, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2042.498651] env[62519]: INFO nova.compute.manager [None req-cd6bd682-09f6-4f89-90fc-d01e5c61d29a tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] [instance: d921e3c0-cc52-4c5a-90bc-4059ba499ff1] Rescuing [ 2042.498988] env[62519]: DEBUG oslo_concurrency.lockutils [None req-cd6bd682-09f6-4f89-90fc-d01e5c61d29a tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] Acquiring lock "refresh_cache-d921e3c0-cc52-4c5a-90bc-4059ba499ff1" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2042.499105] env[62519]: DEBUG oslo_concurrency.lockutils [None req-cd6bd682-09f6-4f89-90fc-d01e5c61d29a tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] Acquired lock "refresh_cache-d921e3c0-cc52-4c5a-90bc-4059ba499ff1" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2042.499343] env[62519]: DEBUG nova.network.neutron [None req-cd6bd682-09f6-4f89-90fc-d01e5c61d29a tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] [instance: d921e3c0-cc52-4c5a-90bc-4059ba499ff1] Building network info cache for instance {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2042.507206] env[62519]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 2042.507206] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]52e06c8d-f393-5e81-459e-32d7c10f7ec2" [ 2042.507206] env[62519]: _type = "HttpNfcLease" [ 2042.507206] env[62519]: } is initializing. {{(pid=62519) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 2042.760628] env[62519]: DEBUG oslo_vmware.api [None req-f6541386-e97b-4411-bfc8-9c9090757e57 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Task: {'id': task-1803504, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.247603} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2042.760829] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-f6541386-e97b-4411-bfc8-9c9090757e57 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Deleted the datastore file {{(pid=62519) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2042.761018] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-f6541386-e97b-4411-bfc8-9c9090757e57 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] [instance: 9bf88b2f-63f9-466b-8669-45f17319055d] Deleted contents of the VM from datastore datastore1 {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2042.761297] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-f6541386-e97b-4411-bfc8-9c9090757e57 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] [instance: 9bf88b2f-63f9-466b-8669-45f17319055d] Instance destroyed {{(pid=62519) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2042.761492] env[62519]: INFO nova.compute.manager [None req-f6541386-e97b-4411-bfc8-9c9090757e57 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] [instance: 9bf88b2f-63f9-466b-8669-45f17319055d] Took 2.76 seconds to destroy the instance on the hypervisor. [ 2042.761728] env[62519]: DEBUG oslo.service.loopingcall [None req-f6541386-e97b-4411-bfc8-9c9090757e57 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=62519) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2042.761914] env[62519]: DEBUG nova.compute.manager [-] [instance: 9bf88b2f-63f9-466b-8669-45f17319055d] Deallocating network for instance {{(pid=62519) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2297}} [ 2042.762018] env[62519]: DEBUG nova.network.neutron [-] [instance: 9bf88b2f-63f9-466b-8669-45f17319055d] deallocate_for_instance() {{(pid=62519) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2042.803389] env[62519]: DEBUG oslo_vmware.api [None req-e5503d07-b0ac-465c-85d9-1210feeacb67 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Task: {'id': task-1803505, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2043.002176] env[62519]: DEBUG oslo_concurrency.lockutils [None req-91a48691-0c95-42ef-bfb3-588e4d1b1e4c tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] Lock "d921e3c0-cc52-4c5a-90bc-4059ba499ff1" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 14.940s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2043.011206] env[62519]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 2043.011206] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]52e06c8d-f393-5e81-459e-32d7c10f7ec2" [ 2043.011206] env[62519]: _type = "HttpNfcLease" [ 2043.011206] env[62519]: } is initializing. 
{{(pid=62519) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 2043.305349] env[62519]: DEBUG oslo_vmware.api [None req-e5503d07-b0ac-465c-85d9-1210feeacb67 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Task: {'id': task-1803505, 'name': PowerOnVM_Task, 'duration_secs': 0.848678} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2043.305660] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-e5503d07-b0ac-465c-85d9-1210feeacb67 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] [instance: 39c292a3-7032-4845-9ae5-2a41d13db305] Powered on the VM {{(pid=62519) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2043.305806] env[62519]: INFO nova.compute.manager [None req-e5503d07-b0ac-465c-85d9-1210feeacb67 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] [instance: 39c292a3-7032-4845-9ae5-2a41d13db305] Took 7.83 seconds to spawn the instance on the hypervisor. [ 2043.305981] env[62519]: DEBUG nova.compute.manager [None req-e5503d07-b0ac-465c-85d9-1210feeacb67 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] [instance: 39c292a3-7032-4845-9ae5-2a41d13db305] Checking state {{(pid=62519) _get_power_state /opt/stack/nova/nova/compute/manager.py:1799}} [ 2043.306934] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e465bc1-01ca-4ebd-b95d-4fa91bf2a013 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2043.320386] env[62519]: DEBUG nova.network.neutron [None req-cd6bd682-09f6-4f89-90fc-d01e5c61d29a tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] [instance: d921e3c0-cc52-4c5a-90bc-4059ba499ff1] Updating instance_info_cache with network_info: [{"id": "c0b67f8d-3fcb-4162-bf30-5b5b4ce5f5a6", "address": "fa:16:3e:a2:ac:b7", "network": {"id": "8bab7e32-d01d-4dff-8b50-f49f6a04314c", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-1759123998-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "8eb0014c990b48ba8f9cfc5629d72f78", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "61a172ee-af3f-473e-b12a-3fee5bf39c8d", "external-id": "nsx-vlan-transportzone-997", "segmentation_id": 997, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc0b67f8d-3f", "ovs_interfaceid": "c0b67f8d-3fcb-4162-bf30-5b5b4ce5f5a6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2043.360672] env[62519]: DEBUG nova.compute.manager [req-60d37d17-f43e-490b-b0c0-42ef21ae09e2 req-a3ca1c08-7446-43e5-b9dd-b44eb1900cd6 service nova] [instance: 9bf88b2f-63f9-466b-8669-45f17319055d] Received event 
network-vif-deleted-2ee072d4-2bdb-4a83-90dd-06086f515634 {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 2043.361183] env[62519]: INFO nova.compute.manager [req-60d37d17-f43e-490b-b0c0-42ef21ae09e2 req-a3ca1c08-7446-43e5-b9dd-b44eb1900cd6 service nova] [instance: 9bf88b2f-63f9-466b-8669-45f17319055d] Neutron deleted interface 2ee072d4-2bdb-4a83-90dd-06086f515634; detaching it from the instance and deleting it from the info cache [ 2043.361611] env[62519]: DEBUG nova.network.neutron [req-60d37d17-f43e-490b-b0c0-42ef21ae09e2 req-a3ca1c08-7446-43e5-b9dd-b44eb1900cd6 service nova] [instance: 9bf88b2f-63f9-466b-8669-45f17319055d] Updating instance_info_cache with network_info: [] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2043.512877] env[62519]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 2043.512877] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]52e06c8d-f393-5e81-459e-32d7c10f7ec2" [ 2043.512877] env[62519]: _type = "HttpNfcLease" [ 2043.512877] env[62519]: } is ready. {{(pid=62519) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 2043.513160] env[62519]: DEBUG oslo_vmware.rw_handles [None req-5505890f-601f-4728-a979-16c777543556 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 2043.513160] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]52e06c8d-f393-5e81-459e-32d7c10f7ec2" [ 2043.513160] env[62519]: _type = "HttpNfcLease" [ 2043.513160] env[62519]: }. {{(pid=62519) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:467}} [ 2043.514075] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6cb09c0e-bebf-493e-8983-66a7a7e9f8a0 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2043.524554] env[62519]: DEBUG oslo_vmware.rw_handles [None req-5505890f-601f-4728-a979-16c777543556 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52a2f590-0119-6698-545b-2029ff1640eb/disk-0.vmdk from lease info. {{(pid=62519) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 2043.524825] env[62519]: DEBUG oslo_vmware.rw_handles [None req-5505890f-601f-4728-a979-16c777543556 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Creating HTTP connection to write to file with size = 31664640 and URL = https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52a2f590-0119-6698-545b-2029ff1640eb/disk-0.vmdk. 
{{(pid=62519) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2043.592860] env[62519]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-1f57d72d-e6f9-4a93-8cc5-5391631042d4 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2043.828101] env[62519]: DEBUG oslo_concurrency.lockutils [None req-cd6bd682-09f6-4f89-90fc-d01e5c61d29a tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] Releasing lock "refresh_cache-d921e3c0-cc52-4c5a-90bc-4059ba499ff1" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2043.835066] env[62519]: INFO nova.compute.manager [None req-e5503d07-b0ac-465c-85d9-1210feeacb67 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] [instance: 39c292a3-7032-4845-9ae5-2a41d13db305] Took 14.42 seconds to build instance. [ 2043.837958] env[62519]: DEBUG nova.network.neutron [-] [instance: 9bf88b2f-63f9-466b-8669-45f17319055d] Updating instance_info_cache with network_info: [] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2043.867345] env[62519]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-9d13f7a5-bed1-46ef-907d-55d72ffce73f {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2043.877732] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f560891f-c57e-4ab9-bfe5-d623eb8e991a {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2043.914058] env[62519]: DEBUG nova.compute.manager [req-60d37d17-f43e-490b-b0c0-42ef21ae09e2 req-a3ca1c08-7446-43e5-b9dd-b44eb1900cd6 service nova] [instance: 9bf88b2f-63f9-466b-8669-45f17319055d] Detach interface failed, port_id=2ee072d4-2bdb-4a83-90dd-06086f515634, reason: Instance 9bf88b2f-63f9-466b-8669-45f17319055d could not be found. {{(pid=62519) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11455}} [ 2044.339644] env[62519]: INFO nova.compute.manager [-] [instance: 9bf88b2f-63f9-466b-8669-45f17319055d] Took 1.58 seconds to deallocate network for instance. [ 2044.761128] env[62519]: DEBUG oslo_vmware.rw_handles [None req-5505890f-601f-4728-a979-16c777543556 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Completed reading data from the image iterator. {{(pid=62519) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2044.761438] env[62519]: DEBUG oslo_vmware.rw_handles [None req-5505890f-601f-4728-a979-16c777543556 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52a2f590-0119-6698-545b-2029ff1640eb/disk-0.vmdk. 
{{(pid=62519) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 2044.762490] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3704636f-b178-44c9-ae4c-265b5c648975 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2044.770173] env[62519]: DEBUG oslo_vmware.rw_handles [None req-5505890f-601f-4728-a979-16c777543556 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52a2f590-0119-6698-545b-2029ff1640eb/disk-0.vmdk is in state: ready. {{(pid=62519) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 2044.770380] env[62519]: DEBUG oslo_vmware.rw_handles [None req-5505890f-601f-4728-a979-16c777543556 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Releasing lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52a2f590-0119-6698-545b-2029ff1640eb/disk-0.vmdk. {{(pid=62519) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:440}} [ 2044.770657] env[62519]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseComplete with opID=oslo.vmware-80be7053-5e42-489d-9620-cc187ca5d05d {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2044.939985] env[62519]: DEBUG oslo_vmware.rw_handles [None req-5505890f-601f-4728-a979-16c777543556 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Closed VMDK write handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52a2f590-0119-6698-545b-2029ff1640eb/disk-0.vmdk. 
{{(pid=62519) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:646}} [ 2044.940227] env[62519]: INFO nova.virt.vmwareapi.images [None req-5505890f-601f-4728-a979-16c777543556 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] [instance: 47439070-54d8-454c-bf1d-7a2a33d82e9a] Downloaded image file data f3e835e2-df44-417d-9a40-88b6c8346b2f [ 2044.941133] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-824cd944-a079-4a07-970c-064cccb44302 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2044.958448] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-7c697e62-edfc-4f65-b45e-1cf2a48b4738 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2044.991047] env[62519]: INFO nova.virt.vmwareapi.images [None req-5505890f-601f-4728-a979-16c777543556 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] [instance: 47439070-54d8-454c-bf1d-7a2a33d82e9a] The imported VM was unregistered [ 2044.997988] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-5505890f-601f-4728-a979-16c777543556 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] [instance: 47439070-54d8-454c-bf1d-7a2a33d82e9a] Caching image {{(pid=62519) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2044.998292] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-5505890f-601f-4728-a979-16c777543556 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Creating directory with path [datastore1] devstack-image-cache_base/f3e835e2-df44-417d-9a40-88b6c8346b2f {{(pid=62519) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2044.998620] env[62519]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-27a2199c-9aec-49c6-b047-5e367943eb3f {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2045.015257] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-5505890f-601f-4728-a979-16c777543556 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Created directory with path [datastore1] devstack-image-cache_base/f3e835e2-df44-417d-9a40-88b6c8346b2f {{(pid=62519) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2045.015326] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-5505890f-601f-4728-a979-16c777543556 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Moving virtual disk from [datastore1] OSTACK_IMG_5c6189da-0c78-44a3-ab73-d18ac363ed5e/OSTACK_IMG_5c6189da-0c78-44a3-ab73-d18ac363ed5e.vmdk to [datastore1] devstack-image-cache_base/f3e835e2-df44-417d-9a40-88b6c8346b2f/f3e835e2-df44-417d-9a40-88b6c8346b2f.vmdk. 
{{(pid=62519) disk_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:263}} [ 2045.015600] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.MoveVirtualDisk_Task with opID=oslo.vmware-edd1ebb7-6a95-4495-a62e-a9d7856dd9bc {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2045.027230] env[62519]: INFO nova.compute.manager [None req-f6541386-e97b-4411-bfc8-9c9090757e57 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] [instance: 9bf88b2f-63f9-466b-8669-45f17319055d] Took 0.69 seconds to detach 1 volumes for instance. [ 2045.028172] env[62519]: DEBUG oslo_vmware.api [None req-5505890f-601f-4728-a979-16c777543556 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Waiting for the task: (returnval){ [ 2045.028172] env[62519]: value = "task-1803508" [ 2045.028172] env[62519]: _type = "Task" [ 2045.028172] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2045.032794] env[62519]: DEBUG nova.compute.manager [req-30d675be-8cf3-4e7e-b15d-28defcce7dcb req-80967f3c-872d-4eb4-97f2-2fa060b7c10d service nova] [instance: a18af9b7-4548-42d1-8459-508298cb96dc] Received event network-changed-7b3a7ba6-9ee9-40fb-94c8-f7593ffc1ca2 {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 2045.032992] env[62519]: DEBUG nova.compute.manager [req-30d675be-8cf3-4e7e-b15d-28defcce7dcb req-80967f3c-872d-4eb4-97f2-2fa060b7c10d service nova] [instance: a18af9b7-4548-42d1-8459-508298cb96dc] Refreshing instance network info cache due to event network-changed-7b3a7ba6-9ee9-40fb-94c8-f7593ffc1ca2. {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11628}} [ 2045.033244] env[62519]: DEBUG oslo_concurrency.lockutils [req-30d675be-8cf3-4e7e-b15d-28defcce7dcb req-80967f3c-872d-4eb4-97f2-2fa060b7c10d service nova] Acquiring lock "refresh_cache-a18af9b7-4548-42d1-8459-508298cb96dc" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2045.033390] env[62519]: DEBUG oslo_concurrency.lockutils [req-30d675be-8cf3-4e7e-b15d-28defcce7dcb req-80967f3c-872d-4eb4-97f2-2fa060b7c10d service nova] Acquired lock "refresh_cache-a18af9b7-4548-42d1-8459-508298cb96dc" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2045.033573] env[62519]: DEBUG nova.network.neutron [req-30d675be-8cf3-4e7e-b15d-28defcce7dcb req-80967f3c-872d-4eb4-97f2-2fa060b7c10d service nova] [instance: a18af9b7-4548-42d1-8459-508298cb96dc] Refreshing network info cache for port 7b3a7ba6-9ee9-40fb-94c8-f7593ffc1ca2 {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2045.043645] env[62519]: DEBUG oslo_vmware.api [None req-5505890f-601f-4728-a979-16c777543556 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Task: {'id': task-1803508, 'name': MoveVirtualDisk_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2045.346464] env[62519]: DEBUG oslo_concurrency.lockutils [None req-e5503d07-b0ac-465c-85d9-1210feeacb67 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Lock "39c292a3-7032-4845-9ae5-2a41d13db305" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 16.943s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2045.378444] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-cd6bd682-09f6-4f89-90fc-d01e5c61d29a tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] [instance: d921e3c0-cc52-4c5a-90bc-4059ba499ff1] Powering off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2045.378811] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c648e1c6-45b9-4331-863b-9d23dab2af1b {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2045.388757] env[62519]: DEBUG oslo_vmware.api [None req-cd6bd682-09f6-4f89-90fc-d01e5c61d29a tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] Waiting for the task: (returnval){ [ 2045.388757] env[62519]: value = "task-1803509" [ 2045.388757] env[62519]: _type = "Task" [ 2045.388757] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2045.398577] env[62519]: DEBUG oslo_vmware.api [None req-cd6bd682-09f6-4f89-90fc-d01e5c61d29a tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] Task: {'id': task-1803509, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2045.536979] env[62519]: DEBUG oslo_concurrency.lockutils [None req-f6541386-e97b-4411-bfc8-9c9090757e57 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2045.537272] env[62519]: DEBUG oslo_concurrency.lockutils [None req-f6541386-e97b-4411-bfc8-9c9090757e57 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2045.537495] env[62519]: DEBUG nova.objects.instance [None req-f6541386-e97b-4411-bfc8-9c9090757e57 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Lazy-loading 'resources' on Instance uuid 9bf88b2f-63f9-466b-8669-45f17319055d {{(pid=62519) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2045.546535] env[62519]: DEBUG oslo_vmware.api [None req-5505890f-601f-4728-a979-16c777543556 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Task: {'id': task-1803508, 'name': MoveVirtualDisk_Task} progress is 12%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2045.902223] env[62519]: DEBUG oslo_vmware.api [None req-cd6bd682-09f6-4f89-90fc-d01e5c61d29a tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] Task: {'id': task-1803509, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2046.046885] env[62519]: DEBUG oslo_vmware.api [None req-5505890f-601f-4728-a979-16c777543556 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Task: {'id': task-1803508, 'name': MoveVirtualDisk_Task} progress is 35%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2046.110640] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-06c34f56-a070-4eff-9572-e8e4c978f58d tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] [instance: c9399643-7d74-4064-a721-e6d038a5cef0] Volume attach. Driver type: vmdk {{(pid=62519) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 2046.110862] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-06c34f56-a070-4eff-9572-e8e4c978f58d tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] [instance: c9399643-7d74-4064-a721-e6d038a5cef0] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-373885', 'volume_id': 'd1f2e525-3506-4f2f-9d8b-8880ea1acbdc', 'name': 'volume-d1f2e525-3506-4f2f-9d8b-8880ea1acbdc', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'c9399643-7d74-4064-a721-e6d038a5cef0', 'attached_at': '', 'detached_at': '', 'volume_id': 'd1f2e525-3506-4f2f-9d8b-8880ea1acbdc', 'serial': 'd1f2e525-3506-4f2f-9d8b-8880ea1acbdc'} {{(pid=62519) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 2046.111954] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b56a85ad-afbc-4c3e-a24e-7c996bde4237 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2046.143520] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ce95ba0-d19d-494c-931d-8df85733a1f9 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2046.169190] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-06c34f56-a070-4eff-9572-e8e4c978f58d tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] [instance: c9399643-7d74-4064-a721-e6d038a5cef0] Reconfiguring VM instance instance-00000074 to attach disk [datastore1] volume-d1f2e525-3506-4f2f-9d8b-8880ea1acbdc/volume-d1f2e525-3506-4f2f-9d8b-8880ea1acbdc.vmdk or device None with type thin {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2046.174855] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-23f6facf-f861-4992-a66e-0f9febdfde1d {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2046.196698] env[62519]: DEBUG oslo_vmware.api [None req-06c34f56-a070-4eff-9572-e8e4c978f58d 
tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Waiting for the task: (returnval){ [ 2046.196698] env[62519]: value = "task-1803510" [ 2046.196698] env[62519]: _type = "Task" [ 2046.196698] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2046.210442] env[62519]: DEBUG oslo_vmware.api [None req-06c34f56-a070-4eff-9572-e8e4c978f58d tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Task: {'id': task-1803510, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2046.239435] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15044f98-760c-46e0-aba3-c3ef1224b242 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2046.250731] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e5231b77-fbcd-411d-b5d5-09a7f586409b {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2046.256852] env[62519]: DEBUG nova.network.neutron [req-30d675be-8cf3-4e7e-b15d-28defcce7dcb req-80967f3c-872d-4eb4-97f2-2fa060b7c10d service nova] [instance: a18af9b7-4548-42d1-8459-508298cb96dc] Updated VIF entry in instance network info cache for port 7b3a7ba6-9ee9-40fb-94c8-f7593ffc1ca2. {{(pid=62519) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2046.257278] env[62519]: DEBUG nova.network.neutron [req-30d675be-8cf3-4e7e-b15d-28defcce7dcb req-80967f3c-872d-4eb4-97f2-2fa060b7c10d service nova] [instance: a18af9b7-4548-42d1-8459-508298cb96dc] Updating instance_info_cache with network_info: [{"id": "7b3a7ba6-9ee9-40fb-94c8-f7593ffc1ca2", "address": "fa:16:3e:4a:4f:32", "network": {"id": "3996e7f6-f093-4152-af91-6fb77f32a1c5", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-2013009403-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "63a46158057949478e5c79fbe0d4d5d4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3093647a-bab7-4562-ada0-428725e8c0fc", "external-id": "nsx-vlan-transportzone-660", "segmentation_id": 660, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7b3a7ba6-9e", "ovs_interfaceid": "7b3a7ba6-9ee9-40fb-94c8-f7593ffc1ca2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2046.286452] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24229650-ed9e-4dac-9681-e5e8d1928ff1 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2046.299895] env[62519]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f992c04-0581-4124-b9d3-626f4937d072 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2046.316320] env[62519]: DEBUG nova.compute.provider_tree [None req-f6541386-e97b-4411-bfc8-9c9090757e57 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Inventory has not changed in ProviderTree for provider: f8ca0d98-9158-4b85-ae0e-b106f966dd44 {{(pid=62519) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2046.405632] env[62519]: DEBUG oslo_vmware.api [None req-cd6bd682-09f6-4f89-90fc-d01e5c61d29a tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] Task: {'id': task-1803509, 'name': PowerOffVM_Task, 'duration_secs': 0.953287} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2046.406024] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-cd6bd682-09f6-4f89-90fc-d01e5c61d29a tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] [instance: d921e3c0-cc52-4c5a-90bc-4059ba499ff1] Powered off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2046.406843] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20f8cff3-814e-4b93-bc39-2ccd5882366c {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2046.427322] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd125080-5425-4e6f-a418-032fa640165c {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2046.468212] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-cd6bd682-09f6-4f89-90fc-d01e5c61d29a tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] [instance: d921e3c0-cc52-4c5a-90bc-4059ba499ff1] Powering off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2046.472019] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-68f91af1-0191-46ac-99c2-e9731c622371 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2046.478893] env[62519]: DEBUG oslo_vmware.api [None req-cd6bd682-09f6-4f89-90fc-d01e5c61d29a tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] Waiting for the task: (returnval){ [ 2046.478893] env[62519]: value = "task-1803511" [ 2046.478893] env[62519]: _type = "Task" [ 2046.478893] env[62519]: } to complete. 
{{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2046.490197] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-cd6bd682-09f6-4f89-90fc-d01e5c61d29a tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] [instance: d921e3c0-cc52-4c5a-90bc-4059ba499ff1] VM already powered off {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 2046.490797] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-cd6bd682-09f6-4f89-90fc-d01e5c61d29a tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] [instance: d921e3c0-cc52-4c5a-90bc-4059ba499ff1] Processing image 15793716-f1d9-4a86-9030-717adf498693 {{(pid=62519) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2046.490797] env[62519]: DEBUG oslo_concurrency.lockutils [None req-cd6bd682-09f6-4f89-90fc-d01e5c61d29a tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2046.491936] env[62519]: DEBUG oslo_concurrency.lockutils [None req-cd6bd682-09f6-4f89-90fc-d01e5c61d29a tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] Acquired lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2046.492261] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-cd6bd682-09f6-4f89-90fc-d01e5c61d29a tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62519) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2046.492815] env[62519]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-4707e283-c604-444b-8ed6-278d5645f35b {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2046.511257] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-cd6bd682-09f6-4f89-90fc-d01e5c61d29a tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62519) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2046.511504] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-cd6bd682-09f6-4f89-90fc-d01e5c61d29a tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62519) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2046.512456] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-096d00f3-f537-419b-9d77-2379e02d117d {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2046.522023] env[62519]: DEBUG oslo_vmware.api [None req-cd6bd682-09f6-4f89-90fc-d01e5c61d29a tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] Waiting for the task: (returnval){ [ 2046.522023] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]52bf9131-6dd5-c038-a9a3-fa12fb97c9d0" [ 2046.522023] env[62519]: _type = "Task" [ 2046.522023] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2046.533046] env[62519]: DEBUG oslo_vmware.api [None req-cd6bd682-09f6-4f89-90fc-d01e5c61d29a tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52bf9131-6dd5-c038-a9a3-fa12fb97c9d0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2046.544321] env[62519]: DEBUG oslo_vmware.api [None req-5505890f-601f-4728-a979-16c777543556 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Task: {'id': task-1803508, 'name': MoveVirtualDisk_Task} progress is 57%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2046.577894] env[62519]: DEBUG nova.compute.manager [req-4a6b7b9d-d35a-48a3-9608-dd6a740413f6 req-d9c684f3-802f-49e8-be9f-c6d95118aba8 service nova] [instance: 39c292a3-7032-4845-9ae5-2a41d13db305] Received event network-changed-03d52b0a-655f-4661-87b2-35d1b1af19a4 {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 2046.577894] env[62519]: DEBUG nova.compute.manager [req-4a6b7b9d-d35a-48a3-9608-dd6a740413f6 req-d9c684f3-802f-49e8-be9f-c6d95118aba8 service nova] [instance: 39c292a3-7032-4845-9ae5-2a41d13db305] Refreshing instance network info cache due to event network-changed-03d52b0a-655f-4661-87b2-35d1b1af19a4. 
{{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11628}} [ 2046.578134] env[62519]: DEBUG oslo_concurrency.lockutils [req-4a6b7b9d-d35a-48a3-9608-dd6a740413f6 req-d9c684f3-802f-49e8-be9f-c6d95118aba8 service nova] Acquiring lock "refresh_cache-39c292a3-7032-4845-9ae5-2a41d13db305" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2046.578283] env[62519]: DEBUG oslo_concurrency.lockutils [req-4a6b7b9d-d35a-48a3-9608-dd6a740413f6 req-d9c684f3-802f-49e8-be9f-c6d95118aba8 service nova] Acquired lock "refresh_cache-39c292a3-7032-4845-9ae5-2a41d13db305" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2046.578445] env[62519]: DEBUG nova.network.neutron [req-4a6b7b9d-d35a-48a3-9608-dd6a740413f6 req-d9c684f3-802f-49e8-be9f-c6d95118aba8 service nova] [instance: 39c292a3-7032-4845-9ae5-2a41d13db305] Refreshing network info cache for port 03d52b0a-655f-4661-87b2-35d1b1af19a4 {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2046.708403] env[62519]: DEBUG oslo_vmware.api [None req-06c34f56-a070-4eff-9572-e8e4c978f58d tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Task: {'id': task-1803510, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2046.760831] env[62519]: DEBUG oslo_concurrency.lockutils [req-30d675be-8cf3-4e7e-b15d-28defcce7dcb req-80967f3c-872d-4eb4-97f2-2fa060b7c10d service nova] Releasing lock "refresh_cache-a18af9b7-4548-42d1-8459-508298cb96dc" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2046.819642] env[62519]: DEBUG nova.scheduler.client.report [None req-f6541386-e97b-4411-bfc8-9c9090757e57 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Inventory has not changed for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2047.034749] env[62519]: DEBUG oslo_vmware.api [None req-cd6bd682-09f6-4f89-90fc-d01e5c61d29a tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52bf9131-6dd5-c038-a9a3-fa12fb97c9d0, 'name': SearchDatastore_Task, 'duration_secs': 0.083521} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2047.038685] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e53416b2-20e6-4b7d-8fe2-894c0c61c2f2 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2047.045260] env[62519]: DEBUG oslo_vmware.api [None req-cd6bd682-09f6-4f89-90fc-d01e5c61d29a tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] Waiting for the task: (returnval){ [ 2047.045260] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]522186b7-d746-79c7-7842-49aeb5297ffa" [ 2047.045260] env[62519]: _type = "Task" [ 2047.045260] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2047.048377] env[62519]: DEBUG oslo_vmware.api [None req-5505890f-601f-4728-a979-16c777543556 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Task: {'id': task-1803508, 'name': MoveVirtualDisk_Task} progress is 77%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2047.058569] env[62519]: DEBUG oslo_vmware.api [None req-cd6bd682-09f6-4f89-90fc-d01e5c61d29a tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]522186b7-d746-79c7-7842-49aeb5297ffa, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2047.060919] env[62519]: DEBUG nova.compute.manager [req-52b3aca1-1dbe-4308-a708-fbec6d3e0165 req-c7596680-5595-40aa-b8bf-26a55ec570a8 service nova] [instance: 39c292a3-7032-4845-9ae5-2a41d13db305] Received event network-changed-03d52b0a-655f-4661-87b2-35d1b1af19a4 {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 2047.061312] env[62519]: DEBUG nova.compute.manager [req-52b3aca1-1dbe-4308-a708-fbec6d3e0165 req-c7596680-5595-40aa-b8bf-26a55ec570a8 service nova] [instance: 39c292a3-7032-4845-9ae5-2a41d13db305] Refreshing instance network info cache due to event network-changed-03d52b0a-655f-4661-87b2-35d1b1af19a4. {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11628}} [ 2047.061312] env[62519]: DEBUG oslo_concurrency.lockutils [req-52b3aca1-1dbe-4308-a708-fbec6d3e0165 req-c7596680-5595-40aa-b8bf-26a55ec570a8 service nova] Acquiring lock "refresh_cache-39c292a3-7032-4845-9ae5-2a41d13db305" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2047.211451] env[62519]: DEBUG oslo_vmware.api [None req-06c34f56-a070-4eff-9572-e8e4c978f58d tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Task: {'id': task-1803510, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2047.327268] env[62519]: DEBUG oslo_concurrency.lockutils [None req-f6541386-e97b-4411-bfc8-9c9090757e57 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.790s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2047.359541] env[62519]: INFO nova.scheduler.client.report [None req-f6541386-e97b-4411-bfc8-9c9090757e57 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Deleted allocations for instance 9bf88b2f-63f9-466b-8669-45f17319055d [ 2047.409096] env[62519]: DEBUG nova.network.neutron [req-4a6b7b9d-d35a-48a3-9608-dd6a740413f6 req-d9c684f3-802f-49e8-be9f-c6d95118aba8 service nova] [instance: 39c292a3-7032-4845-9ae5-2a41d13db305] Updated VIF entry in instance network info cache for port 03d52b0a-655f-4661-87b2-35d1b1af19a4. {{(pid=62519) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2047.409543] env[62519]: DEBUG nova.network.neutron [req-4a6b7b9d-d35a-48a3-9608-dd6a740413f6 req-d9c684f3-802f-49e8-be9f-c6d95118aba8 service nova] [instance: 39c292a3-7032-4845-9ae5-2a41d13db305] Updating instance_info_cache with network_info: [{"id": "03d52b0a-655f-4661-87b2-35d1b1af19a4", "address": "fa:16:3e:3c:7b:8d", "network": {"id": "3996e7f6-f093-4152-af91-6fb77f32a1c5", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-2013009403-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "63a46158057949478e5c79fbe0d4d5d4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3093647a-bab7-4562-ada0-428725e8c0fc", "external-id": "nsx-vlan-transportzone-660", "segmentation_id": 660, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap03d52b0a-65", "ovs_interfaceid": "03d52b0a-655f-4661-87b2-35d1b1af19a4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2047.544340] env[62519]: DEBUG oslo_vmware.api [None req-5505890f-601f-4728-a979-16c777543556 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Task: {'id': task-1803508, 'name': MoveVirtualDisk_Task} progress is 100%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2047.557476] env[62519]: DEBUG oslo_vmware.api [None req-cd6bd682-09f6-4f89-90fc-d01e5c61d29a tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]522186b7-d746-79c7-7842-49aeb5297ffa, 'name': SearchDatastore_Task, 'duration_secs': 0.082337} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2047.557730] env[62519]: DEBUG oslo_concurrency.lockutils [None req-cd6bd682-09f6-4f89-90fc-d01e5c61d29a tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] Releasing lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2047.557979] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-cd6bd682-09f6-4f89-90fc-d01e5c61d29a tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] Copying virtual disk from [datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk to [datastore1] d921e3c0-cc52-4c5a-90bc-4059ba499ff1/15793716-f1d9-4a86-9030-717adf498693-rescue.vmdk. {{(pid=62519) disk_copy /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:281}} [ 2047.558253] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c13c54fe-f21d-47f0-adf5-390c0e9f029d {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2047.564487] env[62519]: DEBUG oslo_vmware.api [None req-cd6bd682-09f6-4f89-90fc-d01e5c61d29a tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] Waiting for the task: (returnval){ [ 2047.564487] env[62519]: value = "task-1803512" [ 2047.564487] env[62519]: _type = "Task" [ 2047.564487] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2047.572081] env[62519]: DEBUG oslo_vmware.api [None req-cd6bd682-09f6-4f89-90fc-d01e5c61d29a tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] Task: {'id': task-1803512, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2047.706459] env[62519]: DEBUG oslo_vmware.api [None req-06c34f56-a070-4eff-9572-e8e4c978f58d tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Task: {'id': task-1803510, 'name': ReconfigVM_Task, 'duration_secs': 1.088618} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2047.707123] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-06c34f56-a070-4eff-9572-e8e4c978f58d tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] [instance: c9399643-7d74-4064-a721-e6d038a5cef0] Reconfigured VM instance instance-00000074 to attach disk [datastore1] volume-d1f2e525-3506-4f2f-9d8b-8880ea1acbdc/volume-d1f2e525-3506-4f2f-9d8b-8880ea1acbdc.vmdk or device None with type thin {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2047.711824] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f40cd492-ac09-4596-b265-acf2757427ce {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2047.727318] env[62519]: DEBUG oslo_vmware.api [None req-06c34f56-a070-4eff-9572-e8e4c978f58d tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Waiting for the task: (returnval){ [ 2047.727318] env[62519]: value = "task-1803513" [ 2047.727318] env[62519]: _type = "Task" [ 2047.727318] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2047.735042] env[62519]: DEBUG oslo_vmware.api [None req-06c34f56-a070-4eff-9572-e8e4c978f58d tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Task: {'id': task-1803513, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2047.867918] env[62519]: DEBUG oslo_concurrency.lockutils [None req-f6541386-e97b-4411-bfc8-9c9090757e57 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Lock "9bf88b2f-63f9-466b-8669-45f17319055d" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 8.370s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2047.913344] env[62519]: DEBUG oslo_concurrency.lockutils [req-4a6b7b9d-d35a-48a3-9608-dd6a740413f6 req-d9c684f3-802f-49e8-be9f-c6d95118aba8 service nova] Releasing lock "refresh_cache-39c292a3-7032-4845-9ae5-2a41d13db305" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2047.913914] env[62519]: DEBUG oslo_concurrency.lockutils [req-52b3aca1-1dbe-4308-a708-fbec6d3e0165 req-c7596680-5595-40aa-b8bf-26a55ec570a8 service nova] Acquired lock "refresh_cache-39c292a3-7032-4845-9ae5-2a41d13db305" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2047.914138] env[62519]: DEBUG nova.network.neutron [req-52b3aca1-1dbe-4308-a708-fbec6d3e0165 req-c7596680-5595-40aa-b8bf-26a55ec570a8 service nova] [instance: 39c292a3-7032-4845-9ae5-2a41d13db305] Refreshing network info cache for port 03d52b0a-655f-4661-87b2-35d1b1af19a4 {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2048.046286] env[62519]: DEBUG oslo_vmware.api [None req-5505890f-601f-4728-a979-16c777543556 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Task: {'id': task-1803508, 'name': MoveVirtualDisk_Task, 'duration_secs': 2.525778} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2048.046645] env[62519]: INFO nova.virt.vmwareapi.ds_util [None req-5505890f-601f-4728-a979-16c777543556 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Moved virtual disk from [datastore1] OSTACK_IMG_5c6189da-0c78-44a3-ab73-d18ac363ed5e/OSTACK_IMG_5c6189da-0c78-44a3-ab73-d18ac363ed5e.vmdk to [datastore1] devstack-image-cache_base/f3e835e2-df44-417d-9a40-88b6c8346b2f/f3e835e2-df44-417d-9a40-88b6c8346b2f.vmdk. [ 2048.046881] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-5505890f-601f-4728-a979-16c777543556 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] [instance: 47439070-54d8-454c-bf1d-7a2a33d82e9a] Cleaning up location [datastore1] OSTACK_IMG_5c6189da-0c78-44a3-ab73-d18ac363ed5e {{(pid=62519) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 2048.047101] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-5505890f-601f-4728-a979-16c777543556 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Deleting the datastore file [datastore1] OSTACK_IMG_5c6189da-0c78-44a3-ab73-d18ac363ed5e {{(pid=62519) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2048.047406] env[62519]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-75eb2415-9269-49b8-bcd5-c467a02d844d {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2048.055777] env[62519]: DEBUG oslo_vmware.api [None req-5505890f-601f-4728-a979-16c777543556 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Waiting for the task: (returnval){ [ 2048.055777] env[62519]: value = "task-1803514" [ 2048.055777] env[62519]: _type = "Task" [ 2048.055777] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2048.064587] env[62519]: DEBUG oslo_vmware.api [None req-5505890f-601f-4728-a979-16c777543556 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Task: {'id': task-1803514, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2048.074811] env[62519]: DEBUG oslo_vmware.api [None req-cd6bd682-09f6-4f89-90fc-d01e5c61d29a tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] Task: {'id': task-1803512, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2048.240065] env[62519]: DEBUG oslo_vmware.api [None req-06c34f56-a070-4eff-9572-e8e4c978f58d tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Task: {'id': task-1803513, 'name': ReconfigVM_Task, 'duration_secs': 0.239715} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2048.240439] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-06c34f56-a070-4eff-9572-e8e4c978f58d tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] [instance: c9399643-7d74-4064-a721-e6d038a5cef0] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-373885', 'volume_id': 'd1f2e525-3506-4f2f-9d8b-8880ea1acbdc', 'name': 'volume-d1f2e525-3506-4f2f-9d8b-8880ea1acbdc', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'c9399643-7d74-4064-a721-e6d038a5cef0', 'attached_at': '', 'detached_at': '', 'volume_id': 'd1f2e525-3506-4f2f-9d8b-8880ea1acbdc', 'serial': 'd1f2e525-3506-4f2f-9d8b-8880ea1acbdc'} {{(pid=62519) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 2048.566465] env[62519]: DEBUG oslo_vmware.api [None req-5505890f-601f-4728-a979-16c777543556 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Task: {'id': task-1803514, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.072027} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2048.569440] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-5505890f-601f-4728-a979-16c777543556 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Deleted the datastore file {{(pid=62519) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2048.569567] env[62519]: DEBUG oslo_concurrency.lockutils [None req-5505890f-601f-4728-a979-16c777543556 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f3e835e2-df44-417d-9a40-88b6c8346b2f/f3e835e2-df44-417d-9a40-88b6c8346b2f.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2048.569805] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-5505890f-601f-4728-a979-16c777543556 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f3e835e2-df44-417d-9a40-88b6c8346b2f/f3e835e2-df44-417d-9a40-88b6c8346b2f.vmdk to [datastore1] 47439070-54d8-454c-bf1d-7a2a33d82e9a/47439070-54d8-454c-bf1d-7a2a33d82e9a.vmdk {{(pid=62519) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2048.570050] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-3c190ac5-191f-42c0-b53b-5b8df4d15469 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2048.577832] env[62519]: DEBUG oslo_vmware.api [None req-cd6bd682-09f6-4f89-90fc-d01e5c61d29a tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] Task: {'id': task-1803512, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.555239} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2048.578217] env[62519]: INFO nova.virt.vmwareapi.ds_util [None req-cd6bd682-09f6-4f89-90fc-d01e5c61d29a tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] Copied virtual disk from [datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk to [datastore1] d921e3c0-cc52-4c5a-90bc-4059ba499ff1/15793716-f1d9-4a86-9030-717adf498693-rescue.vmdk. [ 2048.578519] env[62519]: DEBUG oslo_vmware.api [None req-5505890f-601f-4728-a979-16c777543556 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Waiting for the task: (returnval){ [ 2048.578519] env[62519]: value = "task-1803515" [ 2048.578519] env[62519]: _type = "Task" [ 2048.578519] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2048.579191] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18ea90e5-6cae-4ce0-a656-66caf747dfcb {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2048.589412] env[62519]: DEBUG oslo_vmware.api [None req-5505890f-601f-4728-a979-16c777543556 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Task: {'id': task-1803515, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2048.610150] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-cd6bd682-09f6-4f89-90fc-d01e5c61d29a tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] [instance: d921e3c0-cc52-4c5a-90bc-4059ba499ff1] Reconfiguring VM instance instance-00000078 to attach disk [datastore1] d921e3c0-cc52-4c5a-90bc-4059ba499ff1/15793716-f1d9-4a86-9030-717adf498693-rescue.vmdk or device None with type thin {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2048.612643] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-db4fcbd1-8cd5-4c70-aa4b-a21a9b545c0c {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2048.627359] env[62519]: DEBUG nova.compute.manager [req-a9ccc12f-dcd6-4929-9b7a-19789433ff2e req-77018ec3-8ba2-4276-a272-94f44595f2d3 service nova] [instance: a18af9b7-4548-42d1-8459-508298cb96dc] Received event network-changed-7b3a7ba6-9ee9-40fb-94c8-f7593ffc1ca2 {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 2048.627924] env[62519]: DEBUG nova.compute.manager [req-a9ccc12f-dcd6-4929-9b7a-19789433ff2e req-77018ec3-8ba2-4276-a272-94f44595f2d3 service nova] [instance: a18af9b7-4548-42d1-8459-508298cb96dc] Refreshing instance network info cache due to event network-changed-7b3a7ba6-9ee9-40fb-94c8-f7593ffc1ca2. 
{{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11628}} [ 2048.627924] env[62519]: DEBUG oslo_concurrency.lockutils [req-a9ccc12f-dcd6-4929-9b7a-19789433ff2e req-77018ec3-8ba2-4276-a272-94f44595f2d3 service nova] Acquiring lock "refresh_cache-a18af9b7-4548-42d1-8459-508298cb96dc" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2048.627924] env[62519]: DEBUG oslo_concurrency.lockutils [req-a9ccc12f-dcd6-4929-9b7a-19789433ff2e req-77018ec3-8ba2-4276-a272-94f44595f2d3 service nova] Acquired lock "refresh_cache-a18af9b7-4548-42d1-8459-508298cb96dc" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2048.628115] env[62519]: DEBUG nova.network.neutron [req-a9ccc12f-dcd6-4929-9b7a-19789433ff2e req-77018ec3-8ba2-4276-a272-94f44595f2d3 service nova] [instance: a18af9b7-4548-42d1-8459-508298cb96dc] Refreshing network info cache for port 7b3a7ba6-9ee9-40fb-94c8-f7593ffc1ca2 {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2048.634594] env[62519]: DEBUG oslo_vmware.api [None req-cd6bd682-09f6-4f89-90fc-d01e5c61d29a tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] Waiting for the task: (returnval){ [ 2048.634594] env[62519]: value = "task-1803516" [ 2048.634594] env[62519]: _type = "Task" [ 2048.634594] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2048.644805] env[62519]: DEBUG oslo_vmware.api [None req-cd6bd682-09f6-4f89-90fc-d01e5c61d29a tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] Task: {'id': task-1803516, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2048.660272] env[62519]: DEBUG nova.network.neutron [req-52b3aca1-1dbe-4308-a708-fbec6d3e0165 req-c7596680-5595-40aa-b8bf-26a55ec570a8 service nova] [instance: 39c292a3-7032-4845-9ae5-2a41d13db305] Updated VIF entry in instance network info cache for port 03d52b0a-655f-4661-87b2-35d1b1af19a4. 
{{(pid=62519) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2048.660627] env[62519]: DEBUG nova.network.neutron [req-52b3aca1-1dbe-4308-a708-fbec6d3e0165 req-c7596680-5595-40aa-b8bf-26a55ec570a8 service nova] [instance: 39c292a3-7032-4845-9ae5-2a41d13db305] Updating instance_info_cache with network_info: [{"id": "03d52b0a-655f-4661-87b2-35d1b1af19a4", "address": "fa:16:3e:3c:7b:8d", "network": {"id": "3996e7f6-f093-4152-af91-6fb77f32a1c5", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-2013009403-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "63a46158057949478e5c79fbe0d4d5d4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3093647a-bab7-4562-ada0-428725e8c0fc", "external-id": "nsx-vlan-transportzone-660", "segmentation_id": 660, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap03d52b0a-65", "ovs_interfaceid": "03d52b0a-655f-4661-87b2-35d1b1af19a4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2049.091647] env[62519]: DEBUG oslo_vmware.api [None req-5505890f-601f-4728-a979-16c777543556 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Task: {'id': task-1803515, 'name': CopyVirtualDisk_Task} progress is 12%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2049.145665] env[62519]: DEBUG oslo_vmware.api [None req-cd6bd682-09f6-4f89-90fc-d01e5c61d29a tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] Task: {'id': task-1803516, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2049.163082] env[62519]: DEBUG oslo_concurrency.lockutils [req-52b3aca1-1dbe-4308-a708-fbec6d3e0165 req-c7596680-5595-40aa-b8bf-26a55ec570a8 service nova] Releasing lock "refresh_cache-39c292a3-7032-4845-9ae5-2a41d13db305" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2049.175845] env[62519]: DEBUG oslo_concurrency.lockutils [None req-86ba8852-eb51-46ae-b862-04cc85e62dfe tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Acquiring lock "812842c2-ac72-4d12-b2f7-3ccfe77a13a7" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2049.176070] env[62519]: DEBUG oslo_concurrency.lockutils [None req-86ba8852-eb51-46ae-b862-04cc85e62dfe tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Lock "812842c2-ac72-4d12-b2f7-3ccfe77a13a7" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2049.283312] env[62519]: DEBUG nova.objects.instance [None req-06c34f56-a070-4eff-9572-e8e4c978f58d tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Lazy-loading 'flavor' on Instance uuid c9399643-7d74-4064-a721-e6d038a5cef0 {{(pid=62519) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2049.391814] env[62519]: DEBUG nova.network.neutron [req-a9ccc12f-dcd6-4929-9b7a-19789433ff2e req-77018ec3-8ba2-4276-a272-94f44595f2d3 service nova] [instance: a18af9b7-4548-42d1-8459-508298cb96dc] Updated VIF entry in instance network info cache for port 7b3a7ba6-9ee9-40fb-94c8-f7593ffc1ca2. 
{{(pid=62519) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2049.392201] env[62519]: DEBUG nova.network.neutron [req-a9ccc12f-dcd6-4929-9b7a-19789433ff2e req-77018ec3-8ba2-4276-a272-94f44595f2d3 service nova] [instance: a18af9b7-4548-42d1-8459-508298cb96dc] Updating instance_info_cache with network_info: [{"id": "7b3a7ba6-9ee9-40fb-94c8-f7593ffc1ca2", "address": "fa:16:3e:4a:4f:32", "network": {"id": "3996e7f6-f093-4152-af91-6fb77f32a1c5", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-2013009403-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.142", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "63a46158057949478e5c79fbe0d4d5d4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3093647a-bab7-4562-ada0-428725e8c0fc", "external-id": "nsx-vlan-transportzone-660", "segmentation_id": 660, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7b3a7ba6-9e", "ovs_interfaceid": "7b3a7ba6-9ee9-40fb-94c8-f7593ffc1ca2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2049.593258] env[62519]: DEBUG oslo_vmware.api [None req-5505890f-601f-4728-a979-16c777543556 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Task: {'id': task-1803515, 'name': CopyVirtualDisk_Task} progress is 35%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2049.646573] env[62519]: DEBUG oslo_vmware.api [None req-cd6bd682-09f6-4f89-90fc-d01e5c61d29a tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] Task: {'id': task-1803516, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2049.679017] env[62519]: DEBUG nova.compute.manager [None req-86ba8852-eb51-46ae-b862-04cc85e62dfe tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] [instance: 812842c2-ac72-4d12-b2f7-3ccfe77a13a7] Starting instance... 
{{(pid=62519) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2441}} [ 2049.789866] env[62519]: DEBUG oslo_concurrency.lockutils [None req-06c34f56-a070-4eff-9572-e8e4c978f58d tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Lock "c9399643-7d74-4064-a721-e6d038a5cef0" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 8.275s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2049.895253] env[62519]: DEBUG oslo_concurrency.lockutils [req-a9ccc12f-dcd6-4929-9b7a-19789433ff2e req-77018ec3-8ba2-4276-a272-94f44595f2d3 service nova] Releasing lock "refresh_cache-a18af9b7-4548-42d1-8459-508298cb96dc" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2050.049009] env[62519]: DEBUG oslo_concurrency.lockutils [None req-620782ae-1b2b-4e86-98c3-124d75c99986 tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Acquiring lock "c9399643-7d74-4064-a721-e6d038a5cef0" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2050.049364] env[62519]: DEBUG oslo_concurrency.lockutils [None req-620782ae-1b2b-4e86-98c3-124d75c99986 tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Lock "c9399643-7d74-4064-a721-e6d038a5cef0" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.001s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2050.093528] env[62519]: DEBUG oslo_vmware.api [None req-5505890f-601f-4728-a979-16c777543556 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Task: {'id': task-1803515, 'name': CopyVirtualDisk_Task} progress is 57%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2050.147368] env[62519]: DEBUG oslo_vmware.api [None req-cd6bd682-09f6-4f89-90fc-d01e5c61d29a tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] Task: {'id': task-1803516, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2050.202317] env[62519]: DEBUG oslo_concurrency.lockutils [None req-86ba8852-eb51-46ae-b862-04cc85e62dfe tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2050.202583] env[62519]: DEBUG oslo_concurrency.lockutils [None req-86ba8852-eb51-46ae-b862-04cc85e62dfe tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2050.204590] env[62519]: INFO nova.compute.claims [None req-86ba8852-eb51-46ae-b862-04cc85e62dfe tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] [instance: 812842c2-ac72-4d12-b2f7-3ccfe77a13a7] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2050.553400] env[62519]: INFO nova.compute.manager [None req-620782ae-1b2b-4e86-98c3-124d75c99986 tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] [instance: c9399643-7d74-4064-a721-e6d038a5cef0] Detaching volume d1f2e525-3506-4f2f-9d8b-8880ea1acbdc [ 2050.592412] env[62519]: DEBUG oslo_vmware.api [None req-5505890f-601f-4728-a979-16c777543556 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Task: {'id': task-1803515, 'name': CopyVirtualDisk_Task} progress is 80%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2050.594022] env[62519]: INFO nova.virt.block_device [None req-620782ae-1b2b-4e86-98c3-124d75c99986 tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] [instance: c9399643-7d74-4064-a721-e6d038a5cef0] Attempting to driver detach volume d1f2e525-3506-4f2f-9d8b-8880ea1acbdc from mountpoint /dev/sdb [ 2050.594374] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-620782ae-1b2b-4e86-98c3-124d75c99986 tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] [instance: c9399643-7d74-4064-a721-e6d038a5cef0] Volume detach. 
Driver type: vmdk {{(pid=62519) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 2050.594421] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-620782ae-1b2b-4e86-98c3-124d75c99986 tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] [instance: c9399643-7d74-4064-a721-e6d038a5cef0] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-373885', 'volume_id': 'd1f2e525-3506-4f2f-9d8b-8880ea1acbdc', 'name': 'volume-d1f2e525-3506-4f2f-9d8b-8880ea1acbdc', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'c9399643-7d74-4064-a721-e6d038a5cef0', 'attached_at': '', 'detached_at': '', 'volume_id': 'd1f2e525-3506-4f2f-9d8b-8880ea1acbdc', 'serial': 'd1f2e525-3506-4f2f-9d8b-8880ea1acbdc'} {{(pid=62519) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 2050.595241] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a5b1d37-d55e-40e5-8bae-45be0cf4c9fc {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2050.618079] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d149deeb-ddd0-40d7-8f22-0c198eaa4dc9 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2050.625753] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e7f3c35-a82a-4d3c-87d9-438dcfbe46cd {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2050.650377] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c11c234-297c-427c-b416-a572a05d246c {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2050.658236] env[62519]: DEBUG oslo_vmware.api [None req-cd6bd682-09f6-4f89-90fc-d01e5c61d29a tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] Task: {'id': task-1803516, 'name': ReconfigVM_Task, 'duration_secs': 1.962644} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2050.658723] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-cd6bd682-09f6-4f89-90fc-d01e5c61d29a tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] [instance: d921e3c0-cc52-4c5a-90bc-4059ba499ff1] Reconfigured VM instance instance-00000078 to attach disk [datastore1] d921e3c0-cc52-4c5a-90bc-4059ba499ff1/15793716-f1d9-4a86-9030-717adf498693-rescue.vmdk or device None with type thin {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2050.670106] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68e32745-8fc1-4021-931a-9238dfdb8574 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2050.673057] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-620782ae-1b2b-4e86-98c3-124d75c99986 tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] The volume has not been displaced from its original location: [datastore1] volume-d1f2e525-3506-4f2f-9d8b-8880ea1acbdc/volume-d1f2e525-3506-4f2f-9d8b-8880ea1acbdc.vmdk. No consolidation needed. {{(pid=62519) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 2050.678537] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-620782ae-1b2b-4e86-98c3-124d75c99986 tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] [instance: c9399643-7d74-4064-a721-e6d038a5cef0] Reconfiguring VM instance instance-00000074 to detach disk 2001 {{(pid=62519) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 2050.678915] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f46aa03d-82e8-4e21-bea5-3f793d64396c {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2050.722281] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7a00c315-a64c-4027-93fb-2d2c3799721d {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2050.732784] env[62519]: DEBUG oslo_vmware.api [None req-620782ae-1b2b-4e86-98c3-124d75c99986 tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Waiting for the task: (returnval){ [ 2050.732784] env[62519]: value = "task-1803517" [ 2050.732784] env[62519]: _type = "Task" [ 2050.732784] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2050.739546] env[62519]: DEBUG oslo_vmware.api [None req-cd6bd682-09f6-4f89-90fc-d01e5c61d29a tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] Waiting for the task: (returnval){ [ 2050.739546] env[62519]: value = "task-1803518" [ 2050.739546] env[62519]: _type = "Task" [ 2050.739546] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2050.747302] env[62519]: DEBUG oslo_vmware.api [None req-620782ae-1b2b-4e86-98c3-124d75c99986 tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Task: {'id': task-1803517, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2050.752799] env[62519]: DEBUG oslo_vmware.api [None req-cd6bd682-09f6-4f89-90fc-d01e5c61d29a tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] Task: {'id': task-1803518, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2051.096936] env[62519]: DEBUG oslo_vmware.api [None req-5505890f-601f-4728-a979-16c777543556 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Task: {'id': task-1803515, 'name': CopyVirtualDisk_Task, 'duration_secs': 2.480067} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2051.097256] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-5505890f-601f-4728-a979-16c777543556 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f3e835e2-df44-417d-9a40-88b6c8346b2f/f3e835e2-df44-417d-9a40-88b6c8346b2f.vmdk to [datastore1] 47439070-54d8-454c-bf1d-7a2a33d82e9a/47439070-54d8-454c-bf1d-7a2a33d82e9a.vmdk {{(pid=62519) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2051.098217] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2fe8dca3-7bcb-4261-8fe0-c98a0a4ccbab {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2051.123120] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-5505890f-601f-4728-a979-16c777543556 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] [instance: 47439070-54d8-454c-bf1d-7a2a33d82e9a] Reconfiguring VM instance instance-00000072 to attach disk [datastore1] 47439070-54d8-454c-bf1d-7a2a33d82e9a/47439070-54d8-454c-bf1d-7a2a33d82e9a.vmdk or device None with type streamOptimized {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2051.123449] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6d615283-5fda-42e0-a482-55bec1ec90af {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2051.143541] env[62519]: DEBUG oslo_vmware.api [None req-5505890f-601f-4728-a979-16c777543556 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Waiting for the task: (returnval){ [ 2051.143541] env[62519]: value = "task-1803519" [ 2051.143541] env[62519]: _type = "Task" [ 2051.143541] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2051.152655] env[62519]: DEBUG oslo_vmware.api [None req-5505890f-601f-4728-a979-16c777543556 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Task: {'id': task-1803519, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2051.251023] env[62519]: DEBUG oslo_vmware.api [None req-620782ae-1b2b-4e86-98c3-124d75c99986 tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Task: {'id': task-1803517, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2051.254658] env[62519]: DEBUG oslo_vmware.api [None req-cd6bd682-09f6-4f89-90fc-d01e5c61d29a tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] Task: {'id': task-1803518, 'name': ReconfigVM_Task, 'duration_secs': 0.435467} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2051.254955] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-cd6bd682-09f6-4f89-90fc-d01e5c61d29a tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] [instance: d921e3c0-cc52-4c5a-90bc-4059ba499ff1] Powering on the VM {{(pid=62519) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2051.255219] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-d8318263-15d5-470f-ae6e-84b6b91f0a88 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2051.261118] env[62519]: DEBUG oslo_vmware.api [None req-cd6bd682-09f6-4f89-90fc-d01e5c61d29a tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] Waiting for the task: (returnval){ [ 2051.261118] env[62519]: value = "task-1803520" [ 2051.261118] env[62519]: _type = "Task" [ 2051.261118] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2051.269228] env[62519]: DEBUG oslo_vmware.api [None req-cd6bd682-09f6-4f89-90fc-d01e5c61d29a tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] Task: {'id': task-1803520, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2051.341424] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72bd8316-8708-47c6-91b2-9678489cbdf3 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2051.349299] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb4f0332-5c43-4f31-8b02-97981ced6fbc {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2051.379440] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27df4443-7f45-4c13-ac56-9781ed874b58 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2051.386941] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-033d3448-e1c0-4f86-b121-0b130ec881eb {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2051.402012] env[62519]: DEBUG nova.compute.provider_tree [None req-86ba8852-eb51-46ae-b862-04cc85e62dfe tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Inventory has not changed in ProviderTree for provider: f8ca0d98-9158-4b85-ae0e-b106f966dd44 {{(pid=62519) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2051.653663] env[62519]: DEBUG oslo_vmware.api [None req-5505890f-601f-4728-a979-16c777543556 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Task: {'id': task-1803519, 'name': ReconfigVM_Task, 'duration_secs': 0.359264} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2051.654123] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-5505890f-601f-4728-a979-16c777543556 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] [instance: 47439070-54d8-454c-bf1d-7a2a33d82e9a] Reconfigured VM instance instance-00000072 to attach disk [datastore1] 47439070-54d8-454c-bf1d-7a2a33d82e9a/47439070-54d8-454c-bf1d-7a2a33d82e9a.vmdk or device None with type streamOptimized {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2051.655105] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-5505890f-601f-4728-a979-16c777543556 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] [instance: 47439070-54d8-454c-bf1d-7a2a33d82e9a] Block device information present: {'root_device_name': '/dev/sda', 'image': [{'size': 0, 'device_name': '/dev/sda', 'encryption_secret_uuid': None, 'guest_format': None, 'boot_index': 0, 'encryption_format': None, 'encrypted': False, 'encryption_options': None, 'device_type': 'disk', 'disk_bus': None, 'image_id': '15793716-f1d9-4a86-9030-717adf498693'}], 'ephemerals': [], 'block_device_mapping': [{'connection_info': {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-373881', 'volume_id': '64f7dc0b-1b91-40f1-85b7-8256d2cde712', 'name': 'volume-64f7dc0b-1b91-40f1-85b7-8256d2cde712', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'attached', 'instance': '47439070-54d8-454c-bf1d-7a2a33d82e9a', 'attached_at': '', 'detached_at': '', 'volume_id': '64f7dc0b-1b91-40f1-85b7-8256d2cde712', 'serial': '64f7dc0b-1b91-40f1-85b7-8256d2cde712'}, 'guest_format': None, 'boot_index': None, 'delete_on_termination': False, 'device_type': None, 'disk_bus': None, 'mount_device': '/dev/sdb', 'attachment_id': 'd6e286e4-a7e2-48a0-a24d-650edfbc37f8', 'volume_type': None}], 'swap': None} {{(pid=62519) spawn /opt/stack/nova/nova/virt/vmwareapi/vmops.py:799}} [ 2051.655325] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-5505890f-601f-4728-a979-16c777543556 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] [instance: 47439070-54d8-454c-bf1d-7a2a33d82e9a] Volume attach. 
Driver type: vmdk {{(pid=62519) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 2051.655540] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-5505890f-601f-4728-a979-16c777543556 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] [instance: 47439070-54d8-454c-bf1d-7a2a33d82e9a] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-373881', 'volume_id': '64f7dc0b-1b91-40f1-85b7-8256d2cde712', 'name': 'volume-64f7dc0b-1b91-40f1-85b7-8256d2cde712', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'attached', 'instance': '47439070-54d8-454c-bf1d-7a2a33d82e9a', 'attached_at': '', 'detached_at': '', 'volume_id': '64f7dc0b-1b91-40f1-85b7-8256d2cde712', 'serial': '64f7dc0b-1b91-40f1-85b7-8256d2cde712'} {{(pid=62519) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 2051.656407] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef6793c8-bdfa-4267-8645-ff30a866f0ee {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2051.674237] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7aa948b6-8d55-4bd5-8503-5533553665b8 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2051.697915] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-5505890f-601f-4728-a979-16c777543556 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] [instance: 47439070-54d8-454c-bf1d-7a2a33d82e9a] Reconfiguring VM instance instance-00000072 to attach disk [datastore1] volume-64f7dc0b-1b91-40f1-85b7-8256d2cde712/volume-64f7dc0b-1b91-40f1-85b7-8256d2cde712.vmdk or device None with type thin {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2051.698193] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-bfea0901-ce49-498f-96d7-49b0a64fd59e {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2051.715532] env[62519]: DEBUG oslo_vmware.api [None req-5505890f-601f-4728-a979-16c777543556 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Waiting for the task: (returnval){ [ 2051.715532] env[62519]: value = "task-1803521" [ 2051.715532] env[62519]: _type = "Task" [ 2051.715532] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2051.722595] env[62519]: DEBUG oslo_vmware.api [None req-5505890f-601f-4728-a979-16c777543556 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Task: {'id': task-1803521, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2051.745106] env[62519]: DEBUG oslo_vmware.api [None req-620782ae-1b2b-4e86-98c3-124d75c99986 tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Task: {'id': task-1803517, 'name': ReconfigVM_Task, 'duration_secs': 0.635124} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2051.745369] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-620782ae-1b2b-4e86-98c3-124d75c99986 tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] [instance: c9399643-7d74-4064-a721-e6d038a5cef0] Reconfigured VM instance instance-00000074 to detach disk 2001 {{(pid=62519) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 2051.749945] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-25bf116d-e314-4a5b-893f-432514e39bfe {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2051.765992] env[62519]: DEBUG oslo_vmware.api [None req-620782ae-1b2b-4e86-98c3-124d75c99986 tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Waiting for the task: (returnval){ [ 2051.765992] env[62519]: value = "task-1803522" [ 2051.765992] env[62519]: _type = "Task" [ 2051.765992] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2051.772948] env[62519]: DEBUG oslo_vmware.api [None req-cd6bd682-09f6-4f89-90fc-d01e5c61d29a tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] Task: {'id': task-1803520, 'name': PowerOnVM_Task, 'duration_secs': 0.427996} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2051.773655] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-cd6bd682-09f6-4f89-90fc-d01e5c61d29a tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] [instance: d921e3c0-cc52-4c5a-90bc-4059ba499ff1] Powered on the VM {{(pid=62519) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2051.778157] env[62519]: DEBUG oslo_vmware.api [None req-620782ae-1b2b-4e86-98c3-124d75c99986 tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Task: {'id': task-1803522, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2051.779261] env[62519]: DEBUG nova.compute.manager [None req-cd6bd682-09f6-4f89-90fc-d01e5c61d29a tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] [instance: d921e3c0-cc52-4c5a-90bc-4059ba499ff1] Checking state {{(pid=62519) _get_power_state /opt/stack/nova/nova/compute/manager.py:1799}} [ 2051.780056] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5e2a3d8-e6a2-42fe-8059-7927b6f68c5c {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2051.906338] env[62519]: DEBUG nova.scheduler.client.report [None req-86ba8852-eb51-46ae-b862-04cc85e62dfe tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Inventory has not changed for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2052.228657] env[62519]: DEBUG oslo_vmware.api [None req-5505890f-601f-4728-a979-16c777543556 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Task: {'id': task-1803521, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2052.276297] env[62519]: DEBUG oslo_vmware.api [None req-620782ae-1b2b-4e86-98c3-124d75c99986 tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Task: {'id': task-1803522, 'name': ReconfigVM_Task, 'duration_secs': 0.138159} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2052.276614] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-620782ae-1b2b-4e86-98c3-124d75c99986 tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] [instance: c9399643-7d74-4064-a721-e6d038a5cef0] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-373885', 'volume_id': 'd1f2e525-3506-4f2f-9d8b-8880ea1acbdc', 'name': 'volume-d1f2e525-3506-4f2f-9d8b-8880ea1acbdc', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'c9399643-7d74-4064-a721-e6d038a5cef0', 'attached_at': '', 'detached_at': '', 'volume_id': 'd1f2e525-3506-4f2f-9d8b-8880ea1acbdc', 'serial': 'd1f2e525-3506-4f2f-9d8b-8880ea1acbdc'} {{(pid=62519) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 2052.411846] env[62519]: DEBUG oslo_concurrency.lockutils [None req-86ba8852-eb51-46ae-b862-04cc85e62dfe tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.209s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2052.412381] env[62519]: DEBUG nova.compute.manager [None req-86ba8852-eb51-46ae-b862-04cc85e62dfe tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] [instance: 812842c2-ac72-4d12-b2f7-3ccfe77a13a7] Start building networks asynchronously for instance. {{(pid=62519) _build_resources /opt/stack/nova/nova/compute/manager.py:2838}} [ 2052.726872] env[62519]: DEBUG oslo_vmware.api [None req-5505890f-601f-4728-a979-16c777543556 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Task: {'id': task-1803521, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2052.753411] env[62519]: INFO nova.compute.manager [None req-405edede-9ad1-44bc-9dc5-248962647118 tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] [instance: d921e3c0-cc52-4c5a-90bc-4059ba499ff1] Unrescuing [ 2052.753645] env[62519]: DEBUG oslo_concurrency.lockutils [None req-405edede-9ad1-44bc-9dc5-248962647118 tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] Acquiring lock "refresh_cache-d921e3c0-cc52-4c5a-90bc-4059ba499ff1" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2052.753793] env[62519]: DEBUG oslo_concurrency.lockutils [None req-405edede-9ad1-44bc-9dc5-248962647118 tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] Acquired lock "refresh_cache-d921e3c0-cc52-4c5a-90bc-4059ba499ff1" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2052.753954] env[62519]: DEBUG nova.network.neutron [None req-405edede-9ad1-44bc-9dc5-248962647118 tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] [instance: d921e3c0-cc52-4c5a-90bc-4059ba499ff1] Building network info cache for instance {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2052.815812] env[62519]: DEBUG nova.objects.instance [None req-620782ae-1b2b-4e86-98c3-124d75c99986 tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Lazy-loading 'flavor' on Instance uuid c9399643-7d74-4064-a721-e6d038a5cef0 {{(pid=62519) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2052.917724] env[62519]: DEBUG nova.compute.utils [None req-86ba8852-eb51-46ae-b862-04cc85e62dfe tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Using /dev/sd instead of None {{(pid=62519) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2052.919198] env[62519]: DEBUG nova.compute.manager [None req-86ba8852-eb51-46ae-b862-04cc85e62dfe tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] [instance: 812842c2-ac72-4d12-b2f7-3ccfe77a13a7] Allocating IP information in the background. 
{{(pid=62519) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1989}} [ 2052.919386] env[62519]: DEBUG nova.network.neutron [None req-86ba8852-eb51-46ae-b862-04cc85e62dfe tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] [instance: 812842c2-ac72-4d12-b2f7-3ccfe77a13a7] allocate_for_instance() {{(pid=62519) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 2052.970415] env[62519]: DEBUG nova.policy [None req-86ba8852-eb51-46ae-b862-04cc85e62dfe tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '158d93cf743840a8be4e192e6bd4b8bf', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'd73e71476254453fb23164dce09c6d41', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62519) authorize /opt/stack/nova/nova/policy.py:192}} [ 2053.228940] env[62519]: DEBUG oslo_vmware.api [None req-5505890f-601f-4728-a979-16c777543556 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Task: {'id': task-1803521, 'name': ReconfigVM_Task, 'duration_secs': 1.271902} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2053.229808] env[62519]: DEBUG nova.network.neutron [None req-86ba8852-eb51-46ae-b862-04cc85e62dfe tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] [instance: 812842c2-ac72-4d12-b2f7-3ccfe77a13a7] Successfully created port: 16e1aad6-277c-4fb9-95ca-00a66fecb425 {{(pid=62519) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2053.231702] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-5505890f-601f-4728-a979-16c777543556 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] [instance: 47439070-54d8-454c-bf1d-7a2a33d82e9a] Reconfigured VM instance instance-00000072 to attach disk [datastore1] volume-64f7dc0b-1b91-40f1-85b7-8256d2cde712/volume-64f7dc0b-1b91-40f1-85b7-8256d2cde712.vmdk or device None with type thin {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2053.236840] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b2faab1a-db90-4368-a8fc-1760251b47a7 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2053.254019] env[62519]: DEBUG oslo_vmware.api [None req-5505890f-601f-4728-a979-16c777543556 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Waiting for the task: (returnval){ [ 2053.254019] env[62519]: value = "task-1803523" [ 2053.254019] env[62519]: _type = "Task" [ 2053.254019] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2053.264018] env[62519]: DEBUG oslo_vmware.api [None req-5505890f-601f-4728-a979-16c777543556 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Task: {'id': task-1803523, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2053.423121] env[62519]: DEBUG nova.compute.manager [None req-86ba8852-eb51-46ae-b862-04cc85e62dfe tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] [instance: 812842c2-ac72-4d12-b2f7-3ccfe77a13a7] Start building block device mappings for instance. {{(pid=62519) _build_resources /opt/stack/nova/nova/compute/manager.py:2873}} [ 2053.565453] env[62519]: DEBUG nova.network.neutron [None req-405edede-9ad1-44bc-9dc5-248962647118 tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] [instance: d921e3c0-cc52-4c5a-90bc-4059ba499ff1] Updating instance_info_cache with network_info: [{"id": "c0b67f8d-3fcb-4162-bf30-5b5b4ce5f5a6", "address": "fa:16:3e:a2:ac:b7", "network": {"id": "8bab7e32-d01d-4dff-8b50-f49f6a04314c", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-1759123998-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "8eb0014c990b48ba8f9cfc5629d72f78", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "61a172ee-af3f-473e-b12a-3fee5bf39c8d", "external-id": "nsx-vlan-transportzone-997", "segmentation_id": 997, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc0b67f8d-3f", "ovs_interfaceid": "c0b67f8d-3fcb-4162-bf30-5b5b4ce5f5a6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2053.765800] env[62519]: DEBUG oslo_vmware.api [None req-5505890f-601f-4728-a979-16c777543556 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Task: {'id': task-1803523, 'name': ReconfigVM_Task, 'duration_secs': 0.139258} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2053.766177] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-5505890f-601f-4728-a979-16c777543556 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] [instance: 47439070-54d8-454c-bf1d-7a2a33d82e9a] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-373881', 'volume_id': '64f7dc0b-1b91-40f1-85b7-8256d2cde712', 'name': 'volume-64f7dc0b-1b91-40f1-85b7-8256d2cde712', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'attached', 'instance': '47439070-54d8-454c-bf1d-7a2a33d82e9a', 'attached_at': '', 'detached_at': '', 'volume_id': '64f7dc0b-1b91-40f1-85b7-8256d2cde712', 'serial': '64f7dc0b-1b91-40f1-85b7-8256d2cde712'} {{(pid=62519) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 2053.766780] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-6ba08de0-8927-4194-ad10-f984992115ba {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2053.773715] env[62519]: DEBUG oslo_vmware.api [None req-5505890f-601f-4728-a979-16c777543556 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Waiting for the task: (returnval){ [ 2053.773715] env[62519]: value = "task-1803524" [ 2053.773715] env[62519]: _type = "Task" [ 2053.773715] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2053.781887] env[62519]: DEBUG oslo_vmware.api [None req-5505890f-601f-4728-a979-16c777543556 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Task: {'id': task-1803524, 'name': Rename_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2053.822493] env[62519]: DEBUG oslo_concurrency.lockutils [None req-620782ae-1b2b-4e86-98c3-124d75c99986 tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Lock "c9399643-7d74-4064-a721-e6d038a5cef0" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.773s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2054.067769] env[62519]: DEBUG oslo_concurrency.lockutils [None req-405edede-9ad1-44bc-9dc5-248962647118 tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] Releasing lock "refresh_cache-d921e3c0-cc52-4c5a-90bc-4059ba499ff1" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2054.068499] env[62519]: DEBUG nova.objects.instance [None req-405edede-9ad1-44bc-9dc5-248962647118 tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] Lazy-loading 'flavor' on Instance uuid d921e3c0-cc52-4c5a-90bc-4059ba499ff1 {{(pid=62519) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2054.283760] env[62519]: DEBUG oslo_vmware.api [None req-5505890f-601f-4728-a979-16c777543556 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Task: {'id': task-1803524, 'name': Rename_Task, 'duration_secs': 0.20661} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2054.284053] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-5505890f-601f-4728-a979-16c777543556 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] [instance: 47439070-54d8-454c-bf1d-7a2a33d82e9a] Powering on the VM {{(pid=62519) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2054.284300] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-4eea1f3b-c3ef-47ce-87d8-302041a993ea {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2054.290978] env[62519]: DEBUG oslo_vmware.api [None req-5505890f-601f-4728-a979-16c777543556 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Waiting for the task: (returnval){ [ 2054.290978] env[62519]: value = "task-1803525" [ 2054.290978] env[62519]: _type = "Task" [ 2054.290978] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2054.297916] env[62519]: DEBUG oslo_vmware.api [None req-5505890f-601f-4728-a979-16c777543556 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Task: {'id': task-1803525, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2054.332036] env[62519]: DEBUG oslo_concurrency.lockutils [None req-bff2151d-2c21-4228-b239-a4f065fe89c9 tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Acquiring lock "c9399643-7d74-4064-a721-e6d038a5cef0" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2054.332036] env[62519]: DEBUG oslo_concurrency.lockutils [None req-bff2151d-2c21-4228-b239-a4f065fe89c9 tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Lock "c9399643-7d74-4064-a721-e6d038a5cef0" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2054.332036] env[62519]: DEBUG oslo_concurrency.lockutils [None req-bff2151d-2c21-4228-b239-a4f065fe89c9 tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Acquiring lock "c9399643-7d74-4064-a721-e6d038a5cef0-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2054.332322] env[62519]: DEBUG oslo_concurrency.lockutils [None req-bff2151d-2c21-4228-b239-a4f065fe89c9 tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Lock "c9399643-7d74-4064-a721-e6d038a5cef0-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2054.332358] env[62519]: DEBUG oslo_concurrency.lockutils [None req-bff2151d-2c21-4228-b239-a4f065fe89c9 
tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Lock "c9399643-7d74-4064-a721-e6d038a5cef0-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2054.334351] env[62519]: INFO nova.compute.manager [None req-bff2151d-2c21-4228-b239-a4f065fe89c9 tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] [instance: c9399643-7d74-4064-a721-e6d038a5cef0] Terminating instance [ 2054.432269] env[62519]: DEBUG nova.compute.manager [None req-86ba8852-eb51-46ae-b862-04cc85e62dfe tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] [instance: 812842c2-ac72-4d12-b2f7-3ccfe77a13a7] Start spawning the instance on the hypervisor. {{(pid=62519) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2647}} [ 2054.462328] env[62519]: DEBUG nova.virt.hardware [None req-86ba8852-eb51-46ae-b862-04cc85e62dfe tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T07:56:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T07:55:55Z,direct_url=,disk_format='vmdk',id=15793716-f1d9-4a86-9030-717adf498693,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4069337684d348e0a1ab4eb6a1a2b14d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T07:55:56Z,virtual_size=,visibility=), allow threads: False {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 2054.462615] env[62519]: DEBUG nova.virt.hardware [None req-86ba8852-eb51-46ae-b862-04cc85e62dfe tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Flavor limits 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2054.462768] env[62519]: DEBUG nova.virt.hardware [None req-86ba8852-eb51-46ae-b862-04cc85e62dfe tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Image limits 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 2054.462960] env[62519]: DEBUG nova.virt.hardware [None req-86ba8852-eb51-46ae-b862-04cc85e62dfe tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Flavor pref 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2054.463105] env[62519]: DEBUG nova.virt.hardware [None req-86ba8852-eb51-46ae-b862-04cc85e62dfe tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Image pref 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 2054.463259] env[62519]: DEBUG nova.virt.hardware [None req-86ba8852-eb51-46ae-b862-04cc85e62dfe tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Chose sockets=0, 
cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 2054.463460] env[62519]: DEBUG nova.virt.hardware [None req-86ba8852-eb51-46ae-b862-04cc85e62dfe tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 2054.463621] env[62519]: DEBUG nova.virt.hardware [None req-86ba8852-eb51-46ae-b862-04cc85e62dfe tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62519) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 2054.463867] env[62519]: DEBUG nova.virt.hardware [None req-86ba8852-eb51-46ae-b862-04cc85e62dfe tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Got 1 possible topologies {{(pid=62519) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 2054.463956] env[62519]: DEBUG nova.virt.hardware [None req-86ba8852-eb51-46ae-b862-04cc85e62dfe tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 2054.464145] env[62519]: DEBUG nova.virt.hardware [None req-86ba8852-eb51-46ae-b862-04cc85e62dfe tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 2054.465026] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b682bdc-a362-4985-bf08-15c100fd59bf {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2054.473464] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd84f984-8d27-45b4-944e-a0a14efef53c {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2054.574458] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58dd69ee-e62a-47c2-a23c-b9578477870b {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2054.595630] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-405edede-9ad1-44bc-9dc5-248962647118 tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] [instance: d921e3c0-cc52-4c5a-90bc-4059ba499ff1] Powering off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2054.595921] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-cba7ef10-b427-42e6-ac65-5183b40e6e83 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2054.602362] env[62519]: DEBUG oslo_vmware.api [None req-405edede-9ad1-44bc-9dc5-248962647118 tempest-ServerRescueTestJSON-1835819143 
tempest-ServerRescueTestJSON-1835819143-project-member] Waiting for the task: (returnval){ [ 2054.602362] env[62519]: value = "task-1803526" [ 2054.602362] env[62519]: _type = "Task" [ 2054.602362] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2054.609862] env[62519]: DEBUG oslo_vmware.api [None req-405edede-9ad1-44bc-9dc5-248962647118 tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] Task: {'id': task-1803526, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2054.781484] env[62519]: DEBUG nova.compute.manager [req-276381ff-8b26-406d-937c-559801b1916c req-bfdba479-51c5-42ab-bb50-da311db7150c service nova] [instance: 812842c2-ac72-4d12-b2f7-3ccfe77a13a7] Received event network-vif-plugged-16e1aad6-277c-4fb9-95ca-00a66fecb425 {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 2054.781771] env[62519]: DEBUG oslo_concurrency.lockutils [req-276381ff-8b26-406d-937c-559801b1916c req-bfdba479-51c5-42ab-bb50-da311db7150c service nova] Acquiring lock "812842c2-ac72-4d12-b2f7-3ccfe77a13a7-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2054.781991] env[62519]: DEBUG oslo_concurrency.lockutils [req-276381ff-8b26-406d-937c-559801b1916c req-bfdba479-51c5-42ab-bb50-da311db7150c service nova] Lock "812842c2-ac72-4d12-b2f7-3ccfe77a13a7-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2054.782105] env[62519]: DEBUG oslo_concurrency.lockutils [req-276381ff-8b26-406d-937c-559801b1916c req-bfdba479-51c5-42ab-bb50-da311db7150c service nova] Lock "812842c2-ac72-4d12-b2f7-3ccfe77a13a7-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2054.782274] env[62519]: DEBUG nova.compute.manager [req-276381ff-8b26-406d-937c-559801b1916c req-bfdba479-51c5-42ab-bb50-da311db7150c service nova] [instance: 812842c2-ac72-4d12-b2f7-3ccfe77a13a7] No waiting events found dispatching network-vif-plugged-16e1aad6-277c-4fb9-95ca-00a66fecb425 {{(pid=62519) pop_instance_event /opt/stack/nova/nova/compute/manager.py:323}} [ 2054.782457] env[62519]: WARNING nova.compute.manager [req-276381ff-8b26-406d-937c-559801b1916c req-bfdba479-51c5-42ab-bb50-da311db7150c service nova] [instance: 812842c2-ac72-4d12-b2f7-3ccfe77a13a7] Received unexpected event network-vif-plugged-16e1aad6-277c-4fb9-95ca-00a66fecb425 for instance with vm_state building and task_state spawning. [ 2054.800834] env[62519]: DEBUG oslo_vmware.api [None req-5505890f-601f-4728-a979-16c777543556 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Task: {'id': task-1803525, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2054.838255] env[62519]: DEBUG nova.compute.manager [None req-bff2151d-2c21-4228-b239-a4f065fe89c9 tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] [instance: c9399643-7d74-4064-a721-e6d038a5cef0] Start destroying the instance on the hypervisor. {{(pid=62519) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3166}} [ 2054.838698] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-bff2151d-2c21-4228-b239-a4f065fe89c9 tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] [instance: c9399643-7d74-4064-a721-e6d038a5cef0] Destroying instance {{(pid=62519) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2054.840373] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b560354-f41a-44bf-8b2a-ed742e452486 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2054.850773] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-bff2151d-2c21-4228-b239-a4f065fe89c9 tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] [instance: c9399643-7d74-4064-a721-e6d038a5cef0] Powering off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2054.851323] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-3ab75d48-4941-4bd7-82f0-050edeeeb6b6 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2054.858551] env[62519]: DEBUG oslo_vmware.api [None req-bff2151d-2c21-4228-b239-a4f065fe89c9 tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Waiting for the task: (returnval){ [ 2054.858551] env[62519]: value = "task-1803527" [ 2054.858551] env[62519]: _type = "Task" [ 2054.858551] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2054.866819] env[62519]: DEBUG oslo_vmware.api [None req-bff2151d-2c21-4228-b239-a4f065fe89c9 tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Task: {'id': task-1803527, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2054.877360] env[62519]: DEBUG nova.network.neutron [None req-86ba8852-eb51-46ae-b862-04cc85e62dfe tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] [instance: 812842c2-ac72-4d12-b2f7-3ccfe77a13a7] Successfully updated port: 16e1aad6-277c-4fb9-95ca-00a66fecb425 {{(pid=62519) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2055.111461] env[62519]: DEBUG oslo_vmware.api [None req-405edede-9ad1-44bc-9dc5-248962647118 tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] Task: {'id': task-1803526, 'name': PowerOffVM_Task, 'duration_secs': 0.195177} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2055.111723] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-405edede-9ad1-44bc-9dc5-248962647118 tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] [instance: d921e3c0-cc52-4c5a-90bc-4059ba499ff1] Powered off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2055.117194] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-405edede-9ad1-44bc-9dc5-248962647118 tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] [instance: d921e3c0-cc52-4c5a-90bc-4059ba499ff1] Reconfiguring VM instance instance-00000078 to detach disk 2001 {{(pid=62519) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 2055.117486] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-95b1c15c-265b-46fa-8d9c-5b98c813d44c {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2055.135134] env[62519]: DEBUG oslo_vmware.api [None req-405edede-9ad1-44bc-9dc5-248962647118 tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] Waiting for the task: (returnval){ [ 2055.135134] env[62519]: value = "task-1803528" [ 2055.135134] env[62519]: _type = "Task" [ 2055.135134] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2055.143578] env[62519]: DEBUG oslo_vmware.api [None req-405edede-9ad1-44bc-9dc5-248962647118 tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] Task: {'id': task-1803528, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2055.305834] env[62519]: DEBUG oslo_vmware.api [None req-5505890f-601f-4728-a979-16c777543556 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Task: {'id': task-1803525, 'name': PowerOnVM_Task, 'duration_secs': 0.642175} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2055.306170] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-5505890f-601f-4728-a979-16c777543556 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] [instance: 47439070-54d8-454c-bf1d-7a2a33d82e9a] Powered on the VM {{(pid=62519) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2055.368583] env[62519]: DEBUG oslo_vmware.api [None req-bff2151d-2c21-4228-b239-a4f065fe89c9 tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Task: {'id': task-1803527, 'name': PowerOffVM_Task, 'duration_secs': 0.372917} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2055.368852] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-bff2151d-2c21-4228-b239-a4f065fe89c9 tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] [instance: c9399643-7d74-4064-a721-e6d038a5cef0] Powered off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2055.369032] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-bff2151d-2c21-4228-b239-a4f065fe89c9 tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] [instance: c9399643-7d74-4064-a721-e6d038a5cef0] Unregistering the VM {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2055.369363] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f23de541-76db-4558-8f50-a80ab6cf503f {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2055.379602] env[62519]: DEBUG oslo_concurrency.lockutils [None req-86ba8852-eb51-46ae-b862-04cc85e62dfe tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Acquiring lock "refresh_cache-812842c2-ac72-4d12-b2f7-3ccfe77a13a7" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2055.379733] env[62519]: DEBUG oslo_concurrency.lockutils [None req-86ba8852-eb51-46ae-b862-04cc85e62dfe tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Acquired lock "refresh_cache-812842c2-ac72-4d12-b2f7-3ccfe77a13a7" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2055.379878] env[62519]: DEBUG nova.network.neutron [None req-86ba8852-eb51-46ae-b862-04cc85e62dfe tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] [instance: 812842c2-ac72-4d12-b2f7-3ccfe77a13a7] Building network info cache for instance {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2055.411751] env[62519]: DEBUG nova.compute.manager [None req-5505890f-601f-4728-a979-16c777543556 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] [instance: 47439070-54d8-454c-bf1d-7a2a33d82e9a] Checking state {{(pid=62519) _get_power_state /opt/stack/nova/nova/compute/manager.py:1799}} [ 2055.412688] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b89f6d21-f717-4b3e-8a8f-029fe841ca39 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2055.451241] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-bff2151d-2c21-4228-b239-a4f065fe89c9 tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] [instance: c9399643-7d74-4064-a721-e6d038a5cef0] Unregistered the VM {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2055.451544] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-bff2151d-2c21-4228-b239-a4f065fe89c9 tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] [instance: c9399643-7d74-4064-a721-e6d038a5cef0] Deleting contents of the VM from datastore datastore1 {{(pid=62519) 
_destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2055.451776] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-bff2151d-2c21-4228-b239-a4f065fe89c9 tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Deleting the datastore file [datastore1] c9399643-7d74-4064-a721-e6d038a5cef0 {{(pid=62519) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2055.452129] env[62519]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-7a21ba3b-08ef-4c0e-8d0c-ec451f0aad57 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2055.458756] env[62519]: DEBUG oslo_vmware.api [None req-bff2151d-2c21-4228-b239-a4f065fe89c9 tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Waiting for the task: (returnval){ [ 2055.458756] env[62519]: value = "task-1803530" [ 2055.458756] env[62519]: _type = "Task" [ 2055.458756] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2055.468676] env[62519]: DEBUG oslo_vmware.api [None req-bff2151d-2c21-4228-b239-a4f065fe89c9 tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Task: {'id': task-1803530, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2055.645724] env[62519]: DEBUG oslo_vmware.api [None req-405edede-9ad1-44bc-9dc5-248962647118 tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] Task: {'id': task-1803528, 'name': ReconfigVM_Task, 'duration_secs': 0.466214} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2055.645724] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-405edede-9ad1-44bc-9dc5-248962647118 tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] [instance: d921e3c0-cc52-4c5a-90bc-4059ba499ff1] Reconfigured VM instance instance-00000078 to detach disk 2001 {{(pid=62519) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 2055.645724] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-405edede-9ad1-44bc-9dc5-248962647118 tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] [instance: d921e3c0-cc52-4c5a-90bc-4059ba499ff1] Powering on the VM {{(pid=62519) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2055.645960] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-914312e7-1ad7-4065-82ed-9032bafd7d84 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2055.653901] env[62519]: DEBUG oslo_vmware.api [None req-405edede-9ad1-44bc-9dc5-248962647118 tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] Waiting for the task: (returnval){ [ 2055.653901] env[62519]: value = "task-1803531" [ 2055.653901] env[62519]: _type = "Task" [ 2055.653901] env[62519]: } to complete. 
{{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2055.661087] env[62519]: DEBUG oslo_vmware.api [None req-405edede-9ad1-44bc-9dc5-248962647118 tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] Task: {'id': task-1803531, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2055.912295] env[62519]: DEBUG nova.network.neutron [None req-86ba8852-eb51-46ae-b862-04cc85e62dfe tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] [instance: 812842c2-ac72-4d12-b2f7-3ccfe77a13a7] Instance cache missing network info. {{(pid=62519) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2055.931008] env[62519]: DEBUG oslo_concurrency.lockutils [None req-5505890f-601f-4728-a979-16c777543556 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Lock "47439070-54d8-454c-bf1d-7a2a33d82e9a" "released" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: held 34.127s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2055.969696] env[62519]: DEBUG oslo_vmware.api [None req-bff2151d-2c21-4228-b239-a4f065fe89c9 tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Task: {'id': task-1803530, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.239644} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2055.969940] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-bff2151d-2c21-4228-b239-a4f065fe89c9 tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Deleted the datastore file {{(pid=62519) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2055.970144] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-bff2151d-2c21-4228-b239-a4f065fe89c9 tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] [instance: c9399643-7d74-4064-a721-e6d038a5cef0] Deleted contents of the VM from datastore datastore1 {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2055.970326] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-bff2151d-2c21-4228-b239-a4f065fe89c9 tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] [instance: c9399643-7d74-4064-a721-e6d038a5cef0] Instance destroyed {{(pid=62519) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2055.970499] env[62519]: INFO nova.compute.manager [None req-bff2151d-2c21-4228-b239-a4f065fe89c9 tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] [instance: c9399643-7d74-4064-a721-e6d038a5cef0] Took 1.13 seconds to destroy the instance on the hypervisor. [ 2055.970739] env[62519]: DEBUG oslo.service.loopingcall [None req-bff2151d-2c21-4228-b239-a4f065fe89c9 tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62519) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2055.970927] env[62519]: DEBUG nova.compute.manager [-] [instance: c9399643-7d74-4064-a721-e6d038a5cef0] Deallocating network for instance {{(pid=62519) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2297}} [ 2055.971017] env[62519]: DEBUG nova.network.neutron [-] [instance: c9399643-7d74-4064-a721-e6d038a5cef0] deallocate_for_instance() {{(pid=62519) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2056.128994] env[62519]: DEBUG nova.network.neutron [None req-86ba8852-eb51-46ae-b862-04cc85e62dfe tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] [instance: 812842c2-ac72-4d12-b2f7-3ccfe77a13a7] Updating instance_info_cache with network_info: [{"id": "16e1aad6-277c-4fb9-95ca-00a66fecb425", "address": "fa:16:3e:4e:44:c5", "network": {"id": "32c7792c-9705-4830-bdbc-4c4159d566f0", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-374750788-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d73e71476254453fb23164dce09c6d41", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a0a76279-3c11-4bef-b124-2a2ee13fa377", "external-id": "nsx-vlan-transportzone-738", "segmentation_id": 738, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap16e1aad6-27", "ovs_interfaceid": "16e1aad6-277c-4fb9-95ca-00a66fecb425", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2056.165644] env[62519]: DEBUG oslo_vmware.api [None req-405edede-9ad1-44bc-9dc5-248962647118 tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] Task: {'id': task-1803531, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2056.632080] env[62519]: DEBUG oslo_concurrency.lockutils [None req-86ba8852-eb51-46ae-b862-04cc85e62dfe tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Releasing lock "refresh_cache-812842c2-ac72-4d12-b2f7-3ccfe77a13a7" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2056.632540] env[62519]: DEBUG nova.compute.manager [None req-86ba8852-eb51-46ae-b862-04cc85e62dfe tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] [instance: 812842c2-ac72-4d12-b2f7-3ccfe77a13a7] Instance network_info: |[{"id": "16e1aad6-277c-4fb9-95ca-00a66fecb425", "address": "fa:16:3e:4e:44:c5", "network": {"id": "32c7792c-9705-4830-bdbc-4c4159d566f0", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-374750788-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d73e71476254453fb23164dce09c6d41", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a0a76279-3c11-4bef-b124-2a2ee13fa377", "external-id": "nsx-vlan-transportzone-738", "segmentation_id": 738, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap16e1aad6-27", "ovs_interfaceid": "16e1aad6-277c-4fb9-95ca-00a66fecb425", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62519) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2004}} [ 2056.633102] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-86ba8852-eb51-46ae-b862-04cc85e62dfe tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] [instance: 812842c2-ac72-4d12-b2f7-3ccfe77a13a7] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:4e:44:c5', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a0a76279-3c11-4bef-b124-2a2ee13fa377', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '16e1aad6-277c-4fb9-95ca-00a66fecb425', 'vif_model': 'vmxnet3'}] {{(pid=62519) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2056.645225] env[62519]: DEBUG oslo.service.loopingcall [None req-86ba8852-eb51-46ae-b862-04cc85e62dfe tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62519) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2056.645502] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 812842c2-ac72-4d12-b2f7-3ccfe77a13a7] Creating VM on the ESX host {{(pid=62519) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2056.645803] env[62519]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-5c055890-729e-44df-b208-fd54e94ea7bf {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2056.680912] env[62519]: DEBUG oslo_vmware.api [None req-405edede-9ad1-44bc-9dc5-248962647118 tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] Task: {'id': task-1803531, 'name': PowerOnVM_Task, 'duration_secs': 0.778994} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2056.682470] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-405edede-9ad1-44bc-9dc5-248962647118 tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] [instance: d921e3c0-cc52-4c5a-90bc-4059ba499ff1] Powered on the VM {{(pid=62519) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2056.682776] env[62519]: DEBUG nova.compute.manager [None req-405edede-9ad1-44bc-9dc5-248962647118 tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] [instance: d921e3c0-cc52-4c5a-90bc-4059ba499ff1] Checking state {{(pid=62519) _get_power_state /opt/stack/nova/nova/compute/manager.py:1799}} [ 2056.683090] env[62519]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2056.683090] env[62519]: value = "task-1803532" [ 2056.683090] env[62519]: _type = "Task" [ 2056.683090] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2056.684011] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-392a774b-7404-47f7-b6b3-bb3217127cd0 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2056.702023] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1803532, 'name': CreateVM_Task} progress is 6%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2056.812330] env[62519]: DEBUG nova.compute.manager [req-c51931b5-334b-44bf-b425-ee294d2aa8b3 req-fe465472-422b-4eca-bc09-006fe4bd76e6 service nova] [instance: 812842c2-ac72-4d12-b2f7-3ccfe77a13a7] Received event network-changed-16e1aad6-277c-4fb9-95ca-00a66fecb425 {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 2056.812629] env[62519]: DEBUG nova.compute.manager [req-c51931b5-334b-44bf-b425-ee294d2aa8b3 req-fe465472-422b-4eca-bc09-006fe4bd76e6 service nova] [instance: 812842c2-ac72-4d12-b2f7-3ccfe77a13a7] Refreshing instance network info cache due to event network-changed-16e1aad6-277c-4fb9-95ca-00a66fecb425. 
{{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11628}} [ 2056.813372] env[62519]: DEBUG oslo_concurrency.lockutils [req-c51931b5-334b-44bf-b425-ee294d2aa8b3 req-fe465472-422b-4eca-bc09-006fe4bd76e6 service nova] Acquiring lock "refresh_cache-812842c2-ac72-4d12-b2f7-3ccfe77a13a7" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2056.813532] env[62519]: DEBUG oslo_concurrency.lockutils [req-c51931b5-334b-44bf-b425-ee294d2aa8b3 req-fe465472-422b-4eca-bc09-006fe4bd76e6 service nova] Acquired lock "refresh_cache-812842c2-ac72-4d12-b2f7-3ccfe77a13a7" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2056.813769] env[62519]: DEBUG nova.network.neutron [req-c51931b5-334b-44bf-b425-ee294d2aa8b3 req-fe465472-422b-4eca-bc09-006fe4bd76e6 service nova] [instance: 812842c2-ac72-4d12-b2f7-3ccfe77a13a7] Refreshing network info cache for port 16e1aad6-277c-4fb9-95ca-00a66fecb425 {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2057.051161] env[62519]: DEBUG nova.network.neutron [-] [instance: c9399643-7d74-4064-a721-e6d038a5cef0] Updating instance_info_cache with network_info: [] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2057.197411] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1803532, 'name': CreateVM_Task, 'duration_secs': 0.470269} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2057.197580] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 812842c2-ac72-4d12-b2f7-3ccfe77a13a7] Created VM on the ESX host {{(pid=62519) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2057.198281] env[62519]: DEBUG oslo_concurrency.lockutils [None req-86ba8852-eb51-46ae-b862-04cc85e62dfe tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2057.198448] env[62519]: DEBUG oslo_concurrency.lockutils [None req-86ba8852-eb51-46ae-b862-04cc85e62dfe tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Acquired lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2057.198762] env[62519]: DEBUG oslo_concurrency.lockutils [None req-86ba8852-eb51-46ae-b862-04cc85e62dfe tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2057.199012] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-90d3a90c-8808-4874-a21f-877afa4324bf {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2057.206429] env[62519]: DEBUG oslo_vmware.api [None req-86ba8852-eb51-46ae-b862-04cc85e62dfe tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Waiting for the 
task: (returnval){ [ 2057.206429] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]52de46d0-d40f-782e-c528-adf4d9377fd0" [ 2057.206429] env[62519]: _type = "Task" [ 2057.206429] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2057.215744] env[62519]: DEBUG oslo_vmware.api [None req-86ba8852-eb51-46ae-b862-04cc85e62dfe tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52de46d0-d40f-782e-c528-adf4d9377fd0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2057.521463] env[62519]: DEBUG nova.network.neutron [req-c51931b5-334b-44bf-b425-ee294d2aa8b3 req-fe465472-422b-4eca-bc09-006fe4bd76e6 service nova] [instance: 812842c2-ac72-4d12-b2f7-3ccfe77a13a7] Updated VIF entry in instance network info cache for port 16e1aad6-277c-4fb9-95ca-00a66fecb425. {{(pid=62519) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2057.521926] env[62519]: DEBUG nova.network.neutron [req-c51931b5-334b-44bf-b425-ee294d2aa8b3 req-fe465472-422b-4eca-bc09-006fe4bd76e6 service nova] [instance: 812842c2-ac72-4d12-b2f7-3ccfe77a13a7] Updating instance_info_cache with network_info: [{"id": "16e1aad6-277c-4fb9-95ca-00a66fecb425", "address": "fa:16:3e:4e:44:c5", "network": {"id": "32c7792c-9705-4830-bdbc-4c4159d566f0", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-374750788-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d73e71476254453fb23164dce09c6d41", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a0a76279-3c11-4bef-b124-2a2ee13fa377", "external-id": "nsx-vlan-transportzone-738", "segmentation_id": 738, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap16e1aad6-27", "ovs_interfaceid": "16e1aad6-277c-4fb9-95ca-00a66fecb425", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2057.554605] env[62519]: INFO nova.compute.manager [-] [instance: c9399643-7d74-4064-a721-e6d038a5cef0] Took 1.58 seconds to deallocate network for instance. [ 2057.718821] env[62519]: DEBUG oslo_vmware.api [None req-86ba8852-eb51-46ae-b862-04cc85e62dfe tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52de46d0-d40f-782e-c528-adf4d9377fd0, 'name': SearchDatastore_Task, 'duration_secs': 0.017389} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2057.718998] env[62519]: DEBUG oslo_concurrency.lockutils [None req-86ba8852-eb51-46ae-b862-04cc85e62dfe tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Releasing lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2057.719250] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-86ba8852-eb51-46ae-b862-04cc85e62dfe tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] [instance: 812842c2-ac72-4d12-b2f7-3ccfe77a13a7] Processing image 15793716-f1d9-4a86-9030-717adf498693 {{(pid=62519) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2057.719551] env[62519]: DEBUG oslo_concurrency.lockutils [None req-86ba8852-eb51-46ae-b862-04cc85e62dfe tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2057.719699] env[62519]: DEBUG oslo_concurrency.lockutils [None req-86ba8852-eb51-46ae-b862-04cc85e62dfe tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Acquired lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2057.719874] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-86ba8852-eb51-46ae-b862-04cc85e62dfe tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62519) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2057.720663] env[62519]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c7ebf3b4-5fec-440a-90d9-fbc16c69e01f {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2057.729910] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-86ba8852-eb51-46ae-b862-04cc85e62dfe tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62519) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2057.730164] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-86ba8852-eb51-46ae-b862-04cc85e62dfe tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62519) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2057.731089] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3ef52e1e-95e1-4349-9aef-f9b7ae0129d1 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2057.736254] env[62519]: DEBUG oslo_vmware.api [None req-86ba8852-eb51-46ae-b862-04cc85e62dfe tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Waiting for the task: (returnval){ [ 2057.736254] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]52e40c4d-5576-04cb-2cba-f731a19dc3fd" [ 2057.736254] env[62519]: _type = "Task" [ 2057.736254] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2057.744142] env[62519]: DEBUG oslo_vmware.api [None req-86ba8852-eb51-46ae-b862-04cc85e62dfe tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52e40c4d-5576-04cb-2cba-f731a19dc3fd, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2057.785167] env[62519]: DEBUG oslo_concurrency.lockutils [None req-787adc58-befb-49eb-9389-cd027f3c17f7 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Acquiring lock "interface-a18af9b7-4548-42d1-8459-508298cb96dc-bc92fa6c-1a84-46e1-b36f-1b5430ecbb4c" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2057.785464] env[62519]: DEBUG oslo_concurrency.lockutils [None req-787adc58-befb-49eb-9389-cd027f3c17f7 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Lock "interface-a18af9b7-4548-42d1-8459-508298cb96dc-bc92fa6c-1a84-46e1-b36f-1b5430ecbb4c" acquired by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2057.785899] env[62519]: DEBUG nova.objects.instance [None req-787adc58-befb-49eb-9389-cd027f3c17f7 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Lazy-loading 'flavor' on Instance uuid a18af9b7-4548-42d1-8459-508298cb96dc {{(pid=62519) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2058.025016] env[62519]: DEBUG oslo_concurrency.lockutils [req-c51931b5-334b-44bf-b425-ee294d2aa8b3 req-fe465472-422b-4eca-bc09-006fe4bd76e6 service nova] Releasing lock "refresh_cache-812842c2-ac72-4d12-b2f7-3ccfe77a13a7" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2058.025290] env[62519]: DEBUG nova.compute.manager [req-c51931b5-334b-44bf-b425-ee294d2aa8b3 req-fe465472-422b-4eca-bc09-006fe4bd76e6 service nova] [instance: c9399643-7d74-4064-a721-e6d038a5cef0] Received event network-vif-deleted-a8915627-fabb-4472-92ee-8c09b3c07a92 {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 2058.025468] env[62519]: INFO nova.compute.manager [req-c51931b5-334b-44bf-b425-ee294d2aa8b3 req-fe465472-422b-4eca-bc09-006fe4bd76e6 service nova] 
[instance: c9399643-7d74-4064-a721-e6d038a5cef0] Neutron deleted interface a8915627-fabb-4472-92ee-8c09b3c07a92; detaching it from the instance and deleting it from the info cache [ 2058.025638] env[62519]: DEBUG nova.network.neutron [req-c51931b5-334b-44bf-b425-ee294d2aa8b3 req-fe465472-422b-4eca-bc09-006fe4bd76e6 service nova] [instance: c9399643-7d74-4064-a721-e6d038a5cef0] Updating instance_info_cache with network_info: [] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2058.060935] env[62519]: DEBUG oslo_concurrency.lockutils [None req-bff2151d-2c21-4228-b239-a4f065fe89c9 tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2058.061246] env[62519]: DEBUG oslo_concurrency.lockutils [None req-bff2151d-2c21-4228-b239-a4f065fe89c9 tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2058.061469] env[62519]: DEBUG nova.objects.instance [None req-bff2151d-2c21-4228-b239-a4f065fe89c9 tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Lazy-loading 'resources' on Instance uuid c9399643-7d74-4064-a721-e6d038a5cef0 {{(pid=62519) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2058.246942] env[62519]: DEBUG oslo_vmware.api [None req-86ba8852-eb51-46ae-b862-04cc85e62dfe tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52e40c4d-5576-04cb-2cba-f731a19dc3fd, 'name': SearchDatastore_Task, 'duration_secs': 0.010558} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2058.248070] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-43874aa6-a76e-4d73-b643-386b6f50caa1 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2058.253919] env[62519]: DEBUG oslo_vmware.api [None req-86ba8852-eb51-46ae-b862-04cc85e62dfe tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Waiting for the task: (returnval){ [ 2058.253919] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]5292fb35-f969-81fa-c76e-72df292080dc" [ 2058.253919] env[62519]: _type = "Task" [ 2058.253919] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2058.261832] env[62519]: DEBUG oslo_vmware.api [None req-86ba8852-eb51-46ae-b862-04cc85e62dfe tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]5292fb35-f969-81fa-c76e-72df292080dc, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2058.380696] env[62519]: DEBUG nova.objects.instance [None req-787adc58-befb-49eb-9389-cd027f3c17f7 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Lazy-loading 'pci_requests' on Instance uuid a18af9b7-4548-42d1-8459-508298cb96dc {{(pid=62519) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2058.489767] env[62519]: DEBUG oslo_concurrency.lockutils [None req-0cc51e69-3302-4abd-93dc-2838ebbb336c tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] Acquiring lock "d921e3c0-cc52-4c5a-90bc-4059ba499ff1" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2058.490757] env[62519]: DEBUG oslo_concurrency.lockutils [None req-0cc51e69-3302-4abd-93dc-2838ebbb336c tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] Lock "d921e3c0-cc52-4c5a-90bc-4059ba499ff1" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2058.490868] env[62519]: DEBUG oslo_concurrency.lockutils [None req-0cc51e69-3302-4abd-93dc-2838ebbb336c tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] Acquiring lock "d921e3c0-cc52-4c5a-90bc-4059ba499ff1-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2058.491044] env[62519]: DEBUG oslo_concurrency.lockutils [None req-0cc51e69-3302-4abd-93dc-2838ebbb336c tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] Lock "d921e3c0-cc52-4c5a-90bc-4059ba499ff1-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2058.491236] env[62519]: DEBUG oslo_concurrency.lockutils [None req-0cc51e69-3302-4abd-93dc-2838ebbb336c tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] Lock "d921e3c0-cc52-4c5a-90bc-4059ba499ff1-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2058.493405] env[62519]: INFO nova.compute.manager [None req-0cc51e69-3302-4abd-93dc-2838ebbb336c tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] [instance: d921e3c0-cc52-4c5a-90bc-4059ba499ff1] Terminating instance [ 2058.528588] env[62519]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-24366aaf-44d5-4f75-baeb-be9aa66d9b70 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2058.538335] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a4c016d-daf0-4047-bfe1-0dbac6b013e5 {{(pid=62519) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2058.572949] env[62519]: DEBUG nova.compute.manager [req-c51931b5-334b-44bf-b425-ee294d2aa8b3 req-fe465472-422b-4eca-bc09-006fe4bd76e6 service nova] [instance: c9399643-7d74-4064-a721-e6d038a5cef0] Detach interface failed, port_id=a8915627-fabb-4472-92ee-8c09b3c07a92, reason: Instance c9399643-7d74-4064-a721-e6d038a5cef0 could not be found. {{(pid=62519) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11455}} [ 2058.682549] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f917f57d-3dad-4abe-8e47-63cfae0baf71 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2058.690416] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-606505cd-4b33-4958-9fe2-c679f44adad9 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2058.722649] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b692b542-9fe1-458e-a75e-7bb9a58c7849 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2058.730850] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c506c0b4-abf0-4be3-aae7-4b31055bdf9e {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2058.744667] env[62519]: DEBUG nova.compute.provider_tree [None req-bff2151d-2c21-4228-b239-a4f065fe89c9 tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Inventory has not changed in ProviderTree for provider: f8ca0d98-9158-4b85-ae0e-b106f966dd44 {{(pid=62519) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2058.763829] env[62519]: DEBUG oslo_vmware.api [None req-86ba8852-eb51-46ae-b862-04cc85e62dfe tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]5292fb35-f969-81fa-c76e-72df292080dc, 'name': SearchDatastore_Task, 'duration_secs': 0.033035} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2058.764132] env[62519]: DEBUG oslo_concurrency.lockutils [None req-86ba8852-eb51-46ae-b862-04cc85e62dfe tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Releasing lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2058.764390] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-86ba8852-eb51-46ae-b862-04cc85e62dfe tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk to [datastore1] 812842c2-ac72-4d12-b2f7-3ccfe77a13a7/812842c2-ac72-4d12-b2f7-3ccfe77a13a7.vmdk {{(pid=62519) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2058.764714] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-03818c32-467f-4b0f-8a87-3b3004572e88 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2058.771597] env[62519]: DEBUG oslo_vmware.api [None req-86ba8852-eb51-46ae-b862-04cc85e62dfe tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Waiting for the task: (returnval){ [ 2058.771597] env[62519]: value = "task-1803533" [ 2058.771597] env[62519]: _type = "Task" [ 2058.771597] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2058.779857] env[62519]: DEBUG oslo_vmware.api [None req-86ba8852-eb51-46ae-b862-04cc85e62dfe tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Task: {'id': task-1803533, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2058.883692] env[62519]: DEBUG nova.objects.base [None req-787adc58-befb-49eb-9389-cd027f3c17f7 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Object Instance lazy-loaded attributes: flavor,pci_requests {{(pid=62519) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 2058.884052] env[62519]: DEBUG nova.network.neutron [None req-787adc58-befb-49eb-9389-cd027f3c17f7 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] [instance: a18af9b7-4548-42d1-8459-508298cb96dc] allocate_for_instance() {{(pid=62519) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 2058.951416] env[62519]: DEBUG nova.policy [None req-787adc58-befb-49eb-9389-cd027f3c17f7 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '7eca5c7b079d4785941d68d7c51df5e0', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '63a46158057949478e5c79fbe0d4d5d4', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62519) authorize /opt/stack/nova/nova/policy.py:192}} [ 2058.997471] env[62519]: DEBUG nova.compute.manager [None req-0cc51e69-3302-4abd-93dc-2838ebbb336c tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] [instance: d921e3c0-cc52-4c5a-90bc-4059ba499ff1] Start destroying the instance on the hypervisor. {{(pid=62519) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3166}} [ 2058.997713] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-0cc51e69-3302-4abd-93dc-2838ebbb336c tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] [instance: d921e3c0-cc52-4c5a-90bc-4059ba499ff1] Destroying instance {{(pid=62519) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2058.998764] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2399d59-7f53-4c95-90b9-06462c21930b {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2059.008967] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-0cc51e69-3302-4abd-93dc-2838ebbb336c tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] [instance: d921e3c0-cc52-4c5a-90bc-4059ba499ff1] Powering off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2059.009299] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b15a12fd-c745-4945-b6f4-3a52442e1ff0 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2059.017306] env[62519]: DEBUG oslo_vmware.api [None req-0cc51e69-3302-4abd-93dc-2838ebbb336c tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] Waiting for the task: (returnval){ [ 2059.017306] env[62519]: value = "task-1803534" [ 2059.017306] env[62519]: _type = "Task" [ 2059.017306] env[62519]: } to complete. 
{{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2059.027840] env[62519]: DEBUG oslo_vmware.api [None req-0cc51e69-3302-4abd-93dc-2838ebbb336c tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] Task: {'id': task-1803534, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2059.247893] env[62519]: DEBUG nova.scheduler.client.report [None req-bff2151d-2c21-4228-b239-a4f065fe89c9 tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Inventory has not changed for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2059.282601] env[62519]: DEBUG oslo_vmware.api [None req-86ba8852-eb51-46ae-b862-04cc85e62dfe tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Task: {'id': task-1803533, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2059.527098] env[62519]: DEBUG oslo_vmware.api [None req-0cc51e69-3302-4abd-93dc-2838ebbb336c tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] Task: {'id': task-1803534, 'name': PowerOffVM_Task, 'duration_secs': 0.398752} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2059.527333] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-0cc51e69-3302-4abd-93dc-2838ebbb336c tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] [instance: d921e3c0-cc52-4c5a-90bc-4059ba499ff1] Powered off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2059.527497] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-0cc51e69-3302-4abd-93dc-2838ebbb336c tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] [instance: d921e3c0-cc52-4c5a-90bc-4059ba499ff1] Unregistering the VM {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2059.527740] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-17e04ac6-f877-4de1-94fb-af0849387aee {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2059.734132] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-0cc51e69-3302-4abd-93dc-2838ebbb336c tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] [instance: d921e3c0-cc52-4c5a-90bc-4059ba499ff1] Unregistered the VM {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2059.734359] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-0cc51e69-3302-4abd-93dc-2838ebbb336c tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] [instance: d921e3c0-cc52-4c5a-90bc-4059ba499ff1] Deleting contents of the VM from datastore datastore1 {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2059.734541] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-0cc51e69-3302-4abd-93dc-2838ebbb336c tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] Deleting the datastore file [datastore1] d921e3c0-cc52-4c5a-90bc-4059ba499ff1 {{(pid=62519) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2059.734809] env[62519]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-3900db81-b75c-4a32-8f27-a33753c4c005 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2059.741262] env[62519]: DEBUG oslo_vmware.api [None req-0cc51e69-3302-4abd-93dc-2838ebbb336c tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] Waiting for the task: (returnval){ [ 2059.741262] env[62519]: value = "task-1803536" [ 2059.741262] env[62519]: _type = "Task" [ 2059.741262] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2059.749171] env[62519]: DEBUG oslo_vmware.api [None req-0cc51e69-3302-4abd-93dc-2838ebbb336c tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] Task: {'id': task-1803536, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2059.754090] env[62519]: DEBUG oslo_concurrency.lockutils [None req-bff2151d-2c21-4228-b239-a4f065fe89c9 tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.693s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2059.761615] env[62519]: DEBUG oslo_service.periodic_task [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62519) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2059.761864] env[62519]: DEBUG oslo_service.periodic_task [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=62519) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2059.776374] env[62519]: INFO nova.scheduler.client.report [None req-bff2151d-2c21-4228-b239-a4f065fe89c9 tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Deleted allocations for instance c9399643-7d74-4064-a721-e6d038a5cef0 [ 2059.782881] env[62519]: DEBUG oslo_vmware.api [None req-86ba8852-eb51-46ae-b862-04cc85e62dfe tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Task: {'id': task-1803533, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.531534} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2059.783328] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-86ba8852-eb51-46ae-b862-04cc85e62dfe tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk to [datastore1] 812842c2-ac72-4d12-b2f7-3ccfe77a13a7/812842c2-ac72-4d12-b2f7-3ccfe77a13a7.vmdk {{(pid=62519) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2059.783548] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-86ba8852-eb51-46ae-b862-04cc85e62dfe tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] [instance: 812842c2-ac72-4d12-b2f7-3ccfe77a13a7] Extending root virtual disk to 1048576 {{(pid=62519) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2059.783785] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-1dd1c879-55b7-4e8d-970a-b42694583efc {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2059.793196] env[62519]: DEBUG oslo_vmware.api [None req-86ba8852-eb51-46ae-b862-04cc85e62dfe tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Waiting for the task: (returnval){ [ 2059.793196] env[62519]: value = "task-1803537" [ 2059.793196] env[62519]: _type = "Task" [ 2059.793196] env[62519]: } to complete. 
{{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2059.802646] env[62519]: DEBUG oslo_vmware.api [None req-86ba8852-eb51-46ae-b862-04cc85e62dfe tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Task: {'id': task-1803537, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2060.251662] env[62519]: DEBUG oslo_vmware.api [None req-0cc51e69-3302-4abd-93dc-2838ebbb336c tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] Task: {'id': task-1803536, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2060.287183] env[62519]: DEBUG oslo_concurrency.lockutils [None req-bff2151d-2c21-4228-b239-a4f065fe89c9 tempest-AttachVolumeNegativeTest-889445199 tempest-AttachVolumeNegativeTest-889445199-project-member] Lock "c9399643-7d74-4064-a721-e6d038a5cef0" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.955s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2060.303342] env[62519]: DEBUG oslo_vmware.api [None req-86ba8852-eb51-46ae-b862-04cc85e62dfe tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Task: {'id': task-1803537, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.069569} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2060.303474] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-86ba8852-eb51-46ae-b862-04cc85e62dfe tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] [instance: 812842c2-ac72-4d12-b2f7-3ccfe77a13a7] Extended root virtual disk {{(pid=62519) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2060.304427] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2380fa7f-6edf-4603-8ea7-5fe3f8472b89 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2060.326471] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-86ba8852-eb51-46ae-b862-04cc85e62dfe tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] [instance: 812842c2-ac72-4d12-b2f7-3ccfe77a13a7] Reconfiguring VM instance instance-0000007a to attach disk [datastore1] 812842c2-ac72-4d12-b2f7-3ccfe77a13a7/812842c2-ac72-4d12-b2f7-3ccfe77a13a7.vmdk or device None with type sparse {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2060.327028] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-28a60493-83b2-4f13-aec3-fc33129f7c89 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2060.346531] env[62519]: DEBUG oslo_vmware.api [None req-86ba8852-eb51-46ae-b862-04cc85e62dfe tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Waiting for the task: (returnval){ [ 2060.346531] env[62519]: value = "task-1803538" [ 2060.346531] env[62519]: _type = "Task" [ 2060.346531] env[62519]: } to complete. 
{{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2060.354229] env[62519]: DEBUG oslo_vmware.api [None req-86ba8852-eb51-46ae-b862-04cc85e62dfe tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Task: {'id': task-1803538, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2060.378687] env[62519]: DEBUG nova.compute.manager [req-c9c0a346-9640-4a31-ada7-b22d65ca0fc1 req-1fdf651d-874e-4f61-8a16-b0e6abc043e9 service nova] [instance: a18af9b7-4548-42d1-8459-508298cb96dc] Received event network-vif-plugged-bc92fa6c-1a84-46e1-b36f-1b5430ecbb4c {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 2060.378793] env[62519]: DEBUG oslo_concurrency.lockutils [req-c9c0a346-9640-4a31-ada7-b22d65ca0fc1 req-1fdf651d-874e-4f61-8a16-b0e6abc043e9 service nova] Acquiring lock "a18af9b7-4548-42d1-8459-508298cb96dc-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2060.379038] env[62519]: DEBUG oslo_concurrency.lockutils [req-c9c0a346-9640-4a31-ada7-b22d65ca0fc1 req-1fdf651d-874e-4f61-8a16-b0e6abc043e9 service nova] Lock "a18af9b7-4548-42d1-8459-508298cb96dc-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2060.379161] env[62519]: DEBUG oslo_concurrency.lockutils [req-c9c0a346-9640-4a31-ada7-b22d65ca0fc1 req-1fdf651d-874e-4f61-8a16-b0e6abc043e9 service nova] Lock "a18af9b7-4548-42d1-8459-508298cb96dc-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2060.379358] env[62519]: DEBUG nova.compute.manager [req-c9c0a346-9640-4a31-ada7-b22d65ca0fc1 req-1fdf651d-874e-4f61-8a16-b0e6abc043e9 service nova] [instance: a18af9b7-4548-42d1-8459-508298cb96dc] No waiting events found dispatching network-vif-plugged-bc92fa6c-1a84-46e1-b36f-1b5430ecbb4c {{(pid=62519) pop_instance_event /opt/stack/nova/nova/compute/manager.py:323}} [ 2060.379541] env[62519]: WARNING nova.compute.manager [req-c9c0a346-9640-4a31-ada7-b22d65ca0fc1 req-1fdf651d-874e-4f61-8a16-b0e6abc043e9 service nova] [instance: a18af9b7-4548-42d1-8459-508298cb96dc] Received unexpected event network-vif-plugged-bc92fa6c-1a84-46e1-b36f-1b5430ecbb4c for instance with vm_state active and task_state None. [ 2060.463723] env[62519]: DEBUG nova.network.neutron [None req-787adc58-befb-49eb-9389-cd027f3c17f7 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] [instance: a18af9b7-4548-42d1-8459-508298cb96dc] Successfully updated port: bc92fa6c-1a84-46e1-b36f-1b5430ecbb4c {{(pid=62519) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2060.752504] env[62519]: DEBUG oslo_vmware.api [None req-0cc51e69-3302-4abd-93dc-2838ebbb336c tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] Task: {'id': task-1803536, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.557684} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2060.752779] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-0cc51e69-3302-4abd-93dc-2838ebbb336c tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] Deleted the datastore file {{(pid=62519) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2060.752985] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-0cc51e69-3302-4abd-93dc-2838ebbb336c tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] [instance: d921e3c0-cc52-4c5a-90bc-4059ba499ff1] Deleted contents of the VM from datastore datastore1 {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2060.753222] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-0cc51e69-3302-4abd-93dc-2838ebbb336c tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] [instance: d921e3c0-cc52-4c5a-90bc-4059ba499ff1] Instance destroyed {{(pid=62519) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2060.753435] env[62519]: INFO nova.compute.manager [None req-0cc51e69-3302-4abd-93dc-2838ebbb336c tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] [instance: d921e3c0-cc52-4c5a-90bc-4059ba499ff1] Took 1.76 seconds to destroy the instance on the hypervisor. [ 2060.753707] env[62519]: DEBUG oslo.service.loopingcall [None req-0cc51e69-3302-4abd-93dc-2838ebbb336c tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62519) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2060.753912] env[62519]: DEBUG nova.compute.manager [-] [instance: d921e3c0-cc52-4c5a-90bc-4059ba499ff1] Deallocating network for instance {{(pid=62519) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2297}} [ 2060.754011] env[62519]: DEBUG nova.network.neutron [-] [instance: d921e3c0-cc52-4c5a-90bc-4059ba499ff1] deallocate_for_instance() {{(pid=62519) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2060.857226] env[62519]: DEBUG oslo_vmware.api [None req-86ba8852-eb51-46ae-b862-04cc85e62dfe tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Task: {'id': task-1803538, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2060.966353] env[62519]: DEBUG oslo_concurrency.lockutils [None req-787adc58-befb-49eb-9389-cd027f3c17f7 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Acquiring lock "refresh_cache-a18af9b7-4548-42d1-8459-508298cb96dc" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2060.966672] env[62519]: DEBUG oslo_concurrency.lockutils [None req-787adc58-befb-49eb-9389-cd027f3c17f7 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Acquired lock "refresh_cache-a18af9b7-4548-42d1-8459-508298cb96dc" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2060.966978] env[62519]: DEBUG nova.network.neutron [None req-787adc58-befb-49eb-9389-cd027f3c17f7 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] [instance: a18af9b7-4548-42d1-8459-508298cb96dc] Building network info cache for instance {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2061.357776] env[62519]: DEBUG oslo_vmware.api [None req-86ba8852-eb51-46ae-b862-04cc85e62dfe tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Task: {'id': task-1803538, 'name': ReconfigVM_Task, 'duration_secs': 0.669221} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2061.358121] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-86ba8852-eb51-46ae-b862-04cc85e62dfe tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] [instance: 812842c2-ac72-4d12-b2f7-3ccfe77a13a7] Reconfigured VM instance instance-0000007a to attach disk [datastore1] 812842c2-ac72-4d12-b2f7-3ccfe77a13a7/812842c2-ac72-4d12-b2f7-3ccfe77a13a7.vmdk or device None with type sparse {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2061.358767] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-015ca097-651d-4d79-a4f4-b6a03ba7131e {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2061.365582] env[62519]: DEBUG oslo_vmware.api [None req-86ba8852-eb51-46ae-b862-04cc85e62dfe tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Waiting for the task: (returnval){ [ 2061.365582] env[62519]: value = "task-1803540" [ 2061.365582] env[62519]: _type = "Task" [ 2061.365582] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2061.373739] env[62519]: DEBUG oslo_vmware.api [None req-86ba8852-eb51-46ae-b862-04cc85e62dfe tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Task: {'id': task-1803540, 'name': Rename_Task} progress is 5%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2061.515312] env[62519]: WARNING nova.network.neutron [None req-787adc58-befb-49eb-9389-cd027f3c17f7 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] [instance: a18af9b7-4548-42d1-8459-508298cb96dc] 3996e7f6-f093-4152-af91-6fb77f32a1c5 already exists in list: networks containing: ['3996e7f6-f093-4152-af91-6fb77f32a1c5']. ignoring it [ 2061.527748] env[62519]: DEBUG nova.network.neutron [-] [instance: d921e3c0-cc52-4c5a-90bc-4059ba499ff1] Updating instance_info_cache with network_info: [] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2061.778940] env[62519]: DEBUG nova.network.neutron [None req-787adc58-befb-49eb-9389-cd027f3c17f7 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] [instance: a18af9b7-4548-42d1-8459-508298cb96dc] Updating instance_info_cache with network_info: [{"id": "7b3a7ba6-9ee9-40fb-94c8-f7593ffc1ca2", "address": "fa:16:3e:4a:4f:32", "network": {"id": "3996e7f6-f093-4152-af91-6fb77f32a1c5", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-2013009403-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.142", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "63a46158057949478e5c79fbe0d4d5d4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3093647a-bab7-4562-ada0-428725e8c0fc", "external-id": "nsx-vlan-transportzone-660", "segmentation_id": 660, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7b3a7ba6-9e", "ovs_interfaceid": "7b3a7ba6-9ee9-40fb-94c8-f7593ffc1ca2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "bc92fa6c-1a84-46e1-b36f-1b5430ecbb4c", "address": "fa:16:3e:ec:b1:50", "network": {"id": "3996e7f6-f093-4152-af91-6fb77f32a1c5", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-2013009403-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "63a46158057949478e5c79fbe0d4d5d4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3093647a-bab7-4562-ada0-428725e8c0fc", "external-id": "nsx-vlan-transportzone-660", "segmentation_id": 660, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbc92fa6c-1a", "ovs_interfaceid": "bc92fa6c-1a84-46e1-b36f-1b5430ecbb4c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info 
/opt/stack/nova/nova/network/neutron.py:116}} [ 2061.875242] env[62519]: DEBUG oslo_vmware.api [None req-86ba8852-eb51-46ae-b862-04cc85e62dfe tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Task: {'id': task-1803540, 'name': Rename_Task, 'duration_secs': 0.138696} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2061.875511] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-86ba8852-eb51-46ae-b862-04cc85e62dfe tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] [instance: 812842c2-ac72-4d12-b2f7-3ccfe77a13a7] Powering on the VM {{(pid=62519) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2061.875749] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-85cdabf8-1f37-4d55-84fc-910ce4e110ad {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2061.881893] env[62519]: DEBUG oslo_vmware.api [None req-86ba8852-eb51-46ae-b862-04cc85e62dfe tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Waiting for the task: (returnval){ [ 2061.881893] env[62519]: value = "task-1803541" [ 2061.881893] env[62519]: _type = "Task" [ 2061.881893] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2061.889227] env[62519]: DEBUG oslo_vmware.api [None req-86ba8852-eb51-46ae-b862-04cc85e62dfe tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Task: {'id': task-1803541, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2062.030533] env[62519]: INFO nova.compute.manager [-] [instance: d921e3c0-cc52-4c5a-90bc-4059ba499ff1] Took 1.28 seconds to deallocate network for instance. 
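The records above repeat one pattern over and over: the driver invokes a vSphere `*_Task` method (CopyVirtualDisk_Task, PowerOffVM_Task, DeleteDatastoreFile_Task, ExtendVirtualDisk_Task, ReconfigVM_Task, PowerOnVM_Task), then oslo.vmware polls the returned task object until it reports success, producing the "progress is N%" / "completed successfully" pairs. The sketch below is only an illustration of that poll loop, not Nova's or oslo.vmware's actual code; `fetch_task_info` and the `TaskInfo` type are hypothetical stand-ins for the real PropertyCollector reads of the task state.

```python
import time
from dataclasses import dataclass
from typing import Callable, Optional


@dataclass
class TaskInfo:
    # Hypothetical mirror of the vSphere TaskInfo fields surfaced in the log:
    # state is one of "queued", "running", "success", "error".
    state: str
    progress: int = 0
    error: Optional[str] = None


def wait_for_task(fetch_task_info: Callable[[], TaskInfo],
                  poll_interval: float = 0.5,
                  timeout: float = 300.0) -> TaskInfo:
    """Poll a vSphere-style task until it finishes, mirroring the
    'progress is N%' ... 'completed successfully' pairs in the log."""
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        info = fetch_task_info()              # e.g. task-1803533 above
        if info.state == "success":
            return info                       # -> "completed successfully"
        if info.state == "error":
            raise RuntimeError(f"task failed: {info.error}")
        # Still queued or running: report progress and poll again.
        print(f"progress is {info.progress}%")
        time.sleep(poll_interval)
    raise TimeoutError("task did not complete before the timeout")
```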
[ 2062.282025] env[62519]: DEBUG oslo_concurrency.lockutils [None req-787adc58-befb-49eb-9389-cd027f3c17f7 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Releasing lock "refresh_cache-a18af9b7-4548-42d1-8459-508298cb96dc" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2062.282780] env[62519]: DEBUG oslo_concurrency.lockutils [None req-787adc58-befb-49eb-9389-cd027f3c17f7 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Acquiring lock "a18af9b7-4548-42d1-8459-508298cb96dc" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2062.282940] env[62519]: DEBUG oslo_concurrency.lockutils [None req-787adc58-befb-49eb-9389-cd027f3c17f7 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Acquired lock "a18af9b7-4548-42d1-8459-508298cb96dc" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2062.283794] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4ed8c19-79da-421c-8cda-7fe411391b5a {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2062.302508] env[62519]: DEBUG nova.virt.hardware [None req-787adc58-befb-49eb-9389-cd027f3c17f7 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T07:56:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=,min_ram=,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 2062.302776] env[62519]: DEBUG nova.virt.hardware [None req-787adc58-befb-49eb-9389-cd027f3c17f7 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Flavor limits 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2062.302884] env[62519]: DEBUG nova.virt.hardware [None req-787adc58-befb-49eb-9389-cd027f3c17f7 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Image limits 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 2062.303073] env[62519]: DEBUG nova.virt.hardware [None req-787adc58-befb-49eb-9389-cd027f3c17f7 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Flavor pref 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2062.303220] env[62519]: DEBUG nova.virt.hardware [None req-787adc58-befb-49eb-9389-cd027f3c17f7 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Image pref 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 2062.303364] env[62519]: DEBUG nova.virt.hardware 
[None req-787adc58-befb-49eb-9389-cd027f3c17f7 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 2062.303568] env[62519]: DEBUG nova.virt.hardware [None req-787adc58-befb-49eb-9389-cd027f3c17f7 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 2062.303751] env[62519]: DEBUG nova.virt.hardware [None req-787adc58-befb-49eb-9389-cd027f3c17f7 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62519) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 2062.303928] env[62519]: DEBUG nova.virt.hardware [None req-787adc58-befb-49eb-9389-cd027f3c17f7 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Got 1 possible topologies {{(pid=62519) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 2062.304103] env[62519]: DEBUG nova.virt.hardware [None req-787adc58-befb-49eb-9389-cd027f3c17f7 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 2062.304278] env[62519]: DEBUG nova.virt.hardware [None req-787adc58-befb-49eb-9389-cd027f3c17f7 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 2062.310420] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-787adc58-befb-49eb-9389-cd027f3c17f7 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] [instance: a18af9b7-4548-42d1-8459-508298cb96dc] Reconfiguring VM to attach interface {{(pid=62519) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1923}} [ 2062.310697] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2c6f4365-512d-4e7f-98a2-9e43c2c9e5f6 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2062.328136] env[62519]: DEBUG oslo_vmware.api [None req-787adc58-befb-49eb-9389-cd027f3c17f7 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Waiting for the task: (returnval){ [ 2062.328136] env[62519]: value = "task-1803542" [ 2062.328136] env[62519]: _type = "Task" [ 2062.328136] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2062.335610] env[62519]: DEBUG oslo_vmware.api [None req-787adc58-befb-49eb-9389-cd027f3c17f7 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Task: {'id': task-1803542, 'name': ReconfigVM_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2062.391729] env[62519]: DEBUG oslo_vmware.api [None req-86ba8852-eb51-46ae-b862-04cc85e62dfe tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Task: {'id': task-1803541, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2062.446566] env[62519]: DEBUG nova.compute.manager [req-ebbf5053-fa8a-4b7b-960b-886584ecc8a5 req-e081dbf7-5bbd-4397-9a23-aa02ea702d3b service nova] [instance: a18af9b7-4548-42d1-8459-508298cb96dc] Received event network-changed-bc92fa6c-1a84-46e1-b36f-1b5430ecbb4c {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 2062.446730] env[62519]: DEBUG nova.compute.manager [req-ebbf5053-fa8a-4b7b-960b-886584ecc8a5 req-e081dbf7-5bbd-4397-9a23-aa02ea702d3b service nova] [instance: a18af9b7-4548-42d1-8459-508298cb96dc] Refreshing instance network info cache due to event network-changed-bc92fa6c-1a84-46e1-b36f-1b5430ecbb4c. {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11628}} [ 2062.446933] env[62519]: DEBUG oslo_concurrency.lockutils [req-ebbf5053-fa8a-4b7b-960b-886584ecc8a5 req-e081dbf7-5bbd-4397-9a23-aa02ea702d3b service nova] Acquiring lock "refresh_cache-a18af9b7-4548-42d1-8459-508298cb96dc" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2062.447168] env[62519]: DEBUG oslo_concurrency.lockutils [req-ebbf5053-fa8a-4b7b-960b-886584ecc8a5 req-e081dbf7-5bbd-4397-9a23-aa02ea702d3b service nova] Acquired lock "refresh_cache-a18af9b7-4548-42d1-8459-508298cb96dc" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2062.447336] env[62519]: DEBUG nova.network.neutron [req-ebbf5053-fa8a-4b7b-960b-886584ecc8a5 req-e081dbf7-5bbd-4397-9a23-aa02ea702d3b service nova] [instance: a18af9b7-4548-42d1-8459-508298cb96dc] Refreshing network info cache for port bc92fa6c-1a84-46e1-b36f-1b5430ecbb4c {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2062.537576] env[62519]: DEBUG oslo_concurrency.lockutils [None req-0cc51e69-3302-4abd-93dc-2838ebbb336c tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2062.537788] env[62519]: DEBUG oslo_concurrency.lockutils [None req-0cc51e69-3302-4abd-93dc-2838ebbb336c tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2062.538011] env[62519]: DEBUG nova.objects.instance [None req-0cc51e69-3302-4abd-93dc-2838ebbb336c tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] Lazy-loading 'resources' on Instance uuid d921e3c0-cc52-4c5a-90bc-4059ba499ff1 {{(pid=62519) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2062.839217] env[62519]: DEBUG oslo_vmware.api [None req-787adc58-befb-49eb-9389-cd027f3c17f7 tempest-AttachInterfacesTestJSON-1996275057 
tempest-AttachInterfacesTestJSON-1996275057-project-member] Task: {'id': task-1803542, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2062.892413] env[62519]: DEBUG oslo_vmware.api [None req-86ba8852-eb51-46ae-b862-04cc85e62dfe tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Task: {'id': task-1803541, 'name': PowerOnVM_Task} progress is 99%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2063.144123] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb5392bb-5cce-403d-ac1c-c96a40b8d7de {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2063.151951] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d645f04-c705-4fe3-bb9c-14ee8c578650 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2063.183517] env[62519]: DEBUG nova.network.neutron [req-ebbf5053-fa8a-4b7b-960b-886584ecc8a5 req-e081dbf7-5bbd-4397-9a23-aa02ea702d3b service nova] [instance: a18af9b7-4548-42d1-8459-508298cb96dc] Updated VIF entry in instance network info cache for port bc92fa6c-1a84-46e1-b36f-1b5430ecbb4c. {{(pid=62519) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2063.183909] env[62519]: DEBUG nova.network.neutron [req-ebbf5053-fa8a-4b7b-960b-886584ecc8a5 req-e081dbf7-5bbd-4397-9a23-aa02ea702d3b service nova] [instance: a18af9b7-4548-42d1-8459-508298cb96dc] Updating instance_info_cache with network_info: [{"id": "7b3a7ba6-9ee9-40fb-94c8-f7593ffc1ca2", "address": "fa:16:3e:4a:4f:32", "network": {"id": "3996e7f6-f093-4152-af91-6fb77f32a1c5", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-2013009403-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.142", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "63a46158057949478e5c79fbe0d4d5d4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3093647a-bab7-4562-ada0-428725e8c0fc", "external-id": "nsx-vlan-transportzone-660", "segmentation_id": 660, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7b3a7ba6-9e", "ovs_interfaceid": "7b3a7ba6-9ee9-40fb-94c8-f7593ffc1ca2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "bc92fa6c-1a84-46e1-b36f-1b5430ecbb4c", "address": "fa:16:3e:ec:b1:50", "network": {"id": "3996e7f6-f093-4152-af91-6fb77f32a1c5", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-2013009403-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, 
"dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "63a46158057949478e5c79fbe0d4d5d4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3093647a-bab7-4562-ada0-428725e8c0fc", "external-id": "nsx-vlan-transportzone-660", "segmentation_id": 660, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbc92fa6c-1a", "ovs_interfaceid": "bc92fa6c-1a84-46e1-b36f-1b5430ecbb4c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2063.185628] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8cf2669-1ecb-446a-8b3d-3f9e616bbc88 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2063.193477] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6791db88-d552-4b1e-b326-9cb8ab5c92e0 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2063.207547] env[62519]: DEBUG nova.compute.provider_tree [None req-0cc51e69-3302-4abd-93dc-2838ebbb336c tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] Updating inventory in ProviderTree for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 159, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2063.264144] env[62519]: DEBUG oslo_service.periodic_task [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62519) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2063.338968] env[62519]: DEBUG oslo_vmware.api [None req-787adc58-befb-49eb-9389-cd027f3c17f7 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Task: {'id': task-1803542, 'name': ReconfigVM_Task, 'duration_secs': 0.543694} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2063.339451] env[62519]: DEBUG oslo_concurrency.lockutils [None req-787adc58-befb-49eb-9389-cd027f3c17f7 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Releasing lock "a18af9b7-4548-42d1-8459-508298cb96dc" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2063.339685] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-787adc58-befb-49eb-9389-cd027f3c17f7 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] [instance: a18af9b7-4548-42d1-8459-508298cb96dc] Reconfigured VM to attach interface {{(pid=62519) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1937}} [ 2063.394050] env[62519]: DEBUG oslo_vmware.api [None req-86ba8852-eb51-46ae-b862-04cc85e62dfe tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Task: {'id': task-1803541, 'name': PowerOnVM_Task, 'duration_secs': 1.075548} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2063.394050] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-86ba8852-eb51-46ae-b862-04cc85e62dfe tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] [instance: 812842c2-ac72-4d12-b2f7-3ccfe77a13a7] Powered on the VM {{(pid=62519) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2063.394050] env[62519]: INFO nova.compute.manager [None req-86ba8852-eb51-46ae-b862-04cc85e62dfe tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] [instance: 812842c2-ac72-4d12-b2f7-3ccfe77a13a7] Took 8.96 seconds to spawn the instance on the hypervisor. 
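Taken together, tasks task-1803533 through task-1803541 trace the VMware spawn path for instance 812842c2: copy the cached image VMDK into the instance directory, extend the root disk to the flavor size, reconfigure the VM to attach the disk, rename, and power on, after which the compute manager reports the spawn time. The outline below is a simplified sketch of that sequence, assuming a session object whose calls start a vSphere task and whose `wait_for_task` polls it; the helper names and the `VSphereSession` protocol are hypothetical, not Nova's vmops API.

```python
from typing import Protocol


class VSphereSession(Protocol):
    # Assumed interface: each call starts a vSphere *_Task and returns a
    # handle that wait_for_task() polls to completion (see the sketch above).
    def copy_virtual_disk(self, src: str, dst: str): ...
    def extend_virtual_disk(self, path: str, size_kb: int): ...
    def attach_disk(self, vm: str, path: str): ...
    def rename_vm(self, vm: str): ...
    def power_on_vm(self, vm: str): ...
    def wait_for_task(self, task) -> None: ...


def spawn_from_image_cache(session: VSphereSession, datastore: str,
                           image_id: str, instance_uuid: str,
                           root_gb: int) -> None:
    cache_vmdk = (f"[{datastore}] devstack-image-cache_base/"
                  f"{image_id}/{image_id}.vmdk")
    inst_vmdk = f"[{datastore}] {instance_uuid}/{instance_uuid}.vmdk"

    # CopyVirtualDisk_Task: clone the cached image into the instance folder.
    session.wait_for_task(session.copy_virtual_disk(cache_vmdk, inst_vmdk))
    # ExtendVirtualDisk_Task: grow the root disk to the flavor's root_gb
    # (1 GiB = 1048576 KiB, matching the extend target logged above).
    session.wait_for_task(
        session.extend_virtual_disk(inst_vmdk, size_kb=root_gb * 1024 * 1024))
    # ReconfigVM_Task: attach the copied disk to the VM.
    session.wait_for_task(session.attach_disk(instance_uuid, inst_vmdk))
    # Rename_Task, then PowerOnVM_Task; once power-on completes the compute
    # manager logs "Took N seconds to spawn the instance on the hypervisor."
    session.wait_for_task(session.rename_vm(instance_uuid))
    session.wait_for_task(session.power_on_vm(instance_uuid))
```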
[ 2063.394050] env[62519]: DEBUG nova.compute.manager [None req-86ba8852-eb51-46ae-b862-04cc85e62dfe tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] [instance: 812842c2-ac72-4d12-b2f7-3ccfe77a13a7] Checking state {{(pid=62519) _get_power_state /opt/stack/nova/nova/compute/manager.py:1799}} [ 2063.394755] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22543e6d-dd51-420f-845a-2f5e39b7842f {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2063.689326] env[62519]: DEBUG oslo_concurrency.lockutils [req-ebbf5053-fa8a-4b7b-960b-886584ecc8a5 req-e081dbf7-5bbd-4397-9a23-aa02ea702d3b service nova] Releasing lock "refresh_cache-a18af9b7-4548-42d1-8459-508298cb96dc" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2063.689775] env[62519]: DEBUG nova.compute.manager [req-ebbf5053-fa8a-4b7b-960b-886584ecc8a5 req-e081dbf7-5bbd-4397-9a23-aa02ea702d3b service nova] [instance: d921e3c0-cc52-4c5a-90bc-4059ba499ff1] Received event network-vif-deleted-c0b67f8d-3fcb-4162-bf30-5b5b4ce5f5a6 {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 2063.728653] env[62519]: ERROR nova.scheduler.client.report [None req-0cc51e69-3302-4abd-93dc-2838ebbb336c tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] [req-fcca8819-74f4-4e8c-b4fc-37f745991628] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 159, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID f8ca0d98-9158-4b85-ae0e-b106f966dd44. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-fcca8819-74f4-4e8c-b4fc-37f745991628"}]} [ 2063.744954] env[62519]: DEBUG nova.scheduler.client.report [None req-0cc51e69-3302-4abd-93dc-2838ebbb336c tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] Refreshing inventories for resource provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 {{(pid=62519) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 2063.759071] env[62519]: DEBUG nova.scheduler.client.report [None req-0cc51e69-3302-4abd-93dc-2838ebbb336c tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] Updating ProviderTree inventory for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 2063.759306] env[62519]: DEBUG nova.compute.provider_tree [None req-0cc51e69-3302-4abd-93dc-2838ebbb336c tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] Updating inventory in ProviderTree for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 158, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2063.761639] env[62519]: DEBUG oslo_service.periodic_task [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62519) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2063.762063] env[62519]: DEBUG oslo_service.periodic_task [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Running periodic task ComputeManager.update_available_resource {{(pid=62519) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2063.772122] env[62519]: DEBUG nova.scheduler.client.report [None req-0cc51e69-3302-4abd-93dc-2838ebbb336c tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] Refreshing aggregate associations for resource provider f8ca0d98-9158-4b85-ae0e-b106f966dd44, aggregates: None {{(pid=62519) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 2063.790674] env[62519]: DEBUG nova.scheduler.client.report [None req-0cc51e69-3302-4abd-93dc-2838ebbb336c tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] Refreshing trait associations for resource provider f8ca0d98-9158-4b85-ae0e-b106f966dd44, traits: 
COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NODE,COMPUTE_SAME_HOST_COLD_MIGRATE,HW_ARCH_X86_64,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_VMDK {{(pid=62519) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 2063.843718] env[62519]: DEBUG oslo_concurrency.lockutils [None req-787adc58-befb-49eb-9389-cd027f3c17f7 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Lock "interface-a18af9b7-4548-42d1-8459-508298cb96dc-bc92fa6c-1a84-46e1-b36f-1b5430ecbb4c" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 6.058s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2063.890824] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa6a2f25-3e3c-4225-aafe-588aa80f7c39 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2063.898588] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-119784c2-989c-499e-ba98-19b935b3700a {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2063.936021] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-275ed7c2-cc29-480e-9bc5-946d5916d504 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2063.939272] env[62519]: INFO nova.compute.manager [None req-86ba8852-eb51-46ae-b862-04cc85e62dfe tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] [instance: 812842c2-ac72-4d12-b2f7-3ccfe77a13a7] Took 13.76 seconds to build instance. 
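The 409 "placement.concurrent_update" error above is Placement's generation-based optimistic concurrency check: the inventory PUT carried a stale resource provider generation, so the report client refreshes its view of provider f8ca0d98 and retries (the retry succeeds a few records later, moving the generation from 179 to 180). The snippet below is a generic, simplified sketch of that refresh-and-retry pattern against the Placement HTTP API, not Nova's report client; the endpoint/auth handling is assumed and the microversion header is illustrative only.

```python
import requests


def put_inventory_with_retry(placement_url: str, token: str,
                             provider_uuid: str, inventories: dict,
                             max_retries: int = 3) -> dict:
    headers = {
        "X-Auth-Token": token,
        # Microversion chosen for illustration only.
        "OpenStack-API-Version": "placement 1.26",
    }
    base = f"{placement_url}/resource_providers/{provider_uuid}"
    for _ in range(max_retries):
        # Read the provider's current generation before writing.
        generation = requests.get(base, headers=headers).json()["generation"]
        resp = requests.put(
            f"{base}/inventories",
            headers=headers,
            json={"resource_provider_generation": generation,
                  "inventories": inventories},
        )
        if resp.status_code == 200:
            return resp.json()  # includes the bumped provider generation
        if resp.status_code != 409:
            resp.raise_for_status()
        # 409 placement.concurrent_update: another writer bumped the
        # generation first, so loop, refresh, and try again.
    raise RuntimeError("inventory update kept conflicting; giving up")
```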
[ 2063.945677] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-428848d0-0610-4a99-9f92-633b68350811 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2063.959900] env[62519]: DEBUG nova.compute.provider_tree [None req-0cc51e69-3302-4abd-93dc-2838ebbb336c tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] Updating inventory in ProviderTree for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 159, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2064.265835] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2064.506746] env[62519]: DEBUG nova.scheduler.client.report [None req-0cc51e69-3302-4abd-93dc-2838ebbb336c tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] Updated inventory for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 with generation 179 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 159, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 2064.507099] env[62519]: DEBUG nova.compute.provider_tree [None req-0cc51e69-3302-4abd-93dc-2838ebbb336c tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] Updating resource provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 generation from 179 to 180 during operation: update_inventory {{(pid=62519) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 2064.507292] env[62519]: DEBUG nova.compute.provider_tree [None req-0cc51e69-3302-4abd-93dc-2838ebbb336c tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] Updating inventory in ProviderTree for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 159, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2064.625210] env[62519]: DEBUG nova.compute.manager [req-c613809c-f605-41ed-b8d7-cf4c4a01e048 req-c3855b7a-eefc-48c9-84ac-0c71e09ed2d4 service nova] [instance: 812842c2-ac72-4d12-b2f7-3ccfe77a13a7] Received event 
network-changed-16e1aad6-277c-4fb9-95ca-00a66fecb425 {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 2064.625396] env[62519]: DEBUG nova.compute.manager [req-c613809c-f605-41ed-b8d7-cf4c4a01e048 req-c3855b7a-eefc-48c9-84ac-0c71e09ed2d4 service nova] [instance: 812842c2-ac72-4d12-b2f7-3ccfe77a13a7] Refreshing instance network info cache due to event network-changed-16e1aad6-277c-4fb9-95ca-00a66fecb425. {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11628}} [ 2064.625603] env[62519]: DEBUG oslo_concurrency.lockutils [req-c613809c-f605-41ed-b8d7-cf4c4a01e048 req-c3855b7a-eefc-48c9-84ac-0c71e09ed2d4 service nova] Acquiring lock "refresh_cache-812842c2-ac72-4d12-b2f7-3ccfe77a13a7" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2064.625745] env[62519]: DEBUG oslo_concurrency.lockutils [req-c613809c-f605-41ed-b8d7-cf4c4a01e048 req-c3855b7a-eefc-48c9-84ac-0c71e09ed2d4 service nova] Acquired lock "refresh_cache-812842c2-ac72-4d12-b2f7-3ccfe77a13a7" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2064.625901] env[62519]: DEBUG nova.network.neutron [req-c613809c-f605-41ed-b8d7-cf4c4a01e048 req-c3855b7a-eefc-48c9-84ac-0c71e09ed2d4 service nova] [instance: 812842c2-ac72-4d12-b2f7-3ccfe77a13a7] Refreshing network info cache for port 16e1aad6-277c-4fb9-95ca-00a66fecb425 {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2065.013091] env[62519]: DEBUG oslo_concurrency.lockutils [None req-0cc51e69-3302-4abd-93dc-2838ebbb336c tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.474s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2065.014703] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.749s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2065.014900] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2065.015070] env[62519]: DEBUG nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62519) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2065.015945] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2379702-7f7b-4fb7-8908-018d6606a34a {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2065.025169] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a11dc0e3-a613-425f-a281-258d1483ad8c {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 
2065.041369] env[62519]: INFO nova.scheduler.client.report [None req-0cc51e69-3302-4abd-93dc-2838ebbb336c tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] Deleted allocations for instance d921e3c0-cc52-4c5a-90bc-4059ba499ff1 [ 2065.042815] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e3dc0e9-9c47-4472-af65-1f949ac9df6c {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2065.051680] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-689d521c-1493-48f8-b17a-1f6e09879903 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2065.082888] env[62519]: DEBUG nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181126MB free_disk=159GB free_vcpus=48 pci_devices=None {{(pid=62519) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2065.083060] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2065.083241] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2065.332882] env[62519]: DEBUG nova.network.neutron [req-c613809c-f605-41ed-b8d7-cf4c4a01e048 req-c3855b7a-eefc-48c9-84ac-0c71e09ed2d4 service nova] [instance: 812842c2-ac72-4d12-b2f7-3ccfe77a13a7] Updated VIF entry in instance network info cache for port 16e1aad6-277c-4fb9-95ca-00a66fecb425. 
{{(pid=62519) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2065.333264] env[62519]: DEBUG nova.network.neutron [req-c613809c-f605-41ed-b8d7-cf4c4a01e048 req-c3855b7a-eefc-48c9-84ac-0c71e09ed2d4 service nova] [instance: 812842c2-ac72-4d12-b2f7-3ccfe77a13a7] Updating instance_info_cache with network_info: [{"id": "16e1aad6-277c-4fb9-95ca-00a66fecb425", "address": "fa:16:3e:4e:44:c5", "network": {"id": "32c7792c-9705-4830-bdbc-4c4159d566f0", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-374750788-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.241", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d73e71476254453fb23164dce09c6d41", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a0a76279-3c11-4bef-b124-2a2ee13fa377", "external-id": "nsx-vlan-transportzone-738", "segmentation_id": 738, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap16e1aad6-27", "ovs_interfaceid": "16e1aad6-277c-4fb9-95ca-00a66fecb425", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2065.446871] env[62519]: DEBUG oslo_concurrency.lockutils [None req-86ba8852-eb51-46ae-b862-04cc85e62dfe tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Lock "812842c2-ac72-4d12-b2f7-3ccfe77a13a7" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 16.271s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2065.552084] env[62519]: DEBUG oslo_concurrency.lockutils [None req-0cc51e69-3302-4abd-93dc-2838ebbb336c tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] Lock "d921e3c0-cc52-4c5a-90bc-4059ba499ff1" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 7.061s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2065.836057] env[62519]: DEBUG oslo_concurrency.lockutils [req-c613809c-f605-41ed-b8d7-cf4c4a01e048 req-c3855b7a-eefc-48c9-84ac-0c71e09ed2d4 service nova] Releasing lock "refresh_cache-812842c2-ac72-4d12-b2f7-3ccfe77a13a7" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2066.114978] env[62519]: DEBUG nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Instance 90cdb4a1-6192-42e8-ad30-90faeefa4d18 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62519) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2066.115193] env[62519]: DEBUG nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Instance a18af9b7-4548-42d1-8459-508298cb96dc actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62519) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2066.115332] env[62519]: DEBUG nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Instance 4049de7f-4ace-4017-8f9a-63817de5f81c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62519) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2066.115482] env[62519]: DEBUG nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Instance 47439070-54d8-454c-bf1d-7a2a33d82e9a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62519) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2066.115608] env[62519]: DEBUG nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Instance 39c292a3-7032-4845-9ae5-2a41d13db305 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62519) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2066.115756] env[62519]: DEBUG nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Instance 812842c2-ac72-4d12-b2f7-3ccfe77a13a7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62519) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2066.115991] env[62519]: DEBUG nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Total usable vcpus: 48, total allocated vcpus: 6 {{(pid=62519) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2066.116163] env[62519]: DEBUG nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1664MB phys_disk=200GB used_disk=6GB total_vcpus=48 used_vcpus=6 pci_stats=[] {{(pid=62519) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2066.177898] env[62519]: DEBUG oslo_concurrency.lockutils [None req-ea93d9ce-51f0-4a58-b086-5e344fb73359 tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] Acquiring lock "90cdb4a1-6192-42e8-ad30-90faeefa4d18" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2066.178169] env[62519]: DEBUG oslo_concurrency.lockutils [None req-ea93d9ce-51f0-4a58-b086-5e344fb73359 tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] Lock "90cdb4a1-6192-42e8-ad30-90faeefa4d18" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2066.178374] env[62519]: DEBUG oslo_concurrency.lockutils [None req-ea93d9ce-51f0-4a58-b086-5e344fb73359 tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] Acquiring lock "90cdb4a1-6192-42e8-ad30-90faeefa4d18-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2066.178560] env[62519]: DEBUG oslo_concurrency.lockutils [None req-ea93d9ce-51f0-4a58-b086-5e344fb73359 tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] Lock "90cdb4a1-6192-42e8-ad30-90faeefa4d18-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2066.178717] env[62519]: DEBUG oslo_concurrency.lockutils [None req-ea93d9ce-51f0-4a58-b086-5e344fb73359 tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] Lock "90cdb4a1-6192-42e8-ad30-90faeefa4d18-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2066.180950] env[62519]: INFO nova.compute.manager [None req-ea93d9ce-51f0-4a58-b086-5e344fb73359 tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] [instance: 90cdb4a1-6192-42e8-ad30-90faeefa4d18] Terminating instance [ 2066.199934] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6564ed4d-a195-47ca-a4bf-f63531c23bb1 {{(pid=62519) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2066.209186] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba272dd6-931a-4f3b-a3be-aeafcf35450a {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2066.239996] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7fdf564-c0f5-44ee-a060-859e272f751e {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2066.247695] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-debdb8e4-cd56-4650-961b-d31bb366f9da {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2066.261976] env[62519]: DEBUG nova.compute.provider_tree [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Inventory has not changed in ProviderTree for provider: f8ca0d98-9158-4b85-ae0e-b106f966dd44 {{(pid=62519) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2066.462611] env[62519]: DEBUG oslo_concurrency.lockutils [None req-3baa0a60-a5eb-4288-95a8-4a2e3fe20236 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Acquiring lock "interface-a18af9b7-4548-42d1-8459-508298cb96dc-bc92fa6c-1a84-46e1-b36f-1b5430ecbb4c" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2066.463044] env[62519]: DEBUG oslo_concurrency.lockutils [None req-3baa0a60-a5eb-4288-95a8-4a2e3fe20236 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Lock "interface-a18af9b7-4548-42d1-8459-508298cb96dc-bc92fa6c-1a84-46e1-b36f-1b5430ecbb4c" acquired by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: waited 0.001s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2066.684317] env[62519]: DEBUG nova.compute.manager [None req-ea93d9ce-51f0-4a58-b086-5e344fb73359 tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] [instance: 90cdb4a1-6192-42e8-ad30-90faeefa4d18] Start destroying the instance on the hypervisor. 
{{(pid=62519) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3166}} [ 2066.684688] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-ea93d9ce-51f0-4a58-b086-5e344fb73359 tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] [instance: 90cdb4a1-6192-42e8-ad30-90faeefa4d18] Destroying instance {{(pid=62519) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2066.685427] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4588bdbc-2a85-4048-9bac-557c82421d36 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2066.693141] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-ea93d9ce-51f0-4a58-b086-5e344fb73359 tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] [instance: 90cdb4a1-6192-42e8-ad30-90faeefa4d18] Powering off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2066.693368] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-3b7d3e38-e821-46b0-933e-38b847cbe85a {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2066.700087] env[62519]: DEBUG oslo_vmware.api [None req-ea93d9ce-51f0-4a58-b086-5e344fb73359 tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] Waiting for the task: (returnval){ [ 2066.700087] env[62519]: value = "task-1803544" [ 2066.700087] env[62519]: _type = "Task" [ 2066.700087] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2066.707431] env[62519]: DEBUG oslo_vmware.api [None req-ea93d9ce-51f0-4a58-b086-5e344fb73359 tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] Task: {'id': task-1803544, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2066.766569] env[62519]: DEBUG nova.scheduler.client.report [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Inventory has not changed for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 159, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2066.966188] env[62519]: DEBUG oslo_concurrency.lockutils [None req-3baa0a60-a5eb-4288-95a8-4a2e3fe20236 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Acquiring lock "a18af9b7-4548-42d1-8459-508298cb96dc" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2066.966388] env[62519]: DEBUG oslo_concurrency.lockutils [None req-3baa0a60-a5eb-4288-95a8-4a2e3fe20236 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Acquired lock "a18af9b7-4548-42d1-8459-508298cb96dc" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2066.967749] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26fabc28-6349-4259-9e3f-69c22eec15ef {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2066.987193] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60abcfe8-f28c-4091-825e-5d602018d535 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2067.014576] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-3baa0a60-a5eb-4288-95a8-4a2e3fe20236 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] [instance: a18af9b7-4548-42d1-8459-508298cb96dc] Reconfiguring VM to detach interface {{(pid=62519) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1968}} [ 2067.014900] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a787d214-dc10-4ffd-b6e2-c092e38ba98b {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2067.034513] env[62519]: DEBUG oslo_vmware.api [None req-3baa0a60-a5eb-4288-95a8-4a2e3fe20236 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Waiting for the task: (returnval){ [ 2067.034513] env[62519]: value = "task-1803545" [ 2067.034513] env[62519]: _type = "Task" [ 2067.034513] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2067.043539] env[62519]: DEBUG oslo_vmware.api [None req-3baa0a60-a5eb-4288-95a8-4a2e3fe20236 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Task: {'id': task-1803545, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2067.210955] env[62519]: DEBUG oslo_vmware.api [None req-ea93d9ce-51f0-4a58-b086-5e344fb73359 tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] Task: {'id': task-1803544, 'name': PowerOffVM_Task, 'duration_secs': 0.198695} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2067.211186] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-ea93d9ce-51f0-4a58-b086-5e344fb73359 tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] [instance: 90cdb4a1-6192-42e8-ad30-90faeefa4d18] Powered off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2067.211359] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-ea93d9ce-51f0-4a58-b086-5e344fb73359 tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] [instance: 90cdb4a1-6192-42e8-ad30-90faeefa4d18] Unregistering the VM {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2067.211659] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-11939f85-9966-41ea-84b4-06f11c29604f {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2067.272476] env[62519]: DEBUG nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62519) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2067.272476] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.189s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2067.272476] env[62519]: DEBUG oslo_service.periodic_task [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=62519) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2067.272476] env[62519]: DEBUG nova.compute.manager [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Cleaning up deleted instances {{(pid=62519) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11720}} [ 2067.305166] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-ea93d9ce-51f0-4a58-b086-5e344fb73359 tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] [instance: 90cdb4a1-6192-42e8-ad30-90faeefa4d18] Unregistered the VM {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2067.305405] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-ea93d9ce-51f0-4a58-b086-5e344fb73359 tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] [instance: 90cdb4a1-6192-42e8-ad30-90faeefa4d18] Deleting contents of the VM from datastore datastore1 {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2067.305596] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-ea93d9ce-51f0-4a58-b086-5e344fb73359 tempest-ServerRescueTestJSON-1835819143 
tempest-ServerRescueTestJSON-1835819143-project-member] Deleting the datastore file [datastore1] 90cdb4a1-6192-42e8-ad30-90faeefa4d18 {{(pid=62519) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2067.305888] env[62519]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-92dfdc7e-1a24-482c-b07b-79ed203575ab {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2067.314132] env[62519]: DEBUG oslo_vmware.api [None req-ea93d9ce-51f0-4a58-b086-5e344fb73359 tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] Waiting for the task: (returnval){ [ 2067.314132] env[62519]: value = "task-1803547" [ 2067.314132] env[62519]: _type = "Task" [ 2067.314132] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2067.322942] env[62519]: DEBUG oslo_vmware.api [None req-ea93d9ce-51f0-4a58-b086-5e344fb73359 tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] Task: {'id': task-1803547, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2067.544921] env[62519]: DEBUG oslo_vmware.api [None req-3baa0a60-a5eb-4288-95a8-4a2e3fe20236 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Task: {'id': task-1803545, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2067.785497] env[62519]: DEBUG nova.compute.manager [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] There are 43 instances to clean {{(pid=62519) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11729}} [ 2067.785891] env[62519]: DEBUG nova.compute.manager [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] [instance: d921e3c0-cc52-4c5a-90bc-4059ba499ff1] Instance has had 0 of 5 cleanup attempts {{(pid=62519) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11733}} [ 2067.825639] env[62519]: DEBUG oslo_vmware.api [None req-ea93d9ce-51f0-4a58-b086-5e344fb73359 tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] Task: {'id': task-1803547, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.207818} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2067.825894] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-ea93d9ce-51f0-4a58-b086-5e344fb73359 tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] Deleted the datastore file {{(pid=62519) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2067.826101] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-ea93d9ce-51f0-4a58-b086-5e344fb73359 tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] [instance: 90cdb4a1-6192-42e8-ad30-90faeefa4d18] Deleted contents of the VM from datastore datastore1 {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2067.826283] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-ea93d9ce-51f0-4a58-b086-5e344fb73359 tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] [instance: 90cdb4a1-6192-42e8-ad30-90faeefa4d18] Instance destroyed {{(pid=62519) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2067.826451] env[62519]: INFO nova.compute.manager [None req-ea93d9ce-51f0-4a58-b086-5e344fb73359 tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] [instance: 90cdb4a1-6192-42e8-ad30-90faeefa4d18] Took 1.14 seconds to destroy the instance on the hypervisor. [ 2067.826708] env[62519]: DEBUG oslo.service.loopingcall [None req-ea93d9ce-51f0-4a58-b086-5e344fb73359 tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62519) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2067.826889] env[62519]: DEBUG nova.compute.manager [-] [instance: 90cdb4a1-6192-42e8-ad30-90faeefa4d18] Deallocating network for instance {{(pid=62519) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2297}} [ 2067.826981] env[62519]: DEBUG nova.network.neutron [-] [instance: 90cdb4a1-6192-42e8-ad30-90faeefa4d18] deallocate_for_instance() {{(pid=62519) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2068.046711] env[62519]: DEBUG oslo_vmware.api [None req-3baa0a60-a5eb-4288-95a8-4a2e3fe20236 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Task: {'id': task-1803545, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2068.180852] env[62519]: DEBUG nova.compute.manager [req-d5ff5542-5c51-42df-99d0-a71bb30b047f req-6afdb5c9-ec51-4894-8059-9dcd1e7b40c7 service nova] [instance: 90cdb4a1-6192-42e8-ad30-90faeefa4d18] Received event network-vif-deleted-9c28f995-e2b2-4556-a305-8b9194fd6e11 {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 2068.181178] env[62519]: INFO nova.compute.manager [req-d5ff5542-5c51-42df-99d0-a71bb30b047f req-6afdb5c9-ec51-4894-8059-9dcd1e7b40c7 service nova] [instance: 90cdb4a1-6192-42e8-ad30-90faeefa4d18] Neutron deleted interface 9c28f995-e2b2-4556-a305-8b9194fd6e11; detaching it from the instance and deleting it from the info cache [ 2068.181274] env[62519]: DEBUG nova.network.neutron [req-d5ff5542-5c51-42df-99d0-a71bb30b047f req-6afdb5c9-ec51-4894-8059-9dcd1e7b40c7 service nova] [instance: 90cdb4a1-6192-42e8-ad30-90faeefa4d18] Updating instance_info_cache with network_info: [] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2068.289864] env[62519]: DEBUG nova.compute.manager [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] [instance: c9399643-7d74-4064-a721-e6d038a5cef0] Instance has had 0 of 5 cleanup attempts {{(pid=62519) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11733}} [ 2068.545833] env[62519]: DEBUG oslo_vmware.api [None req-3baa0a60-a5eb-4288-95a8-4a2e3fe20236 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Task: {'id': task-1803545, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2068.660841] env[62519]: DEBUG nova.network.neutron [-] [instance: 90cdb4a1-6192-42e8-ad30-90faeefa4d18] Updating instance_info_cache with network_info: [] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2068.683340] env[62519]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-23e7861b-e724-4621-a63a-aa5b276662a2 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2068.693627] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-365f328f-614b-42b7-847b-09c53129591a {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2068.720691] env[62519]: DEBUG nova.compute.manager [req-d5ff5542-5c51-42df-99d0-a71bb30b047f req-6afdb5c9-ec51-4894-8059-9dcd1e7b40c7 service nova] [instance: 90cdb4a1-6192-42e8-ad30-90faeefa4d18] Detach interface failed, port_id=9c28f995-e2b2-4556-a305-8b9194fd6e11, reason: Instance 90cdb4a1-6192-42e8-ad30-90faeefa4d18 could not be found. 
{{(pid=62519) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11455}} [ 2068.792829] env[62519]: DEBUG nova.compute.manager [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] [instance: 475a4dce-fa1c-4951-b87c-5b5ba6a0f1b0] Instance has had 0 of 5 cleanup attempts {{(pid=62519) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11733}} [ 2069.046415] env[62519]: DEBUG oslo_vmware.api [None req-3baa0a60-a5eb-4288-95a8-4a2e3fe20236 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Task: {'id': task-1803545, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2069.073143] env[62519]: DEBUG nova.compute.manager [None req-b402aad4-b400-4852-a227-a3b19ec0af4d tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] [instance: 4049de7f-4ace-4017-8f9a-63817de5f81c] Stashing vm_state: active {{(pid=62519) _prep_resize /opt/stack/nova/nova/compute/manager.py:6081}} [ 2069.163881] env[62519]: INFO nova.compute.manager [-] [instance: 90cdb4a1-6192-42e8-ad30-90faeefa4d18] Took 1.34 seconds to deallocate network for instance. [ 2069.296671] env[62519]: DEBUG nova.compute.manager [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] [instance: 9bf88b2f-63f9-466b-8669-45f17319055d] Instance has had 0 of 5 cleanup attempts {{(pid=62519) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11733}} [ 2069.548578] env[62519]: DEBUG oslo_vmware.api [None req-3baa0a60-a5eb-4288-95a8-4a2e3fe20236 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Task: {'id': task-1803545, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2069.592199] env[62519]: DEBUG oslo_concurrency.lockutils [None req-b402aad4-b400-4852-a227-a3b19ec0af4d tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2069.592474] env[62519]: DEBUG oslo_concurrency.lockutils [None req-b402aad4-b400-4852-a227-a3b19ec0af4d tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2069.669816] env[62519]: DEBUG oslo_concurrency.lockutils [None req-ea93d9ce-51f0-4a58-b086-5e344fb73359 tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2069.800423] env[62519]: DEBUG nova.compute.manager [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] [instance: 884ba724-1203-4513-a196-8af5258ac731] Instance has had 0 of 5 cleanup attempts {{(pid=62519) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11733}} [ 2070.047243] env[62519]: DEBUG oslo_vmware.api [None req-3baa0a60-a5eb-4288-95a8-4a2e3fe20236 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Task: {'id': task-1803545, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2070.097575] env[62519]: INFO nova.compute.claims [None req-b402aad4-b400-4852-a227-a3b19ec0af4d tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] [instance: 4049de7f-4ace-4017-8f9a-63817de5f81c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2070.305054] env[62519]: DEBUG nova.compute.manager [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] [instance: c884a374-ffb8-48db-97bb-d64a687694d5] Instance has had 0 of 5 cleanup attempts {{(pid=62519) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11733}} [ 2070.548494] env[62519]: DEBUG oslo_vmware.api [None req-3baa0a60-a5eb-4288-95a8-4a2e3fe20236 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Task: {'id': task-1803545, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2070.604093] env[62519]: INFO nova.compute.resource_tracker [None req-b402aad4-b400-4852-a227-a3b19ec0af4d tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] [instance: 4049de7f-4ace-4017-8f9a-63817de5f81c] Updating resource usage from migration d672b29e-b5ae-40b8-b88e-4dad422f550a [ 2070.702260] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2515e8f-6bd0-4af4-a05e-34542f0fc111 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2070.709923] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25b9fecb-b875-4404-8dca-4afed1d161c8 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2070.742826] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03eabebd-f6c0-49ac-90f9-22fd5a43e4f4 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2070.750539] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e2cc589-58c4-4ca4-9ab9-f50ecb86bb49 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2070.765067] env[62519]: DEBUG nova.compute.provider_tree [None req-b402aad4-b400-4852-a227-a3b19ec0af4d tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Inventory has not changed in ProviderTree for provider: f8ca0d98-9158-4b85-ae0e-b106f966dd44 {{(pid=62519) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2070.807169] env[62519]: DEBUG nova.compute.manager [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] [instance: 8e77653a-2e04-4ed7-a419-289bd4b899d7] Instance has had 0 of 5 cleanup attempts {{(pid=62519) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11733}} [ 2071.052009] env[62519]: DEBUG oslo_vmware.api [None req-3baa0a60-a5eb-4288-95a8-4a2e3fe20236 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Task: {'id': task-1803545, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2071.268636] env[62519]: DEBUG nova.scheduler.client.report [None req-b402aad4-b400-4852-a227-a3b19ec0af4d tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Inventory has not changed for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 159, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2071.310231] env[62519]: DEBUG nova.compute.manager [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] [instance: 7bf9ee64-6c45-480e-959f-ff8395b7c446] Instance has had 0 of 5 cleanup attempts {{(pid=62519) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11733}} [ 2071.551054] env[62519]: DEBUG oslo_vmware.api [None req-3baa0a60-a5eb-4288-95a8-4a2e3fe20236 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Task: {'id': task-1803545, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2071.778811] env[62519]: DEBUG oslo_concurrency.lockutils [None req-b402aad4-b400-4852-a227-a3b19ec0af4d tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.186s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2071.779049] env[62519]: INFO nova.compute.manager [None req-b402aad4-b400-4852-a227-a3b19ec0af4d tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] [instance: 4049de7f-4ace-4017-8f9a-63817de5f81c] Migrating [ 2071.789911] env[62519]: DEBUG oslo_concurrency.lockutils [None req-ea93d9ce-51f0-4a58-b086-5e344fb73359 tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 2.120s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2071.792579] env[62519]: DEBUG nova.objects.instance [None req-ea93d9ce-51f0-4a58-b086-5e344fb73359 tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] Lazy-loading 'resources' on Instance uuid 90cdb4a1-6192-42e8-ad30-90faeefa4d18 {{(pid=62519) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2071.818603] env[62519]: DEBUG nova.compute.manager [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] [instance: 4719e47d-dc12-4f9c-aff3-46b083bd7e48] Instance has had 0 of 5 cleanup attempts {{(pid=62519) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11733}} [ 2072.054414] env[62519]: DEBUG oslo_vmware.api [None req-3baa0a60-a5eb-4288-95a8-4a2e3fe20236 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Task: {'id': task-1803545, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2072.301042] env[62519]: DEBUG oslo_concurrency.lockutils [None req-b402aad4-b400-4852-a227-a3b19ec0af4d tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Acquiring lock "refresh_cache-4049de7f-4ace-4017-8f9a-63817de5f81c" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2072.301305] env[62519]: DEBUG oslo_concurrency.lockutils [None req-b402aad4-b400-4852-a227-a3b19ec0af4d tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Acquired lock "refresh_cache-4049de7f-4ace-4017-8f9a-63817de5f81c" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2072.301396] env[62519]: DEBUG nova.network.neutron [None req-b402aad4-b400-4852-a227-a3b19ec0af4d tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] [instance: 4049de7f-4ace-4017-8f9a-63817de5f81c] Building network info cache for instance {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2072.322446] env[62519]: DEBUG nova.compute.manager [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] [instance: a0c60d49-83bb-434b-815c-c39e7493cbb7] Instance has had 0 of 5 cleanup attempts {{(pid=62519) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11733}} [ 2072.413439] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d05367a-9fec-40cc-aca2-246e0322b077 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2072.421400] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7502ac4e-d74c-410f-b607-a18371e1e96c {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2072.453838] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1735698-0748-4c00-b0e9-7a3950b3bab2 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2072.461692] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0fc9dedc-23a7-44f2-a679-1a027cbc8ded {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2072.478847] env[62519]: DEBUG nova.compute.provider_tree [None req-ea93d9ce-51f0-4a58-b086-5e344fb73359 tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] Inventory has not changed in ProviderTree for provider: f8ca0d98-9158-4b85-ae0e-b106f966dd44 {{(pid=62519) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2072.551695] env[62519]: DEBUG oslo_vmware.api [None req-3baa0a60-a5eb-4288-95a8-4a2e3fe20236 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Task: {'id': task-1803545, 'name': ReconfigVM_Task} progress is 18%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2072.825208] env[62519]: DEBUG nova.compute.manager [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] [instance: 31db4b14-0ba3-4159-accc-31c21bd81322] Instance has had 0 of 5 cleanup attempts {{(pid=62519) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11733}} [ 2072.983877] env[62519]: DEBUG nova.scheduler.client.report [None req-ea93d9ce-51f0-4a58-b086-5e344fb73359 tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] Inventory has not changed for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 159, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2073.019969] env[62519]: DEBUG nova.network.neutron [None req-b402aad4-b400-4852-a227-a3b19ec0af4d tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] [instance: 4049de7f-4ace-4017-8f9a-63817de5f81c] Updating instance_info_cache with network_info: [{"id": "55b99762-526d-4033-9eed-24af176c71e4", "address": "fa:16:3e:d8:0e:f7", "network": {"id": "40fc0e4f-fe1a-4b8c-ace4-4612cfc1a8fc", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-364463489-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.170", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "12977ed65a1b410a987b049e9d1dce3e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8e028024-a9c1-4cae-8849-ea770a7ae0e4", "external-id": "nsx-vlan-transportzone-919", "segmentation_id": 919, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap55b99762-52", "ovs_interfaceid": "55b99762-526d-4033-9eed-24af176c71e4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2073.051971] env[62519]: DEBUG oslo_vmware.api [None req-3baa0a60-a5eb-4288-95a8-4a2e3fe20236 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Task: {'id': task-1803545, 'name': ReconfigVM_Task, 'duration_secs': 5.843577} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2073.052220] env[62519]: DEBUG oslo_concurrency.lockutils [None req-3baa0a60-a5eb-4288-95a8-4a2e3fe20236 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Releasing lock "a18af9b7-4548-42d1-8459-508298cb96dc" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2073.052444] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-3baa0a60-a5eb-4288-95a8-4a2e3fe20236 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] [instance: a18af9b7-4548-42d1-8459-508298cb96dc] Reconfigured VM to detach interface {{(pid=62519) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1978}} [ 2073.328437] env[62519]: DEBUG nova.compute.manager [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] [instance: fc3beaba-2ad5-4598-b562-557fdd552b39] Instance has had 0 of 5 cleanup attempts {{(pid=62519) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11733}} [ 2073.488930] env[62519]: DEBUG oslo_concurrency.lockutils [None req-ea93d9ce-51f0-4a58-b086-5e344fb73359 tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.699s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2073.511306] env[62519]: INFO nova.scheduler.client.report [None req-ea93d9ce-51f0-4a58-b086-5e344fb73359 tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] Deleted allocations for instance 90cdb4a1-6192-42e8-ad30-90faeefa4d18 [ 2073.522751] env[62519]: DEBUG oslo_concurrency.lockutils [None req-b402aad4-b400-4852-a227-a3b19ec0af4d tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Releasing lock "refresh_cache-4049de7f-4ace-4017-8f9a-63817de5f81c" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2073.831322] env[62519]: DEBUG nova.compute.manager [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] [instance: 71edba06-1628-4749-8a51-5a9bcb003fda] Instance has had 0 of 5 cleanup attempts {{(pid=62519) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11733}} [ 2074.019653] env[62519]: DEBUG oslo_concurrency.lockutils [None req-ea93d9ce-51f0-4a58-b086-5e344fb73359 tempest-ServerRescueTestJSON-1835819143 tempest-ServerRescueTestJSON-1835819143-project-member] Lock "90cdb4a1-6192-42e8-ad30-90faeefa4d18" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 7.841s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2074.128492] env[62519]: DEBUG nova.compute.manager [req-6ae141dc-0c61-4aea-abaf-ef5d4fa4433e req-4e4d9e9b-06de-4b36-8d54-5e7a486ccf08 service nova] [instance: a18af9b7-4548-42d1-8459-508298cb96dc] Received event network-changed-7b3a7ba6-9ee9-40fb-94c8-f7593ffc1ca2 {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 2074.128691] env[62519]: DEBUG nova.compute.manager [req-6ae141dc-0c61-4aea-abaf-ef5d4fa4433e req-4e4d9e9b-06de-4b36-8d54-5e7a486ccf08 service nova] [instance: a18af9b7-4548-42d1-8459-508298cb96dc] Refreshing instance network info cache due to 
event network-changed-7b3a7ba6-9ee9-40fb-94c8-f7593ffc1ca2. {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11628}} [ 2074.128908] env[62519]: DEBUG oslo_concurrency.lockutils [req-6ae141dc-0c61-4aea-abaf-ef5d4fa4433e req-4e4d9e9b-06de-4b36-8d54-5e7a486ccf08 service nova] Acquiring lock "refresh_cache-a18af9b7-4548-42d1-8459-508298cb96dc" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2074.129312] env[62519]: DEBUG oslo_concurrency.lockutils [req-6ae141dc-0c61-4aea-abaf-ef5d4fa4433e req-4e4d9e9b-06de-4b36-8d54-5e7a486ccf08 service nova] Acquired lock "refresh_cache-a18af9b7-4548-42d1-8459-508298cb96dc" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2074.129551] env[62519]: DEBUG nova.network.neutron [req-6ae141dc-0c61-4aea-abaf-ef5d4fa4433e req-4e4d9e9b-06de-4b36-8d54-5e7a486ccf08 service nova] [instance: a18af9b7-4548-42d1-8459-508298cb96dc] Refreshing network info cache for port 7b3a7ba6-9ee9-40fb-94c8-f7593ffc1ca2 {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2074.335050] env[62519]: DEBUG nova.compute.manager [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] [instance: 618a1db6-4056-4380-b5df-395ac14165a7] Instance has had 0 of 5 cleanup attempts {{(pid=62519) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11733}} [ 2074.343499] env[62519]: DEBUG oslo_concurrency.lockutils [None req-3baa0a60-a5eb-4288-95a8-4a2e3fe20236 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Acquiring lock "refresh_cache-a18af9b7-4548-42d1-8459-508298cb96dc" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2074.789881] env[62519]: DEBUG oslo_concurrency.lockutils [None req-7c6fe533-e774-4e90-a7b4-2f2fd75310a7 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Acquiring lock "interface-39c292a3-7032-4845-9ae5-2a41d13db305-bc92fa6c-1a84-46e1-b36f-1b5430ecbb4c" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2074.790110] env[62519]: DEBUG oslo_concurrency.lockutils [None req-7c6fe533-e774-4e90-a7b4-2f2fd75310a7 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Lock "interface-39c292a3-7032-4845-9ae5-2a41d13db305-bc92fa6c-1a84-46e1-b36f-1b5430ecbb4c" acquired by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2074.790462] env[62519]: DEBUG nova.objects.instance [None req-7c6fe533-e774-4e90-a7b4-2f2fd75310a7 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Lazy-loading 'flavor' on Instance uuid 39c292a3-7032-4845-9ae5-2a41d13db305 {{(pid=62519) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2074.825734] env[62519]: DEBUG nova.network.neutron [req-6ae141dc-0c61-4aea-abaf-ef5d4fa4433e req-4e4d9e9b-06de-4b36-8d54-5e7a486ccf08 service nova] [instance: a18af9b7-4548-42d1-8459-508298cb96dc] Updated VIF entry in instance network info cache for port 7b3a7ba6-9ee9-40fb-94c8-f7593ffc1ca2. 
{{(pid=62519) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2074.826133] env[62519]: DEBUG nova.network.neutron [req-6ae141dc-0c61-4aea-abaf-ef5d4fa4433e req-4e4d9e9b-06de-4b36-8d54-5e7a486ccf08 service nova] [instance: a18af9b7-4548-42d1-8459-508298cb96dc] Updating instance_info_cache with network_info: [{"id": "7b3a7ba6-9ee9-40fb-94c8-f7593ffc1ca2", "address": "fa:16:3e:4a:4f:32", "network": {"id": "3996e7f6-f093-4152-af91-6fb77f32a1c5", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-2013009403-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "63a46158057949478e5c79fbe0d4d5d4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3093647a-bab7-4562-ada0-428725e8c0fc", "external-id": "nsx-vlan-transportzone-660", "segmentation_id": 660, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7b3a7ba6-9e", "ovs_interfaceid": "7b3a7ba6-9ee9-40fb-94c8-f7593ffc1ca2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "bc92fa6c-1a84-46e1-b36f-1b5430ecbb4c", "address": "fa:16:3e:ec:b1:50", "network": {"id": "3996e7f6-f093-4152-af91-6fb77f32a1c5", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-2013009403-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "63a46158057949478e5c79fbe0d4d5d4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3093647a-bab7-4562-ada0-428725e8c0fc", "external-id": "nsx-vlan-transportzone-660", "segmentation_id": 660, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbc92fa6c-1a", "ovs_interfaceid": "bc92fa6c-1a84-46e1-b36f-1b5430ecbb4c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2074.838472] env[62519]: DEBUG nova.compute.manager [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] [instance: 9327a897-8e4f-4c59-952e-aecfac4028e0] Instance has had 0 of 5 cleanup attempts {{(pid=62519) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11733}} [ 2075.036347] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8eee4370-463c-41a0-b1a9-c277340fe029 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2075.056289] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-b402aad4-b400-4852-a227-a3b19ec0af4d tempest-ServerActionsTestJSON-347062103 
tempest-ServerActionsTestJSON-347062103-project-member] [instance: 4049de7f-4ace-4017-8f9a-63817de5f81c] Updating instance '4049de7f-4ace-4017-8f9a-63817de5f81c' progress to 0 {{(pid=62519) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2075.328230] env[62519]: DEBUG oslo_concurrency.lockutils [req-6ae141dc-0c61-4aea-abaf-ef5d4fa4433e req-4e4d9e9b-06de-4b36-8d54-5e7a486ccf08 service nova] Releasing lock "refresh_cache-a18af9b7-4548-42d1-8459-508298cb96dc" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2075.329077] env[62519]: DEBUG oslo_concurrency.lockutils [None req-3baa0a60-a5eb-4288-95a8-4a2e3fe20236 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Acquired lock "refresh_cache-a18af9b7-4548-42d1-8459-508298cb96dc" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2075.329311] env[62519]: DEBUG nova.network.neutron [None req-3baa0a60-a5eb-4288-95a8-4a2e3fe20236 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] [instance: a18af9b7-4548-42d1-8459-508298cb96dc] Building network info cache for instance {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2075.341844] env[62519]: DEBUG nova.compute.manager [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] [instance: 70abb2e0-1ff2-49dd-b40f-9cac244a249e] Instance has had 0 of 5 cleanup attempts {{(pid=62519) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11733}} [ 2075.416710] env[62519]: DEBUG nova.objects.instance [None req-7c6fe533-e774-4e90-a7b4-2f2fd75310a7 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Lazy-loading 'pci_requests' on Instance uuid 39c292a3-7032-4845-9ae5-2a41d13db305 {{(pid=62519) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2075.563434] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-b402aad4-b400-4852-a227-a3b19ec0af4d tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] [instance: 4049de7f-4ace-4017-8f9a-63817de5f81c] Powering off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2075.563816] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a17e7f38-c067-4f6f-b061-c5d710bda924 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2075.572866] env[62519]: DEBUG oslo_vmware.api [None req-b402aad4-b400-4852-a227-a3b19ec0af4d tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Waiting for the task: (returnval){ [ 2075.572866] env[62519]: value = "task-1803549" [ 2075.572866] env[62519]: _type = "Task" [ 2075.572866] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2075.580505] env[62519]: DEBUG oslo_vmware.api [None req-b402aad4-b400-4852-a227-a3b19ec0af4d tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Task: {'id': task-1803549, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2075.844736] env[62519]: DEBUG nova.compute.manager [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] [instance: 646c9dfc-7b78-4cdb-b4f5-480c43af38c4] Instance has had 0 of 5 cleanup attempts {{(pid=62519) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11733}} [ 2075.918866] env[62519]: DEBUG nova.objects.base [None req-7c6fe533-e774-4e90-a7b4-2f2fd75310a7 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Object Instance<39c292a3-7032-4845-9ae5-2a41d13db305> lazy-loaded attributes: flavor,pci_requests {{(pid=62519) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 2075.919133] env[62519]: DEBUG nova.network.neutron [None req-7c6fe533-e774-4e90-a7b4-2f2fd75310a7 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] [instance: 39c292a3-7032-4845-9ae5-2a41d13db305] allocate_for_instance() {{(pid=62519) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 2076.026707] env[62519]: DEBUG nova.policy [None req-7c6fe533-e774-4e90-a7b4-2f2fd75310a7 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '7eca5c7b079d4785941d68d7c51df5e0', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '63a46158057949478e5c79fbe0d4d5d4', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62519) authorize /opt/stack/nova/nova/policy.py:192}} [ 2076.083240] env[62519]: DEBUG oslo_vmware.api [None req-b402aad4-b400-4852-a227-a3b19ec0af4d tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Task: {'id': task-1803549, 'name': PowerOffVM_Task, 'duration_secs': 0.237142} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2076.083503] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-b402aad4-b400-4852-a227-a3b19ec0af4d tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] [instance: 4049de7f-4ace-4017-8f9a-63817de5f81c] Powered off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2076.083687] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-b402aad4-b400-4852-a227-a3b19ec0af4d tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] [instance: 4049de7f-4ace-4017-8f9a-63817de5f81c] Updating instance '4049de7f-4ace-4017-8f9a-63817de5f81c' progress to 17 {{(pid=62519) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2076.114072] env[62519]: INFO nova.network.neutron [None req-3baa0a60-a5eb-4288-95a8-4a2e3fe20236 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] [instance: a18af9b7-4548-42d1-8459-508298cb96dc] Port bc92fa6c-1a84-46e1-b36f-1b5430ecbb4c from network info_cache is no longer associated with instance in Neutron. Removing from network info_cache. 
[ 2076.114409] env[62519]: DEBUG nova.network.neutron [None req-3baa0a60-a5eb-4288-95a8-4a2e3fe20236 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] [instance: a18af9b7-4548-42d1-8459-508298cb96dc] Updating instance_info_cache with network_info: [{"id": "7b3a7ba6-9ee9-40fb-94c8-f7593ffc1ca2", "address": "fa:16:3e:4a:4f:32", "network": {"id": "3996e7f6-f093-4152-af91-6fb77f32a1c5", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-2013009403-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "63a46158057949478e5c79fbe0d4d5d4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3093647a-bab7-4562-ada0-428725e8c0fc", "external-id": "nsx-vlan-transportzone-660", "segmentation_id": 660, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7b3a7ba6-9e", "ovs_interfaceid": "7b3a7ba6-9ee9-40fb-94c8-f7593ffc1ca2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2076.157705] env[62519]: DEBUG nova.compute.manager [req-31e0ec97-b0f3-4727-b6ad-b6f3a721ed59 req-e8b5edfb-fd4a-4f7d-8148-cac026fd23e4 service nova] [instance: 39c292a3-7032-4845-9ae5-2a41d13db305] Received event network-changed-03d52b0a-655f-4661-87b2-35d1b1af19a4 {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 2076.157705] env[62519]: DEBUG nova.compute.manager [req-31e0ec97-b0f3-4727-b6ad-b6f3a721ed59 req-e8b5edfb-fd4a-4f7d-8148-cac026fd23e4 service nova] [instance: 39c292a3-7032-4845-9ae5-2a41d13db305] Refreshing instance network info cache due to event network-changed-03d52b0a-655f-4661-87b2-35d1b1af19a4. 
{{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11628}} [ 2076.159056] env[62519]: DEBUG oslo_concurrency.lockutils [req-31e0ec97-b0f3-4727-b6ad-b6f3a721ed59 req-e8b5edfb-fd4a-4f7d-8148-cac026fd23e4 service nova] Acquiring lock "refresh_cache-39c292a3-7032-4845-9ae5-2a41d13db305" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2076.159518] env[62519]: DEBUG oslo_concurrency.lockutils [req-31e0ec97-b0f3-4727-b6ad-b6f3a721ed59 req-e8b5edfb-fd4a-4f7d-8148-cac026fd23e4 service nova] Acquired lock "refresh_cache-39c292a3-7032-4845-9ae5-2a41d13db305" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2076.159928] env[62519]: DEBUG nova.network.neutron [req-31e0ec97-b0f3-4727-b6ad-b6f3a721ed59 req-e8b5edfb-fd4a-4f7d-8148-cac026fd23e4 service nova] [instance: 39c292a3-7032-4845-9ae5-2a41d13db305] Refreshing network info cache for port 03d52b0a-655f-4661-87b2-35d1b1af19a4 {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2076.351438] env[62519]: DEBUG nova.compute.manager [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] [instance: eb5de0a4-0af3-4731-ab30-3ae3d72207a7] Instance has had 0 of 5 cleanup attempts {{(pid=62519) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11733}} [ 2076.590385] env[62519]: DEBUG nova.virt.hardware [None req-b402aad4-b400-4852-a227-a3b19ec0af4d tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T07:56:15Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=15793716-f1d9-4a86-9030-717adf498693,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 2076.590667] env[62519]: DEBUG nova.virt.hardware [None req-b402aad4-b400-4852-a227-a3b19ec0af4d tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Flavor limits 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2076.590876] env[62519]: DEBUG nova.virt.hardware [None req-b402aad4-b400-4852-a227-a3b19ec0af4d tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Image limits 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 2076.591107] env[62519]: DEBUG nova.virt.hardware [None req-b402aad4-b400-4852-a227-a3b19ec0af4d tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Flavor pref 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2076.591291] env[62519]: DEBUG nova.virt.hardware [None req-b402aad4-b400-4852-a227-a3b19ec0af4d tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Image pref 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 2076.591493] env[62519]: DEBUG 
nova.virt.hardware [None req-b402aad4-b400-4852-a227-a3b19ec0af4d tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 2076.591776] env[62519]: DEBUG nova.virt.hardware [None req-b402aad4-b400-4852-a227-a3b19ec0af4d tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 2076.591973] env[62519]: DEBUG nova.virt.hardware [None req-b402aad4-b400-4852-a227-a3b19ec0af4d tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62519) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 2076.592192] env[62519]: DEBUG nova.virt.hardware [None req-b402aad4-b400-4852-a227-a3b19ec0af4d tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Got 1 possible topologies {{(pid=62519) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 2076.592391] env[62519]: DEBUG nova.virt.hardware [None req-b402aad4-b400-4852-a227-a3b19ec0af4d tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 2076.592608] env[62519]: DEBUG nova.virt.hardware [None req-b402aad4-b400-4852-a227-a3b19ec0af4d tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 2076.597642] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e8757f34-9373-4282-a629-e63ca9f4a28f {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2076.613065] env[62519]: DEBUG oslo_vmware.api [None req-b402aad4-b400-4852-a227-a3b19ec0af4d tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Waiting for the task: (returnval){ [ 2076.613065] env[62519]: value = "task-1803550" [ 2076.613065] env[62519]: _type = "Task" [ 2076.613065] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2076.616674] env[62519]: DEBUG oslo_concurrency.lockutils [None req-3baa0a60-a5eb-4288-95a8-4a2e3fe20236 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Releasing lock "refresh_cache-a18af9b7-4548-42d1-8459-508298cb96dc" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2076.623661] env[62519]: DEBUG oslo_vmware.api [None req-b402aad4-b400-4852-a227-a3b19ec0af4d tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Task: {'id': task-1803550, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2076.855023] env[62519]: DEBUG nova.compute.manager [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] [instance: fcff790b-d267-4d8c-80d8-ad66cfb66539] Instance has had 0 of 5 cleanup attempts {{(pid=62519) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11733}} [ 2076.893263] env[62519]: DEBUG nova.network.neutron [req-31e0ec97-b0f3-4727-b6ad-b6f3a721ed59 req-e8b5edfb-fd4a-4f7d-8148-cac026fd23e4 service nova] [instance: 39c292a3-7032-4845-9ae5-2a41d13db305] Updated VIF entry in instance network info cache for port 03d52b0a-655f-4661-87b2-35d1b1af19a4. {{(pid=62519) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2076.893609] env[62519]: DEBUG nova.network.neutron [req-31e0ec97-b0f3-4727-b6ad-b6f3a721ed59 req-e8b5edfb-fd4a-4f7d-8148-cac026fd23e4 service nova] [instance: 39c292a3-7032-4845-9ae5-2a41d13db305] Updating instance_info_cache with network_info: [{"id": "03d52b0a-655f-4661-87b2-35d1b1af19a4", "address": "fa:16:3e:3c:7b:8d", "network": {"id": "3996e7f6-f093-4152-af91-6fb77f32a1c5", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-2013009403-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.142", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "63a46158057949478e5c79fbe0d4d5d4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3093647a-bab7-4562-ada0-428725e8c0fc", "external-id": "nsx-vlan-transportzone-660", "segmentation_id": 660, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap03d52b0a-65", "ovs_interfaceid": "03d52b0a-655f-4661-87b2-35d1b1af19a4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2077.125552] env[62519]: DEBUG oslo_concurrency.lockutils [None req-3baa0a60-a5eb-4288-95a8-4a2e3fe20236 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Lock "interface-a18af9b7-4548-42d1-8459-508298cb96dc-bc92fa6c-1a84-46e1-b36f-1b5430ecbb4c" "released" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: held 10.662s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2077.126516] env[62519]: DEBUG oslo_vmware.api [None req-b402aad4-b400-4852-a227-a3b19ec0af4d tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Task: {'id': task-1803550, 'name': ReconfigVM_Task, 'duration_secs': 0.186406} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2077.126986] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-b402aad4-b400-4852-a227-a3b19ec0af4d tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] [instance: 4049de7f-4ace-4017-8f9a-63817de5f81c] Updating instance '4049de7f-4ace-4017-8f9a-63817de5f81c' progress to 33 {{(pid=62519) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2077.360622] env[62519]: DEBUG nova.compute.manager [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] [instance: e143206e-eb12-41b8-9140-229c1533fd80] Instance has had 0 of 5 cleanup attempts {{(pid=62519) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11733}} [ 2077.396550] env[62519]: DEBUG oslo_concurrency.lockutils [req-31e0ec97-b0f3-4727-b6ad-b6f3a721ed59 req-e8b5edfb-fd4a-4f7d-8148-cac026fd23e4 service nova] Releasing lock "refresh_cache-39c292a3-7032-4845-9ae5-2a41d13db305" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2077.540226] env[62519]: DEBUG nova.network.neutron [None req-7c6fe533-e774-4e90-a7b4-2f2fd75310a7 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] [instance: 39c292a3-7032-4845-9ae5-2a41d13db305] Successfully updated port: bc92fa6c-1a84-46e1-b36f-1b5430ecbb4c {{(pid=62519) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2077.633289] env[62519]: DEBUG nova.virt.hardware [None req-b402aad4-b400-4852-a227-a3b19ec0af4d tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T07:56:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=15793716-f1d9-4a86-9030-717adf498693,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 2077.633581] env[62519]: DEBUG nova.virt.hardware [None req-b402aad4-b400-4852-a227-a3b19ec0af4d tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Flavor limits 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2077.633784] env[62519]: DEBUG nova.virt.hardware [None req-b402aad4-b400-4852-a227-a3b19ec0af4d tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Image limits 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 2077.633985] env[62519]: DEBUG nova.virt.hardware [None req-b402aad4-b400-4852-a227-a3b19ec0af4d tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Flavor pref 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2077.634152] env[62519]: DEBUG nova.virt.hardware [None req-b402aad4-b400-4852-a227-a3b19ec0af4d tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Image pref 0:0:0 
{{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 2077.634298] env[62519]: DEBUG nova.virt.hardware [None req-b402aad4-b400-4852-a227-a3b19ec0af4d tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 2077.634501] env[62519]: DEBUG nova.virt.hardware [None req-b402aad4-b400-4852-a227-a3b19ec0af4d tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 2077.634660] env[62519]: DEBUG nova.virt.hardware [None req-b402aad4-b400-4852-a227-a3b19ec0af4d tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62519) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 2077.634821] env[62519]: DEBUG nova.virt.hardware [None req-b402aad4-b400-4852-a227-a3b19ec0af4d tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Got 1 possible topologies {{(pid=62519) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 2077.634980] env[62519]: DEBUG nova.virt.hardware [None req-b402aad4-b400-4852-a227-a3b19ec0af4d tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 2077.635168] env[62519]: DEBUG nova.virt.hardware [None req-b402aad4-b400-4852-a227-a3b19ec0af4d tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 2077.640473] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-b402aad4-b400-4852-a227-a3b19ec0af4d tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] [instance: 4049de7f-4ace-4017-8f9a-63817de5f81c] Reconfiguring VM instance instance-00000077 to detach disk 2000 {{(pid=62519) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 2077.641290] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-83b8200d-96af-4a20-9ec4-967caeca8b8e {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2077.660481] env[62519]: DEBUG oslo_vmware.api [None req-b402aad4-b400-4852-a227-a3b19ec0af4d tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Waiting for the task: (returnval){ [ 2077.660481] env[62519]: value = "task-1803551" [ 2077.660481] env[62519]: _type = "Task" [ 2077.660481] env[62519]: } to complete. 
{{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2077.668853] env[62519]: DEBUG oslo_vmware.api [None req-b402aad4-b400-4852-a227-a3b19ec0af4d tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Task: {'id': task-1803551, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2077.864533] env[62519]: DEBUG nova.compute.manager [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] [instance: 4a9656fd-2b9f-4dd6-8b71-39e55813f2f6] Instance has had 0 of 5 cleanup attempts {{(pid=62519) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11733}} [ 2078.043367] env[62519]: DEBUG oslo_concurrency.lockutils [None req-7c6fe533-e774-4e90-a7b4-2f2fd75310a7 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Acquiring lock "refresh_cache-39c292a3-7032-4845-9ae5-2a41d13db305" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2078.043367] env[62519]: DEBUG oslo_concurrency.lockutils [None req-7c6fe533-e774-4e90-a7b4-2f2fd75310a7 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Acquired lock "refresh_cache-39c292a3-7032-4845-9ae5-2a41d13db305" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2078.043512] env[62519]: DEBUG nova.network.neutron [None req-7c6fe533-e774-4e90-a7b4-2f2fd75310a7 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] [instance: 39c292a3-7032-4845-9ae5-2a41d13db305] Building network info cache for instance {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2078.170903] env[62519]: DEBUG oslo_vmware.api [None req-b402aad4-b400-4852-a227-a3b19ec0af4d tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Task: {'id': task-1803551, 'name': ReconfigVM_Task, 'duration_secs': 0.167576} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2078.171230] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-b402aad4-b400-4852-a227-a3b19ec0af4d tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] [instance: 4049de7f-4ace-4017-8f9a-63817de5f81c] Reconfigured VM instance instance-00000077 to detach disk 2000 {{(pid=62519) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 2078.172078] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89169cda-0c83-4c62-aa79-c94db8f7e3b6 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2078.194318] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-b402aad4-b400-4852-a227-a3b19ec0af4d tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] [instance: 4049de7f-4ace-4017-8f9a-63817de5f81c] Reconfiguring VM instance instance-00000077 to attach disk [datastore1] 4049de7f-4ace-4017-8f9a-63817de5f81c/4049de7f-4ace-4017-8f9a-63817de5f81c.vmdk or device None with type thin {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2078.195443] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-229b4da2-e22f-4f2d-957e-e00c74078db2 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2078.208613] env[62519]: DEBUG nova.compute.manager [req-8c79d2c6-a1c2-48f1-b0dd-8effe7138b03 req-19284de5-7970-46cb-9193-d004a81b947e service nova] [instance: 39c292a3-7032-4845-9ae5-2a41d13db305] Received event network-vif-plugged-bc92fa6c-1a84-46e1-b36f-1b5430ecbb4c {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 2078.208812] env[62519]: DEBUG oslo_concurrency.lockutils [req-8c79d2c6-a1c2-48f1-b0dd-8effe7138b03 req-19284de5-7970-46cb-9193-d004a81b947e service nova] Acquiring lock "39c292a3-7032-4845-9ae5-2a41d13db305-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2078.209025] env[62519]: DEBUG oslo_concurrency.lockutils [req-8c79d2c6-a1c2-48f1-b0dd-8effe7138b03 req-19284de5-7970-46cb-9193-d004a81b947e service nova] Lock "39c292a3-7032-4845-9ae5-2a41d13db305-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2078.209196] env[62519]: DEBUG oslo_concurrency.lockutils [req-8c79d2c6-a1c2-48f1-b0dd-8effe7138b03 req-19284de5-7970-46cb-9193-d004a81b947e service nova] Lock "39c292a3-7032-4845-9ae5-2a41d13db305-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2078.209378] env[62519]: DEBUG nova.compute.manager [req-8c79d2c6-a1c2-48f1-b0dd-8effe7138b03 req-19284de5-7970-46cb-9193-d004a81b947e service nova] [instance: 39c292a3-7032-4845-9ae5-2a41d13db305] No waiting events found dispatching network-vif-plugged-bc92fa6c-1a84-46e1-b36f-1b5430ecbb4c {{(pid=62519) pop_instance_event /opt/stack/nova/nova/compute/manager.py:323}} [ 2078.209560] env[62519]: WARNING 
nova.compute.manager [req-8c79d2c6-a1c2-48f1-b0dd-8effe7138b03 req-19284de5-7970-46cb-9193-d004a81b947e service nova] [instance: 39c292a3-7032-4845-9ae5-2a41d13db305] Received unexpected event network-vif-plugged-bc92fa6c-1a84-46e1-b36f-1b5430ecbb4c for instance with vm_state active and task_state None. [ 2078.209720] env[62519]: DEBUG nova.compute.manager [req-8c79d2c6-a1c2-48f1-b0dd-8effe7138b03 req-19284de5-7970-46cb-9193-d004a81b947e service nova] [instance: 39c292a3-7032-4845-9ae5-2a41d13db305] Received event network-changed-bc92fa6c-1a84-46e1-b36f-1b5430ecbb4c {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 2078.209892] env[62519]: DEBUG nova.compute.manager [req-8c79d2c6-a1c2-48f1-b0dd-8effe7138b03 req-19284de5-7970-46cb-9193-d004a81b947e service nova] [instance: 39c292a3-7032-4845-9ae5-2a41d13db305] Refreshing instance network info cache due to event network-changed-bc92fa6c-1a84-46e1-b36f-1b5430ecbb4c. {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11628}} [ 2078.210082] env[62519]: DEBUG oslo_concurrency.lockutils [req-8c79d2c6-a1c2-48f1-b0dd-8effe7138b03 req-19284de5-7970-46cb-9193-d004a81b947e service nova] Acquiring lock "refresh_cache-39c292a3-7032-4845-9ae5-2a41d13db305" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2078.216191] env[62519]: DEBUG oslo_vmware.api [None req-b402aad4-b400-4852-a227-a3b19ec0af4d tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Waiting for the task: (returnval){ [ 2078.216191] env[62519]: value = "task-1803552" [ 2078.216191] env[62519]: _type = "Task" [ 2078.216191] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2078.224779] env[62519]: DEBUG oslo_vmware.api [None req-b402aad4-b400-4852-a227-a3b19ec0af4d tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Task: {'id': task-1803552, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2078.367949] env[62519]: DEBUG nova.compute.manager [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] [instance: f3665f89-1747-4567-9e56-c937d4ac81da] Instance has had 0 of 5 cleanup attempts {{(pid=62519) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11733}} [ 2078.581472] env[62519]: WARNING nova.network.neutron [None req-7c6fe533-e774-4e90-a7b4-2f2fd75310a7 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] [instance: 39c292a3-7032-4845-9ae5-2a41d13db305] 3996e7f6-f093-4152-af91-6fb77f32a1c5 already exists in list: networks containing: ['3996e7f6-f093-4152-af91-6fb77f32a1c5']. ignoring it [ 2078.727046] env[62519]: DEBUG oslo_vmware.api [None req-b402aad4-b400-4852-a227-a3b19ec0af4d tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Task: {'id': task-1803552, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2078.851570] env[62519]: DEBUG nova.network.neutron [None req-7c6fe533-e774-4e90-a7b4-2f2fd75310a7 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] [instance: 39c292a3-7032-4845-9ae5-2a41d13db305] Updating instance_info_cache with network_info: [{"id": "03d52b0a-655f-4661-87b2-35d1b1af19a4", "address": "fa:16:3e:3c:7b:8d", "network": {"id": "3996e7f6-f093-4152-af91-6fb77f32a1c5", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-2013009403-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.142", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "63a46158057949478e5c79fbe0d4d5d4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3093647a-bab7-4562-ada0-428725e8c0fc", "external-id": "nsx-vlan-transportzone-660", "segmentation_id": 660, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap03d52b0a-65", "ovs_interfaceid": "03d52b0a-655f-4661-87b2-35d1b1af19a4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "bc92fa6c-1a84-46e1-b36f-1b5430ecbb4c", "address": "fa:16:3e:ec:b1:50", "network": {"id": "3996e7f6-f093-4152-af91-6fb77f32a1c5", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-2013009403-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "63a46158057949478e5c79fbe0d4d5d4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3093647a-bab7-4562-ada0-428725e8c0fc", "external-id": "nsx-vlan-transportzone-660", "segmentation_id": 660, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbc92fa6c-1a", "ovs_interfaceid": "bc92fa6c-1a84-46e1-b36f-1b5430ecbb4c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2078.871600] env[62519]: DEBUG nova.compute.manager [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] [instance: 21daee7a-1788-4a1c-969e-65b696702bf2] Instance has had 0 of 5 cleanup attempts {{(pid=62519) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11733}} [ 2079.226432] env[62519]: DEBUG oslo_vmware.api [None req-b402aad4-b400-4852-a227-a3b19ec0af4d tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Task: {'id': task-1803552, 'name': ReconfigVM_Task, 'duration_secs': 0.80555} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2079.226669] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-b402aad4-b400-4852-a227-a3b19ec0af4d tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] [instance: 4049de7f-4ace-4017-8f9a-63817de5f81c] Reconfigured VM instance instance-00000077 to attach disk [datastore1] 4049de7f-4ace-4017-8f9a-63817de5f81c/4049de7f-4ace-4017-8f9a-63817de5f81c.vmdk or device None with type thin {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2079.226940] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-b402aad4-b400-4852-a227-a3b19ec0af4d tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] [instance: 4049de7f-4ace-4017-8f9a-63817de5f81c] Updating instance '4049de7f-4ace-4017-8f9a-63817de5f81c' progress to 50 {{(pid=62519) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2079.355070] env[62519]: DEBUG oslo_concurrency.lockutils [None req-7c6fe533-e774-4e90-a7b4-2f2fd75310a7 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Releasing lock "refresh_cache-39c292a3-7032-4845-9ae5-2a41d13db305" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2079.355752] env[62519]: DEBUG oslo_concurrency.lockutils [None req-7c6fe533-e774-4e90-a7b4-2f2fd75310a7 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Acquiring lock "39c292a3-7032-4845-9ae5-2a41d13db305" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2079.355941] env[62519]: DEBUG oslo_concurrency.lockutils [None req-7c6fe533-e774-4e90-a7b4-2f2fd75310a7 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Acquired lock "39c292a3-7032-4845-9ae5-2a41d13db305" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2079.356252] env[62519]: DEBUG oslo_concurrency.lockutils [req-8c79d2c6-a1c2-48f1-b0dd-8effe7138b03 req-19284de5-7970-46cb-9193-d004a81b947e service nova] Acquired lock "refresh_cache-39c292a3-7032-4845-9ae5-2a41d13db305" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2079.356433] env[62519]: DEBUG nova.network.neutron [req-8c79d2c6-a1c2-48f1-b0dd-8effe7138b03 req-19284de5-7970-46cb-9193-d004a81b947e service nova] [instance: 39c292a3-7032-4845-9ae5-2a41d13db305] Refreshing network info cache for port bc92fa6c-1a84-46e1-b36f-1b5430ecbb4c {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2079.358060] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4483936-b95a-4b94-838f-b5ed9f678578 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2079.375451] env[62519]: DEBUG nova.compute.manager [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] [instance: 71c6bd0c-40da-44eb-8f37-4f1b7fb0f0a2] Instance has had 0 of 5 cleanup attempts {{(pid=62519) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11733}} [ 2079.377399] env[62519]: DEBUG nova.virt.hardware [None req-7c6fe533-e774-4e90-a7b4-2f2fd75310a7 tempest-AttachInterfacesTestJSON-1996275057 
tempest-AttachInterfacesTestJSON-1996275057-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T07:56:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=,min_ram=,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 2079.377618] env[62519]: DEBUG nova.virt.hardware [None req-7c6fe533-e774-4e90-a7b4-2f2fd75310a7 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Flavor limits 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2079.377777] env[62519]: DEBUG nova.virt.hardware [None req-7c6fe533-e774-4e90-a7b4-2f2fd75310a7 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Image limits 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 2079.377958] env[62519]: DEBUG nova.virt.hardware [None req-7c6fe533-e774-4e90-a7b4-2f2fd75310a7 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Flavor pref 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2079.378126] env[62519]: DEBUG nova.virt.hardware [None req-7c6fe533-e774-4e90-a7b4-2f2fd75310a7 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Image pref 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 2079.378273] env[62519]: DEBUG nova.virt.hardware [None req-7c6fe533-e774-4e90-a7b4-2f2fd75310a7 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 2079.378480] env[62519]: DEBUG nova.virt.hardware [None req-7c6fe533-e774-4e90-a7b4-2f2fd75310a7 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 2079.378674] env[62519]: DEBUG nova.virt.hardware [None req-7c6fe533-e774-4e90-a7b4-2f2fd75310a7 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62519) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 2079.378850] env[62519]: DEBUG nova.virt.hardware [None req-7c6fe533-e774-4e90-a7b4-2f2fd75310a7 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Got 1 possible topologies {{(pid=62519) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 2079.379027] env[62519]: DEBUG nova.virt.hardware [None req-7c6fe533-e774-4e90-a7b4-2f2fd75310a7 tempest-AttachInterfacesTestJSON-1996275057 
tempest-AttachInterfacesTestJSON-1996275057-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 2079.379204] env[62519]: DEBUG nova.virt.hardware [None req-7c6fe533-e774-4e90-a7b4-2f2fd75310a7 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 2079.385591] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-7c6fe533-e774-4e90-a7b4-2f2fd75310a7 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] [instance: 39c292a3-7032-4845-9ae5-2a41d13db305] Reconfiguring VM to attach interface {{(pid=62519) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1923}} [ 2079.386575] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d4e7208f-7e83-4f67-8d8a-c6672164bde9 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2079.404543] env[62519]: DEBUG oslo_vmware.api [None req-7c6fe533-e774-4e90-a7b4-2f2fd75310a7 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Waiting for the task: (returnval){ [ 2079.404543] env[62519]: value = "task-1803553" [ 2079.404543] env[62519]: _type = "Task" [ 2079.404543] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2079.412368] env[62519]: DEBUG oslo_vmware.api [None req-7c6fe533-e774-4e90-a7b4-2f2fd75310a7 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Task: {'id': task-1803553, 'name': ReconfigVM_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2079.733724] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dde3c814-c6d1-4d10-8190-2d2da1c168f1 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2079.752651] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a07112d-134f-42bc-bf67-03ab89cf9d5a {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2079.769622] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-b402aad4-b400-4852-a227-a3b19ec0af4d tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] [instance: 4049de7f-4ace-4017-8f9a-63817de5f81c] Updating instance '4049de7f-4ace-4017-8f9a-63817de5f81c' progress to 67 {{(pid=62519) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2079.887069] env[62519]: DEBUG nova.compute.manager [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] [instance: 1c4615c7-d145-4529-98bd-1ae3ed51e1b5] Instance has had 0 of 5 cleanup attempts {{(pid=62519) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11733}} [ 2079.917021] env[62519]: DEBUG oslo_vmware.api [None req-7c6fe533-e774-4e90-a7b4-2f2fd75310a7 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Task: {'id': task-1803553, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2080.089322] env[62519]: DEBUG nova.network.neutron [req-8c79d2c6-a1c2-48f1-b0dd-8effe7138b03 req-19284de5-7970-46cb-9193-d004a81b947e service nova] [instance: 39c292a3-7032-4845-9ae5-2a41d13db305] Updated VIF entry in instance network info cache for port bc92fa6c-1a84-46e1-b36f-1b5430ecbb4c. 
{{(pid=62519) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2080.089685] env[62519]: DEBUG nova.network.neutron [req-8c79d2c6-a1c2-48f1-b0dd-8effe7138b03 req-19284de5-7970-46cb-9193-d004a81b947e service nova] [instance: 39c292a3-7032-4845-9ae5-2a41d13db305] Updating instance_info_cache with network_info: [{"id": "03d52b0a-655f-4661-87b2-35d1b1af19a4", "address": "fa:16:3e:3c:7b:8d", "network": {"id": "3996e7f6-f093-4152-af91-6fb77f32a1c5", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-2013009403-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.142", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "63a46158057949478e5c79fbe0d4d5d4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3093647a-bab7-4562-ada0-428725e8c0fc", "external-id": "nsx-vlan-transportzone-660", "segmentation_id": 660, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap03d52b0a-65", "ovs_interfaceid": "03d52b0a-655f-4661-87b2-35d1b1af19a4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "bc92fa6c-1a84-46e1-b36f-1b5430ecbb4c", "address": "fa:16:3e:ec:b1:50", "network": {"id": "3996e7f6-f093-4152-af91-6fb77f32a1c5", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-2013009403-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "63a46158057949478e5c79fbe0d4d5d4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3093647a-bab7-4562-ada0-428725e8c0fc", "external-id": "nsx-vlan-transportzone-660", "segmentation_id": 660, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbc92fa6c-1a", "ovs_interfaceid": "bc92fa6c-1a84-46e1-b36f-1b5430ecbb4c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2080.307683] env[62519]: DEBUG nova.network.neutron [None req-b402aad4-b400-4852-a227-a3b19ec0af4d tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] [instance: 4049de7f-4ace-4017-8f9a-63817de5f81c] Port 55b99762-526d-4033-9eed-24af176c71e4 binding to destination host cpu-1 is already ACTIVE {{(pid=62519) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 2080.390086] env[62519]: DEBUG nova.compute.manager [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] [instance: 2bc8f11e-82fc-4acb-945e-15327c133920] Instance has had 0 of 5 cleanup attempts {{(pid=62519) _run_pending_deletes 
/opt/stack/nova/nova/compute/manager.py:11733}} [ 2080.415135] env[62519]: DEBUG oslo_vmware.api [None req-7c6fe533-e774-4e90-a7b4-2f2fd75310a7 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Task: {'id': task-1803553, 'name': ReconfigVM_Task, 'duration_secs': 0.621146} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2080.415659] env[62519]: DEBUG oslo_concurrency.lockutils [None req-7c6fe533-e774-4e90-a7b4-2f2fd75310a7 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Releasing lock "39c292a3-7032-4845-9ae5-2a41d13db305" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2080.415875] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-7c6fe533-e774-4e90-a7b4-2f2fd75310a7 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] [instance: 39c292a3-7032-4845-9ae5-2a41d13db305] Reconfigured VM to attach interface {{(pid=62519) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1937}} [ 2080.592898] env[62519]: DEBUG oslo_concurrency.lockutils [req-8c79d2c6-a1c2-48f1-b0dd-8effe7138b03 req-19284de5-7970-46cb-9193-d004a81b947e service nova] Releasing lock "refresh_cache-39c292a3-7032-4845-9ae5-2a41d13db305" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2080.893235] env[62519]: DEBUG nova.compute.manager [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] [instance: 10bfd4ac-6f11-4c96-87a0-ce74bc1193c4] Instance has had 0 of 5 cleanup attempts {{(pid=62519) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11733}} [ 2080.920034] env[62519]: DEBUG oslo_concurrency.lockutils [None req-7c6fe533-e774-4e90-a7b4-2f2fd75310a7 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Lock "interface-39c292a3-7032-4845-9ae5-2a41d13db305-bc92fa6c-1a84-46e1-b36f-1b5430ecbb4c" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 6.130s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2081.331074] env[62519]: DEBUG oslo_concurrency.lockutils [None req-b402aad4-b400-4852-a227-a3b19ec0af4d tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Acquiring lock "4049de7f-4ace-4017-8f9a-63817de5f81c-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2081.331074] env[62519]: DEBUG oslo_concurrency.lockutils [None req-b402aad4-b400-4852-a227-a3b19ec0af4d tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Lock "4049de7f-4ace-4017-8f9a-63817de5f81c-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2081.331346] env[62519]: DEBUG oslo_concurrency.lockutils [None req-b402aad4-b400-4852-a227-a3b19ec0af4d tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Lock "4049de7f-4ace-4017-8f9a-63817de5f81c-events" "released" by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2081.396303] env[62519]: DEBUG nova.compute.manager [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] [instance: 0dc95d5e-270c-44cf-b13a-d9ecd0cc3b17] Instance has had 0 of 5 cleanup attempts {{(pid=62519) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11733}} [ 2081.900073] env[62519]: DEBUG nova.compute.manager [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] [instance: 358ee402-1112-4eea-a518-a45a6bf92c31] Instance has had 0 of 5 cleanup attempts {{(pid=62519) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11733}} [ 2082.367591] env[62519]: DEBUG oslo_concurrency.lockutils [None req-b402aad4-b400-4852-a227-a3b19ec0af4d tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Acquiring lock "refresh_cache-4049de7f-4ace-4017-8f9a-63817de5f81c" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2082.367785] env[62519]: DEBUG oslo_concurrency.lockutils [None req-b402aad4-b400-4852-a227-a3b19ec0af4d tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Acquired lock "refresh_cache-4049de7f-4ace-4017-8f9a-63817de5f81c" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2082.367959] env[62519]: DEBUG nova.network.neutron [None req-b402aad4-b400-4852-a227-a3b19ec0af4d tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] [instance: 4049de7f-4ace-4017-8f9a-63817de5f81c] Building network info cache for instance {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2082.403830] env[62519]: DEBUG nova.compute.manager [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] [instance: 4e7db12e-c7f7-4d2a-b797-1371fc839a9e] Instance has had 0 of 5 cleanup attempts {{(pid=62519) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11733}} [ 2082.906470] env[62519]: DEBUG nova.compute.manager [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] [instance: 4a0f7975-5a07-4593-ae71-cabebdefe0fe] Instance has had 0 of 5 cleanup attempts {{(pid=62519) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11733}} [ 2083.092721] env[62519]: DEBUG nova.network.neutron [None req-b402aad4-b400-4852-a227-a3b19ec0af4d tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] [instance: 4049de7f-4ace-4017-8f9a-63817de5f81c] Updating instance_info_cache with network_info: [{"id": "55b99762-526d-4033-9eed-24af176c71e4", "address": "fa:16:3e:d8:0e:f7", "network": {"id": "40fc0e4f-fe1a-4b8c-ace4-4612cfc1a8fc", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-364463489-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.170", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "12977ed65a1b410a987b049e9d1dce3e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": 
{"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8e028024-a9c1-4cae-8849-ea770a7ae0e4", "external-id": "nsx-vlan-transportzone-919", "segmentation_id": 919, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap55b99762-52", "ovs_interfaceid": "55b99762-526d-4033-9eed-24af176c71e4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2083.410768] env[62519]: DEBUG nova.compute.manager [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] [instance: 5da884af-d8d2-409b-99bd-e5370e44e9f0] Instance has had 0 of 5 cleanup attempts {{(pid=62519) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11733}} [ 2083.595469] env[62519]: DEBUG oslo_concurrency.lockutils [None req-b402aad4-b400-4852-a227-a3b19ec0af4d tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Releasing lock "refresh_cache-4049de7f-4ace-4017-8f9a-63817de5f81c" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2083.667823] env[62519]: DEBUG oslo_concurrency.lockutils [None req-4d6b2d9d-7d1b-4238-a361-105f2dfaf89a tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Acquiring lock "interface-39c292a3-7032-4845-9ae5-2a41d13db305-bc92fa6c-1a84-46e1-b36f-1b5430ecbb4c" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2083.668036] env[62519]: DEBUG oslo_concurrency.lockutils [None req-4d6b2d9d-7d1b-4238-a361-105f2dfaf89a tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Lock "interface-39c292a3-7032-4845-9ae5-2a41d13db305-bc92fa6c-1a84-46e1-b36f-1b5430ecbb4c" acquired by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2083.914452] env[62519]: DEBUG nova.compute.manager [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] [instance: 2ea8304e-5b91-4908-a876-6e2c780b1da9] Instance has had 0 of 5 cleanup attempts {{(pid=62519) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11733}} [ 2084.120678] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9b095cf-39f1-4173-86e4-c1ece160c360 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2084.139704] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e410fa9e-3864-4cf0-8169-c335b946c1ff {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2084.146504] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-b402aad4-b400-4852-a227-a3b19ec0af4d tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] [instance: 4049de7f-4ace-4017-8f9a-63817de5f81c] Updating instance '4049de7f-4ace-4017-8f9a-63817de5f81c' progress to 83 {{(pid=62519) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2084.171046] env[62519]: DEBUG oslo_concurrency.lockutils [None 
req-4d6b2d9d-7d1b-4238-a361-105f2dfaf89a tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Acquiring lock "39c292a3-7032-4845-9ae5-2a41d13db305" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2084.171046] env[62519]: DEBUG oslo_concurrency.lockutils [None req-4d6b2d9d-7d1b-4238-a361-105f2dfaf89a tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Acquired lock "39c292a3-7032-4845-9ae5-2a41d13db305" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2084.171751] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb83d486-7d04-4c08-97e8-bca8f577ba77 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2084.190602] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa072068-cc0e-4fe9-9b66-4d8b1eac9f46 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2084.216412] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-4d6b2d9d-7d1b-4238-a361-105f2dfaf89a tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] [instance: 39c292a3-7032-4845-9ae5-2a41d13db305] Reconfiguring VM to detach interface {{(pid=62519) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1968}} [ 2084.216658] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-57f96b0b-6ab4-4814-9f48-28544c56a7fd {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2084.234451] env[62519]: DEBUG oslo_vmware.api [None req-4d6b2d9d-7d1b-4238-a361-105f2dfaf89a tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Waiting for the task: (returnval){ [ 2084.234451] env[62519]: value = "task-1803554" [ 2084.234451] env[62519]: _type = "Task" [ 2084.234451] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2084.242082] env[62519]: DEBUG oslo_vmware.api [None req-4d6b2d9d-7d1b-4238-a361-105f2dfaf89a tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Task: {'id': task-1803554, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2084.417949] env[62519]: DEBUG nova.compute.manager [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] [instance: ee4b10ba-1c56-47cf-a528-d6e65c286ddb] Instance has had 0 of 5 cleanup attempts {{(pid=62519) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11733}} [ 2084.653313] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-b402aad4-b400-4852-a227-a3b19ec0af4d tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] [instance: 4049de7f-4ace-4017-8f9a-63817de5f81c] Powering on the VM {{(pid=62519) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2084.653608] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-8be6da89-426c-4d72-ac9c-08aa5f1a2f6a {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2084.661146] env[62519]: DEBUG oslo_vmware.api [None req-b402aad4-b400-4852-a227-a3b19ec0af4d tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Waiting for the task: (returnval){ [ 2084.661146] env[62519]: value = "task-1803555" [ 2084.661146] env[62519]: _type = "Task" [ 2084.661146] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2084.668943] env[62519]: DEBUG oslo_vmware.api [None req-b402aad4-b400-4852-a227-a3b19ec0af4d tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Task: {'id': task-1803555, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2084.743690] env[62519]: DEBUG oslo_vmware.api [None req-4d6b2d9d-7d1b-4238-a361-105f2dfaf89a tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Task: {'id': task-1803554, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2084.921711] env[62519]: DEBUG nova.compute.manager [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] [instance: 88f9351c-253b-49dd-a88e-b8585ea742ac] Instance has had 0 of 5 cleanup attempts {{(pid=62519) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11733}} [ 2085.170594] env[62519]: DEBUG oslo_vmware.api [None req-b402aad4-b400-4852-a227-a3b19ec0af4d tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Task: {'id': task-1803555, 'name': PowerOnVM_Task, 'duration_secs': 0.366559} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2085.170843] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-b402aad4-b400-4852-a227-a3b19ec0af4d tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] [instance: 4049de7f-4ace-4017-8f9a-63817de5f81c] Powered on the VM {{(pid=62519) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2085.171037] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-b402aad4-b400-4852-a227-a3b19ec0af4d tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] [instance: 4049de7f-4ace-4017-8f9a-63817de5f81c] Updating instance '4049de7f-4ace-4017-8f9a-63817de5f81c' progress to 100 {{(pid=62519) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2085.244242] env[62519]: DEBUG oslo_vmware.api [None req-4d6b2d9d-7d1b-4238-a361-105f2dfaf89a tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Task: {'id': task-1803554, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2085.425113] env[62519]: DEBUG nova.compute.manager [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] [instance: 9f71845a-e80c-4822-b3de-717f1d83bc49] Instance has had 0 of 5 cleanup attempts {{(pid=62519) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11733}} [ 2085.746869] env[62519]: DEBUG oslo_vmware.api [None req-4d6b2d9d-7d1b-4238-a361-105f2dfaf89a tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Task: {'id': task-1803554, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2085.928545] env[62519]: DEBUG nova.compute.manager [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] [instance: 99f22198-1a65-4d0d-b665-90c7063dbdb9] Instance has had 0 of 5 cleanup attempts {{(pid=62519) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11733}} [ 2086.246014] env[62519]: DEBUG oslo_vmware.api [None req-4d6b2d9d-7d1b-4238-a361-105f2dfaf89a tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Task: {'id': task-1803554, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2086.431930] env[62519]: DEBUG nova.compute.manager [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] [instance: 417588f8-6288-4ecd-9764-dbc923549c5d] Instance has had 0 of 5 cleanup attempts {{(pid=62519) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11733}} [ 2086.746113] env[62519]: DEBUG oslo_vmware.api [None req-4d6b2d9d-7d1b-4238-a361-105f2dfaf89a tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Task: {'id': task-1803554, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2086.934732] env[62519]: DEBUG nova.compute.manager [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] [instance: bace23b3-b7f4-4f3b-8986-0076440d096d] Instance has had 0 of 5 cleanup attempts {{(pid=62519) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11733}} [ 2087.246666] env[62519]: DEBUG oslo_vmware.api [None req-4d6b2d9d-7d1b-4238-a361-105f2dfaf89a tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Task: {'id': task-1803554, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2087.438239] env[62519]: DEBUG nova.compute.manager [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] [instance: 46b3a0fb-29f6-4b66-a091-2d125b69d109] Instance has had 0 of 5 cleanup attempts {{(pid=62519) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11733}} [ 2087.746604] env[62519]: DEBUG oslo_vmware.api [None req-4d6b2d9d-7d1b-4238-a361-105f2dfaf89a tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Task: {'id': task-1803554, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2087.941957] env[62519]: DEBUG nova.compute.manager [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] [instance: 8b20b91d-d4e7-4ec2-88ec-76b444a8d8a8] Instance has had 0 of 5 cleanup attempts {{(pid=62519) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11733}} [ 2087.950902] env[62519]: DEBUG nova.network.neutron [None req-ccb7464a-b501-46be-b029-d65dfeee995e tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] [instance: 4049de7f-4ace-4017-8f9a-63817de5f81c] Port 55b99762-526d-4033-9eed-24af176c71e4 binding to destination host cpu-1 is already ACTIVE {{(pid=62519) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 2087.951176] env[62519]: DEBUG oslo_concurrency.lockutils [None req-ccb7464a-b501-46be-b029-d65dfeee995e tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Acquiring lock "refresh_cache-4049de7f-4ace-4017-8f9a-63817de5f81c" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2087.951328] env[62519]: DEBUG oslo_concurrency.lockutils [None req-ccb7464a-b501-46be-b029-d65dfeee995e tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Acquired lock "refresh_cache-4049de7f-4ace-4017-8f9a-63817de5f81c" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2087.951490] env[62519]: DEBUG nova.network.neutron [None req-ccb7464a-b501-46be-b029-d65dfeee995e tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] [instance: 4049de7f-4ace-4017-8f9a-63817de5f81c] Building network info cache for instance {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2088.248218] env[62519]: DEBUG oslo_vmware.api [None req-4d6b2d9d-7d1b-4238-a361-105f2dfaf89a tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Task: {'id': task-1803554, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2088.444682] env[62519]: DEBUG nova.compute.manager [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] [instance: f0925a44-c15b-4415-99bc-1b2366292fe4] Instance has had 0 of 5 cleanup attempts {{(pid=62519) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11733}} [ 2088.653140] env[62519]: DEBUG nova.network.neutron [None req-ccb7464a-b501-46be-b029-d65dfeee995e tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] [instance: 4049de7f-4ace-4017-8f9a-63817de5f81c] Updating instance_info_cache with network_info: [{"id": "55b99762-526d-4033-9eed-24af176c71e4", "address": "fa:16:3e:d8:0e:f7", "network": {"id": "40fc0e4f-fe1a-4b8c-ace4-4612cfc1a8fc", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-364463489-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.170", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "12977ed65a1b410a987b049e9d1dce3e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8e028024-a9c1-4cae-8849-ea770a7ae0e4", "external-id": "nsx-vlan-transportzone-919", "segmentation_id": 919, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap55b99762-52", "ovs_interfaceid": "55b99762-526d-4033-9eed-24af176c71e4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2088.747360] env[62519]: DEBUG oslo_vmware.api [None req-4d6b2d9d-7d1b-4238-a361-105f2dfaf89a tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Task: {'id': task-1803554, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2088.947933] env[62519]: DEBUG nova.compute.manager [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] [instance: 11d4a010-959f-4f53-94dc-7499007612ad] Instance has had 0 of 5 cleanup attempts {{(pid=62519) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11733}} [ 2089.156241] env[62519]: DEBUG oslo_concurrency.lockutils [None req-ccb7464a-b501-46be-b029-d65dfeee995e tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Releasing lock "refresh_cache-4049de7f-4ace-4017-8f9a-63817de5f81c" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2089.248031] env[62519]: DEBUG oslo_vmware.api [None req-4d6b2d9d-7d1b-4238-a361-105f2dfaf89a tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Task: {'id': task-1803554, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2089.451279] env[62519]: DEBUG oslo_service.periodic_task [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=62519) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2089.451482] env[62519]: DEBUG nova.compute.manager [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Cleaning up deleted instances with incomplete migration {{(pid=62519) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11758}} [ 2089.658932] env[62519]: DEBUG nova.compute.manager [None req-ccb7464a-b501-46be-b029-d65dfeee995e tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] [instance: 4049de7f-4ace-4017-8f9a-63817de5f81c] Hypervisor driver does not support instance shared storage check, assuming it's not on shared storage {{(pid=62519) _is_instance_storage_shared /opt/stack/nova/nova/compute/manager.py:902}} [ 2089.659181] env[62519]: DEBUG oslo_concurrency.lockutils [None req-ccb7464a-b501-46be-b029-d65dfeee995e tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2089.659416] env[62519]: DEBUG oslo_concurrency.lockutils [None req-ccb7464a-b501-46be-b029-d65dfeee995e tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2089.748911] env[62519]: DEBUG oslo_vmware.api [None req-4d6b2d9d-7d1b-4238-a361-105f2dfaf89a tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Task: {'id': task-1803554, 'name': ReconfigVM_Task} progress is 18%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2090.162832] env[62519]: DEBUG nova.objects.instance [None req-ccb7464a-b501-46be-b029-d65dfeee995e tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Lazy-loading 'migration_context' on Instance uuid 4049de7f-4ace-4017-8f9a-63817de5f81c {{(pid=62519) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2090.249513] env[62519]: DEBUG oslo_vmware.api [None req-4d6b2d9d-7d1b-4238-a361-105f2dfaf89a tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Task: {'id': task-1803554, 'name': ReconfigVM_Task, 'duration_secs': 5.973996} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2090.249765] env[62519]: DEBUG oslo_concurrency.lockutils [None req-4d6b2d9d-7d1b-4238-a361-105f2dfaf89a tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Releasing lock "39c292a3-7032-4845-9ae5-2a41d13db305" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2090.249923] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-4d6b2d9d-7d1b-4238-a361-105f2dfaf89a tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] [instance: 39c292a3-7032-4845-9ae5-2a41d13db305] Reconfigured VM to detach interface {{(pid=62519) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1978}} [ 2090.750831] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff75b6ca-0be9-4384-b8ef-e1e8376644f4 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2090.758584] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a5c64565-c3bb-463a-a147-5f9506ed0df4 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2090.790636] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d7fb8f2-ab72-4c0b-a533-05afb9e75775 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2090.798744] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58b73496-bdea-42d3-9dd9-a1207031c4ac {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2090.812017] env[62519]: DEBUG nova.compute.provider_tree [None req-ccb7464a-b501-46be-b029-d65dfeee995e tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Inventory has not changed in ProviderTree for provider: f8ca0d98-9158-4b85-ae0e-b106f966dd44 {{(pid=62519) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2090.947364] env[62519]: DEBUG oslo_service.periodic_task [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62519) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2090.947618] env[62519]: DEBUG oslo_service.periodic_task [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62519) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2090.947721] env[62519]: DEBUG nova.compute.manager [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Starting heal instance info cache {{(pid=62519) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10418}} [ 2091.314885] env[62519]: DEBUG nova.scheduler.client.report [None req-ccb7464a-b501-46be-b029-d65dfeee995e tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Inventory has not changed for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 
'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 159, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2091.521543] env[62519]: DEBUG oslo_concurrency.lockutils [None req-4d6b2d9d-7d1b-4238-a361-105f2dfaf89a tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Acquiring lock "refresh_cache-39c292a3-7032-4845-9ae5-2a41d13db305" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2091.521774] env[62519]: DEBUG oslo_concurrency.lockutils [None req-4d6b2d9d-7d1b-4238-a361-105f2dfaf89a tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Acquired lock "refresh_cache-39c292a3-7032-4845-9ae5-2a41d13db305" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2091.521963] env[62519]: DEBUG nova.network.neutron [None req-4d6b2d9d-7d1b-4238-a361-105f2dfaf89a tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] [instance: 39c292a3-7032-4845-9ae5-2a41d13db305] Building network info cache for instance {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2092.065279] env[62519]: DEBUG oslo_concurrency.lockutils [None req-d9129d33-91f4-4d9c-9dd3-83375e2247a7 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Acquiring lock "39c292a3-7032-4845-9ae5-2a41d13db305" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2092.065654] env[62519]: DEBUG oslo_concurrency.lockutils [None req-d9129d33-91f4-4d9c-9dd3-83375e2247a7 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Lock "39c292a3-7032-4845-9ae5-2a41d13db305" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2092.065958] env[62519]: DEBUG oslo_concurrency.lockutils [None req-d9129d33-91f4-4d9c-9dd3-83375e2247a7 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Acquiring lock "39c292a3-7032-4845-9ae5-2a41d13db305-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2092.066261] env[62519]: DEBUG oslo_concurrency.lockutils [None req-d9129d33-91f4-4d9c-9dd3-83375e2247a7 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Lock "39c292a3-7032-4845-9ae5-2a41d13db305-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2092.066533] env[62519]: DEBUG oslo_concurrency.lockutils [None req-d9129d33-91f4-4d9c-9dd3-83375e2247a7 tempest-AttachInterfacesTestJSON-1996275057 
tempest-AttachInterfacesTestJSON-1996275057-project-member] Lock "39c292a3-7032-4845-9ae5-2a41d13db305-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2092.068600] env[62519]: INFO nova.compute.manager [None req-d9129d33-91f4-4d9c-9dd3-83375e2247a7 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] [instance: 39c292a3-7032-4845-9ae5-2a41d13db305] Terminating instance [ 2092.230232] env[62519]: INFO nova.network.neutron [None req-4d6b2d9d-7d1b-4238-a361-105f2dfaf89a tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] [instance: 39c292a3-7032-4845-9ae5-2a41d13db305] Port bc92fa6c-1a84-46e1-b36f-1b5430ecbb4c from network info_cache is no longer associated with instance in Neutron. Removing from network info_cache. [ 2092.230581] env[62519]: DEBUG nova.network.neutron [None req-4d6b2d9d-7d1b-4238-a361-105f2dfaf89a tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] [instance: 39c292a3-7032-4845-9ae5-2a41d13db305] Updating instance_info_cache with network_info: [{"id": "03d52b0a-655f-4661-87b2-35d1b1af19a4", "address": "fa:16:3e:3c:7b:8d", "network": {"id": "3996e7f6-f093-4152-af91-6fb77f32a1c5", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-2013009403-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.142", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "63a46158057949478e5c79fbe0d4d5d4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3093647a-bab7-4562-ada0-428725e8c0fc", "external-id": "nsx-vlan-transportzone-660", "segmentation_id": 660, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap03d52b0a-65", "ovs_interfaceid": "03d52b0a-655f-4661-87b2-35d1b1af19a4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2092.324771] env[62519]: DEBUG oslo_concurrency.lockutils [None req-ccb7464a-b501-46be-b029-d65dfeee995e tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" :: held 2.665s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2092.483313] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Acquiring lock "refresh_cache-a18af9b7-4548-42d1-8459-508298cb96dc" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2092.483496] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Acquired lock 
"refresh_cache-a18af9b7-4548-42d1-8459-508298cb96dc" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2092.483617] env[62519]: DEBUG nova.network.neutron [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] [instance: a18af9b7-4548-42d1-8459-508298cb96dc] Forcefully refreshing network info cache for instance {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2061}} [ 2092.572589] env[62519]: DEBUG nova.compute.manager [None req-d9129d33-91f4-4d9c-9dd3-83375e2247a7 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] [instance: 39c292a3-7032-4845-9ae5-2a41d13db305] Start destroying the instance on the hypervisor. {{(pid=62519) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3166}} [ 2092.572820] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-d9129d33-91f4-4d9c-9dd3-83375e2247a7 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] [instance: 39c292a3-7032-4845-9ae5-2a41d13db305] Destroying instance {{(pid=62519) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2092.573733] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75fee4d8-35a9-4ce0-9533-e3630541add8 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2092.582170] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-d9129d33-91f4-4d9c-9dd3-83375e2247a7 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] [instance: 39c292a3-7032-4845-9ae5-2a41d13db305] Powering off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2092.582356] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-6868e97c-3d10-43d2-ad95-662a4fc19eb4 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2092.588585] env[62519]: DEBUG oslo_vmware.api [None req-d9129d33-91f4-4d9c-9dd3-83375e2247a7 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Waiting for the task: (returnval){ [ 2092.588585] env[62519]: value = "task-1803556" [ 2092.588585] env[62519]: _type = "Task" [ 2092.588585] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2092.596125] env[62519]: DEBUG oslo_vmware.api [None req-d9129d33-91f4-4d9c-9dd3-83375e2247a7 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Task: {'id': task-1803556, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2092.733513] env[62519]: DEBUG oslo_concurrency.lockutils [None req-4d6b2d9d-7d1b-4238-a361-105f2dfaf89a tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Releasing lock "refresh_cache-39c292a3-7032-4845-9ae5-2a41d13db305" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2093.098299] env[62519]: DEBUG oslo_vmware.api [None req-d9129d33-91f4-4d9c-9dd3-83375e2247a7 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Task: {'id': task-1803556, 'name': PowerOffVM_Task, 'duration_secs': 0.220831} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2093.098519] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-d9129d33-91f4-4d9c-9dd3-83375e2247a7 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] [instance: 39c292a3-7032-4845-9ae5-2a41d13db305] Powered off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2093.098694] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-d9129d33-91f4-4d9c-9dd3-83375e2247a7 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] [instance: 39c292a3-7032-4845-9ae5-2a41d13db305] Unregistering the VM {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2093.099243] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-8f78e2e4-af60-4a84-a942-d217d7f18997 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2093.185943] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-d9129d33-91f4-4d9c-9dd3-83375e2247a7 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] [instance: 39c292a3-7032-4845-9ae5-2a41d13db305] Unregistered the VM {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2093.186170] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-d9129d33-91f4-4d9c-9dd3-83375e2247a7 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] [instance: 39c292a3-7032-4845-9ae5-2a41d13db305] Deleting contents of the VM from datastore datastore1 {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2093.186366] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-d9129d33-91f4-4d9c-9dd3-83375e2247a7 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Deleting the datastore file [datastore1] 39c292a3-7032-4845-9ae5-2a41d13db305 {{(pid=62519) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2093.186802] env[62519]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ef2d0358-3735-41c1-bd9c-03fd505c021d {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2093.194062] env[62519]: DEBUG oslo_vmware.api [None req-d9129d33-91f4-4d9c-9dd3-83375e2247a7 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Waiting for the task: (returnval){ [ 2093.194062] env[62519]: value = 
"task-1803558" [ 2093.194062] env[62519]: _type = "Task" [ 2093.194062] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2093.202477] env[62519]: DEBUG oslo_vmware.api [None req-d9129d33-91f4-4d9c-9dd3-83375e2247a7 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Task: {'id': task-1803558, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2093.237643] env[62519]: DEBUG oslo_concurrency.lockutils [None req-4d6b2d9d-7d1b-4238-a361-105f2dfaf89a tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Lock "interface-39c292a3-7032-4845-9ae5-2a41d13db305-bc92fa6c-1a84-46e1-b36f-1b5430ecbb4c" "released" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: held 9.569s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2093.687690] env[62519]: DEBUG oslo_concurrency.lockutils [None req-044deee8-d16f-478d-bde6-3dc8db193339 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Acquiring lock "47439070-54d8-454c-bf1d-7a2a33d82e9a" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2093.688068] env[62519]: DEBUG oslo_concurrency.lockutils [None req-044deee8-d16f-478d-bde6-3dc8db193339 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Lock "47439070-54d8-454c-bf1d-7a2a33d82e9a" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2093.703994] env[62519]: DEBUG oslo_vmware.api [None req-d9129d33-91f4-4d9c-9dd3-83375e2247a7 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Task: {'id': task-1803558, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.140115} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2093.704857] env[62519]: DEBUG nova.network.neutron [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] [instance: a18af9b7-4548-42d1-8459-508298cb96dc] Updating instance_info_cache with network_info: [{"id": "7b3a7ba6-9ee9-40fb-94c8-f7593ffc1ca2", "address": "fa:16:3e:4a:4f:32", "network": {"id": "3996e7f6-f093-4152-af91-6fb77f32a1c5", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-2013009403-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "63a46158057949478e5c79fbe0d4d5d4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3093647a-bab7-4562-ada0-428725e8c0fc", "external-id": "nsx-vlan-transportzone-660", "segmentation_id": 660, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7b3a7ba6-9e", "ovs_interfaceid": "7b3a7ba6-9ee9-40fb-94c8-f7593ffc1ca2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2093.706308] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-d9129d33-91f4-4d9c-9dd3-83375e2247a7 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Deleted the datastore file {{(pid=62519) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2093.706611] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-d9129d33-91f4-4d9c-9dd3-83375e2247a7 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] [instance: 39c292a3-7032-4845-9ae5-2a41d13db305] Deleted contents of the VM from datastore datastore1 {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2093.706802] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-d9129d33-91f4-4d9c-9dd3-83375e2247a7 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] [instance: 39c292a3-7032-4845-9ae5-2a41d13db305] Instance destroyed {{(pid=62519) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2093.706975] env[62519]: INFO nova.compute.manager [None req-d9129d33-91f4-4d9c-9dd3-83375e2247a7 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] [instance: 39c292a3-7032-4845-9ae5-2a41d13db305] Took 1.13 seconds to destroy the instance on the hypervisor. [ 2093.707229] env[62519]: DEBUG oslo.service.loopingcall [None req-d9129d33-91f4-4d9c-9dd3-83375e2247a7 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62519) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2093.707416] env[62519]: DEBUG nova.compute.manager [-] [instance: 39c292a3-7032-4845-9ae5-2a41d13db305] Deallocating network for instance {{(pid=62519) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2297}} [ 2093.707505] env[62519]: DEBUG nova.network.neutron [-] [instance: 39c292a3-7032-4845-9ae5-2a41d13db305] deallocate_for_instance() {{(pid=62519) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2093.864815] env[62519]: INFO nova.compute.manager [None req-ccb7464a-b501-46be-b029-d65dfeee995e tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] [instance: 4049de7f-4ace-4017-8f9a-63817de5f81c] Swapping old allocation on dict_keys(['f8ca0d98-9158-4b85-ae0e-b106f966dd44']) held by migration d672b29e-b5ae-40b8-b88e-4dad422f550a for instance [ 2093.886903] env[62519]: DEBUG nova.scheduler.client.report [None req-ccb7464a-b501-46be-b029-d65dfeee995e tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Overwriting current allocation {'allocations': {'f8ca0d98-9158-4b85-ae0e-b106f966dd44': {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}, 'generation': 182}}, 'project_id': '12977ed65a1b410a987b049e9d1dce3e', 'user_id': 'c586ba4158b947b0a25d1614c17ebb51', 'consumer_generation': 1} on consumer 4049de7f-4ace-4017-8f9a-63817de5f81c {{(pid=62519) move_allocations /opt/stack/nova/nova/scheduler/client/report.py:2036}} [ 2093.988135] env[62519]: DEBUG oslo_concurrency.lockutils [None req-ccb7464a-b501-46be-b029-d65dfeee995e tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Acquiring lock "refresh_cache-4049de7f-4ace-4017-8f9a-63817de5f81c" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2093.988344] env[62519]: DEBUG oslo_concurrency.lockutils [None req-ccb7464a-b501-46be-b029-d65dfeee995e tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Acquired lock "refresh_cache-4049de7f-4ace-4017-8f9a-63817de5f81c" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2093.988527] env[62519]: DEBUG nova.network.neutron [None req-ccb7464a-b501-46be-b029-d65dfeee995e tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] [instance: 4049de7f-4ace-4017-8f9a-63817de5f81c] Building network info cache for instance {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2094.191610] env[62519]: INFO nova.compute.manager [None req-044deee8-d16f-478d-bde6-3dc8db193339 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] [instance: 47439070-54d8-454c-bf1d-7a2a33d82e9a] Detaching volume 64f7dc0b-1b91-40f1-85b7-8256d2cde712 [ 2094.207393] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Releasing lock "refresh_cache-a18af9b7-4548-42d1-8459-508298cb96dc" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2094.207591] env[62519]: DEBUG nova.compute.manager [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] [instance: a18af9b7-4548-42d1-8459-508298cb96dc] Updated the network info_cache for instance {{(pid=62519) _heal_instance_info_cache 
/opt/stack/nova/nova/compute/manager.py:10489}} [ 2094.207792] env[62519]: DEBUG oslo_service.periodic_task [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62519) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2094.207945] env[62519]: DEBUG oslo_service.periodic_task [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62519) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2094.208130] env[62519]: DEBUG oslo_service.periodic_task [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62519) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2094.208258] env[62519]: DEBUG nova.compute.manager [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=62519) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11037}} [ 2094.222287] env[62519]: INFO nova.virt.block_device [None req-044deee8-d16f-478d-bde6-3dc8db193339 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] [instance: 47439070-54d8-454c-bf1d-7a2a33d82e9a] Attempting to driver detach volume 64f7dc0b-1b91-40f1-85b7-8256d2cde712 from mountpoint /dev/sdb [ 2094.223318] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-044deee8-d16f-478d-bde6-3dc8db193339 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] [instance: 47439070-54d8-454c-bf1d-7a2a33d82e9a] Volume detach. 
Driver type: vmdk {{(pid=62519) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 2094.223318] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-044deee8-d16f-478d-bde6-3dc8db193339 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] [instance: 47439070-54d8-454c-bf1d-7a2a33d82e9a] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-373881', 'volume_id': '64f7dc0b-1b91-40f1-85b7-8256d2cde712', 'name': 'volume-64f7dc0b-1b91-40f1-85b7-8256d2cde712', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'attached', 'instance': '47439070-54d8-454c-bf1d-7a2a33d82e9a', 'attached_at': '', 'detached_at': '', 'volume_id': '64f7dc0b-1b91-40f1-85b7-8256d2cde712', 'serial': '64f7dc0b-1b91-40f1-85b7-8256d2cde712'} {{(pid=62519) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 2094.223616] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee80a689-24f5-4a0f-b09d-778f755085cf {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2094.246601] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c023966-1a8f-4dd9-ac03-007b5668d97b {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2094.254818] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-628b4827-9871-4856-b36e-2c52b5e363b9 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2094.278127] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0fca3dd9-c5ab-46e6-bca6-029b0e24ed3f {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2094.297175] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-044deee8-d16f-478d-bde6-3dc8db193339 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] The volume has not been displaced from its original location: [datastore1] volume-64f7dc0b-1b91-40f1-85b7-8256d2cde712/volume-64f7dc0b-1b91-40f1-85b7-8256d2cde712.vmdk. No consolidation needed. 
{{(pid=62519) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 2094.302396] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-044deee8-d16f-478d-bde6-3dc8db193339 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] [instance: 47439070-54d8-454c-bf1d-7a2a33d82e9a] Reconfiguring VM instance instance-00000072 to detach disk 2001 {{(pid=62519) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 2094.302726] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-13362c9c-41bd-4df5-a089-5570fd11a7b0 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2094.321035] env[62519]: DEBUG oslo_vmware.api [None req-044deee8-d16f-478d-bde6-3dc8db193339 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Waiting for the task: (returnval){ [ 2094.321035] env[62519]: value = "task-1803559" [ 2094.321035] env[62519]: _type = "Task" [ 2094.321035] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2094.606218] env[62519]: DEBUG nova.compute.manager [req-14cf5197-5a58-4811-96f2-dc66bab2edc6 req-ef313f9d-5eb9-4395-9973-852d8ad13dfb service nova] [instance: 39c292a3-7032-4845-9ae5-2a41d13db305] Received event network-vif-deleted-03d52b0a-655f-4661-87b2-35d1b1af19a4 {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 2094.606468] env[62519]: INFO nova.compute.manager [req-14cf5197-5a58-4811-96f2-dc66bab2edc6 req-ef313f9d-5eb9-4395-9973-852d8ad13dfb service nova] [instance: 39c292a3-7032-4845-9ae5-2a41d13db305] Neutron deleted interface 03d52b0a-655f-4661-87b2-35d1b1af19a4; detaching it from the instance and deleting it from the info cache [ 2094.606649] env[62519]: DEBUG nova.network.neutron [req-14cf5197-5a58-4811-96f2-dc66bab2edc6 req-ef313f9d-5eb9-4395-9973-852d8ad13dfb service nova] [instance: 39c292a3-7032-4845-9ae5-2a41d13db305] Updating instance_info_cache with network_info: [] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2094.747400] env[62519]: DEBUG nova.network.neutron [None req-ccb7464a-b501-46be-b029-d65dfeee995e tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] [instance: 4049de7f-4ace-4017-8f9a-63817de5f81c] Updating instance_info_cache with network_info: [{"id": "55b99762-526d-4033-9eed-24af176c71e4", "address": "fa:16:3e:d8:0e:f7", "network": {"id": "40fc0e4f-fe1a-4b8c-ace4-4612cfc1a8fc", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-364463489-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.170", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "12977ed65a1b410a987b049e9d1dce3e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8e028024-a9c1-4cae-8849-ea770a7ae0e4", "external-id": "nsx-vlan-transportzone-919", "segmentation_id": 919, 
"bound_drivers": {"0": "nsxv3"}}, "devname": "tap55b99762-52", "ovs_interfaceid": "55b99762-526d-4033-9eed-24af176c71e4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2094.830954] env[62519]: DEBUG oslo_vmware.api [None req-044deee8-d16f-478d-bde6-3dc8db193339 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Task: {'id': task-1803559, 'name': ReconfigVM_Task, 'duration_secs': 0.312967} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2094.831253] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-044deee8-d16f-478d-bde6-3dc8db193339 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] [instance: 47439070-54d8-454c-bf1d-7a2a33d82e9a] Reconfigured VM instance instance-00000072 to detach disk 2001 {{(pid=62519) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 2094.835899] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d2665082-9715-43a1-a1bd-a19691fc61da {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2094.851910] env[62519]: DEBUG oslo_vmware.api [None req-044deee8-d16f-478d-bde6-3dc8db193339 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Waiting for the task: (returnval){ [ 2094.851910] env[62519]: value = "task-1803560" [ 2094.851910] env[62519]: _type = "Task" [ 2094.851910] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2094.859452] env[62519]: DEBUG oslo_vmware.api [None req-044deee8-d16f-478d-bde6-3dc8db193339 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Task: {'id': task-1803560, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2095.074671] env[62519]: DEBUG nova.network.neutron [-] [instance: 39c292a3-7032-4845-9ae5-2a41d13db305] Updating instance_info_cache with network_info: [] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2095.109376] env[62519]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-1f98a75e-174a-422d-bfee-9d92c10a441b {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2095.119302] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44bc9be3-2116-407a-afed-981762d3ce9c {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2095.147246] env[62519]: DEBUG nova.compute.manager [req-14cf5197-5a58-4811-96f2-dc66bab2edc6 req-ef313f9d-5eb9-4395-9973-852d8ad13dfb service nova] [instance: 39c292a3-7032-4845-9ae5-2a41d13db305] Detach interface failed, port_id=03d52b0a-655f-4661-87b2-35d1b1af19a4, reason: Instance 39c292a3-7032-4845-9ae5-2a41d13db305 could not be found. 
{{(pid=62519) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11455}} [ 2095.250376] env[62519]: DEBUG oslo_concurrency.lockutils [None req-ccb7464a-b501-46be-b029-d65dfeee995e tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Releasing lock "refresh_cache-4049de7f-4ace-4017-8f9a-63817de5f81c" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2095.251024] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-ccb7464a-b501-46be-b029-d65dfeee995e tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] [instance: 4049de7f-4ace-4017-8f9a-63817de5f81c] Powering off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2095.251186] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-2121cb2e-af9b-47b3-8233-57858815d997 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2095.258389] env[62519]: DEBUG oslo_vmware.api [None req-ccb7464a-b501-46be-b029-d65dfeee995e tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Waiting for the task: (returnval){ [ 2095.258389] env[62519]: value = "task-1803561" [ 2095.258389] env[62519]: _type = "Task" [ 2095.258389] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2095.266728] env[62519]: DEBUG oslo_vmware.api [None req-ccb7464a-b501-46be-b029-d65dfeee995e tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Task: {'id': task-1803561, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2095.362774] env[62519]: DEBUG oslo_vmware.api [None req-044deee8-d16f-478d-bde6-3dc8db193339 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Task: {'id': task-1803560, 'name': ReconfigVM_Task, 'duration_secs': 0.198224} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2095.363059] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-044deee8-d16f-478d-bde6-3dc8db193339 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] [instance: 47439070-54d8-454c-bf1d-7a2a33d82e9a] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-373881', 'volume_id': '64f7dc0b-1b91-40f1-85b7-8256d2cde712', 'name': 'volume-64f7dc0b-1b91-40f1-85b7-8256d2cde712', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'attached', 'instance': '47439070-54d8-454c-bf1d-7a2a33d82e9a', 'attached_at': '', 'detached_at': '', 'volume_id': '64f7dc0b-1b91-40f1-85b7-8256d2cde712', 'serial': '64f7dc0b-1b91-40f1-85b7-8256d2cde712'} {{(pid=62519) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 2095.577140] env[62519]: INFO nova.compute.manager [-] [instance: 39c292a3-7032-4845-9ae5-2a41d13db305] Took 1.87 seconds to deallocate network for instance. 
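The records above trace Nova's VMware volume-detach path: volumeops issues a VirtualMachine.ReconfigVM_Task against vCenter to drop the VMDK backing, and oslo_vmware's wait_for_task then polls the vCenter task object until it reports success (the repeated "progress is N%" lines). The following is a minimal Python sketch of that poll-until-done pattern only; the session object and its get_task_info helper are illustrative assumptions for this sketch, not the real oslo_vmware API.

    import time

    class TaskFailed(Exception):
        """Raised when the vCenter task ends in an error state."""

    def wait_for_vcenter_task(session, task_ref, poll_interval=0.5, timeout=300):
        # Poll the task's info until it leaves the queued/running states,
        # mirroring the "progress is N%" records in the log above.
        # session.get_task_info is an assumed helper returning an object
        # with state, progress and error fields (hypothetical, not oslo_vmware).
        deadline = time.monotonic() + timeout
        while time.monotonic() < deadline:
            info = session.get_task_info(task_ref)
            if info.state == "success":
                return info.result
            if info.state == "error":
                raise TaskFailed(getattr(info, "error", "unknown error"))
            # queued / running: report progress and try again
            print(f"task {task_ref}: {info.state}, progress {info.progress or 0}%")
            time.sleep(poll_interval)
        raise TimeoutError(f"task {task_ref} did not complete within {timeout}s")

In the records above, task-1803559 and task-1803560 are two such ReconfigVM_Task operations in the detach of volume 64f7dc0b-1b91-40f1-85b7-8256d2cde712, each polled to completion in under a second (0.31s and 0.20s).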
[ 2095.770021] env[62519]: DEBUG oslo_vmware.api [None req-ccb7464a-b501-46be-b029-d65dfeee995e tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Task: {'id': task-1803561, 'name': PowerOffVM_Task, 'duration_secs': 0.179149} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2095.770333] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-ccb7464a-b501-46be-b029-d65dfeee995e tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] [instance: 4049de7f-4ace-4017-8f9a-63817de5f81c] Powered off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2095.770899] env[62519]: DEBUG nova.virt.hardware [None req-ccb7464a-b501-46be-b029-d65dfeee995e tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T07:56:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=15793716-f1d9-4a86-9030-717adf498693,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 2095.771118] env[62519]: DEBUG nova.virt.hardware [None req-ccb7464a-b501-46be-b029-d65dfeee995e tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Flavor limits 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2095.771274] env[62519]: DEBUG nova.virt.hardware [None req-ccb7464a-b501-46be-b029-d65dfeee995e tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Image limits 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 2095.771460] env[62519]: DEBUG nova.virt.hardware [None req-ccb7464a-b501-46be-b029-d65dfeee995e tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Flavor pref 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2095.771603] env[62519]: DEBUG nova.virt.hardware [None req-ccb7464a-b501-46be-b029-d65dfeee995e tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Image pref 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 2095.771761] env[62519]: DEBUG nova.virt.hardware [None req-ccb7464a-b501-46be-b029-d65dfeee995e tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 2095.771982] env[62519]: DEBUG nova.virt.hardware [None req-ccb7464a-b501-46be-b029-d65dfeee995e tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum 
VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 2095.772160] env[62519]: DEBUG nova.virt.hardware [None req-ccb7464a-b501-46be-b029-d65dfeee995e tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62519) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 2095.772326] env[62519]: DEBUG nova.virt.hardware [None req-ccb7464a-b501-46be-b029-d65dfeee995e tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Got 1 possible topologies {{(pid=62519) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 2095.772485] env[62519]: DEBUG nova.virt.hardware [None req-ccb7464a-b501-46be-b029-d65dfeee995e tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 2095.772651] env[62519]: DEBUG nova.virt.hardware [None req-ccb7464a-b501-46be-b029-d65dfeee995e tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 2095.777570] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4b4ed958-7dd7-4a4c-bd8a-c6e6d5f72311 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2095.792932] env[62519]: DEBUG oslo_vmware.api [None req-ccb7464a-b501-46be-b029-d65dfeee995e tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Waiting for the task: (returnval){ [ 2095.792932] env[62519]: value = "task-1803562" [ 2095.792932] env[62519]: _type = "Task" [ 2095.792932] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2095.801068] env[62519]: DEBUG oslo_vmware.api [None req-ccb7464a-b501-46be-b029-d65dfeee995e tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Task: {'id': task-1803562, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2095.909473] env[62519]: DEBUG nova.objects.instance [None req-044deee8-d16f-478d-bde6-3dc8db193339 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Lazy-loading 'flavor' on Instance uuid 47439070-54d8-454c-bf1d-7a2a33d82e9a {{(pid=62519) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2096.084493] env[62519]: DEBUG oslo_concurrency.lockutils [None req-d9129d33-91f4-4d9c-9dd3-83375e2247a7 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2096.084694] env[62519]: DEBUG oslo_concurrency.lockutils [None req-d9129d33-91f4-4d9c-9dd3-83375e2247a7 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2096.085013] env[62519]: DEBUG nova.objects.instance [None req-d9129d33-91f4-4d9c-9dd3-83375e2247a7 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Lazy-loading 'resources' on Instance uuid 39c292a3-7032-4845-9ae5-2a41d13db305 {{(pid=62519) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2096.303408] env[62519]: DEBUG oslo_vmware.api [None req-ccb7464a-b501-46be-b029-d65dfeee995e tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Task: {'id': task-1803562, 'name': ReconfigVM_Task, 'duration_secs': 0.130015} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2096.304041] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b4da4f6-64ce-4c83-9a20-abe4ca0bf4aa {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2096.322207] env[62519]: DEBUG nova.virt.hardware [None req-ccb7464a-b501-46be-b029-d65dfeee995e tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T07:56:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=15793716-f1d9-4a86-9030-717adf498693,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 2096.322447] env[62519]: DEBUG nova.virt.hardware [None req-ccb7464a-b501-46be-b029-d65dfeee995e tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Flavor limits 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2096.322604] env[62519]: DEBUG nova.virt.hardware [None req-ccb7464a-b501-46be-b029-d65dfeee995e tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Image limits 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 2096.322820] env[62519]: DEBUG nova.virt.hardware [None req-ccb7464a-b501-46be-b029-d65dfeee995e tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Flavor pref 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2096.323015] env[62519]: DEBUG nova.virt.hardware [None req-ccb7464a-b501-46be-b029-d65dfeee995e tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Image pref 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 2096.323171] env[62519]: DEBUG nova.virt.hardware [None req-ccb7464a-b501-46be-b029-d65dfeee995e tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 2096.323379] env[62519]: DEBUG nova.virt.hardware [None req-ccb7464a-b501-46be-b029-d65dfeee995e tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 2096.323538] env[62519]: DEBUG nova.virt.hardware [None req-ccb7464a-b501-46be-b029-d65dfeee995e tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62519) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:475}} [ 2096.323702] env[62519]: DEBUG nova.virt.hardware [None req-ccb7464a-b501-46be-b029-d65dfeee995e tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Got 1 possible topologies {{(pid=62519) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 2096.323868] env[62519]: DEBUG nova.virt.hardware [None req-ccb7464a-b501-46be-b029-d65dfeee995e tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 2096.324051] env[62519]: DEBUG nova.virt.hardware [None req-ccb7464a-b501-46be-b029-d65dfeee995e tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 2096.324866] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fdce38e8-8e16-43ec-819e-641d15f61f68 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2096.331121] env[62519]: DEBUG oslo_vmware.api [None req-ccb7464a-b501-46be-b029-d65dfeee995e tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Waiting for the task: (returnval){ [ 2096.331121] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]52914483-d939-7ebf-611a-b3237ca84665" [ 2096.331121] env[62519]: _type = "Task" [ 2096.331121] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2096.338794] env[62519]: DEBUG oslo_vmware.api [None req-ccb7464a-b501-46be-b029-d65dfeee995e tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52914483-d939-7ebf-611a-b3237ca84665, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2096.657758] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12dab6af-ba46-45ab-9e2a-c54d15f99df4 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2096.665020] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93c75cc5-6b8f-4aad-9b7d-f0e82542cf09 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2096.696081] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d3dc524-a473-4d15-be63-4cf40279bfd2 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2096.703353] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb303c4a-6fbc-4844-82b6-56dfe4982cd6 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2096.716225] env[62519]: DEBUG nova.compute.provider_tree [None req-d9129d33-91f4-4d9c-9dd3-83375e2247a7 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Inventory has not changed in ProviderTree for provider: f8ca0d98-9158-4b85-ae0e-b106f966dd44 {{(pid=62519) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2096.841146] env[62519]: DEBUG oslo_vmware.api [None req-ccb7464a-b501-46be-b029-d65dfeee995e tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52914483-d939-7ebf-611a-b3237ca84665, 'name': SearchDatastore_Task, 'duration_secs': 0.008495} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2096.846552] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-ccb7464a-b501-46be-b029-d65dfeee995e tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] [instance: 4049de7f-4ace-4017-8f9a-63817de5f81c] Reconfiguring VM instance instance-00000077 to detach disk 2000 {{(pid=62519) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 2096.846789] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8c3e72e8-f28c-45ce-913f-f34f941d2b5b {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2096.864893] env[62519]: DEBUG oslo_vmware.api [None req-ccb7464a-b501-46be-b029-d65dfeee995e tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Waiting for the task: (returnval){ [ 2096.864893] env[62519]: value = "task-1803563" [ 2096.864893] env[62519]: _type = "Task" [ 2096.864893] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2096.872262] env[62519]: DEBUG oslo_vmware.api [None req-ccb7464a-b501-46be-b029-d65dfeee995e tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Task: {'id': task-1803563, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2096.915859] env[62519]: DEBUG oslo_concurrency.lockutils [None req-044deee8-d16f-478d-bde6-3dc8db193339 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Lock "47439070-54d8-454c-bf1d-7a2a33d82e9a" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.228s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2097.219409] env[62519]: DEBUG nova.scheduler.client.report [None req-d9129d33-91f4-4d9c-9dd3-83375e2247a7 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Inventory has not changed for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 159, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2097.375022] env[62519]: DEBUG oslo_vmware.api [None req-ccb7464a-b501-46be-b029-d65dfeee995e tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Task: {'id': task-1803563, 'name': ReconfigVM_Task, 'duration_secs': 0.209903} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2097.375308] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-ccb7464a-b501-46be-b029-d65dfeee995e tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] [instance: 4049de7f-4ace-4017-8f9a-63817de5f81c] Reconfigured VM instance instance-00000077 to detach disk 2000 {{(pid=62519) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 2097.376343] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a3cd557b-a227-42d3-97e7-01ce7c870924 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2097.397850] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-ccb7464a-b501-46be-b029-d65dfeee995e tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] [instance: 4049de7f-4ace-4017-8f9a-63817de5f81c] Reconfiguring VM instance instance-00000077 to attach disk [datastore1] 4049de7f-4ace-4017-8f9a-63817de5f81c/4049de7f-4ace-4017-8f9a-63817de5f81c.vmdk or device None with type thin {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2097.398151] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6b6cf4db-77ca-4bff-ab2c-74b0615f8f00 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2097.419483] env[62519]: DEBUG oslo_vmware.api [None req-ccb7464a-b501-46be-b029-d65dfeee995e tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Waiting for the task: (returnval){ [ 2097.419483] env[62519]: value = "task-1803564" [ 2097.419483] env[62519]: _type = "Task" [ 2097.419483] env[62519]: } to complete. 
{{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2097.428180] env[62519]: DEBUG oslo_vmware.api [None req-ccb7464a-b501-46be-b029-d65dfeee995e tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Task: {'id': task-1803564, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2097.724937] env[62519]: DEBUG oslo_concurrency.lockutils [None req-d9129d33-91f4-4d9c-9dd3-83375e2247a7 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.640s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2097.746690] env[62519]: INFO nova.scheduler.client.report [None req-d9129d33-91f4-4d9c-9dd3-83375e2247a7 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Deleted allocations for instance 39c292a3-7032-4845-9ae5-2a41d13db305 [ 2097.920942] env[62519]: DEBUG oslo_concurrency.lockutils [None req-aa80172b-cb51-49fc-9eba-26bd67892e48 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Acquiring lock "47439070-54d8-454c-bf1d-7a2a33d82e9a" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2097.921309] env[62519]: DEBUG oslo_concurrency.lockutils [None req-aa80172b-cb51-49fc-9eba-26bd67892e48 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Lock "47439070-54d8-454c-bf1d-7a2a33d82e9a" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2097.921447] env[62519]: DEBUG oslo_concurrency.lockutils [None req-aa80172b-cb51-49fc-9eba-26bd67892e48 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Acquiring lock "47439070-54d8-454c-bf1d-7a2a33d82e9a-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2097.921629] env[62519]: DEBUG oslo_concurrency.lockutils [None req-aa80172b-cb51-49fc-9eba-26bd67892e48 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Lock "47439070-54d8-454c-bf1d-7a2a33d82e9a-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2097.921840] env[62519]: DEBUG oslo_concurrency.lockutils [None req-aa80172b-cb51-49fc-9eba-26bd67892e48 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Lock "47439070-54d8-454c-bf1d-7a2a33d82e9a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2097.929196] 
env[62519]: INFO nova.compute.manager [None req-aa80172b-cb51-49fc-9eba-26bd67892e48 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] [instance: 47439070-54d8-454c-bf1d-7a2a33d82e9a] Terminating instance [ 2097.936707] env[62519]: DEBUG oslo_vmware.api [None req-ccb7464a-b501-46be-b029-d65dfeee995e tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Task: {'id': task-1803564, 'name': ReconfigVM_Task, 'duration_secs': 0.274362} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2097.937886] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-ccb7464a-b501-46be-b029-d65dfeee995e tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] [instance: 4049de7f-4ace-4017-8f9a-63817de5f81c] Reconfigured VM instance instance-00000077 to attach disk [datastore1] 4049de7f-4ace-4017-8f9a-63817de5f81c/4049de7f-4ace-4017-8f9a-63817de5f81c.vmdk or device None with type thin {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2097.938511] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d0c4311-2d06-4851-a29d-99e118f9919a {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2097.957152] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-864f1113-c1e7-4bb0-9918-020f197e7471 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2097.974960] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b91f65d-ef1c-4b9d-8729-5a45d078f49a {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2097.993761] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da33fdbb-f06e-4920-86bd-8d3e8089e10b {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2097.999563] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-ccb7464a-b501-46be-b029-d65dfeee995e tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] [instance: 4049de7f-4ace-4017-8f9a-63817de5f81c] Powering on the VM {{(pid=62519) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2097.999795] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-148b12bb-0b86-4bc8-964e-5101c2abfd28 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2098.006541] env[62519]: DEBUG oslo_vmware.api [None req-ccb7464a-b501-46be-b029-d65dfeee995e tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Waiting for the task: (returnval){ [ 2098.006541] env[62519]: value = "task-1803565" [ 2098.006541] env[62519]: _type = "Task" [ 2098.006541] env[62519]: } to complete. 
{{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2098.014120] env[62519]: DEBUG oslo_vmware.api [None req-ccb7464a-b501-46be-b029-d65dfeee995e tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Task: {'id': task-1803565, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2098.254685] env[62519]: DEBUG oslo_concurrency.lockutils [None req-d9129d33-91f4-4d9c-9dd3-83375e2247a7 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Lock "39c292a3-7032-4845-9ae5-2a41d13db305" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.189s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2098.432959] env[62519]: DEBUG nova.compute.manager [None req-aa80172b-cb51-49fc-9eba-26bd67892e48 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] [instance: 47439070-54d8-454c-bf1d-7a2a33d82e9a] Start destroying the instance on the hypervisor. {{(pid=62519) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3166}} [ 2098.433237] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-aa80172b-cb51-49fc-9eba-26bd67892e48 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] [instance: 47439070-54d8-454c-bf1d-7a2a33d82e9a] Destroying instance {{(pid=62519) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2098.434196] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b7adbf6-4d13-44a4-81fc-93c67c7c54d1 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2098.442593] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-aa80172b-cb51-49fc-9eba-26bd67892e48 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] [instance: 47439070-54d8-454c-bf1d-7a2a33d82e9a] Powering off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2098.444326] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-7e210293-34c2-49da-974e-3ad3c41c75d6 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2098.452280] env[62519]: DEBUG oslo_vmware.api [None req-aa80172b-cb51-49fc-9eba-26bd67892e48 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Waiting for the task: (returnval){ [ 2098.452280] env[62519]: value = "task-1803566" [ 2098.452280] env[62519]: _type = "Task" [ 2098.452280] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2098.461433] env[62519]: DEBUG oslo_vmware.api [None req-aa80172b-cb51-49fc-9eba-26bd67892e48 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Task: {'id': task-1803566, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2098.517963] env[62519]: DEBUG oslo_vmware.api [None req-ccb7464a-b501-46be-b029-d65dfeee995e tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Task: {'id': task-1803565, 'name': PowerOnVM_Task, 'duration_secs': 0.35982} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2098.518213] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-ccb7464a-b501-46be-b029-d65dfeee995e tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] [instance: 4049de7f-4ace-4017-8f9a-63817de5f81c] Powered on the VM {{(pid=62519) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2098.948339] env[62519]: DEBUG oslo_concurrency.lockutils [None req-bee9fcbe-1d62-4dce-a56b-784949e4c793 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Acquiring lock "a18af9b7-4548-42d1-8459-508298cb96dc" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2098.948713] env[62519]: DEBUG oslo_concurrency.lockutils [None req-bee9fcbe-1d62-4dce-a56b-784949e4c793 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Lock "a18af9b7-4548-42d1-8459-508298cb96dc" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2098.948822] env[62519]: DEBUG oslo_concurrency.lockutils [None req-bee9fcbe-1d62-4dce-a56b-784949e4c793 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Acquiring lock "a18af9b7-4548-42d1-8459-508298cb96dc-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2098.948999] env[62519]: DEBUG oslo_concurrency.lockutils [None req-bee9fcbe-1d62-4dce-a56b-784949e4c793 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Lock "a18af9b7-4548-42d1-8459-508298cb96dc-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2098.949190] env[62519]: DEBUG oslo_concurrency.lockutils [None req-bee9fcbe-1d62-4dce-a56b-784949e4c793 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Lock "a18af9b7-4548-42d1-8459-508298cb96dc-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2098.951669] env[62519]: INFO nova.compute.manager [None req-bee9fcbe-1d62-4dce-a56b-784949e4c793 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] [instance: a18af9b7-4548-42d1-8459-508298cb96dc] Terminating instance [ 2098.962224] env[62519]: DEBUG oslo_vmware.api [None 
req-aa80172b-cb51-49fc-9eba-26bd67892e48 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Task: {'id': task-1803566, 'name': PowerOffVM_Task, 'duration_secs': 0.203446} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2098.962917] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-aa80172b-cb51-49fc-9eba-26bd67892e48 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] [instance: 47439070-54d8-454c-bf1d-7a2a33d82e9a] Powered off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2098.963111] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-aa80172b-cb51-49fc-9eba-26bd67892e48 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] [instance: 47439070-54d8-454c-bf1d-7a2a33d82e9a] Unregistering the VM {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2098.963342] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-4c5977a4-d2db-4a3c-a45c-a7979da95ffb {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2099.222952] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-aa80172b-cb51-49fc-9eba-26bd67892e48 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] [instance: 47439070-54d8-454c-bf1d-7a2a33d82e9a] Unregistered the VM {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2099.223194] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-aa80172b-cb51-49fc-9eba-26bd67892e48 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] [instance: 47439070-54d8-454c-bf1d-7a2a33d82e9a] Deleting contents of the VM from datastore datastore1 {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2099.223376] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-aa80172b-cb51-49fc-9eba-26bd67892e48 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Deleting the datastore file [datastore1] 47439070-54d8-454c-bf1d-7a2a33d82e9a {{(pid=62519) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2099.223627] env[62519]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-86690cf8-ee2d-4d05-a39d-be80f7ff0f6c {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2099.229922] env[62519]: DEBUG oslo_vmware.api [None req-aa80172b-cb51-49fc-9eba-26bd67892e48 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Waiting for the task: (returnval){ [ 2099.229922] env[62519]: value = "task-1803568" [ 2099.229922] env[62519]: _type = "Task" [ 2099.229922] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2099.237584] env[62519]: DEBUG oslo_vmware.api [None req-aa80172b-cb51-49fc-9eba-26bd67892e48 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Task: {'id': task-1803568, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2099.458816] env[62519]: DEBUG nova.compute.manager [None req-bee9fcbe-1d62-4dce-a56b-784949e4c793 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] [instance: a18af9b7-4548-42d1-8459-508298cb96dc] Start destroying the instance on the hypervisor. {{(pid=62519) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3166}} [ 2099.459073] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-bee9fcbe-1d62-4dce-a56b-784949e4c793 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] [instance: a18af9b7-4548-42d1-8459-508298cb96dc] Destroying instance {{(pid=62519) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2099.459971] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e49fb419-93fb-4236-94aa-7cc935667796 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2099.468376] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-bee9fcbe-1d62-4dce-a56b-784949e4c793 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] [instance: a18af9b7-4548-42d1-8459-508298cb96dc] Powering off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2099.468665] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-8f5a2f37-34b0-4547-99e6-fa7d1a012a9f {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2099.475042] env[62519]: DEBUG oslo_vmware.api [None req-bee9fcbe-1d62-4dce-a56b-784949e4c793 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Waiting for the task: (returnval){ [ 2099.475042] env[62519]: value = "task-1803569" [ 2099.475042] env[62519]: _type = "Task" [ 2099.475042] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2099.484424] env[62519]: DEBUG oslo_vmware.api [None req-bee9fcbe-1d62-4dce-a56b-784949e4c793 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Task: {'id': task-1803569, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2099.528798] env[62519]: INFO nova.compute.manager [None req-ccb7464a-b501-46be-b029-d65dfeee995e tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] [instance: 4049de7f-4ace-4017-8f9a-63817de5f81c] Updating instance to original state: 'active' [ 2099.739842] env[62519]: DEBUG oslo_vmware.api [None req-aa80172b-cb51-49fc-9eba-26bd67892e48 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Task: {'id': task-1803568, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.12859} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2099.740067] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-aa80172b-cb51-49fc-9eba-26bd67892e48 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Deleted the datastore file {{(pid=62519) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2099.740245] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-aa80172b-cb51-49fc-9eba-26bd67892e48 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] [instance: 47439070-54d8-454c-bf1d-7a2a33d82e9a] Deleted contents of the VM from datastore datastore1 {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2099.740407] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-aa80172b-cb51-49fc-9eba-26bd67892e48 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] [instance: 47439070-54d8-454c-bf1d-7a2a33d82e9a] Instance destroyed {{(pid=62519) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2099.740578] env[62519]: INFO nova.compute.manager [None req-aa80172b-cb51-49fc-9eba-26bd67892e48 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] [instance: 47439070-54d8-454c-bf1d-7a2a33d82e9a] Took 1.31 seconds to destroy the instance on the hypervisor. [ 2099.740815] env[62519]: DEBUG oslo.service.loopingcall [None req-aa80172b-cb51-49fc-9eba-26bd67892e48 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62519) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2099.740994] env[62519]: DEBUG nova.compute.manager [-] [instance: 47439070-54d8-454c-bf1d-7a2a33d82e9a] Deallocating network for instance {{(pid=62519) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2297}} [ 2099.741093] env[62519]: DEBUG nova.network.neutron [-] [instance: 47439070-54d8-454c-bf1d-7a2a33d82e9a] deallocate_for_instance() {{(pid=62519) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2099.985547] env[62519]: DEBUG oslo_vmware.api [None req-bee9fcbe-1d62-4dce-a56b-784949e4c793 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Task: {'id': task-1803569, 'name': PowerOffVM_Task, 'duration_secs': 0.208085} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2099.986915] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-bee9fcbe-1d62-4dce-a56b-784949e4c793 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] [instance: a18af9b7-4548-42d1-8459-508298cb96dc] Powered off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2099.987327] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-bee9fcbe-1d62-4dce-a56b-784949e4c793 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] [instance: a18af9b7-4548-42d1-8459-508298cb96dc] Unregistering the VM {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2099.989983] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-1dfb852c-9730-4cf5-a57f-025852e72ca0 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2100.132060] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-bee9fcbe-1d62-4dce-a56b-784949e4c793 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] [instance: a18af9b7-4548-42d1-8459-508298cb96dc] Unregistered the VM {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2100.132289] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-bee9fcbe-1d62-4dce-a56b-784949e4c793 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] [instance: a18af9b7-4548-42d1-8459-508298cb96dc] Deleting contents of the VM from datastore datastore1 {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2100.132384] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-bee9fcbe-1d62-4dce-a56b-784949e4c793 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Deleting the datastore file [datastore1] a18af9b7-4548-42d1-8459-508298cb96dc {{(pid=62519) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2100.132716] env[62519]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ba413670-96e7-4120-9a35-67b8a14df01b {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2100.139105] env[62519]: DEBUG oslo_vmware.api [None req-bee9fcbe-1d62-4dce-a56b-784949e4c793 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Waiting for the task: (returnval){ [ 2100.139105] env[62519]: value = "task-1803571" [ 2100.139105] env[62519]: _type = "Task" [ 2100.139105] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2100.147514] env[62519]: DEBUG oslo_vmware.api [None req-bee9fcbe-1d62-4dce-a56b-784949e4c793 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Task: {'id': task-1803571, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2100.232432] env[62519]: DEBUG nova.compute.manager [req-ad295e21-e1c1-4845-aae2-41ddfe1a7eb8 req-e79e6476-27ca-4d8f-b793-cf0691034c74 service nova] [instance: 47439070-54d8-454c-bf1d-7a2a33d82e9a] Received event network-vif-deleted-9a08cba9-bd48-4013-a6c0-13049c0b5fdb {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 2100.232630] env[62519]: INFO nova.compute.manager [req-ad295e21-e1c1-4845-aae2-41ddfe1a7eb8 req-e79e6476-27ca-4d8f-b793-cf0691034c74 service nova] [instance: 47439070-54d8-454c-bf1d-7a2a33d82e9a] Neutron deleted interface 9a08cba9-bd48-4013-a6c0-13049c0b5fdb; detaching it from the instance and deleting it from the info cache [ 2100.232805] env[62519]: DEBUG nova.network.neutron [req-ad295e21-e1c1-4845-aae2-41ddfe1a7eb8 req-e79e6476-27ca-4d8f-b793-cf0691034c74 service nova] [instance: 47439070-54d8-454c-bf1d-7a2a33d82e9a] Updating instance_info_cache with network_info: [] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2100.492617] env[62519]: DEBUG oslo_concurrency.lockutils [None req-4166556a-7c93-4f6a-9eaa-f54dc0ceeb92 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Acquiring lock "4049de7f-4ace-4017-8f9a-63817de5f81c" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2100.492930] env[62519]: DEBUG oslo_concurrency.lockutils [None req-4166556a-7c93-4f6a-9eaa-f54dc0ceeb92 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Lock "4049de7f-4ace-4017-8f9a-63817de5f81c" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2100.493212] env[62519]: DEBUG oslo_concurrency.lockutils [None req-4166556a-7c93-4f6a-9eaa-f54dc0ceeb92 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Acquiring lock "4049de7f-4ace-4017-8f9a-63817de5f81c-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2100.493415] env[62519]: DEBUG oslo_concurrency.lockutils [None req-4166556a-7c93-4f6a-9eaa-f54dc0ceeb92 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Lock "4049de7f-4ace-4017-8f9a-63817de5f81c-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2100.493608] env[62519]: DEBUG oslo_concurrency.lockutils [None req-4166556a-7c93-4f6a-9eaa-f54dc0ceeb92 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Lock "4049de7f-4ace-4017-8f9a-63817de5f81c-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2100.495820] env[62519]: INFO nova.compute.manager [None req-4166556a-7c93-4f6a-9eaa-f54dc0ceeb92 
tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] [instance: 4049de7f-4ace-4017-8f9a-63817de5f81c] Terminating instance [ 2100.648786] env[62519]: DEBUG oslo_vmware.api [None req-bee9fcbe-1d62-4dce-a56b-784949e4c793 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Task: {'id': task-1803571, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.105507} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2100.649055] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-bee9fcbe-1d62-4dce-a56b-784949e4c793 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Deleted the datastore file {{(pid=62519) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2100.649234] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-bee9fcbe-1d62-4dce-a56b-784949e4c793 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] [instance: a18af9b7-4548-42d1-8459-508298cb96dc] Deleted contents of the VM from datastore datastore1 {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2100.649412] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-bee9fcbe-1d62-4dce-a56b-784949e4c793 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] [instance: a18af9b7-4548-42d1-8459-508298cb96dc] Instance destroyed {{(pid=62519) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2100.649586] env[62519]: INFO nova.compute.manager [None req-bee9fcbe-1d62-4dce-a56b-784949e4c793 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] [instance: a18af9b7-4548-42d1-8459-508298cb96dc] Took 1.19 seconds to destroy the instance on the hypervisor. [ 2100.649851] env[62519]: DEBUG oslo.service.loopingcall [None req-bee9fcbe-1d62-4dce-a56b-784949e4c793 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62519) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2100.650059] env[62519]: DEBUG nova.compute.manager [-] [instance: a18af9b7-4548-42d1-8459-508298cb96dc] Deallocating network for instance {{(pid=62519) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2297}} [ 2100.650158] env[62519]: DEBUG nova.network.neutron [-] [instance: a18af9b7-4548-42d1-8459-508298cb96dc] deallocate_for_instance() {{(pid=62519) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2100.707856] env[62519]: DEBUG nova.network.neutron [-] [instance: 47439070-54d8-454c-bf1d-7a2a33d82e9a] Updating instance_info_cache with network_info: [] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2100.736138] env[62519]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-1b583425-21a5-438b-b87b-16644b50f609 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2100.746646] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26f441dc-a3c3-4c8c-969b-b4ec7e93a543 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2100.774399] env[62519]: DEBUG nova.compute.manager [req-ad295e21-e1c1-4845-aae2-41ddfe1a7eb8 req-e79e6476-27ca-4d8f-b793-cf0691034c74 service nova] [instance: 47439070-54d8-454c-bf1d-7a2a33d82e9a] Detach interface failed, port_id=9a08cba9-bd48-4013-a6c0-13049c0b5fdb, reason: Instance 47439070-54d8-454c-bf1d-7a2a33d82e9a could not be found. {{(pid=62519) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11455}} [ 2101.000722] env[62519]: DEBUG nova.compute.manager [None req-4166556a-7c93-4f6a-9eaa-f54dc0ceeb92 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] [instance: 4049de7f-4ace-4017-8f9a-63817de5f81c] Start destroying the instance on the hypervisor. 
{{(pid=62519) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3166}} [ 2101.000722] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-4166556a-7c93-4f6a-9eaa-f54dc0ceeb92 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] [instance: 4049de7f-4ace-4017-8f9a-63817de5f81c] Destroying instance {{(pid=62519) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2101.003037] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72c85639-1548-4f44-b27e-64c8ace19010 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2101.009907] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-4166556a-7c93-4f6a-9eaa-f54dc0ceeb92 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] [instance: 4049de7f-4ace-4017-8f9a-63817de5f81c] Powering off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2101.010174] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-7bce0da7-19ef-43e9-9737-78d6e2575a04 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2101.017812] env[62519]: DEBUG oslo_vmware.api [None req-4166556a-7c93-4f6a-9eaa-f54dc0ceeb92 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Waiting for the task: (returnval){ [ 2101.017812] env[62519]: value = "task-1803572" [ 2101.017812] env[62519]: _type = "Task" [ 2101.017812] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2101.026600] env[62519]: DEBUG oslo_vmware.api [None req-4166556a-7c93-4f6a-9eaa-f54dc0ceeb92 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Task: {'id': task-1803572, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2101.211119] env[62519]: INFO nova.compute.manager [-] [instance: 47439070-54d8-454c-bf1d-7a2a33d82e9a] Took 1.47 seconds to deallocate network for instance. [ 2101.407144] env[62519]: DEBUG nova.network.neutron [-] [instance: a18af9b7-4548-42d1-8459-508298cb96dc] Updating instance_info_cache with network_info: [] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2101.529487] env[62519]: DEBUG oslo_vmware.api [None req-4166556a-7c93-4f6a-9eaa-f54dc0ceeb92 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Task: {'id': task-1803572, 'name': PowerOffVM_Task, 'duration_secs': 0.247104} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2101.529779] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-4166556a-7c93-4f6a-9eaa-f54dc0ceeb92 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] [instance: 4049de7f-4ace-4017-8f9a-63817de5f81c] Powered off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2101.529974] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-4166556a-7c93-4f6a-9eaa-f54dc0ceeb92 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] [instance: 4049de7f-4ace-4017-8f9a-63817de5f81c] Unregistering the VM {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2101.530248] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ee692455-0a83-4342-bcd1-1f52274347c4 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2101.618554] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-4166556a-7c93-4f6a-9eaa-f54dc0ceeb92 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] [instance: 4049de7f-4ace-4017-8f9a-63817de5f81c] Unregistered the VM {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2101.618827] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-4166556a-7c93-4f6a-9eaa-f54dc0ceeb92 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] [instance: 4049de7f-4ace-4017-8f9a-63817de5f81c] Deleting contents of the VM from datastore datastore1 {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2101.619074] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-4166556a-7c93-4f6a-9eaa-f54dc0ceeb92 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Deleting the datastore file [datastore1] 4049de7f-4ace-4017-8f9a-63817de5f81c {{(pid=62519) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2101.619367] env[62519]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-563028be-341f-43af-8094-22e7b33bfb0e {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2102.333847] env[62519]: DEBUG oslo_concurrency.lockutils [None req-aa80172b-cb51-49fc-9eba-26bd67892e48 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2102.334233] env[62519]: DEBUG oslo_concurrency.lockutils [None req-aa80172b-cb51-49fc-9eba-26bd67892e48 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2102.334306] env[62519]: DEBUG nova.objects.instance [None req-aa80172b-cb51-49fc-9eba-26bd67892e48 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Lazy-loading 'resources' on 
Instance uuid 47439070-54d8-454c-bf1d-7a2a33d82e9a {{(pid=62519) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2102.335376] env[62519]: INFO nova.compute.manager [-] [instance: a18af9b7-4548-42d1-8459-508298cb96dc] Took 1.69 seconds to deallocate network for instance. [ 2102.338188] env[62519]: DEBUG nova.compute.manager [req-202d6a41-5883-4a3d-a17a-a337f6c4b519 req-98d07982-c577-4873-8598-231e5faed693 service nova] [instance: a18af9b7-4548-42d1-8459-508298cb96dc] Received event network-vif-deleted-7b3a7ba6-9ee9-40fb-94c8-f7593ffc1ca2 {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 2102.345516] env[62519]: DEBUG oslo_vmware.api [None req-4166556a-7c93-4f6a-9eaa-f54dc0ceeb92 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Waiting for the task: (returnval){ [ 2102.345516] env[62519]: value = "task-1803574" [ 2102.345516] env[62519]: _type = "Task" [ 2102.345516] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2102.354109] env[62519]: DEBUG oslo_vmware.api [None req-4166556a-7c93-4f6a-9eaa-f54dc0ceeb92 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Task: {'id': task-1803574, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2102.839930] env[62519]: DEBUG oslo_concurrency.lockutils [None req-42818f76-9f11-4817-ac19-14e60432aa20 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Acquiring lock "812842c2-ac72-4d12-b2f7-3ccfe77a13a7" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2102.840237] env[62519]: DEBUG oslo_concurrency.lockutils [None req-42818f76-9f11-4817-ac19-14e60432aa20 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Lock "812842c2-ac72-4d12-b2f7-3ccfe77a13a7" acquired by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: waited 0.001s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2102.840420] env[62519]: INFO nova.compute.manager [None req-42818f76-9f11-4817-ac19-14e60432aa20 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] [instance: 812842c2-ac72-4d12-b2f7-3ccfe77a13a7] Shelving [ 2102.845424] env[62519]: DEBUG oslo_concurrency.lockutils [None req-bee9fcbe-1d62-4dce-a56b-784949e4c793 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2102.855847] env[62519]: DEBUG oslo_vmware.api [None req-4166556a-7c93-4f6a-9eaa-f54dc0ceeb92 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Task: {'id': task-1803574, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.141439} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2102.856089] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-4166556a-7c93-4f6a-9eaa-f54dc0ceeb92 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Deleted the datastore file {{(pid=62519) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2102.856268] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-4166556a-7c93-4f6a-9eaa-f54dc0ceeb92 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] [instance: 4049de7f-4ace-4017-8f9a-63817de5f81c] Deleted contents of the VM from datastore datastore1 {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2102.856440] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-4166556a-7c93-4f6a-9eaa-f54dc0ceeb92 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] [instance: 4049de7f-4ace-4017-8f9a-63817de5f81c] Instance destroyed {{(pid=62519) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2102.856603] env[62519]: INFO nova.compute.manager [None req-4166556a-7c93-4f6a-9eaa-f54dc0ceeb92 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] [instance: 4049de7f-4ace-4017-8f9a-63817de5f81c] Took 1.86 seconds to destroy the instance on the hypervisor. [ 2102.856826] env[62519]: DEBUG oslo.service.loopingcall [None req-4166556a-7c93-4f6a-9eaa-f54dc0ceeb92 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62519) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2102.857274] env[62519]: DEBUG nova.compute.manager [-] [instance: 4049de7f-4ace-4017-8f9a-63817de5f81c] Deallocating network for instance {{(pid=62519) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2297}} [ 2102.857372] env[62519]: DEBUG nova.network.neutron [-] [instance: 4049de7f-4ace-4017-8f9a-63817de5f81c] deallocate_for_instance() {{(pid=62519) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2102.902159] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae3d1775-50ac-4575-a369-02b2db733360 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2102.909388] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82c6fb7f-e3ac-425a-9dcf-7152087e0c58 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2102.941468] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3881306d-a00e-45f9-89bb-3fbc5e531cc2 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2102.949609] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47ffe58e-c712-481d-b731-08bec26f94a7 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2102.964055] env[62519]: DEBUG nova.compute.provider_tree [None req-aa80172b-cb51-49fc-9eba-26bd67892e48 
tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Inventory has not changed in ProviderTree for provider: f8ca0d98-9158-4b85-ae0e-b106f966dd44 {{(pid=62519) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2103.468044] env[62519]: DEBUG nova.scheduler.client.report [None req-aa80172b-cb51-49fc-9eba-26bd67892e48 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Inventory has not changed for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 159, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2103.771995] env[62519]: DEBUG nova.network.neutron [-] [instance: 4049de7f-4ace-4017-8f9a-63817de5f81c] Updating instance_info_cache with network_info: [] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2103.851334] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-42818f76-9f11-4817-ac19-14e60432aa20 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] [instance: 812842c2-ac72-4d12-b2f7-3ccfe77a13a7] Powering off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2103.851622] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-0571d60f-4439-4f20-9f74-7082aebf4375 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2103.859950] env[62519]: DEBUG oslo_vmware.api [None req-42818f76-9f11-4817-ac19-14e60432aa20 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Waiting for the task: (returnval){ [ 2103.859950] env[62519]: value = "task-1803575" [ 2103.859950] env[62519]: _type = "Task" [ 2103.859950] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2103.869838] env[62519]: DEBUG oslo_vmware.api [None req-42818f76-9f11-4817-ac19-14e60432aa20 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Task: {'id': task-1803575, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2103.972679] env[62519]: DEBUG oslo_concurrency.lockutils [None req-aa80172b-cb51-49fc-9eba-26bd67892e48 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.638s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2103.975268] env[62519]: DEBUG oslo_concurrency.lockutils [None req-bee9fcbe-1d62-4dce-a56b-784949e4c793 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.130s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2103.975550] env[62519]: DEBUG nova.objects.instance [None req-bee9fcbe-1d62-4dce-a56b-784949e4c793 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Lazy-loading 'resources' on Instance uuid a18af9b7-4548-42d1-8459-508298cb96dc {{(pid=62519) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2103.998023] env[62519]: INFO nova.scheduler.client.report [None req-aa80172b-cb51-49fc-9eba-26bd67892e48 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Deleted allocations for instance 47439070-54d8-454c-bf1d-7a2a33d82e9a [ 2104.274587] env[62519]: INFO nova.compute.manager [-] [instance: 4049de7f-4ace-4017-8f9a-63817de5f81c] Took 1.42 seconds to deallocate network for instance. [ 2104.281585] env[62519]: DEBUG nova.compute.manager [req-8c7325a6-e3f4-4ed2-b67c-9ae34a3660e1 req-971cc915-23b9-4d03-aacb-86a68e2fa360 service nova] [instance: 4049de7f-4ace-4017-8f9a-63817de5f81c] Received event network-vif-deleted-55b99762-526d-4033-9eed-24af176c71e4 {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 2104.369698] env[62519]: DEBUG oslo_vmware.api [None req-42818f76-9f11-4817-ac19-14e60432aa20 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Task: {'id': task-1803575, 'name': PowerOffVM_Task, 'duration_secs': 0.180394} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2104.369976] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-42818f76-9f11-4817-ac19-14e60432aa20 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] [instance: 812842c2-ac72-4d12-b2f7-3ccfe77a13a7] Powered off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2104.370772] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5865afa8-8e16-4516-8a25-f0d513bf0795 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2104.391484] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0355c7c-f286-4ab7-982d-bb8420ef04a4 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2104.504949] env[62519]: DEBUG oslo_concurrency.lockutils [None req-aa80172b-cb51-49fc-9eba-26bd67892e48 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Lock "47439070-54d8-454c-bf1d-7a2a33d82e9a" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.584s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2104.531757] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a934adfe-e637-49c4-bbd7-4828fba3f305 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2104.539276] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d40da879-d803-4765-9dc3-da17f046802a {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2104.571130] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ccc082b-4ab4-4713-8045-174d4dea4509 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2104.578809] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d8f4693-828e-4672-8b41-18105f22f68f {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2104.592667] env[62519]: DEBUG nova.compute.provider_tree [None req-bee9fcbe-1d62-4dce-a56b-784949e4c793 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Inventory has not changed in ProviderTree for provider: f8ca0d98-9158-4b85-ae0e-b106f966dd44 {{(pid=62519) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2104.784333] env[62519]: DEBUG oslo_concurrency.lockutils [None req-4166556a-7c93-4f6a-9eaa-f54dc0ceeb92 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2104.902134] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-42818f76-9f11-4817-ac19-14e60432aa20 tempest-ServerActionsTestOtherB-1162239603 
tempest-ServerActionsTestOtherB-1162239603-project-member] [instance: 812842c2-ac72-4d12-b2f7-3ccfe77a13a7] Creating Snapshot of the VM instance {{(pid=62519) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 2104.902466] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-54a473b9-e0b0-47dc-a186-7ae06c570839 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2104.911796] env[62519]: DEBUG oslo_vmware.api [None req-42818f76-9f11-4817-ac19-14e60432aa20 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Waiting for the task: (returnval){ [ 2104.911796] env[62519]: value = "task-1803576" [ 2104.911796] env[62519]: _type = "Task" [ 2104.911796] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2104.920804] env[62519]: DEBUG oslo_vmware.api [None req-42818f76-9f11-4817-ac19-14e60432aa20 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Task: {'id': task-1803576, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2105.095436] env[62519]: DEBUG nova.scheduler.client.report [None req-bee9fcbe-1d62-4dce-a56b-784949e4c793 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Inventory has not changed for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 159, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2105.423284] env[62519]: DEBUG oslo_vmware.api [None req-42818f76-9f11-4817-ac19-14e60432aa20 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Task: {'id': task-1803576, 'name': CreateSnapshot_Task} progress is 100%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2105.600223] env[62519]: DEBUG oslo_concurrency.lockutils [None req-bee9fcbe-1d62-4dce-a56b-784949e4c793 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.625s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2105.602532] env[62519]: DEBUG oslo_concurrency.lockutils [None req-4166556a-7c93-4f6a-9eaa-f54dc0ceeb92 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.818s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2105.602736] env[62519]: DEBUG oslo_concurrency.lockutils [None req-4166556a-7c93-4f6a-9eaa-f54dc0ceeb92 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2105.616152] env[62519]: INFO nova.scheduler.client.report [None req-bee9fcbe-1d62-4dce-a56b-784949e4c793 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Deleted allocations for instance a18af9b7-4548-42d1-8459-508298cb96dc [ 2105.621057] env[62519]: INFO nova.scheduler.client.report [None req-4166556a-7c93-4f6a-9eaa-f54dc0ceeb92 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Deleted allocations for instance 4049de7f-4ace-4017-8f9a-63817de5f81c [ 2105.923462] env[62519]: DEBUG oslo_vmware.api [None req-42818f76-9f11-4817-ac19-14e60432aa20 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Task: {'id': task-1803576, 'name': CreateSnapshot_Task, 'duration_secs': 0.609906} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2105.923757] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-42818f76-9f11-4817-ac19-14e60432aa20 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] [instance: 812842c2-ac72-4d12-b2f7-3ccfe77a13a7] Created Snapshot of the VM instance {{(pid=62519) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 2105.924662] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d540d7bc-18a5-4d10-be73-01e5d5349af1 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2105.981808] env[62519]: DEBUG oslo_concurrency.lockutils [None req-c6ba6228-cc7a-482d-9863-b3873bce0ba7 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Acquiring lock "f0ebedcb-c064-4604-be62-7c7d5d419864" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2105.982149] env[62519]: DEBUG oslo_concurrency.lockutils [None req-c6ba6228-cc7a-482d-9863-b3873bce0ba7 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Lock "f0ebedcb-c064-4604-be62-7c7d5d419864" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2106.126374] env[62519]: DEBUG oslo_concurrency.lockutils [None req-bee9fcbe-1d62-4dce-a56b-784949e4c793 tempest-AttachInterfacesTestJSON-1996275057 tempest-AttachInterfacesTestJSON-1996275057-project-member] Lock "a18af9b7-4548-42d1-8459-508298cb96dc" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 7.178s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2106.127312] env[62519]: DEBUG oslo_concurrency.lockutils [None req-4166556a-7c93-4f6a-9eaa-f54dc0ceeb92 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Lock "4049de7f-4ace-4017-8f9a-63817de5f81c" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.634s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2106.444743] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-42818f76-9f11-4817-ac19-14e60432aa20 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] [instance: 812842c2-ac72-4d12-b2f7-3ccfe77a13a7] Creating linked-clone VM from snapshot {{(pid=62519) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 2106.445034] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-8a6f6a38-0c45-4f17-9f81-12280c438ef0 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2106.457511] env[62519]: DEBUG oslo_vmware.api [None req-42818f76-9f11-4817-ac19-14e60432aa20 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Waiting for the task: (returnval){ [ 2106.457511] 
env[62519]: value = "task-1803577" [ 2106.457511] env[62519]: _type = "Task" [ 2106.457511] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2106.466011] env[62519]: DEBUG oslo_vmware.api [None req-42818f76-9f11-4817-ac19-14e60432aa20 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Task: {'id': task-1803577, 'name': CloneVM_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2106.486590] env[62519]: DEBUG nova.compute.manager [None req-c6ba6228-cc7a-482d-9863-b3873bce0ba7 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] [instance: f0ebedcb-c064-4604-be62-7c7d5d419864] Starting instance... {{(pid=62519) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2441}} [ 2106.969202] env[62519]: DEBUG oslo_vmware.api [None req-42818f76-9f11-4817-ac19-14e60432aa20 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Task: {'id': task-1803577, 'name': CloneVM_Task} progress is 94%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2107.027289] env[62519]: DEBUG oslo_concurrency.lockutils [None req-c6ba6228-cc7a-482d-9863-b3873bce0ba7 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2107.027602] env[62519]: DEBUG oslo_concurrency.lockutils [None req-c6ba6228-cc7a-482d-9863-b3873bce0ba7 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.004s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2107.029864] env[62519]: INFO nova.compute.claims [None req-c6ba6228-cc7a-482d-9863-b3873bce0ba7 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] [instance: f0ebedcb-c064-4604-be62-7c7d5d419864] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2107.468270] env[62519]: DEBUG oslo_vmware.api [None req-42818f76-9f11-4817-ac19-14e60432aa20 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Task: {'id': task-1803577, 'name': CloneVM_Task} progress is 95%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2107.594221] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2274095b-1344-44f6-84ba-47d89da2907c tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Acquiring lock "737d119d-943f-4026-8b56-a4856efb6e87" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2107.594465] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2274095b-1344-44f6-84ba-47d89da2907c tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Lock "737d119d-943f-4026-8b56-a4856efb6e87" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2107.975157] env[62519]: DEBUG oslo_vmware.api [None req-42818f76-9f11-4817-ac19-14e60432aa20 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Task: {'id': task-1803577, 'name': CloneVM_Task, 'duration_secs': 1.316127} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2107.975981] env[62519]: INFO nova.virt.vmwareapi.vmops [None req-42818f76-9f11-4817-ac19-14e60432aa20 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] [instance: 812842c2-ac72-4d12-b2f7-3ccfe77a13a7] Created linked-clone VM from snapshot [ 2107.977220] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2bbb829f-bf7b-494a-9c13-7e5295a25cb0 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2107.989013] env[62519]: DEBUG nova.virt.vmwareapi.images [None req-42818f76-9f11-4817-ac19-14e60432aa20 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] [instance: 812842c2-ac72-4d12-b2f7-3ccfe77a13a7] Uploading image d4cf8f55-a2e4-4621-a02e-e1e02e813afd {{(pid=62519) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 2108.017808] env[62519]: DEBUG oslo_vmware.rw_handles [None req-42818f76-9f11-4817-ac19-14e60432aa20 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 2108.017808] env[62519]: value = "vm-373889" [ 2108.017808] env[62519]: _type = "VirtualMachine" [ 2108.017808] env[62519]: }. 
{{(pid=62519) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 2108.018118] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-09136823-4166-4f2c-a05c-588011f10796 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2108.024869] env[62519]: DEBUG oslo_vmware.rw_handles [None req-42818f76-9f11-4817-ac19-14e60432aa20 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Lease: (returnval){ [ 2108.024869] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]52ef594d-48cf-8c23-8537-05967d94fde8" [ 2108.024869] env[62519]: _type = "HttpNfcLease" [ 2108.024869] env[62519]: } obtained for exporting VM: (result){ [ 2108.024869] env[62519]: value = "vm-373889" [ 2108.024869] env[62519]: _type = "VirtualMachine" [ 2108.024869] env[62519]: }. {{(pid=62519) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 2108.025212] env[62519]: DEBUG oslo_vmware.api [None req-42818f76-9f11-4817-ac19-14e60432aa20 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Waiting for the lease: (returnval){ [ 2108.025212] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]52ef594d-48cf-8c23-8537-05967d94fde8" [ 2108.025212] env[62519]: _type = "HttpNfcLease" [ 2108.025212] env[62519]: } to be ready. {{(pid=62519) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 2108.030724] env[62519]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 2108.030724] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]52ef594d-48cf-8c23-8537-05967d94fde8" [ 2108.030724] env[62519]: _type = "HttpNfcLease" [ 2108.030724] env[62519]: } is initializing. {{(pid=62519) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 2108.095869] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36b37306-4172-4adc-81a0-0b666950e118 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2108.100202] env[62519]: DEBUG nova.compute.manager [None req-2274095b-1344-44f6-84ba-47d89da2907c tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] [instance: 737d119d-943f-4026-8b56-a4856efb6e87] Starting instance... 
{{(pid=62519) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2441}} [ 2108.105479] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2585c311-ef84-43f8-886d-15fd8c587d03 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2108.139137] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-812519b8-cf8c-4fd6-8ebc-8718437bb422 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2108.148556] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e88c02eb-8f7f-4e68-ace8-0d5bee0da1ed {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2108.164672] env[62519]: DEBUG nova.compute.provider_tree [None req-c6ba6228-cc7a-482d-9863-b3873bce0ba7 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Inventory has not changed in ProviderTree for provider: f8ca0d98-9158-4b85-ae0e-b106f966dd44 {{(pid=62519) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2108.536663] env[62519]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 2108.536663] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]52ef594d-48cf-8c23-8537-05967d94fde8" [ 2108.536663] env[62519]: _type = "HttpNfcLease" [ 2108.536663] env[62519]: } is ready. {{(pid=62519) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 2108.537172] env[62519]: DEBUG oslo_vmware.rw_handles [None req-42818f76-9f11-4817-ac19-14e60432aa20 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 2108.537172] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]52ef594d-48cf-8c23-8537-05967d94fde8" [ 2108.537172] env[62519]: _type = "HttpNfcLease" [ 2108.537172] env[62519]: }. {{(pid=62519) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 2108.538310] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef98b91d-fdf7-4772-bc14-2f3cba70efb6 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2108.549400] env[62519]: DEBUG oslo_vmware.rw_handles [None req-42818f76-9f11-4817-ac19-14e60432aa20 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/529ebde4-f4b7-189b-c7df-64d66ae1ede6/disk-0.vmdk from lease info. {{(pid=62519) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 2108.549704] env[62519]: DEBUG oslo_vmware.rw_handles [None req-42818f76-9f11-4817-ac19-14e60432aa20 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Opening URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/529ebde4-f4b7-189b-c7df-64d66ae1ede6/disk-0.vmdk for reading. 
{{(pid=62519) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 2108.666414] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2274095b-1344-44f6-84ba-47d89da2907c tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2108.667653] env[62519]: DEBUG nova.scheduler.client.report [None req-c6ba6228-cc7a-482d-9863-b3873bce0ba7 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Inventory has not changed for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 159, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2108.680328] env[62519]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-5f85f901-06ed-4d54-97f0-c3cdb58f68f5 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2109.174369] env[62519]: DEBUG oslo_concurrency.lockutils [None req-c6ba6228-cc7a-482d-9863-b3873bce0ba7 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.147s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2109.175015] env[62519]: DEBUG nova.compute.manager [None req-c6ba6228-cc7a-482d-9863-b3873bce0ba7 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] [instance: f0ebedcb-c064-4604-be62-7c7d5d419864] Start building networks asynchronously for instance. 
{{(pid=62519) _build_resources /opt/stack/nova/nova/compute/manager.py:2838}} [ 2109.178163] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2274095b-1344-44f6-84ba-47d89da2907c tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.512s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2109.180497] env[62519]: INFO nova.compute.claims [None req-2274095b-1344-44f6-84ba-47d89da2907c tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] [instance: 737d119d-943f-4026-8b56-a4856efb6e87] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2109.688419] env[62519]: DEBUG nova.compute.utils [None req-c6ba6228-cc7a-482d-9863-b3873bce0ba7 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Using /dev/sd instead of None {{(pid=62519) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2109.690530] env[62519]: DEBUG nova.compute.manager [None req-c6ba6228-cc7a-482d-9863-b3873bce0ba7 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] [instance: f0ebedcb-c064-4604-be62-7c7d5d419864] Allocating IP information in the background. {{(pid=62519) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1989}} [ 2109.691889] env[62519]: DEBUG nova.network.neutron [None req-c6ba6228-cc7a-482d-9863-b3873bce0ba7 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] [instance: f0ebedcb-c064-4604-be62-7c7d5d419864] allocate_for_instance() {{(pid=62519) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 2109.743612] env[62519]: DEBUG nova.policy [None req-c6ba6228-cc7a-482d-9863-b3873bce0ba7 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '4e3681fd31294e5cbdfbfea52a47eec0', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ff4895c6c47e438e8fb9fbc0ffbfdc82', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62519) authorize /opt/stack/nova/nova/policy.py:192}} [ 2110.011848] env[62519]: DEBUG nova.network.neutron [None req-c6ba6228-cc7a-482d-9863-b3873bce0ba7 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] [instance: f0ebedcb-c064-4604-be62-7c7d5d419864] Successfully created port: 53ef6650-7c4f-4870-bdb3-217af06cf895 {{(pid=62519) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2110.193945] env[62519]: DEBUG nova.compute.manager [None req-c6ba6228-cc7a-482d-9863-b3873bce0ba7 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] [instance: f0ebedcb-c064-4604-be62-7c7d5d419864] Start building block device mappings for instance. 
{{(pid=62519) _build_resources /opt/stack/nova/nova/compute/manager.py:2873}} [ 2110.259133] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c49ea72-302c-4949-aefd-535fb12224e8 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2110.267573] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c3eaf66-2417-45a0-9b2d-4c6ce25f9184 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2110.299970] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28f4d492-d6e7-4903-a73e-c71518f8c9b7 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2110.307799] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78bd0e33-c5cb-4a5e-827c-3cc0bdfff5ea {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2110.321473] env[62519]: DEBUG nova.compute.provider_tree [None req-2274095b-1344-44f6-84ba-47d89da2907c tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Inventory has not changed in ProviderTree for provider: f8ca0d98-9158-4b85-ae0e-b106f966dd44 {{(pid=62519) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2110.824750] env[62519]: DEBUG nova.scheduler.client.report [None req-2274095b-1344-44f6-84ba-47d89da2907c tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Inventory has not changed for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 159, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2111.204363] env[62519]: DEBUG nova.compute.manager [None req-c6ba6228-cc7a-482d-9863-b3873bce0ba7 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] [instance: f0ebedcb-c064-4604-be62-7c7d5d419864] Start spawning the instance on the hypervisor. 
{{(pid=62519) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2647}} [ 2111.237849] env[62519]: DEBUG nova.virt.hardware [None req-c6ba6228-cc7a-482d-9863-b3873bce0ba7 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T07:56:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T07:55:55Z,direct_url=,disk_format='vmdk',id=15793716-f1d9-4a86-9030-717adf498693,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4069337684d348e0a1ab4eb6a1a2b14d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T07:55:56Z,virtual_size=,visibility=), allow threads: False {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 2111.238175] env[62519]: DEBUG nova.virt.hardware [None req-c6ba6228-cc7a-482d-9863-b3873bce0ba7 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Flavor limits 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2111.238405] env[62519]: DEBUG nova.virt.hardware [None req-c6ba6228-cc7a-482d-9863-b3873bce0ba7 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Image limits 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 2111.238632] env[62519]: DEBUG nova.virt.hardware [None req-c6ba6228-cc7a-482d-9863-b3873bce0ba7 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Flavor pref 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2111.238788] env[62519]: DEBUG nova.virt.hardware [None req-c6ba6228-cc7a-482d-9863-b3873bce0ba7 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Image pref 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 2111.238938] env[62519]: DEBUG nova.virt.hardware [None req-c6ba6228-cc7a-482d-9863-b3873bce0ba7 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 2111.239171] env[62519]: DEBUG nova.virt.hardware [None req-c6ba6228-cc7a-482d-9863-b3873bce0ba7 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 2111.239380] env[62519]: DEBUG nova.virt.hardware [None req-c6ba6228-cc7a-482d-9863-b3873bce0ba7 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62519) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:475}} [ 2111.239629] env[62519]: DEBUG nova.virt.hardware [None req-c6ba6228-cc7a-482d-9863-b3873bce0ba7 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Got 1 possible topologies {{(pid=62519) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 2111.239840] env[62519]: DEBUG nova.virt.hardware [None req-c6ba6228-cc7a-482d-9863-b3873bce0ba7 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 2111.240201] env[62519]: DEBUG nova.virt.hardware [None req-c6ba6228-cc7a-482d-9863-b3873bce0ba7 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 2111.241100] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd92cd75-6c0f-4f2e-b018-b2c46768adc2 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2111.249964] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e95de38c-69cb-42b7-8db4-951f254e82db {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2111.330546] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2274095b-1344-44f6-84ba-47d89da2907c tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.152s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2111.331129] env[62519]: DEBUG nova.compute.manager [None req-2274095b-1344-44f6-84ba-47d89da2907c tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] [instance: 737d119d-943f-4026-8b56-a4856efb6e87] Start building networks asynchronously for instance. 
{{(pid=62519) _build_resources /opt/stack/nova/nova/compute/manager.py:2838}} [ 2111.413924] env[62519]: DEBUG nova.compute.manager [req-1024a072-25a8-4a4a-ba4d-d03257b2d505 req-36c861e6-951f-4fe1-98ba-47e352036ae7 service nova] [instance: f0ebedcb-c064-4604-be62-7c7d5d419864] Received event network-vif-plugged-53ef6650-7c4f-4870-bdb3-217af06cf895 {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 2111.414203] env[62519]: DEBUG oslo_concurrency.lockutils [req-1024a072-25a8-4a4a-ba4d-d03257b2d505 req-36c861e6-951f-4fe1-98ba-47e352036ae7 service nova] Acquiring lock "f0ebedcb-c064-4604-be62-7c7d5d419864-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2111.414451] env[62519]: DEBUG oslo_concurrency.lockutils [req-1024a072-25a8-4a4a-ba4d-d03257b2d505 req-36c861e6-951f-4fe1-98ba-47e352036ae7 service nova] Lock "f0ebedcb-c064-4604-be62-7c7d5d419864-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2111.414664] env[62519]: DEBUG oslo_concurrency.lockutils [req-1024a072-25a8-4a4a-ba4d-d03257b2d505 req-36c861e6-951f-4fe1-98ba-47e352036ae7 service nova] Lock "f0ebedcb-c064-4604-be62-7c7d5d419864-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2111.414872] env[62519]: DEBUG nova.compute.manager [req-1024a072-25a8-4a4a-ba4d-d03257b2d505 req-36c861e6-951f-4fe1-98ba-47e352036ae7 service nova] [instance: f0ebedcb-c064-4604-be62-7c7d5d419864] No waiting events found dispatching network-vif-plugged-53ef6650-7c4f-4870-bdb3-217af06cf895 {{(pid=62519) pop_instance_event /opt/stack/nova/nova/compute/manager.py:323}} [ 2111.415091] env[62519]: WARNING nova.compute.manager [req-1024a072-25a8-4a4a-ba4d-d03257b2d505 req-36c861e6-951f-4fe1-98ba-47e352036ae7 service nova] [instance: f0ebedcb-c064-4604-be62-7c7d5d419864] Received unexpected event network-vif-plugged-53ef6650-7c4f-4870-bdb3-217af06cf895 for instance with vm_state building and task_state spawning. [ 2111.503471] env[62519]: DEBUG nova.network.neutron [None req-c6ba6228-cc7a-482d-9863-b3873bce0ba7 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] [instance: f0ebedcb-c064-4604-be62-7c7d5d419864] Successfully updated port: 53ef6650-7c4f-4870-bdb3-217af06cf895 {{(pid=62519) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2111.837062] env[62519]: DEBUG nova.compute.utils [None req-2274095b-1344-44f6-84ba-47d89da2907c tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Using /dev/sd instead of None {{(pid=62519) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2111.838513] env[62519]: DEBUG nova.compute.manager [None req-2274095b-1344-44f6-84ba-47d89da2907c tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] [instance: 737d119d-943f-4026-8b56-a4856efb6e87] Allocating IP information in the background. 
{{(pid=62519) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1989}} [ 2111.838692] env[62519]: DEBUG nova.network.neutron [None req-2274095b-1344-44f6-84ba-47d89da2907c tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] [instance: 737d119d-943f-4026-8b56-a4856efb6e87] allocate_for_instance() {{(pid=62519) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 2111.877063] env[62519]: DEBUG nova.policy [None req-2274095b-1344-44f6-84ba-47d89da2907c tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c586ba4158b947b0a25d1614c17ebb51', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '12977ed65a1b410a987b049e9d1dce3e', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62519) authorize /opt/stack/nova/nova/policy.py:192}} [ 2112.006284] env[62519]: DEBUG oslo_concurrency.lockutils [None req-c6ba6228-cc7a-482d-9863-b3873bce0ba7 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Acquiring lock "refresh_cache-f0ebedcb-c064-4604-be62-7c7d5d419864" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2112.006526] env[62519]: DEBUG oslo_concurrency.lockutils [None req-c6ba6228-cc7a-482d-9863-b3873bce0ba7 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Acquired lock "refresh_cache-f0ebedcb-c064-4604-be62-7c7d5d419864" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2112.006669] env[62519]: DEBUG nova.network.neutron [None req-c6ba6228-cc7a-482d-9863-b3873bce0ba7 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] [instance: f0ebedcb-c064-4604-be62-7c7d5d419864] Building network info cache for instance {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2112.145031] env[62519]: DEBUG nova.network.neutron [None req-2274095b-1344-44f6-84ba-47d89da2907c tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] [instance: 737d119d-943f-4026-8b56-a4856efb6e87] Successfully created port: 90575ec3-2f97-43ef-97d9-21e1847868c9 {{(pid=62519) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2112.341497] env[62519]: DEBUG nova.compute.manager [None req-2274095b-1344-44f6-84ba-47d89da2907c tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] [instance: 737d119d-943f-4026-8b56-a4856efb6e87] Start building block device mappings for instance. {{(pid=62519) _build_resources /opt/stack/nova/nova/compute/manager.py:2873}} [ 2112.547562] env[62519]: DEBUG nova.network.neutron [None req-c6ba6228-cc7a-482d-9863-b3873bce0ba7 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] [instance: f0ebedcb-c064-4604-be62-7c7d5d419864] Instance cache missing network info. 
{{(pid=62519) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2112.675013] env[62519]: DEBUG nova.network.neutron [None req-c6ba6228-cc7a-482d-9863-b3873bce0ba7 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] [instance: f0ebedcb-c064-4604-be62-7c7d5d419864] Updating instance_info_cache with network_info: [{"id": "53ef6650-7c4f-4870-bdb3-217af06cf895", "address": "fa:16:3e:dc:e6:e0", "network": {"id": "1aae0533-b463-4e15-b245-e7c3c91984b5", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1664918037-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ff4895c6c47e438e8fb9fbc0ffbfdc82", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1f762954-6ca5-4da5-bf0a-5d31c51ec570", "external-id": "nsx-vlan-transportzone-930", "segmentation_id": 930, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap53ef6650-7c", "ovs_interfaceid": "53ef6650-7c4f-4870-bdb3-217af06cf895", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2113.178255] env[62519]: DEBUG oslo_concurrency.lockutils [None req-c6ba6228-cc7a-482d-9863-b3873bce0ba7 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Releasing lock "refresh_cache-f0ebedcb-c064-4604-be62-7c7d5d419864" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2113.178613] env[62519]: DEBUG nova.compute.manager [None req-c6ba6228-cc7a-482d-9863-b3873bce0ba7 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] [instance: f0ebedcb-c064-4604-be62-7c7d5d419864] Instance network_info: |[{"id": "53ef6650-7c4f-4870-bdb3-217af06cf895", "address": "fa:16:3e:dc:e6:e0", "network": {"id": "1aae0533-b463-4e15-b245-e7c3c91984b5", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1664918037-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ff4895c6c47e438e8fb9fbc0ffbfdc82", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1f762954-6ca5-4da5-bf0a-5d31c51ec570", "external-id": "nsx-vlan-transportzone-930", "segmentation_id": 930, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap53ef6650-7c", "ovs_interfaceid": "53ef6650-7c4f-4870-bdb3-217af06cf895", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62519) 
_allocate_network_async /opt/stack/nova/nova/compute/manager.py:2004}} [ 2113.179095] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-c6ba6228-cc7a-482d-9863-b3873bce0ba7 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] [instance: f0ebedcb-c064-4604-be62-7c7d5d419864] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:dc:e6:e0', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '1f762954-6ca5-4da5-bf0a-5d31c51ec570', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '53ef6650-7c4f-4870-bdb3-217af06cf895', 'vif_model': 'vmxnet3'}] {{(pid=62519) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2113.186853] env[62519]: DEBUG oslo.service.loopingcall [None req-c6ba6228-cc7a-482d-9863-b3873bce0ba7 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62519) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2113.187105] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f0ebedcb-c064-4604-be62-7c7d5d419864] Creating VM on the ESX host {{(pid=62519) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2113.187398] env[62519]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-42d8e3d3-cdfd-41db-be27-c6d7802ae673 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2113.207462] env[62519]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2113.207462] env[62519]: value = "task-1803579" [ 2113.207462] env[62519]: _type = "Task" [ 2113.207462] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2113.215640] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1803579, 'name': CreateVM_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2113.352384] env[62519]: DEBUG nova.compute.manager [None req-2274095b-1344-44f6-84ba-47d89da2907c tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] [instance: 737d119d-943f-4026-8b56-a4856efb6e87] Start spawning the instance on the hypervisor. 
{{(pid=62519) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2647}} [ 2113.381137] env[62519]: DEBUG nova.virt.hardware [None req-2274095b-1344-44f6-84ba-47d89da2907c tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T07:56:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T07:55:55Z,direct_url=,disk_format='vmdk',id=15793716-f1d9-4a86-9030-717adf498693,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4069337684d348e0a1ab4eb6a1a2b14d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T07:55:56Z,virtual_size=,visibility=), allow threads: False {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 2113.381396] env[62519]: DEBUG nova.virt.hardware [None req-2274095b-1344-44f6-84ba-47d89da2907c tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Flavor limits 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2113.381555] env[62519]: DEBUG nova.virt.hardware [None req-2274095b-1344-44f6-84ba-47d89da2907c tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Image limits 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 2113.381763] env[62519]: DEBUG nova.virt.hardware [None req-2274095b-1344-44f6-84ba-47d89da2907c tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Flavor pref 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2113.381896] env[62519]: DEBUG nova.virt.hardware [None req-2274095b-1344-44f6-84ba-47d89da2907c tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Image pref 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 2113.382067] env[62519]: DEBUG nova.virt.hardware [None req-2274095b-1344-44f6-84ba-47d89da2907c tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 2113.382335] env[62519]: DEBUG nova.virt.hardware [None req-2274095b-1344-44f6-84ba-47d89da2907c tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 2113.382708] env[62519]: DEBUG nova.virt.hardware [None req-2274095b-1344-44f6-84ba-47d89da2907c tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62519) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 2113.382708] env[62519]: DEBUG nova.virt.hardware [None 
req-2274095b-1344-44f6-84ba-47d89da2907c tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Got 1 possible topologies {{(pid=62519) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 2113.382896] env[62519]: DEBUG nova.virt.hardware [None req-2274095b-1344-44f6-84ba-47d89da2907c tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 2113.383127] env[62519]: DEBUG nova.virt.hardware [None req-2274095b-1344-44f6-84ba-47d89da2907c tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 2113.384148] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b28c234c-39ad-4ec6-9ed5-275278965b44 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2113.393217] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2821dd73-4602-4477-beb8-7bade12f72dc {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2113.441066] env[62519]: DEBUG nova.compute.manager [req-355eab1a-3761-49b3-8e00-bebbd210577a req-190c7cb5-5116-4a90-937f-29547c320875 service nova] [instance: f0ebedcb-c064-4604-be62-7c7d5d419864] Received event network-changed-53ef6650-7c4f-4870-bdb3-217af06cf895 {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 2113.441283] env[62519]: DEBUG nova.compute.manager [req-355eab1a-3761-49b3-8e00-bebbd210577a req-190c7cb5-5116-4a90-937f-29547c320875 service nova] [instance: f0ebedcb-c064-4604-be62-7c7d5d419864] Refreshing instance network info cache due to event network-changed-53ef6650-7c4f-4870-bdb3-217af06cf895. 
{{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11628}} [ 2113.441515] env[62519]: DEBUG oslo_concurrency.lockutils [req-355eab1a-3761-49b3-8e00-bebbd210577a req-190c7cb5-5116-4a90-937f-29547c320875 service nova] Acquiring lock "refresh_cache-f0ebedcb-c064-4604-be62-7c7d5d419864" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2113.442364] env[62519]: DEBUG oslo_concurrency.lockutils [req-355eab1a-3761-49b3-8e00-bebbd210577a req-190c7cb5-5116-4a90-937f-29547c320875 service nova] Acquired lock "refresh_cache-f0ebedcb-c064-4604-be62-7c7d5d419864" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2113.442364] env[62519]: DEBUG nova.network.neutron [req-355eab1a-3761-49b3-8e00-bebbd210577a req-190c7cb5-5116-4a90-937f-29547c320875 service nova] [instance: f0ebedcb-c064-4604-be62-7c7d5d419864] Refreshing network info cache for port 53ef6650-7c4f-4870-bdb3-217af06cf895 {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2113.648763] env[62519]: DEBUG nova.network.neutron [None req-2274095b-1344-44f6-84ba-47d89da2907c tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] [instance: 737d119d-943f-4026-8b56-a4856efb6e87] Successfully updated port: 90575ec3-2f97-43ef-97d9-21e1847868c9 {{(pid=62519) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2113.717872] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1803579, 'name': CreateVM_Task} progress is 99%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2114.149129] env[62519]: DEBUG nova.network.neutron [req-355eab1a-3761-49b3-8e00-bebbd210577a req-190c7cb5-5116-4a90-937f-29547c320875 service nova] [instance: f0ebedcb-c064-4604-be62-7c7d5d419864] Updated VIF entry in instance network info cache for port 53ef6650-7c4f-4870-bdb3-217af06cf895. 
{{(pid=62519) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2114.149513] env[62519]: DEBUG nova.network.neutron [req-355eab1a-3761-49b3-8e00-bebbd210577a req-190c7cb5-5116-4a90-937f-29547c320875 service nova] [instance: f0ebedcb-c064-4604-be62-7c7d5d419864] Updating instance_info_cache with network_info: [{"id": "53ef6650-7c4f-4870-bdb3-217af06cf895", "address": "fa:16:3e:dc:e6:e0", "network": {"id": "1aae0533-b463-4e15-b245-e7c3c91984b5", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1664918037-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ff4895c6c47e438e8fb9fbc0ffbfdc82", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1f762954-6ca5-4da5-bf0a-5d31c51ec570", "external-id": "nsx-vlan-transportzone-930", "segmentation_id": 930, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap53ef6650-7c", "ovs_interfaceid": "53ef6650-7c4f-4870-bdb3-217af06cf895", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2114.151027] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2274095b-1344-44f6-84ba-47d89da2907c tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Acquiring lock "refresh_cache-737d119d-943f-4026-8b56-a4856efb6e87" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2114.151163] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2274095b-1344-44f6-84ba-47d89da2907c tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Acquired lock "refresh_cache-737d119d-943f-4026-8b56-a4856efb6e87" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2114.151293] env[62519]: DEBUG nova.network.neutron [None req-2274095b-1344-44f6-84ba-47d89da2907c tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] [instance: 737d119d-943f-4026-8b56-a4856efb6e87] Building network info cache for instance {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2114.217882] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1803579, 'name': CreateVM_Task, 'duration_secs': 0.552049} completed successfully. 
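The CreateVM_Task entries above show the standard oslo.vmware wait loop: invoke the task, poll its progress (0% ... 99%), and return once it reports success. A generic poll-until-done sketch; get_task_info() is a hypothetical stand-in for the real property-collector read, not the oslo.vmware API:

    import time

    def wait_until_done(get_task_info, interval=0.5):
        # Mirrors the "progress is N% ... completed successfully" sequence above.
        while True:
            info = get_task_info()  # hypothetical: {'state': ..., 'progress': ...}
            if info['state'] == 'success':
                return info
            if info['state'] == 'error':
                raise RuntimeError('task failed: %s' % info.get('error'))
            time.sleep(interval)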
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2114.218139] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f0ebedcb-c064-4604-be62-7c7d5d419864] Created VM on the ESX host {{(pid=62519) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2114.218860] env[62519]: DEBUG oslo_concurrency.lockutils [None req-c6ba6228-cc7a-482d-9863-b3873bce0ba7 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2114.219084] env[62519]: DEBUG oslo_concurrency.lockutils [None req-c6ba6228-cc7a-482d-9863-b3873bce0ba7 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Acquired lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2114.219470] env[62519]: DEBUG oslo_concurrency.lockutils [None req-c6ba6228-cc7a-482d-9863-b3873bce0ba7 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2114.219735] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d33a6cf9-eec5-4c5f-baa6-dc2556af96d4 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2114.224776] env[62519]: DEBUG oslo_vmware.api [None req-c6ba6228-cc7a-482d-9863-b3873bce0ba7 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Waiting for the task: (returnval){ [ 2114.224776] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]52cbeec4-2d9f-ada9-7521-d20d78f764a4" [ 2114.224776] env[62519]: _type = "Task" [ 2114.224776] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2114.232488] env[62519]: DEBUG oslo_vmware.api [None req-c6ba6228-cc7a-482d-9863-b3873bce0ba7 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52cbeec4-2d9f-ada9-7521-d20d78f764a4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2114.654023] env[62519]: DEBUG oslo_concurrency.lockutils [req-355eab1a-3761-49b3-8e00-bebbd210577a req-190c7cb5-5116-4a90-937f-29547c320875 service nova] Releasing lock "refresh_cache-f0ebedcb-c064-4604-be62-7c7d5d419864" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2114.686601] env[62519]: DEBUG nova.network.neutron [None req-2274095b-1344-44f6-84ba-47d89da2907c tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] [instance: 737d119d-943f-4026-8b56-a4856efb6e87] Instance cache missing network info. 
{{(pid=62519) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2114.738348] env[62519]: DEBUG oslo_vmware.api [None req-c6ba6228-cc7a-482d-9863-b3873bce0ba7 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52cbeec4-2d9f-ada9-7521-d20d78f764a4, 'name': SearchDatastore_Task, 'duration_secs': 0.015753} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2114.738681] env[62519]: DEBUG oslo_concurrency.lockutils [None req-c6ba6228-cc7a-482d-9863-b3873bce0ba7 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Releasing lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2114.738910] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-c6ba6228-cc7a-482d-9863-b3873bce0ba7 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] [instance: f0ebedcb-c064-4604-be62-7c7d5d419864] Processing image 15793716-f1d9-4a86-9030-717adf498693 {{(pid=62519) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2114.739154] env[62519]: DEBUG oslo_concurrency.lockutils [None req-c6ba6228-cc7a-482d-9863-b3873bce0ba7 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2114.739302] env[62519]: DEBUG oslo_concurrency.lockutils [None req-c6ba6228-cc7a-482d-9863-b3873bce0ba7 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Acquired lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2114.739473] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-c6ba6228-cc7a-482d-9863-b3873bce0ba7 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62519) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2114.739734] env[62519]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-393bbbb0-f43c-4149-9efc-7cf047180cab {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2114.748479] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-c6ba6228-cc7a-482d-9863-b3873bce0ba7 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62519) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2114.748687] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-c6ba6228-cc7a-482d-9863-b3873bce0ba7 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62519) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2114.749398] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9d4a6db1-fce8-484b-b1f6-3357dd84125d {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2114.754902] env[62519]: DEBUG oslo_vmware.api [None req-c6ba6228-cc7a-482d-9863-b3873bce0ba7 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Waiting for the task: (returnval){ [ 2114.754902] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]529c3ead-d9b7-6708-21b2-17de3c9c3e99" [ 2114.754902] env[62519]: _type = "Task" [ 2114.754902] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2114.764186] env[62519]: DEBUG oslo_vmware.api [None req-c6ba6228-cc7a-482d-9863-b3873bce0ba7 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]529c3ead-d9b7-6708-21b2-17de3c9c3e99, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2114.829124] env[62519]: DEBUG nova.network.neutron [None req-2274095b-1344-44f6-84ba-47d89da2907c tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] [instance: 737d119d-943f-4026-8b56-a4856efb6e87] Updating instance_info_cache with network_info: [{"id": "90575ec3-2f97-43ef-97d9-21e1847868c9", "address": "fa:16:3e:59:fe:ac", "network": {"id": "40fc0e4f-fe1a-4b8c-ace4-4612cfc1a8fc", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-364463489-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "12977ed65a1b410a987b049e9d1dce3e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8e028024-a9c1-4cae-8849-ea770a7ae0e4", "external-id": "nsx-vlan-transportzone-919", "segmentation_id": 919, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap90575ec3-2f", "ovs_interfaceid": "90575ec3-2f97-43ef-97d9-21e1847868c9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2115.267053] env[62519]: DEBUG oslo_vmware.api [None req-c6ba6228-cc7a-482d-9863-b3873bce0ba7 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]529c3ead-d9b7-6708-21b2-17de3c9c3e99, 'name': SearchDatastore_Task, 'duration_secs': 0.06736} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2115.267369] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c0e5cefd-1d04-40fc-8980-cc7c8cf81734 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2115.272992] env[62519]: DEBUG oslo_vmware.api [None req-c6ba6228-cc7a-482d-9863-b3873bce0ba7 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Waiting for the task: (returnval){ [ 2115.272992] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]5235c53e-ce9c-70fa-da77-e53acacb1495" [ 2115.272992] env[62519]: _type = "Task" [ 2115.272992] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2115.280899] env[62519]: DEBUG oslo_vmware.api [None req-c6ba6228-cc7a-482d-9863-b3873bce0ba7 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]5235c53e-ce9c-70fa-da77-e53acacb1495, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2115.331786] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2274095b-1344-44f6-84ba-47d89da2907c tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Releasing lock "refresh_cache-737d119d-943f-4026-8b56-a4856efb6e87" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2115.332134] env[62519]: DEBUG nova.compute.manager [None req-2274095b-1344-44f6-84ba-47d89da2907c tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] [instance: 737d119d-943f-4026-8b56-a4856efb6e87] Instance network_info: |[{"id": "90575ec3-2f97-43ef-97d9-21e1847868c9", "address": "fa:16:3e:59:fe:ac", "network": {"id": "40fc0e4f-fe1a-4b8c-ace4-4612cfc1a8fc", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-364463489-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "12977ed65a1b410a987b049e9d1dce3e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8e028024-a9c1-4cae-8849-ea770a7ae0e4", "external-id": "nsx-vlan-transportzone-919", "segmentation_id": 919, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap90575ec3-2f", "ovs_interfaceid": "90575ec3-2f97-43ef-97d9-21e1847868c9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62519) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2004}} [ 2115.332643] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-2274095b-1344-44f6-84ba-47d89da2907c tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] [instance: 737d119d-943f-4026-8b56-a4856efb6e87] Instance VIF info 
[{'network_name': 'br-int', 'mac_address': 'fa:16:3e:59:fe:ac', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '8e028024-a9c1-4cae-8849-ea770a7ae0e4', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '90575ec3-2f97-43ef-97d9-21e1847868c9', 'vif_model': 'vmxnet3'}] {{(pid=62519) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2115.340043] env[62519]: DEBUG oslo.service.loopingcall [None req-2274095b-1344-44f6-84ba-47d89da2907c tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62519) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2115.340273] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 737d119d-943f-4026-8b56-a4856efb6e87] Creating VM on the ESX host {{(pid=62519) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2115.340499] env[62519]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-8fcc104e-2fce-4600-839c-27d5d1d34c42 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2115.360764] env[62519]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2115.360764] env[62519]: value = "task-1803580" [ 2115.360764] env[62519]: _type = "Task" [ 2115.360764] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2115.368509] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1803580, 'name': CreateVM_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2115.470046] env[62519]: DEBUG nova.compute.manager [req-87c6d446-6cd0-4927-b8ab-d2c13190707a req-7e552b86-b31e-4541-a825-f5a0c0a7c8aa service nova] [instance: 737d119d-943f-4026-8b56-a4856efb6e87] Received event network-vif-plugged-90575ec3-2f97-43ef-97d9-21e1847868c9 {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 2115.470046] env[62519]: DEBUG oslo_concurrency.lockutils [req-87c6d446-6cd0-4927-b8ab-d2c13190707a req-7e552b86-b31e-4541-a825-f5a0c0a7c8aa service nova] Acquiring lock "737d119d-943f-4026-8b56-a4856efb6e87-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2115.470210] env[62519]: DEBUG oslo_concurrency.lockutils [req-87c6d446-6cd0-4927-b8ab-d2c13190707a req-7e552b86-b31e-4541-a825-f5a0c0a7c8aa service nova] Lock "737d119d-943f-4026-8b56-a4856efb6e87-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2115.470476] env[62519]: DEBUG oslo_concurrency.lockutils [req-87c6d446-6cd0-4927-b8ab-d2c13190707a req-7e552b86-b31e-4541-a825-f5a0c0a7c8aa service nova] Lock "737d119d-943f-4026-8b56-a4856efb6e87-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2115.470671] env[62519]: DEBUG nova.compute.manager [req-87c6d446-6cd0-4927-b8ab-d2c13190707a req-7e552b86-b31e-4541-a825-f5a0c0a7c8aa service nova] [instance: 
737d119d-943f-4026-8b56-a4856efb6e87] No waiting events found dispatching network-vif-plugged-90575ec3-2f97-43ef-97d9-21e1847868c9 {{(pid=62519) pop_instance_event /opt/stack/nova/nova/compute/manager.py:323}} [ 2115.470845] env[62519]: WARNING nova.compute.manager [req-87c6d446-6cd0-4927-b8ab-d2c13190707a req-7e552b86-b31e-4541-a825-f5a0c0a7c8aa service nova] [instance: 737d119d-943f-4026-8b56-a4856efb6e87] Received unexpected event network-vif-plugged-90575ec3-2f97-43ef-97d9-21e1847868c9 for instance with vm_state building and task_state spawning. [ 2115.471036] env[62519]: DEBUG nova.compute.manager [req-87c6d446-6cd0-4927-b8ab-d2c13190707a req-7e552b86-b31e-4541-a825-f5a0c0a7c8aa service nova] [instance: 737d119d-943f-4026-8b56-a4856efb6e87] Received event network-changed-90575ec3-2f97-43ef-97d9-21e1847868c9 {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 2115.471213] env[62519]: DEBUG nova.compute.manager [req-87c6d446-6cd0-4927-b8ab-d2c13190707a req-7e552b86-b31e-4541-a825-f5a0c0a7c8aa service nova] [instance: 737d119d-943f-4026-8b56-a4856efb6e87] Refreshing instance network info cache due to event network-changed-90575ec3-2f97-43ef-97d9-21e1847868c9. {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11628}} [ 2115.471438] env[62519]: DEBUG oslo_concurrency.lockutils [req-87c6d446-6cd0-4927-b8ab-d2c13190707a req-7e552b86-b31e-4541-a825-f5a0c0a7c8aa service nova] Acquiring lock "refresh_cache-737d119d-943f-4026-8b56-a4856efb6e87" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2115.471615] env[62519]: DEBUG oslo_concurrency.lockutils [req-87c6d446-6cd0-4927-b8ab-d2c13190707a req-7e552b86-b31e-4541-a825-f5a0c0a7c8aa service nova] Acquired lock "refresh_cache-737d119d-943f-4026-8b56-a4856efb6e87" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2115.471757] env[62519]: DEBUG nova.network.neutron [req-87c6d446-6cd0-4927-b8ab-d2c13190707a req-7e552b86-b31e-4541-a825-f5a0c0a7c8aa service nova] [instance: 737d119d-943f-4026-8b56-a4856efb6e87] Refreshing network info cache for port 90575ec3-2f97-43ef-97d9-21e1847868c9 {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2115.783503] env[62519]: DEBUG oslo_vmware.api [None req-c6ba6228-cc7a-482d-9863-b3873bce0ba7 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]5235c53e-ce9c-70fa-da77-e53acacb1495, 'name': SearchDatastore_Task, 'duration_secs': 0.012924} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2115.783898] env[62519]: DEBUG oslo_concurrency.lockutils [None req-c6ba6228-cc7a-482d-9863-b3873bce0ba7 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Releasing lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2115.784062] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-c6ba6228-cc7a-482d-9863-b3873bce0ba7 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk to [datastore1] f0ebedcb-c064-4604-be62-7c7d5d419864/f0ebedcb-c064-4604-be62-7c7d5d419864.vmdk {{(pid=62519) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2115.784338] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-3839fc5d-4a46-462c-b96d-3ee44970d767 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2115.790951] env[62519]: DEBUG oslo_vmware.api [None req-c6ba6228-cc7a-482d-9863-b3873bce0ba7 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Waiting for the task: (returnval){ [ 2115.790951] env[62519]: value = "task-1803581" [ 2115.790951] env[62519]: _type = "Task" [ 2115.790951] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2115.798774] env[62519]: DEBUG oslo_vmware.api [None req-c6ba6228-cc7a-482d-9863-b3873bce0ba7 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Task: {'id': task-1803581, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2115.871412] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1803580, 'name': CreateVM_Task, 'duration_secs': 0.356906} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2115.871585] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 737d119d-943f-4026-8b56-a4856efb6e87] Created VM on the ESX host {{(pid=62519) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2115.872332] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2274095b-1344-44f6-84ba-47d89da2907c tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2115.872487] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2274095b-1344-44f6-84ba-47d89da2907c tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Acquired lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2115.872807] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2274095b-1344-44f6-84ba-47d89da2907c tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2115.873096] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6abbcb6a-8425-413d-bf42-8bcd20f5f17a {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2115.877838] env[62519]: DEBUG oslo_vmware.api [None req-2274095b-1344-44f6-84ba-47d89da2907c tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Waiting for the task: (returnval){ [ 2115.877838] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]5270dad2-f5b8-7190-e5d1-5d10b600a764" [ 2115.877838] env[62519]: _type = "Task" [ 2115.877838] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2115.885594] env[62519]: DEBUG oslo_vmware.api [None req-2274095b-1344-44f6-84ba-47d89da2907c tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]5270dad2-f5b8-7190-e5d1-5d10b600a764, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2115.907544] env[62519]: DEBUG oslo_vmware.rw_handles [None req-42818f76-9f11-4817-ac19-14e60432aa20 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/529ebde4-f4b7-189b-c7df-64d66ae1ede6/disk-0.vmdk. 
{{(pid=62519) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 2115.908443] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b977c0d1-8301-41f5-951c-ee028fe1ceeb {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2115.914155] env[62519]: DEBUG oslo_vmware.rw_handles [None req-42818f76-9f11-4817-ac19-14e60432aa20 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/529ebde4-f4b7-189b-c7df-64d66ae1ede6/disk-0.vmdk is in state: ready. {{(pid=62519) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 2115.914322] env[62519]: ERROR oslo_vmware.rw_handles [None req-42818f76-9f11-4817-ac19-14e60432aa20 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Aborting lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/529ebde4-f4b7-189b-c7df-64d66ae1ede6/disk-0.vmdk due to incomplete transfer. [ 2115.914548] env[62519]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-41e802bc-19df-458e-afdf-d6ffd1767b11 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2115.920674] env[62519]: DEBUG oslo_vmware.rw_handles [None req-42818f76-9f11-4817-ac19-14e60432aa20 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Closed VMDK read handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/529ebde4-f4b7-189b-c7df-64d66ae1ede6/disk-0.vmdk. {{(pid=62519) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 2115.920872] env[62519]: DEBUG nova.virt.vmwareapi.images [None req-42818f76-9f11-4817-ac19-14e60432aa20 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] [instance: 812842c2-ac72-4d12-b2f7-3ccfe77a13a7] Uploaded image d4cf8f55-a2e4-4621-a02e-e1e02e813afd to the Glance image server {{(pid=62519) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 2115.923315] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-42818f76-9f11-4817-ac19-14e60432aa20 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] [instance: 812842c2-ac72-4d12-b2f7-3ccfe77a13a7] Destroying the VM {{(pid=62519) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 2115.923547] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-a4de00ca-fb10-4e58-9ada-6c603a326e84 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2115.929141] env[62519]: DEBUG oslo_vmware.api [None req-42818f76-9f11-4817-ac19-14e60432aa20 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Waiting for the task: (returnval){ [ 2115.929141] env[62519]: value = "task-1803582" [ 2115.929141] env[62519]: _type = "Task" [ 2115.929141] env[62519]: } to complete. 
{{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2115.936608] env[62519]: DEBUG oslo_vmware.api [None req-42818f76-9f11-4817-ac19-14e60432aa20 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Task: {'id': task-1803582, 'name': Destroy_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2116.215581] env[62519]: DEBUG nova.network.neutron [req-87c6d446-6cd0-4927-b8ab-d2c13190707a req-7e552b86-b31e-4541-a825-f5a0c0a7c8aa service nova] [instance: 737d119d-943f-4026-8b56-a4856efb6e87] Updated VIF entry in instance network info cache for port 90575ec3-2f97-43ef-97d9-21e1847868c9. {{(pid=62519) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2116.215960] env[62519]: DEBUG nova.network.neutron [req-87c6d446-6cd0-4927-b8ab-d2c13190707a req-7e552b86-b31e-4541-a825-f5a0c0a7c8aa service nova] [instance: 737d119d-943f-4026-8b56-a4856efb6e87] Updating instance_info_cache with network_info: [{"id": "90575ec3-2f97-43ef-97d9-21e1847868c9", "address": "fa:16:3e:59:fe:ac", "network": {"id": "40fc0e4f-fe1a-4b8c-ace4-4612cfc1a8fc", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-364463489-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "12977ed65a1b410a987b049e9d1dce3e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8e028024-a9c1-4cae-8849-ea770a7ae0e4", "external-id": "nsx-vlan-transportzone-919", "segmentation_id": 919, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap90575ec3-2f", "ovs_interfaceid": "90575ec3-2f97-43ef-97d9-21e1847868c9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2116.301246] env[62519]: DEBUG oslo_vmware.api [None req-c6ba6228-cc7a-482d-9863-b3873bce0ba7 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Task: {'id': task-1803581, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2116.390550] env[62519]: DEBUG oslo_vmware.api [None req-2274095b-1344-44f6-84ba-47d89da2907c tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]5270dad2-f5b8-7190-e5d1-5d10b600a764, 'name': SearchDatastore_Task, 'duration_secs': 0.009162} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2116.390851] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2274095b-1344-44f6-84ba-47d89da2907c tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Releasing lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2116.391088] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-2274095b-1344-44f6-84ba-47d89da2907c tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] [instance: 737d119d-943f-4026-8b56-a4856efb6e87] Processing image 15793716-f1d9-4a86-9030-717adf498693 {{(pid=62519) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2116.391319] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2274095b-1344-44f6-84ba-47d89da2907c tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2116.391463] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2274095b-1344-44f6-84ba-47d89da2907c tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Acquired lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2116.391634] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-2274095b-1344-44f6-84ba-47d89da2907c tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62519) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2116.391895] env[62519]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-9c43e378-a799-4532-97ed-22d86fddb5d6 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2116.404084] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-2274095b-1344-44f6-84ba-47d89da2907c tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62519) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2116.404370] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-2274095b-1344-44f6-84ba-47d89da2907c tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62519) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2116.404979] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ddc34acb-1caf-4e0d-9cf8-f155c32fce15 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2116.410161] env[62519]: DEBUG oslo_vmware.api [None req-2274095b-1344-44f6-84ba-47d89da2907c tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Waiting for the task: (returnval){ [ 2116.410161] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]529cdd4d-bd29-2aec-6028-aaaf73260eff" [ 2116.410161] env[62519]: _type = "Task" [ 2116.410161] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2116.417931] env[62519]: DEBUG oslo_vmware.api [None req-2274095b-1344-44f6-84ba-47d89da2907c tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]529cdd4d-bd29-2aec-6028-aaaf73260eff, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2116.438154] env[62519]: DEBUG oslo_vmware.api [None req-42818f76-9f11-4817-ac19-14e60432aa20 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Task: {'id': task-1803582, 'name': Destroy_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2116.718701] env[62519]: DEBUG oslo_concurrency.lockutils [req-87c6d446-6cd0-4927-b8ab-d2c13190707a req-7e552b86-b31e-4541-a825-f5a0c0a7c8aa service nova] Releasing lock "refresh_cache-737d119d-943f-4026-8b56-a4856efb6e87" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2116.802752] env[62519]: DEBUG oslo_vmware.api [None req-c6ba6228-cc7a-482d-9863-b3873bce0ba7 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Task: {'id': task-1803581, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.525464} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2116.803154] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-c6ba6228-cc7a-482d-9863-b3873bce0ba7 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk to [datastore1] f0ebedcb-c064-4604-be62-7c7d5d419864/f0ebedcb-c064-4604-be62-7c7d5d419864.vmdk {{(pid=62519) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2116.803435] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-c6ba6228-cc7a-482d-9863-b3873bce0ba7 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] [instance: f0ebedcb-c064-4604-be62-7c7d5d419864] Extending root virtual disk to 1048576 {{(pid=62519) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2116.803688] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-ffa63eb0-44a2-42ba-a07c-2eaa2349daca {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2116.810020] env[62519]: DEBUG oslo_vmware.api [None req-c6ba6228-cc7a-482d-9863-b3873bce0ba7 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Waiting for the task: (returnval){ [ 2116.810020] env[62519]: value = "task-1803583" [ 2116.810020] env[62519]: _type = "Task" [ 2116.810020] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2116.818274] env[62519]: DEBUG oslo_vmware.api [None req-c6ba6228-cc7a-482d-9863-b3873bce0ba7 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Task: {'id': task-1803583, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2116.919939] env[62519]: DEBUG oslo_vmware.api [None req-2274095b-1344-44f6-84ba-47d89da2907c tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]529cdd4d-bd29-2aec-6028-aaaf73260eff, 'name': SearchDatastore_Task, 'duration_secs': 0.046388} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2116.920747] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8609dd94-af9e-4f74-956a-39121346c2d3 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2116.925762] env[62519]: DEBUG oslo_vmware.api [None req-2274095b-1344-44f6-84ba-47d89da2907c tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Waiting for the task: (returnval){ [ 2116.925762] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]52baaabb-6053-ecd4-866b-1c9e51ef7a9d" [ 2116.925762] env[62519]: _type = "Task" [ 2116.925762] env[62519]: } to complete. 
{{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2116.933433] env[62519]: DEBUG oslo_vmware.api [None req-2274095b-1344-44f6-84ba-47d89da2907c tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52baaabb-6053-ecd4-866b-1c9e51ef7a9d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2116.941045] env[62519]: DEBUG oslo_vmware.api [None req-42818f76-9f11-4817-ac19-14e60432aa20 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Task: {'id': task-1803582, 'name': Destroy_Task} progress is 100%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2117.319385] env[62519]: DEBUG oslo_vmware.api [None req-c6ba6228-cc7a-482d-9863-b3873bce0ba7 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Task: {'id': task-1803583, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.064322} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2117.319653] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-c6ba6228-cc7a-482d-9863-b3873bce0ba7 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] [instance: f0ebedcb-c064-4604-be62-7c7d5d419864] Extended root virtual disk {{(pid=62519) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2117.320379] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4fba20f5-54d3-4482-99e0-893a3c4c0e82 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2117.341693] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-c6ba6228-cc7a-482d-9863-b3873bce0ba7 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] [instance: f0ebedcb-c064-4604-be62-7c7d5d419864] Reconfiguring VM instance instance-0000007b to attach disk [datastore1] f0ebedcb-c064-4604-be62-7c7d5d419864/f0ebedcb-c064-4604-be62-7c7d5d419864.vmdk or device None with type sparse {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2117.341920] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-59736103-c51a-4609-b1eb-28805db6baf6 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2117.360913] env[62519]: DEBUG oslo_vmware.api [None req-c6ba6228-cc7a-482d-9863-b3873bce0ba7 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Waiting for the task: (returnval){ [ 2117.360913] env[62519]: value = "task-1803584" [ 2117.360913] env[62519]: _type = "Task" [ 2117.360913] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2117.369187] env[62519]: DEBUG oslo_vmware.api [None req-c6ba6228-cc7a-482d-9863-b3873bce0ba7 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Task: {'id': task-1803584, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2117.436325] env[62519]: DEBUG oslo_vmware.api [None req-2274095b-1344-44f6-84ba-47d89da2907c tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52baaabb-6053-ecd4-866b-1c9e51ef7a9d, 'name': SearchDatastore_Task, 'duration_secs': 0.01695} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2117.439794] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2274095b-1344-44f6-84ba-47d89da2907c tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Releasing lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2117.440175] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-2274095b-1344-44f6-84ba-47d89da2907c tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk to [datastore1] 737d119d-943f-4026-8b56-a4856efb6e87/737d119d-943f-4026-8b56-a4856efb6e87.vmdk {{(pid=62519) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2117.440456] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c68e92f9-da0b-45a7-ace0-228eef872816 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2117.447879] env[62519]: DEBUG oslo_vmware.api [None req-42818f76-9f11-4817-ac19-14e60432aa20 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Task: {'id': task-1803582, 'name': Destroy_Task, 'duration_secs': 1.031164} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2117.449113] env[62519]: INFO nova.virt.vmwareapi.vm_util [None req-42818f76-9f11-4817-ac19-14e60432aa20 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] [instance: 812842c2-ac72-4d12-b2f7-3ccfe77a13a7] Destroyed the VM [ 2117.449373] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-42818f76-9f11-4817-ac19-14e60432aa20 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] [instance: 812842c2-ac72-4d12-b2f7-3ccfe77a13a7] Deleting Snapshot of the VM instance {{(pid=62519) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 2117.449659] env[62519]: DEBUG oslo_vmware.api [None req-2274095b-1344-44f6-84ba-47d89da2907c tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Waiting for the task: (returnval){ [ 2117.449659] env[62519]: value = "task-1803585" [ 2117.449659] env[62519]: _type = "Task" [ 2117.449659] env[62519]: } to complete. 
{{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2117.449835] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-adfb02f5-9575-4b46-b7a9-24db732fd3a7 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2117.458561] env[62519]: DEBUG oslo_vmware.api [None req-2274095b-1344-44f6-84ba-47d89da2907c tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Task: {'id': task-1803585, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2117.459749] env[62519]: DEBUG oslo_vmware.api [None req-42818f76-9f11-4817-ac19-14e60432aa20 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Waiting for the task: (returnval){ [ 2117.459749] env[62519]: value = "task-1803586" [ 2117.459749] env[62519]: _type = "Task" [ 2117.459749] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2117.466897] env[62519]: DEBUG oslo_vmware.api [None req-42818f76-9f11-4817-ac19-14e60432aa20 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Task: {'id': task-1803586, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2117.872768] env[62519]: DEBUG oslo_vmware.api [None req-c6ba6228-cc7a-482d-9863-b3873bce0ba7 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Task: {'id': task-1803584, 'name': ReconfigVM_Task, 'duration_secs': 0.270326} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2117.873228] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-c6ba6228-cc7a-482d-9863-b3873bce0ba7 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] [instance: f0ebedcb-c064-4604-be62-7c7d5d419864] Reconfigured VM instance instance-0000007b to attach disk [datastore1] f0ebedcb-c064-4604-be62-7c7d5d419864/f0ebedcb-c064-4604-be62-7c7d5d419864.vmdk or device None with type sparse {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2117.873891] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-4fb7db9a-50ad-4831-862c-6862262b37f8 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2117.882091] env[62519]: DEBUG oslo_vmware.api [None req-c6ba6228-cc7a-482d-9863-b3873bce0ba7 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Waiting for the task: (returnval){ [ 2117.882091] env[62519]: value = "task-1803587" [ 2117.882091] env[62519]: _type = "Task" [ 2117.882091] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2117.891811] env[62519]: DEBUG oslo_vmware.api [None req-c6ba6228-cc7a-482d-9863-b3873bce0ba7 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Task: {'id': task-1803587, 'name': Rename_Task} progress is 5%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2117.961978] env[62519]: DEBUG oslo_vmware.api [None req-2274095b-1344-44f6-84ba-47d89da2907c tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Task: {'id': task-1803585, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.465257} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2117.965344] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-2274095b-1344-44f6-84ba-47d89da2907c tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk to [datastore1] 737d119d-943f-4026-8b56-a4856efb6e87/737d119d-943f-4026-8b56-a4856efb6e87.vmdk {{(pid=62519) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2117.965618] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-2274095b-1344-44f6-84ba-47d89da2907c tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] [instance: 737d119d-943f-4026-8b56-a4856efb6e87] Extending root virtual disk to 1048576 {{(pid=62519) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2117.965902] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-c435aeb3-7f27-43e5-9f3a-23ef1a690b07 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2117.973295] env[62519]: DEBUG oslo_vmware.api [None req-42818f76-9f11-4817-ac19-14e60432aa20 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Task: {'id': task-1803586, 'name': RemoveSnapshot_Task} progress is 100%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2117.974623] env[62519]: DEBUG oslo_vmware.api [None req-2274095b-1344-44f6-84ba-47d89da2907c tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Waiting for the task: (returnval){ [ 2117.974623] env[62519]: value = "task-1803588" [ 2117.974623] env[62519]: _type = "Task" [ 2117.974623] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2117.982450] env[62519]: DEBUG oslo_vmware.api [None req-2274095b-1344-44f6-84ba-47d89da2907c tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Task: {'id': task-1803588, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2118.391351] env[62519]: DEBUG oslo_vmware.api [None req-c6ba6228-cc7a-482d-9863-b3873bce0ba7 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Task: {'id': task-1803587, 'name': Rename_Task, 'duration_secs': 0.142522} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2118.391645] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-c6ba6228-cc7a-482d-9863-b3873bce0ba7 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] [instance: f0ebedcb-c064-4604-be62-7c7d5d419864] Powering on the VM {{(pid=62519) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2118.391881] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e7c06714-103b-4c9f-b848-a40eb50ecac6 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2118.398613] env[62519]: DEBUG oslo_vmware.api [None req-c6ba6228-cc7a-482d-9863-b3873bce0ba7 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Waiting for the task: (returnval){ [ 2118.398613] env[62519]: value = "task-1803589" [ 2118.398613] env[62519]: _type = "Task" [ 2118.398613] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2118.405908] env[62519]: DEBUG oslo_vmware.api [None req-c6ba6228-cc7a-482d-9863-b3873bce0ba7 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Task: {'id': task-1803589, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2118.471095] env[62519]: DEBUG oslo_vmware.api [None req-42818f76-9f11-4817-ac19-14e60432aa20 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Task: {'id': task-1803586, 'name': RemoveSnapshot_Task, 'duration_secs': 0.602609} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2118.471323] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-42818f76-9f11-4817-ac19-14e60432aa20 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] [instance: 812842c2-ac72-4d12-b2f7-3ccfe77a13a7] Deleted Snapshot of the VM instance {{(pid=62519) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 2118.471617] env[62519]: DEBUG nova.compute.manager [None req-42818f76-9f11-4817-ac19-14e60432aa20 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] [instance: 812842c2-ac72-4d12-b2f7-3ccfe77a13a7] Checking state {{(pid=62519) _get_power_state /opt/stack/nova/nova/compute/manager.py:1799}} [ 2118.472420] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce3d0a53-96b4-4d86-9065-af42fa57b5d1 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2118.486967] env[62519]: DEBUG oslo_vmware.api [None req-2274095b-1344-44f6-84ba-47d89da2907c tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Task: {'id': task-1803588, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.061726} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2118.486967] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-2274095b-1344-44f6-84ba-47d89da2907c tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] [instance: 737d119d-943f-4026-8b56-a4856efb6e87] Extended root virtual disk {{(pid=62519) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2118.486967] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c33b9701-de55-4359-afab-24dd9345423b {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2118.508126] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-2274095b-1344-44f6-84ba-47d89da2907c tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] [instance: 737d119d-943f-4026-8b56-a4856efb6e87] Reconfiguring VM instance instance-0000007c to attach disk [datastore1] 737d119d-943f-4026-8b56-a4856efb6e87/737d119d-943f-4026-8b56-a4856efb6e87.vmdk or device None with type sparse {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2118.508411] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-cbbaef3a-11b9-44bd-ab70-103124ebb1a8 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2118.528266] env[62519]: DEBUG oslo_vmware.api [None req-2274095b-1344-44f6-84ba-47d89da2907c tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Waiting for the task: (returnval){ [ 2118.528266] env[62519]: value = "task-1803590" [ 2118.528266] env[62519]: _type = "Task" [ 2118.528266] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2118.536262] env[62519]: DEBUG oslo_vmware.api [None req-2274095b-1344-44f6-84ba-47d89da2907c tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Task: {'id': task-1803590, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2118.761649] env[62519]: DEBUG oslo_service.periodic_task [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62519) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2118.908441] env[62519]: DEBUG oslo_vmware.api [None req-c6ba6228-cc7a-482d-9863-b3873bce0ba7 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Task: {'id': task-1803589, 'name': PowerOnVM_Task, 'duration_secs': 0.502287} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2118.908836] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-c6ba6228-cc7a-482d-9863-b3873bce0ba7 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] [instance: f0ebedcb-c064-4604-be62-7c7d5d419864] Powered on the VM {{(pid=62519) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2118.908883] env[62519]: INFO nova.compute.manager [None req-c6ba6228-cc7a-482d-9863-b3873bce0ba7 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] [instance: f0ebedcb-c064-4604-be62-7c7d5d419864] Took 7.70 seconds to spawn the instance on the hypervisor. [ 2118.909064] env[62519]: DEBUG nova.compute.manager [None req-c6ba6228-cc7a-482d-9863-b3873bce0ba7 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] [instance: f0ebedcb-c064-4604-be62-7c7d5d419864] Checking state {{(pid=62519) _get_power_state /opt/stack/nova/nova/compute/manager.py:1799}} [ 2118.909844] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0b61c11-9cb8-4b7f-b672-bf56b49f1e3f {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2118.985289] env[62519]: INFO nova.compute.manager [None req-42818f76-9f11-4817-ac19-14e60432aa20 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] [instance: 812842c2-ac72-4d12-b2f7-3ccfe77a13a7] Shelve offloading [ 2119.039638] env[62519]: DEBUG oslo_vmware.api [None req-2274095b-1344-44f6-84ba-47d89da2907c tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Task: {'id': task-1803590, 'name': ReconfigVM_Task, 'duration_secs': 0.392978} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2119.041028] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-2274095b-1344-44f6-84ba-47d89da2907c tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] [instance: 737d119d-943f-4026-8b56-a4856efb6e87] Reconfigured VM instance instance-0000007c to attach disk [datastore1] 737d119d-943f-4026-8b56-a4856efb6e87/737d119d-943f-4026-8b56-a4856efb6e87.vmdk or device None with type sparse {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2119.041028] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-a86ed540-0aa7-439d-97f1-341dc8bd19dd {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2119.046452] env[62519]: DEBUG oslo_vmware.api [None req-2274095b-1344-44f6-84ba-47d89da2907c tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Waiting for the task: (returnval){ [ 2119.046452] env[62519]: value = "task-1803591" [ 2119.046452] env[62519]: _type = "Task" [ 2119.046452] env[62519]: } to complete. 
{{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2119.053585] env[62519]: DEBUG oslo_vmware.api [None req-2274095b-1344-44f6-84ba-47d89da2907c tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Task: {'id': task-1803591, 'name': Rename_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2119.425817] env[62519]: INFO nova.compute.manager [None req-c6ba6228-cc7a-482d-9863-b3873bce0ba7 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] [instance: f0ebedcb-c064-4604-be62-7c7d5d419864] Took 12.43 seconds to build instance. [ 2119.489056] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-42818f76-9f11-4817-ac19-14e60432aa20 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] [instance: 812842c2-ac72-4d12-b2f7-3ccfe77a13a7] Powering off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2119.489280] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-32fa0207-9c1d-444c-a96e-b3aec2a0153e {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2119.497406] env[62519]: DEBUG oslo_vmware.api [None req-42818f76-9f11-4817-ac19-14e60432aa20 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Waiting for the task: (returnval){ [ 2119.497406] env[62519]: value = "task-1803592" [ 2119.497406] env[62519]: _type = "Task" [ 2119.497406] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2119.508894] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-42818f76-9f11-4817-ac19-14e60432aa20 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] [instance: 812842c2-ac72-4d12-b2f7-3ccfe77a13a7] VM already powered off {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 2119.509162] env[62519]: DEBUG nova.compute.manager [None req-42818f76-9f11-4817-ac19-14e60432aa20 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] [instance: 812842c2-ac72-4d12-b2f7-3ccfe77a13a7] Checking state {{(pid=62519) _get_power_state /opt/stack/nova/nova/compute/manager.py:1799}} [ 2119.509978] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f309cdea-beba-439c-affb-95857708d505 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2119.515837] env[62519]: DEBUG oslo_concurrency.lockutils [None req-42818f76-9f11-4817-ac19-14e60432aa20 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Acquiring lock "refresh_cache-812842c2-ac72-4d12-b2f7-3ccfe77a13a7" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2119.516054] env[62519]: DEBUG oslo_concurrency.lockutils [None req-42818f76-9f11-4817-ac19-14e60432aa20 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Acquired lock "refresh_cache-812842c2-ac72-4d12-b2f7-3ccfe77a13a7" {{(pid=62519) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2119.516309] env[62519]: DEBUG nova.network.neutron [None req-42818f76-9f11-4817-ac19-14e60432aa20 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] [instance: 812842c2-ac72-4d12-b2f7-3ccfe77a13a7] Building network info cache for instance {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2119.556365] env[62519]: DEBUG oslo_vmware.api [None req-2274095b-1344-44f6-84ba-47d89da2907c tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Task: {'id': task-1803591, 'name': Rename_Task} progress is 99%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2120.056954] env[62519]: DEBUG oslo_vmware.api [None req-2274095b-1344-44f6-84ba-47d89da2907c tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Task: {'id': task-1803591, 'name': Rename_Task, 'duration_secs': 0.513466} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2120.057289] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-2274095b-1344-44f6-84ba-47d89da2907c tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] [instance: 737d119d-943f-4026-8b56-a4856efb6e87] Powering on the VM {{(pid=62519) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2120.057517] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b5fbda66-855a-42dc-9c9d-5fb368953418 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2120.064379] env[62519]: DEBUG oslo_vmware.api [None req-2274095b-1344-44f6-84ba-47d89da2907c tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Waiting for the task: (returnval){ [ 2120.064379] env[62519]: value = "task-1803593" [ 2120.064379] env[62519]: _type = "Task" [ 2120.064379] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2120.073322] env[62519]: DEBUG oslo_vmware.api [None req-2274095b-1344-44f6-84ba-47d89da2907c tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Task: {'id': task-1803593, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2120.539472] env[62519]: DEBUG nova.network.neutron [None req-42818f76-9f11-4817-ac19-14e60432aa20 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] [instance: 812842c2-ac72-4d12-b2f7-3ccfe77a13a7] Updating instance_info_cache with network_info: [{"id": "16e1aad6-277c-4fb9-95ca-00a66fecb425", "address": "fa:16:3e:4e:44:c5", "network": {"id": "32c7792c-9705-4830-bdbc-4c4159d566f0", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-374750788-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.241", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d73e71476254453fb23164dce09c6d41", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a0a76279-3c11-4bef-b124-2a2ee13fa377", "external-id": "nsx-vlan-transportzone-738", "segmentation_id": 738, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap16e1aad6-27", "ovs_interfaceid": "16e1aad6-277c-4fb9-95ca-00a66fecb425", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2120.577473] env[62519]: DEBUG oslo_vmware.api [None req-2274095b-1344-44f6-84ba-47d89da2907c tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Task: {'id': task-1803593, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2120.645912] env[62519]: DEBUG nova.compute.manager [req-5aecc178-47b9-4f2f-ae1b-06d7252090e7 req-c08c9afc-00bf-4b11-a40c-0d7593fbfda2 service nova] [instance: f0ebedcb-c064-4604-be62-7c7d5d419864] Received event network-changed-53ef6650-7c4f-4870-bdb3-217af06cf895 {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 2120.646124] env[62519]: DEBUG nova.compute.manager [req-5aecc178-47b9-4f2f-ae1b-06d7252090e7 req-c08c9afc-00bf-4b11-a40c-0d7593fbfda2 service nova] [instance: f0ebedcb-c064-4604-be62-7c7d5d419864] Refreshing instance network info cache due to event network-changed-53ef6650-7c4f-4870-bdb3-217af06cf895. 
{{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11628}} [ 2120.646344] env[62519]: DEBUG oslo_concurrency.lockutils [req-5aecc178-47b9-4f2f-ae1b-06d7252090e7 req-c08c9afc-00bf-4b11-a40c-0d7593fbfda2 service nova] Acquiring lock "refresh_cache-f0ebedcb-c064-4604-be62-7c7d5d419864" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2120.646488] env[62519]: DEBUG oslo_concurrency.lockutils [req-5aecc178-47b9-4f2f-ae1b-06d7252090e7 req-c08c9afc-00bf-4b11-a40c-0d7593fbfda2 service nova] Acquired lock "refresh_cache-f0ebedcb-c064-4604-be62-7c7d5d419864" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2120.646649] env[62519]: DEBUG nova.network.neutron [req-5aecc178-47b9-4f2f-ae1b-06d7252090e7 req-c08c9afc-00bf-4b11-a40c-0d7593fbfda2 service nova] [instance: f0ebedcb-c064-4604-be62-7c7d5d419864] Refreshing network info cache for port 53ef6650-7c4f-4870-bdb3-217af06cf895 {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2120.933599] env[62519]: DEBUG oslo_concurrency.lockutils [None req-c6ba6228-cc7a-482d-9863-b3873bce0ba7 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Lock "f0ebedcb-c064-4604-be62-7c7d5d419864" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 14.951s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2121.042717] env[62519]: DEBUG oslo_concurrency.lockutils [None req-42818f76-9f11-4817-ac19-14e60432aa20 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Releasing lock "refresh_cache-812842c2-ac72-4d12-b2f7-3ccfe77a13a7" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2121.075960] env[62519]: DEBUG oslo_vmware.api [None req-2274095b-1344-44f6-84ba-47d89da2907c tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Task: {'id': task-1803593, 'name': PowerOnVM_Task, 'duration_secs': 0.706985} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2121.076302] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-2274095b-1344-44f6-84ba-47d89da2907c tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] [instance: 737d119d-943f-4026-8b56-a4856efb6e87] Powered on the VM {{(pid=62519) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2121.076429] env[62519]: INFO nova.compute.manager [None req-2274095b-1344-44f6-84ba-47d89da2907c tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] [instance: 737d119d-943f-4026-8b56-a4856efb6e87] Took 7.72 seconds to spawn the instance on the hypervisor. 
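(Hedged illustration, not part of the captured log: the recurring "Invoking <SomeTask> ... Waiting for the task ... progress is N% ... completed successfully" entries above are produced by oslo.vmware's session and task-polling API, which the vmwareapi driver drives for every vCenter task shown here. A minimal sketch of that pattern follows; the host, credentials, disk path and task arguments are placeholders for illustration only, not values taken from this log.)

    # Sketch only: assumes oslo.vmware's public VMwareAPISession API.
    from oslo_vmware import api

    session = api.VMwareAPISession('vc.example.invalid', 'user', 'secret',
                                   api_retry_count=10, task_poll_interval=0.5)

    # invoke_api() issues the SOAP call and returns a task reference;
    # wait_for_task() then polls it (the "progress is N%" lines above) and
    # raises if the task finishes in an error state.
    task = session.invoke_api(
        session.vim, 'ExtendVirtualDisk_Task',
        session.vim.service_content.virtualDiskManager,
        name='[datastore1] example/example.vmdk',   # placeholder datastore path
        datacenter=None, newCapacityKb=1048576, eagerZero=False)
    session.wait_for_task(task)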
[ 2121.076592] env[62519]: DEBUG nova.compute.manager [None req-2274095b-1344-44f6-84ba-47d89da2907c tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] [instance: 737d119d-943f-4026-8b56-a4856efb6e87] Checking state {{(pid=62519) _get_power_state /opt/stack/nova/nova/compute/manager.py:1799}} [ 2121.077323] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d6bd814-7d3c-49d2-a483-38e28d2ea55b {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2121.524145] env[62519]: DEBUG nova.network.neutron [req-5aecc178-47b9-4f2f-ae1b-06d7252090e7 req-c08c9afc-00bf-4b11-a40c-0d7593fbfda2 service nova] [instance: f0ebedcb-c064-4604-be62-7c7d5d419864] Updated VIF entry in instance network info cache for port 53ef6650-7c4f-4870-bdb3-217af06cf895. {{(pid=62519) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2121.524635] env[62519]: DEBUG nova.network.neutron [req-5aecc178-47b9-4f2f-ae1b-06d7252090e7 req-c08c9afc-00bf-4b11-a40c-0d7593fbfda2 service nova] [instance: f0ebedcb-c064-4604-be62-7c7d5d419864] Updating instance_info_cache with network_info: [{"id": "53ef6650-7c4f-4870-bdb3-217af06cf895", "address": "fa:16:3e:dc:e6:e0", "network": {"id": "1aae0533-b463-4e15-b245-e7c3c91984b5", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1664918037-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.149", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ff4895c6c47e438e8fb9fbc0ffbfdc82", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1f762954-6ca5-4da5-bf0a-5d31c51ec570", "external-id": "nsx-vlan-transportzone-930", "segmentation_id": 930, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap53ef6650-7c", "ovs_interfaceid": "53ef6650-7c4f-4870-bdb3-217af06cf895", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2121.544802] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-42818f76-9f11-4817-ac19-14e60432aa20 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] [instance: 812842c2-ac72-4d12-b2f7-3ccfe77a13a7] Destroying instance {{(pid=62519) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2121.545822] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef56ba3e-634e-4001-99bb-dc84647565d0 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2121.555029] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-42818f76-9f11-4817-ac19-14e60432aa20 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] [instance: 812842c2-ac72-4d12-b2f7-3ccfe77a13a7] Unregistering the VM {{(pid=62519) _destroy_instance 
/opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2121.555189] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-0420a5b4-aa4a-48ab-a1ce-e47e5d82aeb4 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2121.593969] env[62519]: INFO nova.compute.manager [None req-2274095b-1344-44f6-84ba-47d89da2907c tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] [instance: 737d119d-943f-4026-8b56-a4856efb6e87] Took 12.95 seconds to build instance. [ 2121.639241] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-42818f76-9f11-4817-ac19-14e60432aa20 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] [instance: 812842c2-ac72-4d12-b2f7-3ccfe77a13a7] Unregistered the VM {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2121.639482] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-42818f76-9f11-4817-ac19-14e60432aa20 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] [instance: 812842c2-ac72-4d12-b2f7-3ccfe77a13a7] Deleting contents of the VM from datastore datastore1 {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2121.639631] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-42818f76-9f11-4817-ac19-14e60432aa20 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Deleting the datastore file [datastore1] 812842c2-ac72-4d12-b2f7-3ccfe77a13a7 {{(pid=62519) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2121.639895] env[62519]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-8a38b122-429a-4bc3-9dae-47428eeb9099 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2121.646604] env[62519]: DEBUG oslo_vmware.api [None req-42818f76-9f11-4817-ac19-14e60432aa20 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Waiting for the task: (returnval){ [ 2121.646604] env[62519]: value = "task-1803595" [ 2121.646604] env[62519]: _type = "Task" [ 2121.646604] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2121.654774] env[62519]: DEBUG oslo_vmware.api [None req-42818f76-9f11-4817-ac19-14e60432aa20 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Task: {'id': task-1803595, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2121.739396] env[62519]: DEBUG nova.compute.manager [req-b7e0948a-ad3a-4669-93f6-a5440cf6c86b req-27b26152-baca-43fb-b52c-d54d758754d7 service nova] [instance: 737d119d-943f-4026-8b56-a4856efb6e87] Received event network-changed-90575ec3-2f97-43ef-97d9-21e1847868c9 {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 2121.739592] env[62519]: DEBUG nova.compute.manager [req-b7e0948a-ad3a-4669-93f6-a5440cf6c86b req-27b26152-baca-43fb-b52c-d54d758754d7 service nova] [instance: 737d119d-943f-4026-8b56-a4856efb6e87] Refreshing instance network info cache due to event network-changed-90575ec3-2f97-43ef-97d9-21e1847868c9. 
{{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11628}} [ 2121.739822] env[62519]: DEBUG oslo_concurrency.lockutils [req-b7e0948a-ad3a-4669-93f6-a5440cf6c86b req-27b26152-baca-43fb-b52c-d54d758754d7 service nova] Acquiring lock "refresh_cache-737d119d-943f-4026-8b56-a4856efb6e87" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2121.740530] env[62519]: DEBUG oslo_concurrency.lockutils [req-b7e0948a-ad3a-4669-93f6-a5440cf6c86b req-27b26152-baca-43fb-b52c-d54d758754d7 service nova] Acquired lock "refresh_cache-737d119d-943f-4026-8b56-a4856efb6e87" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2121.740530] env[62519]: DEBUG nova.network.neutron [req-b7e0948a-ad3a-4669-93f6-a5440cf6c86b req-27b26152-baca-43fb-b52c-d54d758754d7 service nova] [instance: 737d119d-943f-4026-8b56-a4856efb6e87] Refreshing network info cache for port 90575ec3-2f97-43ef-97d9-21e1847868c9 {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2122.027701] env[62519]: DEBUG oslo_concurrency.lockutils [req-5aecc178-47b9-4f2f-ae1b-06d7252090e7 req-c08c9afc-00bf-4b11-a40c-0d7593fbfda2 service nova] Releasing lock "refresh_cache-f0ebedcb-c064-4604-be62-7c7d5d419864" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2122.157336] env[62519]: DEBUG oslo_vmware.api [None req-42818f76-9f11-4817-ac19-14e60432aa20 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Task: {'id': task-1803595, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.133185} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2122.157689] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-42818f76-9f11-4817-ac19-14e60432aa20 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Deleted the datastore file {{(pid=62519) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2122.157749] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-42818f76-9f11-4817-ac19-14e60432aa20 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] [instance: 812842c2-ac72-4d12-b2f7-3ccfe77a13a7] Deleted contents of the VM from datastore datastore1 {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2122.157886] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-42818f76-9f11-4817-ac19-14e60432aa20 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] [instance: 812842c2-ac72-4d12-b2f7-3ccfe77a13a7] Instance destroyed {{(pid=62519) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2122.316502] env[62519]: INFO nova.scheduler.client.report [None req-42818f76-9f11-4817-ac19-14e60432aa20 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Deleted allocations for instance 812842c2-ac72-4d12-b2f7-3ccfe77a13a7 [ 2122.448030] env[62519]: DEBUG nova.network.neutron [req-b7e0948a-ad3a-4669-93f6-a5440cf6c86b req-27b26152-baca-43fb-b52c-d54d758754d7 service nova] [instance: 737d119d-943f-4026-8b56-a4856efb6e87] Updated VIF entry in instance network info cache for port 90575ec3-2f97-43ef-97d9-21e1847868c9. 
{{(pid=62519) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2122.448401] env[62519]: DEBUG nova.network.neutron [req-b7e0948a-ad3a-4669-93f6-a5440cf6c86b req-27b26152-baca-43fb-b52c-d54d758754d7 service nova] [instance: 737d119d-943f-4026-8b56-a4856efb6e87] Updating instance_info_cache with network_info: [{"id": "90575ec3-2f97-43ef-97d9-21e1847868c9", "address": "fa:16:3e:59:fe:ac", "network": {"id": "40fc0e4f-fe1a-4b8c-ace4-4612cfc1a8fc", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-364463489-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.170", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "12977ed65a1b410a987b049e9d1dce3e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8e028024-a9c1-4cae-8849-ea770a7ae0e4", "external-id": "nsx-vlan-transportzone-919", "segmentation_id": 919, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap90575ec3-2f", "ovs_interfaceid": "90575ec3-2f97-43ef-97d9-21e1847868c9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2122.675569] env[62519]: DEBUG nova.compute.manager [req-44605e82-23d0-41de-a97f-0e64ff1bb49c req-5e4a4de0-88d3-44dd-bd91-c01f230e30e4 service nova] [instance: 812842c2-ac72-4d12-b2f7-3ccfe77a13a7] Received event network-vif-unplugged-16e1aad6-277c-4fb9-95ca-00a66fecb425 {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 2122.675709] env[62519]: DEBUG oslo_concurrency.lockutils [req-44605e82-23d0-41de-a97f-0e64ff1bb49c req-5e4a4de0-88d3-44dd-bd91-c01f230e30e4 service nova] Acquiring lock "812842c2-ac72-4d12-b2f7-3ccfe77a13a7-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2122.675919] env[62519]: DEBUG oslo_concurrency.lockutils [req-44605e82-23d0-41de-a97f-0e64ff1bb49c req-5e4a4de0-88d3-44dd-bd91-c01f230e30e4 service nova] Lock "812842c2-ac72-4d12-b2f7-3ccfe77a13a7-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2122.676105] env[62519]: DEBUG oslo_concurrency.lockutils [req-44605e82-23d0-41de-a97f-0e64ff1bb49c req-5e4a4de0-88d3-44dd-bd91-c01f230e30e4 service nova] Lock "812842c2-ac72-4d12-b2f7-3ccfe77a13a7-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2122.676276] env[62519]: DEBUG nova.compute.manager [req-44605e82-23d0-41de-a97f-0e64ff1bb49c req-5e4a4de0-88d3-44dd-bd91-c01f230e30e4 service nova] [instance: 812842c2-ac72-4d12-b2f7-3ccfe77a13a7] No waiting events found dispatching 
network-vif-unplugged-16e1aad6-277c-4fb9-95ca-00a66fecb425 {{(pid=62519) pop_instance_event /opt/stack/nova/nova/compute/manager.py:323}} [ 2122.676441] env[62519]: WARNING nova.compute.manager [req-44605e82-23d0-41de-a97f-0e64ff1bb49c req-5e4a4de0-88d3-44dd-bd91-c01f230e30e4 service nova] [instance: 812842c2-ac72-4d12-b2f7-3ccfe77a13a7] Received unexpected event network-vif-unplugged-16e1aad6-277c-4fb9-95ca-00a66fecb425 for instance with vm_state shelved_offloaded and task_state None. [ 2122.676646] env[62519]: DEBUG nova.compute.manager [req-44605e82-23d0-41de-a97f-0e64ff1bb49c req-5e4a4de0-88d3-44dd-bd91-c01f230e30e4 service nova] [instance: 812842c2-ac72-4d12-b2f7-3ccfe77a13a7] Received event network-changed-16e1aad6-277c-4fb9-95ca-00a66fecb425 {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 2122.676811] env[62519]: DEBUG nova.compute.manager [req-44605e82-23d0-41de-a97f-0e64ff1bb49c req-5e4a4de0-88d3-44dd-bd91-c01f230e30e4 service nova] [instance: 812842c2-ac72-4d12-b2f7-3ccfe77a13a7] Refreshing instance network info cache due to event network-changed-16e1aad6-277c-4fb9-95ca-00a66fecb425. {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11628}} [ 2122.676993] env[62519]: DEBUG oslo_concurrency.lockutils [req-44605e82-23d0-41de-a97f-0e64ff1bb49c req-5e4a4de0-88d3-44dd-bd91-c01f230e30e4 service nova] Acquiring lock "refresh_cache-812842c2-ac72-4d12-b2f7-3ccfe77a13a7" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2122.677144] env[62519]: DEBUG oslo_concurrency.lockutils [req-44605e82-23d0-41de-a97f-0e64ff1bb49c req-5e4a4de0-88d3-44dd-bd91-c01f230e30e4 service nova] Acquired lock "refresh_cache-812842c2-ac72-4d12-b2f7-3ccfe77a13a7" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2122.677302] env[62519]: DEBUG nova.network.neutron [req-44605e82-23d0-41de-a97f-0e64ff1bb49c req-5e4a4de0-88d3-44dd-bd91-c01f230e30e4 service nova] [instance: 812842c2-ac72-4d12-b2f7-3ccfe77a13a7] Refreshing network info cache for port 16e1aad6-277c-4fb9-95ca-00a66fecb425 {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2122.821074] env[62519]: DEBUG oslo_concurrency.lockutils [None req-42818f76-9f11-4817-ac19-14e60432aa20 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2122.821383] env[62519]: DEBUG oslo_concurrency.lockutils [None req-42818f76-9f11-4817-ac19-14e60432aa20 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2122.821605] env[62519]: DEBUG nova.objects.instance [None req-42818f76-9f11-4817-ac19-14e60432aa20 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Lazy-loading 'resources' on Instance uuid 812842c2-ac72-4d12-b2f7-3ccfe77a13a7 {{(pid=62519) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2122.951516] env[62519]: DEBUG oslo_concurrency.lockutils 
[req-b7e0948a-ad3a-4669-93f6-a5440cf6c86b req-27b26152-baca-43fb-b52c-d54d758754d7 service nova] Releasing lock "refresh_cache-737d119d-943f-4026-8b56-a4856efb6e87" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2123.101028] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2274095b-1344-44f6-84ba-47d89da2907c tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Lock "737d119d-943f-4026-8b56-a4856efb6e87" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 15.506s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2123.323776] env[62519]: DEBUG nova.objects.instance [None req-42818f76-9f11-4817-ac19-14e60432aa20 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Lazy-loading 'numa_topology' on Instance uuid 812842c2-ac72-4d12-b2f7-3ccfe77a13a7 {{(pid=62519) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2123.404783] env[62519]: DEBUG nova.network.neutron [req-44605e82-23d0-41de-a97f-0e64ff1bb49c req-5e4a4de0-88d3-44dd-bd91-c01f230e30e4 service nova] [instance: 812842c2-ac72-4d12-b2f7-3ccfe77a13a7] Updated VIF entry in instance network info cache for port 16e1aad6-277c-4fb9-95ca-00a66fecb425. {{(pid=62519) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2123.404980] env[62519]: DEBUG nova.network.neutron [req-44605e82-23d0-41de-a97f-0e64ff1bb49c req-5e4a4de0-88d3-44dd-bd91-c01f230e30e4 service nova] [instance: 812842c2-ac72-4d12-b2f7-3ccfe77a13a7] Updating instance_info_cache with network_info: [{"id": "16e1aad6-277c-4fb9-95ca-00a66fecb425", "address": "fa:16:3e:4e:44:c5", "network": {"id": "32c7792c-9705-4830-bdbc-4c4159d566f0", "bridge": null, "label": "tempest-ServerActionsTestOtherB-374750788-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.241", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d73e71476254453fb23164dce09c6d41", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "unbound", "details": {}, "devname": "tap16e1aad6-27", "ovs_interfaceid": null, "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2123.761745] env[62519]: DEBUG oslo_service.periodic_task [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62519) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2123.761917] env[62519]: DEBUG oslo_service.periodic_task [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Running periodic task ComputeManager.update_available_resource {{(pid=62519) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2123.826186] env[62519]: DEBUG nova.objects.base [None req-42818f76-9f11-4817-ac19-14e60432aa20 
tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Object Instance<812842c2-ac72-4d12-b2f7-3ccfe77a13a7> lazy-loaded attributes: resources,numa_topology {{(pid=62519) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 2123.870292] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04da4ad9-b0d6-4ada-9c4a-d7f9335ee99d {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2123.878308] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b71d7a5b-bcef-4c0b-a552-f6e357d73888 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2123.907805] env[62519]: DEBUG oslo_concurrency.lockutils [req-44605e82-23d0-41de-a97f-0e64ff1bb49c req-5e4a4de0-88d3-44dd-bd91-c01f230e30e4 service nova] Releasing lock "refresh_cache-812842c2-ac72-4d12-b2f7-3ccfe77a13a7" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2123.908752] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-757ef306-0b82-4590-a462-7326b8cf309c {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2123.915799] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-882221ba-83b0-466c-9bee-f1f580b186f1 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2123.928518] env[62519]: DEBUG nova.compute.provider_tree [None req-42818f76-9f11-4817-ac19-14e60432aa20 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Inventory has not changed in ProviderTree for provider: f8ca0d98-9158-4b85-ae0e-b106f966dd44 {{(pid=62519) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2124.265458] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2124.431493] env[62519]: DEBUG nova.scheduler.client.report [None req-42818f76-9f11-4817-ac19-14e60432aa20 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Inventory has not changed for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 159, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2124.514467] env[62519]: DEBUG oslo_concurrency.lockutils [None req-052b7f12-2086-4a3a-b1f2-eec61d975949 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Acquiring lock "812842c2-ac72-4d12-b2f7-3ccfe77a13a7" by "nova.compute.manager.ComputeManager.unshelve_instance.<locals>.do_unshelve_instance" {{(pid=62519) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2124.936175] env[62519]: DEBUG oslo_concurrency.lockutils [None req-42818f76-9f11-4817-ac19-14e60432aa20 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.115s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2124.938820] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.673s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2124.939015] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2124.939181] env[62519]: DEBUG nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62519) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2124.940338] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dac67819-16a9-4c69-a8ea-1cf7b757171b {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2124.949116] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8b024a3-3605-47be-b00e-1724eb7f15e4 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2124.963784] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-465f20be-eb27-4cc5-b4b9-df6d08f0d18e {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2124.970103] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61e66335-2d67-4c71-8d8c-bdb0d4cb8556 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2125.000422] env[62519]: DEBUG nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180363MB free_disk=159GB free_vcpus=48 pci_devices=None {{(pid=62519) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2125.000567] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2125.000755] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Lock "compute_resources" acquired by 
"nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2125.448371] env[62519]: DEBUG oslo_concurrency.lockutils [None req-42818f76-9f11-4817-ac19-14e60432aa20 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Lock "812842c2-ac72-4d12-b2f7-3ccfe77a13a7" "released" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: held 22.608s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2125.449009] env[62519]: DEBUG oslo_concurrency.lockutils [None req-052b7f12-2086-4a3a-b1f2-eec61d975949 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Lock "812842c2-ac72-4d12-b2f7-3ccfe77a13a7" acquired by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: waited 0.935s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2125.449210] env[62519]: INFO nova.compute.manager [None req-052b7f12-2086-4a3a-b1f2-eec61d975949 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] [instance: 812842c2-ac72-4d12-b2f7-3ccfe77a13a7] Unshelving [ 2126.021113] env[62519]: DEBUG nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Instance f0ebedcb-c064-4604-be62-7c7d5d419864 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62519) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2126.021274] env[62519]: DEBUG nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Instance 737d119d-943f-4026-8b56-a4856efb6e87 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62519) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2126.472981] env[62519]: DEBUG oslo_concurrency.lockutils [None req-052b7f12-2086-4a3a-b1f2-eec61d975949 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2126.524166] env[62519]: DEBUG nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Instance 812842c2-ac72-4d12-b2f7-3ccfe77a13a7 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62519) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 2126.524400] env[62519]: DEBUG nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Total usable vcpus: 48, total allocated vcpus: 2 {{(pid=62519) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2126.524548] env[62519]: DEBUG nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=896MB phys_disk=200GB used_disk=2GB total_vcpus=48 used_vcpus=2 pci_stats=[] {{(pid=62519) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2126.574476] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87ccbd03-36fd-464b-8c10-a12a848ea349 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2126.582419] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-efede454-3c40-4523-9ffe-9c2ec661c46c {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2126.612460] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee1de2ce-b963-4568-a5c6-67d4b42cf4fa {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2126.619755] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e0ea698-e7a8-44cd-a3da-bf99b5780816 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2126.632778] env[62519]: DEBUG nova.compute.provider_tree [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Inventory has not changed in ProviderTree for provider: f8ca0d98-9158-4b85-ae0e-b106f966dd44 {{(pid=62519) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2127.136080] env[62519]: DEBUG nova.scheduler.client.report [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Inventory has not changed for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 159, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2127.641477] env[62519]: DEBUG nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62519) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2127.641871] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.641s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2127.641964] env[62519]: DEBUG oslo_concurrency.lockutils [None 
req-052b7f12-2086-4a3a-b1f2-eec61d975949 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 1.169s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2127.642245] env[62519]: DEBUG nova.objects.instance [None req-052b7f12-2086-4a3a-b1f2-eec61d975949 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Lazy-loading 'pci_requests' on Instance uuid 812842c2-ac72-4d12-b2f7-3ccfe77a13a7 {{(pid=62519) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2128.146644] env[62519]: DEBUG nova.objects.instance [None req-052b7f12-2086-4a3a-b1f2-eec61d975949 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Lazy-loading 'numa_topology' on Instance uuid 812842c2-ac72-4d12-b2f7-3ccfe77a13a7 {{(pid=62519) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2128.643455] env[62519]: DEBUG oslo_service.periodic_task [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62519) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2128.643818] env[62519]: DEBUG oslo_service.periodic_task [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62519) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2128.643818] env[62519]: DEBUG oslo_service.periodic_task [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62519) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2128.643892] env[62519]: DEBUG oslo_service.periodic_task [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62519) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2128.649506] env[62519]: INFO nova.compute.claims [None req-052b7f12-2086-4a3a-b1f2-eec61d975949 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] [instance: 812842c2-ac72-4d12-b2f7-3ccfe77a13a7] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2128.762277] env[62519]: DEBUG oslo_service.periodic_task [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62519) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2128.762528] env[62519]: DEBUG nova.compute.manager [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=62519) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11037}} [ 2129.705667] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99d2b7ce-9509-4604-898d-b7542445fbbe {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2129.713197] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-001e83f9-ee84-4b6a-9211-6226cb6829b7 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2129.741847] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69cbab38-a9b7-4c25-a138-3d077e5a206a {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2129.748839] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab288f21-7a10-407b-ba10-ebb288a2ad54 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2129.761715] env[62519]: DEBUG oslo_service.periodic_task [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62519) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2129.761870] env[62519]: DEBUG nova.compute.manager [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Starting heal instance info cache {{(pid=62519) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10418}} [ 2129.761988] env[62519]: DEBUG nova.compute.manager [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Rebuilding the list of instances to heal {{(pid=62519) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10422}} [ 2129.763458] env[62519]: DEBUG nova.compute.provider_tree [None req-052b7f12-2086-4a3a-b1f2-eec61d975949 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Inventory has not changed in ProviderTree for provider: f8ca0d98-9158-4b85-ae0e-b106f966dd44 {{(pid=62519) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2130.266780] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Acquiring lock "refresh_cache-812842c2-ac72-4d12-b2f7-3ccfe77a13a7" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2130.266928] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Acquired lock "refresh_cache-812842c2-ac72-4d12-b2f7-3ccfe77a13a7" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2130.267046] env[62519]: DEBUG nova.network.neutron [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] [instance: 812842c2-ac72-4d12-b2f7-3ccfe77a13a7] Forcefully refreshing network info cache for instance {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2061}} [ 2130.267199] env[62519]: DEBUG nova.objects.instance [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Lazy-loading 'info_cache' on Instance uuid 812842c2-ac72-4d12-b2f7-3ccfe77a13a7 {{(pid=62519) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2130.268943] env[62519]: DEBUG nova.scheduler.client.report [None 
req-052b7f12-2086-4a3a-b1f2-eec61d975949 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Inventory has not changed for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 159, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2130.774860] env[62519]: DEBUG oslo_concurrency.lockutils [None req-052b7f12-2086-4a3a-b1f2-eec61d975949 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.133s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2130.803976] env[62519]: INFO nova.network.neutron [None req-052b7f12-2086-4a3a-b1f2-eec61d975949 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] [instance: 812842c2-ac72-4d12-b2f7-3ccfe77a13a7] Updating port 16e1aad6-277c-4fb9-95ca-00a66fecb425 with attributes {'binding:host_id': 'cpu-1', 'device_owner': 'compute:nova'} [ 2131.997829] env[62519]: DEBUG nova.network.neutron [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] [instance: 812842c2-ac72-4d12-b2f7-3ccfe77a13a7] Updating instance_info_cache with network_info: [{"id": "16e1aad6-277c-4fb9-95ca-00a66fecb425", "address": "fa:16:3e:4e:44:c5", "network": {"id": "32c7792c-9705-4830-bdbc-4c4159d566f0", "bridge": null, "label": "tempest-ServerActionsTestOtherB-374750788-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.241", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d73e71476254453fb23164dce09c6d41", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "unbound", "details": {}, "devname": "tap16e1aad6-27", "ovs_interfaceid": null, "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2132.188327] env[62519]: DEBUG nova.compute.manager [req-564e25aa-99ad-4c49-9b95-39112fb4d827 req-626b3c3b-3c8f-4dc0-9d80-5bf2c0b6fc59 service nova] [instance: 812842c2-ac72-4d12-b2f7-3ccfe77a13a7] Received event network-vif-plugged-16e1aad6-277c-4fb9-95ca-00a66fecb425 {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 2132.188588] env[62519]: DEBUG oslo_concurrency.lockutils [req-564e25aa-99ad-4c49-9b95-39112fb4d827 req-626b3c3b-3c8f-4dc0-9d80-5bf2c0b6fc59 service nova] Acquiring lock "812842c2-ac72-4d12-b2f7-3ccfe77a13a7-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=62519) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2132.188732] env[62519]: DEBUG oslo_concurrency.lockutils [req-564e25aa-99ad-4c49-9b95-39112fb4d827 req-626b3c3b-3c8f-4dc0-9d80-5bf2c0b6fc59 service nova] Lock "812842c2-ac72-4d12-b2f7-3ccfe77a13a7-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2132.188917] env[62519]: DEBUG oslo_concurrency.lockutils [req-564e25aa-99ad-4c49-9b95-39112fb4d827 req-626b3c3b-3c8f-4dc0-9d80-5bf2c0b6fc59 service nova] Lock "812842c2-ac72-4d12-b2f7-3ccfe77a13a7-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2132.189096] env[62519]: DEBUG nova.compute.manager [req-564e25aa-99ad-4c49-9b95-39112fb4d827 req-626b3c3b-3c8f-4dc0-9d80-5bf2c0b6fc59 service nova] [instance: 812842c2-ac72-4d12-b2f7-3ccfe77a13a7] No waiting events found dispatching network-vif-plugged-16e1aad6-277c-4fb9-95ca-00a66fecb425 {{(pid=62519) pop_instance_event /opt/stack/nova/nova/compute/manager.py:323}} [ 2132.189266] env[62519]: WARNING nova.compute.manager [req-564e25aa-99ad-4c49-9b95-39112fb4d827 req-626b3c3b-3c8f-4dc0-9d80-5bf2c0b6fc59 service nova] [instance: 812842c2-ac72-4d12-b2f7-3ccfe77a13a7] Received unexpected event network-vif-plugged-16e1aad6-277c-4fb9-95ca-00a66fecb425 for instance with vm_state shelved_offloaded and task_state spawning. [ 2132.271321] env[62519]: DEBUG oslo_concurrency.lockutils [None req-052b7f12-2086-4a3a-b1f2-eec61d975949 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Acquiring lock "refresh_cache-812842c2-ac72-4d12-b2f7-3ccfe77a13a7" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2132.500904] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Releasing lock "refresh_cache-812842c2-ac72-4d12-b2f7-3ccfe77a13a7" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2132.501160] env[62519]: DEBUG nova.compute.manager [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] [instance: 812842c2-ac72-4d12-b2f7-3ccfe77a13a7] Updated the network info_cache for instance {{(pid=62519) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10489}} [ 2132.501451] env[62519]: DEBUG oslo_concurrency.lockutils [None req-052b7f12-2086-4a3a-b1f2-eec61d975949 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Acquired lock "refresh_cache-812842c2-ac72-4d12-b2f7-3ccfe77a13a7" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2132.501627] env[62519]: DEBUG nova.network.neutron [None req-052b7f12-2086-4a3a-b1f2-eec61d975949 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] [instance: 812842c2-ac72-4d12-b2f7-3ccfe77a13a7] Building network info cache for instance {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2133.206044] env[62519]: DEBUG nova.network.neutron [None req-052b7f12-2086-4a3a-b1f2-eec61d975949 tempest-ServerActionsTestOtherB-1162239603 
tempest-ServerActionsTestOtherB-1162239603-project-member] [instance: 812842c2-ac72-4d12-b2f7-3ccfe77a13a7] Updating instance_info_cache with network_info: [{"id": "16e1aad6-277c-4fb9-95ca-00a66fecb425", "address": "fa:16:3e:4e:44:c5", "network": {"id": "32c7792c-9705-4830-bdbc-4c4159d566f0", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-374750788-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.241", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d73e71476254453fb23164dce09c6d41", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a0a76279-3c11-4bef-b124-2a2ee13fa377", "external-id": "nsx-vlan-transportzone-738", "segmentation_id": 738, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap16e1aad6-27", "ovs_interfaceid": "16e1aad6-277c-4fb9-95ca-00a66fecb425", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2133.709008] env[62519]: DEBUG oslo_concurrency.lockutils [None req-052b7f12-2086-4a3a-b1f2-eec61d975949 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Releasing lock "refresh_cache-812842c2-ac72-4d12-b2f7-3ccfe77a13a7" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2133.739730] env[62519]: DEBUG nova.virt.hardware [None req-052b7f12-2086-4a3a-b1f2-eec61d975949 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T07:56:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='1df3b492c07f9a4095f4349e04a474ea',container_format='bare',created_at=2024-12-12T08:23:05Z,direct_url=,disk_format='vmdk',id=d4cf8f55-a2e4-4621-a02e-e1e02e813afd,min_disk=1,min_ram=0,name='tempest-ServerActionsTestOtherB-server-1254252151-shelved',owner='d73e71476254453fb23164dce09c6d41',properties=ImageMetaProps,protected=,size=31668224,status='active',tags=,updated_at=2024-12-12T08:23:19Z,virtual_size=,visibility=), allow threads: False {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 2133.739987] env[62519]: DEBUG nova.virt.hardware [None req-052b7f12-2086-4a3a-b1f2-eec61d975949 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Flavor limits 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2133.740147] env[62519]: DEBUG nova.virt.hardware [None req-052b7f12-2086-4a3a-b1f2-eec61d975949 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Image limits 0:0:0 {{(pid=62519) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 2133.740379] env[62519]: DEBUG nova.virt.hardware [None req-052b7f12-2086-4a3a-b1f2-eec61d975949 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Flavor pref 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2133.740532] env[62519]: DEBUG nova.virt.hardware [None req-052b7f12-2086-4a3a-b1f2-eec61d975949 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Image pref 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 2133.740680] env[62519]: DEBUG nova.virt.hardware [None req-052b7f12-2086-4a3a-b1f2-eec61d975949 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 2133.740893] env[62519]: DEBUG nova.virt.hardware [None req-052b7f12-2086-4a3a-b1f2-eec61d975949 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 2133.741057] env[62519]: DEBUG nova.virt.hardware [None req-052b7f12-2086-4a3a-b1f2-eec61d975949 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62519) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 2133.741224] env[62519]: DEBUG nova.virt.hardware [None req-052b7f12-2086-4a3a-b1f2-eec61d975949 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Got 1 possible topologies {{(pid=62519) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 2133.741385] env[62519]: DEBUG nova.virt.hardware [None req-052b7f12-2086-4a3a-b1f2-eec61d975949 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 2133.741553] env[62519]: DEBUG nova.virt.hardware [None req-052b7f12-2086-4a3a-b1f2-eec61d975949 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 2133.742454] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2378d310-ff5e-4200-8fb3-b32c0f93fc6e {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2133.750647] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8509bdc-3aba-4b54-8eb3-bf500382fa8e {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2133.764346] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-052b7f12-2086-4a3a-b1f2-eec61d975949 tempest-ServerActionsTestOtherB-1162239603 
tempest-ServerActionsTestOtherB-1162239603-project-member] [instance: 812842c2-ac72-4d12-b2f7-3ccfe77a13a7] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:4e:44:c5', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a0a76279-3c11-4bef-b124-2a2ee13fa377', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '16e1aad6-277c-4fb9-95ca-00a66fecb425', 'vif_model': 'vmxnet3'}] {{(pid=62519) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2133.771984] env[62519]: DEBUG oslo.service.loopingcall [None req-052b7f12-2086-4a3a-b1f2-eec61d975949 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62519) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2133.772261] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 812842c2-ac72-4d12-b2f7-3ccfe77a13a7] Creating VM on the ESX host {{(pid=62519) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2133.772484] env[62519]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-99f51e6b-e9c9-460f-9ade-d4ebc22df1f3 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2133.792957] env[62519]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2133.792957] env[62519]: value = "task-1803596" [ 2133.792957] env[62519]: _type = "Task" [ 2133.792957] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2133.800496] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1803596, 'name': CreateVM_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2134.213558] env[62519]: DEBUG nova.compute.manager [req-49b8694e-2515-4659-baea-a6edf681545d req-370725e5-e617-4bc3-864f-7da3e8f2cc42 service nova] [instance: 812842c2-ac72-4d12-b2f7-3ccfe77a13a7] Received event network-changed-16e1aad6-277c-4fb9-95ca-00a66fecb425 {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 2134.213824] env[62519]: DEBUG nova.compute.manager [req-49b8694e-2515-4659-baea-a6edf681545d req-370725e5-e617-4bc3-864f-7da3e8f2cc42 service nova] [instance: 812842c2-ac72-4d12-b2f7-3ccfe77a13a7] Refreshing instance network info cache due to event network-changed-16e1aad6-277c-4fb9-95ca-00a66fecb425. 
{{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11628}} [ 2134.213926] env[62519]: DEBUG oslo_concurrency.lockutils [req-49b8694e-2515-4659-baea-a6edf681545d req-370725e5-e617-4bc3-864f-7da3e8f2cc42 service nova] Acquiring lock "refresh_cache-812842c2-ac72-4d12-b2f7-3ccfe77a13a7" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2134.214087] env[62519]: DEBUG oslo_concurrency.lockutils [req-49b8694e-2515-4659-baea-a6edf681545d req-370725e5-e617-4bc3-864f-7da3e8f2cc42 service nova] Acquired lock "refresh_cache-812842c2-ac72-4d12-b2f7-3ccfe77a13a7" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2134.214248] env[62519]: DEBUG nova.network.neutron [req-49b8694e-2515-4659-baea-a6edf681545d req-370725e5-e617-4bc3-864f-7da3e8f2cc42 service nova] [instance: 812842c2-ac72-4d12-b2f7-3ccfe77a13a7] Refreshing network info cache for port 16e1aad6-277c-4fb9-95ca-00a66fecb425 {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2134.304250] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1803596, 'name': CreateVM_Task, 'duration_secs': 0.434144} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2134.304410] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 812842c2-ac72-4d12-b2f7-3ccfe77a13a7] Created VM on the ESX host {{(pid=62519) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2134.305012] env[62519]: DEBUG oslo_concurrency.lockutils [None req-052b7f12-2086-4a3a-b1f2-eec61d975949 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d4cf8f55-a2e4-4621-a02e-e1e02e813afd" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2134.305177] env[62519]: DEBUG oslo_concurrency.lockutils [None req-052b7f12-2086-4a3a-b1f2-eec61d975949 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d4cf8f55-a2e4-4621-a02e-e1e02e813afd" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2134.305536] env[62519]: DEBUG oslo_concurrency.lockutils [None req-052b7f12-2086-4a3a-b1f2-eec61d975949 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/d4cf8f55-a2e4-4621-a02e-e1e02e813afd" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2134.305773] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-337b87ae-6abc-45f4-ac8e-ba3e31831199 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2134.310024] env[62519]: DEBUG oslo_vmware.api [None req-052b7f12-2086-4a3a-b1f2-eec61d975949 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Waiting for the task: (returnval){ [ 2134.310024] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]5266b4e7-f5d9-451f-d9cf-d6cff2f0ad4b" [ 2134.310024] env[62519]: _type = "Task" [ 2134.310024] env[62519]: } to complete. 
{{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2134.317019] env[62519]: DEBUG oslo_vmware.api [None req-052b7f12-2086-4a3a-b1f2-eec61d975949 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]5266b4e7-f5d9-451f-d9cf-d6cff2f0ad4b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2134.822052] env[62519]: DEBUG oslo_concurrency.lockutils [None req-052b7f12-2086-4a3a-b1f2-eec61d975949 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d4cf8f55-a2e4-4621-a02e-e1e02e813afd" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2134.822316] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-052b7f12-2086-4a3a-b1f2-eec61d975949 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] [instance: 812842c2-ac72-4d12-b2f7-3ccfe77a13a7] Processing image d4cf8f55-a2e4-4621-a02e-e1e02e813afd {{(pid=62519) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2134.822551] env[62519]: DEBUG oslo_concurrency.lockutils [None req-052b7f12-2086-4a3a-b1f2-eec61d975949 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d4cf8f55-a2e4-4621-a02e-e1e02e813afd/d4cf8f55-a2e4-4621-a02e-e1e02e813afd.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2134.822699] env[62519]: DEBUG oslo_concurrency.lockutils [None req-052b7f12-2086-4a3a-b1f2-eec61d975949 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d4cf8f55-a2e4-4621-a02e-e1e02e813afd/d4cf8f55-a2e4-4621-a02e-e1e02e813afd.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2134.822874] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-052b7f12-2086-4a3a-b1f2-eec61d975949 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62519) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2134.823137] env[62519]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-fa4abecb-8463-480b-8d67-e07a9abed8cd {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2134.831360] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-052b7f12-2086-4a3a-b1f2-eec61d975949 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62519) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2134.831571] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-052b7f12-2086-4a3a-b1f2-eec61d975949 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62519) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2134.832334] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d8fff200-387f-4fb6-a102-b23675ecd1d6 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2134.839692] env[62519]: DEBUG oslo_vmware.api [None req-052b7f12-2086-4a3a-b1f2-eec61d975949 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Waiting for the task: (returnval){ [ 2134.839692] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]5286cd01-b8d1-acb9-a837-2ffc45c7ede9" [ 2134.839692] env[62519]: _type = "Task" [ 2134.839692] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2134.847218] env[62519]: DEBUG oslo_vmware.api [None req-052b7f12-2086-4a3a-b1f2-eec61d975949 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]5286cd01-b8d1-acb9-a837-2ffc45c7ede9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2134.924682] env[62519]: DEBUG nova.network.neutron [req-49b8694e-2515-4659-baea-a6edf681545d req-370725e5-e617-4bc3-864f-7da3e8f2cc42 service nova] [instance: 812842c2-ac72-4d12-b2f7-3ccfe77a13a7] Updated VIF entry in instance network info cache for port 16e1aad6-277c-4fb9-95ca-00a66fecb425. {{(pid=62519) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2134.925075] env[62519]: DEBUG nova.network.neutron [req-49b8694e-2515-4659-baea-a6edf681545d req-370725e5-e617-4bc3-864f-7da3e8f2cc42 service nova] [instance: 812842c2-ac72-4d12-b2f7-3ccfe77a13a7] Updating instance_info_cache with network_info: [{"id": "16e1aad6-277c-4fb9-95ca-00a66fecb425", "address": "fa:16:3e:4e:44:c5", "network": {"id": "32c7792c-9705-4830-bdbc-4c4159d566f0", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-374750788-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.241", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d73e71476254453fb23164dce09c6d41", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a0a76279-3c11-4bef-b124-2a2ee13fa377", "external-id": "nsx-vlan-transportzone-738", "segmentation_id": 738, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap16e1aad6-27", "ovs_interfaceid": "16e1aad6-277c-4fb9-95ca-00a66fecb425", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2135.350314] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-052b7f12-2086-4a3a-b1f2-eec61d975949 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] [instance: 
812842c2-ac72-4d12-b2f7-3ccfe77a13a7] Preparing fetch location {{(pid=62519) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2135.350683] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-052b7f12-2086-4a3a-b1f2-eec61d975949 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] [instance: 812842c2-ac72-4d12-b2f7-3ccfe77a13a7] Fetch image to [datastore1] OSTACK_IMG_cbab98ea-18b9-471b-8321-87e65543fa93/OSTACK_IMG_cbab98ea-18b9-471b-8321-87e65543fa93.vmdk {{(pid=62519) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2135.350733] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-052b7f12-2086-4a3a-b1f2-eec61d975949 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] [instance: 812842c2-ac72-4d12-b2f7-3ccfe77a13a7] Downloading stream optimized image d4cf8f55-a2e4-4621-a02e-e1e02e813afd to [datastore1] OSTACK_IMG_cbab98ea-18b9-471b-8321-87e65543fa93/OSTACK_IMG_cbab98ea-18b9-471b-8321-87e65543fa93.vmdk on the data store datastore1 as vApp {{(pid=62519) _fetch_image_as_vapp /opt/stack/nova/nova/virt/vmwareapi/vmops.py:437}} [ 2135.350951] env[62519]: DEBUG nova.virt.vmwareapi.images [None req-052b7f12-2086-4a3a-b1f2-eec61d975949 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] [instance: 812842c2-ac72-4d12-b2f7-3ccfe77a13a7] Downloading image file data d4cf8f55-a2e4-4621-a02e-e1e02e813afd to the ESX as VM named 'OSTACK_IMG_cbab98ea-18b9-471b-8321-87e65543fa93' {{(pid=62519) fetch_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:336}} [ 2135.415934] env[62519]: DEBUG oslo_vmware.rw_handles [None req-052b7f12-2086-4a3a-b1f2-eec61d975949 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Creating HttpNfcLease lease for vApp import into resource pool: (val){ [ 2135.415934] env[62519]: value = "resgroup-9" [ 2135.415934] env[62519]: _type = "ResourcePool" [ 2135.415934] env[62519]: }. {{(pid=62519) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:453}} [ 2135.416237] env[62519]: DEBUG oslo_vmware.service [-] Invoking ResourcePool.ImportVApp with opID=oslo.vmware-b126f23e-8bd8-4881-99bd-68abe6cc2caa {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2135.431227] env[62519]: DEBUG oslo_concurrency.lockutils [req-49b8694e-2515-4659-baea-a6edf681545d req-370725e5-e617-4bc3-864f-7da3e8f2cc42 service nova] Releasing lock "refresh_cache-812842c2-ac72-4d12-b2f7-3ccfe77a13a7" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2135.437572] env[62519]: DEBUG oslo_vmware.rw_handles [None req-052b7f12-2086-4a3a-b1f2-eec61d975949 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Lease: (returnval){ [ 2135.437572] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]521fa566-7c34-93cf-0bb7-04c57c0cd852" [ 2135.437572] env[62519]: _type = "HttpNfcLease" [ 2135.437572] env[62519]: } obtained for vApp import into resource pool (val){ [ 2135.437572] env[62519]: value = "resgroup-9" [ 2135.437572] env[62519]: _type = "ResourcePool" [ 2135.437572] env[62519]: }. 
{{(pid=62519) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:461}} [ 2135.437914] env[62519]: DEBUG oslo_vmware.api [None req-052b7f12-2086-4a3a-b1f2-eec61d975949 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Waiting for the lease: (returnval){ [ 2135.437914] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]521fa566-7c34-93cf-0bb7-04c57c0cd852" [ 2135.437914] env[62519]: _type = "HttpNfcLease" [ 2135.437914] env[62519]: } to be ready. {{(pid=62519) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 2135.443795] env[62519]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 2135.443795] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]521fa566-7c34-93cf-0bb7-04c57c0cd852" [ 2135.443795] env[62519]: _type = "HttpNfcLease" [ 2135.443795] env[62519]: } is initializing. {{(pid=62519) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 2135.946261] env[62519]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 2135.946261] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]521fa566-7c34-93cf-0bb7-04c57c0cd852" [ 2135.946261] env[62519]: _type = "HttpNfcLease" [ 2135.946261] env[62519]: } is ready. {{(pid=62519) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 2135.946593] env[62519]: DEBUG oslo_vmware.rw_handles [None req-052b7f12-2086-4a3a-b1f2-eec61d975949 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 2135.946593] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]521fa566-7c34-93cf-0bb7-04c57c0cd852" [ 2135.946593] env[62519]: _type = "HttpNfcLease" [ 2135.946593] env[62519]: }. {{(pid=62519) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:467}} [ 2135.947249] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9dca7b11-2f3a-4bac-93d7-1c9ccee3a7eb {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2135.954219] env[62519]: DEBUG oslo_vmware.rw_handles [None req-052b7f12-2086-4a3a-b1f2-eec61d975949 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52bedbc3-8999-3d6a-83c8-1151322a1cd2/disk-0.vmdk from lease info. {{(pid=62519) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 2135.954396] env[62519]: DEBUG oslo_vmware.rw_handles [None req-052b7f12-2086-4a3a-b1f2-eec61d975949 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Creating HTTP connection to write to file with size = 31668224 and URL = https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52bedbc3-8999-3d6a-83c8-1151322a1cd2/disk-0.vmdk. 
{{(pid=62519) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2136.017892] env[62519]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-4dcb6a44-01ff-40be-9c73-0311134d3fa9 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2137.056845] env[62519]: DEBUG oslo_vmware.rw_handles [None req-052b7f12-2086-4a3a-b1f2-eec61d975949 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Completed reading data from the image iterator. {{(pid=62519) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2137.057289] env[62519]: DEBUG oslo_vmware.rw_handles [None req-052b7f12-2086-4a3a-b1f2-eec61d975949 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52bedbc3-8999-3d6a-83c8-1151322a1cd2/disk-0.vmdk. {{(pid=62519) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 2137.058131] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98b48368-68c2-42ff-a7da-acab70093559 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2137.064997] env[62519]: DEBUG oslo_vmware.rw_handles [None req-052b7f12-2086-4a3a-b1f2-eec61d975949 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52bedbc3-8999-3d6a-83c8-1151322a1cd2/disk-0.vmdk is in state: ready. {{(pid=62519) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 2137.065192] env[62519]: DEBUG oslo_vmware.rw_handles [None req-052b7f12-2086-4a3a-b1f2-eec61d975949 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Releasing lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52bedbc3-8999-3d6a-83c8-1151322a1cd2/disk-0.vmdk. {{(pid=62519) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:440}} [ 2137.065405] env[62519]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseComplete with opID=oslo.vmware-fa5852aa-668f-4a52-a002-b0dac51e3f5a {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2137.253707] env[62519]: DEBUG oslo_vmware.rw_handles [None req-052b7f12-2086-4a3a-b1f2-eec61d975949 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Closed VMDK write handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52bedbc3-8999-3d6a-83c8-1151322a1cd2/disk-0.vmdk. 
{{(pid=62519) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:646}} [ 2137.253916] env[62519]: INFO nova.virt.vmwareapi.images [None req-052b7f12-2086-4a3a-b1f2-eec61d975949 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] [instance: 812842c2-ac72-4d12-b2f7-3ccfe77a13a7] Downloaded image file data d4cf8f55-a2e4-4621-a02e-e1e02e813afd [ 2137.254821] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d864d87e-8752-4e57-8057-653fdd61e693 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2137.270126] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-021720f7-ef11-48f3-904e-db9508fb4e6e {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2137.295456] env[62519]: INFO nova.virt.vmwareapi.images [None req-052b7f12-2086-4a3a-b1f2-eec61d975949 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] [instance: 812842c2-ac72-4d12-b2f7-3ccfe77a13a7] The imported VM was unregistered [ 2137.297674] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-052b7f12-2086-4a3a-b1f2-eec61d975949 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] [instance: 812842c2-ac72-4d12-b2f7-3ccfe77a13a7] Caching image {{(pid=62519) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2137.297907] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-052b7f12-2086-4a3a-b1f2-eec61d975949 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Creating directory with path [datastore1] devstack-image-cache_base/d4cf8f55-a2e4-4621-a02e-e1e02e813afd {{(pid=62519) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2137.298168] env[62519]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-864a103c-16ac-42c4-9729-5899455ae5b1 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2137.308366] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-052b7f12-2086-4a3a-b1f2-eec61d975949 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Created directory with path [datastore1] devstack-image-cache_base/d4cf8f55-a2e4-4621-a02e-e1e02e813afd {{(pid=62519) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2137.308541] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-052b7f12-2086-4a3a-b1f2-eec61d975949 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Moving virtual disk from [datastore1] OSTACK_IMG_cbab98ea-18b9-471b-8321-87e65543fa93/OSTACK_IMG_cbab98ea-18b9-471b-8321-87e65543fa93.vmdk to [datastore1] devstack-image-cache_base/d4cf8f55-a2e4-4621-a02e-e1e02e813afd/d4cf8f55-a2e4-4621-a02e-e1e02e813afd.vmdk. 
{{(pid=62519) disk_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:263}} [ 2137.308762] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.MoveVirtualDisk_Task with opID=oslo.vmware-079213cd-1232-4e0a-947e-5bdc8986ba4a {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2137.314917] env[62519]: DEBUG oslo_vmware.api [None req-052b7f12-2086-4a3a-b1f2-eec61d975949 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Waiting for the task: (returnval){ [ 2137.314917] env[62519]: value = "task-1803599" [ 2137.314917] env[62519]: _type = "Task" [ 2137.314917] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2137.322077] env[62519]: DEBUG oslo_vmware.api [None req-052b7f12-2086-4a3a-b1f2-eec61d975949 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Task: {'id': task-1803599, 'name': MoveVirtualDisk_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2137.825885] env[62519]: DEBUG oslo_vmware.api [None req-052b7f12-2086-4a3a-b1f2-eec61d975949 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Task: {'id': task-1803599, 'name': MoveVirtualDisk_Task} progress is 24%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2138.327877] env[62519]: DEBUG oslo_vmware.api [None req-052b7f12-2086-4a3a-b1f2-eec61d975949 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Task: {'id': task-1803599, 'name': MoveVirtualDisk_Task} progress is 46%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2138.826653] env[62519]: DEBUG oslo_vmware.api [None req-052b7f12-2086-4a3a-b1f2-eec61d975949 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Task: {'id': task-1803599, 'name': MoveVirtualDisk_Task} progress is 71%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2139.327618] env[62519]: DEBUG oslo_vmware.api [None req-052b7f12-2086-4a3a-b1f2-eec61d975949 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Task: {'id': task-1803599, 'name': MoveVirtualDisk_Task} progress is 94%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2139.827651] env[62519]: DEBUG oslo_vmware.api [None req-052b7f12-2086-4a3a-b1f2-eec61d975949 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Task: {'id': task-1803599, 'name': MoveVirtualDisk_Task, 'duration_secs': 2.161038} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2139.828050] env[62519]: INFO nova.virt.vmwareapi.ds_util [None req-052b7f12-2086-4a3a-b1f2-eec61d975949 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Moved virtual disk from [datastore1] OSTACK_IMG_cbab98ea-18b9-471b-8321-87e65543fa93/OSTACK_IMG_cbab98ea-18b9-471b-8321-87e65543fa93.vmdk to [datastore1] devstack-image-cache_base/d4cf8f55-a2e4-4621-a02e-e1e02e813afd/d4cf8f55-a2e4-4621-a02e-e1e02e813afd.vmdk. 
[ 2139.828109] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-052b7f12-2086-4a3a-b1f2-eec61d975949 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] [instance: 812842c2-ac72-4d12-b2f7-3ccfe77a13a7] Cleaning up location [datastore1] OSTACK_IMG_cbab98ea-18b9-471b-8321-87e65543fa93 {{(pid=62519) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 2139.828253] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-052b7f12-2086-4a3a-b1f2-eec61d975949 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Deleting the datastore file [datastore1] OSTACK_IMG_cbab98ea-18b9-471b-8321-87e65543fa93 {{(pid=62519) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2139.828500] env[62519]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-92e8d6e5-8a4d-4d85-8762-0720271827f3 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2139.834279] env[62519]: DEBUG oslo_vmware.api [None req-052b7f12-2086-4a3a-b1f2-eec61d975949 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Waiting for the task: (returnval){ [ 2139.834279] env[62519]: value = "task-1803600" [ 2139.834279] env[62519]: _type = "Task" [ 2139.834279] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2139.841593] env[62519]: DEBUG oslo_vmware.api [None req-052b7f12-2086-4a3a-b1f2-eec61d975949 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Task: {'id': task-1803600, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2140.343366] env[62519]: DEBUG oslo_vmware.api [None req-052b7f12-2086-4a3a-b1f2-eec61d975949 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Task: {'id': task-1803600, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.031211} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2140.343606] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-052b7f12-2086-4a3a-b1f2-eec61d975949 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Deleted the datastore file {{(pid=62519) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2140.343765] env[62519]: DEBUG oslo_concurrency.lockutils [None req-052b7f12-2086-4a3a-b1f2-eec61d975949 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d4cf8f55-a2e4-4621-a02e-e1e02e813afd/d4cf8f55-a2e4-4621-a02e-e1e02e813afd.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2140.343996] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-052b7f12-2086-4a3a-b1f2-eec61d975949 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/d4cf8f55-a2e4-4621-a02e-e1e02e813afd/d4cf8f55-a2e4-4621-a02e-e1e02e813afd.vmdk to [datastore1] 812842c2-ac72-4d12-b2f7-3ccfe77a13a7/812842c2-ac72-4d12-b2f7-3ccfe77a13a7.vmdk {{(pid=62519) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2140.344248] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-4b31bcae-0722-4973-afcd-dba4e48b9b8f {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2140.350641] env[62519]: DEBUG oslo_vmware.api [None req-052b7f12-2086-4a3a-b1f2-eec61d975949 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Waiting for the task: (returnval){ [ 2140.350641] env[62519]: value = "task-1803601" [ 2140.350641] env[62519]: _type = "Task" [ 2140.350641] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2140.357608] env[62519]: DEBUG oslo_vmware.api [None req-052b7f12-2086-4a3a-b1f2-eec61d975949 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Task: {'id': task-1803601, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2140.860434] env[62519]: DEBUG oslo_vmware.api [None req-052b7f12-2086-4a3a-b1f2-eec61d975949 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Task: {'id': task-1803601, 'name': CopyVirtualDisk_Task} progress is 24%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2141.361796] env[62519]: DEBUG oslo_vmware.api [None req-052b7f12-2086-4a3a-b1f2-eec61d975949 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Task: {'id': task-1803601, 'name': CopyVirtualDisk_Task} progress is 46%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2141.862634] env[62519]: DEBUG oslo_vmware.api [None req-052b7f12-2086-4a3a-b1f2-eec61d975949 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Task: {'id': task-1803601, 'name': CopyVirtualDisk_Task} progress is 71%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2142.362374] env[62519]: DEBUG oslo_vmware.api [None req-052b7f12-2086-4a3a-b1f2-eec61d975949 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Task: {'id': task-1803601, 'name': CopyVirtualDisk_Task} progress is 97%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2142.496967] env[62519]: DEBUG oslo_service.periodic_task [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=62519) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2142.863795] env[62519]: DEBUG oslo_vmware.api [None req-052b7f12-2086-4a3a-b1f2-eec61d975949 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Task: {'id': task-1803601, 'name': CopyVirtualDisk_Task, 'duration_secs': 2.059276} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2142.864177] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-052b7f12-2086-4a3a-b1f2-eec61d975949 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/d4cf8f55-a2e4-4621-a02e-e1e02e813afd/d4cf8f55-a2e4-4621-a02e-e1e02e813afd.vmdk to [datastore1] 812842c2-ac72-4d12-b2f7-3ccfe77a13a7/812842c2-ac72-4d12-b2f7-3ccfe77a13a7.vmdk {{(pid=62519) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2142.864799] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-824bcd68-dfab-4558-b120-1929b75c5a3a {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2142.886552] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-052b7f12-2086-4a3a-b1f2-eec61d975949 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] [instance: 812842c2-ac72-4d12-b2f7-3ccfe77a13a7] Reconfiguring VM instance instance-0000007a to attach disk [datastore1] 812842c2-ac72-4d12-b2f7-3ccfe77a13a7/812842c2-ac72-4d12-b2f7-3ccfe77a13a7.vmdk or device None with type streamOptimized {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2142.886802] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1fc217d5-497d-41dd-a8f8-699fd619f312 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2142.906427] env[62519]: DEBUG oslo_vmware.api [None req-052b7f12-2086-4a3a-b1f2-eec61d975949 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Waiting for the task: (returnval){ [ 2142.906427] env[62519]: value = "task-1803602" [ 2142.906427] env[62519]: _type = "Task" [ 2142.906427] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2142.914059] env[62519]: DEBUG oslo_vmware.api [None req-052b7f12-2086-4a3a-b1f2-eec61d975949 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Task: {'id': task-1803602, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2143.417043] env[62519]: DEBUG oslo_vmware.api [None req-052b7f12-2086-4a3a-b1f2-eec61d975949 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Task: {'id': task-1803602, 'name': ReconfigVM_Task, 'duration_secs': 0.311688} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2143.417043] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-052b7f12-2086-4a3a-b1f2-eec61d975949 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] [instance: 812842c2-ac72-4d12-b2f7-3ccfe77a13a7] Reconfigured VM instance instance-0000007a to attach disk [datastore1] 812842c2-ac72-4d12-b2f7-3ccfe77a13a7/812842c2-ac72-4d12-b2f7-3ccfe77a13a7.vmdk or device None with type streamOptimized {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2143.417589] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-7071ca8f-767b-49b2-bcdf-e96eea951ce0 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2143.423260] env[62519]: DEBUG oslo_vmware.api [None req-052b7f12-2086-4a3a-b1f2-eec61d975949 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Waiting for the task: (returnval){ [ 2143.423260] env[62519]: value = "task-1803603" [ 2143.423260] env[62519]: _type = "Task" [ 2143.423260] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2143.431675] env[62519]: DEBUG oslo_vmware.api [None req-052b7f12-2086-4a3a-b1f2-eec61d975949 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Task: {'id': task-1803603, 'name': Rename_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2143.933426] env[62519]: DEBUG oslo_vmware.api [None req-052b7f12-2086-4a3a-b1f2-eec61d975949 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Task: {'id': task-1803603, 'name': Rename_Task, 'duration_secs': 0.146} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2143.933819] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-052b7f12-2086-4a3a-b1f2-eec61d975949 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] [instance: 812842c2-ac72-4d12-b2f7-3ccfe77a13a7] Powering on the VM {{(pid=62519) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2143.933974] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-d5a37fd9-8a48-422f-bba7-c389525837a5 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2143.940878] env[62519]: DEBUG oslo_vmware.api [None req-052b7f12-2086-4a3a-b1f2-eec61d975949 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Waiting for the task: (returnval){ [ 2143.940878] env[62519]: value = "task-1803604" [ 2143.940878] env[62519]: _type = "Task" [ 2143.940878] env[62519]: } to complete. 
{{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2143.948440] env[62519]: DEBUG oslo_vmware.api [None req-052b7f12-2086-4a3a-b1f2-eec61d975949 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Task: {'id': task-1803604, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2144.451196] env[62519]: DEBUG oslo_vmware.api [None req-052b7f12-2086-4a3a-b1f2-eec61d975949 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Task: {'id': task-1803604, 'name': PowerOnVM_Task, 'duration_secs': 0.415518} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2144.451517] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-052b7f12-2086-4a3a-b1f2-eec61d975949 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] [instance: 812842c2-ac72-4d12-b2f7-3ccfe77a13a7] Powered on the VM {{(pid=62519) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2144.551793] env[62519]: DEBUG nova.compute.manager [None req-052b7f12-2086-4a3a-b1f2-eec61d975949 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] [instance: 812842c2-ac72-4d12-b2f7-3ccfe77a13a7] Checking state {{(pid=62519) _get_power_state /opt/stack/nova/nova/compute/manager.py:1799}} [ 2144.552763] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a32f1744-0736-4129-941e-520d115dde40 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2145.068641] env[62519]: DEBUG oslo_concurrency.lockutils [None req-052b7f12-2086-4a3a-b1f2-eec61d975949 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Lock "812842c2-ac72-4d12-b2f7-3ccfe77a13a7" "released" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: held 19.620s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2146.283511] env[62519]: DEBUG oslo_concurrency.lockutils [None req-3d26e76d-e10d-4258-84c1-b6f01e769831 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Acquiring lock "812842c2-ac72-4d12-b2f7-3ccfe77a13a7" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2146.283921] env[62519]: DEBUG oslo_concurrency.lockutils [None req-3d26e76d-e10d-4258-84c1-b6f01e769831 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Lock "812842c2-ac72-4d12-b2f7-3ccfe77a13a7" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2146.284083] env[62519]: DEBUG oslo_concurrency.lockutils [None req-3d26e76d-e10d-4258-84c1-b6f01e769831 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Acquiring lock "812842c2-ac72-4d12-b2f7-3ccfe77a13a7-events" by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2146.284231] env[62519]: DEBUG oslo_concurrency.lockutils [None req-3d26e76d-e10d-4258-84c1-b6f01e769831 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Lock "812842c2-ac72-4d12-b2f7-3ccfe77a13a7-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2146.284426] env[62519]: DEBUG oslo_concurrency.lockutils [None req-3d26e76d-e10d-4258-84c1-b6f01e769831 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Lock "812842c2-ac72-4d12-b2f7-3ccfe77a13a7-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2146.286462] env[62519]: INFO nova.compute.manager [None req-3d26e76d-e10d-4258-84c1-b6f01e769831 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] [instance: 812842c2-ac72-4d12-b2f7-3ccfe77a13a7] Terminating instance [ 2146.790359] env[62519]: DEBUG nova.compute.manager [None req-3d26e76d-e10d-4258-84c1-b6f01e769831 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] [instance: 812842c2-ac72-4d12-b2f7-3ccfe77a13a7] Start destroying the instance on the hypervisor. {{(pid=62519) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3166}} [ 2146.790610] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-3d26e76d-e10d-4258-84c1-b6f01e769831 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] [instance: 812842c2-ac72-4d12-b2f7-3ccfe77a13a7] Destroying instance {{(pid=62519) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2146.791545] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0117264c-26ff-47e2-9958-d5e4da6f8b42 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2146.799653] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-3d26e76d-e10d-4258-84c1-b6f01e769831 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] [instance: 812842c2-ac72-4d12-b2f7-3ccfe77a13a7] Powering off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2146.799882] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ff0ac694-238f-4e2f-89bb-474c8e3d3aca {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2146.805931] env[62519]: DEBUG oslo_vmware.api [None req-3d26e76d-e10d-4258-84c1-b6f01e769831 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Waiting for the task: (returnval){ [ 2146.805931] env[62519]: value = "task-1803605" [ 2146.805931] env[62519]: _type = "Task" [ 2146.805931] env[62519]: } to complete. 
{{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2146.814079] env[62519]: DEBUG oslo_vmware.api [None req-3d26e76d-e10d-4258-84c1-b6f01e769831 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Task: {'id': task-1803605, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2147.315729] env[62519]: DEBUG oslo_vmware.api [None req-3d26e76d-e10d-4258-84c1-b6f01e769831 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Task: {'id': task-1803605, 'name': PowerOffVM_Task, 'duration_secs': 0.16963} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2147.316111] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-3d26e76d-e10d-4258-84c1-b6f01e769831 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] [instance: 812842c2-ac72-4d12-b2f7-3ccfe77a13a7] Powered off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2147.316174] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-3d26e76d-e10d-4258-84c1-b6f01e769831 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] [instance: 812842c2-ac72-4d12-b2f7-3ccfe77a13a7] Unregistering the VM {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2147.316429] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-956bb3b9-9bc0-4760-91c4-9eaa210cd689 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2147.397633] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-3d26e76d-e10d-4258-84c1-b6f01e769831 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] [instance: 812842c2-ac72-4d12-b2f7-3ccfe77a13a7] Unregistered the VM {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2147.397892] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-3d26e76d-e10d-4258-84c1-b6f01e769831 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] [instance: 812842c2-ac72-4d12-b2f7-3ccfe77a13a7] Deleting contents of the VM from datastore datastore1 {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2147.398094] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-3d26e76d-e10d-4258-84c1-b6f01e769831 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Deleting the datastore file [datastore1] 812842c2-ac72-4d12-b2f7-3ccfe77a13a7 {{(pid=62519) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2147.398369] env[62519]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-1bced764-a2cd-4fa0-a57e-75f41d0b93b6 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2147.407445] env[62519]: DEBUG oslo_vmware.api [None req-3d26e76d-e10d-4258-84c1-b6f01e769831 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Waiting for the task: (returnval){ [ 2147.407445] env[62519]: value = "task-1803607" [ 2147.407445] 
env[62519]: _type = "Task" [ 2147.407445] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2147.414640] env[62519]: DEBUG oslo_vmware.api [None req-3d26e76d-e10d-4258-84c1-b6f01e769831 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Task: {'id': task-1803607, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2147.918191] env[62519]: DEBUG oslo_vmware.api [None req-3d26e76d-e10d-4258-84c1-b6f01e769831 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Task: {'id': task-1803607, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.120076} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2147.918441] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-3d26e76d-e10d-4258-84c1-b6f01e769831 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Deleted the datastore file {{(pid=62519) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2147.918625] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-3d26e76d-e10d-4258-84c1-b6f01e769831 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] [instance: 812842c2-ac72-4d12-b2f7-3ccfe77a13a7] Deleted contents of the VM from datastore datastore1 {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2147.918792] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-3d26e76d-e10d-4258-84c1-b6f01e769831 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] [instance: 812842c2-ac72-4d12-b2f7-3ccfe77a13a7] Instance destroyed {{(pid=62519) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2147.918962] env[62519]: INFO nova.compute.manager [None req-3d26e76d-e10d-4258-84c1-b6f01e769831 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] [instance: 812842c2-ac72-4d12-b2f7-3ccfe77a13a7] Took 1.13 seconds to destroy the instance on the hypervisor. [ 2147.919225] env[62519]: DEBUG oslo.service.loopingcall [None req-3d26e76d-e10d-4258-84c1-b6f01e769831 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62519) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2147.919414] env[62519]: DEBUG nova.compute.manager [-] [instance: 812842c2-ac72-4d12-b2f7-3ccfe77a13a7] Deallocating network for instance {{(pid=62519) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2297}} [ 2147.919507] env[62519]: DEBUG nova.network.neutron [-] [instance: 812842c2-ac72-4d12-b2f7-3ccfe77a13a7] deallocate_for_instance() {{(pid=62519) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2148.368285] env[62519]: DEBUG nova.compute.manager [req-2811cefb-fa84-4b7c-8cac-f9acf8e56a2b req-c0cbaffc-3e36-4b41-8a89-e8c9ab558892 service nova] [instance: 812842c2-ac72-4d12-b2f7-3ccfe77a13a7] Received event network-vif-deleted-16e1aad6-277c-4fb9-95ca-00a66fecb425 {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 2148.368520] env[62519]: INFO nova.compute.manager [req-2811cefb-fa84-4b7c-8cac-f9acf8e56a2b req-c0cbaffc-3e36-4b41-8a89-e8c9ab558892 service nova] [instance: 812842c2-ac72-4d12-b2f7-3ccfe77a13a7] Neutron deleted interface 16e1aad6-277c-4fb9-95ca-00a66fecb425; detaching it from the instance and deleting it from the info cache [ 2148.368682] env[62519]: DEBUG nova.network.neutron [req-2811cefb-fa84-4b7c-8cac-f9acf8e56a2b req-c0cbaffc-3e36-4b41-8a89-e8c9ab558892 service nova] [instance: 812842c2-ac72-4d12-b2f7-3ccfe77a13a7] Updating instance_info_cache with network_info: [] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2148.847379] env[62519]: DEBUG nova.network.neutron [-] [instance: 812842c2-ac72-4d12-b2f7-3ccfe77a13a7] Updating instance_info_cache with network_info: [] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2148.871214] env[62519]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-2588fdbb-9e28-40aa-ac9f-7a939b2a9819 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2148.882778] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f168365f-fa5d-490b-846a-5a00836ee723 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2148.908013] env[62519]: DEBUG nova.compute.manager [req-2811cefb-fa84-4b7c-8cac-f9acf8e56a2b req-c0cbaffc-3e36-4b41-8a89-e8c9ab558892 service nova] [instance: 812842c2-ac72-4d12-b2f7-3ccfe77a13a7] Detach interface failed, port_id=16e1aad6-277c-4fb9-95ca-00a66fecb425, reason: Instance 812842c2-ac72-4d12-b2f7-3ccfe77a13a7 could not be found. {{(pid=62519) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11455}} [ 2149.350918] env[62519]: INFO nova.compute.manager [-] [instance: 812842c2-ac72-4d12-b2f7-3ccfe77a13a7] Took 1.43 seconds to deallocate network for instance. 
[ 2149.857651] env[62519]: DEBUG oslo_concurrency.lockutils [None req-3d26e76d-e10d-4258-84c1-b6f01e769831 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2149.858108] env[62519]: DEBUG oslo_concurrency.lockutils [None req-3d26e76d-e10d-4258-84c1-b6f01e769831 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2149.858178] env[62519]: DEBUG nova.objects.instance [None req-3d26e76d-e10d-4258-84c1-b6f01e769831 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Lazy-loading 'resources' on Instance uuid 812842c2-ac72-4d12-b2f7-3ccfe77a13a7 {{(pid=62519) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2150.415060] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-443d3916-aa25-4678-a61c-9a485d008cd6 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2150.423560] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec0f462e-7ab8-4804-ad1f-25997c3659a0 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2150.455591] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b259791-af54-4ecd-9f1b-95399472cc5f {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2150.463375] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2bce06f2-cb07-4708-ba75-aa78f74d0223 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2150.477259] env[62519]: DEBUG nova.compute.provider_tree [None req-3d26e76d-e10d-4258-84c1-b6f01e769831 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Inventory has not changed in ProviderTree for provider: f8ca0d98-9158-4b85-ae0e-b106f966dd44 {{(pid=62519) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2150.980581] env[62519]: DEBUG nova.scheduler.client.report [None req-3d26e76d-e10d-4258-84c1-b6f01e769831 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Inventory has not changed for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 159, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2151.485763] env[62519]: DEBUG oslo_concurrency.lockutils [None req-3d26e76d-e10d-4258-84c1-b6f01e769831 
tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.628s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2151.505915] env[62519]: INFO nova.scheduler.client.report [None req-3d26e76d-e10d-4258-84c1-b6f01e769831 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Deleted allocations for instance 812842c2-ac72-4d12-b2f7-3ccfe77a13a7 [ 2152.013575] env[62519]: DEBUG oslo_concurrency.lockutils [None req-3d26e76d-e10d-4258-84c1-b6f01e769831 tempest-ServerActionsTestOtherB-1162239603 tempest-ServerActionsTestOtherB-1162239603-project-member] Lock "812842c2-ac72-4d12-b2f7-3ccfe77a13a7" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.730s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2158.418019] env[62519]: DEBUG oslo_concurrency.lockutils [None req-32923d56-089e-4cea-9fce-54c7e4cc30f6 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Acquiring lock "737d119d-943f-4026-8b56-a4856efb6e87" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2158.418387] env[62519]: DEBUG oslo_concurrency.lockutils [None req-32923d56-089e-4cea-9fce-54c7e4cc30f6 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Lock "737d119d-943f-4026-8b56-a4856efb6e87" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2158.418463] env[62519]: DEBUG nova.compute.manager [None req-32923d56-089e-4cea-9fce-54c7e4cc30f6 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] [instance: 737d119d-943f-4026-8b56-a4856efb6e87] Checking state {{(pid=62519) _get_power_state /opt/stack/nova/nova/compute/manager.py:1799}} [ 2158.420741] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66a114f9-9c9b-4005-ac7a-f528141abf74 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2158.427828] env[62519]: DEBUG nova.compute.manager [None req-32923d56-089e-4cea-9fce-54c7e4cc30f6 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] [instance: 737d119d-943f-4026-8b56-a4856efb6e87] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=62519) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3408}} [ 2158.428382] env[62519]: DEBUG nova.objects.instance [None req-32923d56-089e-4cea-9fce-54c7e4cc30f6 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Lazy-loading 'flavor' on Instance uuid 737d119d-943f-4026-8b56-a4856efb6e87 {{(pid=62519) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2158.880611] env[62519]: DEBUG oslo_concurrency.lockutils [None req-093e4662-1773-469e-944c-01bb36796ea8 tempest-AttachVolumeShelveTestJSON-1488627157 
tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Acquiring lock "f0ebedcb-c064-4604-be62-7c7d5d419864" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2158.880948] env[62519]: DEBUG oslo_concurrency.lockutils [None req-093e4662-1773-469e-944c-01bb36796ea8 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Lock "f0ebedcb-c064-4604-be62-7c7d5d419864" acquired by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: waited 0.001s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2158.881195] env[62519]: INFO nova.compute.manager [None req-093e4662-1773-469e-944c-01bb36796ea8 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] [instance: f0ebedcb-c064-4604-be62-7c7d5d419864] Shelving [ 2159.435049] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-32923d56-089e-4cea-9fce-54c7e4cc30f6 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] [instance: 737d119d-943f-4026-8b56-a4856efb6e87] Powering off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2159.435049] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-666d788c-516f-49f2-a677-80f41f2658ad {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2159.442664] env[62519]: DEBUG oslo_vmware.api [None req-32923d56-089e-4cea-9fce-54c7e4cc30f6 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Waiting for the task: (returnval){ [ 2159.442664] env[62519]: value = "task-1803609" [ 2159.442664] env[62519]: _type = "Task" [ 2159.442664] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2159.450920] env[62519]: DEBUG oslo_vmware.api [None req-32923d56-089e-4cea-9fce-54c7e4cc30f6 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Task: {'id': task-1803609, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2159.890862] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-093e4662-1773-469e-944c-01bb36796ea8 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] [instance: f0ebedcb-c064-4604-be62-7c7d5d419864] Powering off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2159.891179] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-4edf9873-e1b3-4603-b217-138f523c91b4 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2159.898266] env[62519]: DEBUG oslo_vmware.api [None req-093e4662-1773-469e-944c-01bb36796ea8 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Waiting for the task: (returnval){ [ 2159.898266] env[62519]: value = "task-1803610" [ 2159.898266] env[62519]: _type = "Task" [ 2159.898266] env[62519]: } to complete. 
{{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2159.906703] env[62519]: DEBUG oslo_vmware.api [None req-093e4662-1773-469e-944c-01bb36796ea8 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Task: {'id': task-1803610, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2159.953318] env[62519]: DEBUG oslo_vmware.api [None req-32923d56-089e-4cea-9fce-54c7e4cc30f6 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Task: {'id': task-1803609, 'name': PowerOffVM_Task, 'duration_secs': 0.197127} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2159.953618] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-32923d56-089e-4cea-9fce-54c7e4cc30f6 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] [instance: 737d119d-943f-4026-8b56-a4856efb6e87] Powered off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2159.953858] env[62519]: DEBUG nova.compute.manager [None req-32923d56-089e-4cea-9fce-54c7e4cc30f6 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] [instance: 737d119d-943f-4026-8b56-a4856efb6e87] Checking state {{(pid=62519) _get_power_state /opt/stack/nova/nova/compute/manager.py:1799}} [ 2159.954706] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4421c90-21e2-48ea-870d-134c7db561ec {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2160.407978] env[62519]: DEBUG oslo_vmware.api [None req-093e4662-1773-469e-944c-01bb36796ea8 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Task: {'id': task-1803610, 'name': PowerOffVM_Task, 'duration_secs': 0.155565} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2160.408264] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-093e4662-1773-469e-944c-01bb36796ea8 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] [instance: f0ebedcb-c064-4604-be62-7c7d5d419864] Powered off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2160.409038] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b75cb8a2-ad48-4b97-a36f-75364ea62d48 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2160.427723] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-745b111a-af16-4e65-a667-dacd7f65c197 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2160.468555] env[62519]: DEBUG oslo_concurrency.lockutils [None req-32923d56-089e-4cea-9fce-54c7e4cc30f6 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Lock "737d119d-943f-4026-8b56-a4856efb6e87" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 2.050s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2160.938603] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-093e4662-1773-469e-944c-01bb36796ea8 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] [instance: f0ebedcb-c064-4604-be62-7c7d5d419864] Creating Snapshot of the VM instance {{(pid=62519) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 2160.939088] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-58c7ead2-058f-432a-8db8-da8962e72a41 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2160.947180] env[62519]: DEBUG oslo_vmware.api [None req-093e4662-1773-469e-944c-01bb36796ea8 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Waiting for the task: (returnval){ [ 2160.947180] env[62519]: value = "task-1803611" [ 2160.947180] env[62519]: _type = "Task" [ 2160.947180] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2160.955041] env[62519]: DEBUG oslo_vmware.api [None req-093e4662-1773-469e-944c-01bb36796ea8 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Task: {'id': task-1803611, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2160.974147] env[62519]: DEBUG nova.objects.instance [None req-d0c7731b-b6ca-4aa3-b37f-b5c5fce775c2 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Lazy-loading 'flavor' on Instance uuid 737d119d-943f-4026-8b56-a4856efb6e87 {{(pid=62519) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2161.460844] env[62519]: DEBUG oslo_vmware.api [None req-093e4662-1773-469e-944c-01bb36796ea8 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Task: {'id': task-1803611, 'name': CreateSnapshot_Task, 'duration_secs': 0.414129} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2161.461255] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-093e4662-1773-469e-944c-01bb36796ea8 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] [instance: f0ebedcb-c064-4604-be62-7c7d5d419864] Created Snapshot of the VM instance {{(pid=62519) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 2161.462363] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce16666d-a962-441a-83a7-beb2435ce5fb {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2161.479189] env[62519]: DEBUG oslo_concurrency.lockutils [None req-d0c7731b-b6ca-4aa3-b37f-b5c5fce775c2 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Acquiring lock "refresh_cache-737d119d-943f-4026-8b56-a4856efb6e87" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2161.479606] env[62519]: DEBUG oslo_concurrency.lockutils [None req-d0c7731b-b6ca-4aa3-b37f-b5c5fce775c2 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Acquired lock "refresh_cache-737d119d-943f-4026-8b56-a4856efb6e87" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2161.479719] env[62519]: DEBUG nova.network.neutron [None req-d0c7731b-b6ca-4aa3-b37f-b5c5fce775c2 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] [instance: 737d119d-943f-4026-8b56-a4856efb6e87] Building network info cache for instance {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2161.480014] env[62519]: DEBUG nova.objects.instance [None req-d0c7731b-b6ca-4aa3-b37f-b5c5fce775c2 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Lazy-loading 'info_cache' on Instance uuid 737d119d-943f-4026-8b56-a4856efb6e87 {{(pid=62519) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2161.983865] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-093e4662-1773-469e-944c-01bb36796ea8 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] [instance: f0ebedcb-c064-4604-be62-7c7d5d419864] Creating linked-clone VM from snapshot {{(pid=62519) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 2161.985364] env[62519]: DEBUG nova.objects.base [None req-d0c7731b-b6ca-4aa3-b37f-b5c5fce775c2 tempest-ServerActionsTestJSON-347062103 
tempest-ServerActionsTestJSON-347062103-project-member] Object Instance<737d119d-943f-4026-8b56-a4856efb6e87> lazy-loaded attributes: flavor,info_cache {{(pid=62519) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 2161.986484] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-1e752c29-9674-4dc5-b178-a76f3e9b2125 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2161.995493] env[62519]: DEBUG oslo_vmware.api [None req-093e4662-1773-469e-944c-01bb36796ea8 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Waiting for the task: (returnval){ [ 2161.995493] env[62519]: value = "task-1803612" [ 2161.995493] env[62519]: _type = "Task" [ 2161.995493] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2162.003898] env[62519]: DEBUG oslo_vmware.api [None req-093e4662-1773-469e-944c-01bb36796ea8 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Task: {'id': task-1803612, 'name': CloneVM_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2162.505784] env[62519]: DEBUG oslo_vmware.api [None req-093e4662-1773-469e-944c-01bb36796ea8 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Task: {'id': task-1803612, 'name': CloneVM_Task} progress is 94%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2162.707596] env[62519]: DEBUG nova.network.neutron [None req-d0c7731b-b6ca-4aa3-b37f-b5c5fce775c2 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] [instance: 737d119d-943f-4026-8b56-a4856efb6e87] Updating instance_info_cache with network_info: [{"id": "90575ec3-2f97-43ef-97d9-21e1847868c9", "address": "fa:16:3e:59:fe:ac", "network": {"id": "40fc0e4f-fe1a-4b8c-ace4-4612cfc1a8fc", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-364463489-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.170", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "12977ed65a1b410a987b049e9d1dce3e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8e028024-a9c1-4cae-8849-ea770a7ae0e4", "external-id": "nsx-vlan-transportzone-919", "segmentation_id": 919, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap90575ec3-2f", "ovs_interfaceid": "90575ec3-2f97-43ef-97d9-21e1847868c9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2163.006264] env[62519]: DEBUG oslo_vmware.api [None req-093e4662-1773-469e-944c-01bb36796ea8 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Task: {'id': 
task-1803612, 'name': CloneVM_Task} progress is 95%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2163.210264] env[62519]: DEBUG oslo_concurrency.lockutils [None req-d0c7731b-b6ca-4aa3-b37f-b5c5fce775c2 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Releasing lock "refresh_cache-737d119d-943f-4026-8b56-a4856efb6e87" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2163.507553] env[62519]: DEBUG oslo_vmware.api [None req-093e4662-1773-469e-944c-01bb36796ea8 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Task: {'id': task-1803612, 'name': CloneVM_Task, 'duration_secs': 1.119611} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2163.507934] env[62519]: INFO nova.virt.vmwareapi.vmops [None req-093e4662-1773-469e-944c-01bb36796ea8 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] [instance: f0ebedcb-c064-4604-be62-7c7d5d419864] Created linked-clone VM from snapshot [ 2163.508557] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae611def-0006-482b-ba1c-0fb14dbd1ac2 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2163.516779] env[62519]: DEBUG nova.virt.vmwareapi.images [None req-093e4662-1773-469e-944c-01bb36796ea8 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] [instance: f0ebedcb-c064-4604-be62-7c7d5d419864] Uploading image bd38827d-3795-4ced-9a0b-c7591065ec1e {{(pid=62519) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 2163.542187] env[62519]: DEBUG oslo_vmware.rw_handles [None req-093e4662-1773-469e-944c-01bb36796ea8 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 2163.542187] env[62519]: value = "vm-373895" [ 2163.542187] env[62519]: _type = "VirtualMachine" [ 2163.542187] env[62519]: }. {{(pid=62519) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 2163.542447] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-6839ffca-df63-4d03-8e95-bb70ad626627 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2163.548597] env[62519]: DEBUG oslo_vmware.rw_handles [None req-093e4662-1773-469e-944c-01bb36796ea8 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Lease: (returnval){ [ 2163.548597] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]52efebb8-a0d4-6bf3-20ac-cea1212c3fb8" [ 2163.548597] env[62519]: _type = "HttpNfcLease" [ 2163.548597] env[62519]: } obtained for exporting VM: (result){ [ 2163.548597] env[62519]: value = "vm-373895" [ 2163.548597] env[62519]: _type = "VirtualMachine" [ 2163.548597] env[62519]: }. 
{{(pid=62519) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 2163.548921] env[62519]: DEBUG oslo_vmware.api [None req-093e4662-1773-469e-944c-01bb36796ea8 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Waiting for the lease: (returnval){ [ 2163.548921] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]52efebb8-a0d4-6bf3-20ac-cea1212c3fb8" [ 2163.548921] env[62519]: _type = "HttpNfcLease" [ 2163.548921] env[62519]: } to be ready. {{(pid=62519) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 2163.554722] env[62519]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 2163.554722] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]52efebb8-a0d4-6bf3-20ac-cea1212c3fb8" [ 2163.554722] env[62519]: _type = "HttpNfcLease" [ 2163.554722] env[62519]: } is initializing. {{(pid=62519) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 2164.057237] env[62519]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 2164.057237] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]52efebb8-a0d4-6bf3-20ac-cea1212c3fb8" [ 2164.057237] env[62519]: _type = "HttpNfcLease" [ 2164.057237] env[62519]: } is ready. {{(pid=62519) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 2164.057492] env[62519]: DEBUG oslo_vmware.rw_handles [None req-093e4662-1773-469e-944c-01bb36796ea8 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 2164.057492] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]52efebb8-a0d4-6bf3-20ac-cea1212c3fb8" [ 2164.057492] env[62519]: _type = "HttpNfcLease" [ 2164.057492] env[62519]: }. {{(pid=62519) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 2164.058199] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f59566b7-df50-42c6-becd-1ec652f3aa4a {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2164.065358] env[62519]: DEBUG oslo_vmware.rw_handles [None req-093e4662-1773-469e-944c-01bb36796ea8 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/5235b495-ae98-b197-bdab-aea74fcc7924/disk-0.vmdk from lease info. {{(pid=62519) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 2164.065528] env[62519]: DEBUG oslo_vmware.rw_handles [None req-093e4662-1773-469e-944c-01bb36796ea8 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Opening URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/5235b495-ae98-b197-bdab-aea74fcc7924/disk-0.vmdk for reading. 
{{(pid=62519) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 2164.151837] env[62519]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-e2774fad-62db-4fdb-991a-f2028eb7a882 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2164.216509] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-d0c7731b-b6ca-4aa3-b37f-b5c5fce775c2 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] [instance: 737d119d-943f-4026-8b56-a4856efb6e87] Powering on the VM {{(pid=62519) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2164.216787] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-cfbfa375-c265-4dfa-a1ae-329f778388dc {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2164.223363] env[62519]: DEBUG oslo_vmware.api [None req-d0c7731b-b6ca-4aa3-b37f-b5c5fce775c2 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Waiting for the task: (returnval){ [ 2164.223363] env[62519]: value = "task-1803614" [ 2164.223363] env[62519]: _type = "Task" [ 2164.223363] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2164.230800] env[62519]: DEBUG oslo_vmware.api [None req-d0c7731b-b6ca-4aa3-b37f-b5c5fce775c2 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Task: {'id': task-1803614, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2164.734715] env[62519]: DEBUG oslo_vmware.api [None req-d0c7731b-b6ca-4aa3-b37f-b5c5fce775c2 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Task: {'id': task-1803614, 'name': PowerOnVM_Task, 'duration_secs': 0.381773} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2164.735113] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-d0c7731b-b6ca-4aa3-b37f-b5c5fce775c2 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] [instance: 737d119d-943f-4026-8b56-a4856efb6e87] Powered on the VM {{(pid=62519) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2164.735346] env[62519]: DEBUG nova.compute.manager [None req-d0c7731b-b6ca-4aa3-b37f-b5c5fce775c2 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] [instance: 737d119d-943f-4026-8b56-a4856efb6e87] Checking state {{(pid=62519) _get_power_state /opt/stack/nova/nova/compute/manager.py:1799}} [ 2164.736342] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98fb85b5-baaf-4851-9491-e45c7a99f924 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2166.015652] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7c2ed9b-3c23-4d4f-984f-25ad11549e0b {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2166.023805] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-25acaffb-ab6f-422b-ad01-6de6c1809f0b tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] [instance: 737d119d-943f-4026-8b56-a4856efb6e87] Suspending the VM {{(pid=62519) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1162}} [ 2166.024074] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.SuspendVM_Task with opID=oslo.vmware-4fa3d492-24b5-489c-8ac7-ad013a9e86a5 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2166.030648] env[62519]: DEBUG oslo_vmware.api [None req-25acaffb-ab6f-422b-ad01-6de6c1809f0b tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Waiting for the task: (returnval){ [ 2166.030648] env[62519]: value = "task-1803615" [ 2166.030648] env[62519]: _type = "Task" [ 2166.030648] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2166.038685] env[62519]: DEBUG oslo_vmware.api [None req-25acaffb-ab6f-422b-ad01-6de6c1809f0b tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Task: {'id': task-1803615, 'name': SuspendVM_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2166.543277] env[62519]: DEBUG oslo_vmware.api [None req-25acaffb-ab6f-422b-ad01-6de6c1809f0b tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Task: {'id': task-1803615, 'name': SuspendVM_Task} progress is 58%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2167.041433] env[62519]: DEBUG oslo_vmware.api [None req-25acaffb-ab6f-422b-ad01-6de6c1809f0b tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Task: {'id': task-1803615, 'name': SuspendVM_Task, 'duration_secs': 0.763759} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2167.041811] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-25acaffb-ab6f-422b-ad01-6de6c1809f0b tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] [instance: 737d119d-943f-4026-8b56-a4856efb6e87] Suspended the VM {{(pid=62519) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1166}} [ 2167.041948] env[62519]: DEBUG nova.compute.manager [None req-25acaffb-ab6f-422b-ad01-6de6c1809f0b tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] [instance: 737d119d-943f-4026-8b56-a4856efb6e87] Checking state {{(pid=62519) _get_power_state /opt/stack/nova/nova/compute/manager.py:1799}} [ 2167.042719] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16f0c262-497d-46e5-8913-1afaacd00b93 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2168.361691] env[62519]: INFO nova.compute.manager [None req-c87d5f75-afab-43e1-a791-71d7fb707bab tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] [instance: 737d119d-943f-4026-8b56-a4856efb6e87] Resuming [ 2168.362433] env[62519]: DEBUG nova.objects.instance [None req-c87d5f75-afab-43e1-a791-71d7fb707bab tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Lazy-loading 'flavor' on Instance uuid 737d119d-943f-4026-8b56-a4856efb6e87 {{(pid=62519) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2169.874095] env[62519]: DEBUG oslo_concurrency.lockutils [None req-c87d5f75-afab-43e1-a791-71d7fb707bab tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Acquiring lock "refresh_cache-737d119d-943f-4026-8b56-a4856efb6e87" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2169.874479] env[62519]: DEBUG oslo_concurrency.lockutils [None req-c87d5f75-afab-43e1-a791-71d7fb707bab tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Acquired lock "refresh_cache-737d119d-943f-4026-8b56-a4856efb6e87" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2169.874479] env[62519]: DEBUG nova.network.neutron [None req-c87d5f75-afab-43e1-a791-71d7fb707bab tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] [instance: 737d119d-943f-4026-8b56-a4856efb6e87] Building network info cache for instance {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2170.595473] env[62519]: DEBUG nova.network.neutron [None req-c87d5f75-afab-43e1-a791-71d7fb707bab tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] [instance: 737d119d-943f-4026-8b56-a4856efb6e87] Updating instance_info_cache with network_info: [{"id": "90575ec3-2f97-43ef-97d9-21e1847868c9", "address": "fa:16:3e:59:fe:ac", "network": {"id": "40fc0e4f-fe1a-4b8c-ace4-4612cfc1a8fc", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-364463489-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": 
"10.180.180.170", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "12977ed65a1b410a987b049e9d1dce3e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8e028024-a9c1-4cae-8849-ea770a7ae0e4", "external-id": "nsx-vlan-transportzone-919", "segmentation_id": 919, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap90575ec3-2f", "ovs_interfaceid": "90575ec3-2f97-43ef-97d9-21e1847868c9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2171.098449] env[62519]: DEBUG oslo_concurrency.lockutils [None req-c87d5f75-afab-43e1-a791-71d7fb707bab tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Releasing lock "refresh_cache-737d119d-943f-4026-8b56-a4856efb6e87" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2171.099520] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-755167c2-17d9-4f26-a6e0-bad41f112f21 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2171.106577] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-c87d5f75-afab-43e1-a791-71d7fb707bab tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] [instance: 737d119d-943f-4026-8b56-a4856efb6e87] Resuming the VM {{(pid=62519) resume /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1183}} [ 2171.106830] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-bef5a30f-604d-465b-9206-c80bdcd4275e {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2171.113567] env[62519]: DEBUG oslo_vmware.api [None req-c87d5f75-afab-43e1-a791-71d7fb707bab tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Waiting for the task: (returnval){ [ 2171.113567] env[62519]: value = "task-1803616" [ 2171.113567] env[62519]: _type = "Task" [ 2171.113567] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2171.123723] env[62519]: DEBUG oslo_vmware.api [None req-c87d5f75-afab-43e1-a791-71d7fb707bab tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Task: {'id': task-1803616, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2171.624122] env[62519]: DEBUG oslo_vmware.api [None req-c87d5f75-afab-43e1-a791-71d7fb707bab tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Task: {'id': task-1803616, 'name': PowerOnVM_Task} progress is 93%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2172.125193] env[62519]: DEBUG oslo_vmware.api [None req-c87d5f75-afab-43e1-a791-71d7fb707bab tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Task: {'id': task-1803616, 'name': PowerOnVM_Task, 'duration_secs': 0.698872} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2172.125639] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-c87d5f75-afab-43e1-a791-71d7fb707bab tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] [instance: 737d119d-943f-4026-8b56-a4856efb6e87] Resumed the VM {{(pid=62519) resume /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1188}} [ 2172.125732] env[62519]: DEBUG nova.compute.manager [None req-c87d5f75-afab-43e1-a791-71d7fb707bab tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] [instance: 737d119d-943f-4026-8b56-a4856efb6e87] Checking state {{(pid=62519) _get_power_state /opt/stack/nova/nova/compute/manager.py:1799}} [ 2172.126707] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ccbffd9a-b76c-417b-95f2-add80c6470cb {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2172.241561] env[62519]: DEBUG oslo_vmware.rw_handles [None req-093e4662-1773-469e-944c-01bb36796ea8 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/5235b495-ae98-b197-bdab-aea74fcc7924/disk-0.vmdk. {{(pid=62519) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 2172.242819] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6cd3433f-7c89-4c5c-b3d0-5aaf8c53172b {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2172.249964] env[62519]: DEBUG oslo_vmware.rw_handles [None req-093e4662-1773-469e-944c-01bb36796ea8 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/5235b495-ae98-b197-bdab-aea74fcc7924/disk-0.vmdk is in state: ready. {{(pid=62519) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 2172.250158] env[62519]: ERROR oslo_vmware.rw_handles [None req-093e4662-1773-469e-944c-01bb36796ea8 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Aborting lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/5235b495-ae98-b197-bdab-aea74fcc7924/disk-0.vmdk due to incomplete transfer. 
[ 2172.250378] env[62519]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-c988ff76-6274-4e47-ae8b-e4d2bdc2283c {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2172.257345] env[62519]: DEBUG oslo_vmware.rw_handles [None req-093e4662-1773-469e-944c-01bb36796ea8 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Closed VMDK read handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/5235b495-ae98-b197-bdab-aea74fcc7924/disk-0.vmdk. {{(pid=62519) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 2172.257538] env[62519]: DEBUG nova.virt.vmwareapi.images [None req-093e4662-1773-469e-944c-01bb36796ea8 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] [instance: f0ebedcb-c064-4604-be62-7c7d5d419864] Uploaded image bd38827d-3795-4ced-9a0b-c7591065ec1e to the Glance image server {{(pid=62519) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 2172.260136] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-093e4662-1773-469e-944c-01bb36796ea8 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] [instance: f0ebedcb-c064-4604-be62-7c7d5d419864] Destroying the VM {{(pid=62519) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 2172.260365] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-143b53c5-097a-49cb-8442-b61799a3862b {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2172.265938] env[62519]: DEBUG oslo_vmware.api [None req-093e4662-1773-469e-944c-01bb36796ea8 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Waiting for the task: (returnval){ [ 2172.265938] env[62519]: value = "task-1803617" [ 2172.265938] env[62519]: _type = "Task" [ 2172.265938] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2172.273322] env[62519]: DEBUG oslo_vmware.api [None req-093e4662-1773-469e-944c-01bb36796ea8 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Task: {'id': task-1803617, 'name': Destroy_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2172.775851] env[62519]: DEBUG oslo_vmware.api [None req-093e4662-1773-469e-944c-01bb36796ea8 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Task: {'id': task-1803617, 'name': Destroy_Task} progress is 33%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2173.075457] env[62519]: DEBUG oslo_concurrency.lockutils [None req-aef961cc-8fc7-4a25-9032-caac536b3a61 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Acquiring lock "737d119d-943f-4026-8b56-a4856efb6e87" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2173.075730] env[62519]: DEBUG oslo_concurrency.lockutils [None req-aef961cc-8fc7-4a25-9032-caac536b3a61 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Lock "737d119d-943f-4026-8b56-a4856efb6e87" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2173.075950] env[62519]: DEBUG oslo_concurrency.lockutils [None req-aef961cc-8fc7-4a25-9032-caac536b3a61 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Acquiring lock "737d119d-943f-4026-8b56-a4856efb6e87-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2173.076148] env[62519]: DEBUG oslo_concurrency.lockutils [None req-aef961cc-8fc7-4a25-9032-caac536b3a61 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Lock "737d119d-943f-4026-8b56-a4856efb6e87-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2173.076326] env[62519]: DEBUG oslo_concurrency.lockutils [None req-aef961cc-8fc7-4a25-9032-caac536b3a61 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Lock "737d119d-943f-4026-8b56-a4856efb6e87-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2173.078511] env[62519]: INFO nova.compute.manager [None req-aef961cc-8fc7-4a25-9032-caac536b3a61 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] [instance: 737d119d-943f-4026-8b56-a4856efb6e87] Terminating instance [ 2173.276653] env[62519]: DEBUG oslo_vmware.api [None req-093e4662-1773-469e-944c-01bb36796ea8 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Task: {'id': task-1803617, 'name': Destroy_Task} progress is 33%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2173.582200] env[62519]: DEBUG nova.compute.manager [None req-aef961cc-8fc7-4a25-9032-caac536b3a61 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] [instance: 737d119d-943f-4026-8b56-a4856efb6e87] Start destroying the instance on the hypervisor. 
{{(pid=62519) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3166}} [ 2173.582499] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-aef961cc-8fc7-4a25-9032-caac536b3a61 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] [instance: 737d119d-943f-4026-8b56-a4856efb6e87] Destroying instance {{(pid=62519) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2173.583345] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4541761-9619-4bac-b9f0-643db024ef5b {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2173.590898] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-aef961cc-8fc7-4a25-9032-caac536b3a61 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] [instance: 737d119d-943f-4026-8b56-a4856efb6e87] Powering off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2173.591132] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-975468f5-55e0-4dff-9a87-2bc907424725 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2173.596477] env[62519]: DEBUG oslo_vmware.api [None req-aef961cc-8fc7-4a25-9032-caac536b3a61 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Waiting for the task: (returnval){ [ 2173.596477] env[62519]: value = "task-1803618" [ 2173.596477] env[62519]: _type = "Task" [ 2173.596477] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2173.603446] env[62519]: DEBUG oslo_vmware.api [None req-aef961cc-8fc7-4a25-9032-caac536b3a61 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Task: {'id': task-1803618, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2173.778977] env[62519]: DEBUG oslo_vmware.api [None req-093e4662-1773-469e-944c-01bb36796ea8 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Task: {'id': task-1803617, 'name': Destroy_Task, 'duration_secs': 1.298888} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2173.779269] env[62519]: INFO nova.virt.vmwareapi.vm_util [None req-093e4662-1773-469e-944c-01bb36796ea8 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] [instance: f0ebedcb-c064-4604-be62-7c7d5d419864] Destroyed the VM [ 2173.779514] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-093e4662-1773-469e-944c-01bb36796ea8 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] [instance: f0ebedcb-c064-4604-be62-7c7d5d419864] Deleting Snapshot of the VM instance {{(pid=62519) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 2173.779759] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-137fa9e2-ff95-418d-b71b-b0fb75452b68 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2173.786390] env[62519]: DEBUG oslo_vmware.api [None req-093e4662-1773-469e-944c-01bb36796ea8 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Waiting for the task: (returnval){ [ 2173.786390] env[62519]: value = "task-1803619" [ 2173.786390] env[62519]: _type = "Task" [ 2173.786390] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2173.793730] env[62519]: DEBUG oslo_vmware.api [None req-093e4662-1773-469e-944c-01bb36796ea8 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Task: {'id': task-1803619, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2174.106504] env[62519]: DEBUG oslo_vmware.api [None req-aef961cc-8fc7-4a25-9032-caac536b3a61 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Task: {'id': task-1803618, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2174.295957] env[62519]: DEBUG oslo_vmware.api [None req-093e4662-1773-469e-944c-01bb36796ea8 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Task: {'id': task-1803619, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2174.608467] env[62519]: DEBUG oslo_vmware.api [None req-aef961cc-8fc7-4a25-9032-caac536b3a61 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Task: {'id': task-1803618, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2174.797781] env[62519]: DEBUG oslo_vmware.api [None req-093e4662-1773-469e-944c-01bb36796ea8 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Task: {'id': task-1803619, 'name': RemoveSnapshot_Task} progress is 100%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2175.108788] env[62519]: DEBUG oslo_vmware.api [None req-aef961cc-8fc7-4a25-9032-caac536b3a61 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Task: {'id': task-1803618, 'name': PowerOffVM_Task, 'duration_secs': 1.131747} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2175.108788] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-aef961cc-8fc7-4a25-9032-caac536b3a61 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] [instance: 737d119d-943f-4026-8b56-a4856efb6e87] Powered off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2175.108957] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-aef961cc-8fc7-4a25-9032-caac536b3a61 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] [instance: 737d119d-943f-4026-8b56-a4856efb6e87] Unregistering the VM {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2175.109224] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-8223cac3-bdde-4f42-9a78-fe945ad01f0c {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2175.180892] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-aef961cc-8fc7-4a25-9032-caac536b3a61 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] [instance: 737d119d-943f-4026-8b56-a4856efb6e87] Unregistered the VM {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2175.181125] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-aef961cc-8fc7-4a25-9032-caac536b3a61 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] [instance: 737d119d-943f-4026-8b56-a4856efb6e87] Deleting contents of the VM from datastore datastore1 {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2175.181279] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-aef961cc-8fc7-4a25-9032-caac536b3a61 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Deleting the datastore file [datastore1] 737d119d-943f-4026-8b56-a4856efb6e87 {{(pid=62519) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2175.181534] env[62519]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-dabeb081-010d-49ee-8961-fe72235b4f51 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2175.188397] env[62519]: DEBUG oslo_vmware.api [None req-aef961cc-8fc7-4a25-9032-caac536b3a61 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Waiting for the task: (returnval){ [ 2175.188397] env[62519]: value = "task-1803621" [ 2175.188397] env[62519]: _type = "Task" [ 2175.188397] env[62519]: } to complete. 
{{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2175.195725] env[62519]: DEBUG oslo_vmware.api [None req-aef961cc-8fc7-4a25-9032-caac536b3a61 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Task: {'id': task-1803621, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2175.297671] env[62519]: DEBUG oslo_vmware.api [None req-093e4662-1773-469e-944c-01bb36796ea8 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Task: {'id': task-1803619, 'name': RemoveSnapshot_Task, 'duration_secs': 1.066547} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2175.298027] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-093e4662-1773-469e-944c-01bb36796ea8 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] [instance: f0ebedcb-c064-4604-be62-7c7d5d419864] Deleted Snapshot of the VM instance {{(pid=62519) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 2175.298266] env[62519]: DEBUG nova.compute.manager [None req-093e4662-1773-469e-944c-01bb36796ea8 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] [instance: f0ebedcb-c064-4604-be62-7c7d5d419864] Checking state {{(pid=62519) _get_power_state /opt/stack/nova/nova/compute/manager.py:1799}} [ 2175.299059] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5381cc80-0e6a-4a1d-bc0b-aec8a0393d31 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2175.698776] env[62519]: DEBUG oslo_vmware.api [None req-aef961cc-8fc7-4a25-9032-caac536b3a61 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Task: {'id': task-1803621, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.157665} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2175.699029] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-aef961cc-8fc7-4a25-9032-caac536b3a61 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Deleted the datastore file {{(pid=62519) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2175.699223] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-aef961cc-8fc7-4a25-9032-caac536b3a61 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] [instance: 737d119d-943f-4026-8b56-a4856efb6e87] Deleted contents of the VM from datastore datastore1 {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2175.699397] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-aef961cc-8fc7-4a25-9032-caac536b3a61 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] [instance: 737d119d-943f-4026-8b56-a4856efb6e87] Instance destroyed {{(pid=62519) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2175.699567] env[62519]: INFO nova.compute.manager [None req-aef961cc-8fc7-4a25-9032-caac536b3a61 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] [instance: 737d119d-943f-4026-8b56-a4856efb6e87] Took 2.12 seconds to destroy the instance on the hypervisor. [ 2175.699825] env[62519]: DEBUG oslo.service.loopingcall [None req-aef961cc-8fc7-4a25-9032-caac536b3a61 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62519) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2175.700027] env[62519]: DEBUG nova.compute.manager [-] [instance: 737d119d-943f-4026-8b56-a4856efb6e87] Deallocating network for instance {{(pid=62519) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2297}} [ 2175.700128] env[62519]: DEBUG nova.network.neutron [-] [instance: 737d119d-943f-4026-8b56-a4856efb6e87] deallocate_for_instance() {{(pid=62519) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2175.810597] env[62519]: INFO nova.compute.manager [None req-093e4662-1773-469e-944c-01bb36796ea8 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] [instance: f0ebedcb-c064-4604-be62-7c7d5d419864] Shelve offloading [ 2176.141932] env[62519]: DEBUG nova.compute.manager [req-589c92a9-ba1a-49c2-ba04-e5f1bf7544c2 req-be478c5f-62a4-4fc9-a370-5039b1f08fd7 service nova] [instance: 737d119d-943f-4026-8b56-a4856efb6e87] Received event network-vif-deleted-90575ec3-2f97-43ef-97d9-21e1847868c9 {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 2176.142079] env[62519]: INFO nova.compute.manager [req-589c92a9-ba1a-49c2-ba04-e5f1bf7544c2 req-be478c5f-62a4-4fc9-a370-5039b1f08fd7 service nova] [instance: 737d119d-943f-4026-8b56-a4856efb6e87] Neutron deleted interface 90575ec3-2f97-43ef-97d9-21e1847868c9; detaching it from the instance and deleting it from the info cache [ 2176.142247] env[62519]: DEBUG nova.network.neutron [req-589c92a9-ba1a-49c2-ba04-e5f1bf7544c2 req-be478c5f-62a4-4fc9-a370-5039b1f08fd7 service nova] [instance: 737d119d-943f-4026-8b56-a4856efb6e87] Updating instance_info_cache with network_info: [] {{(pid=62519) 
update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2176.314249] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-093e4662-1773-469e-944c-01bb36796ea8 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] [instance: f0ebedcb-c064-4604-be62-7c7d5d419864] Powering off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2176.314697] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-365011b7-740c-475b-b759-d31568a80d22 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2176.323210] env[62519]: DEBUG oslo_vmware.api [None req-093e4662-1773-469e-944c-01bb36796ea8 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Waiting for the task: (returnval){ [ 2176.323210] env[62519]: value = "task-1803622" [ 2176.323210] env[62519]: _type = "Task" [ 2176.323210] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2176.335202] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-093e4662-1773-469e-944c-01bb36796ea8 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] [instance: f0ebedcb-c064-4604-be62-7c7d5d419864] VM already powered off {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 2176.335514] env[62519]: DEBUG nova.compute.manager [None req-093e4662-1773-469e-944c-01bb36796ea8 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] [instance: f0ebedcb-c064-4604-be62-7c7d5d419864] Checking state {{(pid=62519) _get_power_state /opt/stack/nova/nova/compute/manager.py:1799}} [ 2176.336577] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c1f14dc-3686-4c01-a225-689b57fdc3b3 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2176.343910] env[62519]: DEBUG oslo_concurrency.lockutils [None req-093e4662-1773-469e-944c-01bb36796ea8 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Acquiring lock "refresh_cache-f0ebedcb-c064-4604-be62-7c7d5d419864" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2176.344171] env[62519]: DEBUG oslo_concurrency.lockutils [None req-093e4662-1773-469e-944c-01bb36796ea8 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Acquired lock "refresh_cache-f0ebedcb-c064-4604-be62-7c7d5d419864" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2176.344423] env[62519]: DEBUG nova.network.neutron [None req-093e4662-1773-469e-944c-01bb36796ea8 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] [instance: f0ebedcb-c064-4604-be62-7c7d5d419864] Building network info cache for instance {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2176.622043] env[62519]: DEBUG nova.network.neutron [-] [instance: 737d119d-943f-4026-8b56-a4856efb6e87] Updating instance_info_cache with network_info: [] {{(pid=62519) 
update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2176.645327] env[62519]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-1a58da72-fe73-48cf-81c7-bf3259bf7521 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2176.655744] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-696a9429-1cb6-4370-bd5d-e8b9018e436a {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2176.680840] env[62519]: DEBUG nova.compute.manager [req-589c92a9-ba1a-49c2-ba04-e5f1bf7544c2 req-be478c5f-62a4-4fc9-a370-5039b1f08fd7 service nova] [instance: 737d119d-943f-4026-8b56-a4856efb6e87] Detach interface failed, port_id=90575ec3-2f97-43ef-97d9-21e1847868c9, reason: Instance 737d119d-943f-4026-8b56-a4856efb6e87 could not be found. {{(pid=62519) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11455}} [ 2177.071525] env[62519]: DEBUG nova.network.neutron [None req-093e4662-1773-469e-944c-01bb36796ea8 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] [instance: f0ebedcb-c064-4604-be62-7c7d5d419864] Updating instance_info_cache with network_info: [{"id": "53ef6650-7c4f-4870-bdb3-217af06cf895", "address": "fa:16:3e:dc:e6:e0", "network": {"id": "1aae0533-b463-4e15-b245-e7c3c91984b5", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1664918037-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.149", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ff4895c6c47e438e8fb9fbc0ffbfdc82", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1f762954-6ca5-4da5-bf0a-5d31c51ec570", "external-id": "nsx-vlan-transportzone-930", "segmentation_id": 930, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap53ef6650-7c", "ovs_interfaceid": "53ef6650-7c4f-4870-bdb3-217af06cf895", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2177.124201] env[62519]: INFO nova.compute.manager [-] [instance: 737d119d-943f-4026-8b56-a4856efb6e87] Took 1.42 seconds to deallocate network for instance. 
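Annotation: the "Acquiring lock ... by ..." / "acquired ... waited" / "released ... held" triples woven through the teardown above (the instance UUID lock, its -events lock, and compute_resources) are emitted by oslo.concurrency's synchronized decorator, which Nova applies to an inner do_terminate_instance function keyed on the instance UUID. A minimal sketch of that locking pattern follows; the teardown body is a placeholder, not Nova's implementation.

    # Minimal sketch of the per-instance locking pattern visible in this log;
    # the body of do_terminate_instance is a stand-in for the real
    # power-off / unregister / datastore-delete / network-deallocate steps.
    from oslo_concurrency import lockutils


    def terminate_instance(instance_uuid):
        # One lock per instance UUID serializes concurrent operations on the
        # same instance; oslo.concurrency logs the "Acquiring lock ..." and
        # "released ..." DEBUG lines seen above each time the wrapper runs.
        @lockutils.synchronized(instance_uuid)
        def do_terminate_instance():
            # Placeholder teardown work done while the lock is held.
            print('tearing down instance %s' % instance_uuid)

        do_terminate_instance()


    terminate_instance('737d119d-943f-4026-8b56-a4856efb6e87')

The "held 6.695s" figure reported when the lock is finally released later in the log is simply the wall-clock time the decorated inner function spent inside that lock.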
[ 2177.574666] env[62519]: DEBUG oslo_concurrency.lockutils [None req-093e4662-1773-469e-944c-01bb36796ea8 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Releasing lock "refresh_cache-f0ebedcb-c064-4604-be62-7c7d5d419864" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2177.630483] env[62519]: DEBUG oslo_concurrency.lockutils [None req-aef961cc-8fc7-4a25-9032-caac536b3a61 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2177.630820] env[62519]: DEBUG oslo_concurrency.lockutils [None req-aef961cc-8fc7-4a25-9032-caac536b3a61 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2177.631051] env[62519]: DEBUG nova.objects.instance [None req-aef961cc-8fc7-4a25-9032-caac536b3a61 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Lazy-loading 'resources' on Instance uuid 737d119d-943f-4026-8b56-a4856efb6e87 {{(pid=62519) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2177.880173] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-093e4662-1773-469e-944c-01bb36796ea8 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] [instance: f0ebedcb-c064-4604-be62-7c7d5d419864] Destroying instance {{(pid=62519) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2177.881138] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6efaa81-a36f-41be-85ad-16240c9e95ae {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2177.889135] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-093e4662-1773-469e-944c-01bb36796ea8 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] [instance: f0ebedcb-c064-4604-be62-7c7d5d419864] Unregistering the VM {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2177.889373] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b1133124-09dc-44e0-bf9e-b0e9ea3f959f {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2177.969506] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-093e4662-1773-469e-944c-01bb36796ea8 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] [instance: f0ebedcb-c064-4604-be62-7c7d5d419864] Unregistered the VM {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2177.969749] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-093e4662-1773-469e-944c-01bb36796ea8 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] [instance: f0ebedcb-c064-4604-be62-7c7d5d419864] Deleting contents of the VM from datastore datastore1 {{(pid=62519) _destroy_instance 
/opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2177.969910] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-093e4662-1773-469e-944c-01bb36796ea8 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Deleting the datastore file [datastore1] f0ebedcb-c064-4604-be62-7c7d5d419864 {{(pid=62519) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2177.970203] env[62519]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c9fb8684-707c-41f1-a7d5-d80f2a9031c2 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2177.977022] env[62519]: DEBUG oslo_vmware.api [None req-093e4662-1773-469e-944c-01bb36796ea8 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Waiting for the task: (returnval){ [ 2177.977022] env[62519]: value = "task-1803624" [ 2177.977022] env[62519]: _type = "Task" [ 2177.977022] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2177.984731] env[62519]: DEBUG oslo_vmware.api [None req-093e4662-1773-469e-944c-01bb36796ea8 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Task: {'id': task-1803624, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2178.168602] env[62519]: DEBUG nova.compute.manager [req-cec29e5d-23a8-49aa-b251-9cf6f5ea4db1 req-96486792-8806-40a4-bf5b-e27a9d624442 service nova] [instance: f0ebedcb-c064-4604-be62-7c7d5d419864] Received event network-vif-unplugged-53ef6650-7c4f-4870-bdb3-217af06cf895 {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 2178.168829] env[62519]: DEBUG oslo_concurrency.lockutils [req-cec29e5d-23a8-49aa-b251-9cf6f5ea4db1 req-96486792-8806-40a4-bf5b-e27a9d624442 service nova] Acquiring lock "f0ebedcb-c064-4604-be62-7c7d5d419864-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2178.169052] env[62519]: DEBUG oslo_concurrency.lockutils [req-cec29e5d-23a8-49aa-b251-9cf6f5ea4db1 req-96486792-8806-40a4-bf5b-e27a9d624442 service nova] Lock "f0ebedcb-c064-4604-be62-7c7d5d419864-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2178.169231] env[62519]: DEBUG oslo_concurrency.lockutils [req-cec29e5d-23a8-49aa-b251-9cf6f5ea4db1 req-96486792-8806-40a4-bf5b-e27a9d624442 service nova] Lock "f0ebedcb-c064-4604-be62-7c7d5d419864-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2178.169400] env[62519]: DEBUG nova.compute.manager [req-cec29e5d-23a8-49aa-b251-9cf6f5ea4db1 req-96486792-8806-40a4-bf5b-e27a9d624442 service nova] [instance: f0ebedcb-c064-4604-be62-7c7d5d419864] No waiting events found dispatching network-vif-unplugged-53ef6650-7c4f-4870-bdb3-217af06cf895 {{(pid=62519) pop_instance_event /opt/stack/nova/nova/compute/manager.py:323}} [ 2178.169569] env[62519]: WARNING 
nova.compute.manager [req-cec29e5d-23a8-49aa-b251-9cf6f5ea4db1 req-96486792-8806-40a4-bf5b-e27a9d624442 service nova] [instance: f0ebedcb-c064-4604-be62-7c7d5d419864] Received unexpected event network-vif-unplugged-53ef6650-7c4f-4870-bdb3-217af06cf895 for instance with vm_state shelved and task_state shelving_offloading. [ 2178.169725] env[62519]: DEBUG nova.compute.manager [req-cec29e5d-23a8-49aa-b251-9cf6f5ea4db1 req-96486792-8806-40a4-bf5b-e27a9d624442 service nova] [instance: f0ebedcb-c064-4604-be62-7c7d5d419864] Received event network-changed-53ef6650-7c4f-4870-bdb3-217af06cf895 {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 2178.169949] env[62519]: DEBUG nova.compute.manager [req-cec29e5d-23a8-49aa-b251-9cf6f5ea4db1 req-96486792-8806-40a4-bf5b-e27a9d624442 service nova] [instance: f0ebedcb-c064-4604-be62-7c7d5d419864] Refreshing instance network info cache due to event network-changed-53ef6650-7c4f-4870-bdb3-217af06cf895. {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11628}} [ 2178.170088] env[62519]: DEBUG oslo_concurrency.lockutils [req-cec29e5d-23a8-49aa-b251-9cf6f5ea4db1 req-96486792-8806-40a4-bf5b-e27a9d624442 service nova] Acquiring lock "refresh_cache-f0ebedcb-c064-4604-be62-7c7d5d419864" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2178.170206] env[62519]: DEBUG oslo_concurrency.lockutils [req-cec29e5d-23a8-49aa-b251-9cf6f5ea4db1 req-96486792-8806-40a4-bf5b-e27a9d624442 service nova] Acquired lock "refresh_cache-f0ebedcb-c064-4604-be62-7c7d5d419864" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2178.170360] env[62519]: DEBUG nova.network.neutron [req-cec29e5d-23a8-49aa-b251-9cf6f5ea4db1 req-96486792-8806-40a4-bf5b-e27a9d624442 service nova] [instance: f0ebedcb-c064-4604-be62-7c7d5d419864] Refreshing network info cache for port 53ef6650-7c4f-4870-bdb3-217af06cf895 {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2178.183914] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ddce3d81-d051-467f-b6b9-429508c92c4c {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2178.191555] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d3158195-1c3d-4295-9687-47948f3d11af {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2178.221946] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bdbac73d-f276-44d8-9d4d-31c732ff33b0 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2178.228948] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-163a89d7-90d2-4d1f-a47d-ac57a731f2c5 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2178.241647] env[62519]: DEBUG nova.compute.provider_tree [None req-aef961cc-8fc7-4a25-9032-caac536b3a61 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Inventory has not changed in ProviderTree for provider: f8ca0d98-9158-4b85-ae0e-b106f966dd44 {{(pid=62519) update_inventory 
/opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2178.486728] env[62519]: DEBUG oslo_vmware.api [None req-093e4662-1773-469e-944c-01bb36796ea8 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Task: {'id': task-1803624, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.124748} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2178.489219] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-093e4662-1773-469e-944c-01bb36796ea8 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Deleted the datastore file {{(pid=62519) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2178.489219] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-093e4662-1773-469e-944c-01bb36796ea8 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] [instance: f0ebedcb-c064-4604-be62-7c7d5d419864] Deleted contents of the VM from datastore datastore1 {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2178.489219] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-093e4662-1773-469e-944c-01bb36796ea8 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] [instance: f0ebedcb-c064-4604-be62-7c7d5d419864] Instance destroyed {{(pid=62519) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2178.508292] env[62519]: INFO nova.scheduler.client.report [None req-093e4662-1773-469e-944c-01bb36796ea8 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Deleted allocations for instance f0ebedcb-c064-4604-be62-7c7d5d419864 [ 2178.744093] env[62519]: DEBUG nova.scheduler.client.report [None req-aef961cc-8fc7-4a25-9032-caac536b3a61 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Inventory has not changed for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 159, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2178.761400] env[62519]: DEBUG oslo_service.periodic_task [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62519) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2178.869334] env[62519]: DEBUG nova.network.neutron [req-cec29e5d-23a8-49aa-b251-9cf6f5ea4db1 req-96486792-8806-40a4-bf5b-e27a9d624442 service nova] [instance: f0ebedcb-c064-4604-be62-7c7d5d419864] Updated VIF entry in instance network info cache for port 53ef6650-7c4f-4870-bdb3-217af06cf895. 
{{(pid=62519) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2178.869678] env[62519]: DEBUG nova.network.neutron [req-cec29e5d-23a8-49aa-b251-9cf6f5ea4db1 req-96486792-8806-40a4-bf5b-e27a9d624442 service nova] [instance: f0ebedcb-c064-4604-be62-7c7d5d419864] Updating instance_info_cache with network_info: [{"id": "53ef6650-7c4f-4870-bdb3-217af06cf895", "address": "fa:16:3e:dc:e6:e0", "network": {"id": "1aae0533-b463-4e15-b245-e7c3c91984b5", "bridge": null, "label": "tempest-AttachVolumeShelveTestJSON-1664918037-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.149", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ff4895c6c47e438e8fb9fbc0ffbfdc82", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "unbound", "details": {}, "devname": "tap53ef6650-7c", "ovs_interfaceid": null, "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2179.012848] env[62519]: DEBUG oslo_concurrency.lockutils [None req-093e4662-1773-469e-944c-01bb36796ea8 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2179.248216] env[62519]: DEBUG oslo_concurrency.lockutils [None req-aef961cc-8fc7-4a25-9032-caac536b3a61 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.617s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2179.251054] env[62519]: DEBUG oslo_concurrency.lockutils [None req-093e4662-1773-469e-944c-01bb36796ea8 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.238s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2179.251054] env[62519]: DEBUG nova.objects.instance [None req-093e4662-1773-469e-944c-01bb36796ea8 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Lazy-loading 'resources' on Instance uuid f0ebedcb-c064-4604-be62-7c7d5d419864 {{(pid=62519) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2179.263983] env[62519]: INFO nova.scheduler.client.report [None req-aef961cc-8fc7-4a25-9032-caac536b3a61 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Deleted allocations for instance 737d119d-943f-4026-8b56-a4856efb6e87 [ 2179.371765] env[62519]: DEBUG oslo_concurrency.lockutils [req-cec29e5d-23a8-49aa-b251-9cf6f5ea4db1 req-96486792-8806-40a4-bf5b-e27a9d624442 service nova] Releasing lock 
"refresh_cache-f0ebedcb-c064-4604-be62-7c7d5d419864" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2179.753463] env[62519]: DEBUG nova.objects.instance [None req-093e4662-1773-469e-944c-01bb36796ea8 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Lazy-loading 'numa_topology' on Instance uuid f0ebedcb-c064-4604-be62-7c7d5d419864 {{(pid=62519) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2179.770958] env[62519]: DEBUG oslo_concurrency.lockutils [None req-aef961cc-8fc7-4a25-9032-caac536b3a61 tempest-ServerActionsTestJSON-347062103 tempest-ServerActionsTestJSON-347062103-project-member] Lock "737d119d-943f-4026-8b56-a4856efb6e87" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.695s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2180.256122] env[62519]: DEBUG nova.objects.base [None req-093e4662-1773-469e-944c-01bb36796ea8 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Object Instance lazy-loaded attributes: resources,numa_topology {{(pid=62519) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 2180.270028] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb4126f0-4fc3-4e87-8708-8fe130b79f87 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2180.281303] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-efd172ce-6b0c-49f2-9b04-8ab47dce633c {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2180.314049] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7334fb8a-5c70-4987-a480-87bbcb12466f {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2180.321773] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9aa31f4b-4808-4883-9afc-6c857b04d82e {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2180.335296] env[62519]: DEBUG nova.compute.provider_tree [None req-093e4662-1773-469e-944c-01bb36796ea8 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Inventory has not changed in ProviderTree for provider: f8ca0d98-9158-4b85-ae0e-b106f966dd44 {{(pid=62519) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2180.838457] env[62519]: DEBUG nova.scheduler.client.report [None req-093e4662-1773-469e-944c-01bb36796ea8 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Inventory has not changed for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 159, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) set_inventory_for_provider 
/opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2181.343644] env[62519]: DEBUG oslo_concurrency.lockutils [None req-093e4662-1773-469e-944c-01bb36796ea8 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.093s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2181.558576] env[62519]: DEBUG oslo_concurrency.lockutils [None req-069b4915-89d2-41a1-b14b-f4eefb6d1470 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Acquiring lock "f0ebedcb-c064-4604-be62-7c7d5d419864" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2181.851298] env[62519]: DEBUG oslo_concurrency.lockutils [None req-093e4662-1773-469e-944c-01bb36796ea8 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Lock "f0ebedcb-c064-4604-be62-7c7d5d419864" "released" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: held 22.970s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2181.852291] env[62519]: DEBUG oslo_concurrency.lockutils [None req-069b4915-89d2-41a1-b14b-f4eefb6d1470 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Lock "f0ebedcb-c064-4604-be62-7c7d5d419864" acquired by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: waited 0.294s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2181.852474] env[62519]: INFO nova.compute.manager [None req-069b4915-89d2-41a1-b14b-f4eefb6d1470 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] [instance: f0ebedcb-c064-4604-be62-7c7d5d419864] Unshelving [ 2182.878859] env[62519]: DEBUG oslo_concurrency.lockutils [None req-069b4915-89d2-41a1-b14b-f4eefb6d1470 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2182.879122] env[62519]: DEBUG oslo_concurrency.lockutils [None req-069b4915-89d2-41a1-b14b-f4eefb6d1470 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2182.879328] env[62519]: DEBUG nova.objects.instance [None req-069b4915-89d2-41a1-b14b-f4eefb6d1470 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Lazy-loading 'pci_requests' on Instance uuid f0ebedcb-c064-4604-be62-7c7d5d419864 {{(pid=62519) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2183.383733] env[62519]: DEBUG nova.objects.instance [None req-069b4915-89d2-41a1-b14b-f4eefb6d1470 tempest-AttachVolumeShelveTestJSON-1488627157 
tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Lazy-loading 'numa_topology' on Instance uuid f0ebedcb-c064-4604-be62-7c7d5d419864 {{(pid=62519) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2183.762349] env[62519]: DEBUG oslo_service.periodic_task [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62519) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2183.886385] env[62519]: INFO nova.compute.claims [None req-069b4915-89d2-41a1-b14b-f4eefb6d1470 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] [instance: f0ebedcb-c064-4604-be62-7c7d5d419864] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2184.762150] env[62519]: DEBUG oslo_service.periodic_task [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62519) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2184.762395] env[62519]: DEBUG oslo_service.periodic_task [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Running periodic task ComputeManager.update_available_resource {{(pid=62519) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2184.930168] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc0f3898-9dfc-43be-aee3-705df7b3b396 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2184.937684] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31df7b19-c76c-46c1-9b6f-2301d66550d3 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2184.969232] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-715d8c8b-9fe5-4f4c-91cd-113a519e839f {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2184.977689] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c56987c-2ff9-4b35-9695-02978256b5d2 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2184.991665] env[62519]: DEBUG nova.compute.provider_tree [None req-069b4915-89d2-41a1-b14b-f4eefb6d1470 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Inventory has not changed in ProviderTree for provider: f8ca0d98-9158-4b85-ae0e-b106f966dd44 {{(pid=62519) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2185.265764] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2185.495531] env[62519]: DEBUG nova.scheduler.client.report [None req-069b4915-89d2-41a1-b14b-f4eefb6d1470 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Inventory has not changed for provider 
f8ca0d98-9158-4b85-ae0e-b106f966dd44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 159, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2185.646859] env[62519]: DEBUG oslo_concurrency.lockutils [None req-fb5cbe9d-917f-4a99-9340-7e0fe86cd5ca tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] Acquiring lock "62ce8eb2-a8be-47b6-9aa1-db19ac1abc7f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2185.647101] env[62519]: DEBUG oslo_concurrency.lockutils [None req-fb5cbe9d-917f-4a99-9340-7e0fe86cd5ca tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] Lock "62ce8eb2-a8be-47b6-9aa1-db19ac1abc7f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2186.002147] env[62519]: DEBUG oslo_concurrency.lockutils [None req-069b4915-89d2-41a1-b14b-f4eefb6d1470 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.123s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2186.004918] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.739s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2186.005132] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2186.005298] env[62519]: DEBUG nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62519) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2186.006196] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1d12417-d053-4a74-b84c-4d5fe92f6832 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2186.014599] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc426989-6542-4428-9d11-1bcf7b611b65 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2186.028560] env[62519]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3346c159-eaac-4dce-8744-2632a1955b7e {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2186.034862] env[62519]: INFO nova.network.neutron [None req-069b4915-89d2-41a1-b14b-f4eefb6d1470 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] [instance: f0ebedcb-c064-4604-be62-7c7d5d419864] Updating port 53ef6650-7c4f-4870-bdb3-217af06cf895 with attributes {'binding:host_id': 'cpu-1', 'device_owner': 'compute:nova'} [ 2186.037169] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f5c3c78-9d98-4b00-949d-8fa0cd77b6c4 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2186.068590] env[62519]: DEBUG nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180959MB free_disk=159GB free_vcpus=48 pci_devices=None {{(pid=62519) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2186.068750] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2186.069212] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2186.150297] env[62519]: DEBUG nova.compute.manager [None req-fb5cbe9d-917f-4a99-9340-7e0fe86cd5ca tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] [instance: 62ce8eb2-a8be-47b6-9aa1-db19ac1abc7f] Starting instance... {{(pid=62519) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2441}} [ 2186.673600] env[62519]: DEBUG oslo_concurrency.lockutils [None req-fb5cbe9d-917f-4a99-9340-7e0fe86cd5ca tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2187.098871] env[62519]: DEBUG nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Instance f0ebedcb-c064-4604-be62-7c7d5d419864 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62519) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2187.466342] env[62519]: DEBUG nova.compute.manager [req-72049ad9-4135-48c3-9517-70d4d937baf7 req-f3f20157-155d-42c9-a324-15e26879cff6 service nova] [instance: f0ebedcb-c064-4604-be62-7c7d5d419864] Received event network-vif-plugged-53ef6650-7c4f-4870-bdb3-217af06cf895 {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 2187.466506] env[62519]: DEBUG oslo_concurrency.lockutils [req-72049ad9-4135-48c3-9517-70d4d937baf7 req-f3f20157-155d-42c9-a324-15e26879cff6 service nova] Acquiring lock "f0ebedcb-c064-4604-be62-7c7d5d419864-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2187.466622] env[62519]: DEBUG oslo_concurrency.lockutils [req-72049ad9-4135-48c3-9517-70d4d937baf7 req-f3f20157-155d-42c9-a324-15e26879cff6 service nova] Lock "f0ebedcb-c064-4604-be62-7c7d5d419864-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2187.466743] env[62519]: DEBUG oslo_concurrency.lockutils [req-72049ad9-4135-48c3-9517-70d4d937baf7 req-f3f20157-155d-42c9-a324-15e26879cff6 service nova] Lock "f0ebedcb-c064-4604-be62-7c7d5d419864-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2187.466910] env[62519]: DEBUG nova.compute.manager [req-72049ad9-4135-48c3-9517-70d4d937baf7 req-f3f20157-155d-42c9-a324-15e26879cff6 service nova] [instance: f0ebedcb-c064-4604-be62-7c7d5d419864] No waiting events found dispatching network-vif-plugged-53ef6650-7c4f-4870-bdb3-217af06cf895 {{(pid=62519) pop_instance_event /opt/stack/nova/nova/compute/manager.py:323}} [ 2187.467089] env[62519]: WARNING nova.compute.manager [req-72049ad9-4135-48c3-9517-70d4d937baf7 req-f3f20157-155d-42c9-a324-15e26879cff6 service nova] [instance: f0ebedcb-c064-4604-be62-7c7d5d419864] Received unexpected event network-vif-plugged-53ef6650-7c4f-4870-bdb3-217af06cf895 for instance with vm_state shelved_offloaded and task_state spawning. 
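The "Received unexpected event network-vif-plugged-..." warning above comes from Nova's external-event handling: a Neutron notification only counts as "expected" if a waiter was registered for that exact event beforehand; otherwise the event is popped under the per-instance "<uuid>-events" lock, logged as unexpected, and the network info cache is refreshed anyway. A minimal, illustrative sketch of that register-then-deliver pattern follows (standard library only; prepare_for_event/deliver_event are hypothetical names, not Nova's actual API):

# Illustrative sketch of the "expected external event" pattern seen in the
# log: a waiter is registered before the operation that triggers the event,
# and the incoming notification either completes the waiter or is logged as
# unexpected. Names here are hypothetical, not Nova's real API.
import threading
from collections import defaultdict

_waiters = defaultdict(dict)   # instance_uuid -> {event_name: Event}
_lock = threading.Lock()       # analogue of the per-instance "-events" lock

def prepare_for_event(instance_uuid, event_name):
    """Register interest in an event such as network-vif-plugged-<port>."""
    ev = threading.Event()
    with _lock:
        _waiters[instance_uuid][event_name] = ev
    return ev

def deliver_event(instance_uuid, event_name):
    """Called when the external notification arrives from the network service."""
    with _lock:
        ev = _waiters[instance_uuid].pop(event_name, None)
    if ev is None:
        print(f"WARNING: unexpected event {event_name} for {instance_uuid}")
        return
    ev.set()

# Usage: the plug path registers first, then waits with a timeout.
ev = prepare_for_event("f0ebedcb", "network-vif-plugged-53ef6650")
deliver_event("f0ebedcb", "network-vif-plugged-53ef6650")    # expected -> set
deliver_event("f0ebedcb", "network-vif-unplugged-53ef6650")  # no waiter -> WARNING
assert ev.wait(timeout=1)

During shelve offload and the early unshelve/spawn steps no waiter exists yet for the port events, which is why the log shows the warnings rather than an error.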
[ 2187.561821] env[62519]: DEBUG oslo_concurrency.lockutils [None req-069b4915-89d2-41a1-b14b-f4eefb6d1470 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Acquiring lock "refresh_cache-f0ebedcb-c064-4604-be62-7c7d5d419864" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2187.562019] env[62519]: DEBUG oslo_concurrency.lockutils [None req-069b4915-89d2-41a1-b14b-f4eefb6d1470 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Acquired lock "refresh_cache-f0ebedcb-c064-4604-be62-7c7d5d419864" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2187.562199] env[62519]: DEBUG nova.network.neutron [None req-069b4915-89d2-41a1-b14b-f4eefb6d1470 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] [instance: f0ebedcb-c064-4604-be62-7c7d5d419864] Building network info cache for instance {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2187.602075] env[62519]: DEBUG nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Instance 62ce8eb2-a8be-47b6-9aa1-db19ac1abc7f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62519) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 2187.602331] env[62519]: DEBUG nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Total usable vcpus: 48, total allocated vcpus: 1 {{(pid=62519) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2187.602562] env[62519]: DEBUG nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=704MB phys_disk=200GB used_disk=1GB total_vcpus=48 used_vcpus=1 pci_stats=[] {{(pid=62519) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2187.640704] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3015813-1244-4fa5-8082-2b1dbd623f34 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2187.648756] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ace632b9-8c97-4496-87fc-9f7e42893f07 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2187.680167] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f98b776-dd58-4244-a5aa-0ed60a254b75 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2187.688470] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b20f0a1-be42-49ca-82cd-877bbc315194 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2187.701809] env[62519]: DEBUG nova.compute.provider_tree [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Inventory has not 
changed in ProviderTree for provider: f8ca0d98-9158-4b85-ae0e-b106f966dd44 {{(pid=62519) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2188.205505] env[62519]: DEBUG nova.scheduler.client.report [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Inventory has not changed for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 159, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2188.282454] env[62519]: DEBUG nova.network.neutron [None req-069b4915-89d2-41a1-b14b-f4eefb6d1470 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] [instance: f0ebedcb-c064-4604-be62-7c7d5d419864] Updating instance_info_cache with network_info: [{"id": "53ef6650-7c4f-4870-bdb3-217af06cf895", "address": "fa:16:3e:dc:e6:e0", "network": {"id": "1aae0533-b463-4e15-b245-e7c3c91984b5", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1664918037-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.149", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ff4895c6c47e438e8fb9fbc0ffbfdc82", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1f762954-6ca5-4da5-bf0a-5d31c51ec570", "external-id": "nsx-vlan-transportzone-930", "segmentation_id": 930, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap53ef6650-7c", "ovs_interfaceid": "53ef6650-7c4f-4870-bdb3-217af06cf895", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2188.712377] env[62519]: DEBUG nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62519) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2188.712576] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.644s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2188.712920] env[62519]: DEBUG oslo_concurrency.lockutils [None req-fb5cbe9d-917f-4a99-9340-7e0fe86cd5ca tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 2.039s {{(pid=62519) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2188.714514] env[62519]: INFO nova.compute.claims [None req-fb5cbe9d-917f-4a99-9340-7e0fe86cd5ca tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] [instance: 62ce8eb2-a8be-47b6-9aa1-db19ac1abc7f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2188.784835] env[62519]: DEBUG oslo_concurrency.lockutils [None req-069b4915-89d2-41a1-b14b-f4eefb6d1470 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Releasing lock "refresh_cache-f0ebedcb-c064-4604-be62-7c7d5d419864" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2188.811745] env[62519]: DEBUG nova.virt.hardware [None req-069b4915-89d2-41a1-b14b-f4eefb6d1470 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T07:56:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='9110a1ce80af908900528b6eaee55857',container_format='bare',created_at=2024-12-12T08:24:02Z,direct_url=,disk_format='vmdk',id=bd38827d-3795-4ced-9a0b-c7591065ec1e,min_disk=1,min_ram=0,name='tempest-AttachVolumeShelveTestJSON-server-1509065358-shelved',owner='ff4895c6c47e438e8fb9fbc0ffbfdc82',properties=ImageMetaProps,protected=,size=31668224,status='active',tags=,updated_at=2024-12-12T08:24:16Z,virtual_size=,visibility=), allow threads: False {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 2188.812039] env[62519]: DEBUG nova.virt.hardware [None req-069b4915-89d2-41a1-b14b-f4eefb6d1470 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Flavor limits 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2188.812211] env[62519]: DEBUG nova.virt.hardware [None req-069b4915-89d2-41a1-b14b-f4eefb6d1470 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Image limits 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 2188.812399] env[62519]: DEBUG nova.virt.hardware [None req-069b4915-89d2-41a1-b14b-f4eefb6d1470 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Flavor pref 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2188.812677] env[62519]: DEBUG nova.virt.hardware [None req-069b4915-89d2-41a1-b14b-f4eefb6d1470 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Image pref 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 2188.812961] env[62519]: DEBUG nova.virt.hardware [None req-069b4915-89d2-41a1-b14b-f4eefb6d1470 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 
2188.813223] env[62519]: DEBUG nova.virt.hardware [None req-069b4915-89d2-41a1-b14b-f4eefb6d1470 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 2188.813425] env[62519]: DEBUG nova.virt.hardware [None req-069b4915-89d2-41a1-b14b-f4eefb6d1470 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62519) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 2188.813604] env[62519]: DEBUG nova.virt.hardware [None req-069b4915-89d2-41a1-b14b-f4eefb6d1470 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Got 1 possible topologies {{(pid=62519) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 2188.813770] env[62519]: DEBUG nova.virt.hardware [None req-069b4915-89d2-41a1-b14b-f4eefb6d1470 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 2188.813981] env[62519]: DEBUG nova.virt.hardware [None req-069b4915-89d2-41a1-b14b-f4eefb6d1470 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 2188.814897] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f47fafc-98ce-4c85-ba42-d11b6643b5af {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2188.823601] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc4c4ac3-5ee1-4367-891f-89e890d44866 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2188.836653] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-069b4915-89d2-41a1-b14b-f4eefb6d1470 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] [instance: f0ebedcb-c064-4604-be62-7c7d5d419864] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:dc:e6:e0', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '1f762954-6ca5-4da5-bf0a-5d31c51ec570', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '53ef6650-7c4f-4870-bdb3-217af06cf895', 'vif_model': 'vmxnet3'}] {{(pid=62519) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2188.843974] env[62519]: DEBUG oslo.service.loopingcall [None req-069b4915-89d2-41a1-b14b-f4eefb6d1470 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62519) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2188.844198] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f0ebedcb-c064-4604-be62-7c7d5d419864] Creating VM on the ESX host {{(pid=62519) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2188.844392] env[62519]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-ddb9bf54-a2cf-4495-bba6-4d467d49d045 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2188.862195] env[62519]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2188.862195] env[62519]: value = "task-1803625" [ 2188.862195] env[62519]: _type = "Task" [ 2188.862195] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2188.869214] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1803625, 'name': CreateVM_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2189.373416] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1803625, 'name': CreateVM_Task, 'duration_secs': 0.328414} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2189.373791] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f0ebedcb-c064-4604-be62-7c7d5d419864] Created VM on the ESX host {{(pid=62519) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2189.374385] env[62519]: DEBUG oslo_concurrency.lockutils [None req-069b4915-89d2-41a1-b14b-f4eefb6d1470 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/bd38827d-3795-4ced-9a0b-c7591065ec1e" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2189.374552] env[62519]: DEBUG oslo_concurrency.lockutils [None req-069b4915-89d2-41a1-b14b-f4eefb6d1470 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Acquired lock "[datastore1] devstack-image-cache_base/bd38827d-3795-4ced-9a0b-c7591065ec1e" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2189.374948] env[62519]: DEBUG oslo_concurrency.lockutils [None req-069b4915-89d2-41a1-b14b-f4eefb6d1470 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/bd38827d-3795-4ced-9a0b-c7591065ec1e" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2189.375547] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3bc19eee-3758-45bb-9ab3-f8bb7fc68e96 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2189.381316] env[62519]: DEBUG oslo_vmware.api [None req-069b4915-89d2-41a1-b14b-f4eefb6d1470 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Waiting for the task: (returnval){ [ 2189.381316] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]52246ff4-0b28-b634-3f64-feba15fee699" [ 2189.381316] env[62519]: _type = "Task" [ 2189.381316] env[62519]: } to 
complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2189.389580] env[62519]: DEBUG oslo_vmware.api [None req-069b4915-89d2-41a1-b14b-f4eefb6d1470 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52246ff4-0b28-b634-3f64-feba15fee699, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2189.496597] env[62519]: DEBUG nova.compute.manager [req-610d9f90-7e04-4814-9234-66288f78d579 req-cd69a4b3-cbda-4a9c-a4a2-c1705ff55a0a service nova] [instance: f0ebedcb-c064-4604-be62-7c7d5d419864] Received event network-changed-53ef6650-7c4f-4870-bdb3-217af06cf895 {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 2189.496876] env[62519]: DEBUG nova.compute.manager [req-610d9f90-7e04-4814-9234-66288f78d579 req-cd69a4b3-cbda-4a9c-a4a2-c1705ff55a0a service nova] [instance: f0ebedcb-c064-4604-be62-7c7d5d419864] Refreshing instance network info cache due to event network-changed-53ef6650-7c4f-4870-bdb3-217af06cf895. {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11628}} [ 2189.497143] env[62519]: DEBUG oslo_concurrency.lockutils [req-610d9f90-7e04-4814-9234-66288f78d579 req-cd69a4b3-cbda-4a9c-a4a2-c1705ff55a0a service nova] Acquiring lock "refresh_cache-f0ebedcb-c064-4604-be62-7c7d5d419864" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2189.497316] env[62519]: DEBUG oslo_concurrency.lockutils [req-610d9f90-7e04-4814-9234-66288f78d579 req-cd69a4b3-cbda-4a9c-a4a2-c1705ff55a0a service nova] Acquired lock "refresh_cache-f0ebedcb-c064-4604-be62-7c7d5d419864" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2189.497484] env[62519]: DEBUG nova.network.neutron [req-610d9f90-7e04-4814-9234-66288f78d579 req-cd69a4b3-cbda-4a9c-a4a2-c1705ff55a0a service nova] [instance: f0ebedcb-c064-4604-be62-7c7d5d419864] Refreshing network info cache for port 53ef6650-7c4f-4870-bdb3-217af06cf895 {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2189.710329] env[62519]: DEBUG oslo_service.periodic_task [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62519) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2189.710549] env[62519]: DEBUG oslo_service.periodic_task [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62519) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2189.710713] env[62519]: DEBUG oslo_service.periodic_task [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62519) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2189.710893] env[62519]: DEBUG nova.compute.manager [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=62519) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11037}} [ 2189.761976] env[62519]: DEBUG oslo_service.periodic_task [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62519) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2189.762163] env[62519]: DEBUG nova.compute.manager [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Starting heal instance info cache {{(pid=62519) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10418}} [ 2189.765026] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d546ede-3d60-4562-88ad-b4d2543a0669 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2189.772675] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0a2dde8-d6ea-41c6-adc8-4717882a1849 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2189.802250] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db9e40da-8ae8-4a43-a419-dc2b1ef3e552 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2189.809667] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97664480-bc07-429a-93c8-8c9d872341f9 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2189.822933] env[62519]: DEBUG nova.compute.provider_tree [None req-fb5cbe9d-917f-4a99-9340-7e0fe86cd5ca tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] Inventory has not changed in ProviderTree for provider: f8ca0d98-9158-4b85-ae0e-b106f966dd44 {{(pid=62519) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2189.890950] env[62519]: DEBUG oslo_concurrency.lockutils [None req-069b4915-89d2-41a1-b14b-f4eefb6d1470 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Releasing lock "[datastore1] devstack-image-cache_base/bd38827d-3795-4ced-9a0b-c7591065ec1e" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2189.891218] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-069b4915-89d2-41a1-b14b-f4eefb6d1470 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] [instance: f0ebedcb-c064-4604-be62-7c7d5d419864] Processing image bd38827d-3795-4ced-9a0b-c7591065ec1e {{(pid=62519) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2189.891459] env[62519]: DEBUG oslo_concurrency.lockutils [None req-069b4915-89d2-41a1-b14b-f4eefb6d1470 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/bd38827d-3795-4ced-9a0b-c7591065ec1e/bd38827d-3795-4ced-9a0b-c7591065ec1e.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2189.891609] env[62519]: DEBUG oslo_concurrency.lockutils [None req-069b4915-89d2-41a1-b14b-f4eefb6d1470 tempest-AttachVolumeShelveTestJSON-1488627157 
tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Acquired lock "[datastore1] devstack-image-cache_base/bd38827d-3795-4ced-9a0b-c7591065ec1e/bd38827d-3795-4ced-9a0b-c7591065ec1e.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2189.891785] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-069b4915-89d2-41a1-b14b-f4eefb6d1470 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62519) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2189.892056] env[62519]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-852d5c36-f6e9-4fbd-8bbb-188bc18d35d9 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2189.909093] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-069b4915-89d2-41a1-b14b-f4eefb6d1470 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62519) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2189.909284] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-069b4915-89d2-41a1-b14b-f4eefb6d1470 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62519) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2189.909950] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e57b8d3e-3f94-4f0e-948e-1c5f68f0c49f {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2189.914792] env[62519]: DEBUG oslo_vmware.api [None req-069b4915-89d2-41a1-b14b-f4eefb6d1470 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Waiting for the task: (returnval){ [ 2189.914792] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]5241ed9d-8e08-1b3c-5b10-7f6c141bceb0" [ 2189.914792] env[62519]: _type = "Task" [ 2189.914792] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2189.921620] env[62519]: DEBUG oslo_vmware.api [None req-069b4915-89d2-41a1-b14b-f4eefb6d1470 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]5241ed9d-8e08-1b3c-5b10-7f6c141bceb0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2190.195668] env[62519]: DEBUG nova.network.neutron [req-610d9f90-7e04-4814-9234-66288f78d579 req-cd69a4b3-cbda-4a9c-a4a2-c1705ff55a0a service nova] [instance: f0ebedcb-c064-4604-be62-7c7d5d419864] Updated VIF entry in instance network info cache for port 53ef6650-7c4f-4870-bdb3-217af06cf895. 
{{(pid=62519) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2190.196037] env[62519]: DEBUG nova.network.neutron [req-610d9f90-7e04-4814-9234-66288f78d579 req-cd69a4b3-cbda-4a9c-a4a2-c1705ff55a0a service nova] [instance: f0ebedcb-c064-4604-be62-7c7d5d419864] Updating instance_info_cache with network_info: [{"id": "53ef6650-7c4f-4870-bdb3-217af06cf895", "address": "fa:16:3e:dc:e6:e0", "network": {"id": "1aae0533-b463-4e15-b245-e7c3c91984b5", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1664918037-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.149", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ff4895c6c47e438e8fb9fbc0ffbfdc82", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1f762954-6ca5-4da5-bf0a-5d31c51ec570", "external-id": "nsx-vlan-transportzone-930", "segmentation_id": 930, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap53ef6650-7c", "ovs_interfaceid": "53ef6650-7c4f-4870-bdb3-217af06cf895", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2190.269192] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Acquiring lock "refresh_cache-f0ebedcb-c064-4604-be62-7c7d5d419864" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2190.325694] env[62519]: DEBUG nova.scheduler.client.report [None req-fb5cbe9d-917f-4a99-9340-7e0fe86cd5ca tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] Inventory has not changed for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 159, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2190.424991] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-069b4915-89d2-41a1-b14b-f4eefb6d1470 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] [instance: f0ebedcb-c064-4604-be62-7c7d5d419864] Preparing fetch location {{(pid=62519) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2190.425310] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-069b4915-89d2-41a1-b14b-f4eefb6d1470 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] [instance: f0ebedcb-c064-4604-be62-7c7d5d419864] Fetch image to [datastore1] OSTACK_IMG_be213187-4871-46ba-a18e-77719046f609/OSTACK_IMG_be213187-4871-46ba-a18e-77719046f609.vmdk {{(pid=62519) _fetch_image_if_missing 
/opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2190.425439] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-069b4915-89d2-41a1-b14b-f4eefb6d1470 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] [instance: f0ebedcb-c064-4604-be62-7c7d5d419864] Downloading stream optimized image bd38827d-3795-4ced-9a0b-c7591065ec1e to [datastore1] OSTACK_IMG_be213187-4871-46ba-a18e-77719046f609/OSTACK_IMG_be213187-4871-46ba-a18e-77719046f609.vmdk on the data store datastore1 as vApp {{(pid=62519) _fetch_image_as_vapp /opt/stack/nova/nova/virt/vmwareapi/vmops.py:437}} [ 2190.425609] env[62519]: DEBUG nova.virt.vmwareapi.images [None req-069b4915-89d2-41a1-b14b-f4eefb6d1470 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] [instance: f0ebedcb-c064-4604-be62-7c7d5d419864] Downloading image file data bd38827d-3795-4ced-9a0b-c7591065ec1e to the ESX as VM named 'OSTACK_IMG_be213187-4871-46ba-a18e-77719046f609' {{(pid=62519) fetch_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:336}} [ 2190.488785] env[62519]: DEBUG oslo_vmware.rw_handles [None req-069b4915-89d2-41a1-b14b-f4eefb6d1470 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Creating HttpNfcLease lease for vApp import into resource pool: (val){ [ 2190.488785] env[62519]: value = "resgroup-9" [ 2190.488785] env[62519]: _type = "ResourcePool" [ 2190.488785] env[62519]: }. {{(pid=62519) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:453}} [ 2190.489031] env[62519]: DEBUG oslo_vmware.service [-] Invoking ResourcePool.ImportVApp with opID=oslo.vmware-6ebc4035-aec3-40c2-852c-495981ca5d9c {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2190.509101] env[62519]: DEBUG oslo_vmware.rw_handles [None req-069b4915-89d2-41a1-b14b-f4eefb6d1470 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Lease: (returnval){ [ 2190.509101] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]52d39fba-ce58-4d1d-fc6f-db429899e84d" [ 2190.509101] env[62519]: _type = "HttpNfcLease" [ 2190.509101] env[62519]: } obtained for vApp import into resource pool (val){ [ 2190.509101] env[62519]: value = "resgroup-9" [ 2190.509101] env[62519]: _type = "ResourcePool" [ 2190.509101] env[62519]: }. {{(pid=62519) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:461}} [ 2190.509470] env[62519]: DEBUG oslo_vmware.api [None req-069b4915-89d2-41a1-b14b-f4eefb6d1470 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Waiting for the lease: (returnval){ [ 2190.509470] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]52d39fba-ce58-4d1d-fc6f-db429899e84d" [ 2190.509470] env[62519]: _type = "HttpNfcLease" [ 2190.509470] env[62519]: } to be ready. {{(pid=62519) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 2190.515281] env[62519]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 2190.515281] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]52d39fba-ce58-4d1d-fc6f-db429899e84d" [ 2190.515281] env[62519]: _type = "HttpNfcLease" [ 2190.515281] env[62519]: } is initializing. 
{{(pid=62519) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 2190.699360] env[62519]: DEBUG oslo_concurrency.lockutils [req-610d9f90-7e04-4814-9234-66288f78d579 req-cd69a4b3-cbda-4a9c-a4a2-c1705ff55a0a service nova] Releasing lock "refresh_cache-f0ebedcb-c064-4604-be62-7c7d5d419864" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2190.699766] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Acquired lock "refresh_cache-f0ebedcb-c064-4604-be62-7c7d5d419864" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2190.699944] env[62519]: DEBUG nova.network.neutron [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] [instance: f0ebedcb-c064-4604-be62-7c7d5d419864] Forcefully refreshing network info cache for instance {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2061}} [ 2190.831072] env[62519]: DEBUG oslo_concurrency.lockutils [None req-fb5cbe9d-917f-4a99-9340-7e0fe86cd5ca tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.117s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2190.831072] env[62519]: DEBUG nova.compute.manager [None req-fb5cbe9d-917f-4a99-9340-7e0fe86cd5ca tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] [instance: 62ce8eb2-a8be-47b6-9aa1-db19ac1abc7f] Start building networks asynchronously for instance. {{(pid=62519) _build_resources /opt/stack/nova/nova/compute/manager.py:2838}} [ 2191.017544] env[62519]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 2191.017544] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]52d39fba-ce58-4d1d-fc6f-db429899e84d" [ 2191.017544] env[62519]: _type = "HttpNfcLease" [ 2191.017544] env[62519]: } is ready. {{(pid=62519) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 2191.017846] env[62519]: DEBUG oslo_vmware.rw_handles [None req-069b4915-89d2-41a1-b14b-f4eefb6d1470 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 2191.017846] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]52d39fba-ce58-4d1d-fc6f-db429899e84d" [ 2191.017846] env[62519]: _type = "HttpNfcLease" [ 2191.017846] env[62519]: }. {{(pid=62519) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:467}} [ 2191.018576] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77a3dfc5-743b-4078-b04c-6c0739835d05 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2191.026152] env[62519]: DEBUG oslo_vmware.rw_handles [None req-069b4915-89d2-41a1-b14b-f4eefb6d1470 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52de2de6-42d7-0c69-4d26-76105577a24a/disk-0.vmdk from lease info. 
{{(pid=62519) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 2191.026331] env[62519]: DEBUG oslo_vmware.rw_handles [None req-069b4915-89d2-41a1-b14b-f4eefb6d1470 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Creating HTTP connection to write to file with size = 31668224 and URL = https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52de2de6-42d7-0c69-4d26-76105577a24a/disk-0.vmdk. {{(pid=62519) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2191.091396] env[62519]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-d67975c6-d722-46fb-9926-1ca5db2f473b {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2191.336257] env[62519]: DEBUG nova.compute.utils [None req-fb5cbe9d-917f-4a99-9340-7e0fe86cd5ca tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] Using /dev/sd instead of None {{(pid=62519) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2191.337861] env[62519]: DEBUG nova.compute.manager [None req-fb5cbe9d-917f-4a99-9340-7e0fe86cd5ca tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] [instance: 62ce8eb2-a8be-47b6-9aa1-db19ac1abc7f] Allocating IP information in the background. {{(pid=62519) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1989}} [ 2191.338052] env[62519]: DEBUG nova.network.neutron [None req-fb5cbe9d-917f-4a99-9340-7e0fe86cd5ca tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] [instance: 62ce8eb2-a8be-47b6-9aa1-db19ac1abc7f] allocate_for_instance() {{(pid=62519) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 2191.387832] env[62519]: DEBUG nova.policy [None req-fb5cbe9d-917f-4a99-9340-7e0fe86cd5ca tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f49ba404ff8a4fabbeaece18d80a48da', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '4dedfa5c40474c0ea08368d61e1ff3a1', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62519) authorize /opt/stack/nova/nova/policy.py:192}} [ 2191.667465] env[62519]: DEBUG nova.network.neutron [None req-fb5cbe9d-917f-4a99-9340-7e0fe86cd5ca tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] [instance: 62ce8eb2-a8be-47b6-9aa1-db19ac1abc7f] Successfully created port: 7c4c269b-6217-44c2-bdbc-b65d2acbe55c {{(pid=62519) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2191.842400] env[62519]: DEBUG nova.compute.manager [None req-fb5cbe9d-917f-4a99-9340-7e0fe86cd5ca tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] [instance: 62ce8eb2-a8be-47b6-9aa1-db19ac1abc7f] Start building block device mappings for instance. 
{{(pid=62519) _build_resources /opt/stack/nova/nova/compute/manager.py:2873}} [ 2192.055874] env[62519]: DEBUG nova.network.neutron [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] [instance: f0ebedcb-c064-4604-be62-7c7d5d419864] Updating instance_info_cache with network_info: [{"id": "53ef6650-7c4f-4870-bdb3-217af06cf895", "address": "fa:16:3e:dc:e6:e0", "network": {"id": "1aae0533-b463-4e15-b245-e7c3c91984b5", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1664918037-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.149", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ff4895c6c47e438e8fb9fbc0ffbfdc82", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1f762954-6ca5-4da5-bf0a-5d31c51ec570", "external-id": "nsx-vlan-transportzone-930", "segmentation_id": 930, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap53ef6650-7c", "ovs_interfaceid": "53ef6650-7c4f-4870-bdb3-217af06cf895", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2192.162099] env[62519]: DEBUG oslo_vmware.rw_handles [None req-069b4915-89d2-41a1-b14b-f4eefb6d1470 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Completed reading data from the image iterator. {{(pid=62519) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2192.162400] env[62519]: DEBUG oslo_vmware.rw_handles [None req-069b4915-89d2-41a1-b14b-f4eefb6d1470 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52de2de6-42d7-0c69-4d26-76105577a24a/disk-0.vmdk. {{(pid=62519) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 2192.163451] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c3ab576-b5cc-4add-88e5-10202a489c87 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2192.170360] env[62519]: DEBUG oslo_vmware.rw_handles [None req-069b4915-89d2-41a1-b14b-f4eefb6d1470 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52de2de6-42d7-0c69-4d26-76105577a24a/disk-0.vmdk is in state: ready. {{(pid=62519) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 2192.170556] env[62519]: DEBUG oslo_vmware.rw_handles [None req-069b4915-89d2-41a1-b14b-f4eefb6d1470 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Releasing lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52de2de6-42d7-0c69-4d26-76105577a24a/disk-0.vmdk. 
{{(pid=62519) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:440}} [ 2192.170813] env[62519]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseComplete with opID=oslo.vmware-71884a59-f115-46b3-9310-bee359844559 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2192.375723] env[62519]: DEBUG oslo_vmware.rw_handles [None req-069b4915-89d2-41a1-b14b-f4eefb6d1470 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Closed VMDK write handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52de2de6-42d7-0c69-4d26-76105577a24a/disk-0.vmdk. {{(pid=62519) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:646}} [ 2192.375951] env[62519]: INFO nova.virt.vmwareapi.images [None req-069b4915-89d2-41a1-b14b-f4eefb6d1470 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] [instance: f0ebedcb-c064-4604-be62-7c7d5d419864] Downloaded image file data bd38827d-3795-4ced-9a0b-c7591065ec1e [ 2192.376777] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f785e09-2997-4a76-bc88-3fffea858d7b {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2192.393520] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f96257b2-17ae-4c03-b126-2bc8bba936f0 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2192.416352] env[62519]: INFO nova.virt.vmwareapi.images [None req-069b4915-89d2-41a1-b14b-f4eefb6d1470 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] [instance: f0ebedcb-c064-4604-be62-7c7d5d419864] The imported VM was unregistered [ 2192.418809] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-069b4915-89d2-41a1-b14b-f4eefb6d1470 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] [instance: f0ebedcb-c064-4604-be62-7c7d5d419864] Caching image {{(pid=62519) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2192.419081] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-069b4915-89d2-41a1-b14b-f4eefb6d1470 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Creating directory with path [datastore1] devstack-image-cache_base/bd38827d-3795-4ced-9a0b-c7591065ec1e {{(pid=62519) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2192.419301] env[62519]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e97420c0-ea3e-4777-883b-83f4959db02e {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2192.440469] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-069b4915-89d2-41a1-b14b-f4eefb6d1470 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Created directory with path [datastore1] devstack-image-cache_base/bd38827d-3795-4ced-9a0b-c7591065ec1e {{(pid=62519) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2192.440609] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-069b4915-89d2-41a1-b14b-f4eefb6d1470 tempest-AttachVolumeShelveTestJSON-1488627157 
tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Moving virtual disk from [datastore1] OSTACK_IMG_be213187-4871-46ba-a18e-77719046f609/OSTACK_IMG_be213187-4871-46ba-a18e-77719046f609.vmdk to [datastore1] devstack-image-cache_base/bd38827d-3795-4ced-9a0b-c7591065ec1e/bd38827d-3795-4ced-9a0b-c7591065ec1e.vmdk. {{(pid=62519) disk_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:263}} [ 2192.440851] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.MoveVirtualDisk_Task with opID=oslo.vmware-b4fad516-afb3-4d34-be79-9c51127f7eb7 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2192.447459] env[62519]: DEBUG oslo_vmware.api [None req-069b4915-89d2-41a1-b14b-f4eefb6d1470 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Waiting for the task: (returnval){ [ 2192.447459] env[62519]: value = "task-1803628" [ 2192.447459] env[62519]: _type = "Task" [ 2192.447459] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2192.454907] env[62519]: DEBUG oslo_vmware.api [None req-069b4915-89d2-41a1-b14b-f4eefb6d1470 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Task: {'id': task-1803628, 'name': MoveVirtualDisk_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2192.559569] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Releasing lock "refresh_cache-f0ebedcb-c064-4604-be62-7c7d5d419864" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2192.559845] env[62519]: DEBUG nova.compute.manager [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] [instance: f0ebedcb-c064-4604-be62-7c7d5d419864] Updated the network info_cache for instance {{(pid=62519) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10489}} [ 2192.560100] env[62519]: DEBUG oslo_service.periodic_task [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62519) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2192.853658] env[62519]: DEBUG nova.compute.manager [None req-fb5cbe9d-917f-4a99-9340-7e0fe86cd5ca tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] [instance: 62ce8eb2-a8be-47b6-9aa1-db19ac1abc7f] Start spawning the instance on the hypervisor. 
{{(pid=62519) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2647}} [ 2192.885042] env[62519]: DEBUG nova.virt.hardware [None req-fb5cbe9d-917f-4a99-9340-7e0fe86cd5ca tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T07:56:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T07:55:55Z,direct_url=,disk_format='vmdk',id=15793716-f1d9-4a86-9030-717adf498693,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4069337684d348e0a1ab4eb6a1a2b14d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T07:55:56Z,virtual_size=,visibility=), allow threads: False {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 2192.885337] env[62519]: DEBUG nova.virt.hardware [None req-fb5cbe9d-917f-4a99-9340-7e0fe86cd5ca tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] Flavor limits 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2192.885502] env[62519]: DEBUG nova.virt.hardware [None req-fb5cbe9d-917f-4a99-9340-7e0fe86cd5ca tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] Image limits 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 2192.885685] env[62519]: DEBUG nova.virt.hardware [None req-fb5cbe9d-917f-4a99-9340-7e0fe86cd5ca tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] Flavor pref 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2192.885830] env[62519]: DEBUG nova.virt.hardware [None req-fb5cbe9d-917f-4a99-9340-7e0fe86cd5ca tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] Image pref 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 2192.886011] env[62519]: DEBUG nova.virt.hardware [None req-fb5cbe9d-917f-4a99-9340-7e0fe86cd5ca tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 2192.886261] env[62519]: DEBUG nova.virt.hardware [None req-fb5cbe9d-917f-4a99-9340-7e0fe86cd5ca tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 2192.886422] env[62519]: DEBUG nova.virt.hardware [None req-fb5cbe9d-917f-4a99-9340-7e0fe86cd5ca tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62519) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 2192.886591] env[62519]: DEBUG 
nova.virt.hardware [None req-fb5cbe9d-917f-4a99-9340-7e0fe86cd5ca tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] Got 1 possible topologies {{(pid=62519) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 2192.886756] env[62519]: DEBUG nova.virt.hardware [None req-fb5cbe9d-917f-4a99-9340-7e0fe86cd5ca tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 2192.886938] env[62519]: DEBUG nova.virt.hardware [None req-fb5cbe9d-917f-4a99-9340-7e0fe86cd5ca tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 2192.887833] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a6d13e1-ed4f-419e-9680-4063d8ad20e3 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2192.896225] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f1ee49c-1b89-4baf-a7b2-16ecb9fc6c84 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2192.957699] env[62519]: DEBUG oslo_vmware.api [None req-069b4915-89d2-41a1-b14b-f4eefb6d1470 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Task: {'id': task-1803628, 'name': MoveVirtualDisk_Task} progress is 21%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2193.223244] env[62519]: DEBUG nova.compute.manager [req-f4163062-df6c-40db-a26e-640f6cf40310 req-0ac237bd-2316-4daf-ad27-b4eaa9f736f1 service nova] [instance: 62ce8eb2-a8be-47b6-9aa1-db19ac1abc7f] Received event network-vif-plugged-7c4c269b-6217-44c2-bdbc-b65d2acbe55c {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 2193.223244] env[62519]: DEBUG oslo_concurrency.lockutils [req-f4163062-df6c-40db-a26e-640f6cf40310 req-0ac237bd-2316-4daf-ad27-b4eaa9f736f1 service nova] Acquiring lock "62ce8eb2-a8be-47b6-9aa1-db19ac1abc7f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2193.223244] env[62519]: DEBUG oslo_concurrency.lockutils [req-f4163062-df6c-40db-a26e-640f6cf40310 req-0ac237bd-2316-4daf-ad27-b4eaa9f736f1 service nova] Lock "62ce8eb2-a8be-47b6-9aa1-db19ac1abc7f-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2193.223244] env[62519]: DEBUG oslo_concurrency.lockutils [req-f4163062-df6c-40db-a26e-640f6cf40310 req-0ac237bd-2316-4daf-ad27-b4eaa9f736f1 service nova] Lock "62ce8eb2-a8be-47b6-9aa1-db19ac1abc7f-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2193.223244] env[62519]: DEBUG nova.compute.manager [req-f4163062-df6c-40db-a26e-640f6cf40310 req-0ac237bd-2316-4daf-ad27-b4eaa9f736f1 service nova] [instance: 62ce8eb2-a8be-47b6-9aa1-db19ac1abc7f] No waiting events found dispatching network-vif-plugged-7c4c269b-6217-44c2-bdbc-b65d2acbe55c {{(pid=62519) pop_instance_event /opt/stack/nova/nova/compute/manager.py:323}} [ 2193.223244] env[62519]: WARNING nova.compute.manager [req-f4163062-df6c-40db-a26e-640f6cf40310 req-0ac237bd-2316-4daf-ad27-b4eaa9f736f1 service nova] [instance: 62ce8eb2-a8be-47b6-9aa1-db19ac1abc7f] Received unexpected event network-vif-plugged-7c4c269b-6217-44c2-bdbc-b65d2acbe55c for instance with vm_state building and task_state spawning. [ 2193.319928] env[62519]: DEBUG nova.network.neutron [None req-fb5cbe9d-917f-4a99-9340-7e0fe86cd5ca tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] [instance: 62ce8eb2-a8be-47b6-9aa1-db19ac1abc7f] Successfully updated port: 7c4c269b-6217-44c2-bdbc-b65d2acbe55c {{(pid=62519) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2193.459144] env[62519]: DEBUG oslo_vmware.api [None req-069b4915-89d2-41a1-b14b-f4eefb6d1470 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Task: {'id': task-1803628, 'name': MoveVirtualDisk_Task} progress is 43%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2193.822244] env[62519]: DEBUG oslo_concurrency.lockutils [None req-fb5cbe9d-917f-4a99-9340-7e0fe86cd5ca tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] Acquiring lock "refresh_cache-62ce8eb2-a8be-47b6-9aa1-db19ac1abc7f" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2193.822472] env[62519]: DEBUG oslo_concurrency.lockutils [None req-fb5cbe9d-917f-4a99-9340-7e0fe86cd5ca tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] Acquired lock "refresh_cache-62ce8eb2-a8be-47b6-9aa1-db19ac1abc7f" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2193.822550] env[62519]: DEBUG nova.network.neutron [None req-fb5cbe9d-917f-4a99-9340-7e0fe86cd5ca tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] [instance: 62ce8eb2-a8be-47b6-9aa1-db19ac1abc7f] Building network info cache for instance {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2193.960716] env[62519]: DEBUG oslo_vmware.api [None req-069b4915-89d2-41a1-b14b-f4eefb6d1470 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Task: {'id': task-1803628, 'name': MoveVirtualDisk_Task} progress is 69%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2194.357048] env[62519]: DEBUG nova.network.neutron [None req-fb5cbe9d-917f-4a99-9340-7e0fe86cd5ca tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] [instance: 62ce8eb2-a8be-47b6-9aa1-db19ac1abc7f] Instance cache missing network info. {{(pid=62519) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2194.459818] env[62519]: DEBUG oslo_vmware.api [None req-069b4915-89d2-41a1-b14b-f4eefb6d1470 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Task: {'id': task-1803628, 'name': MoveVirtualDisk_Task} progress is 91%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2194.501249] env[62519]: DEBUG nova.network.neutron [None req-fb5cbe9d-917f-4a99-9340-7e0fe86cd5ca tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] [instance: 62ce8eb2-a8be-47b6-9aa1-db19ac1abc7f] Updating instance_info_cache with network_info: [{"id": "7c4c269b-6217-44c2-bdbc-b65d2acbe55c", "address": "fa:16:3e:45:6f:e7", "network": {"id": "5d3d6d9a-2590-4b05-9c98-b532da472e0f", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-748470245-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4dedfa5c40474c0ea08368d61e1ff3a1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e238ac23-819b-452f-9015-52922e45efd3", "external-id": "nsx-vlan-transportzone-127", "segmentation_id": 127, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7c4c269b-62", "ovs_interfaceid": "7c4c269b-6217-44c2-bdbc-b65d2acbe55c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2194.960815] env[62519]: DEBUG oslo_vmware.api [None req-069b4915-89d2-41a1-b14b-f4eefb6d1470 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Task: {'id': task-1803628, 'name': MoveVirtualDisk_Task, 'duration_secs': 2.254625} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2194.961187] env[62519]: INFO nova.virt.vmwareapi.ds_util [None req-069b4915-89d2-41a1-b14b-f4eefb6d1470 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Moved virtual disk from [datastore1] OSTACK_IMG_be213187-4871-46ba-a18e-77719046f609/OSTACK_IMG_be213187-4871-46ba-a18e-77719046f609.vmdk to [datastore1] devstack-image-cache_base/bd38827d-3795-4ced-9a0b-c7591065ec1e/bd38827d-3795-4ced-9a0b-c7591065ec1e.vmdk. 
[ 2194.961319] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-069b4915-89d2-41a1-b14b-f4eefb6d1470 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] [instance: f0ebedcb-c064-4604-be62-7c7d5d419864] Cleaning up location [datastore1] OSTACK_IMG_be213187-4871-46ba-a18e-77719046f609 {{(pid=62519) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 2194.961431] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-069b4915-89d2-41a1-b14b-f4eefb6d1470 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Deleting the datastore file [datastore1] OSTACK_IMG_be213187-4871-46ba-a18e-77719046f609 {{(pid=62519) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2194.961677] env[62519]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-494f6943-e6b9-44e4-a37e-c71608a7ff97 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2194.968917] env[62519]: DEBUG oslo_vmware.api [None req-069b4915-89d2-41a1-b14b-f4eefb6d1470 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Waiting for the task: (returnval){ [ 2194.968917] env[62519]: value = "task-1803629" [ 2194.968917] env[62519]: _type = "Task" [ 2194.968917] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2194.976230] env[62519]: DEBUG oslo_vmware.api [None req-069b4915-89d2-41a1-b14b-f4eefb6d1470 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Task: {'id': task-1803629, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2195.003899] env[62519]: DEBUG oslo_concurrency.lockutils [None req-fb5cbe9d-917f-4a99-9340-7e0fe86cd5ca tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] Releasing lock "refresh_cache-62ce8eb2-a8be-47b6-9aa1-db19ac1abc7f" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2195.004234] env[62519]: DEBUG nova.compute.manager [None req-fb5cbe9d-917f-4a99-9340-7e0fe86cd5ca tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] [instance: 62ce8eb2-a8be-47b6-9aa1-db19ac1abc7f] Instance network_info: |[{"id": "7c4c269b-6217-44c2-bdbc-b65d2acbe55c", "address": "fa:16:3e:45:6f:e7", "network": {"id": "5d3d6d9a-2590-4b05-9c98-b532da472e0f", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-748470245-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4dedfa5c40474c0ea08368d61e1ff3a1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e238ac23-819b-452f-9015-52922e45efd3", "external-id": "nsx-vlan-transportzone-127", "segmentation_id": 127, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7c4c269b-62", "ovs_interfaceid": "7c4c269b-6217-44c2-bdbc-b65d2acbe55c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62519) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2004}} [ 2195.004626] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-fb5cbe9d-917f-4a99-9340-7e0fe86cd5ca tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] [instance: 62ce8eb2-a8be-47b6-9aa1-db19ac1abc7f] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:45:6f:e7', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'e238ac23-819b-452f-9015-52922e45efd3', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '7c4c269b-6217-44c2-bdbc-b65d2acbe55c', 'vif_model': 'vmxnet3'}] {{(pid=62519) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2195.012013] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-fb5cbe9d-917f-4a99-9340-7e0fe86cd5ca tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] Creating folder: Project (4dedfa5c40474c0ea08368d61e1ff3a1). Parent ref: group-v373567. 
{{(pid=62519) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 2195.012284] env[62519]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-fbee7468-3972-4a32-b987-c70497318d18 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2195.022476] env[62519]: INFO nova.virt.vmwareapi.vm_util [None req-fb5cbe9d-917f-4a99-9340-7e0fe86cd5ca tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] Created folder: Project (4dedfa5c40474c0ea08368d61e1ff3a1) in parent group-v373567. [ 2195.022672] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-fb5cbe9d-917f-4a99-9340-7e0fe86cd5ca tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] Creating folder: Instances. Parent ref: group-v373898. {{(pid=62519) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 2195.022884] env[62519]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-3020d954-f52e-4fdc-a345-1f46508499c9 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2195.032158] env[62519]: INFO nova.virt.vmwareapi.vm_util [None req-fb5cbe9d-917f-4a99-9340-7e0fe86cd5ca tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] Created folder: Instances in parent group-v373898. [ 2195.032390] env[62519]: DEBUG oslo.service.loopingcall [None req-fb5cbe9d-917f-4a99-9340-7e0fe86cd5ca tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62519) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2195.032572] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 62ce8eb2-a8be-47b6-9aa1-db19ac1abc7f] Creating VM on the ESX host {{(pid=62519) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2195.032767] env[62519]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-2cb34f3a-d472-40c1-8e41-7ea6936d326b {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2195.052285] env[62519]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2195.052285] env[62519]: value = "task-1803632" [ 2195.052285] env[62519]: _type = "Task" [ 2195.052285] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2195.059419] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1803632, 'name': CreateVM_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2195.245963] env[62519]: DEBUG nova.compute.manager [req-8c68af59-4bee-4d25-9091-aa4559006855 req-775b8239-4e8f-4f5c-a204-9250a0040b9d service nova] [instance: 62ce8eb2-a8be-47b6-9aa1-db19ac1abc7f] Received event network-changed-7c4c269b-6217-44c2-bdbc-b65d2acbe55c {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 2195.246158] env[62519]: DEBUG nova.compute.manager [req-8c68af59-4bee-4d25-9091-aa4559006855 req-775b8239-4e8f-4f5c-a204-9250a0040b9d service nova] [instance: 62ce8eb2-a8be-47b6-9aa1-db19ac1abc7f] Refreshing instance network info cache due to event network-changed-7c4c269b-6217-44c2-bdbc-b65d2acbe55c. 
{{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11628}} [ 2195.246404] env[62519]: DEBUG oslo_concurrency.lockutils [req-8c68af59-4bee-4d25-9091-aa4559006855 req-775b8239-4e8f-4f5c-a204-9250a0040b9d service nova] Acquiring lock "refresh_cache-62ce8eb2-a8be-47b6-9aa1-db19ac1abc7f" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2195.246559] env[62519]: DEBUG oslo_concurrency.lockutils [req-8c68af59-4bee-4d25-9091-aa4559006855 req-775b8239-4e8f-4f5c-a204-9250a0040b9d service nova] Acquired lock "refresh_cache-62ce8eb2-a8be-47b6-9aa1-db19ac1abc7f" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2195.246721] env[62519]: DEBUG nova.network.neutron [req-8c68af59-4bee-4d25-9091-aa4559006855 req-775b8239-4e8f-4f5c-a204-9250a0040b9d service nova] [instance: 62ce8eb2-a8be-47b6-9aa1-db19ac1abc7f] Refreshing network info cache for port 7c4c269b-6217-44c2-bdbc-b65d2acbe55c {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2195.478988] env[62519]: DEBUG oslo_vmware.api [None req-069b4915-89d2-41a1-b14b-f4eefb6d1470 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Task: {'id': task-1803629, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.035576} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2195.479236] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-069b4915-89d2-41a1-b14b-f4eefb6d1470 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Deleted the datastore file {{(pid=62519) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2195.479401] env[62519]: DEBUG oslo_concurrency.lockutils [None req-069b4915-89d2-41a1-b14b-f4eefb6d1470 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Releasing lock "[datastore1] devstack-image-cache_base/bd38827d-3795-4ced-9a0b-c7591065ec1e/bd38827d-3795-4ced-9a0b-c7591065ec1e.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2195.479647] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-069b4915-89d2-41a1-b14b-f4eefb6d1470 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/bd38827d-3795-4ced-9a0b-c7591065ec1e/bd38827d-3795-4ced-9a0b-c7591065ec1e.vmdk to [datastore1] f0ebedcb-c064-4604-be62-7c7d5d419864/f0ebedcb-c064-4604-be62-7c7d5d419864.vmdk {{(pid=62519) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2195.479903] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-79ba9a74-e56e-4492-8b1e-33b1cbed190e {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2195.486059] env[62519]: DEBUG oslo_vmware.api [None req-069b4915-89d2-41a1-b14b-f4eefb6d1470 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Waiting for the task: (returnval){ [ 2195.486059] env[62519]: value = "task-1803633" [ 2195.486059] env[62519]: _type = "Task" [ 2195.486059] env[62519]: } to complete. 
{{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2195.493336] env[62519]: DEBUG oslo_vmware.api [None req-069b4915-89d2-41a1-b14b-f4eefb6d1470 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Task: {'id': task-1803633, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2195.561352] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1803632, 'name': CreateVM_Task, 'duration_secs': 0.390031} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2195.561538] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 62ce8eb2-a8be-47b6-9aa1-db19ac1abc7f] Created VM on the ESX host {{(pid=62519) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2195.562172] env[62519]: DEBUG oslo_concurrency.lockutils [None req-fb5cbe9d-917f-4a99-9340-7e0fe86cd5ca tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2195.562356] env[62519]: DEBUG oslo_concurrency.lockutils [None req-fb5cbe9d-917f-4a99-9340-7e0fe86cd5ca tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] Acquired lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2195.562641] env[62519]: DEBUG oslo_concurrency.lockutils [None req-fb5cbe9d-917f-4a99-9340-7e0fe86cd5ca tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2195.562884] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-56e924ee-418d-406f-92c6-0634fd500501 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2195.567174] env[62519]: DEBUG oslo_vmware.api [None req-fb5cbe9d-917f-4a99-9340-7e0fe86cd5ca tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] Waiting for the task: (returnval){ [ 2195.567174] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]526b46b2-8b0e-639a-2c34-f71c0fdfefab" [ 2195.567174] env[62519]: _type = "Task" [ 2195.567174] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2195.574450] env[62519]: DEBUG oslo_vmware.api [None req-fb5cbe9d-917f-4a99-9340-7e0fe86cd5ca tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]526b46b2-8b0e-639a-2c34-f71c0fdfefab, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2195.960408] env[62519]: DEBUG nova.network.neutron [req-8c68af59-4bee-4d25-9091-aa4559006855 req-775b8239-4e8f-4f5c-a204-9250a0040b9d service nova] [instance: 62ce8eb2-a8be-47b6-9aa1-db19ac1abc7f] Updated VIF entry in instance network info cache for port 7c4c269b-6217-44c2-bdbc-b65d2acbe55c. {{(pid=62519) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2195.960788] env[62519]: DEBUG nova.network.neutron [req-8c68af59-4bee-4d25-9091-aa4559006855 req-775b8239-4e8f-4f5c-a204-9250a0040b9d service nova] [instance: 62ce8eb2-a8be-47b6-9aa1-db19ac1abc7f] Updating instance_info_cache with network_info: [{"id": "7c4c269b-6217-44c2-bdbc-b65d2acbe55c", "address": "fa:16:3e:45:6f:e7", "network": {"id": "5d3d6d9a-2590-4b05-9c98-b532da472e0f", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-748470245-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4dedfa5c40474c0ea08368d61e1ff3a1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e238ac23-819b-452f-9015-52922e45efd3", "external-id": "nsx-vlan-transportzone-127", "segmentation_id": 127, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7c4c269b-62", "ovs_interfaceid": "7c4c269b-6217-44c2-bdbc-b65d2acbe55c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2195.996804] env[62519]: DEBUG oslo_vmware.api [None req-069b4915-89d2-41a1-b14b-f4eefb6d1470 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Task: {'id': task-1803633, 'name': CopyVirtualDisk_Task} progress is 21%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2196.078336] env[62519]: DEBUG oslo_vmware.api [None req-fb5cbe9d-917f-4a99-9340-7e0fe86cd5ca tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]526b46b2-8b0e-639a-2c34-f71c0fdfefab, 'name': SearchDatastore_Task, 'duration_secs': 0.010149} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2196.078663] env[62519]: DEBUG oslo_concurrency.lockutils [None req-fb5cbe9d-917f-4a99-9340-7e0fe86cd5ca tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] Releasing lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2196.078915] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-fb5cbe9d-917f-4a99-9340-7e0fe86cd5ca tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] [instance: 62ce8eb2-a8be-47b6-9aa1-db19ac1abc7f] Processing image 15793716-f1d9-4a86-9030-717adf498693 {{(pid=62519) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2196.079165] env[62519]: DEBUG oslo_concurrency.lockutils [None req-fb5cbe9d-917f-4a99-9340-7e0fe86cd5ca tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2196.079325] env[62519]: DEBUG oslo_concurrency.lockutils [None req-fb5cbe9d-917f-4a99-9340-7e0fe86cd5ca tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] Acquired lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2196.079534] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-fb5cbe9d-917f-4a99-9340-7e0fe86cd5ca tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62519) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2196.079816] env[62519]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e0e76243-b88d-4168-90cc-ba41ba027a34 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2196.096369] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-fb5cbe9d-917f-4a99-9340-7e0fe86cd5ca tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62519) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2196.096563] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-fb5cbe9d-917f-4a99-9340-7e0fe86cd5ca tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62519) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2196.097356] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-88508440-4425-4276-aa73-6a708f618517 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2196.102890] env[62519]: DEBUG oslo_vmware.api [None req-fb5cbe9d-917f-4a99-9340-7e0fe86cd5ca tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] Waiting for the task: (returnval){ [ 2196.102890] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]52a3d726-317d-5b68-9162-60b7381736f6" [ 2196.102890] env[62519]: _type = "Task" [ 2196.102890] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2196.111145] env[62519]: DEBUG oslo_vmware.api [None req-fb5cbe9d-917f-4a99-9340-7e0fe86cd5ca tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52a3d726-317d-5b68-9162-60b7381736f6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2196.463759] env[62519]: DEBUG oslo_concurrency.lockutils [req-8c68af59-4bee-4d25-9091-aa4559006855 req-775b8239-4e8f-4f5c-a204-9250a0040b9d service nova] Releasing lock "refresh_cache-62ce8eb2-a8be-47b6-9aa1-db19ac1abc7f" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2196.497316] env[62519]: DEBUG oslo_vmware.api [None req-069b4915-89d2-41a1-b14b-f4eefb6d1470 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Task: {'id': task-1803633, 'name': CopyVirtualDisk_Task} progress is 46%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2196.613299] env[62519]: DEBUG oslo_vmware.api [None req-fb5cbe9d-917f-4a99-9340-7e0fe86cd5ca tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52a3d726-317d-5b68-9162-60b7381736f6, 'name': SearchDatastore_Task, 'duration_secs': 0.084575} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2196.614188] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f7d0aa8d-c4a0-4fa1-a4c6-cfc9e4ff1fa3 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2196.619650] env[62519]: DEBUG oslo_vmware.api [None req-fb5cbe9d-917f-4a99-9340-7e0fe86cd5ca tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] Waiting for the task: (returnval){ [ 2196.619650] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]522d9dc0-85af-c567-0e73-37d198fbeb27" [ 2196.619650] env[62519]: _type = "Task" [ 2196.619650] env[62519]: } to complete. 
{{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2196.627532] env[62519]: DEBUG oslo_vmware.api [None req-fb5cbe9d-917f-4a99-9340-7e0fe86cd5ca tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]522d9dc0-85af-c567-0e73-37d198fbeb27, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2196.999240] env[62519]: DEBUG oslo_vmware.api [None req-069b4915-89d2-41a1-b14b-f4eefb6d1470 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Task: {'id': task-1803633, 'name': CopyVirtualDisk_Task} progress is 69%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2197.130627] env[62519]: DEBUG oslo_vmware.api [None req-fb5cbe9d-917f-4a99-9340-7e0fe86cd5ca tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]522d9dc0-85af-c567-0e73-37d198fbeb27, 'name': SearchDatastore_Task, 'duration_secs': 0.08912} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2197.130868] env[62519]: DEBUG oslo_concurrency.lockutils [None req-fb5cbe9d-917f-4a99-9340-7e0fe86cd5ca tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] Releasing lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2197.131157] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-fb5cbe9d-917f-4a99-9340-7e0fe86cd5ca tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk to [datastore1] 62ce8eb2-a8be-47b6-9aa1-db19ac1abc7f/62ce8eb2-a8be-47b6-9aa1-db19ac1abc7f.vmdk {{(pid=62519) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2197.131442] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-5a5b8528-b73e-4a82-b5c7-31fbe8daf790 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2197.139171] env[62519]: DEBUG oslo_vmware.api [None req-fb5cbe9d-917f-4a99-9340-7e0fe86cd5ca tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] Waiting for the task: (returnval){ [ 2197.139171] env[62519]: value = "task-1803634" [ 2197.139171] env[62519]: _type = "Task" [ 2197.139171] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2197.147523] env[62519]: DEBUG oslo_vmware.api [None req-fb5cbe9d-917f-4a99-9340-7e0fe86cd5ca tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] Task: {'id': task-1803634, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2197.498065] env[62519]: DEBUG oslo_vmware.api [None req-069b4915-89d2-41a1-b14b-f4eefb6d1470 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Task: {'id': task-1803633, 'name': CopyVirtualDisk_Task} progress is 91%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2197.649887] env[62519]: DEBUG oslo_vmware.api [None req-fb5cbe9d-917f-4a99-9340-7e0fe86cd5ca tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] Task: {'id': task-1803634, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2197.998419] env[62519]: DEBUG oslo_vmware.api [None req-069b4915-89d2-41a1-b14b-f4eefb6d1470 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Task: {'id': task-1803633, 'name': CopyVirtualDisk_Task, 'duration_secs': 2.193993} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2197.998712] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-069b4915-89d2-41a1-b14b-f4eefb6d1470 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/bd38827d-3795-4ced-9a0b-c7591065ec1e/bd38827d-3795-4ced-9a0b-c7591065ec1e.vmdk to [datastore1] f0ebedcb-c064-4604-be62-7c7d5d419864/f0ebedcb-c064-4604-be62-7c7d5d419864.vmdk {{(pid=62519) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2197.999588] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8f4c0f4-1a6d-4ed8-9181-0913a342fcfb {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2198.023174] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-069b4915-89d2-41a1-b14b-f4eefb6d1470 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] [instance: f0ebedcb-c064-4604-be62-7c7d5d419864] Reconfiguring VM instance instance-0000007b to attach disk [datastore1] f0ebedcb-c064-4604-be62-7c7d5d419864/f0ebedcb-c064-4604-be62-7c7d5d419864.vmdk or device None with type streamOptimized {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2198.023452] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7d97d233-11f5-4a58-b0f6-10a071470f0f {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2198.043138] env[62519]: DEBUG oslo_vmware.api [None req-069b4915-89d2-41a1-b14b-f4eefb6d1470 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Waiting for the task: (returnval){ [ 2198.043138] env[62519]: value = "task-1803635" [ 2198.043138] env[62519]: _type = "Task" [ 2198.043138] env[62519]: } to complete. 
{{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2198.051238] env[62519]: DEBUG oslo_vmware.api [None req-069b4915-89d2-41a1-b14b-f4eefb6d1470 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Task: {'id': task-1803635, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2198.150674] env[62519]: DEBUG oslo_vmware.api [None req-fb5cbe9d-917f-4a99-9340-7e0fe86cd5ca tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] Task: {'id': task-1803634, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.901286} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2198.150972] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-fb5cbe9d-917f-4a99-9340-7e0fe86cd5ca tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk to [datastore1] 62ce8eb2-a8be-47b6-9aa1-db19ac1abc7f/62ce8eb2-a8be-47b6-9aa1-db19ac1abc7f.vmdk {{(pid=62519) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2198.151179] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-fb5cbe9d-917f-4a99-9340-7e0fe86cd5ca tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] [instance: 62ce8eb2-a8be-47b6-9aa1-db19ac1abc7f] Extending root virtual disk to 1048576 {{(pid=62519) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2198.151436] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-ed594a4e-42f8-4b9d-b84a-eabaeff99841 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2198.157734] env[62519]: DEBUG oslo_vmware.api [None req-fb5cbe9d-917f-4a99-9340-7e0fe86cd5ca tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] Waiting for the task: (returnval){ [ 2198.157734] env[62519]: value = "task-1803636" [ 2198.157734] env[62519]: _type = "Task" [ 2198.157734] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2198.164814] env[62519]: DEBUG oslo_vmware.api [None req-fb5cbe9d-917f-4a99-9340-7e0fe86cd5ca tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] Task: {'id': task-1803636, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2198.552606] env[62519]: DEBUG oslo_vmware.api [None req-069b4915-89d2-41a1-b14b-f4eefb6d1470 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Task: {'id': task-1803635, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2198.667518] env[62519]: DEBUG oslo_vmware.api [None req-fb5cbe9d-917f-4a99-9340-7e0fe86cd5ca tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] Task: {'id': task-1803636, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.286378} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2198.667795] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-fb5cbe9d-917f-4a99-9340-7e0fe86cd5ca tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] [instance: 62ce8eb2-a8be-47b6-9aa1-db19ac1abc7f] Extended root virtual disk {{(pid=62519) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2198.668602] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84f74195-8c83-4939-8f7c-578a00c4f01f {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2198.689306] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-fb5cbe9d-917f-4a99-9340-7e0fe86cd5ca tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] [instance: 62ce8eb2-a8be-47b6-9aa1-db19ac1abc7f] Reconfiguring VM instance instance-0000007d to attach disk [datastore1] 62ce8eb2-a8be-47b6-9aa1-db19ac1abc7f/62ce8eb2-a8be-47b6-9aa1-db19ac1abc7f.vmdk or device None with type sparse {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2198.689533] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-41971555-36d6-4ba2-8c5f-d63155c91eb8 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2198.708697] env[62519]: DEBUG oslo_vmware.api [None req-fb5cbe9d-917f-4a99-9340-7e0fe86cd5ca tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] Waiting for the task: (returnval){ [ 2198.708697] env[62519]: value = "task-1803637" [ 2198.708697] env[62519]: _type = "Task" [ 2198.708697] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2198.715963] env[62519]: DEBUG oslo_vmware.api [None req-fb5cbe9d-917f-4a99-9340-7e0fe86cd5ca tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] Task: {'id': task-1803637, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2199.054835] env[62519]: DEBUG oslo_vmware.api [None req-069b4915-89d2-41a1-b14b-f4eefb6d1470 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Task: {'id': task-1803635, 'name': ReconfigVM_Task, 'duration_secs': 0.529526} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2199.055141] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-069b4915-89d2-41a1-b14b-f4eefb6d1470 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] [instance: f0ebedcb-c064-4604-be62-7c7d5d419864] Reconfigured VM instance instance-0000007b to attach disk [datastore1] f0ebedcb-c064-4604-be62-7c7d5d419864/f0ebedcb-c064-4604-be62-7c7d5d419864.vmdk or device None with type streamOptimized {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2199.055703] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-20b9c106-201e-40fe-91e1-36a0b5b16b96 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2199.061576] env[62519]: DEBUG oslo_vmware.api [None req-069b4915-89d2-41a1-b14b-f4eefb6d1470 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Waiting for the task: (returnval){ [ 2199.061576] env[62519]: value = "task-1803638" [ 2199.061576] env[62519]: _type = "Task" [ 2199.061576] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2199.068830] env[62519]: DEBUG oslo_vmware.api [None req-069b4915-89d2-41a1-b14b-f4eefb6d1470 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Task: {'id': task-1803638, 'name': Rename_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2199.217854] env[62519]: DEBUG oslo_vmware.api [None req-fb5cbe9d-917f-4a99-9340-7e0fe86cd5ca tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] Task: {'id': task-1803637, 'name': ReconfigVM_Task, 'duration_secs': 0.291992} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2199.218151] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-fb5cbe9d-917f-4a99-9340-7e0fe86cd5ca tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] [instance: 62ce8eb2-a8be-47b6-9aa1-db19ac1abc7f] Reconfigured VM instance instance-0000007d to attach disk [datastore1] 62ce8eb2-a8be-47b6-9aa1-db19ac1abc7f/62ce8eb2-a8be-47b6-9aa1-db19ac1abc7f.vmdk or device None with type sparse {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2199.218741] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-04dc5396-394d-46f2-b00f-f0fb29ee1c28 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2199.227631] env[62519]: DEBUG oslo_vmware.api [None req-fb5cbe9d-917f-4a99-9340-7e0fe86cd5ca tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] Waiting for the task: (returnval){ [ 2199.227631] env[62519]: value = "task-1803639" [ 2199.227631] env[62519]: _type = "Task" [ 2199.227631] env[62519]: } to complete. 
{{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2199.234775] env[62519]: DEBUG oslo_vmware.api [None req-fb5cbe9d-917f-4a99-9340-7e0fe86cd5ca tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] Task: {'id': task-1803639, 'name': Rename_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2199.571578] env[62519]: DEBUG oslo_vmware.api [None req-069b4915-89d2-41a1-b14b-f4eefb6d1470 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Task: {'id': task-1803638, 'name': Rename_Task, 'duration_secs': 0.145141} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2199.571839] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-069b4915-89d2-41a1-b14b-f4eefb6d1470 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] [instance: f0ebedcb-c064-4604-be62-7c7d5d419864] Powering on the VM {{(pid=62519) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2199.572088] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ec1686dc-c65f-4552-8be9-af065a31bffb {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2199.579230] env[62519]: DEBUG oslo_vmware.api [None req-069b4915-89d2-41a1-b14b-f4eefb6d1470 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Waiting for the task: (returnval){ [ 2199.579230] env[62519]: value = "task-1803640" [ 2199.579230] env[62519]: _type = "Task" [ 2199.579230] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2199.587368] env[62519]: DEBUG oslo_vmware.api [None req-069b4915-89d2-41a1-b14b-f4eefb6d1470 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Task: {'id': task-1803640, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2199.738015] env[62519]: DEBUG oslo_vmware.api [None req-fb5cbe9d-917f-4a99-9340-7e0fe86cd5ca tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] Task: {'id': task-1803639, 'name': Rename_Task, 'duration_secs': 0.13701} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2199.738369] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-fb5cbe9d-917f-4a99-9340-7e0fe86cd5ca tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] [instance: 62ce8eb2-a8be-47b6-9aa1-db19ac1abc7f] Powering on the VM {{(pid=62519) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2199.738639] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-0f00ec38-abe7-46ae-84b4-24401f0540c6 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2199.745337] env[62519]: DEBUG oslo_vmware.api [None req-fb5cbe9d-917f-4a99-9340-7e0fe86cd5ca tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] Waiting for the task: (returnval){ [ 2199.745337] env[62519]: value = "task-1803641" [ 2199.745337] env[62519]: _type = "Task" [ 2199.745337] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2199.753166] env[62519]: DEBUG oslo_vmware.api [None req-fb5cbe9d-917f-4a99-9340-7e0fe86cd5ca tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] Task: {'id': task-1803641, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2200.092619] env[62519]: DEBUG oslo_vmware.api [None req-069b4915-89d2-41a1-b14b-f4eefb6d1470 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Task: {'id': task-1803640, 'name': PowerOnVM_Task, 'duration_secs': 0.426087} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2200.093044] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-069b4915-89d2-41a1-b14b-f4eefb6d1470 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] [instance: f0ebedcb-c064-4604-be62-7c7d5d419864] Powered on the VM {{(pid=62519) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2200.188026] env[62519]: DEBUG nova.compute.manager [None req-069b4915-89d2-41a1-b14b-f4eefb6d1470 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] [instance: f0ebedcb-c064-4604-be62-7c7d5d419864] Checking state {{(pid=62519) _get_power_state /opt/stack/nova/nova/compute/manager.py:1799}} [ 2200.189060] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-977d41da-790b-455d-afd8-45f88438d922 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2200.254708] env[62519]: DEBUG oslo_vmware.api [None req-fb5cbe9d-917f-4a99-9340-7e0fe86cd5ca tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] Task: {'id': task-1803641, 'name': PowerOnVM_Task, 'duration_secs': 0.421748} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2200.254984] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-fb5cbe9d-917f-4a99-9340-7e0fe86cd5ca tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] [instance: 62ce8eb2-a8be-47b6-9aa1-db19ac1abc7f] Powered on the VM {{(pid=62519) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2200.255194] env[62519]: INFO nova.compute.manager [None req-fb5cbe9d-917f-4a99-9340-7e0fe86cd5ca tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] [instance: 62ce8eb2-a8be-47b6-9aa1-db19ac1abc7f] Took 7.40 seconds to spawn the instance on the hypervisor. [ 2200.255508] env[62519]: DEBUG nova.compute.manager [None req-fb5cbe9d-917f-4a99-9340-7e0fe86cd5ca tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] [instance: 62ce8eb2-a8be-47b6-9aa1-db19ac1abc7f] Checking state {{(pid=62519) _get_power_state /opt/stack/nova/nova/compute/manager.py:1799}} [ 2200.256118] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9b16c8c-d640-466d-a562-618e0781ba7f {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2200.706037] env[62519]: DEBUG oslo_concurrency.lockutils [None req-069b4915-89d2-41a1-b14b-f4eefb6d1470 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Lock "f0ebedcb-c064-4604-be62-7c7d5d419864" "released" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: held 18.854s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2200.771355] env[62519]: INFO nova.compute.manager [None req-fb5cbe9d-917f-4a99-9340-7e0fe86cd5ca tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] [instance: 62ce8eb2-a8be-47b6-9aa1-db19ac1abc7f] Took 14.12 seconds to build instance. 
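The entries above trace the vmwareapi spawn path for these instances: the cached image VMDK is copied out of devstack-image-cache_base (CopyVirtualDisk_Task), the root disk is extended (ExtendVirtualDisk_Task), the disk is attached with ReconfigVM_Task, the VM is renamed and finally powered on, with every vCenter task polled through oslo.vmware's wait_for_task. A minimal sketch, assuming placeholder host, credentials and datastore paths, of how that CopyVirtualDisk_Task + wait_for_task pattern is driven through oslo.vmware's public session API (illustrative only, not Nova's internal vm_util helpers):

    # Minimal sketch, not Nova's actual code. Hostname, credentials and
    # datastore paths are placeholders; error handling and the optional
    # sourceDatacenter/destDatacenter arguments are omitted for brevity.
    from oslo_vmware import api

    session = api.VMwareAPISession(
        'vc1.example.test',            # vCenter host (placeholder)
        'svc-nova@vsphere.local',      # placeholder credentials
        'secret',
        api_retry_count=10,
        task_poll_interval=0.5,        # roughly the polling cadence seen above
    )

    disk_mgr = session.vim.service_content.virtualDiskManager

    # Kick off the server-side copy; vCenter returns a Task moref immediately.
    task = session.invoke_api(
        session.vim, 'CopyVirtualDisk_Task', disk_mgr,
        sourceName='[datastore1] devstack-image-cache_base/<image-id>/<image-id>.vmdk',
        destName='[datastore1] <instance-uuid>/<instance-uuid>.vmdk')

    # wait_for_task polls the task until it succeeds or errors; each poll is
    # what appears above as "_poll_task ... progress is N%".
    session.wait_for_task(task)

Each "progress is N%" line in the log corresponds to one poll of the task inside wait_for_task, and the "completed successfully" lines mark the poll that observed the task in its success state.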
[ 2202.278963] env[62519]: DEBUG oslo_concurrency.lockutils [None req-fb5cbe9d-917f-4a99-9340-7e0fe86cd5ca tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] Lock "62ce8eb2-a8be-47b6-9aa1-db19ac1abc7f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 16.632s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2202.288907] env[62519]: DEBUG oslo_concurrency.lockutils [None req-fae3db22-06ff-4a0d-9b5b-82c6faf97269 tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] Acquiring lock "5a48be69-056c-41ac-b452-144865f3117f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2202.289141] env[62519]: DEBUG oslo_concurrency.lockutils [None req-fae3db22-06ff-4a0d-9b5b-82c6faf97269 tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] Lock "5a48be69-056c-41ac-b452-144865f3117f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2202.792230] env[62519]: DEBUG nova.compute.manager [None req-fae3db22-06ff-4a0d-9b5b-82c6faf97269 tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] [instance: 5a48be69-056c-41ac-b452-144865f3117f] Starting instance... {{(pid=62519) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2441}} [ 2203.313213] env[62519]: DEBUG oslo_concurrency.lockutils [None req-fae3db22-06ff-4a0d-9b5b-82c6faf97269 tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2203.313499] env[62519]: DEBUG oslo_concurrency.lockutils [None req-fae3db22-06ff-4a0d-9b5b-82c6faf97269 tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2203.315013] env[62519]: INFO nova.compute.claims [None req-fae3db22-06ff-4a0d-9b5b-82c6faf97269 tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] [instance: 5a48be69-056c-41ac-b452-144865f3117f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2204.371968] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de47d8ea-eb58-460c-a1d7-dbebc68470bc {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2204.380274] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-146db1d0-d223-49cf-ab50-1a09b1435589 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2204.411717] env[62519]: DEBUG oslo_vmware.service 
[-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3729f1d-6d1f-45f8-9c49-221cc8d28914 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2204.419117] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca250b0d-82a6-45bf-a262-f5278a7748b4 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2204.432100] env[62519]: DEBUG nova.compute.provider_tree [None req-fae3db22-06ff-4a0d-9b5b-82c6faf97269 tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] Inventory has not changed in ProviderTree for provider: f8ca0d98-9158-4b85-ae0e-b106f966dd44 {{(pid=62519) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2204.935131] env[62519]: DEBUG nova.scheduler.client.report [None req-fae3db22-06ff-4a0d-9b5b-82c6faf97269 tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] Inventory has not changed for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 159, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2205.440802] env[62519]: DEBUG oslo_concurrency.lockutils [None req-fae3db22-06ff-4a0d-9b5b-82c6faf97269 tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.127s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2205.441379] env[62519]: DEBUG nova.compute.manager [None req-fae3db22-06ff-4a0d-9b5b-82c6faf97269 tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] [instance: 5a48be69-056c-41ac-b452-144865f3117f] Start building networks asynchronously for instance. {{(pid=62519) _build_resources /opt/stack/nova/nova/compute/manager.py:2838}} [ 2205.947629] env[62519]: DEBUG nova.compute.utils [None req-fae3db22-06ff-4a0d-9b5b-82c6faf97269 tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] Using /dev/sd instead of None {{(pid=62519) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2205.947629] env[62519]: DEBUG nova.compute.manager [None req-fae3db22-06ff-4a0d-9b5b-82c6faf97269 tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] [instance: 5a48be69-056c-41ac-b452-144865f3117f] Allocating IP information in the background. 
{{(pid=62519) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1989}} [ 2205.947822] env[62519]: DEBUG nova.network.neutron [None req-fae3db22-06ff-4a0d-9b5b-82c6faf97269 tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] [instance: 5a48be69-056c-41ac-b452-144865f3117f] allocate_for_instance() {{(pid=62519) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 2205.999408] env[62519]: DEBUG nova.policy [None req-fae3db22-06ff-4a0d-9b5b-82c6faf97269 tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f49ba404ff8a4fabbeaece18d80a48da', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '4dedfa5c40474c0ea08368d61e1ff3a1', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62519) authorize /opt/stack/nova/nova/policy.py:192}} [ 2206.254646] env[62519]: DEBUG nova.network.neutron [None req-fae3db22-06ff-4a0d-9b5b-82c6faf97269 tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] [instance: 5a48be69-056c-41ac-b452-144865f3117f] Successfully created port: 0f7ddb34-f8bf-497e-aaee-37bb53b0eac1 {{(pid=62519) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2206.451305] env[62519]: DEBUG nova.compute.manager [None req-fae3db22-06ff-4a0d-9b5b-82c6faf97269 tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] [instance: 5a48be69-056c-41ac-b452-144865f3117f] Start building block device mappings for instance. {{(pid=62519) _build_resources /opt/stack/nova/nova/compute/manager.py:2873}} [ 2207.460992] env[62519]: DEBUG nova.compute.manager [None req-fae3db22-06ff-4a0d-9b5b-82c6faf97269 tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] [instance: 5a48be69-056c-41ac-b452-144865f3117f] Start spawning the instance on the hypervisor. 
{{(pid=62519) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2647}} [ 2207.490569] env[62519]: DEBUG nova.virt.hardware [None req-fae3db22-06ff-4a0d-9b5b-82c6faf97269 tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T07:56:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T07:55:55Z,direct_url=,disk_format='vmdk',id=15793716-f1d9-4a86-9030-717adf498693,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4069337684d348e0a1ab4eb6a1a2b14d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T07:55:56Z,virtual_size=,visibility=), allow threads: False {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 2207.490821] env[62519]: DEBUG nova.virt.hardware [None req-fae3db22-06ff-4a0d-9b5b-82c6faf97269 tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] Flavor limits 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2207.490975] env[62519]: DEBUG nova.virt.hardware [None req-fae3db22-06ff-4a0d-9b5b-82c6faf97269 tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] Image limits 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 2207.491176] env[62519]: DEBUG nova.virt.hardware [None req-fae3db22-06ff-4a0d-9b5b-82c6faf97269 tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] Flavor pref 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2207.491322] env[62519]: DEBUG nova.virt.hardware [None req-fae3db22-06ff-4a0d-9b5b-82c6faf97269 tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] Image pref 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 2207.491467] env[62519]: DEBUG nova.virt.hardware [None req-fae3db22-06ff-4a0d-9b5b-82c6faf97269 tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 2207.491671] env[62519]: DEBUG nova.virt.hardware [None req-fae3db22-06ff-4a0d-9b5b-82c6faf97269 tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 2207.491857] env[62519]: DEBUG nova.virt.hardware [None req-fae3db22-06ff-4a0d-9b5b-82c6faf97269 tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62519) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 2207.492116] env[62519]: DEBUG 
nova.virt.hardware [None req-fae3db22-06ff-4a0d-9b5b-82c6faf97269 tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] Got 1 possible topologies {{(pid=62519) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 2207.492309] env[62519]: DEBUG nova.virt.hardware [None req-fae3db22-06ff-4a0d-9b5b-82c6faf97269 tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 2207.492482] env[62519]: DEBUG nova.virt.hardware [None req-fae3db22-06ff-4a0d-9b5b-82c6faf97269 tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 2207.493430] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-007b5668-9b91-4bf6-8903-36d8d1d95983 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2207.502083] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac19a36c-7aee-4251-adcc-a508ac4a5aa7 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2207.625909] env[62519]: DEBUG nova.compute.manager [req-427fa3da-fbc6-4970-9f7b-8b2d8fd622cd req-002e5e33-ace3-4803-a3e0-2563ea168915 service nova] [instance: 5a48be69-056c-41ac-b452-144865f3117f] Received event network-vif-plugged-0f7ddb34-f8bf-497e-aaee-37bb53b0eac1 {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 2207.626416] env[62519]: DEBUG oslo_concurrency.lockutils [req-427fa3da-fbc6-4970-9f7b-8b2d8fd622cd req-002e5e33-ace3-4803-a3e0-2563ea168915 service nova] Acquiring lock "5a48be69-056c-41ac-b452-144865f3117f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2207.626679] env[62519]: DEBUG oslo_concurrency.lockutils [req-427fa3da-fbc6-4970-9f7b-8b2d8fd622cd req-002e5e33-ace3-4803-a3e0-2563ea168915 service nova] Lock "5a48be69-056c-41ac-b452-144865f3117f-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2207.626971] env[62519]: DEBUG oslo_concurrency.lockutils [req-427fa3da-fbc6-4970-9f7b-8b2d8fd622cd req-002e5e33-ace3-4803-a3e0-2563ea168915 service nova] Lock "5a48be69-056c-41ac-b452-144865f3117f-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2207.627423] env[62519]: DEBUG nova.compute.manager [req-427fa3da-fbc6-4970-9f7b-8b2d8fd622cd req-002e5e33-ace3-4803-a3e0-2563ea168915 service nova] [instance: 5a48be69-056c-41ac-b452-144865f3117f] No waiting events found dispatching network-vif-plugged-0f7ddb34-f8bf-497e-aaee-37bb53b0eac1 {{(pid=62519) pop_instance_event /opt/stack/nova/nova/compute/manager.py:323}} [ 2207.627679] env[62519]: WARNING nova.compute.manager 
[req-427fa3da-fbc6-4970-9f7b-8b2d8fd622cd req-002e5e33-ace3-4803-a3e0-2563ea168915 service nova] [instance: 5a48be69-056c-41ac-b452-144865f3117f] Received unexpected event network-vif-plugged-0f7ddb34-f8bf-497e-aaee-37bb53b0eac1 for instance with vm_state building and task_state spawning. [ 2207.710845] env[62519]: DEBUG nova.network.neutron [None req-fae3db22-06ff-4a0d-9b5b-82c6faf97269 tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] [instance: 5a48be69-056c-41ac-b452-144865f3117f] Successfully updated port: 0f7ddb34-f8bf-497e-aaee-37bb53b0eac1 {{(pid=62519) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2208.213818] env[62519]: DEBUG oslo_concurrency.lockutils [None req-fae3db22-06ff-4a0d-9b5b-82c6faf97269 tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] Acquiring lock "refresh_cache-5a48be69-056c-41ac-b452-144865f3117f" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2208.215178] env[62519]: DEBUG oslo_concurrency.lockutils [None req-fae3db22-06ff-4a0d-9b5b-82c6faf97269 tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] Acquired lock "refresh_cache-5a48be69-056c-41ac-b452-144865f3117f" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2208.215178] env[62519]: DEBUG nova.network.neutron [None req-fae3db22-06ff-4a0d-9b5b-82c6faf97269 tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] [instance: 5a48be69-056c-41ac-b452-144865f3117f] Building network info cache for instance {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2208.746497] env[62519]: DEBUG nova.network.neutron [None req-fae3db22-06ff-4a0d-9b5b-82c6faf97269 tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] [instance: 5a48be69-056c-41ac-b452-144865f3117f] Instance cache missing network info. 
{{(pid=62519) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2208.869238] env[62519]: DEBUG nova.network.neutron [None req-fae3db22-06ff-4a0d-9b5b-82c6faf97269 tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] [instance: 5a48be69-056c-41ac-b452-144865f3117f] Updating instance_info_cache with network_info: [{"id": "0f7ddb34-f8bf-497e-aaee-37bb53b0eac1", "address": "fa:16:3e:30:e3:aa", "network": {"id": "5d3d6d9a-2590-4b05-9c98-b532da472e0f", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-748470245-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4dedfa5c40474c0ea08368d61e1ff3a1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e238ac23-819b-452f-9015-52922e45efd3", "external-id": "nsx-vlan-transportzone-127", "segmentation_id": 127, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0f7ddb34-f8", "ovs_interfaceid": "0f7ddb34-f8bf-497e-aaee-37bb53b0eac1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2209.372066] env[62519]: DEBUG oslo_concurrency.lockutils [None req-fae3db22-06ff-4a0d-9b5b-82c6faf97269 tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] Releasing lock "refresh_cache-5a48be69-056c-41ac-b452-144865f3117f" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2209.372384] env[62519]: DEBUG nova.compute.manager [None req-fae3db22-06ff-4a0d-9b5b-82c6faf97269 tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] [instance: 5a48be69-056c-41ac-b452-144865f3117f] Instance network_info: |[{"id": "0f7ddb34-f8bf-497e-aaee-37bb53b0eac1", "address": "fa:16:3e:30:e3:aa", "network": {"id": "5d3d6d9a-2590-4b05-9c98-b532da472e0f", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-748470245-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4dedfa5c40474c0ea08368d61e1ff3a1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e238ac23-819b-452f-9015-52922e45efd3", "external-id": "nsx-vlan-transportzone-127", "segmentation_id": 127, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0f7ddb34-f8", "ovs_interfaceid": "0f7ddb34-f8bf-497e-aaee-37bb53b0eac1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62519) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:2004}} [ 2209.372800] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-fae3db22-06ff-4a0d-9b5b-82c6faf97269 tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] [instance: 5a48be69-056c-41ac-b452-144865f3117f] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:30:e3:aa', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'e238ac23-819b-452f-9015-52922e45efd3', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '0f7ddb34-f8bf-497e-aaee-37bb53b0eac1', 'vif_model': 'vmxnet3'}] {{(pid=62519) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2209.380189] env[62519]: DEBUG oslo.service.loopingcall [None req-fae3db22-06ff-4a0d-9b5b-82c6faf97269 tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62519) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2209.380393] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5a48be69-056c-41ac-b452-144865f3117f] Creating VM on the ESX host {{(pid=62519) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2209.380620] env[62519]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b0fbc8ab-dec6-4185-9310-11ccdd74f14a {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2209.401054] env[62519]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2209.401054] env[62519]: value = "task-1803642" [ 2209.401054] env[62519]: _type = "Task" [ 2209.401054] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2209.408384] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1803642, 'name': CreateVM_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2209.652444] env[62519]: DEBUG nova.compute.manager [req-0c08e9e3-6527-467f-90ae-f04902fb3dd0 req-7665c0d8-c5d5-495d-a3b8-45088b6861f3 service nova] [instance: 5a48be69-056c-41ac-b452-144865f3117f] Received event network-changed-0f7ddb34-f8bf-497e-aaee-37bb53b0eac1 {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 2209.652622] env[62519]: DEBUG nova.compute.manager [req-0c08e9e3-6527-467f-90ae-f04902fb3dd0 req-7665c0d8-c5d5-495d-a3b8-45088b6861f3 service nova] [instance: 5a48be69-056c-41ac-b452-144865f3117f] Refreshing instance network info cache due to event network-changed-0f7ddb34-f8bf-497e-aaee-37bb53b0eac1. 
{{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11628}} [ 2209.652850] env[62519]: DEBUG oslo_concurrency.lockutils [req-0c08e9e3-6527-467f-90ae-f04902fb3dd0 req-7665c0d8-c5d5-495d-a3b8-45088b6861f3 service nova] Acquiring lock "refresh_cache-5a48be69-056c-41ac-b452-144865f3117f" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2209.652999] env[62519]: DEBUG oslo_concurrency.lockutils [req-0c08e9e3-6527-467f-90ae-f04902fb3dd0 req-7665c0d8-c5d5-495d-a3b8-45088b6861f3 service nova] Acquired lock "refresh_cache-5a48be69-056c-41ac-b452-144865f3117f" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2209.653227] env[62519]: DEBUG nova.network.neutron [req-0c08e9e3-6527-467f-90ae-f04902fb3dd0 req-7665c0d8-c5d5-495d-a3b8-45088b6861f3 service nova] [instance: 5a48be69-056c-41ac-b452-144865f3117f] Refreshing network info cache for port 0f7ddb34-f8bf-497e-aaee-37bb53b0eac1 {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2209.911333] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1803642, 'name': CreateVM_Task, 'duration_secs': 0.30065} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2209.911707] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5a48be69-056c-41ac-b452-144865f3117f] Created VM on the ESX host {{(pid=62519) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2209.912127] env[62519]: DEBUG oslo_concurrency.lockutils [None req-fae3db22-06ff-4a0d-9b5b-82c6faf97269 tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2209.912300] env[62519]: DEBUG oslo_concurrency.lockutils [None req-fae3db22-06ff-4a0d-9b5b-82c6faf97269 tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] Acquired lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2209.912621] env[62519]: DEBUG oslo_concurrency.lockutils [None req-fae3db22-06ff-4a0d-9b5b-82c6faf97269 tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2209.913224] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-751dc7ba-1f16-4038-b1dd-3a207576186e {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2209.917487] env[62519]: DEBUG oslo_vmware.api [None req-fae3db22-06ff-4a0d-9b5b-82c6faf97269 tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] Waiting for the task: (returnval){ [ 2209.917487] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]525e7caf-c9cc-701e-3ccc-f5d5a0accdb1" [ 2209.917487] env[62519]: _type = "Task" [ 2209.917487] env[62519]: } to complete. 
{{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2209.924956] env[62519]: DEBUG oslo_vmware.api [None req-fae3db22-06ff-4a0d-9b5b-82c6faf97269 tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]525e7caf-c9cc-701e-3ccc-f5d5a0accdb1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2210.333056] env[62519]: DEBUG nova.network.neutron [req-0c08e9e3-6527-467f-90ae-f04902fb3dd0 req-7665c0d8-c5d5-495d-a3b8-45088b6861f3 service nova] [instance: 5a48be69-056c-41ac-b452-144865f3117f] Updated VIF entry in instance network info cache for port 0f7ddb34-f8bf-497e-aaee-37bb53b0eac1. {{(pid=62519) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2210.333504] env[62519]: DEBUG nova.network.neutron [req-0c08e9e3-6527-467f-90ae-f04902fb3dd0 req-7665c0d8-c5d5-495d-a3b8-45088b6861f3 service nova] [instance: 5a48be69-056c-41ac-b452-144865f3117f] Updating instance_info_cache with network_info: [{"id": "0f7ddb34-f8bf-497e-aaee-37bb53b0eac1", "address": "fa:16:3e:30:e3:aa", "network": {"id": "5d3d6d9a-2590-4b05-9c98-b532da472e0f", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-748470245-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4dedfa5c40474c0ea08368d61e1ff3a1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e238ac23-819b-452f-9015-52922e45efd3", "external-id": "nsx-vlan-transportzone-127", "segmentation_id": 127, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0f7ddb34-f8", "ovs_interfaceid": "0f7ddb34-f8bf-497e-aaee-37bb53b0eac1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2210.427834] env[62519]: DEBUG oslo_vmware.api [None req-fae3db22-06ff-4a0d-9b5b-82c6faf97269 tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]525e7caf-c9cc-701e-3ccc-f5d5a0accdb1, 'name': SearchDatastore_Task, 'duration_secs': 0.009026} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2210.428134] env[62519]: DEBUG oslo_concurrency.lockutils [None req-fae3db22-06ff-4a0d-9b5b-82c6faf97269 tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] Releasing lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2210.428364] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-fae3db22-06ff-4a0d-9b5b-82c6faf97269 tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] [instance: 5a48be69-056c-41ac-b452-144865f3117f] Processing image 15793716-f1d9-4a86-9030-717adf498693 {{(pid=62519) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2210.428590] env[62519]: DEBUG oslo_concurrency.lockutils [None req-fae3db22-06ff-4a0d-9b5b-82c6faf97269 tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2210.428735] env[62519]: DEBUG oslo_concurrency.lockutils [None req-fae3db22-06ff-4a0d-9b5b-82c6faf97269 tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] Acquired lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2210.428909] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-fae3db22-06ff-4a0d-9b5b-82c6faf97269 tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62519) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2210.429181] env[62519]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c32b79e8-3b97-4c37-ab3a-1544ed526009 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2210.436887] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-fae3db22-06ff-4a0d-9b5b-82c6faf97269 tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62519) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2210.437025] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-fae3db22-06ff-4a0d-9b5b-82c6faf97269 tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62519) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2210.437672] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3c07aa0d-88e7-4fa1-abea-9e65de5b04be {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2210.442437] env[62519]: DEBUG oslo_vmware.api [None req-fae3db22-06ff-4a0d-9b5b-82c6faf97269 tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] Waiting for the task: (returnval){ [ 2210.442437] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]52bfcdc8-f60a-98d8-c278-a36ac8733c04" [ 2210.442437] env[62519]: _type = "Task" [ 2210.442437] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2210.449545] env[62519]: DEBUG oslo_vmware.api [None req-fae3db22-06ff-4a0d-9b5b-82c6faf97269 tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52bfcdc8-f60a-98d8-c278-a36ac8733c04, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2210.836340] env[62519]: DEBUG oslo_concurrency.lockutils [req-0c08e9e3-6527-467f-90ae-f04902fb3dd0 req-7665c0d8-c5d5-495d-a3b8-45088b6861f3 service nova] Releasing lock "refresh_cache-5a48be69-056c-41ac-b452-144865f3117f" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2210.952887] env[62519]: DEBUG oslo_vmware.api [None req-fae3db22-06ff-4a0d-9b5b-82c6faf97269 tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52bfcdc8-f60a-98d8-c278-a36ac8733c04, 'name': SearchDatastore_Task, 'duration_secs': 0.007831} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2210.953697] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b1020c73-0f71-4aa7-ae02-855c4c0573c9 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2210.958651] env[62519]: DEBUG oslo_vmware.api [None req-fae3db22-06ff-4a0d-9b5b-82c6faf97269 tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] Waiting for the task: (returnval){ [ 2210.958651] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]52ded35d-c78e-803b-93fe-2014d4467891" [ 2210.958651] env[62519]: _type = "Task" [ 2210.958651] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2210.965867] env[62519]: DEBUG oslo_vmware.api [None req-fae3db22-06ff-4a0d-9b5b-82c6faf97269 tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52ded35d-c78e-803b-93fe-2014d4467891, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2211.469144] env[62519]: DEBUG oslo_vmware.api [None req-fae3db22-06ff-4a0d-9b5b-82c6faf97269 tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52ded35d-c78e-803b-93fe-2014d4467891, 'name': SearchDatastore_Task, 'duration_secs': 0.008703} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2211.469433] env[62519]: DEBUG oslo_concurrency.lockutils [None req-fae3db22-06ff-4a0d-9b5b-82c6faf97269 tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] Releasing lock "[datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2211.469687] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-fae3db22-06ff-4a0d-9b5b-82c6faf97269 tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk to [datastore1] 5a48be69-056c-41ac-b452-144865f3117f/5a48be69-056c-41ac-b452-144865f3117f.vmdk {{(pid=62519) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2211.469942] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-8645841d-8288-410e-b3fc-48dae64d1a13 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2211.476336] env[62519]: DEBUG oslo_vmware.api [None req-fae3db22-06ff-4a0d-9b5b-82c6faf97269 tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] Waiting for the task: (returnval){ [ 2211.476336] env[62519]: value = "task-1803643" [ 2211.476336] env[62519]: _type = "Task" [ 2211.476336] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2211.485266] env[62519]: DEBUG oslo_vmware.api [None req-fae3db22-06ff-4a0d-9b5b-82c6faf97269 tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] Task: {'id': task-1803643, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2211.986833] env[62519]: DEBUG oslo_vmware.api [None req-fae3db22-06ff-4a0d-9b5b-82c6faf97269 tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] Task: {'id': task-1803643, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.46375} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2211.987217] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-fae3db22-06ff-4a0d-9b5b-82c6faf97269 tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/15793716-f1d9-4a86-9030-717adf498693/15793716-f1d9-4a86-9030-717adf498693.vmdk to [datastore1] 5a48be69-056c-41ac-b452-144865f3117f/5a48be69-056c-41ac-b452-144865f3117f.vmdk {{(pid=62519) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2211.987351] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-fae3db22-06ff-4a0d-9b5b-82c6faf97269 tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] [instance: 5a48be69-056c-41ac-b452-144865f3117f] Extending root virtual disk to 1048576 {{(pid=62519) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2211.987548] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-b6179513-a18b-4e79-8dcc-645553df8790 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2211.994389] env[62519]: DEBUG oslo_vmware.api [None req-fae3db22-06ff-4a0d-9b5b-82c6faf97269 tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] Waiting for the task: (returnval){ [ 2211.994389] env[62519]: value = "task-1803644" [ 2211.994389] env[62519]: _type = "Task" [ 2211.994389] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2212.001486] env[62519]: DEBUG oslo_vmware.api [None req-fae3db22-06ff-4a0d-9b5b-82c6faf97269 tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] Task: {'id': task-1803644, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2212.504315] env[62519]: DEBUG oslo_vmware.api [None req-fae3db22-06ff-4a0d-9b5b-82c6faf97269 tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] Task: {'id': task-1803644, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.059103} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2212.504604] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-fae3db22-06ff-4a0d-9b5b-82c6faf97269 tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] [instance: 5a48be69-056c-41ac-b452-144865f3117f] Extended root virtual disk {{(pid=62519) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2212.505400] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ef7aaac-8aa8-4c5b-aaf2-b00363af10f8 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2212.526745] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-fae3db22-06ff-4a0d-9b5b-82c6faf97269 tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] [instance: 5a48be69-056c-41ac-b452-144865f3117f] Reconfiguring VM instance instance-0000007e to attach disk [datastore1] 5a48be69-056c-41ac-b452-144865f3117f/5a48be69-056c-41ac-b452-144865f3117f.vmdk or device None with type sparse {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2212.527028] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-28448031-473c-4e00-935a-c3bd2b19017a {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2212.546281] env[62519]: DEBUG oslo_vmware.api [None req-fae3db22-06ff-4a0d-9b5b-82c6faf97269 tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] Waiting for the task: (returnval){ [ 2212.546281] env[62519]: value = "task-1803645" [ 2212.546281] env[62519]: _type = "Task" [ 2212.546281] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2212.553599] env[62519]: DEBUG oslo_vmware.api [None req-fae3db22-06ff-4a0d-9b5b-82c6faf97269 tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] Task: {'id': task-1803645, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2213.056578] env[62519]: DEBUG oslo_vmware.api [None req-fae3db22-06ff-4a0d-9b5b-82c6faf97269 tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] Task: {'id': task-1803645, 'name': ReconfigVM_Task, 'duration_secs': 0.289766} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2213.056920] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-fae3db22-06ff-4a0d-9b5b-82c6faf97269 tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] [instance: 5a48be69-056c-41ac-b452-144865f3117f] Reconfigured VM instance instance-0000007e to attach disk [datastore1] 5a48be69-056c-41ac-b452-144865f3117f/5a48be69-056c-41ac-b452-144865f3117f.vmdk or device None with type sparse {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2213.057509] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-1adbd401-f626-4593-ab7f-c0053e643419 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2213.064310] env[62519]: DEBUG oslo_vmware.api [None req-fae3db22-06ff-4a0d-9b5b-82c6faf97269 tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] Waiting for the task: (returnval){ [ 2213.064310] env[62519]: value = "task-1803646" [ 2213.064310] env[62519]: _type = "Task" [ 2213.064310] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2213.072111] env[62519]: DEBUG oslo_vmware.api [None req-fae3db22-06ff-4a0d-9b5b-82c6faf97269 tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] Task: {'id': task-1803646, 'name': Rename_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2213.574894] env[62519]: DEBUG oslo_vmware.api [None req-fae3db22-06ff-4a0d-9b5b-82c6faf97269 tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] Task: {'id': task-1803646, 'name': Rename_Task, 'duration_secs': 0.212064} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2213.575201] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-fae3db22-06ff-4a0d-9b5b-82c6faf97269 tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] [instance: 5a48be69-056c-41ac-b452-144865f3117f] Powering on the VM {{(pid=62519) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2213.575420] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-226d5309-ada3-4dab-9b20-c0b33a9548b6 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2213.581702] env[62519]: DEBUG oslo_vmware.api [None req-fae3db22-06ff-4a0d-9b5b-82c6faf97269 tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] Waiting for the task: (returnval){ [ 2213.581702] env[62519]: value = "task-1803647" [ 2213.581702] env[62519]: _type = "Task" [ 2213.581702] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2214.091925] env[62519]: DEBUG oslo_vmware.api [None req-fae3db22-06ff-4a0d-9b5b-82c6faf97269 tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] Task: {'id': task-1803647, 'name': PowerOnVM_Task, 'duration_secs': 0.492537} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2214.092317] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-fae3db22-06ff-4a0d-9b5b-82c6faf97269 tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] [instance: 5a48be69-056c-41ac-b452-144865f3117f] Powered on the VM {{(pid=62519) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2214.092444] env[62519]: INFO nova.compute.manager [None req-fae3db22-06ff-4a0d-9b5b-82c6faf97269 tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] [instance: 5a48be69-056c-41ac-b452-144865f3117f] Took 6.63 seconds to spawn the instance on the hypervisor. [ 2214.092577] env[62519]: DEBUG nova.compute.manager [None req-fae3db22-06ff-4a0d-9b5b-82c6faf97269 tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] [instance: 5a48be69-056c-41ac-b452-144865f3117f] Checking state {{(pid=62519) _get_power_state /opt/stack/nova/nova/compute/manager.py:1799}} [ 2214.093386] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c5f43e2-36fa-4528-b8de-1791880bcb6d {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2214.608981] env[62519]: INFO nova.compute.manager [None req-fae3db22-06ff-4a0d-9b5b-82c6faf97269 tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] [instance: 5a48be69-056c-41ac-b452-144865f3117f] Took 11.31 seconds to build instance. [ 2215.339256] env[62519]: DEBUG oslo_concurrency.lockutils [None req-ea02a918-e3c4-4889-87c9-71f13b8ebc82 tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] Acquiring lock "5a48be69-056c-41ac-b452-144865f3117f" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2216.116395] env[62519]: DEBUG oslo_concurrency.lockutils [None req-fae3db22-06ff-4a0d-9b5b-82c6faf97269 tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] Lock "5a48be69-056c-41ac-b452-144865f3117f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 13.827s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2216.116696] env[62519]: DEBUG oslo_concurrency.lockutils [None req-ea02a918-e3c4-4889-87c9-71f13b8ebc82 tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] Lock "5a48be69-056c-41ac-b452-144865f3117f" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.778s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2216.116928] env[62519]: DEBUG oslo_concurrency.lockutils [None req-ea02a918-e3c4-4889-87c9-71f13b8ebc82 tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] Acquiring lock "5a48be69-056c-41ac-b452-144865f3117f-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2216.117154] env[62519]:
DEBUG oslo_concurrency.lockutils [None req-ea02a918-e3c4-4889-87c9-71f13b8ebc82 tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] Lock "5a48be69-056c-41ac-b452-144865f3117f-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2216.117327] env[62519]: DEBUG oslo_concurrency.lockutils [None req-ea02a918-e3c4-4889-87c9-71f13b8ebc82 tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] Lock "5a48be69-056c-41ac-b452-144865f3117f-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2216.119366] env[62519]: INFO nova.compute.manager [None req-ea02a918-e3c4-4889-87c9-71f13b8ebc82 tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] [instance: 5a48be69-056c-41ac-b452-144865f3117f] Terminating instance [ 2216.623954] env[62519]: DEBUG nova.compute.manager [None req-ea02a918-e3c4-4889-87c9-71f13b8ebc82 tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] [instance: 5a48be69-056c-41ac-b452-144865f3117f] Start destroying the instance on the hypervisor. {{(pid=62519) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3166}} [ 2216.624396] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-ea02a918-e3c4-4889-87c9-71f13b8ebc82 tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] [instance: 5a48be69-056c-41ac-b452-144865f3117f] Destroying instance {{(pid=62519) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2216.625160] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ee5c7d0-a1d8-4c44-95a3-0d754a92a451 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2216.633247] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-ea02a918-e3c4-4889-87c9-71f13b8ebc82 tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] [instance: 5a48be69-056c-41ac-b452-144865f3117f] Powering off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2216.633477] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b4bebae9-6629-45a3-8d4a-55ce67ce4e9f {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2216.639496] env[62519]: DEBUG oslo_vmware.api [None req-ea02a918-e3c4-4889-87c9-71f13b8ebc82 tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] Waiting for the task: (returnval){ [ 2216.639496] env[62519]: value = "task-1803648" [ 2216.639496] env[62519]: _type = "Task" [ 2216.639496] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2216.648758] env[62519]: DEBUG oslo_vmware.api [None req-ea02a918-e3c4-4889-87c9-71f13b8ebc82 tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] Task: {'id': task-1803648, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2217.149473] env[62519]: DEBUG oslo_vmware.api [None req-ea02a918-e3c4-4889-87c9-71f13b8ebc82 tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] Task: {'id': task-1803648, 'name': PowerOffVM_Task, 'duration_secs': 0.202864} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2217.150575] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-ea02a918-e3c4-4889-87c9-71f13b8ebc82 tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] [instance: 5a48be69-056c-41ac-b452-144865f3117f] Powered off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2217.150575] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-ea02a918-e3c4-4889-87c9-71f13b8ebc82 tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] [instance: 5a48be69-056c-41ac-b452-144865f3117f] Unregistering the VM {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2217.150575] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-2168b1f9-a6ac-42ac-a3bc-99f4d066e953 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2217.452248] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-ea02a918-e3c4-4889-87c9-71f13b8ebc82 tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] [instance: 5a48be69-056c-41ac-b452-144865f3117f] Unregistered the VM {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2217.452466] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-ea02a918-e3c4-4889-87c9-71f13b8ebc82 tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] [instance: 5a48be69-056c-41ac-b452-144865f3117f] Deleting contents of the VM from datastore datastore1 {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2217.452649] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-ea02a918-e3c4-4889-87c9-71f13b8ebc82 tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] Deleting the datastore file [datastore1] 5a48be69-056c-41ac-b452-144865f3117f {{(pid=62519) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2217.452924] env[62519]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c43635ba-d75f-43af-aa37-0bc54c3b26e3 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2217.459341] env[62519]: DEBUG oslo_vmware.api [None req-ea02a918-e3c4-4889-87c9-71f13b8ebc82 tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] Waiting for the task: (returnval){ [ 2217.459341] env[62519]: value = "task-1803650" [ 2217.459341] env[62519]: _type = "Task" [ 2217.459341] env[62519]: } to complete. 
{{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2217.466597] env[62519]: DEBUG oslo_vmware.api [None req-ea02a918-e3c4-4889-87c9-71f13b8ebc82 tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] Task: {'id': task-1803650, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2217.969838] env[62519]: DEBUG oslo_vmware.api [None req-ea02a918-e3c4-4889-87c9-71f13b8ebc82 tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] Task: {'id': task-1803650, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.132754} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2217.970218] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-ea02a918-e3c4-4889-87c9-71f13b8ebc82 tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] Deleted the datastore file {{(pid=62519) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2217.970218] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-ea02a918-e3c4-4889-87c9-71f13b8ebc82 tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] [instance: 5a48be69-056c-41ac-b452-144865f3117f] Deleted contents of the VM from datastore datastore1 {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2217.970391] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-ea02a918-e3c4-4889-87c9-71f13b8ebc82 tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] [instance: 5a48be69-056c-41ac-b452-144865f3117f] Instance destroyed {{(pid=62519) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2217.970566] env[62519]: INFO nova.compute.manager [None req-ea02a918-e3c4-4889-87c9-71f13b8ebc82 tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] [instance: 5a48be69-056c-41ac-b452-144865f3117f] Took 1.35 seconds to destroy the instance on the hypervisor. [ 2217.970803] env[62519]: DEBUG oslo.service.loopingcall [None req-ea02a918-e3c4-4889-87c9-71f13b8ebc82 tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. 
{{(pid=62519) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2217.971011] env[62519]: DEBUG nova.compute.manager [-] [instance: 5a48be69-056c-41ac-b452-144865f3117f] Deallocating network for instance {{(pid=62519) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2297}} [ 2217.971112] env[62519]: DEBUG nova.network.neutron [-] [instance: 5a48be69-056c-41ac-b452-144865f3117f] deallocate_for_instance() {{(pid=62519) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2218.235623] env[62519]: DEBUG nova.compute.manager [req-5ec66ad9-a56e-437e-8825-6473e80ad810 req-e137b4ac-872a-44f7-97f4-0188b9b998a7 service nova] [instance: 5a48be69-056c-41ac-b452-144865f3117f] Received event network-vif-deleted-0f7ddb34-f8bf-497e-aaee-37bb53b0eac1 {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 2218.235828] env[62519]: INFO nova.compute.manager [req-5ec66ad9-a56e-437e-8825-6473e80ad810 req-e137b4ac-872a-44f7-97f4-0188b9b998a7 service nova] [instance: 5a48be69-056c-41ac-b452-144865f3117f] Neutron deleted interface 0f7ddb34-f8bf-497e-aaee-37bb53b0eac1; detaching it from the instance and deleting it from the info cache [ 2218.236014] env[62519]: DEBUG nova.network.neutron [req-5ec66ad9-a56e-437e-8825-6473e80ad810 req-e137b4ac-872a-44f7-97f4-0188b9b998a7 service nova] [instance: 5a48be69-056c-41ac-b452-144865f3117f] Updating instance_info_cache with network_info: [] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2218.714828] env[62519]: DEBUG nova.network.neutron [-] [instance: 5a48be69-056c-41ac-b452-144865f3117f] Updating instance_info_cache with network_info: [] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2218.739115] env[62519]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-d9634055-97ed-494f-81ce-1c02465f2171 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2218.750295] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-abbdf377-258d-483f-95fe-824fc1c35c4d {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2218.777717] env[62519]: DEBUG nova.compute.manager [req-5ec66ad9-a56e-437e-8825-6473e80ad810 req-e137b4ac-872a-44f7-97f4-0188b9b998a7 service nova] [instance: 5a48be69-056c-41ac-b452-144865f3117f] Detach interface failed, port_id=0f7ddb34-f8bf-497e-aaee-37bb53b0eac1, reason: Instance 5a48be69-056c-41ac-b452-144865f3117f could not be found. {{(pid=62519) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11455}} [ 2219.217661] env[62519]: INFO nova.compute.manager [-] [instance: 5a48be69-056c-41ac-b452-144865f3117f] Took 1.25 seconds to deallocate network for instance. 
[ 2219.725055] env[62519]: DEBUG oslo_concurrency.lockutils [None req-ea02a918-e3c4-4889-87c9-71f13b8ebc82 tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2219.725055] env[62519]: DEBUG oslo_concurrency.lockutils [None req-ea02a918-e3c4-4889-87c9-71f13b8ebc82 tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2219.725055] env[62519]: DEBUG nova.objects.instance [None req-ea02a918-e3c4-4889-87c9-71f13b8ebc82 tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] Lazy-loading 'resources' on Instance uuid 5a48be69-056c-41ac-b452-144865f3117f {{(pid=62519) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2220.279013] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88320041-f309-48f4-a0c7-49acf8523cee {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2220.286699] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39c5138f-d99d-42f9-9e6d-d9e60d28c2b4 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2220.316605] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4f9985a-87b3-4d89-b2aa-10661cfd42d5 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2220.324067] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19576c8b-f8fb-4714-acdd-2b1961be37fb {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2220.336893] env[62519]: DEBUG nova.compute.provider_tree [None req-ea02a918-e3c4-4889-87c9-71f13b8ebc82 tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] Inventory has not changed in ProviderTree for provider: f8ca0d98-9158-4b85-ae0e-b106f966dd44 {{(pid=62519) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2220.839983] env[62519]: DEBUG nova.scheduler.client.report [None req-ea02a918-e3c4-4889-87c9-71f13b8ebc82 tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] Inventory has not changed for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 159, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2221.344702] env[62519]: DEBUG oslo_concurrency.lockutils [None req-ea02a918-e3c4-4889-87c9-71f13b8ebc82 
tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.620s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2221.363830] env[62519]: INFO nova.scheduler.client.report [None req-ea02a918-e3c4-4889-87c9-71f13b8ebc82 tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] Deleted allocations for instance 5a48be69-056c-41ac-b452-144865f3117f [ 2221.871471] env[62519]: DEBUG oslo_concurrency.lockutils [None req-ea02a918-e3c4-4889-87c9-71f13b8ebc82 tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] Lock "5a48be69-056c-41ac-b452-144865f3117f" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 5.755s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2231.437566] env[62519]: DEBUG oslo_concurrency.lockutils [None req-de54f56a-93ad-4a93-af5f-5d1fef6380c6 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Acquiring lock "f0ebedcb-c064-4604-be62-7c7d5d419864" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2231.437866] env[62519]: DEBUG oslo_concurrency.lockutils [None req-de54f56a-93ad-4a93-af5f-5d1fef6380c6 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Lock "f0ebedcb-c064-4604-be62-7c7d5d419864" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2231.438046] env[62519]: DEBUG oslo_concurrency.lockutils [None req-de54f56a-93ad-4a93-af5f-5d1fef6380c6 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Acquiring lock "f0ebedcb-c064-4604-be62-7c7d5d419864-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2231.438237] env[62519]: DEBUG oslo_concurrency.lockutils [None req-de54f56a-93ad-4a93-af5f-5d1fef6380c6 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Lock "f0ebedcb-c064-4604-be62-7c7d5d419864-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2231.438408] env[62519]: DEBUG oslo_concurrency.lockutils [None req-de54f56a-93ad-4a93-af5f-5d1fef6380c6 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Lock "f0ebedcb-c064-4604-be62-7c7d5d419864-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2231.440727] env[62519]: INFO nova.compute.manager [None req-de54f56a-93ad-4a93-af5f-5d1fef6380c6 
tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] [instance: f0ebedcb-c064-4604-be62-7c7d5d419864] Terminating instance [ 2231.944880] env[62519]: DEBUG nova.compute.manager [None req-de54f56a-93ad-4a93-af5f-5d1fef6380c6 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] [instance: f0ebedcb-c064-4604-be62-7c7d5d419864] Start destroying the instance on the hypervisor. {{(pid=62519) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3166}} [ 2231.945290] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-de54f56a-93ad-4a93-af5f-5d1fef6380c6 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] [instance: f0ebedcb-c064-4604-be62-7c7d5d419864] Destroying instance {{(pid=62519) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2231.946251] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e7d1288-d797-450f-9982-84883d7fc53e {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2231.954494] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-de54f56a-93ad-4a93-af5f-5d1fef6380c6 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] [instance: f0ebedcb-c064-4604-be62-7c7d5d419864] Powering off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2231.954730] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-14503f93-fedf-4271-b272-9aa810e127d2 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2231.960615] env[62519]: DEBUG oslo_vmware.api [None req-de54f56a-93ad-4a93-af5f-5d1fef6380c6 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Waiting for the task: (returnval){ [ 2231.960615] env[62519]: value = "task-1803651" [ 2231.960615] env[62519]: _type = "Task" [ 2231.960615] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2231.968803] env[62519]: DEBUG oslo_vmware.api [None req-de54f56a-93ad-4a93-af5f-5d1fef6380c6 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Task: {'id': task-1803651, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2232.470514] env[62519]: DEBUG oslo_vmware.api [None req-de54f56a-93ad-4a93-af5f-5d1fef6380c6 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Task: {'id': task-1803651, 'name': PowerOffVM_Task, 'duration_secs': 0.193149} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2232.472049] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-de54f56a-93ad-4a93-af5f-5d1fef6380c6 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] [instance: f0ebedcb-c064-4604-be62-7c7d5d419864] Powered off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2232.472049] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-de54f56a-93ad-4a93-af5f-5d1fef6380c6 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] [instance: f0ebedcb-c064-4604-be62-7c7d5d419864] Unregistering the VM {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2232.472049] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-1d4347e0-ae20-413b-9a6a-c7afd3afad9e {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2232.545938] env[62519]: DEBUG oslo_concurrency.lockutils [None req-c66067cb-be45-4328-bea7-bd52333d78a2 tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] Acquiring lock "62ce8eb2-a8be-47b6-9aa1-db19ac1abc7f" by "nova.compute.manager.ComputeManager.shelve_instance.<locals>.do_shelve_instance" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2232.546308] env[62519]: DEBUG oslo_concurrency.lockutils [None req-c66067cb-be45-4328-bea7-bd52333d78a2 tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] Lock "62ce8eb2-a8be-47b6-9aa1-db19ac1abc7f" acquired by "nova.compute.manager.ComputeManager.shelve_instance.<locals>.do_shelve_instance" :: waited 0.001s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2232.546494] env[62519]: INFO nova.compute.manager [None req-c66067cb-be45-4328-bea7-bd52333d78a2 tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] [instance: 62ce8eb2-a8be-47b6-9aa1-db19ac1abc7f] Shelving [ 2232.566217] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-de54f56a-93ad-4a93-af5f-5d1fef6380c6 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] [instance: f0ebedcb-c064-4604-be62-7c7d5d419864] Unregistered the VM {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2232.566430] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-de54f56a-93ad-4a93-af5f-5d1fef6380c6 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] [instance: f0ebedcb-c064-4604-be62-7c7d5d419864] Deleting contents of the VM from datastore datastore1 {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2232.566612] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-de54f56a-93ad-4a93-af5f-5d1fef6380c6 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Deleting the datastore file [datastore1] f0ebedcb-c064-4604-be62-7c7d5d419864 {{(pid=62519) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2232.566860] env[62519]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with 
opID=oslo.vmware-58213ae9-ed6d-47a4-a560-f6ba8ecfb97c {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2232.574872] env[62519]: DEBUG oslo_vmware.api [None req-de54f56a-93ad-4a93-af5f-5d1fef6380c6 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Waiting for the task: (returnval){ [ 2232.574872] env[62519]: value = "task-1803653" [ 2232.574872] env[62519]: _type = "Task" [ 2232.574872] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2232.582146] env[62519]: DEBUG oslo_vmware.api [None req-de54f56a-93ad-4a93-af5f-5d1fef6380c6 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Task: {'id': task-1803653, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2233.086239] env[62519]: DEBUG oslo_vmware.api [None req-de54f56a-93ad-4a93-af5f-5d1fef6380c6 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Task: {'id': task-1803653, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.13531} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2233.086499] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-de54f56a-93ad-4a93-af5f-5d1fef6380c6 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Deleted the datastore file {{(pid=62519) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2233.086682] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-de54f56a-93ad-4a93-af5f-5d1fef6380c6 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] [instance: f0ebedcb-c064-4604-be62-7c7d5d419864] Deleted contents of the VM from datastore datastore1 {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2233.086854] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-de54f56a-93ad-4a93-af5f-5d1fef6380c6 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] [instance: f0ebedcb-c064-4604-be62-7c7d5d419864] Instance destroyed {{(pid=62519) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2233.087033] env[62519]: INFO nova.compute.manager [None req-de54f56a-93ad-4a93-af5f-5d1fef6380c6 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] [instance: f0ebedcb-c064-4604-be62-7c7d5d419864] Took 1.14 seconds to destroy the instance on the hypervisor. [ 2233.087284] env[62519]: DEBUG oslo.service.loopingcall [None req-de54f56a-93ad-4a93-af5f-5d1fef6380c6 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. 
{{(pid=62519) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2233.087471] env[62519]: DEBUG nova.compute.manager [-] [instance: f0ebedcb-c064-4604-be62-7c7d5d419864] Deallocating network for instance {{(pid=62519) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2297}} [ 2233.087566] env[62519]: DEBUG nova.network.neutron [-] [instance: f0ebedcb-c064-4604-be62-7c7d5d419864] deallocate_for_instance() {{(pid=62519) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2233.553748] env[62519]: DEBUG nova.compute.manager [req-0a982884-7194-478f-840d-6da06ffa4800 req-42d20a68-59c5-48a3-ba89-a26849b1e868 service nova] [instance: f0ebedcb-c064-4604-be62-7c7d5d419864] Received event network-vif-deleted-53ef6650-7c4f-4870-bdb3-217af06cf895 {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 2233.553981] env[62519]: INFO nova.compute.manager [req-0a982884-7194-478f-840d-6da06ffa4800 req-42d20a68-59c5-48a3-ba89-a26849b1e868 service nova] [instance: f0ebedcb-c064-4604-be62-7c7d5d419864] Neutron deleted interface 53ef6650-7c4f-4870-bdb3-217af06cf895; detaching it from the instance and deleting it from the info cache [ 2233.554238] env[62519]: DEBUG nova.network.neutron [req-0a982884-7194-478f-840d-6da06ffa4800 req-42d20a68-59c5-48a3-ba89-a26849b1e868 service nova] [instance: f0ebedcb-c064-4604-be62-7c7d5d419864] Updating instance_info_cache with network_info: [] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2233.558205] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-c66067cb-be45-4328-bea7-bd52333d78a2 tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] [instance: 62ce8eb2-a8be-47b6-9aa1-db19ac1abc7f] Powering off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2233.558673] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-580b9b53-1fe8-4374-9af3-93335e02d3f4 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2233.566979] env[62519]: DEBUG oslo_vmware.api [None req-c66067cb-be45-4328-bea7-bd52333d78a2 tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] Waiting for the task: (returnval){ [ 2233.566979] env[62519]: value = "task-1803654" [ 2233.566979] env[62519]: _type = "Task" [ 2233.566979] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2233.575414] env[62519]: DEBUG oslo_vmware.api [None req-c66067cb-be45-4328-bea7-bd52333d78a2 tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] Task: {'id': task-1803654, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2234.025836] env[62519]: DEBUG nova.network.neutron [-] [instance: f0ebedcb-c064-4604-be62-7c7d5d419864] Updating instance_info_cache with network_info: [] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2234.057332] env[62519]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-ac9e832f-f678-4a62-be99-ccf6bb90d50a {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2234.066494] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63d8a6c1-35f2-4ee9-baac-337711eccfc7 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2234.084872] env[62519]: DEBUG oslo_vmware.api [None req-c66067cb-be45-4328-bea7-bd52333d78a2 tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] Task: {'id': task-1803654, 'name': PowerOffVM_Task, 'duration_secs': 0.167643} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2234.085139] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-c66067cb-be45-4328-bea7-bd52333d78a2 tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] [instance: 62ce8eb2-a8be-47b6-9aa1-db19ac1abc7f] Powered off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2234.085871] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b03cfbea-cf1e-4e07-b3b7-965b91213850 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2234.093964] env[62519]: DEBUG nova.compute.manager [req-0a982884-7194-478f-840d-6da06ffa4800 req-42d20a68-59c5-48a3-ba89-a26849b1e868 service nova] [instance: f0ebedcb-c064-4604-be62-7c7d5d419864] Detach interface failed, port_id=53ef6650-7c4f-4870-bdb3-217af06cf895, reason: Instance f0ebedcb-c064-4604-be62-7c7d5d419864 could not be found. {{(pid=62519) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11455}} [ 2234.108467] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6079cddb-2863-4445-aace-b33f946097f0 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2234.529170] env[62519]: INFO nova.compute.manager [-] [instance: f0ebedcb-c064-4604-be62-7c7d5d419864] Took 1.44 seconds to deallocate network for instance. 
[ 2234.620448] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-c66067cb-be45-4328-bea7-bd52333d78a2 tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] [instance: 62ce8eb2-a8be-47b6-9aa1-db19ac1abc7f] Creating Snapshot of the VM instance {{(pid=62519) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 2234.620739] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-bc68e56b-b404-49e2-8a81-4a778ab770e4 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2234.629856] env[62519]: DEBUG oslo_vmware.api [None req-c66067cb-be45-4328-bea7-bd52333d78a2 tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] Waiting for the task: (returnval){ [ 2234.629856] env[62519]: value = "task-1803655" [ 2234.629856] env[62519]: _type = "Task" [ 2234.629856] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2234.638724] env[62519]: DEBUG oslo_vmware.api [None req-c66067cb-be45-4328-bea7-bd52333d78a2 tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] Task: {'id': task-1803655, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2235.036103] env[62519]: DEBUG oslo_concurrency.lockutils [None req-de54f56a-93ad-4a93-af5f-5d1fef6380c6 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2235.036103] env[62519]: DEBUG oslo_concurrency.lockutils [None req-de54f56a-93ad-4a93-af5f-5d1fef6380c6 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2235.036103] env[62519]: DEBUG nova.objects.instance [None req-de54f56a-93ad-4a93-af5f-5d1fef6380c6 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Lazy-loading 'resources' on Instance uuid f0ebedcb-c064-4604-be62-7c7d5d419864 {{(pid=62519) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2235.140066] env[62519]: DEBUG oslo_vmware.api [None req-c66067cb-be45-4328-bea7-bd52333d78a2 tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] Task: {'id': task-1803655, 'name': CreateSnapshot_Task, 'duration_secs': 0.437308} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2235.140310] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-c66067cb-be45-4328-bea7-bd52333d78a2 tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] [instance: 62ce8eb2-a8be-47b6-9aa1-db19ac1abc7f] Created Snapshot of the VM instance {{(pid=62519) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 2235.141057] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e8eb63e-431d-4ee7-8d18-08ea4521d30c {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2235.579769] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0fae39bc-7f4b-4f1c-b0cb-bde60ce25ac0 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2235.587197] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96f3bc68-2d92-4922-8a8d-00c94b0c4bbe {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2235.616811] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f3c7fa5-efab-4fe4-b1a4-2d827934f7b9 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2235.624248] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5766e029-c783-4e8c-9c6e-5a50cd3943b9 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2235.638227] env[62519]: DEBUG nova.compute.provider_tree [None req-de54f56a-93ad-4a93-af5f-5d1fef6380c6 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Inventory has not changed in ProviderTree for provider: f8ca0d98-9158-4b85-ae0e-b106f966dd44 {{(pid=62519) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2235.658099] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-c66067cb-be45-4328-bea7-bd52333d78a2 tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] [instance: 62ce8eb2-a8be-47b6-9aa1-db19ac1abc7f] Creating linked-clone VM from snapshot {{(pid=62519) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 2235.658542] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-a3055f29-3709-4a85-9a9a-ee889a68e022 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2235.666507] env[62519]: DEBUG oslo_vmware.api [None req-c66067cb-be45-4328-bea7-bd52333d78a2 tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] Waiting for the task: (returnval){ [ 2235.666507] env[62519]: value = "task-1803656" [ 2235.666507] env[62519]: _type = "Task" [ 2235.666507] env[62519]: } to complete. 
{{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2235.674617] env[62519]: DEBUG oslo_vmware.api [None req-c66067cb-be45-4328-bea7-bd52333d78a2 tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] Task: {'id': task-1803656, 'name': CloneVM_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2236.141276] env[62519]: DEBUG nova.scheduler.client.report [None req-de54f56a-93ad-4a93-af5f-5d1fef6380c6 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Inventory has not changed for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 159, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2236.179455] env[62519]: DEBUG oslo_vmware.api [None req-c66067cb-be45-4328-bea7-bd52333d78a2 tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] Task: {'id': task-1803656, 'name': CloneVM_Task} progress is 94%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2236.646087] env[62519]: DEBUG oslo_concurrency.lockutils [None req-de54f56a-93ad-4a93-af5f-5d1fef6380c6 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.610s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2236.665382] env[62519]: INFO nova.scheduler.client.report [None req-de54f56a-93ad-4a93-af5f-5d1fef6380c6 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Deleted allocations for instance f0ebedcb-c064-4604-be62-7c7d5d419864 [ 2236.677294] env[62519]: DEBUG oslo_vmware.api [None req-c66067cb-be45-4328-bea7-bd52333d78a2 tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] Task: {'id': task-1803656, 'name': CloneVM_Task} progress is 100%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2237.176022] env[62519]: DEBUG oslo_concurrency.lockutils [None req-de54f56a-93ad-4a93-af5f-5d1fef6380c6 tempest-AttachVolumeShelveTestJSON-1488627157 tempest-AttachVolumeShelveTestJSON-1488627157-project-member] Lock "f0ebedcb-c064-4604-be62-7c7d5d419864" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 5.738s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2237.181392] env[62519]: DEBUG oslo_vmware.api [None req-c66067cb-be45-4328-bea7-bd52333d78a2 tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] Task: {'id': task-1803656, 'name': CloneVM_Task, 'duration_secs': 1.0493} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2237.181661] env[62519]: INFO nova.virt.vmwareapi.vmops [None req-c66067cb-be45-4328-bea7-bd52333d78a2 tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] [instance: 62ce8eb2-a8be-47b6-9aa1-db19ac1abc7f] Created linked-clone VM from snapshot [ 2237.182415] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd05b27d-b5f4-4afd-b855-8c49f235a204 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2237.189795] env[62519]: DEBUG nova.virt.vmwareapi.images [None req-c66067cb-be45-4328-bea7-bd52333d78a2 tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] [instance: 62ce8eb2-a8be-47b6-9aa1-db19ac1abc7f] Uploading image b7c22715-2f22-4f13-bc45-6e8b080fba01 {{(pid=62519) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 2237.217327] env[62519]: DEBUG oslo_vmware.rw_handles [None req-c66067cb-be45-4328-bea7-bd52333d78a2 tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 2237.217327] env[62519]: value = "vm-373903" [ 2237.217327] env[62519]: _type = "VirtualMachine" [ 2237.217327] env[62519]: }. {{(pid=62519) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 2237.217595] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-916cb96b-15d6-48fe-a77f-cf522aa10340 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2237.225459] env[62519]: DEBUG oslo_vmware.rw_handles [None req-c66067cb-be45-4328-bea7-bd52333d78a2 tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] Lease: (returnval){ [ 2237.225459] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]527a40a5-de0a-f205-25c1-4ec9a680cc40" [ 2237.225459] env[62519]: _type = "HttpNfcLease" [ 2237.225459] env[62519]: } obtained for exporting VM: (result){ [ 2237.225459] env[62519]: value = "vm-373903" [ 2237.225459] env[62519]: _type = "VirtualMachine" [ 2237.225459] env[62519]: }. {{(pid=62519) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 2237.225674] env[62519]: DEBUG oslo_vmware.api [None req-c66067cb-be45-4328-bea7-bd52333d78a2 tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] Waiting for the lease: (returnval){ [ 2237.225674] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]527a40a5-de0a-f205-25c1-4ec9a680cc40" [ 2237.225674] env[62519]: _type = "HttpNfcLease" [ 2237.225674] env[62519]: } to be ready. {{(pid=62519) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 2237.231835] env[62519]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 2237.231835] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]527a40a5-de0a-f205-25c1-4ec9a680cc40" [ 2237.231835] env[62519]: _type = "HttpNfcLease" [ 2237.231835] env[62519]: } is initializing. 
{{(pid=62519) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 2237.734687] env[62519]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 2237.734687] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]527a40a5-de0a-f205-25c1-4ec9a680cc40" [ 2237.734687] env[62519]: _type = "HttpNfcLease" [ 2237.734687] env[62519]: } is ready. {{(pid=62519) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 2237.735102] env[62519]: DEBUG oslo_vmware.rw_handles [None req-c66067cb-be45-4328-bea7-bd52333d78a2 tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 2237.735102] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]527a40a5-de0a-f205-25c1-4ec9a680cc40" [ 2237.735102] env[62519]: _type = "HttpNfcLease" [ 2237.735102] env[62519]: }. {{(pid=62519) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 2237.735699] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d0eeae6-5eaa-49c4-9634-3c1fe9112c03 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2237.742870] env[62519]: DEBUG oslo_vmware.rw_handles [None req-c66067cb-be45-4328-bea7-bd52333d78a2 tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52050373-366f-9c78-77b5-b22c814b9f0a/disk-0.vmdk from lease info. {{(pid=62519) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 2237.743056] env[62519]: DEBUG oslo_vmware.rw_handles [None req-c66067cb-be45-4328-bea7-bd52333d78a2 tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] Opening URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52050373-366f-9c78-77b5-b22c814b9f0a/disk-0.vmdk for reading. {{(pid=62519) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 2237.831906] env[62519]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-8d6f67f8-a014-41a2-b15a-43ab03e2794f {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2238.761735] env[62519]: DEBUG oslo_service.periodic_task [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62519) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2244.762403] env[62519]: DEBUG oslo_service.periodic_task [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62519) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2245.265270] env[62519]: DEBUG oslo_vmware.rw_handles [None req-c66067cb-be45-4328-bea7-bd52333d78a2 tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52050373-366f-9c78-77b5-b22c814b9f0a/disk-0.vmdk. 
{{(pid=62519) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 2245.266244] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61d7a60f-2d58-4ab4-98e9-0cd25db15ee7 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2245.272096] env[62519]: DEBUG oslo_vmware.rw_handles [None req-c66067cb-be45-4328-bea7-bd52333d78a2 tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52050373-366f-9c78-77b5-b22c814b9f0a/disk-0.vmdk is in state: ready. {{(pid=62519) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 2245.272267] env[62519]: ERROR oslo_vmware.rw_handles [None req-c66067cb-be45-4328-bea7-bd52333d78a2 tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] Aborting lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52050373-366f-9c78-77b5-b22c814b9f0a/disk-0.vmdk due to incomplete transfer. [ 2245.272478] env[62519]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-95bcf0e2-66b2-4c05-8e6e-4c55fd9f8700 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2245.280008] env[62519]: DEBUG oslo_vmware.rw_handles [None req-c66067cb-be45-4328-bea7-bd52333d78a2 tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] Closed VMDK read handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52050373-366f-9c78-77b5-b22c814b9f0a/disk-0.vmdk. {{(pid=62519) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 2245.280213] env[62519]: DEBUG nova.virt.vmwareapi.images [None req-c66067cb-be45-4328-bea7-bd52333d78a2 tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] [instance: 62ce8eb2-a8be-47b6-9aa1-db19ac1abc7f] Uploaded image b7c22715-2f22-4f13-bc45-6e8b080fba01 to the Glance image server {{(pid=62519) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 2245.282429] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-c66067cb-be45-4328-bea7-bd52333d78a2 tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] [instance: 62ce8eb2-a8be-47b6-9aa1-db19ac1abc7f] Destroying the VM {{(pid=62519) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 2245.282659] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-902a314c-9f1c-4919-9859-8f194beba091 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2245.287941] env[62519]: DEBUG oslo_vmware.api [None req-c66067cb-be45-4328-bea7-bd52333d78a2 tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] Waiting for the task: (returnval){ [ 2245.287941] env[62519]: value = "task-1803659" [ 2245.287941] env[62519]: _type = "Task" [ 2245.287941] env[62519]: } to complete. 
{{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2245.295613] env[62519]: DEBUG oslo_vmware.api [None req-c66067cb-be45-4328-bea7-bd52333d78a2 tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] Task: {'id': task-1803659, 'name': Destroy_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2245.755963] env[62519]: DEBUG oslo_service.periodic_task [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62519) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2245.761569] env[62519]: DEBUG oslo_service.periodic_task [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62519) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2245.761756] env[62519]: DEBUG oslo_service.periodic_task [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Running periodic task ComputeManager.update_available_resource {{(pid=62519) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2245.798247] env[62519]: DEBUG oslo_vmware.api [None req-c66067cb-be45-4328-bea7-bd52333d78a2 tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] Task: {'id': task-1803659, 'name': Destroy_Task, 'duration_secs': 0.346866} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2245.798575] env[62519]: INFO nova.virt.vmwareapi.vm_util [None req-c66067cb-be45-4328-bea7-bd52333d78a2 tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] [instance: 62ce8eb2-a8be-47b6-9aa1-db19ac1abc7f] Destroyed the VM [ 2245.798772] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-c66067cb-be45-4328-bea7-bd52333d78a2 tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] [instance: 62ce8eb2-a8be-47b6-9aa1-db19ac1abc7f] Deleting Snapshot of the VM instance {{(pid=62519) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 2245.799034] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-918b9f51-dccf-4d0a-8a82-914d7c63130f {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2245.806398] env[62519]: DEBUG oslo_vmware.api [None req-c66067cb-be45-4328-bea7-bd52333d78a2 tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] Waiting for the task: (returnval){ [ 2245.806398] env[62519]: value = "task-1803660" [ 2245.806398] env[62519]: _type = "Task" [ 2245.806398] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2245.813840] env[62519]: DEBUG oslo_vmware.api [None req-c66067cb-be45-4328-bea7-bd52333d78a2 tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] Task: {'id': task-1803660, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2246.264897] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2246.265146] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2246.265317] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2246.265474] env[62519]: DEBUG nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62519) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2246.266359] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-958c0e8a-fbc0-40d5-b65f-aab987f3d26f {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2246.275096] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a5f415b-0bf3-48a1-a704-e1fabb8cf73a {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2246.290011] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-971c0d7c-6004-497c-a63e-aed931c855d0 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2246.296051] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-639efec2-9d41-4ef5-8c79-9660209b528f {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2246.325106] env[62519]: DEBUG nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181340MB free_disk=159GB free_vcpus=48 pci_devices=None {{(pid=62519) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2246.325248] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2246.325423] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62519) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2246.335031] env[62519]: DEBUG oslo_vmware.api [None req-c66067cb-be45-4328-bea7-bd52333d78a2 tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] Task: {'id': task-1803660, 'name': RemoveSnapshot_Task, 'duration_secs': 0.348339} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2246.335152] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-c66067cb-be45-4328-bea7-bd52333d78a2 tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] [instance: 62ce8eb2-a8be-47b6-9aa1-db19ac1abc7f] Deleted Snapshot of the VM instance {{(pid=62519) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 2246.335366] env[62519]: DEBUG nova.compute.manager [None req-c66067cb-be45-4328-bea7-bd52333d78a2 tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] [instance: 62ce8eb2-a8be-47b6-9aa1-db19ac1abc7f] Checking state {{(pid=62519) _get_power_state /opt/stack/nova/nova/compute/manager.py:1799}} [ 2246.336083] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-363644ad-3b29-410d-991d-75c1988ff494 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2246.847435] env[62519]: INFO nova.compute.manager [None req-c66067cb-be45-4328-bea7-bd52333d78a2 tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] [instance: 62ce8eb2-a8be-47b6-9aa1-db19ac1abc7f] Shelve offloading [ 2247.352038] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-c66067cb-be45-4328-bea7-bd52333d78a2 tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] [instance: 62ce8eb2-a8be-47b6-9aa1-db19ac1abc7f] Powering off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2247.352038] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-28299507-a684-4df3-aec2-f4da9b9f416b {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2247.355662] env[62519]: DEBUG nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Instance 62ce8eb2-a8be-47b6-9aa1-db19ac1abc7f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62519) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2247.355976] env[62519]: DEBUG nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Total usable vcpus: 48, total allocated vcpus: 1 {{(pid=62519) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2247.356238] env[62519]: DEBUG nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=704MB phys_disk=200GB used_disk=1GB total_vcpus=48 used_vcpus=1 pci_stats=[] {{(pid=62519) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2247.366613] env[62519]: DEBUG oslo_vmware.api [None req-c66067cb-be45-4328-bea7-bd52333d78a2 tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] Waiting for the task: (returnval){ [ 2247.366613] env[62519]: value = "task-1803661" [ 2247.366613] env[62519]: _type = "Task" [ 2247.366613] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2247.383763] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-c66067cb-be45-4328-bea7-bd52333d78a2 tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] [instance: 62ce8eb2-a8be-47b6-9aa1-db19ac1abc7f] VM already powered off {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 2247.384150] env[62519]: DEBUG nova.compute.manager [None req-c66067cb-be45-4328-bea7-bd52333d78a2 tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] [instance: 62ce8eb2-a8be-47b6-9aa1-db19ac1abc7f] Checking state {{(pid=62519) _get_power_state /opt/stack/nova/nova/compute/manager.py:1799}} [ 2247.385375] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ec93926-3459-41dd-8649-f7cb6c8f8611 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2247.391676] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eebf231a-c543-49d7-a5b3-f49e5e0c357a {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2247.399999] env[62519]: DEBUG oslo_concurrency.lockutils [None req-c66067cb-be45-4328-bea7-bd52333d78a2 tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] Acquiring lock "refresh_cache-62ce8eb2-a8be-47b6-9aa1-db19ac1abc7f" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2247.400346] env[62519]: DEBUG oslo_concurrency.lockutils [None req-c66067cb-be45-4328-bea7-bd52333d78a2 tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] Acquired lock "refresh_cache-62ce8eb2-a8be-47b6-9aa1-db19ac1abc7f" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2247.400658] env[62519]: DEBUG nova.network.neutron [None req-c66067cb-be45-4328-bea7-bd52333d78a2 tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] [instance: 62ce8eb2-a8be-47b6-9aa1-db19ac1abc7f] Building network info cache for instance 
{{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2247.407031] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6eea7571-2a79-4862-92a8-5d4731ec4a93 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2247.441804] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e30a5564-030b-4b4f-8fa6-56f90fd5657c {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2247.449498] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5192e540-6a4e-48a0-8c30-873579d2e503 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2247.464707] env[62519]: DEBUG nova.compute.provider_tree [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Inventory has not changed in ProviderTree for provider: f8ca0d98-9158-4b85-ae0e-b106f966dd44 {{(pid=62519) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2247.967971] env[62519]: DEBUG nova.scheduler.client.report [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Inventory has not changed for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 159, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2248.103244] env[62519]: DEBUG nova.network.neutron [None req-c66067cb-be45-4328-bea7-bd52333d78a2 tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] [instance: 62ce8eb2-a8be-47b6-9aa1-db19ac1abc7f] Updating instance_info_cache with network_info: [{"id": "7c4c269b-6217-44c2-bdbc-b65d2acbe55c", "address": "fa:16:3e:45:6f:e7", "network": {"id": "5d3d6d9a-2590-4b05-9c98-b532da472e0f", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-748470245-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4dedfa5c40474c0ea08368d61e1ff3a1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e238ac23-819b-452f-9015-52922e45efd3", "external-id": "nsx-vlan-transportzone-127", "segmentation_id": 127, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7c4c269b-62", "ovs_interfaceid": "7c4c269b-6217-44c2-bdbc-b65d2acbe55c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2248.472939] env[62519]: DEBUG nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None 
None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62519) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2248.473192] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.148s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2248.606347] env[62519]: DEBUG oslo_concurrency.lockutils [None req-c66067cb-be45-4328-bea7-bd52333d78a2 tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] Releasing lock "refresh_cache-62ce8eb2-a8be-47b6-9aa1-db19ac1abc7f" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2248.843229] env[62519]: DEBUG nova.compute.manager [req-708b4bb4-5aa8-4416-bd0b-0f110b2f94a8 req-b168c8ba-1451-4693-b43e-9f78511042d4 service nova] [instance: 62ce8eb2-a8be-47b6-9aa1-db19ac1abc7f] Received event network-vif-unplugged-7c4c269b-6217-44c2-bdbc-b65d2acbe55c {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 2248.843431] env[62519]: DEBUG oslo_concurrency.lockutils [req-708b4bb4-5aa8-4416-bd0b-0f110b2f94a8 req-b168c8ba-1451-4693-b43e-9f78511042d4 service nova] Acquiring lock "62ce8eb2-a8be-47b6-9aa1-db19ac1abc7f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2248.843702] env[62519]: DEBUG oslo_concurrency.lockutils [req-708b4bb4-5aa8-4416-bd0b-0f110b2f94a8 req-b168c8ba-1451-4693-b43e-9f78511042d4 service nova] Lock "62ce8eb2-a8be-47b6-9aa1-db19ac1abc7f-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2248.843885] env[62519]: DEBUG oslo_concurrency.lockutils [req-708b4bb4-5aa8-4416-bd0b-0f110b2f94a8 req-b168c8ba-1451-4693-b43e-9f78511042d4 service nova] Lock "62ce8eb2-a8be-47b6-9aa1-db19ac1abc7f-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2248.844061] env[62519]: DEBUG nova.compute.manager [req-708b4bb4-5aa8-4416-bd0b-0f110b2f94a8 req-b168c8ba-1451-4693-b43e-9f78511042d4 service nova] [instance: 62ce8eb2-a8be-47b6-9aa1-db19ac1abc7f] No waiting events found dispatching network-vif-unplugged-7c4c269b-6217-44c2-bdbc-b65d2acbe55c {{(pid=62519) pop_instance_event /opt/stack/nova/nova/compute/manager.py:323}} [ 2248.844230] env[62519]: WARNING nova.compute.manager [req-708b4bb4-5aa8-4416-bd0b-0f110b2f94a8 req-b168c8ba-1451-4693-b43e-9f78511042d4 service nova] [instance: 62ce8eb2-a8be-47b6-9aa1-db19ac1abc7f] Received unexpected event network-vif-unplugged-7c4c269b-6217-44c2-bdbc-b65d2acbe55c for instance with vm_state shelved and task_state shelving_offloading. 
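The triplets of DEBUG lines around the locks above ('Acquiring lock "..." by "..."', 'Lock "..." acquired ... waited 0.000s', 'Lock "..." "released" ... held 0.000s') are emitted by oslo.concurrency's synchronized decorator wrapping the named callables. A minimal sketch of that pattern, illustrative only and not Nova's actual resource-tracker or event-handling code (the lock name and function below are placeholders):

    from oslo_concurrency import lockutils

    # A callable decorated this way emits the three DEBUG lines seen in the log:
    # 'Acquiring lock "<name>" by "<function>"', then '... acquired ... waited X.XXXs'
    # once the in-process semaphore is obtained, and '... "released" ... held X.XXXs'
    # when the wrapped call returns.
    @lockutils.synchronized('compute_resources')
    def update_usage():
        # Critical section: one thread/greenthread per process at a time.
        pass

    update_usage()

The waited/held durations reported above for locks such as "compute_resources" and "62ce8eb2-a8be-47b6-9aa1-db19ac1abc7f-events" are exactly these timers.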
[ 2248.862430] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-c66067cb-be45-4328-bea7-bd52333d78a2 tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] [instance: 62ce8eb2-a8be-47b6-9aa1-db19ac1abc7f] Destroying instance {{(pid=62519) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2248.863284] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a694cd01-feda-4d76-b0a7-f79770cc4e37 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2248.870742] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-c66067cb-be45-4328-bea7-bd52333d78a2 tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] [instance: 62ce8eb2-a8be-47b6-9aa1-db19ac1abc7f] Unregistering the VM {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2248.870960] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-26854f98-aa04-4134-ad69-b2a5ca1ad67e {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2248.950321] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-c66067cb-be45-4328-bea7-bd52333d78a2 tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] [instance: 62ce8eb2-a8be-47b6-9aa1-db19ac1abc7f] Unregistered the VM {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2248.950483] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-c66067cb-be45-4328-bea7-bd52333d78a2 tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] [instance: 62ce8eb2-a8be-47b6-9aa1-db19ac1abc7f] Deleting contents of the VM from datastore datastore1 {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2248.950661] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-c66067cb-be45-4328-bea7-bd52333d78a2 tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] Deleting the datastore file [datastore1] 62ce8eb2-a8be-47b6-9aa1-db19ac1abc7f {{(pid=62519) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2248.950926] env[62519]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d61ad60f-d979-45f7-89fe-54bbd80231d1 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2248.957417] env[62519]: DEBUG oslo_vmware.api [None req-c66067cb-be45-4328-bea7-bd52333d78a2 tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] Waiting for the task: (returnval){ [ 2248.957417] env[62519]: value = "task-1803663" [ 2248.957417] env[62519]: _type = "Task" [ 2248.957417] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2248.964630] env[62519]: DEBUG oslo_vmware.api [None req-c66067cb-be45-4328-bea7-bd52333d78a2 tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] Task: {'id': task-1803663, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2249.467584] env[62519]: DEBUG oslo_vmware.api [None req-c66067cb-be45-4328-bea7-bd52333d78a2 tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] Task: {'id': task-1803663, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.135256} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2249.467939] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-c66067cb-be45-4328-bea7-bd52333d78a2 tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] Deleted the datastore file {{(pid=62519) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2249.468079] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-c66067cb-be45-4328-bea7-bd52333d78a2 tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] [instance: 62ce8eb2-a8be-47b6-9aa1-db19ac1abc7f] Deleted contents of the VM from datastore datastore1 {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2249.468231] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-c66067cb-be45-4328-bea7-bd52333d78a2 tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] [instance: 62ce8eb2-a8be-47b6-9aa1-db19ac1abc7f] Instance destroyed {{(pid=62519) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2249.486342] env[62519]: INFO nova.scheduler.client.report [None req-c66067cb-be45-4328-bea7-bd52333d78a2 tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] Deleted allocations for instance 62ce8eb2-a8be-47b6-9aa1-db19ac1abc7f [ 2249.991639] env[62519]: DEBUG oslo_concurrency.lockutils [None req-c66067cb-be45-4328-bea7-bd52333d78a2 tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2249.991936] env[62519]: DEBUG oslo_concurrency.lockutils [None req-c66067cb-be45-4328-bea7-bd52333d78a2 tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2249.992171] env[62519]: DEBUG nova.objects.instance [None req-c66067cb-be45-4328-bea7-bd52333d78a2 tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] Lazy-loading 'resources' on Instance uuid 62ce8eb2-a8be-47b6-9aa1-db19ac1abc7f {{(pid=62519) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2250.494706] env[62519]: DEBUG nova.objects.instance [None req-c66067cb-be45-4328-bea7-bd52333d78a2 tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] Lazy-loading 'numa_topology' on Instance uuid 62ce8eb2-a8be-47b6-9aa1-db19ac1abc7f {{(pid=62519) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2250.874201] env[62519]: DEBUG nova.compute.manager [req-3252923d-bdfc-49d5-9076-7e716b9a7c53 req-5fafb4e7-f4bb-4edc-adf9-0b4679a92e46 service nova] 
[instance: 62ce8eb2-a8be-47b6-9aa1-db19ac1abc7f] Received event network-changed-7c4c269b-6217-44c2-bdbc-b65d2acbe55c {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 2250.874419] env[62519]: DEBUG nova.compute.manager [req-3252923d-bdfc-49d5-9076-7e716b9a7c53 req-5fafb4e7-f4bb-4edc-adf9-0b4679a92e46 service nova] [instance: 62ce8eb2-a8be-47b6-9aa1-db19ac1abc7f] Refreshing instance network info cache due to event network-changed-7c4c269b-6217-44c2-bdbc-b65d2acbe55c. {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11628}} [ 2250.874641] env[62519]: DEBUG oslo_concurrency.lockutils [req-3252923d-bdfc-49d5-9076-7e716b9a7c53 req-5fafb4e7-f4bb-4edc-adf9-0b4679a92e46 service nova] Acquiring lock "refresh_cache-62ce8eb2-a8be-47b6-9aa1-db19ac1abc7f" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2250.874786] env[62519]: DEBUG oslo_concurrency.lockutils [req-3252923d-bdfc-49d5-9076-7e716b9a7c53 req-5fafb4e7-f4bb-4edc-adf9-0b4679a92e46 service nova] Acquired lock "refresh_cache-62ce8eb2-a8be-47b6-9aa1-db19ac1abc7f" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2250.874951] env[62519]: DEBUG nova.network.neutron [req-3252923d-bdfc-49d5-9076-7e716b9a7c53 req-5fafb4e7-f4bb-4edc-adf9-0b4679a92e46 service nova] [instance: 62ce8eb2-a8be-47b6-9aa1-db19ac1abc7f] Refreshing network info cache for port 7c4c269b-6217-44c2-bdbc-b65d2acbe55c {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2250.997570] env[62519]: DEBUG nova.objects.base [None req-c66067cb-be45-4328-bea7-bd52333d78a2 tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] Object Instance<62ce8eb2-a8be-47b6-9aa1-db19ac1abc7f> lazy-loaded attributes: resources,numa_topology {{(pid=62519) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 2251.027852] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a346e948-19f2-4ec4-925f-5dc405295ed2 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2251.036493] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a8b3b1f-3f05-4793-84e0-497026af84f9 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2251.066296] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc9ab48e-6914-481b-bfde-878c16ec7cd1 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2251.074125] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-202ec418-bbc6-4ac9-96bb-793f4a209297 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2251.087448] env[62519]: DEBUG nova.compute.provider_tree [None req-c66067cb-be45-4328-bea7-bd52333d78a2 tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] Inventory has not changed in ProviderTree for provider: f8ca0d98-9158-4b85-ae0e-b106f966dd44 {{(pid=62519) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2251.473087] env[62519]: DEBUG oslo_service.periodic_task [None 
req-2e346fd1-4f93-4335-96df-208436219ecf None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62519) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2251.473267] env[62519]: DEBUG nova.compute.manager [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Starting heal instance info cache {{(pid=62519) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10418}} [ 2251.507906] env[62519]: DEBUG oslo_concurrency.lockutils [None req-b935ed58-1ec0-455c-8671-47dd39a3b798 tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] Acquiring lock "62ce8eb2-a8be-47b6-9aa1-db19ac1abc7f" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2251.570843] env[62519]: DEBUG nova.network.neutron [req-3252923d-bdfc-49d5-9076-7e716b9a7c53 req-5fafb4e7-f4bb-4edc-adf9-0b4679a92e46 service nova] [instance: 62ce8eb2-a8be-47b6-9aa1-db19ac1abc7f] Updated VIF entry in instance network info cache for port 7c4c269b-6217-44c2-bdbc-b65d2acbe55c. {{(pid=62519) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2251.571201] env[62519]: DEBUG nova.network.neutron [req-3252923d-bdfc-49d5-9076-7e716b9a7c53 req-5fafb4e7-f4bb-4edc-adf9-0b4679a92e46 service nova] [instance: 62ce8eb2-a8be-47b6-9aa1-db19ac1abc7f] Updating instance_info_cache with network_info: [{"id": "7c4c269b-6217-44c2-bdbc-b65d2acbe55c", "address": "fa:16:3e:45:6f:e7", "network": {"id": "5d3d6d9a-2590-4b05-9c98-b532da472e0f", "bridge": null, "label": "tempest-ServersNegativeTestJSON-748470245-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4dedfa5c40474c0ea08368d61e1ff3a1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "unbound", "details": {}, "devname": "tap7c4c269b-62", "ovs_interfaceid": null, "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2251.590575] env[62519]: DEBUG nova.scheduler.client.report [None req-c66067cb-be45-4328-bea7-bd52333d78a2 tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] Inventory has not changed for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 159, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2251.976542] env[62519]: DEBUG nova.compute.manager [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Didn't find any instances for network info cache update. 
{{(pid=62519) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10504}} [ 2251.976850] env[62519]: DEBUG oslo_service.periodic_task [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62519) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2251.977476] env[62519]: DEBUG oslo_service.periodic_task [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62519) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2251.977476] env[62519]: DEBUG oslo_service.periodic_task [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62519) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2251.977476] env[62519]: DEBUG nova.compute.manager [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=62519) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11037}} [ 2252.073974] env[62519]: DEBUG oslo_concurrency.lockutils [req-3252923d-bdfc-49d5-9076-7e716b9a7c53 req-5fafb4e7-f4bb-4edc-adf9-0b4679a92e46 service nova] Releasing lock "refresh_cache-62ce8eb2-a8be-47b6-9aa1-db19ac1abc7f" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2252.096072] env[62519]: DEBUG oslo_concurrency.lockutils [None req-c66067cb-be45-4328-bea7-bd52333d78a2 tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.104s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2252.603303] env[62519]: DEBUG oslo_concurrency.lockutils [None req-c66067cb-be45-4328-bea7-bd52333d78a2 tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] Lock "62ce8eb2-a8be-47b6-9aa1-db19ac1abc7f" "released" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: held 20.057s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2252.604253] env[62519]: DEBUG oslo_concurrency.lockutils [None req-b935ed58-1ec0-455c-8671-47dd39a3b798 tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] Lock "62ce8eb2-a8be-47b6-9aa1-db19ac1abc7f" acquired by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: waited 1.096s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2252.604315] env[62519]: INFO nova.compute.manager [None req-b935ed58-1ec0-455c-8671-47dd39a3b798 tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] [instance: 62ce8eb2-a8be-47b6-9aa1-db19ac1abc7f] Unshelving [ 2253.625212] env[62519]: DEBUG oslo_concurrency.lockutils [None req-b935ed58-1ec0-455c-8671-47dd39a3b798 tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62519) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2253.625472] env[62519]: DEBUG oslo_concurrency.lockutils [None req-b935ed58-1ec0-455c-8671-47dd39a3b798 tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2253.625668] env[62519]: DEBUG nova.objects.instance [None req-b935ed58-1ec0-455c-8671-47dd39a3b798 tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] Lazy-loading 'pci_requests' on Instance uuid 62ce8eb2-a8be-47b6-9aa1-db19ac1abc7f {{(pid=62519) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2254.129373] env[62519]: DEBUG nova.objects.instance [None req-b935ed58-1ec0-455c-8671-47dd39a3b798 tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] Lazy-loading 'numa_topology' on Instance uuid 62ce8eb2-a8be-47b6-9aa1-db19ac1abc7f {{(pid=62519) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2254.631712] env[62519]: INFO nova.compute.claims [None req-b935ed58-1ec0-455c-8671-47dd39a3b798 tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] [instance: 62ce8eb2-a8be-47b6-9aa1-db19ac1abc7f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2255.667657] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c44d5f6-cf9a-40a9-b02c-caa5bcb05361 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2255.675514] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-add2e12b-c888-4acb-a2ba-1648092de89d {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2255.705159] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a753c87-e56a-4162-ba42-eb95c1814910 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2255.712068] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-519d3deb-994a-4587-9853-d88f7e32ada0 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2255.727094] env[62519]: DEBUG nova.compute.provider_tree [None req-b935ed58-1ec0-455c-8671-47dd39a3b798 tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] Inventory has not changed in ProviderTree for provider: f8ca0d98-9158-4b85-ae0e-b106f966dd44 {{(pid=62519) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2256.230219] env[62519]: DEBUG nova.scheduler.client.report [None req-b935ed58-1ec0-455c-8671-47dd39a3b798 tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] Inventory has not changed for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 
'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 159, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2256.735711] env[62519]: DEBUG oslo_concurrency.lockutils [None req-b935ed58-1ec0-455c-8671-47dd39a3b798 tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.110s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2256.763209] env[62519]: INFO nova.network.neutron [None req-b935ed58-1ec0-455c-8671-47dd39a3b798 tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] [instance: 62ce8eb2-a8be-47b6-9aa1-db19ac1abc7f] Updating port 7c4c269b-6217-44c2-bdbc-b65d2acbe55c with attributes {'binding:host_id': 'cpu-1', 'device_owner': 'compute:nova'} [ 2258.116122] env[62519]: DEBUG nova.compute.manager [req-85d64541-e2cc-44e3-99bf-a22aa2c1d81e req-ce49c0f0-dd06-42bd-b1cf-41be7835c198 service nova] [instance: 62ce8eb2-a8be-47b6-9aa1-db19ac1abc7f] Received event network-vif-plugged-7c4c269b-6217-44c2-bdbc-b65d2acbe55c {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 2258.116370] env[62519]: DEBUG oslo_concurrency.lockutils [req-85d64541-e2cc-44e3-99bf-a22aa2c1d81e req-ce49c0f0-dd06-42bd-b1cf-41be7835c198 service nova] Acquiring lock "62ce8eb2-a8be-47b6-9aa1-db19ac1abc7f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2258.116555] env[62519]: DEBUG oslo_concurrency.lockutils [req-85d64541-e2cc-44e3-99bf-a22aa2c1d81e req-ce49c0f0-dd06-42bd-b1cf-41be7835c198 service nova] Lock "62ce8eb2-a8be-47b6-9aa1-db19ac1abc7f-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2258.116723] env[62519]: DEBUG oslo_concurrency.lockutils [req-85d64541-e2cc-44e3-99bf-a22aa2c1d81e req-ce49c0f0-dd06-42bd-b1cf-41be7835c198 service nova] Lock "62ce8eb2-a8be-47b6-9aa1-db19ac1abc7f-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2258.116908] env[62519]: DEBUG nova.compute.manager [req-85d64541-e2cc-44e3-99bf-a22aa2c1d81e req-ce49c0f0-dd06-42bd-b1cf-41be7835c198 service nova] [instance: 62ce8eb2-a8be-47b6-9aa1-db19ac1abc7f] No waiting events found dispatching network-vif-plugged-7c4c269b-6217-44c2-bdbc-b65d2acbe55c {{(pid=62519) pop_instance_event /opt/stack/nova/nova/compute/manager.py:323}} [ 2258.117095] env[62519]: WARNING nova.compute.manager [req-85d64541-e2cc-44e3-99bf-a22aa2c1d81e req-ce49c0f0-dd06-42bd-b1cf-41be7835c198 service nova] [instance: 62ce8eb2-a8be-47b6-9aa1-db19ac1abc7f] Received unexpected event network-vif-plugged-7c4c269b-6217-44c2-bdbc-b65d2acbe55c for instance with vm_state shelved_offloaded and task_state spawning. 
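The inventory payload that the report client keeps finding unchanged for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 maps to schedulable capacity in Placement as roughly (total - reserved) * allocation_ratio per resource class. A small illustrative calculation (not Nova or Placement code) using the values logged above:

    # Inventory as reported above for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44.
    inventory = {
        'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
    }

    for rc, inv in inventory.items():
        capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
        print(f"{rc}: {capacity:g} schedulable units")

    # VCPU: 192, MEMORY_MB: 196078, DISK_GB: 400 -- against which the shelved
    # instance's allocation of {'VCPU': 1, 'MEMORY_MB': 192, 'DISK_GB': 1} is claimed.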
[ 2258.202928] env[62519]: DEBUG oslo_concurrency.lockutils [None req-b935ed58-1ec0-455c-8671-47dd39a3b798 tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] Acquiring lock "refresh_cache-62ce8eb2-a8be-47b6-9aa1-db19ac1abc7f" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2258.203051] env[62519]: DEBUG oslo_concurrency.lockutils [None req-b935ed58-1ec0-455c-8671-47dd39a3b798 tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] Acquired lock "refresh_cache-62ce8eb2-a8be-47b6-9aa1-db19ac1abc7f" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2258.203248] env[62519]: DEBUG nova.network.neutron [None req-b935ed58-1ec0-455c-8671-47dd39a3b798 tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] [instance: 62ce8eb2-a8be-47b6-9aa1-db19ac1abc7f] Building network info cache for instance {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2258.886515] env[62519]: DEBUG nova.network.neutron [None req-b935ed58-1ec0-455c-8671-47dd39a3b798 tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] [instance: 62ce8eb2-a8be-47b6-9aa1-db19ac1abc7f] Updating instance_info_cache with network_info: [{"id": "7c4c269b-6217-44c2-bdbc-b65d2acbe55c", "address": "fa:16:3e:45:6f:e7", "network": {"id": "5d3d6d9a-2590-4b05-9c98-b532da472e0f", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-748470245-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4dedfa5c40474c0ea08368d61e1ff3a1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e238ac23-819b-452f-9015-52922e45efd3", "external-id": "nsx-vlan-transportzone-127", "segmentation_id": 127, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7c4c269b-62", "ovs_interfaceid": "7c4c269b-6217-44c2-bdbc-b65d2acbe55c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2259.389615] env[62519]: DEBUG oslo_concurrency.lockutils [None req-b935ed58-1ec0-455c-8671-47dd39a3b798 tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] Releasing lock "refresh_cache-62ce8eb2-a8be-47b6-9aa1-db19ac1abc7f" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2259.416981] env[62519]: DEBUG nova.virt.hardware [None req-b935ed58-1ec0-455c-8671-47dd39a3b798 tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] Getting desirable topologies for flavor 
Flavor(created_at=2024-12-12T07:56:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='dc0a6bd6dea1366bae39df794cc73060',container_format='bare',created_at=2024-12-12T08:25:15Z,direct_url=,disk_format='vmdk',id=b7c22715-2f22-4f13-bc45-6e8b080fba01,min_disk=1,min_ram=0,name='tempest-ServersNegativeTestJSON-server-2036245122-shelved',owner='4dedfa5c40474c0ea08368d61e1ff3a1',properties=ImageMetaProps,protected=,size=31595520,status='active',tags=,updated_at=2024-12-12T08:25:29Z,virtual_size=,visibility=), allow threads: False {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 2259.417263] env[62519]: DEBUG nova.virt.hardware [None req-b935ed58-1ec0-455c-8671-47dd39a3b798 tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] Flavor limits 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2259.417421] env[62519]: DEBUG nova.virt.hardware [None req-b935ed58-1ec0-455c-8671-47dd39a3b798 tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] Image limits 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 2259.417600] env[62519]: DEBUG nova.virt.hardware [None req-b935ed58-1ec0-455c-8671-47dd39a3b798 tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] Flavor pref 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2259.417743] env[62519]: DEBUG nova.virt.hardware [None req-b935ed58-1ec0-455c-8671-47dd39a3b798 tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] Image pref 0:0:0 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 2259.417925] env[62519]: DEBUG nova.virt.hardware [None req-b935ed58-1ec0-455c-8671-47dd39a3b798 tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62519) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 2259.418172] env[62519]: DEBUG nova.virt.hardware [None req-b935ed58-1ec0-455c-8671-47dd39a3b798 tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 2259.418332] env[62519]: DEBUG nova.virt.hardware [None req-b935ed58-1ec0-455c-8671-47dd39a3b798 tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62519) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 2259.418497] env[62519]: DEBUG nova.virt.hardware [None req-b935ed58-1ec0-455c-8671-47dd39a3b798 tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] Got 1 possible topologies {{(pid=62519) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 2259.418658] env[62519]: DEBUG 
nova.virt.hardware [None req-b935ed58-1ec0-455c-8671-47dd39a3b798 tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 2259.418832] env[62519]: DEBUG nova.virt.hardware [None req-b935ed58-1ec0-455c-8671-47dd39a3b798 tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62519) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 2259.419700] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da81fb15-f883-4a2f-aa18-f0c958d7b33f {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2259.427548] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6fabef0-a054-42b5-af6d-427e7021ff80 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2259.440741] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-b935ed58-1ec0-455c-8671-47dd39a3b798 tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] [instance: 62ce8eb2-a8be-47b6-9aa1-db19ac1abc7f] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:45:6f:e7', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'e238ac23-819b-452f-9015-52922e45efd3', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '7c4c269b-6217-44c2-bdbc-b65d2acbe55c', 'vif_model': 'vmxnet3'}] {{(pid=62519) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2259.447938] env[62519]: DEBUG oslo.service.loopingcall [None req-b935ed58-1ec0-455c-8671-47dd39a3b798 tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62519) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2259.448166] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 62ce8eb2-a8be-47b6-9aa1-db19ac1abc7f] Creating VM on the ESX host {{(pid=62519) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2259.448363] env[62519]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c5da690b-5acf-48d4-9b34-afd9d32625d1 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2259.467281] env[62519]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2259.467281] env[62519]: value = "task-1803664" [ 2259.467281] env[62519]: _type = "Task" [ 2259.467281] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2259.474272] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1803664, 'name': CreateVM_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2259.977531] env[62519]: DEBUG oslo_vmware.api [-] Task: {'id': task-1803664, 'name': CreateVM_Task, 'duration_secs': 0.396573} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2259.977714] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 62ce8eb2-a8be-47b6-9aa1-db19ac1abc7f] Created VM on the ESX host {{(pid=62519) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2259.978409] env[62519]: DEBUG oslo_concurrency.lockutils [None req-b935ed58-1ec0-455c-8671-47dd39a3b798 tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b7c22715-2f22-4f13-bc45-6e8b080fba01" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2259.978571] env[62519]: DEBUG oslo_concurrency.lockutils [None req-b935ed58-1ec0-455c-8671-47dd39a3b798 tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b7c22715-2f22-4f13-bc45-6e8b080fba01" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2259.978957] env[62519]: DEBUG oslo_concurrency.lockutils [None req-b935ed58-1ec0-455c-8671-47dd39a3b798 tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/b7c22715-2f22-4f13-bc45-6e8b080fba01" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2259.979216] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-53c71a92-6a98-48de-8fc2-fc05c975a1c4 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2259.983408] env[62519]: DEBUG oslo_vmware.api [None req-b935ed58-1ec0-455c-8671-47dd39a3b798 tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] Waiting for the task: (returnval){ [ 2259.983408] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]5226197a-fb05-603e-4870-f882b0c15735" [ 2259.983408] env[62519]: _type = "Task" [ 2259.983408] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2259.990447] env[62519]: DEBUG oslo_vmware.api [None req-b935ed58-1ec0-455c-8671-47dd39a3b798 tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]5226197a-fb05-603e-4870-f882b0c15735, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2260.139760] env[62519]: DEBUG nova.compute.manager [req-7ab2d7ca-8ac5-49a1-b65e-1e54264cb68e req-ffbe6a79-e8ea-4c4e-b261-0650a9a4df86 service nova] [instance: 62ce8eb2-a8be-47b6-9aa1-db19ac1abc7f] Received event network-changed-7c4c269b-6217-44c2-bdbc-b65d2acbe55c {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 2260.139960] env[62519]: DEBUG nova.compute.manager [req-7ab2d7ca-8ac5-49a1-b65e-1e54264cb68e req-ffbe6a79-e8ea-4c4e-b261-0650a9a4df86 service nova] [instance: 62ce8eb2-a8be-47b6-9aa1-db19ac1abc7f] Refreshing instance network info cache due to event network-changed-7c4c269b-6217-44c2-bdbc-b65d2acbe55c. 
{{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11628}} [ 2260.140210] env[62519]: DEBUG oslo_concurrency.lockutils [req-7ab2d7ca-8ac5-49a1-b65e-1e54264cb68e req-ffbe6a79-e8ea-4c4e-b261-0650a9a4df86 service nova] Acquiring lock "refresh_cache-62ce8eb2-a8be-47b6-9aa1-db19ac1abc7f" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2260.140397] env[62519]: DEBUG oslo_concurrency.lockutils [req-7ab2d7ca-8ac5-49a1-b65e-1e54264cb68e req-ffbe6a79-e8ea-4c4e-b261-0650a9a4df86 service nova] Acquired lock "refresh_cache-62ce8eb2-a8be-47b6-9aa1-db19ac1abc7f" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2260.140501] env[62519]: DEBUG nova.network.neutron [req-7ab2d7ca-8ac5-49a1-b65e-1e54264cb68e req-ffbe6a79-e8ea-4c4e-b261-0650a9a4df86 service nova] [instance: 62ce8eb2-a8be-47b6-9aa1-db19ac1abc7f] Refreshing network info cache for port 7c4c269b-6217-44c2-bdbc-b65d2acbe55c {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2260.494176] env[62519]: DEBUG oslo_concurrency.lockutils [None req-b935ed58-1ec0-455c-8671-47dd39a3b798 tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b7c22715-2f22-4f13-bc45-6e8b080fba01" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2260.494440] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-b935ed58-1ec0-455c-8671-47dd39a3b798 tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] [instance: 62ce8eb2-a8be-47b6-9aa1-db19ac1abc7f] Processing image b7c22715-2f22-4f13-bc45-6e8b080fba01 {{(pid=62519) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2260.494654] env[62519]: DEBUG oslo_concurrency.lockutils [None req-b935ed58-1ec0-455c-8671-47dd39a3b798 tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b7c22715-2f22-4f13-bc45-6e8b080fba01/b7c22715-2f22-4f13-bc45-6e8b080fba01.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2260.494804] env[62519]: DEBUG oslo_concurrency.lockutils [None req-b935ed58-1ec0-455c-8671-47dd39a3b798 tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b7c22715-2f22-4f13-bc45-6e8b080fba01/b7c22715-2f22-4f13-bc45-6e8b080fba01.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2260.494977] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-b935ed58-1ec0-455c-8671-47dd39a3b798 tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62519) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2260.495248] env[62519]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ba17519c-0ad8-4cb5-ac51-75dbccc97b50 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2260.505091] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-b935ed58-1ec0-455c-8671-47dd39a3b798 
tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62519) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2260.505260] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-b935ed58-1ec0-455c-8671-47dd39a3b798 tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62519) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2260.506059] env[62519]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fb0623bf-f368-47d7-bdc3-530b1108c3e2 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2260.511020] env[62519]: DEBUG oslo_vmware.api [None req-b935ed58-1ec0-455c-8671-47dd39a3b798 tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] Waiting for the task: (returnval){ [ 2260.511020] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]52036e04-88eb-76ae-325b-c2fff0af12e1" [ 2260.511020] env[62519]: _type = "Task" [ 2260.511020] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2260.518308] env[62519]: DEBUG oslo_vmware.api [None req-b935ed58-1ec0-455c-8671-47dd39a3b798 tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] Task: {'id': session[520dffeb-48b6-6e75-74d3-efedab77eb43]52036e04-88eb-76ae-325b-c2fff0af12e1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2260.815986] env[62519]: DEBUG nova.network.neutron [req-7ab2d7ca-8ac5-49a1-b65e-1e54264cb68e req-ffbe6a79-e8ea-4c4e-b261-0650a9a4df86 service nova] [instance: 62ce8eb2-a8be-47b6-9aa1-db19ac1abc7f] Updated VIF entry in instance network info cache for port 7c4c269b-6217-44c2-bdbc-b65d2acbe55c. 
{{(pid=62519) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2260.816358] env[62519]: DEBUG nova.network.neutron [req-7ab2d7ca-8ac5-49a1-b65e-1e54264cb68e req-ffbe6a79-e8ea-4c4e-b261-0650a9a4df86 service nova] [instance: 62ce8eb2-a8be-47b6-9aa1-db19ac1abc7f] Updating instance_info_cache with network_info: [{"id": "7c4c269b-6217-44c2-bdbc-b65d2acbe55c", "address": "fa:16:3e:45:6f:e7", "network": {"id": "5d3d6d9a-2590-4b05-9c98-b532da472e0f", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-748470245-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4dedfa5c40474c0ea08368d61e1ff3a1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e238ac23-819b-452f-9015-52922e45efd3", "external-id": "nsx-vlan-transportzone-127", "segmentation_id": 127, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7c4c269b-62", "ovs_interfaceid": "7c4c269b-6217-44c2-bdbc-b65d2acbe55c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2261.021442] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-b935ed58-1ec0-455c-8671-47dd39a3b798 tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] [instance: 62ce8eb2-a8be-47b6-9aa1-db19ac1abc7f] Preparing fetch location {{(pid=62519) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2261.021709] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-b935ed58-1ec0-455c-8671-47dd39a3b798 tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] [instance: 62ce8eb2-a8be-47b6-9aa1-db19ac1abc7f] Fetch image to [datastore1] OSTACK_IMG_24029bf1-27a0-4ebb-a65a-24b0dc6ef8c7/OSTACK_IMG_24029bf1-27a0-4ebb-a65a-24b0dc6ef8c7.vmdk {{(pid=62519) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2261.021895] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-b935ed58-1ec0-455c-8671-47dd39a3b798 tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] [instance: 62ce8eb2-a8be-47b6-9aa1-db19ac1abc7f] Downloading stream optimized image b7c22715-2f22-4f13-bc45-6e8b080fba01 to [datastore1] OSTACK_IMG_24029bf1-27a0-4ebb-a65a-24b0dc6ef8c7/OSTACK_IMG_24029bf1-27a0-4ebb-a65a-24b0dc6ef8c7.vmdk on the data store datastore1 as vApp {{(pid=62519) _fetch_image_as_vapp /opt/stack/nova/nova/virt/vmwareapi/vmops.py:437}} [ 2261.023049] env[62519]: DEBUG nova.virt.vmwareapi.images [None req-b935ed58-1ec0-455c-8671-47dd39a3b798 tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] [instance: 62ce8eb2-a8be-47b6-9aa1-db19ac1abc7f] Downloading image file data b7c22715-2f22-4f13-bc45-6e8b080fba01 to the ESX as VM named 'OSTACK_IMG_24029bf1-27a0-4ebb-a65a-24b0dc6ef8c7' {{(pid=62519) fetch_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:336}} [ 2261.084245] env[62519]: DEBUG 
oslo_vmware.rw_handles [None req-b935ed58-1ec0-455c-8671-47dd39a3b798 tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] Creating HttpNfcLease lease for vApp import into resource pool: (val){ [ 2261.084245] env[62519]: value = "resgroup-9" [ 2261.084245] env[62519]: _type = "ResourcePool" [ 2261.084245] env[62519]: }. {{(pid=62519) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:453}} [ 2261.084501] env[62519]: DEBUG oslo_vmware.service [-] Invoking ResourcePool.ImportVApp with opID=oslo.vmware-01f79777-0cc7-4c6f-a45a-b60dc3fa9d6a {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2261.104022] env[62519]: DEBUG oslo_vmware.rw_handles [None req-b935ed58-1ec0-455c-8671-47dd39a3b798 tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] Lease: (returnval){ [ 2261.104022] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]529ad8ef-cbe7-1f46-a110-5acf30dd4ac5" [ 2261.104022] env[62519]: _type = "HttpNfcLease" [ 2261.104022] env[62519]: } obtained for vApp import into resource pool (val){ [ 2261.104022] env[62519]: value = "resgroup-9" [ 2261.104022] env[62519]: _type = "ResourcePool" [ 2261.104022] env[62519]: }. {{(pid=62519) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:461}} [ 2261.104283] env[62519]: DEBUG oslo_vmware.api [None req-b935ed58-1ec0-455c-8671-47dd39a3b798 tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] Waiting for the lease: (returnval){ [ 2261.104283] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]529ad8ef-cbe7-1f46-a110-5acf30dd4ac5" [ 2261.104283] env[62519]: _type = "HttpNfcLease" [ 2261.104283] env[62519]: } to be ready. {{(pid=62519) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 2261.110228] env[62519]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 2261.110228] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]529ad8ef-cbe7-1f46-a110-5acf30dd4ac5" [ 2261.110228] env[62519]: _type = "HttpNfcLease" [ 2261.110228] env[62519]: } is initializing. {{(pid=62519) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 2261.260098] env[62519]: DEBUG oslo_service.periodic_task [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=62519) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2261.318546] env[62519]: DEBUG oslo_concurrency.lockutils [req-7ab2d7ca-8ac5-49a1-b65e-1e54264cb68e req-ffbe6a79-e8ea-4c4e-b261-0650a9a4df86 service nova] Releasing lock "refresh_cache-62ce8eb2-a8be-47b6-9aa1-db19ac1abc7f" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2261.612594] env[62519]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 2261.612594] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]529ad8ef-cbe7-1f46-a110-5acf30dd4ac5" [ 2261.612594] env[62519]: _type = "HttpNfcLease" [ 2261.612594] env[62519]: } is ready. 
{{(pid=62519) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 2261.613018] env[62519]: DEBUG oslo_vmware.rw_handles [None req-b935ed58-1ec0-455c-8671-47dd39a3b798 tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 2261.613018] env[62519]: value = "session[520dffeb-48b6-6e75-74d3-efedab77eb43]529ad8ef-cbe7-1f46-a110-5acf30dd4ac5" [ 2261.613018] env[62519]: _type = "HttpNfcLease" [ 2261.613018] env[62519]: }. {{(pid=62519) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:467}} [ 2261.613579] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d3fa9a8-57f2-4be8-a312-a1f4bce7f1bf {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2261.620502] env[62519]: DEBUG oslo_vmware.rw_handles [None req-b935ed58-1ec0-455c-8671-47dd39a3b798 tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/523d4e0b-fa5d-a56c-17e0-8fec587a9b02/disk-0.vmdk from lease info. {{(pid=62519) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 2261.620674] env[62519]: DEBUG oslo_vmware.rw_handles [None req-b935ed58-1ec0-455c-8671-47dd39a3b798 tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] Creating HTTP connection to write to file with size = 31595520 and URL = https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/523d4e0b-fa5d-a56c-17e0-8fec587a9b02/disk-0.vmdk. {{(pid=62519) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2261.683792] env[62519]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-d8fa1448-bce2-43d1-9355-0cf739c34b6a {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2262.697058] env[62519]: DEBUG oslo_vmware.rw_handles [None req-b935ed58-1ec0-455c-8671-47dd39a3b798 tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] Completed reading data from the image iterator. {{(pid=62519) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2262.697058] env[62519]: DEBUG oslo_vmware.rw_handles [None req-b935ed58-1ec0-455c-8671-47dd39a3b798 tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/523d4e0b-fa5d-a56c-17e0-8fec587a9b02/disk-0.vmdk. 
{{(pid=62519) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 2262.697523] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5455b6cb-49d1-42cd-b9dc-63e8a1de7ef9 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2262.704508] env[62519]: DEBUG oslo_vmware.rw_handles [None req-b935ed58-1ec0-455c-8671-47dd39a3b798 tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/523d4e0b-fa5d-a56c-17e0-8fec587a9b02/disk-0.vmdk is in state: ready. {{(pid=62519) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 2262.704688] env[62519]: DEBUG oslo_vmware.rw_handles [None req-b935ed58-1ec0-455c-8671-47dd39a3b798 tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] Releasing lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/523d4e0b-fa5d-a56c-17e0-8fec587a9b02/disk-0.vmdk. {{(pid=62519) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:440}} [ 2262.704921] env[62519]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseComplete with opID=oslo.vmware-8a032580-8996-435c-9317-701fed54a8f6 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2262.924762] env[62519]: DEBUG oslo_vmware.rw_handles [None req-b935ed58-1ec0-455c-8671-47dd39a3b798 tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] Closed VMDK write handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/523d4e0b-fa5d-a56c-17e0-8fec587a9b02/disk-0.vmdk. 
{{(pid=62519) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:646}} [ 2262.925023] env[62519]: INFO nova.virt.vmwareapi.images [None req-b935ed58-1ec0-455c-8671-47dd39a3b798 tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] [instance: 62ce8eb2-a8be-47b6-9aa1-db19ac1abc7f] Downloaded image file data b7c22715-2f22-4f13-bc45-6e8b080fba01 [ 2262.925860] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b39d2ef3-3f3c-4dba-88fd-58ba5241294b {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2262.940889] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c46c09f3-3886-468f-9976-6e64425a1104 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2262.971864] env[62519]: INFO nova.virt.vmwareapi.images [None req-b935ed58-1ec0-455c-8671-47dd39a3b798 tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] [instance: 62ce8eb2-a8be-47b6-9aa1-db19ac1abc7f] The imported VM was unregistered [ 2262.974381] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-b935ed58-1ec0-455c-8671-47dd39a3b798 tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] [instance: 62ce8eb2-a8be-47b6-9aa1-db19ac1abc7f] Caching image {{(pid=62519) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2262.974656] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-b935ed58-1ec0-455c-8671-47dd39a3b798 tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] Creating directory with path [datastore1] devstack-image-cache_base/b7c22715-2f22-4f13-bc45-6e8b080fba01 {{(pid=62519) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2262.974951] env[62519]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ec418a46-63c6-495d-bbcf-dcc2923e123b {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2262.985292] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-b935ed58-1ec0-455c-8671-47dd39a3b798 tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] Created directory with path [datastore1] devstack-image-cache_base/b7c22715-2f22-4f13-bc45-6e8b080fba01 {{(pid=62519) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2262.985468] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-b935ed58-1ec0-455c-8671-47dd39a3b798 tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] Moving virtual disk from [datastore1] OSTACK_IMG_24029bf1-27a0-4ebb-a65a-24b0dc6ef8c7/OSTACK_IMG_24029bf1-27a0-4ebb-a65a-24b0dc6ef8c7.vmdk to [datastore1] devstack-image-cache_base/b7c22715-2f22-4f13-bc45-6e8b080fba01/b7c22715-2f22-4f13-bc45-6e8b080fba01.vmdk. 
{{(pid=62519) disk_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:263}} [ 2262.985689] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.MoveVirtualDisk_Task with opID=oslo.vmware-923ec3c9-34bc-4177-85fe-08d19a0a5c96 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2262.992725] env[62519]: DEBUG oslo_vmware.api [None req-b935ed58-1ec0-455c-8671-47dd39a3b798 tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] Waiting for the task: (returnval){ [ 2262.992725] env[62519]: value = "task-1803667" [ 2262.992725] env[62519]: _type = "Task" [ 2262.992725] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2263.001777] env[62519]: DEBUG oslo_vmware.api [None req-b935ed58-1ec0-455c-8671-47dd39a3b798 tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] Task: {'id': task-1803667, 'name': MoveVirtualDisk_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2263.503113] env[62519]: DEBUG oslo_vmware.api [None req-b935ed58-1ec0-455c-8671-47dd39a3b798 tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] Task: {'id': task-1803667, 'name': MoveVirtualDisk_Task} progress is 24%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2264.003799] env[62519]: DEBUG oslo_vmware.api [None req-b935ed58-1ec0-455c-8671-47dd39a3b798 tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] Task: {'id': task-1803667, 'name': MoveVirtualDisk_Task} progress is 49%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2264.504315] env[62519]: DEBUG oslo_vmware.api [None req-b935ed58-1ec0-455c-8671-47dd39a3b798 tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] Task: {'id': task-1803667, 'name': MoveVirtualDisk_Task} progress is 71%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2265.005026] env[62519]: DEBUG oslo_vmware.api [None req-b935ed58-1ec0-455c-8671-47dd39a3b798 tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] Task: {'id': task-1803667, 'name': MoveVirtualDisk_Task} progress is 97%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2265.505680] env[62519]: DEBUG oslo_vmware.api [None req-b935ed58-1ec0-455c-8671-47dd39a3b798 tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] Task: {'id': task-1803667, 'name': MoveVirtualDisk_Task, 'duration_secs': 2.131563} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2265.507043] env[62519]: INFO nova.virt.vmwareapi.ds_util [None req-b935ed58-1ec0-455c-8671-47dd39a3b798 tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] Moved virtual disk from [datastore1] OSTACK_IMG_24029bf1-27a0-4ebb-a65a-24b0dc6ef8c7/OSTACK_IMG_24029bf1-27a0-4ebb-a65a-24b0dc6ef8c7.vmdk to [datastore1] devstack-image-cache_base/b7c22715-2f22-4f13-bc45-6e8b080fba01/b7c22715-2f22-4f13-bc45-6e8b080fba01.vmdk. 
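The records around this point show the image-cache population step of the unshelve: the imported vApp disk is moved into devstack-image-cache_base with VirtualDiskManager.MoveVirtualDisk_Task, and the resulting task is polled until it reports completion. A hedged sketch of driving that same call directly through oslo.vmware follows; the host, credentials, datacenter reference and datastore paths are placeholders, and this is an illustration of the task pattern, not Nova's ds_util implementation.

# Sketch of the oslo.vmware task pattern visible above: invoke a vSphere
# VirtualDiskManager.MoveVirtualDisk_Task and poll it with wait_for_task.
# Host, credentials, datacenter ref and datastore paths are placeholders.
from oslo_vmware import api as vmware_api

session = vmware_api.VMwareAPISession(
    "vc.example.test",   # vCenter host (placeholder)
    "user", "password",  # credentials (placeholders)
    10,                  # api_retry_count
    0.5)                 # task_poll_interval, in seconds

disk_mgr = session.vim.service_content.virtualDiskManager
dc_ref = None  # a real ManagedObjectReference to the datacenter is required here

task = session.invoke_api(
    session.vim, "MoveVirtualDisk_Task", disk_mgr,
    sourceName="[datastore1] OSTACK_IMG_<uuid>/OSTACK_IMG_<uuid>.vmdk",
    sourceDatacenter=dc_ref,
    destName="[datastore1] devstack-image-cache_base/<image-id>/<image-id>.vmdk",
    destDatacenter=dc_ref,
    force=False)

# wait_for_task polls the task (logging "progress is N%" much like the
# records above) and raises on failure; it returns once the task succeeds.
session.wait_for_task(task)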
[ 2265.507043] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-b935ed58-1ec0-455c-8671-47dd39a3b798 tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] [instance: 62ce8eb2-a8be-47b6-9aa1-db19ac1abc7f] Cleaning up location [datastore1] OSTACK_IMG_24029bf1-27a0-4ebb-a65a-24b0dc6ef8c7 {{(pid=62519) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 2265.507043] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-b935ed58-1ec0-455c-8671-47dd39a3b798 tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] Deleting the datastore file [datastore1] OSTACK_IMG_24029bf1-27a0-4ebb-a65a-24b0dc6ef8c7 {{(pid=62519) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2265.507043] env[62519]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-7b52e0d9-fbe1-4ae4-8a98-62be60554476 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2265.513129] env[62519]: DEBUG oslo_vmware.api [None req-b935ed58-1ec0-455c-8671-47dd39a3b798 tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] Waiting for the task: (returnval){ [ 2265.513129] env[62519]: value = "task-1803668" [ 2265.513129] env[62519]: _type = "Task" [ 2265.513129] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2265.520444] env[62519]: DEBUG oslo_vmware.api [None req-b935ed58-1ec0-455c-8671-47dd39a3b798 tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] Task: {'id': task-1803668, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2266.022587] env[62519]: DEBUG oslo_vmware.api [None req-b935ed58-1ec0-455c-8671-47dd39a3b798 tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] Task: {'id': task-1803668, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.032963} completed successfully. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2266.022848] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-b935ed58-1ec0-455c-8671-47dd39a3b798 tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] Deleted the datastore file {{(pid=62519) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2266.023016] env[62519]: DEBUG oslo_concurrency.lockutils [None req-b935ed58-1ec0-455c-8671-47dd39a3b798 tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b7c22715-2f22-4f13-bc45-6e8b080fba01/b7c22715-2f22-4f13-bc45-6e8b080fba01.vmdk" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2266.023265] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-b935ed58-1ec0-455c-8671-47dd39a3b798 tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/b7c22715-2f22-4f13-bc45-6e8b080fba01/b7c22715-2f22-4f13-bc45-6e8b080fba01.vmdk to [datastore1] 62ce8eb2-a8be-47b6-9aa1-db19ac1abc7f/62ce8eb2-a8be-47b6-9aa1-db19ac1abc7f.vmdk {{(pid=62519) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2266.023512] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-b9371462-53c1-42e7-8ae6-29a6a49e6c35 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2266.030406] env[62519]: DEBUG oslo_vmware.api [None req-b935ed58-1ec0-455c-8671-47dd39a3b798 tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] Waiting for the task: (returnval){ [ 2266.030406] env[62519]: value = "task-1803669" [ 2266.030406] env[62519]: _type = "Task" [ 2266.030406] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2266.037427] env[62519]: DEBUG oslo_vmware.api [None req-b935ed58-1ec0-455c-8671-47dd39a3b798 tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] Task: {'id': task-1803669, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2266.541487] env[62519]: DEBUG oslo_vmware.api [None req-b935ed58-1ec0-455c-8671-47dd39a3b798 tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] Task: {'id': task-1803669, 'name': CopyVirtualDisk_Task} progress is 18%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2267.041656] env[62519]: DEBUG oslo_vmware.api [None req-b935ed58-1ec0-455c-8671-47dd39a3b798 tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] Task: {'id': task-1803669, 'name': CopyVirtualDisk_Task} progress is 43%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2267.543167] env[62519]: DEBUG oslo_vmware.api [None req-b935ed58-1ec0-455c-8671-47dd39a3b798 tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] Task: {'id': task-1803669, 'name': CopyVirtualDisk_Task} progress is 66%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2268.043465] env[62519]: DEBUG oslo_vmware.api [None req-b935ed58-1ec0-455c-8671-47dd39a3b798 tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] Task: {'id': task-1803669, 'name': CopyVirtualDisk_Task} progress is 91%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2268.545339] env[62519]: DEBUG oslo_vmware.api [None req-b935ed58-1ec0-455c-8671-47dd39a3b798 tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] Task: {'id': task-1803669, 'name': CopyVirtualDisk_Task, 'duration_secs': 2.18663} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2268.545494] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-b935ed58-1ec0-455c-8671-47dd39a3b798 tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/b7c22715-2f22-4f13-bc45-6e8b080fba01/b7c22715-2f22-4f13-bc45-6e8b080fba01.vmdk to [datastore1] 62ce8eb2-a8be-47b6-9aa1-db19ac1abc7f/62ce8eb2-a8be-47b6-9aa1-db19ac1abc7f.vmdk {{(pid=62519) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2268.546311] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a21da970-0a69-45ff-8fb9-7b935da7447b {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2268.567823] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-b935ed58-1ec0-455c-8671-47dd39a3b798 tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] [instance: 62ce8eb2-a8be-47b6-9aa1-db19ac1abc7f] Reconfiguring VM instance instance-0000007d to attach disk [datastore1] 62ce8eb2-a8be-47b6-9aa1-db19ac1abc7f/62ce8eb2-a8be-47b6-9aa1-db19ac1abc7f.vmdk or device None with type streamOptimized {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2268.568070] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-fc94bda7-d271-453d-9830-563f1649965e {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2268.586177] env[62519]: DEBUG oslo_vmware.api [None req-b935ed58-1ec0-455c-8671-47dd39a3b798 tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] Waiting for the task: (returnval){ [ 2268.586177] env[62519]: value = "task-1803670" [ 2268.586177] env[62519]: _type = "Task" [ 2268.586177] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2268.593378] env[62519]: DEBUG oslo_vmware.api [None req-b935ed58-1ec0-455c-8671-47dd39a3b798 tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] Task: {'id': task-1803670, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2269.096177] env[62519]: DEBUG oslo_vmware.api [None req-b935ed58-1ec0-455c-8671-47dd39a3b798 tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] Task: {'id': task-1803670, 'name': ReconfigVM_Task, 'duration_secs': 0.282832} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2269.096521] env[62519]: DEBUG nova.virt.vmwareapi.volumeops [None req-b935ed58-1ec0-455c-8671-47dd39a3b798 tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] [instance: 62ce8eb2-a8be-47b6-9aa1-db19ac1abc7f] Reconfigured VM instance instance-0000007d to attach disk [datastore1] 62ce8eb2-a8be-47b6-9aa1-db19ac1abc7f/62ce8eb2-a8be-47b6-9aa1-db19ac1abc7f.vmdk or device None with type streamOptimized {{(pid=62519) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2269.097117] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-feea83dd-3253-4572-8d89-9ecab569a005 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2269.103234] env[62519]: DEBUG oslo_vmware.api [None req-b935ed58-1ec0-455c-8671-47dd39a3b798 tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] Waiting for the task: (returnval){ [ 2269.103234] env[62519]: value = "task-1803671" [ 2269.103234] env[62519]: _type = "Task" [ 2269.103234] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2269.112075] env[62519]: DEBUG oslo_vmware.api [None req-b935ed58-1ec0-455c-8671-47dd39a3b798 tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] Task: {'id': task-1803671, 'name': Rename_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2269.613014] env[62519]: DEBUG oslo_vmware.api [None req-b935ed58-1ec0-455c-8671-47dd39a3b798 tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] Task: {'id': task-1803671, 'name': Rename_Task, 'duration_secs': 0.26017} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2269.613309] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-b935ed58-1ec0-455c-8671-47dd39a3b798 tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] [instance: 62ce8eb2-a8be-47b6-9aa1-db19ac1abc7f] Powering on the VM {{(pid=62519) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2269.613552] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-72f498dc-c3f4-40a9-93e5-c66f381108f6 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2269.619450] env[62519]: DEBUG oslo_vmware.api [None req-b935ed58-1ec0-455c-8671-47dd39a3b798 tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] Waiting for the task: (returnval){ [ 2269.619450] env[62519]: value = "task-1803672" [ 2269.619450] env[62519]: _type = "Task" [ 2269.619450] env[62519]: } to complete. 
{{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2269.632834] env[62519]: DEBUG oslo_vmware.api [None req-b935ed58-1ec0-455c-8671-47dd39a3b798 tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] Task: {'id': task-1803672, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2270.129523] env[62519]: DEBUG oslo_vmware.api [None req-b935ed58-1ec0-455c-8671-47dd39a3b798 tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] Task: {'id': task-1803672, 'name': PowerOnVM_Task, 'duration_secs': 0.483966} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2270.129910] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-b935ed58-1ec0-455c-8671-47dd39a3b798 tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] [instance: 62ce8eb2-a8be-47b6-9aa1-db19ac1abc7f] Powered on the VM {{(pid=62519) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2270.249961] env[62519]: DEBUG nova.compute.manager [None req-b935ed58-1ec0-455c-8671-47dd39a3b798 tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] [instance: 62ce8eb2-a8be-47b6-9aa1-db19ac1abc7f] Checking state {{(pid=62519) _get_power_state /opt/stack/nova/nova/compute/manager.py:1799}} [ 2270.250939] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29cfbc85-2523-48f4-8622-93a3969dbf4a {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2270.767577] env[62519]: DEBUG oslo_concurrency.lockutils [None req-b935ed58-1ec0-455c-8671-47dd39a3b798 tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] Lock "62ce8eb2-a8be-47b6-9aa1-db19ac1abc7f" "released" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: held 18.163s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2272.666650] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02f385ea-052f-4001-8e6a-f083c92a33d3 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2272.673890] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-af851c80-5405-4e54-a717-fc44a388d7d9 tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] [instance: 62ce8eb2-a8be-47b6-9aa1-db19ac1abc7f] Suspending the VM {{(pid=62519) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1162}} [ 2272.674155] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.SuspendVM_Task with opID=oslo.vmware-78e63220-0b9d-416c-aeb8-ff59c912721e {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2272.680433] env[62519]: DEBUG oslo_vmware.api [None req-af851c80-5405-4e54-a717-fc44a388d7d9 tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] Waiting for the task: (returnval){ [ 2272.680433] env[62519]: value = "task-1803673" [ 2272.680433] env[62519]: _type = "Task" [ 2272.680433] env[62519]: } to complete. 
{{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2272.688575] env[62519]: DEBUG oslo_vmware.api [None req-af851c80-5405-4e54-a717-fc44a388d7d9 tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] Task: {'id': task-1803673, 'name': SuspendVM_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2273.190864] env[62519]: DEBUG oslo_vmware.api [None req-af851c80-5405-4e54-a717-fc44a388d7d9 tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] Task: {'id': task-1803673, 'name': SuspendVM_Task} progress is 75%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2273.691091] env[62519]: DEBUG oslo_vmware.api [None req-af851c80-5405-4e54-a717-fc44a388d7d9 tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] Task: {'id': task-1803673, 'name': SuspendVM_Task, 'duration_secs': 0.564622} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2273.691429] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-af851c80-5405-4e54-a717-fc44a388d7d9 tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] [instance: 62ce8eb2-a8be-47b6-9aa1-db19ac1abc7f] Suspended the VM {{(pid=62519) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1166}} [ 2273.691520] env[62519]: DEBUG nova.compute.manager [None req-af851c80-5405-4e54-a717-fc44a388d7d9 tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] [instance: 62ce8eb2-a8be-47b6-9aa1-db19ac1abc7f] Checking state {{(pid=62519) _get_power_state /opt/stack/nova/nova/compute/manager.py:1799}} [ 2273.692325] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6124b721-c6e3-41df-8b9a-85253e3696b7 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2275.014279] env[62519]: INFO nova.compute.manager [None req-ae23a0b8-d654-40f2-961f-44c1e53cb1d6 tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] [instance: 62ce8eb2-a8be-47b6-9aa1-db19ac1abc7f] Resuming [ 2275.014925] env[62519]: DEBUG nova.objects.instance [None req-ae23a0b8-d654-40f2-961f-44c1e53cb1d6 tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] Lazy-loading 'flavor' on Instance uuid 62ce8eb2-a8be-47b6-9aa1-db19ac1abc7f {{(pid=62519) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2276.525641] env[62519]: DEBUG oslo_concurrency.lockutils [None req-ae23a0b8-d654-40f2-961f-44c1e53cb1d6 tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] Acquiring lock "refresh_cache-62ce8eb2-a8be-47b6-9aa1-db19ac1abc7f" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2276.526032] env[62519]: DEBUG oslo_concurrency.lockutils [None req-ae23a0b8-d654-40f2-961f-44c1e53cb1d6 tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] Acquired lock "refresh_cache-62ce8eb2-a8be-47b6-9aa1-db19ac1abc7f" {{(pid=62519) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2276.526032] env[62519]: DEBUG nova.network.neutron [None req-ae23a0b8-d654-40f2-961f-44c1e53cb1d6 tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] [instance: 62ce8eb2-a8be-47b6-9aa1-db19ac1abc7f] Building network info cache for instance {{(pid=62519) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2277.208077] env[62519]: DEBUG nova.network.neutron [None req-ae23a0b8-d654-40f2-961f-44c1e53cb1d6 tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] [instance: 62ce8eb2-a8be-47b6-9aa1-db19ac1abc7f] Updating instance_info_cache with network_info: [{"id": "7c4c269b-6217-44c2-bdbc-b65d2acbe55c", "address": "fa:16:3e:45:6f:e7", "network": {"id": "5d3d6d9a-2590-4b05-9c98-b532da472e0f", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-748470245-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4dedfa5c40474c0ea08368d61e1ff3a1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e238ac23-819b-452f-9015-52922e45efd3", "external-id": "nsx-vlan-transportzone-127", "segmentation_id": 127, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7c4c269b-62", "ovs_interfaceid": "7c4c269b-6217-44c2-bdbc-b65d2acbe55c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2277.711262] env[62519]: DEBUG oslo_concurrency.lockutils [None req-ae23a0b8-d654-40f2-961f-44c1e53cb1d6 tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] Releasing lock "refresh_cache-62ce8eb2-a8be-47b6-9aa1-db19ac1abc7f" {{(pid=62519) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2277.712281] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-630ba488-5aca-4774-8580-ca9278b878d2 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2277.719182] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-ae23a0b8-d654-40f2-961f-44c1e53cb1d6 tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] [instance: 62ce8eb2-a8be-47b6-9aa1-db19ac1abc7f] Resuming the VM {{(pid=62519) resume /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1183}} [ 2277.719398] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e4c0af5a-df44-40b2-944b-9b21c7b929b1 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2277.726256] env[62519]: DEBUG oslo_vmware.api [None req-ae23a0b8-d654-40f2-961f-44c1e53cb1d6 tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] Waiting for the task: (returnval){ [ 2277.726256] env[62519]: 
value = "task-1803674" [ 2277.726256] env[62519]: _type = "Task" [ 2277.726256] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2277.733999] env[62519]: DEBUG oslo_vmware.api [None req-ae23a0b8-d654-40f2-961f-44c1e53cb1d6 tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] Task: {'id': task-1803674, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2278.236964] env[62519]: DEBUG oslo_vmware.api [None req-ae23a0b8-d654-40f2-961f-44c1e53cb1d6 tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] Task: {'id': task-1803674, 'name': PowerOnVM_Task} progress is 100%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2278.736852] env[62519]: DEBUG oslo_vmware.api [None req-ae23a0b8-d654-40f2-961f-44c1e53cb1d6 tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] Task: {'id': task-1803674, 'name': PowerOnVM_Task, 'duration_secs': 0.529012} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2278.737228] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-ae23a0b8-d654-40f2-961f-44c1e53cb1d6 tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] [instance: 62ce8eb2-a8be-47b6-9aa1-db19ac1abc7f] Resumed the VM {{(pid=62519) resume /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1188}} [ 2278.737305] env[62519]: DEBUG nova.compute.manager [None req-ae23a0b8-d654-40f2-961f-44c1e53cb1d6 tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] [instance: 62ce8eb2-a8be-47b6-9aa1-db19ac1abc7f] Checking state {{(pid=62519) _get_power_state /opt/stack/nova/nova/compute/manager.py:1799}} [ 2278.738057] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-552eac1c-e74f-4643-a8eb-780e0774c68e {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2283.131060] env[62519]: DEBUG oslo_concurrency.lockutils [None req-746c57ad-5455-4689-9e6b-6f9940be577f tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] Acquiring lock "62ce8eb2-a8be-47b6-9aa1-db19ac1abc7f" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2283.131060] env[62519]: DEBUG oslo_concurrency.lockutils [None req-746c57ad-5455-4689-9e6b-6f9940be577f tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] Lock "62ce8eb2-a8be-47b6-9aa1-db19ac1abc7f" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2283.131528] env[62519]: DEBUG oslo_concurrency.lockutils [None req-746c57ad-5455-4689-9e6b-6f9940be577f tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] Acquiring lock "62ce8eb2-a8be-47b6-9aa1-db19ac1abc7f-events" by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2283.131528] env[62519]: DEBUG oslo_concurrency.lockutils [None req-746c57ad-5455-4689-9e6b-6f9940be577f tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] Lock "62ce8eb2-a8be-47b6-9aa1-db19ac1abc7f-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2283.131699] env[62519]: DEBUG oslo_concurrency.lockutils [None req-746c57ad-5455-4689-9e6b-6f9940be577f tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] Lock "62ce8eb2-a8be-47b6-9aa1-db19ac1abc7f-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2283.134209] env[62519]: INFO nova.compute.manager [None req-746c57ad-5455-4689-9e6b-6f9940be577f tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] [instance: 62ce8eb2-a8be-47b6-9aa1-db19ac1abc7f] Terminating instance [ 2283.638214] env[62519]: DEBUG nova.compute.manager [None req-746c57ad-5455-4689-9e6b-6f9940be577f tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] [instance: 62ce8eb2-a8be-47b6-9aa1-db19ac1abc7f] Start destroying the instance on the hypervisor. {{(pid=62519) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3166}} [ 2283.638461] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-746c57ad-5455-4689-9e6b-6f9940be577f tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] [instance: 62ce8eb2-a8be-47b6-9aa1-db19ac1abc7f] Destroying instance {{(pid=62519) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2283.639408] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b832ca82-48a4-4ebc-9e80-96156e2c343c {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2283.647202] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-746c57ad-5455-4689-9e6b-6f9940be577f tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] [instance: 62ce8eb2-a8be-47b6-9aa1-db19ac1abc7f] Powering off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2283.647442] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-fad34172-b111-4b17-b127-7ba1b504ad96 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2283.653922] env[62519]: DEBUG oslo_vmware.api [None req-746c57ad-5455-4689-9e6b-6f9940be577f tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] Waiting for the task: (returnval){ [ 2283.653922] env[62519]: value = "task-1803675" [ 2283.653922] env[62519]: _type = "Task" [ 2283.653922] env[62519]: } to complete. 
{{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2283.661672] env[62519]: DEBUG oslo_vmware.api [None req-746c57ad-5455-4689-9e6b-6f9940be577f tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] Task: {'id': task-1803675, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2284.164596] env[62519]: DEBUG oslo_vmware.api [None req-746c57ad-5455-4689-9e6b-6f9940be577f tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] Task: {'id': task-1803675, 'name': PowerOffVM_Task, 'duration_secs': 0.196002} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2284.164959] env[62519]: DEBUG nova.virt.vmwareapi.vm_util [None req-746c57ad-5455-4689-9e6b-6f9940be577f tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] [instance: 62ce8eb2-a8be-47b6-9aa1-db19ac1abc7f] Powered off the VM {{(pid=62519) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2284.164998] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-746c57ad-5455-4689-9e6b-6f9940be577f tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] [instance: 62ce8eb2-a8be-47b6-9aa1-db19ac1abc7f] Unregistering the VM {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2284.165277] env[62519]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-0acaccde-a287-4329-9966-caca2d650df5 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2284.268756] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-746c57ad-5455-4689-9e6b-6f9940be577f tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] [instance: 62ce8eb2-a8be-47b6-9aa1-db19ac1abc7f] Unregistered the VM {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2284.268985] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-746c57ad-5455-4689-9e6b-6f9940be577f tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] [instance: 62ce8eb2-a8be-47b6-9aa1-db19ac1abc7f] Deleting contents of the VM from datastore datastore1 {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2284.269191] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-746c57ad-5455-4689-9e6b-6f9940be577f tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] Deleting the datastore file [datastore1] 62ce8eb2-a8be-47b6-9aa1-db19ac1abc7f {{(pid=62519) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2284.269459] env[62519]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-33dcebc0-7d80-4d1f-adde-c39691eebaef {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2284.275784] env[62519]: DEBUG oslo_vmware.api [None req-746c57ad-5455-4689-9e6b-6f9940be577f tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] Waiting for the task: (returnval){ [ 2284.275784] env[62519]: value = "task-1803677" [ 2284.275784] env[62519]: _type = 
"Task" [ 2284.275784] env[62519]: } to complete. {{(pid=62519) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2284.283351] env[62519]: DEBUG oslo_vmware.api [None req-746c57ad-5455-4689-9e6b-6f9940be577f tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] Task: {'id': task-1803677, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2284.785945] env[62519]: DEBUG oslo_vmware.api [None req-746c57ad-5455-4689-9e6b-6f9940be577f tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] Task: {'id': task-1803677, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.134739} completed successfully. {{(pid=62519) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2284.786206] env[62519]: DEBUG nova.virt.vmwareapi.ds_util [None req-746c57ad-5455-4689-9e6b-6f9940be577f tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] Deleted the datastore file {{(pid=62519) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2284.786422] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-746c57ad-5455-4689-9e6b-6f9940be577f tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] [instance: 62ce8eb2-a8be-47b6-9aa1-db19ac1abc7f] Deleted contents of the VM from datastore datastore1 {{(pid=62519) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2284.786610] env[62519]: DEBUG nova.virt.vmwareapi.vmops [None req-746c57ad-5455-4689-9e6b-6f9940be577f tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] [instance: 62ce8eb2-a8be-47b6-9aa1-db19ac1abc7f] Instance destroyed {{(pid=62519) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2284.786783] env[62519]: INFO nova.compute.manager [None req-746c57ad-5455-4689-9e6b-6f9940be577f tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] [instance: 62ce8eb2-a8be-47b6-9aa1-db19ac1abc7f] Took 1.15 seconds to destroy the instance on the hypervisor. [ 2284.787030] env[62519]: DEBUG oslo.service.loopingcall [None req-746c57ad-5455-4689-9e6b-6f9940be577f tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62519) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2284.787228] env[62519]: DEBUG nova.compute.manager [-] [instance: 62ce8eb2-a8be-47b6-9aa1-db19ac1abc7f] Deallocating network for instance {{(pid=62519) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2297}} [ 2284.787326] env[62519]: DEBUG nova.network.neutron [-] [instance: 62ce8eb2-a8be-47b6-9aa1-db19ac1abc7f] deallocate_for_instance() {{(pid=62519) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2285.093742] env[62519]: DEBUG nova.compute.manager [req-33dc74db-e19d-4fea-b5e9-66a68d42d968 req-7d3af5ac-3de0-46d7-a439-cc1a086d9de8 service nova] [instance: 62ce8eb2-a8be-47b6-9aa1-db19ac1abc7f] Received event network-vif-deleted-7c4c269b-6217-44c2-bdbc-b65d2acbe55c {{(pid=62519) external_instance_event /opt/stack/nova/nova/compute/manager.py:11623}} [ 2285.093977] env[62519]: INFO nova.compute.manager [req-33dc74db-e19d-4fea-b5e9-66a68d42d968 req-7d3af5ac-3de0-46d7-a439-cc1a086d9de8 service nova] [instance: 62ce8eb2-a8be-47b6-9aa1-db19ac1abc7f] Neutron deleted interface 7c4c269b-6217-44c2-bdbc-b65d2acbe55c; detaching it from the instance and deleting it from the info cache [ 2285.094212] env[62519]: DEBUG nova.network.neutron [req-33dc74db-e19d-4fea-b5e9-66a68d42d968 req-7d3af5ac-3de0-46d7-a439-cc1a086d9de8 service nova] [instance: 62ce8eb2-a8be-47b6-9aa1-db19ac1abc7f] Updating instance_info_cache with network_info: [] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2285.550510] env[62519]: DEBUG nova.network.neutron [-] [instance: 62ce8eb2-a8be-47b6-9aa1-db19ac1abc7f] Updating instance_info_cache with network_info: [] {{(pid=62519) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2285.596245] env[62519]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-f613ed24-eafe-4d79-a5ea-3e8233af44a3 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2285.606888] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-000a2a60-4af7-4a0a-bba3-95d4b342bd37 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2285.631161] env[62519]: DEBUG nova.compute.manager [req-33dc74db-e19d-4fea-b5e9-66a68d42d968 req-7d3af5ac-3de0-46d7-a439-cc1a086d9de8 service nova] [instance: 62ce8eb2-a8be-47b6-9aa1-db19ac1abc7f] Detach interface failed, port_id=7c4c269b-6217-44c2-bdbc-b65d2acbe55c, reason: Instance 62ce8eb2-a8be-47b6-9aa1-db19ac1abc7f could not be found. {{(pid=62519) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11455}} [ 2286.054054] env[62519]: INFO nova.compute.manager [-] [instance: 62ce8eb2-a8be-47b6-9aa1-db19ac1abc7f] Took 1.27 seconds to deallocate network for instance. 
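The entries above trace a resume followed by a terminate driven entirely through vCenter task objects: PowerOnVM_Task, PowerOffVM_Task, UnregisterVM and DeleteDatastoreFile_Task are each invoked over SOAP and then polled by oslo.vmware's wait_for_task, which is what produces the "progress is 0% ... 100% ... completed successfully" lines. What follows is a minimal sketch of that invoke-then-poll pattern using only the public oslo.vmware session API; the host, credentials and the way vm_ref is obtained are placeholders, and this is not Nova's own code path.

# Minimal sketch (not Nova's code): invoke a vCenter *_Task method, then block
# on oslo.vmware's task poller, as the log does for PowerOffVM_Task and
# DeleteDatastoreFile_Task. Host, credentials and vm_ref are placeholders.
from oslo_vmware import api as vmware_api


def make_session():
    # Positional args: host, username, password, api_retry_count,
    # task_poll_interval in seconds -- all values here are placeholders.
    return vmware_api.VMwareAPISession(
        'vc.example.test', 'administrator', 'secret', 10, 0.5)


def power_off_and_unregister(session, vm_ref):
    # "Powering off the VM": the call returns a Task moref that must be polled.
    task_ref = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)
    session.wait_for_task(task_ref)  # same polling loop as _poll_task above

    # "Unregistering the VM": UnregisterVM completes synchronously, no task.
    session.invoke_api(session.vim, 'UnregisterVM', vm_ref)

In the log the same session object also serves the periodic PropertyCollector.RetrievePropertiesEx reads; the opID prefix "oslo.vmware-" on every request is added by the session layer, not by Nova.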
[ 2286.560271] env[62519]: DEBUG oslo_concurrency.lockutils [None req-746c57ad-5455-4689-9e6b-6f9940be577f tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2286.560614] env[62519]: DEBUG oslo_concurrency.lockutils [None req-746c57ad-5455-4689-9e6b-6f9940be577f tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2286.560755] env[62519]: DEBUG nova.objects.instance [None req-746c57ad-5455-4689-9e6b-6f9940be577f tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] Lazy-loading 'resources' on Instance uuid 62ce8eb2-a8be-47b6-9aa1-db19ac1abc7f {{(pid=62519) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2287.097411] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87800799-6da4-47d9-80f1-4ba12860455e {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2287.105580] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6a56216-f10f-4198-b9dc-4ebbf3f2aea4 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2287.134552] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c939289-61b6-475c-89b9-8f0814b5d1fe {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2287.141181] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e6e2f06-befb-402e-94a8-96d530f341c8 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2287.155540] env[62519]: DEBUG nova.compute.provider_tree [None req-746c57ad-5455-4689-9e6b-6f9940be577f tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] Inventory has not changed in ProviderTree for provider: f8ca0d98-9158-4b85-ae0e-b106f966dd44 {{(pid=62519) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2287.659154] env[62519]: DEBUG nova.scheduler.client.report [None req-746c57ad-5455-4689-9e6b-6f9940be577f tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] Inventory has not changed for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 159, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2288.164401] env[62519]: DEBUG oslo_concurrency.lockutils [None req-746c57ad-5455-4689-9e6b-6f9940be577f 
tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.604s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2288.182349] env[62519]: INFO nova.scheduler.client.report [None req-746c57ad-5455-4689-9e6b-6f9940be577f tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] Deleted allocations for instance 62ce8eb2-a8be-47b6-9aa1-db19ac1abc7f [ 2288.690392] env[62519]: DEBUG oslo_concurrency.lockutils [None req-746c57ad-5455-4689-9e6b-6f9940be577f tempest-ServersNegativeTestJSON-696369775 tempest-ServersNegativeTestJSON-696369775-project-member] Lock "62ce8eb2-a8be-47b6-9aa1-db19ac1abc7f" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.559s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2300.762200] env[62519]: DEBUG oslo_service.periodic_task [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62519) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2305.763055] env[62519]: DEBUG oslo_service.periodic_task [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62519) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2305.763055] env[62519]: DEBUG oslo_service.periodic_task [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62519) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2307.756592] env[62519]: DEBUG oslo_service.periodic_task [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62519) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2307.763065] env[62519]: DEBUG oslo_service.periodic_task [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Running periodic task ComputeManager.update_available_resource {{(pid=62519) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2308.266138] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2308.266538] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.001s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2308.266816] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62519) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2308.267091] env[62519]: DEBUG nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62519) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2308.268464] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5f50a2a-e4d3-437f-8033-bc0f40b560bb {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2308.280946] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97938225-0601-433c-a171-e8cf5c54d0ce {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2308.298119] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-785889e7-75c9-45bc-a3ff-8b80023d4aa2 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2308.304900] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-951b90ae-d928-403d-b917-bf6f0c8ba482 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2308.335468] env[62519]: DEBUG nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181352MB free_disk=159GB free_vcpus=48 pci_devices=None {{(pid=62519) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2308.335695] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2308.335851] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2309.356886] env[62519]: DEBUG nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=62519) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2309.357121] env[62519]: DEBUG nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=62519) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2309.372304] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-408b3a88-aa05-4c86-9c31-5d7d99d7ba74 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2309.379808] env[62519]: DEBUG 
oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a0e93c7-0ad8-4354-9c2b-418c435c2cc5 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2309.409211] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da008f01-3d8e-44c0-8b02-3af5dc1e367f {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2309.415968] env[62519]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01507e3f-f0eb-4704-8305-7a67079a9dc7 {{(pid=62519) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2309.429755] env[62519]: DEBUG nova.compute.provider_tree [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Inventory has not changed in ProviderTree for provider: f8ca0d98-9158-4b85-ae0e-b106f966dd44 {{(pid=62519) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2309.933082] env[62519]: DEBUG nova.scheduler.client.report [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Inventory has not changed for provider f8ca0d98-9158-4b85-ae0e-b106f966dd44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 159, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62519) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2310.438278] env[62519]: DEBUG nova.compute.resource_tracker [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62519) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2310.438622] env[62519]: DEBUG oslo_concurrency.lockutils [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.103s {{(pid=62519) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2311.438342] env[62519]: DEBUG oslo_service.periodic_task [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62519) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2311.438877] env[62519]: DEBUG oslo_service.periodic_task [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62519) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2311.438877] env[62519]: DEBUG nova.compute.manager [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=62519) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11037}} [ 2311.762058] env[62519]: DEBUG oslo_service.periodic_task [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62519) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2312.762398] env[62519]: DEBUG oslo_service.periodic_task [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62519) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2312.762782] env[62519]: DEBUG nova.compute.manager [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Starting heal instance info cache {{(pid=62519) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10418}} [ 2312.762782] env[62519]: DEBUG nova.compute.manager [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Rebuilding the list of instances to heal {{(pid=62519) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10422}} [ 2313.265517] env[62519]: DEBUG nova.compute.manager [None req-2e346fd1-4f93-4335-96df-208436219ecf None None] Didn't find any instances for network info cache update. {{(pid=62519) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10504}}
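The final stretch is the ComputeManager periodic-task cycle: oslo.service fires each ComputeManager._* task on its configured spacing, and update_available_resource serializes the resource-tracker work behind the "compute_resources" lock, which is what produces the Acquiring / acquired (waited) / released (held) triplets and the unchanged-inventory reports sent to placement. Below is a minimal sketch of how those two pieces fit together, assuming only that oslo.service and oslo.concurrency are installed; ToyManager and its method names are invented for illustration and are not Nova's classes.

# Minimal sketch (ToyManager is invented; only the oslo APIs are real):
# a periodic task whose body runs behind an in-process lock, producing the
# same "Acquiring lock ... / Lock ... acquired ... waited / released ... held"
# DEBUG triplets seen above.
from oslo_concurrency import lockutils
from oslo_config import cfg
from oslo_service import periodic_task

CONF = cfg.CONF


class ToyManager(periodic_task.PeriodicTasks):
    """Invented stand-in for a manager with one periodic task."""

    @periodic_task.periodic_task(spacing=60)
    def update_available_resource(self, context):
        # oslo.service logs "Running periodic task ..." before calling this.
        self._update(context)

    @lockutils.synchronized('compute_resources')
    def _update(self, context):
        # Runs while holding the in-process "compute_resources" lock;
        # lockutils emits the acquire/release DEBUG lines with timings.
        pass


# A service normally drives this from a timer; invoking it once by hand:
# mgr = ToyManager(CONF)
# mgr.run_periodic_tasks(context=None, raise_on_error=True)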